mypy-0.761/0000755€tŠÔÚ€2›s®0000000000013576752267016723 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/LICENSE0000644€tŠÔÚ€2›s®0000002605113576752246017731 0ustar jukkaDROPBOX\Domain Users00000000000000Mypy (and mypyc) are licensed under the terms of the MIT license, reproduced below. = = = = = The MIT License Copyright (c) 2015-2019 Jukka Lehtosalo and contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. = = = = = Portions of mypy and mypyc are licensed under different licenses. The files under stdlib-samples as well as the files mypyc/lib-rt/pythonsupport.h and mypyc/lib-rt/getargs.c are licensed under the PSF 2 License, reproduced below. = = = = = PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. 2. 
Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python. 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. 
By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 ------------------------------------------- BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the Individual or Organization ("Licensee") accessing and otherwise using this software in source or binary form and its associated documentation ("the Software"). 2. Subject to the terms and conditions of this BeOpen Python License Agreement, BeOpen hereby grants Licensee a non-exclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use the Software alone or in any derivative version, provided, however, that the BeOpen Python License is retained in the Software, alone or in any derivative version prepared by Licensee. 3. BeOpen is making the Software available to Licensee on an "AS IS" basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 5. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 6. This License Agreement shall be governed by and interpreted in all respects by the law of the State of California, excluding conflict of law provisions. 
Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between BeOpen and Licensee. This License Agreement does not grant permission to use BeOpen trademarks or trade names in a trademark sense to endorse or promote products or services of Licensee, or any third party. As an exception, the "BeOpen Python" logos available at http://www.pythonlabs.com/logos.html may be used according to the permissions granted on that web page. 7. By copying, installing or otherwise using the software, Licensee agrees to be bound by the terms and conditions of this License Agreement. CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 --------------------------------------- 1. This LICENSE AGREEMENT is between the Corporation for National Research Initiatives, having an office at 1895 Preston White Drive, Reston, VA 20191 ("CNRI"), and the Individual or Organization ("Licensee") accessing and otherwise using Python 1.6.1 software in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, CNRI hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 1.6.1 alone or in any derivative version, provided, however, that CNRI's License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) 1995-2001 Corporation for National Research Initiatives; All Rights Reserved" are retained in Python 1.6.1 alone or in any derivative version prepared by Licensee. Alternately, in lieu of CNRI's License Agreement, Licensee may substitute the following text (omitting the quotes): "Python 1.6.1 is made available subject to the terms and conditions in CNRI's License Agreement. This Agreement together with Python 1.6.1 may be located on the Internet using the following unique, persistent identifier (known as a handle): 1895.22/1013. 
This Agreement may also be obtained from a proxy server on the Internet using the following URL: http://hdl.handle.net/1895.22/1013". 3. In the event Licensee prepares a derivative work that is based on or incorporates Python 1.6.1 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python 1.6.1. 4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. This License Agreement shall be governed by the federal intellectual property law of the United States, including without limitation the federal copyright law, and, to the extent such U.S. federal law does not apply, by the law of the Commonwealth of Virginia, excluding Virginia's conflict of law provisions. Notwithstanding the foregoing, with regard to derivative works based on Python 1.6.1 that incorporate non-separable material that was previously distributed under the GNU General Public License (GPL), the law of the Commonwealth of Virginia shall govern this License Agreement only as to issues arising under or with respect to Paragraphs 4, 5, and 7 of this License Agreement. 
Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between CNRI and Licensee. This License Agreement does not grant permission to use CNRI trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By clicking on the "ACCEPT" button where indicated, or by copying, installing or otherwise using Python 1.6.1, Licensee agrees to be bound by the terms and conditions of this License Agreement. ACCEPT CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 -------------------------------------------------- Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, The Netherlands. All rights reserved. Permission to use, copy, modify, and distribute this software and its documentation for any purpose and without fee is hereby granted, provided that the above copyright notice appear in all copies and that both that copyright notice and this permission notice appear in supporting documentation, and that the name of Stichting Mathematisch Centrum or CWI not be used in advertising or publicity pertaining to distribution of the software without specific, written prior permission. STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
mypy-0.761/MANIFEST.in0000644€tŠÔÚ€2›s®0000000050013576752246020451 0ustar jukkaDROPBOX\Domain Users00000000000000recursive-include scripts * recursive-include test-data * recursive-include extensions * recursive-include docs * recursive-include mypy/typeshed *.py *.pyi recursive-include mypy/xml *.xsd *.xslt *.css recursive-include mypyc/lib-rt *.c *.h *.tmpl include mypy_bootstrap.ini include mypy_self_check.ini include LICENSE mypy-0.761/PKG-INFO0000644€tŠÔÚ€2›s®0000000234413576752267020023 0ustar jukkaDROPBOX\Domain Users00000000000000Metadata-Version: 2.1 Name: mypy Version: 0.761 Summary: Optional static typing for Python Home-page: http://www.mypy-lang.org/ Author: Jukka Lehtosalo Author-email: jukka.lehtosalo@iki.fi License: MIT License Description: Mypy -- Optional Static Typing for Python ========================================= Add type annotations to your Python programs, and use mypy to type check them. Mypy is essentially a Python linter on steroids, and it can catch many programming errors by analyzing your program, without actually having to run it. Mypy has a powerful type system with features such as type inference, gradual typing, generics and union types. 
Platform: UNKNOWN Classifier: Development Status :: 4 - Beta Classifier: Environment :: Console Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Topic :: Software Development Requires-Python: >=3.5 Provides-Extra: dmypy mypy-0.761/README.md0000644€tŠÔÚ€2›s®0000002322313576752246020201 0ustar jukkaDROPBOX\Domain Users00000000000000mypy logo Mypy: Optional Static Typing for Python ======================================= [![Build Status](https://api.travis-ci.org/python/mypy.svg?branch=master)](https://travis-ci.org/python/mypy) [![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/) Got a question? Join us on Gitter! ---------------------------------- We don't have a mailing list; but we are always happy to answer questions on [gitter chat](https://gitter.im/python/typing). If you are sure you've found a bug please search our issue trackers for a duplicate before filing a new issue: - [mypy tracker](https://github.com/python/mypy/issues) for mypy issues - [typeshed tracker](https://github.com/python/typeshed/issues) for issues with specific modules - [typing tracker](https://github.com/python/typing/issues) for discussion of new type system features (PEP 484 changes) and runtime bugs in the typing module What is mypy? ------------- Mypy is an optional static type checker for Python. You can add type hints ([PEP 484](https://www.python.org/dev/peps/pep-0484/)) to your Python programs, and use mypy to type check them statically. 
Find bugs in your programs without even running them! You can mix dynamic and static typing in your programs. You can always fall back to dynamic typing when static typing is not convenient, such as for legacy code. Here is a small example to whet your appetite (Python 3): ```python from typing import Iterator def fib(n: int) -> Iterator[int]: a, b = 0, 1 while a < n: yield a a, b = b, a + b ``` See [the documentation](http://mypy.readthedocs.io/en/stable/introduction.html) for more examples. For Python 2.7, the standard annotations are written as comments: ```python def is_palindrome(s): # type: (str) -> bool return s == s[::-1] ``` See [the documentation for Python 2 support](http://mypy.readthedocs.io/en/latest/python2.html). Mypy is in development; some features are missing and there are bugs. See 'Development status' below. Requirements ------------ You need Python 3.5 or later to run mypy. You can have multiple Python versions (2.x and 3.x) installed on the same system without problems. In Ubuntu, Mint and Debian you can install Python 3 like this: $ sudo apt-get install python3 python3-pip For other Linux flavors, macOS and Windows, packages are available at http://www.python.org/getit/ Quick start ----------- Mypy can be installed using pip: $ python3 -m pip install -U mypy If you want to run the latest version of the code, you can install from git: $ python3 -m pip install -U git+git://github.com/python/mypy.git Now, if Python on your system is configured properly (else see "Troubleshooting" below), you can type-check the [statically typed parts] of a program like this: $ mypy PROGRAM You can always use a Python interpreter to run your statically typed programs, even if they have type errors: $ python3 PROGRAM You can also try mypy in an [online playground](https://mypy-play.net/) (developed by Yusuke Miyazaki). 
[statically typed parts]: https://mypy.readthedocs.io/en/latest/getting_started.html#function-signatures-and-dynamic-vs-static-typing IDE, Linter Integrations, and Pre-commit ---------------------------------------- Mypy can be integrated into popular IDEs: * Vim: * Using [Syntastic](https://github.com/vim-syntastic/syntastic): in `~/.vimrc` add `let g:syntastic_python_checkers=['mypy']` * Using [ALE](https://github.com/dense-analysis/ale): should be enabled by default when `mypy` is installed, or can be explicitly enabled by adding `let b:ale_linters = ['mypy']` in `~/vim/ftplugin/python.vim` * Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy) * Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy) * Atom: [linter-mypy](https://atom.io/packages/linter-mypy) * PyCharm: [mypy plugin](https://github.com/dropbox/mypy-PyCharm-plugin) (PyCharm integrates [its own implementation of PEP 484](https://www.jetbrains.com/help/pycharm/type-hinting-in-product.html)) * VS Code: provides [basic integration](https://code.visualstudio.com/docs/python/linting#_mypy) with mypy. Mypy can also be integrated into [Flake8] using [flake8-mypy], or can be set up as a pre-commit hook using [pre-commit mirrors-mypy]. [Flake8]: http://flake8.pycqa.org/ [flake8-mypy]: https://github.com/ambv/flake8-mypy [pre-commit mirrors-mypy]: https://github.com/pre-commit/mirrors-mypy Web site and documentation -------------------------- Documentation and additional information is available at the web site: http://www.mypy-lang.org/ Or you can jump straight to the documentation: http://mypy.readthedocs.io/ Troubleshooting --------------- Depending on your configuration, you may have to run `pip` like this: $ python3 -m pip install -U mypy This should automatically install the appropriate version of mypy's parser, typed-ast. 
If for some reason it does not, you can install it manually: $ python3 -m pip install -U typed-ast If the `mypy` command isn't found after installation: After `python3 -m pip install`, the `mypy` script and dependencies, including the `typing` module, will be installed to system-dependent locations. Sometimes the script directory will not be in `PATH`, and you have to add the target directory to `PATH` manually or create a symbolic link to the script. In particular, on macOS, the script may be installed under `/Library/Frameworks`: /Library/Frameworks/Python.framework/Versions//bin In Windows, the script is generally installed in `\PythonNN\Scripts`. So, type check a program like this (replace `\Python34` with your Python installation path): C:\>\Python34\python \Python34\Scripts\mypy PROGRAM ### Working with `virtualenv` If you are using [`virtualenv`](https://virtualenv.pypa.io/en/stable/), make sure you are running a python3 environment. Installing via `pip3` in a v2 environment will not configure the environment to run installed modules from the command line. $ python3 -m pip install -U virtualenv $ python3 -m virtualenv env Quick start for contributing to mypy ------------------------------------ If you want to contribute, first clone the mypy git repository: $ git clone --recurse-submodules https://github.com/python/mypy.git If you've already cloned the repo without `--recurse-submodules`, you need to pull in the typeshed repo as follows: $ git submodule init $ git submodule update Either way you should now have a subdirectory `typeshed` inside your mypy repo, your folders tree should be like `mypy/mypy/typeshed`, containing a clone of the typeshed repo (`https://github.com/python/typeshed`). From the mypy directory, use pip to install mypy: $ cd mypy $ python3 -m pip install -U . Replace `python3` with your Python 3 interpreter. You may have to do the above as root. For example, in Ubuntu: $ sudo python3 -m pip install -U . 
Now you can use the `mypy` program just as above. In case of trouble see "Troubleshooting" above. Working with the git version of mypy ------------------------------------ mypy contains a submodule, "typeshed". See http://github.com/python/typeshed. This submodule contains types for the Python standard library. Due to the way git submodules work, you'll have to do ``` git submodule update mypy/typeshed ``` whenever you change branches, merge, rebase, or pull. (It's possible to automate this: Search Google for "git hook update submodule") Tests ----- The basic way to run tests: $ pip3 install -r test-requirements.txt $ python2 -m pip install -U typing $ ./runtests.py For more on the tests, see [Test README.md](test-data/unit/README.md) Development status ------------------ Mypy is beta software, but it has already been used in production for several years at Dropbox, and it has an extensive test suite. See [the roadmap](ROADMAP.md) if you are interested in plans for the future. Changelog --------- Follow mypy's updates on the blog: http://mypy-lang.blogspot.com/ Issue tracker ------------- Please report any bugs and enhancement ideas using the mypy issue tracker: https://github.com/python/mypy/issues If you have any questions about using mypy or types, please ask in the typing gitter instead: https://gitter.im/python/typing Compiled version of mypy ------------------------ We have built a compiled version of mypy using the [mypyc compiler](https://github.com/python/mypy/tree/master/mypyc) for mypy-annotated Python code. It is approximately 4 times faster than interpreted mypy and is available (and the default) for 64-bit Windows, macOS, and Linux. To install an interpreted mypy instead, use: $ python3 -m pip install --no-binary mypy -U mypy If you wish to test out the compiled version of a development version of mypy, you can directly install a binary from https://github.com/mypyc/mypy_mypyc-wheels/releases/latest. 
Help wanted ----------- Any help in testing, development, documentation and other tasks is highly appreciated and useful to the project. There are tasks for contributors of all experience levels. If you're just getting started, ask on the [gitter chat](https://gitter.im/python/typing) for ideas of good beginner issues. For more details, see the file [CONTRIBUTING.md](CONTRIBUTING.md). License ------- Mypy is licensed under the terms of the MIT License (see the file LICENSE). mypy-0.761/docs/0000755€tŠÔÚ€2›s®0000000000013576752266017652 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/docs/Makefile0000644€tŠÔÚ€2›s®0000001515313576752246021315 0ustar jukkaDROPBOX\Domain Users00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. 
The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Mypy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Mypy.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Mypy" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Mypy" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 
latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
mypy-0.761/docs/README.md0000644€tŠÔÚ€2›s®0000000207113576752246021127 0ustar jukkaDROPBOX\Domain Users00000000000000Mypy Documentation ================== What's this? ------------ This directory contains the source code for Mypy documentation (under `source/`) and build scripts. The documentation uses Sphinx and reStructuredText. We use `sphinx-rtd-theme` as the documentation theme. Building the documentation -------------------------- Install Sphinx and other dependencies (i.e. theme) needed for the documentation. From the `docs` directory, use `pip`: ``` $ pip install -r requirements-docs.txt ``` Build the documentation like this: ``` $ make html ``` The built documentation will be placed in the `docs/build` directory. Open `docs/build/index.html` to view the documentation. Helpful documentation build commands ------------------------------------ Clean the documentation build: ``` $ make clean ``` Test and check the links found in the documentation: ``` $ make linkcheck ``` Documentation on Read The Docs ------------------------------ The mypy documentation is hosted on Read The Docs, and the latest version can be found at https://mypy.readthedocs.io/en/latest. mypy-0.761/docs/make.bat0000755€tŠÔÚ€2›s®0000001506213576752246021264 0ustar jukkaDROPBOX\Domain Users00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source set I18NSPHINXOPTS=%SPHINXOPTS% source if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. 
htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. 
goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Mypy.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Mypy.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. 
goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
goto end ) :end mypy-0.761/docs/requirements-docs.txt0000644€tŠÔÚ€2›s®0000000005213576752246024057 0ustar jukkaDROPBOX\Domain Users00000000000000Sphinx >= 1.4.4 sphinx-rtd-theme >= 0.1.9 mypy-0.761/docs/source/0000755€tŠÔÚ€2›s®0000000000013576752266021152 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/docs/source/additional_features.rst0000644€tŠÔÚ€2›s®0000003706713576752246025725 0ustar jukkaDROPBOX\Domain Users00000000000000Additional features ------------------- This section discusses various features that did not fit in naturally in one of the previous sections. .. _dataclasses_support: Dataclasses *********** In Python 3.7, a new :py:mod:`dataclasses` module has been added to the standard library. This module allows defining and customizing simple boilerplate-free classes. They can be defined using the :py:func:`@dataclasses.dataclass ` decorator: .. code-block:: python from dataclasses import dataclass, field @dataclass class Application: name: str plugins: List[str] = field(default_factory=list) test = Application("Testing...") # OK bad = Application("Testing...", "with plugin") # Error: List[str] expected Mypy will detect special methods (such as :py:meth:`__lt__ `) depending on the flags used to define dataclasses. For example: .. code-block:: python from dataclasses import dataclass @dataclass(order=True) class OrderedPoint: x: int y: int @dataclass(order=False) class UnorderedPoint: x: int y: int OrderedPoint(1, 2) < OrderedPoint(3, 4) # OK UnorderedPoint(1, 2) < UnorderedPoint(3, 4) # Error: Unsupported operand types Dataclasses can be generic and can be used in any other way a normal class can be used: .. code-block:: python from dataclasses import dataclass from typing import Generic, TypeVar T = TypeVar('T') @dataclass class BoxedData(Generic[T]): data: T label: str def unbox(bd: BoxedData[T]) -> T: ... val = unbox(BoxedData(42, "")) # OK, inferred type is int For more information see :doc:`official docs ` and :pep:`557`. 
Caveats/Known Issues ==================== Some functions in the :py:mod:`dataclasses` module, such as :py:func:`~dataclasses.replace` and :py:func:`~dataclasses.asdict`, have imprecise (too permissive) types. This will be fixed in future releases. Mypy does not yet recognize aliases of :py:func:`dataclasses.dataclass `, and will probably never recognize dynamically computed decorators. The following examples do **not** work: .. code-block:: python from dataclasses import dataclass dataclass_alias = dataclass def dataclass_wrapper(cls): return dataclass(cls) @dataclass_alias class AliasDecorated: """ Mypy doesn't recognize this as a dataclass because it is decorated by an alias of `dataclass` rather than by `dataclass` itself. """ attribute: int @dataclass_wrapper class DynamicallyDecorated: """ Mypy doesn't recognize this as a dataclass because it is decorated by a function returning `dataclass` rather than by `dataclass` itself. """ attribute: int AliasDecorated(attribute=1) # error: Unexpected keyword argument DynamicallyDecorated(attribute=1) # error: Unexpected keyword argument .. _attrs_package: The attrs package ***************** :doc:`attrs ` is a package that lets you define classes without writing boilerplate code. Mypy can detect uses of the package and will generate the necessary method definitions for decorated classes using the type annotations it finds. Type annotations can be added as follows: .. code-block:: python import attr @attr.s class A: one: int = attr.ib() # Variable annotation (Python 3.6+) two = attr.ib() # type: int # Type comment three = attr.ib(type=int) # type= argument If you're using ``auto_attribs=True`` you must use variable annotations. .. code-block:: python import attr @attr.s(auto_attribs=True) class A: one: int two: int = 7 three: int = attr.ib(8) Typeshed has a couple of "white lie" annotations to make type checking easier. 
:py:func:`attr.ib` and :py:class:`attr.Factory` actually return objects, but the annotation says these return the types that they expect to be assigned to. That enables this to work: .. code-block:: python import attr from typing import Dict @attr.s(auto_attribs=True) class A: one: int = attr.ib(8) two: Dict[str, str] = attr.Factory(dict) bad: str = attr.ib(16) # Error: can't assign int to str Caveats/Known Issues ==================== * The detection of attr classes and attributes works by function name only. This means that if you have your own helper functions that, for example, ``return attr.ib()`` mypy will not see them. * All boolean arguments that mypy cares about must be literal ``True`` or ``False``. e.g the following will not work: .. code-block:: python import attr YES = True @attr.s(init=YES) class A: ... * Currently, ``converter`` only supports named functions. If mypy finds something else it will complain about not understanding the argument and the type annotation in :py:meth:`__init__ ` will be replaced by ``Any``. * :ref:`Validator decorators ` and `default decorators `_ are not type-checked against the attribute they are setting/validating. * Method definitions added by mypy currently overwrite any existing method definitions. .. _remote-cache: Using a remote cache to speed up mypy runs ****************************************** Mypy performs type checking *incrementally*, reusing results from previous runs to speed up successive runs. If you are type checking a large codebase, mypy can still be sometimes slower than desirable. For example, if you create a new branch based on a much more recent commit than the target of the previous mypy run, mypy may have to process almost every file, as a large fraction of source files may have changed. This can also happen after you've rebased a local branch. Mypy supports using a *remote cache* to improve performance in cases such as the above. 
In a large codebase, remote caching can sometimes speed up mypy runs by a factor of 10, or more. Mypy doesn't include all components needed to set this up -- generally you will have to perform some simple integration with your Continuous Integration (CI) or build system to configure mypy to use a remote cache. This discussion assumes you have a CI system set up for the mypy build you want to speed up, and that you are using a central git repository. Generalizing to different environments should not be difficult. Here are the main components needed: * A shared repository for storing mypy cache files for all landed commits. * CI build that uploads mypy incremental cache files to the shared repository for each commit for which the CI build runs. * A wrapper script around mypy that developers use to run mypy with remote caching enabled. Below we discuss each of these components in some detail. Shared repository for cache files ================================= You need a repository that allows you to upload mypy cache files from your CI build and make the cache files available for download based on a commit id. A simple approach would be to produce an archive of the ``.mypy_cache`` directory (which contains the mypy cache data) as a downloadable *build artifact* from your CI build (depending on the capabilities of your CI system). Alternatively, you could upload the data to a web server or to S3, for example. Continuous Integration build ============================ The CI build would run a regular mypy build and create an archive containing the ``.mypy_cache`` directory produced by the build. Finally, it will produce the cache as a build artifact or upload it to a repository where it is accessible by the mypy wrapper script. Your CI script might work like this: * Run mypy normally. This will generate cache data under the ``.mypy_cache`` directory. * Create a tarball from the ``.mypy_cache`` directory. 
* Determine the current git master branch commit id (say, using ``git rev-parse HEAD``). * Upload the tarball to the shared repository with a name derived from the commit id. Mypy wrapper script =================== The wrapper script is used by developers to run mypy locally during development instead of invoking mypy directly. The wrapper first populates the local ``.mypy_cache`` directory from the shared repository and then runs a normal incremental build. The wrapper script needs some logic to determine the most recent central repository commit (by convention, the ``origin/master`` branch for git) the local development branch is based on. In a typical git setup you can do it like this: .. code:: git merge-base HEAD origin/master The next step is to download the cache data (contents of the ``.mypy_cache`` directory) from the shared repository based on the commit id of the merge base produced by the git command above. The script will decompress the data so that mypy will start with a fresh ``.mypy_cache``. Finally, the script runs mypy normally. And that's all! Caching with mypy daemon ======================== You can also use remote caching with the :ref:`mypy daemon `. The remote cache will significantly speed up the first ``dmypy check`` run after starting or restarting the daemon. The mypy daemon requires extra fine-grained dependency data in the cache files which aren't included by default. To use caching with the mypy daemon, use the :option:`--cache-fine-grained ` option in your CI build:: $ mypy --cache-fine-grained This flag adds extra information for the daemon to the cache. In order to use this extra information, you will also need to use the ``--use-fine-grained-cache`` option with ``dmypy start`` or ``dmypy restart``. Example:: $ dmypy start -- --use-fine-grained-cache Now your first ``dmypy check`` run should be much faster, as it can use cache information to avoid processing the whole program. 
Refinements =========== There are several optional refinements that may improve things further, at least if your codebase is hundreds of thousands of lines or more: * If the wrapper script determines that the merge base hasn't changed from a previous run, there's no need to download the cache data and it's better to instead reuse the existing local cache data. * If you use the mypy daemon, you may want to restart the daemon each time after the merge base or local branch has changed to avoid processing a potentially large number of changes in an incremental build, as this can be much slower than downloading cache data and restarting the daemon. * If the current local branch is based on a very recent master commit, the remote cache data may not yet be available for that commit, as there will necessarily be some latency to build the cache files. It may be a good idea to look for cache data for, say, the 5 latest master commits and use the most recent data that is available. * If the remote cache is not accessible for some reason (say, from a public network), the script can still fall back to a normal incremental build. * You can have multiple local cache directories for different local branches using the :option:`--cache-dir ` option. If the user switches to an existing branch where downloaded cache data is already available, you can continue to use the existing cache data instead of redownloading the data. * You can set up your CI build to use a remote cache to speed up the CI build. This would be particularly useful if each CI build starts from a fresh state without access to cache files from previous builds. It's still recommended to run a full, non-incremental mypy build to create the cache data, as repeatedly updating cache data incrementally could result in drift over a long time period (due to a mypy caching issue, perhaps). .. _extended_callable: Extended Callable types *********************** .. note:: This feature is deprecated. 
You can use :ref:`callback protocols ` as a replacement. As an experimental mypy extension, you can specify :py:data:`~typing.Callable` types that support keyword arguments, optional arguments, and more. When you specify the arguments of a :py:data:`~typing.Callable`, you can choose to supply just the type of a nameless positional argument, or an "argument specifier" representing a more complicated form of argument. This allows one to more closely emulate the full range of possibilities given by the ``def`` statement in Python. As an example, here's a complicated function definition and the corresponding :py:data:`~typing.Callable`: .. code-block:: python from typing import Callable from mypy_extensions import (Arg, DefaultArg, NamedArg, DefaultNamedArg, VarArg, KwArg) def func(__a: int, # This convention is for nameless arguments b: int, c: int = 0, *args: int, d: int, e: int = 0, **kwargs: int) -> int: ... F = Callable[[int, # Or Arg(int) Arg(int, 'b'), DefaultArg(int, 'c'), VarArg(int), NamedArg(int, 'd'), DefaultNamedArg(int, 'e'), KwArg(int)], int] f: F = func Argument specifiers are special function calls that can specify the following aspects of an argument: - its type (the only thing that the basic format supports) - its name (if it has one) - whether it may be omitted - whether it may or must be passed using a keyword - whether it is a ``*args`` argument (representing the remaining positional arguments) - whether it is a ``**kwargs`` argument (representing the remaining keyword arguments) The following functions are available in ``mypy_extensions`` for this purpose: .. code-block:: python def Arg(type=Any, name=None): # A normal, mandatory, positional argument. # If the name is specified it may be passed as a keyword. def DefaultArg(type=Any, name=None): # An optional positional argument (i.e. with a default value). # If the name is specified it may be passed as a keyword. def NamedArg(type=Any, name=None): # A mandatory keyword-only argument. 
def DefaultNamedArg(type=Any, name=None): # An optional keyword-only argument (i.e. with a default value). def VarArg(type=Any): # A *args-style variadic positional argument. # A single VarArg() specifier represents all remaining # positional arguments. def KwArg(type=Any): # A **kwargs-style variadic keyword argument. # A single KwArg() specifier represents all remaining # keyword arguments. In all cases, the ``type`` argument defaults to ``Any``, and if the ``name`` argument is omitted the argument has no name (the name is required for ``NamedArg`` and ``DefaultNamedArg``). A basic :py:data:`~typing.Callable` such as .. code-block:: python MyFunc = Callable[[int, str, int], float] is equivalent to the following: .. code-block:: python MyFunc = Callable[[Arg(int), Arg(str), Arg(int)], float] A :py:data:`~typing.Callable` with unspecified argument types, such as .. code-block:: python MyOtherFunc = Callable[..., int] is (roughly) equivalent to .. code-block:: python MyOtherFunc = Callable[[VarArg(), KwArg()], int] .. note:: Each of the functions above currently just returns its ``type`` argument at runtime, so the information contained in the argument specifiers is not available at runtime. This limitation is necessary for backwards compatibility with the existing ``typing.py`` module as present in the Python 3.5+ standard library and distributed via PyPI. 
mypy-0.761/docs/source/builtin_types.rst0000644€tŠÔÚ€2›s®0000000401113576752246024570 0ustar jukkaDROPBOX\Domain Users00000000000000Built-in types ============== These are examples of some of the most common built-in types: ====================== =============================== Type Description ====================== =============================== ``int`` integer ``float`` floating point number ``bool`` boolean value ``str`` string (unicode) ``bytes`` 8-bit string ``object`` an arbitrary object (``object`` is the common base class) ``List[str]`` list of ``str`` objects ``Tuple[int, int]`` tuple of two ``int`` objects (``Tuple[()]`` is the empty tuple) ``Tuple[int, ...]`` tuple of an arbitrary number of ``int`` objects ``Dict[str, int]`` dictionary from ``str`` keys to ``int`` values ``Iterable[int]`` iterable object containing ints ``Sequence[bool]`` sequence of booleans (read-only) ``Mapping[str, int]`` mapping from ``str`` keys to ``int`` values (read-only) ``Any`` dynamically typed value with an arbitrary type ====================== =============================== The type ``Any`` and type constructors such as ``List``, ``Dict``, ``Iterable`` and ``Sequence`` are defined in the :py:mod:`typing` module. The type ``Dict`` is a *generic* class, signified by type arguments within ``[...]``. For example, ``Dict[int, str]`` is a dictionary from integers to strings and ``Dict[Any, Any]`` is a dictionary of dynamically typed (arbitrary) values and keys. ``List`` is another generic class. ``Dict`` and ``List`` are aliases for the built-ins ``dict`` and ``list``, respectively. ``Iterable``, ``Sequence``, and ``Mapping`` are generic types that correspond to Python protocols. For example, a ``str`` object or a ``List[str]`` object is valid when ``Iterable[str]`` or ``Sequence[str]`` is expected. 
Note that even though they are similar to abstract base classes defined in :py:mod:`collections.abc` (formerly ``collections``), they are not identical, since the built-in collection type objects do not support indexing. mypy-0.761/docs/source/casts.rst0000644€tŠÔÚ€2›s®0000000305713576752246023024 0ustar jukkaDROPBOX\Domain Users00000000000000.. _casts: Casts and type assertions ========================= Mypy supports type casts that are usually used to coerce a statically typed value to a subtype. Unlike languages such as Java or C#, however, mypy casts are only used as hints for the type checker, and they don't perform a runtime type check. Use the function :py:func:`~typing.cast` to perform a cast: .. code-block:: python from typing import cast, List o: object = [1] x = cast(List[int], o) # OK y = cast(List[str], o) # OK (cast performs no actual runtime check) To support runtime checking of casts such as the above, we'd have to check the types of all list items, which would be very inefficient for large lists. Casts are used to silence spurious type checker warnings and give the type checker a little help when it can't quite understand what is going on. .. note:: You can use an assertion if you want to perform an actual runtime check: .. code-block:: python def foo(o: object) -> None: print(o + 5) # Error: can't add 'object' and 'int' assert isinstance(o, int) print(o + 5) # OK: type of 'o' is 'int' here You don't need a cast for expressions with type ``Any``, or when assigning to a variable with type ``Any``, as was explained earlier. You can also use ``Any`` as the cast target type -- this lets you perform any operations on the result. For example: .. code-block:: python from typing import cast, Any x = 1 x.whatever() # Type check error y = cast(Any, x) y.whatever() # Type check OK (runtime error) mypy-0.761/docs/source/cheat_sheet.rst0000644€tŠÔÚ€2›s®0000002025413576752246024161 0ustar jukkaDROPBOX\Domain Users00000000000000.. 
_cheat-sheet-py2: Type hints cheat sheet (Python 2) ================================= This document is a quick cheat sheet showing how the :pep:`484` type language represents various common types in Python 2. .. note:: Technically many of the type annotations shown below are redundant, because mypy can derive them from the type of the expression. So many of the examples have a dual purpose: show how to write the annotation, and show the inferred types. Built-in types ************** .. code-block:: python from typing import List, Set, Dict, Tuple, Text, Optional # For simple built-in types, just use the name of the type x = 1 # type: int x = 1.0 # type: float x = True # type: bool x = "test" # type: str x = u"test" # type: unicode # For collections, the name of the type is capitalized, and the # name of the type inside the collection is in brackets x = [1] # type: List[int] x = {6, 7} # type: Set[int] # For mappings, we need the types of both keys and values x = {'field': 2.0} # type: Dict[str, float] # For tuples, we specify the types of all the elements x = (3, "yes", 7.5) # type: Tuple[int, str, float] # For textual data, use Text # ("Text" means "unicode" in Python 2 and "str" in Python 3) x = [u"one", u"two"] # type: List[Text] # Use Optional[] for values that could be None x = some_function() # type: Optional[str] # Mypy understands a value can't be None in an if-statement if x is not None: print x.upper() # If a value can never be None due to some invariants, use an assert assert x is not None print x.upper() Functions ********* .. code-block:: python from typing import Callable, Iterator, Union, Optional, List # This is how you annotate a function definition def stringify(num): # type: (int) -> str """Your function docstring goes here after the type definition.""" return str(num) # This function has no parameters and also returns nothing. Annotations # can also be placed on the same line as their function headers. 
def greet_world(): # type: () -> None print "Hello, world!" # And here's how you specify multiple arguments def plus(num1, num2): # type: (int, int) -> int return num1 + num2 # Add type annotations for arguments with default values as though they # had no defaults def f(num1, my_float=3.5): # type: (int, float) -> float return num1 + my_float # An argument can be declared positional-only by giving it a name # starting with two underscores def quux(__x): # type: (int) -> None pass quux(3) # Fine quux(__x=3) # Error # This is how you annotate a callable (function) value x = f # type: Callable[[int, float], float] # A generator function that yields ints is secretly just a function that # returns an iterator of ints, so that's how we annotate it def g(n): # type: (int) -> Iterator[int] i = 0 while i < n: yield i i += 1 # There's an alternative syntax for functions with many arguments def send_email(address, # type: Union[str, List[str]] sender, # type: str cc, # type: Optional[List[str]] bcc, # type: Optional[List[str]] subject='', body=None # type: List[str] ): # type: (...) -> bool When you're puzzled or when things are complicated ************************************************** .. code-block:: python from typing import Union, Any, List, Optional, cast # To find out what type mypy infers for an expression anywhere in # your program, wrap it in reveal_type(). Mypy will print an error # message with the type; remove it again before running the code. 
reveal_type(1) # -> Revealed type is 'builtins.int' # Use Union when something could be one of a few types x = [3, 5, "test", "fun"] # type: List[Union[int, str]] # Use Any if you don't know the type of something or it's too # dynamic to write a type for x = mystery_function() # type: Any # If you initialize a variable with an empty container or "None" # you may have to help mypy a bit by providing a type annotation x = [] # type: List[str] x = None # type: Optional[str] # This makes each positional arg and each keyword arg a "str" def call(self, *args, **kwargs): # type: (*str, **str) -> str request = make_request(*args, **kwargs) return self.do_api_query(request) # Use a "type: ignore" comment to suppress errors on a given line, # when your code confuses mypy or runs into an outright bug in mypy. # Good practice is to comment every "ignore" with a bug link # (in mypy, typeshed, or your own code) or an explanation of the issue. x = confusing_function() # type: ignore # https://github.com/python/mypy/issues/1167 # "cast" is a helper function that lets you override the inferred # type of an expression. It's only for mypy -- there's no runtime check. a = [4] b = cast(List[int], a) # Passes fine c = cast(List[str], a) # Passes fine (no runtime check) reveal_type(c) # -> Revealed type is 'builtins.list[builtins.str]' print c # -> [4]; the object is not cast # If you want dynamic attributes on your class, have it override "__setattr__" # or "__getattr__" in a stub or in your source code. # # "__setattr__" allows for dynamic assignment to names # "__getattr__" allows for dynamic access to names class A: # This will allow assignment to any A.x, if x is the same type as "value" # (use "value: Any" to allow arbitrary types) def __setattr__(self, name, value): # type: (str, int) -> None ... 
a.foo = 42 # Works a.bar = 'Ex-parrot' # Fails type checking Standard "duck types" ********************* In typical Python code, many functions that can take a list or a dict as an argument only need their argument to be somehow "list-like" or "dict-like". A specific meaning of "list-like" or "dict-like" (or something-else-like) is called a "duck type", and several duck types that are common in idiomatic Python are standardized. .. code-block:: python from typing import Mapping, MutableMapping, Sequence, Iterable # Use Iterable for generic iterables (anything usable in "for"), # and Sequence where a sequence (supporting "len" and "__getitem__") is # required def f(iterable_of_ints): # type: (Iterable[int]) -> List[str] return [str(x) for x in iterator_of_ints] f(range(1, 3)) # Mapping describes a dict-like object (with "__getitem__") that we won't # mutate, and MutableMapping one (with "__setitem__") that we might def f(my_dict): # type: (Mapping[int, str]) -> List[int] return list(my_dict.keys()) f({3: 'yes', 4: 'no'}) def f(my_mapping): # type: (MutableMapping[int, str]) -> Set[str] my_mapping[5] = 'maybe' return set(my_mapping.values()) f({3: 'yes', 4: 'no'}) Classes ******* .. code-block:: python class MyClass(object): # For instance methods, omit type for "self" def my_method(self, num, str1): # type: (int, str) -> str return num * str1 # The "__init__" method doesn't return anything, so it gets return # type "None" just like any other method that doesn't return anything def __init__(self): # type: () -> None pass # User-defined classes are valid as types in annotations x = MyClass() # type: MyClass Miscellaneous ************* .. 
code-block:: python import sys import re from typing import Match, AnyStr, IO # "typing.Match" describes regex matches from the re module x = re.match(r'[0-9]+', "15") # type: Match[str] # Use IO[] for functions that should accept or return any # object that comes from an open() call (IO[] does not # distinguish between reading, writing or other modes) def get_sys_IO(mode='w'): # type: (str) -> IO[str] if mode == 'w': return sys.stdout elif mode == 'r': return sys.stdin else: return sys.stdout mypy-0.761/docs/source/cheat_sheet_py3.rst0000644€tŠÔÚ€2›s®0000002301613576752246024753 0ustar jukkaDROPBOX\Domain Users00000000000000.. _cheat-sheet-py3: Type hints cheat sheet (Python 3) ================================= This document is a quick cheat sheet showing how the :pep:`484` type annotation notation represents various common types in Python 3. .. note:: Technically many of the type annotations shown below are redundant, because mypy can derive them from the type of the expression. So many of the examples have a dual purpose: show how to write the annotation, and show the inferred types. Variables ********* Python 3.6 introduced a syntax for annotating variables in :pep:`526` and we use it in most examples. .. code-block:: python # This is how you declare the type of a variable type in Python 3.6 age: int = 1 # In Python 3.5 and earlier you can use a type comment instead # (equivalent to the previous definition) age = 1 # type: int # You don't need to initialize a variable to annotate it a: int # Ok (no value at runtime until assigned) # The latter is useful in conditional branches child: bool if age < 18: child = True else: child = False Built-in types ************** .. 
code-block:: python from typing import List, Set, Dict, Tuple, Optional # For simple built-in types, just use the name of the type x: int = 1 x: float = 1.0 x: bool = True x: str = "test" x: bytes = b"test" # For collections, the name of the type is capitalized, and the # name of the type inside the collection is in brackets x: List[int] = [1] x: Set[int] = {6, 7} # Same as above, but with type comment syntax x = [1] # type: List[int] # For mappings, we need the types of both keys and values x: Dict[str, float] = {'field': 2.0} # For tuples, we specify the types of all the elements x: Tuple[int, str, float] = (3, "yes", 7.5) # Use Optional[] for values that could be None x: Optional[str] = some_function() # Mypy understands a value can't be None in an if-statement if x is not None: print(x.upper()) # If a value can never be None due to some invariants, use an assert assert x is not None print(x.upper()) Functions ********* Python 3 supports an annotation syntax for function declarations. .. code-block:: python from typing import Callable, Iterator, Union, Optional, List # This is how you annotate a function definition def stringify(num: int) -> str: return str(num) # And here's how you specify multiple arguments def plus(num1: int, num2: int) -> int: return num1 + num2 # Add default value for an argument after the type annotation def f(num1: int, my_float: float = 3.5) -> float: return num1 + my_float # This is how you annotate a callable (function) value x: Callable[[int, float], float] = f # A generator function that yields ints is secretly just a function that # returns an iterator of ints, so that's how we annotate it def g(n: int) -> Iterator[int]: i = 0 while i < n: yield i i += 1 # You can of course split a function annotation over multiple lines def send_email(address: Union[str, List[str]], sender: str, cc: Optional[List[str]], bcc: Optional[List[str]], subject='', body: Optional[List[str]] = None ) -> bool: ... 
# An argument can be declared positional-only by giving it a name # starting with two underscores: def quux(__x: int) -> None: pass quux(3) # Fine quux(__x=3) # Error When you're puzzled or when things are complicated ************************************************** .. code-block:: python from typing import Union, Any, List, Optional, cast # To find out what type mypy infers for an expression anywhere in # your program, wrap it in reveal_type(). Mypy will print an error # message with the type; remove it again before running the code. reveal_type(1) # -> Revealed type is 'builtins.int' # Use Union when something could be one of a few types x: List[Union[int, str]] = [3, 5, "test", "fun"] # Use Any if you don't know the type of something or it's too # dynamic to write a type for x: Any = mystery_function() # If you initialize a variable with an empty container or "None" # you may have to help mypy a bit by providing a type annotation x: List[str] = [] x: Optional[str] = None # This makes each positional arg and each keyword arg a "str" def call(self, *args: str, **kwargs: str) -> str: request = make_request(*args, **kwargs) return self.do_api_query(request) # Use a "type: ignore" comment to suppress errors on a given line, # when your code confuses mypy or runs into an outright bug in mypy. # Good practice is to comment every "ignore" with a bug link # (in mypy, typeshed, or your own code) or an explanation of the issue. x = confusing_function() # type: ignore # https://github.com/python/mypy/issues/1167 # "cast" is a helper function that lets you override the inferred # type of an expression. It's only for mypy -- there's no runtime check. 
a = [4] b = cast(List[int], a) # Passes fine c = cast(List[str], a) # Passes fine (no runtime check) reveal_type(c) # -> Revealed type is 'builtins.list[builtins.str]' print(c) # -> [4]; the object is not cast # If you want dynamic attributes on your class, have it override "__setattr__" # or "__getattr__" in a stub or in your source code. # # "__setattr__" allows for dynamic assignment to names # "__getattr__" allows for dynamic access to names class A: # This will allow assignment to any A.x, if x is the same type as "value" # (use "value: Any" to allow arbitrary types) def __setattr__(self, name: str, value: int) -> None: ... # This will allow access to any A.x, if x is compatible with the return type def __getattr__(self, name: str) -> int: ... a.foo = 42 # Works a.bar = 'Ex-parrot' # Fails type checking Standard "duck types" ********************* In typical Python code, many functions that can take a list or a dict as an argument only need their argument to be somehow "list-like" or "dict-like". A specific meaning of "list-like" or "dict-like" (or something-else-like) is called a "duck type", and several duck types that are common in idiomatic Python are standardized. .. code-block:: python from typing import Mapping, MutableMapping, Sequence, Iterable, List, Set # Use Iterable for generic iterables (anything usable in "for"), # and Sequence where a sequence (supporting "len" and "__getitem__") is # required def f(ints: Iterable[int]) -> List[str]: return [str(x) for x in ints] f(range(1, 3)) # Mapping describes a dict-like object (with "__getitem__") that we won't # mutate, and MutableMapping one (with "__setitem__") that we might def f(my_dict: Mapping[int, str]) -> List[int]: my_mapping[5] = 'maybe' # if we try this, mypy will throw an error... return list(my_dict.keys()) f({3: 'yes', 4: 'no'}) def f(my_mapping: MutableMapping[int, str]) -> Set[str]: my_mapping[5] = 'maybe' # ...but mypy is OK with this. 
return set(my_mapping.values()) f({3: 'yes', 4: 'no'}) Classes ******* .. code-block:: python class MyClass: # You can optionally declare instance variables in the class body attr: int # This is an instance variable with a default value charge_percent: int = 100 # The "__init__" method doesn't return anything, so it gets return # type "None" just like any other method that doesn't return anything def __init__(self) -> None: ... # For instance methods, omit type for "self" def my_method(self, num: int, str1: str) -> str: return num * str1 # User-defined classes are valid as types in annotations x: MyClass = MyClass() # You can use the ClassVar annotation to declare a class variable class Car: seats: ClassVar[int] = 4 passengers: ClassVar[List[str]] # You can also declare the type of an attribute in "__init__" class Box: def __init__(self) -> None: self.items: List[str] = [] Coroutines and asyncio ********************** See :ref:`async-and-await` for the full detail on typing coroutines and asynchronous code. .. code-block:: python import asyncio # A coroutine is typed like a normal function async def countdown35(tag: str, count: int) -> str: while count > 0: print('T-minus {} ({})'.format(count, tag)) await asyncio.sleep(0.1) count -= 1 return "Blastoff!" Miscellaneous ************* .. code-block:: python import sys import re from typing import Match, AnyStr, IO # "typing.Match" describes regex matches from the re module x: Match[str] = re.match(r'[0-9]+', "15") # Use IO[] for functions that should accept or return any # object that comes from an open() call (IO[] does not # distinguish between reading, writing or other modes) def get_sys_IO(mode: str = 'w') -> IO[str]: if mode == 'w': return sys.stdout elif mode == 'r': return sys.stdin else: return sys.stdout # Forward references are useful if you want to reference a class before # it is defined def f(foo: A) -> int: # This will fail ... class A: ... 
# If you use the string literal 'A', it will pass as long as there is a # class of that name later on in the file def f(foo: 'A') -> int: # Ok ... mypy-0.761/docs/source/class_basics.rst0000644€tŠÔÚ€2›s®0000002406513576752246024342 0ustar jukkaDROPBOX\Domain Users00000000000000Class basics ============ This section will help get you started annotating your classes. Built-in classes such as ``int`` also follow these same rules. Instance and class attributes ***************************** The mypy type checker detects if you are trying to access a missing attribute, which is a very common programming error. For this to work correctly, instance and class attributes must be defined or initialized within the class. Mypy infers the types of attributes: .. code-block:: python class A: def __init__(self, x: int) -> None: self.x = x # Aha, attribute 'x' of type 'int' a = A(1) a.x = 2 # OK! a.y = 3 # Error: 'A' has no attribute 'y' This is a bit like each class having an implicitly defined :py:data:`__slots__ ` attribute. This is only enforced during type checking and not when your program is running. You can declare types of variables in the class body explicitly using a type annotation: .. code-block:: python class A: x: List[int] # Declare attribute 'x' of type List[int] a = A() a.x = [1] # OK As in Python generally, a variable defined in the class body can be used as a class or an instance variable. (As discussed in the next section, you can override this with a :py:data:`~typing.ClassVar` annotation.) Type comments work as well, if you need to support Python versions earlier than 3.6: .. code-block:: python class A: x = None # type: List[int] # Declare attribute 'x' of type List[int] Note that attribute definitions in the class body that use a type comment are special: a ``None`` value is valid as the initializer, even though the declared type is not optional. This should be used sparingly, as this can result in ``None``-related runtime errors that mypy can't detect. 
Similarly, you can give explicit types to instance variables defined in a method: .. code-block:: python class A: def __init__(self) -> None: self.x: List[int] = [] def f(self) -> None: self.y: Any = 0 You can only define an instance variable within a method if you assign to it explicitly using ``self``: .. code-block:: python class A: def __init__(self) -> None: self.y = 1 # Define 'y' a = self a.x = 1 # Error: 'x' not defined Annotating __init__ methods *************************** The :py:meth:`__init__ ` method is somewhat special -- it doesn't return a value. This is best expressed as ``-> None``. However, since many feel this is redundant, it is allowed to omit the return type declaration on :py:meth:`__init__ ` methods **if at least one argument is annotated**. For example, in the following classes :py:meth:`__init__ ` is considered fully annotated: .. code-block:: python class C1: def __init__(self) -> None: self.var = 42 class C2: def __init__(self, arg: int): self.var = arg However, if :py:meth:`__init__ ` has no annotated arguments and no return type annotation, it is considered an untyped method: .. code-block:: python class C3: def __init__(self): # This body is not type checked self.var = 42 + 'abc' Class attribute annotations *************************** You can use a :py:data:`ClassVar[t] ` annotation to explicitly declare that a particular attribute should not be set on instances: .. code-block:: python from typing import ClassVar class A: x: ClassVar[int] = 0 # Class variable only A.x += 1 # OK a = A() a.x = 1 # Error: Cannot assign to class variable "x" via instance print(a.x) # OK -- can be read through an instance .. note:: If you need to support Python 3 versions 3.5.2 or earlier, you have to import ``ClassVar`` from ``typing_extensions`` instead (available on PyPI). If you use Python 2.7, you can import it from ``typing``. It's not necessary to annotate all class variables using :py:data:`~typing.ClassVar`. 
An attribute without the :py:data:`~typing.ClassVar` annotation can still be used as a class variable. However, mypy won't prevent it from being used as an instance variable, as discussed previously: .. code-block:: python class A: x = 0 # Can be used as a class or instance variable A.x += 1 # OK a = A() a.x = 1 # Also OK Note that :py:data:`~typing.ClassVar` is not a class, and you can't use it with :py:func:`isinstance` or :py:func:`issubclass`. It does not change Python runtime behavior -- it's only for type checkers such as mypy (and also helpful for human readers). You can also omit the square brackets and the variable type in a :py:data:`~typing.ClassVar` annotation, but this might not do what you'd expect: .. code-block:: python class A: y: ClassVar = 0 # Type implicitly Any! In this case the type of the attribute will be implicitly ``Any``. This behavior will change in the future, since it's surprising. .. note:: A :py:data:`~typing.ClassVar` type parameter cannot include type variables: ``ClassVar[T]`` and ``ClassVar[List[T]]`` are both invalid if ``T`` is a type variable (see :ref:`generic-classes` for more about type variables). Overriding statically typed methods *********************************** When overriding a statically typed method, mypy checks that the override has a compatible signature: .. code-block:: python class Base: def f(self, x: int) -> None: ... class Derived1(Base): def f(self, x: str) -> None: # Error: type of 'x' incompatible ... class Derived2(Base): def f(self, x: int, y: int) -> None: # Error: too many arguments ... class Derived3(Base): def f(self, x: int) -> None: # OK ... class Derived4(Base): def f(self, x: float) -> None: # OK: mypy treats int as a subtype of float ... class Derived5(Base): def f(self, x: int, y: int = 0) -> None: # OK: accepts more than the base ... # class method .. note:: You can also vary return types **covariantly** in overriding. 
For example, you could override the return type ``Iterable[int]`` with a subtype such as ``List[int]``. Similarly, you can vary argument types **contravariantly** -- subclasses can have more general argument types. You can also override a statically typed method with a dynamically typed one. This allows dynamically typed code to override methods defined in library classes without worrying about their type signatures. As always, relying on dynamically typed code can be unsafe. There is no runtime enforcement that the method override returns a value that is compatible with the original return type, since annotations have no effect at runtime: .. code-block:: python class Base: def inc(self, x: int) -> int: return x + 1 class Derived(Base): def inc(self, x): # Override, dynamically typed return 'hello' # Incompatible with 'Base', but no mypy error Abstract base classes and multiple inheritance ********************************************** Mypy supports Python :doc:`abstract base classes ` (ABCs). Abstract classes have at least one abstract method or property that must be implemented by any *concrete* (non-abstract) subclass. You can define abstract base classes using the :py:class:`abc.ABCMeta` metaclass and the :py:func:`@abc.abstractmethod ` function decorator. Example: .. code-block:: python from abc import ABCMeta, abstractmethod class Animal(metaclass=ABCMeta): @abstractmethod def eat(self, food: str) -> None: pass @property @abstractmethod def can_walk(self) -> bool: pass class Cat(Animal): def eat(self, food: str) -> None: ... # Body omitted @property def can_walk(self) -> bool: return True x = Animal() # Error: 'Animal' is abstract due to 'eat' and 'can_walk' y = Cat() # OK .. note:: In Python 2.7 you have to use :py:func:`@abc.abstractproperty ` to define an abstract property. Note that mypy performs checking for unimplemented abstract methods even if you omit the :py:class:`~abc.ABCMeta` metaclass. 
This can be useful if the metaclass would cause runtime metaclass conflicts. Since you can't create instances of ABCs, they are most commonly used in type annotations. For example, this method accepts arbitrary iterables containing arbitrary animals (instances of concrete ``Animal`` subclasses): .. code-block:: python def feed_all(animals: Iterable[Animal], food: str) -> None: for animal in animals: animal.eat(food) There is one important peculiarity about how ABCs work in Python -- whether a particular class is abstract or not is somewhat implicit. In the example below, ``Derived`` is treated as an abstract base class since ``Derived`` inherits an abstract ``f`` method from ``Base`` and doesn't explicitly implement it. The definition of ``Derived`` generates no errors from mypy, since it's a valid ABC: .. code-block:: python from abc import ABCMeta, abstractmethod class Base(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> None: pass class Derived(Base): # No error -- Derived is implicitly abstract def g(self) -> None: ... Attempting to create an instance of ``Derived`` will be rejected, however: .. code-block:: python d = Derived() # Error: 'Derived' is abstract .. note:: It's a common error to forget to implement an abstract method. As shown above, the class definition will not generate an error in this case, but any attempt to construct an instance will be flagged as an error. A class can inherit any number of classes, both abstract and concrete. As with normal overrides, a dynamically typed method can override or implement a statically typed method defined in any base class, including an abstract method defined in an abstract base class. You can implement an abstract property using either a normal property or an instance variable. mypy-0.761/docs/source/command_line.rst0000644€tŠÔÚ€2›s®0000006541613576752246024343 0ustar jukkaDROPBOX\Domain Users00000000000000.. _command-line: .. 
program:: mypy The mypy command line ===================== This section documents mypy's command line interface. You can view a quick summary of the available flags by running :option:`mypy --help`. .. note:: Command line flags are liable to change between releases. Specifying what to type check ***************************** By default, you can specify what code you want mypy to type check by passing in the paths to what you want to have type checked:: $ mypy foo.py bar.py some_directory Note that directories are checked recursively. Mypy also lets you specify what code to type check in several other ways. A short summary of the relevant flags is included below: for full details, see :ref:`running-mypy`. .. option:: -m MODULE, --module MODULE Asks mypy to type check the provided module. This flag may be repeated multiple times. Mypy *will not* recursively type check any submodules of the provided module. .. option:: -p PACKAGE, --package PACKAGE Asks mypy to type check the provided package. This flag may be repeated multiple times. Mypy *will* recursively type check any submodules of the provided package. This flag is identical to :option:`--module` apart from this behavior. .. option:: -c PROGRAM_TEXT, --command PROGRAM_TEXT Asks mypy to type check the provided string as a program. Optional arguments ****************** .. option:: -h, --help Show help message and exit. .. option:: -v, --verbose More verbose messages. .. option:: -V, --version Show program's version number and exit. .. _config-file-flag: Config file *********** .. option:: --config-file CONFIG_FILE This flag makes mypy read configuration settings from the given file. By default settings are read from ``mypy.ini`` or ``setup.cfg`` in the current directory, or ``.mypy.ini`` in the user's home directory. Settings override mypy's built-in defaults and command line flags can override settings. Specifying :option:`--config-file= <--config-file>` (with no filename) will ignore *all* config files. 
See :ref:`config-file` for the syntax of configuration files. .. option:: --warn-unused-configs This flag makes mypy warn about unused ``[mypy-]`` config file sections. (This requires turning off incremental mode using :option:`--no-incremental`.) .. _import-discovery: Import discovery **************** The following flags customize how exactly mypy discovers and follows imports. .. option:: --namespace-packages This flag enables import discovery to use namespace packages (see :pep:`420`). In particular, this allows discovery of imported packages that don't have an ``__init__.py`` (or ``__init__.pyi``) file. Namespace packages are found (using the PEP 420 rules, which prefers "classic" packages over namespace packages) along the module search path -- this is primarily set from the source files passed on the command line, the ``MYPYPATH`` environment variable, and the :ref:`mypy_path config option `. Note that this only affects import discovery -- for modules and packages explicitly passed on the command line, mypy still searches for ``__init__.py[i]`` files in order to determine the fully-qualified module/package name. .. option:: --ignore-missing-imports This flag makes mypy ignore all missing imports. It is equivalent to adding ``# type: ignore`` comments to all unresolved imports within your codebase. Note that this flag does *not* suppress errors about missing names in successfully resolved modules. For example, if one has the following files:: package/__init__.py package/mod.py Then mypy will generate the following errors with :option:`--ignore-missing-imports`: .. code-block:: python import package.unknown # No error, ignored x = package.unknown.func() # OK. 'func' is assumed to be of type 'Any' from package import unknown # No error, ignored from package.mod import NonExisting # Error: Module has no attribute 'NonExisting' For more details, see :ref:`ignore-missing-imports`. .. 
option:: --follow-imports {normal,silent,skip,error} This flag adjusts how mypy follows imported modules that were not explicitly passed in via the command line. The default option is ``normal``: mypy will follow and type check all modules. For more information on what the other options do, see :ref:`Following imports `. .. option:: --python-executable EXECUTABLE This flag will have mypy collect type information from :pep:`561` compliant packages installed for the Python executable ``EXECUTABLE``. If not provided, mypy will use PEP 561 compliant packages installed for the Python executable running mypy. See :ref:`installed-packages` for more on making PEP 561 compliant packages. .. option:: --no-site-packages This flag will disable searching for :pep:`561` compliant packages. This will also disable searching for a usable Python executable. Use this flag if mypy cannot find a Python executable for the version of Python being checked, and you don't need to use PEP 561 typed packages. Otherwise, use :option:`--python-executable`. .. option:: --no-silence-site-packages By default, mypy will suppress any error messages generated within :pep:`561` compliant packages. Adding this flag will disable this behavior. .. _platform-configuration: Platform configuration ********************** By default, mypy will assume that you intend to run your code using the same operating system and Python version you are using to run mypy itself. The following flags let you modify this behavior. For more information on how to use these flags, see :ref:`version_and_platform_checks`. .. option:: --python-version X.Y This flag will make mypy type check your code as if it were run under Python version X.Y. Without this option, mypy will default to using whatever version of Python is running mypy. Note that the :option:`-2` and :option:`--py2` flags are aliases for :option:`--python-version 2.7 <--python-version>`. 
This flag will attempt to find a Python executable of the corresponding version to search for :pep:`561` compliant packages. If you'd like to disable this, use the :option:`--no-site-packages` flag (see :ref:`import-discovery` for more details). .. option:: -2, --py2 Equivalent to running :option:`--python-version 2.7 <--python-version>`. .. option:: --platform PLATFORM This flag will make mypy type check your code as if it were run under the given operating system. Without this option, mypy will default to using whatever operating system you are currently using. The ``PLATFORM`` parameter may be any string supported by :py:data:`sys.platform`. .. _always-true: .. option:: --always-true NAME This flag will treat all variables named ``NAME`` as compile-time constants that are always true. This flag may be repeated. .. option:: --always-false NAME This flag will treat all variables named ``NAME`` as compile-time constants that are always false. This flag may be repeated. .. _disallow-dynamic-typing: Disallow dynamic typing *********************** The ``Any`` type is used represent a value that has a :ref:`dynamic type `. The ``--disallow-any`` family of flags will disallow various uses of the ``Any`` type in a module -- this lets us strategically disallow the use of dynamic typing in a controlled way. The following options are available: .. option:: --disallow-any-unimported This flag disallows usage of types that come from unfollowed imports (such types become aliases for ``Any``). Unfollowed imports occur either when the imported module does not exist or when :option:`--follow-imports=skip <--follow-imports>` is set. .. option:: --disallow-any-expr This flag disallows all expressions in the module that have type ``Any``. If an expression of type ``Any`` appears anywhere in the module mypy will output an error unless the expression is immediately used as an argument to :py:func:`~typing.cast` or assigned to a variable with an explicit type annotation. 
In addition, declaring a variable of type ``Any`` or casting to type ``Any`` is not allowed. Note that calling functions that take parameters of type ``Any`` is still allowed. .. option:: --disallow-any-decorated This flag disallows functions that have ``Any`` in their signature after decorator transformation. .. option:: --disallow-any-explicit This flag disallows explicit ``Any`` in type positions such as type annotations and generic type parameters. .. option:: --disallow-any-generics This flag disallows usage of generic types that do not specify explicit type parameters. Moreover, built-in collections (such as :py:class:`list` and :py:class:`dict`) become disallowed as you should use their aliases from the :py:mod:`typing` module (such as :py:class:`List[int] ` and :py:class:`Dict[str, str] `). .. option:: --disallow-subclassing-any This flag reports an error whenever a class subclasses a value of type ``Any``. This may occur when the base class is imported from a module that doesn't exist (when using :option:`--ignore-missing-imports`) or is ignored due to :option:`--follow-imports=skip <--follow-imports>` or a ``# type: ignore`` comment on the ``import`` statement. Since the module is silenced, the imported class is given a type of ``Any``. By default mypy will assume that the subclass correctly inherited the base class even though that may not actually be the case. This flag makes mypy raise an error instead. .. _untyped-definitions-and-calls: Untyped definitions and calls ***************************** The following flags configure how mypy handles untyped function definitions or calls. .. option:: --disallow-untyped-calls This flag reports an error whenever a function with type annotations calls a function defined without annotations. .. option:: --disallow-untyped-defs This flag reports an error whenever it encounters a function definition without type annotations. .. 
option:: --disallow-incomplete-defs This flag reports an error whenever it encounters a partly annotated function definition. .. option:: --check-untyped-defs This flag is less severe than the previous two options -- it type checks the body of every function, regardless of whether it has type annotations. (By default the bodies of functions without annotations are not type checked.) It will assume all arguments have type ``Any`` and always infer ``Any`` as the return type. .. option:: --disallow-untyped-decorators This flag reports an error whenever a function with type annotations is decorated with a decorator without annotations. .. _none-and-optional-handling: None and Optional handling ************************** The following flags adjust how mypy handles values of type ``None``. For more details, see :ref:`no_strict_optional`. .. _no-implicit-optional: .. option:: --no-implicit-optional This flag causes mypy to stop treating arguments with a ``None`` default value as having an implicit :py:data:`~typing.Optional` type. For example, by default mypy will assume that the ``x`` parameter is of type ``Optional[int]`` in the code snippet below since the default parameter is ``None``: .. code-block:: python def foo(x: int = None) -> None: print(x) If this flag is set, the above snippet will no longer type check: we must now explicitly indicate that the type is ``Optional[int]``: .. code-block:: python def foo(x: Optional[int] = None) -> None: print(x) .. option:: --no-strict-optional This flag disables strict checking of :py:data:`~typing.Optional` types and ``None`` values. With this option, mypy doesn't generally check the use of ``None`` values -- they are valid everywhere. See :ref:`no_strict_optional` for more about this feature. **Note:** Strict optional checking was enabled by default starting in mypy 0.600, and in previous versions it had to be explicitly enabled using ``--strict-optional`` (which is still accepted). .. 
_configuring-warnings: Configuring warnings ******************** The follow flags enable warnings for code that is sound but is potentially problematic or redundant in some way. .. option:: --warn-redundant-casts This flag will make mypy report an error whenever your code uses an unnecessary cast that can safely be removed. .. option:: --warn-unused-ignores This flag will make mypy report an error whenever your code uses a ``# type: ignore`` comment on a line that is not actually generating an error message. This flag, along with the :option:`--warn-redundant-casts` flag, are both particularly useful when you are upgrading mypy. Previously, you may have needed to add casts or ``# type: ignore`` annotations to work around bugs in mypy or missing stubs for 3rd party libraries. These two flags let you discover cases where either workarounds are no longer necessary. .. option:: --no-warn-no-return By default, mypy will generate errors when a function is missing return statements in some execution paths. The only exceptions are when: - The function has a ``None`` or ``Any`` return type - The function has an empty body or a body that is just ellipsis (``...``). Empty functions are often used for abstract methods. Passing in :option:`--no-warn-no-return` will disable these error messages in all cases. .. option:: --warn-return-any This flag causes mypy to generate a warning when returning a value with type ``Any`` from a function declared with a non-``Any`` return type. .. option:: --warn-unreachable This flag will make mypy report an error whenever it encounters code determined to be unreachable or redundant after performing type analysis. This can be a helpful way of detecting certain kinds of bugs in your code. For example, enabling this flag will make mypy report that the ``x > 7`` check is redundant and that the ``else`` block below is unreachable. .. 
code-block:: python def process(x: int) -> None: # Error: Right operand of 'or' is never evaluated if isinstance(x, int) or x > 7: # Error: Unsupported operand types for + ("int" and "str") print(x + "bad") else: # Error: 'Statement is unreachable' error print(x + "bad") To help prevent mypy from generating spurious warnings, the "Statement is unreachable" warning will be silenced in exactly two cases: 1. When the unreachable statement is a ``raise`` statement, is an ``assert False`` statement, or calls a function that has the :py:data:`~typing.NoReturn` return type hint. In other words, when the unreachable statement throws an error or terminates the program in some way. 2. When the unreachable statement was *intentionally* marked as unreachable using :ref:`version_and_platform_checks`. .. note:: Mypy currently cannot detect and report unreachable or redundant code inside any functions using :ref:`type-variable-value-restriction`. This limitation will be removed in future releases of mypy. Miscellaneous strictness flags ****************************** This section documents any other flags that do not neatly fall under any of the above sections. .. option:: --allow-untyped-globals This flag causes mypy to suppress errors caused by not being able to fully infer the types of global and class variables. .. option:: --allow-redefinition By default, mypy won't allow a variable to be redefined with an unrelated type. This flag enables redefinion of a variable with an arbitrary type *in some contexts*: only redefinitions within the same block and nesting depth as the original definition are allowed. Example where this can be useful: .. code-block:: python def process(items: List[str]) -> None: # 'items' has type List[str] items = [item.split() for item in items] # 'items' now has type List[List[str]] ... .. option:: --no-implicit-reexport By default, imported values to a module are treated as exported and mypy allows other modules to import them. 
This flag changes the behavior to not re-export unless the item is imported using from-as or is included in ``__all__``. Note this is always treated as enabled for stub files. For example: .. code-block:: python # This won't re-export the value from foo import bar # This will re-export it as bar and allow other modules to import it from foo import bar as bar # This will also re-export bar from foo import bar __all__ = ['bar'] .. option:: --strict-equality By default, mypy allows always-false comparisons like ``42 == 'no'``. Use this flag to prohibit such comparisons of non-overlapping types, and similar identity and container checks: .. code-block:: python from typing import List, Text items: List[int] if 'some string' in items: # Error: non-overlapping container check! ... text: Text if text != b'other bytes': # Error: non-overlapping equality check! ... assert text is not None # OK, check against None is allowed as a special case. .. option:: --strict This flag mode enables all optional error checking flags. You can see the list of flags enabled by strict mode in the full :option:`mypy --help` output. Note: the exact list of flags enabled by running :option:`--strict` may change over time. .. _configuring-error-messages: Configuring error messages ************************** The following flags let you adjust how much detail mypy displays in error messages. .. option:: --show-error-context This flag will precede all errors with "note" messages explaining the context of the error. For example, consider the following program: .. code-block:: python class Test: def foo(self, x: int) -> int: return x + "bar" Mypy normally displays an error message that looks like this:: main.py:3: error: Unsupported operand types for + ("int" and "str") If we enable this flag, the error message now looks like this:: main.py: note: In member "foo" of class "Test": main.py:3: error: Unsupported operand types for + ("int" and "str") .. 
option:: --show-column-numbers This flag will add column offsets to error messages. For example, the following indicates an error in line 12, column 9 (note that column offsets are 0-based):: main.py:12:9: error: Unsupported operand types for / ("int" and "str") .. option:: --show-error-codes This flag will add an error code ``[]`` to error messages. The error code is shown after each error message:: prog.py:1: error: "str" has no attribute "trim" [attr-defined] See :ref:`error-codes` for more information. .. option:: --pretty Use visually nicer output in error messages: use soft word wrap, show source code snippets, and show error location markers. .. option:: --no-color-output This flag will disable color output in error messages, enabled by default. .. option:: --no-error-summary This flag will disable error summary. By default mypy shows a summary line including total number of errors, number of files with errors, and number of files checked. .. option:: --show-absolute-path Show absolute paths to files. .. _incremental: Incremental mode **************** By default, mypy will store type information into a cache. Mypy will use this information to avoid unnecessary recomputation when it type checks your code again. This can help speed up the type checking process, especially when most parts of your program have not changed since the previous mypy run. If you want to speed up how long it takes to recheck your code beyond what incremental mode can offer, try running mypy in :ref:`daemon mode `. .. option:: --no-incremental This flag disables incremental mode: mypy will no longer reference the cache when re-run. Note that mypy will still write out to the cache even when incremental mode is disabled: see the :option:`--cache-dir` flag below for more details. .. option:: --cache-dir DIR By default, mypy stores all cache data inside of a folder named ``.mypy_cache`` in the current directory. This flag lets you change this folder. 
This flag can also be useful for controlling cache use when using :ref:`remote caching `. This setting will override the ``MYPY_CACHE_DIR`` environment variable if it is set. Mypy will also always write to the cache even when incremental mode is disabled so it can "warm up" the cache. To disable writing to the cache, use ``--cache-dir=/dev/null`` (UNIX) or ``--cache-dir=nul`` (Windows). .. option:: --sqlite-cache Use an `SQLite`_ database to store the cache. .. option:: --cache-fine-grained Include fine-grained dependency information in the cache for the mypy daemon. .. option:: --skip-version-check By default, mypy will ignore cache data generated by a different version of mypy. This flag disables that behavior. .. option:: --skip-cache-mtime-checks Skip cache internal consistency checks based on mtime. Advanced options **************** The following flags are useful mostly for people who are interested in developing or debugging mypy internals. .. option:: --pdb This flag will invoke the Python debugger when mypy encounters a fatal error. .. option:: --show-traceback, --tb If set, this flag will display a full traceback when mypy encounters a fatal error. .. option:: --raise-exceptions Raise exception on fatal error. .. option:: --custom-typing-module MODULE This flag lets you use a custom module as a substitute for the :py:mod:`typing` module. .. option:: --custom-typeshed-dir DIR This flag specifies the directory where mypy looks for typeshed stubs, instead of the typeshed that ships with mypy. This is primarily intended to make it easier to test typeshed changes before submitting them upstream, but also allows you to use a forked version of typeshed. .. _warn-incomplete-stub: .. option:: --warn-incomplete-stub This flag modifies both the :option:`--disallow-untyped-defs` and :option:`--disallow-incomplete-defs` flags so they also report errors if stubs in typeshed are missing type annotations or has incomplete annotations. 
If both flags are missing, :option:`--warn-incomplete-stub` also does nothing. This flag is mainly intended to be used by people who want contribute to typeshed and would like a convenient way to find gaps and omissions. If you want mypy to report an error when your codebase *uses* an untyped function, whether that function is defined in typeshed or not, use the :option:`--disallow-untyped-calls` flag. See :ref:`untyped-definitions-and-calls` for more details. .. _shadow-file: .. option:: --shadow-file SOURCE_FILE SHADOW_FILE When mypy is asked to type check ``SOURCE_FILE``, this flag makes mypy read from and type check the contents of ``SHADOW_FILE`` instead. However, diagnostics will continue to refer to ``SOURCE_FILE``. Specifying this argument multiple times (``--shadow-file X1 Y1 --shadow-file X2 Y2``) will allow mypy to perform multiple substitutions. This allows tooling to create temporary files with helpful modifications without having to change the source file in place. For example, suppose we have a pipeline that adds ``reveal_type`` for certain variables. This pipeline is run on ``original.py`` to produce ``temp.py``. Running ``mypy --shadow-file original.py temp.py original.py`` will then cause mypy to type check the contents of ``temp.py`` instead of ``original.py``, but error messages will still reference ``original.py``. Report generation ***************** If these flags are set, mypy will generate a report in the specified format into the specified directory. .. option:: --any-exprs-report DIR Causes mypy to generate a text file report documenting how many expressions of type ``Any`` are present within your codebase. .. option:: --cobertura-xml-report DIR Causes mypy to generate a Cobertura XML type checking coverage report. You must install the `lxml`_ library to generate this report. .. option:: --html-report / --xslt-html-report DIR Causes mypy to generate an HTML type checking coverage report. 
You must install the `lxml`_ library to generate this report. .. option:: --linecount-report DIR Causes mypy to generate a text file report documenting the functions and lines that are typed and untyped within your codebase. .. option:: --linecoverage-report DIR Causes mypy to generate a JSON file that maps each source file's absolute filename to a list of line numbers that belong to typed functions in that file. .. option:: --lineprecision-report DIR Causes mypy to generate a flat text file report with per-module statistics of how many lines are typechecked etc. .. option:: --txt-report / --xslt-txt-report DIR Causes mypy to generate a text file type checking coverage report. You must install the `lxml`_ library to generate this report. .. option:: --xml-report DIR Causes mypy to generate an XML type checking coverage report. You must install the `lxml`_ library to generate this report. Miscellaneous ************* .. option:: --junit-xml JUNIT_XML Causes mypy to generate a JUnit XML test result document with type checking results. This can make it easier to integrate mypy with continuous integration (CI) tools. .. option:: --find-occurrences CLASS.MEMBER This flag will make mypy print out all usages of a class member based on static type information. This feature is experimental. .. option:: --scripts-are-modules This flag will give command line arguments that appear to be scripts (i.e. files whose name does not end in ``.py``) a module name derived from the script name rather than the fixed name :py:mod:`__main__`. This lets you check more than one script in a single mypy invocation. (The default :py:mod:`__main__` is technically more correct, but if you have many scripts that import a large package, the behavior enabled by this flag is often more convenient.) .. _lxml: https://pypi.org/project/lxml/ .. 
_SQLite: https://www.sqlite.org/ mypy-0.761/docs/source/common_issues.rst0000644€tŠÔÚ€2›s®0000005302413576752246024571 0ustar jukkaDROPBOX\Domain Users00000000000000.. _common_issues: Common issues and solutions =========================== This section has examples of cases when you need to update your code to use static typing, and ideas for working around issues if mypy doesn't work as expected. Statically typed code is often identical to normal Python code (except for type annotations), but sometimes you need to do things slightly differently. Can't install mypy using pip ---------------------------- If installation fails, you've probably hit one of these issues: * Mypy needs Python 3.5 or later to run. * You may have to run pip like this: ``python3 -m pip install mypy``. .. _annotations_needed: No errors reported for obviously wrong code ------------------------------------------- There are several common reasons why obviously wrong code is not flagged as an error. - **The function containing the error is not annotated.** Functions that do not have any annotations (neither for any argument nor for the return type) are not type-checked, and even the most blatant type errors (e.g. ``2 + 'a'``) pass silently. The solution is to add annotations. Where that isn't possible, functions without annotations can be checked using :option:`--check-untyped-defs `. Example: .. code-block:: python def foo(a): return '(' + a.split() + ')' # No error! This gives no error even though ``a.split()`` is "obviously" a list (the author probably meant ``a.strip()``). The error is reported once you add annotations: .. code-block:: python def foo(a: str) -> str: return '(' + a.split() + ')' # error: Unsupported operand types for + ("str" and List[str]) If you don't know what types to add, you can use ``Any``, but beware: - **One of the values involved has type 'Any'.** Extending the above example, if we were to leave out the annotation for ``a``, we'd get no error: .. 
code-block:: python def foo(a) -> str: return '(' + a.split() + ')' # No error! The reason is that if the type of ``a`` is unknown, the type of ``a.split()`` is also unknown, so it is inferred as having type ``Any``, and it is no error to add a string to an ``Any``. If you're having trouble debugging such situations, :ref:`reveal_type() ` might come in handy. Note that sometimes library stubs have imprecise type information, e.g. the :py:func:`pow` builtin returns ``Any`` (see `typeshed issue 285 `_ for the reason). - **Some imports may be silently ignored**. Another source of unexpected ``Any`` values are the :option:`--ignore-missing-imports ` and :option:`--follow-imports=skip ` flags. When you use :option:`--ignore-missing-imports `, any imported module that cannot be found is silently replaced with ``Any``. When using :option:`--follow-imports=skip ` the same is true for modules for which a ``.py`` file is found but that are not specified on the command line. (If a ``.pyi`` stub is found it is always processed normally, regardless of the value of :option:`--follow-imports `.) To help debug the former situation (no module found at all) leave out :option:`--ignore-missing-imports `; to get clarity about the latter use :option:`--follow-imports=error `. You can read up about these and other useful flags in :ref:`command-line`. - **A function annotated as returning a non-optional type returns 'None' and mypy doesn't complain**. .. code-block:: python def foo() -> str: return None # No error! You may have disabled strict optional checking (see :ref:`no_strict_optional` for more). .. _silencing_checker: Spurious errors and locally silencing the checker ------------------------------------------------- You can use a ``# type: ignore`` comment to silence the type checker on a particular line. For example, let's say our code is using the C extension module ``frobnicate``, and there's no stub available. 
Mypy will complain about this, as it has no information about the module: .. code-block:: python import frobnicate # Error: No module "frobnicate" frobnicate.start() You can add a ``# type: ignore`` comment to tell mypy to ignore this error: .. code-block:: python import frobnicate # type: ignore frobnicate.start() # Okay! The second line is now fine, since the ignore comment causes the name ``frobnicate`` to get an implicit ``Any`` type. .. note:: You can use the form ``# type: ignore[]`` to only ignore specific errors on the line. This way you are less likely to silence unexpected errors that are not safe to ignore, and this will also document what the purpose of the comment is. See :ref:`error-codes` for more information. .. note:: The ``# type: ignore`` comment will only assign the implicit ``Any`` type if mypy cannot find information about that particular module. So, if we did have a stub available for ``frobnicate`` then mypy would ignore the ``# type: ignore`` comment and typecheck the stub as usual. Another option is to explicitly annotate values with type ``Any`` -- mypy will let you perform arbitrary operations on ``Any`` values. Sometimes there is no more precise type you can use for a particular value, especially if you use dynamic Python features such as :py:meth:`__getattr__ `: .. code-block:: python class Wrapper: ... def __getattr__(self, a: str) -> Any: return getattr(self._wrapped, a) Finally, you can create a stub file (``.pyi``) for a file that generates spurious errors. Mypy will only look at the stub file and ignore the implementation, since stub files take precedence over ``.py`` files. Ignoring a whole file --------------------- A ``# type: ignore`` comment at the top of a module (before any statements, including imports or docstrings) has the effect of ignoring the *entire* module. .. 
code-block:: python # type: ignore import foo foo.bar() Unexpected errors about 'None' and/or 'Optional' types ------------------------------------------------------ Starting from mypy 0.600, mypy uses :ref:`strict optional checking ` by default, and the ``None`` value is not compatible with non-optional types. It's easy to switch back to the older behavior where ``None`` was compatible with arbitrary types (see :ref:`no_strict_optional`). You can also fall back to this behavior if strict optional checking would require a large number of ``assert foo is not None`` checks to be inserted, and you want to minimize the number of code changes required to get a clean mypy run. Mypy runs are slow ------------------ If your mypy runs feel slow, you should probably use the :ref:`mypy daemon `, which can speed up incremental mypy runtimes by a factor of 10 or more. :ref:`Remote caching ` can make cold mypy runs several times faster. Types of empty collections -------------------------- You often need to specify the type when you assign an empty list or dict to a new variable, as mentioned earlier: .. code-block:: python a: List[int] = [] Without the annotation mypy can't always figure out the precise type of ``a``. You can use a simple empty list literal in a dynamically typed function (as the type of ``a`` would be implicitly ``Any`` and need not be inferred), if type of the variable has been declared or inferred before, or if you perform a simple modification operation in the same scope (such as ``append`` for a list): .. code-block:: python a = [] # Okay because followed by append, inferred type List[int] for i in range(n): a.append(i * i) However, in more complex cases an explicit type annotation can be required (mypy will tell you this). Often the annotation can make your code easier to understand, so it doesn't only help mypy but everybody who is reading the code! 
Redefinitions with incompatible types ------------------------------------- Each name within a function only has a single 'declared' type. You can reuse for loop indices etc., but if you want to use a variable with multiple types within a single function, you may need to declare it with the ``Any`` type. .. code-block:: python def f() -> None: n = 1 ... n = 'x' # Type error: n has type int .. note:: This limitation could be lifted in a future mypy release. Note that you can redefine a variable with a more *precise* or a more concrete type. For example, you can redefine a sequence (which does not support ``sort()``) as a list and sort it in-place: .. code-block:: python def f(x: Sequence[int]) -> None: # Type of x is Sequence[int] here; we don't know the concrete type. x = list(x) # Type of x is List[int] here. x.sort() # Okay! .. _variance: Invariance vs covariance ------------------------ Most mutable generic collections are invariant, and mypy considers all user-defined generic classes invariant by default (see :ref:`variance-of-generics` for motivation). This could lead to some unexpected errors when combined with type inference. For example: .. code-block:: python class A: ... class B(A): ... lst = [A(), A()] # Inferred type is List[A] new_lst = [B(), B()] # inferred type is List[B] lst = new_lst # mypy will complain about this, because List is invariant Possible strategies in such situations are: * Use an explicit type annotation: .. code-block:: python new_lst: List[A] = [B(), B()] lst = new_lst # OK * Make a copy of the right hand side: .. code-block:: python lst = list(new_lst) # Also OK * Use immutable collections as annotations whenever possible: .. code-block:: python def f_bad(x: List[A]) -> A: return x[0] f_bad(new_lst) # Fails def f_good(x: Sequence[A]) -> A: return x[0] f_good(new_lst) # OK Declaring a supertype as variable type -------------------------------------- Sometimes the inferred type is a subtype (subclass) of the desired type. 
The type inference uses the first assignment to infer the type of a name (assume here that ``Shape`` is the base class of both ``Circle`` and ``Triangle``): .. code-block:: python shape = Circle() # Infer shape to be Circle ... shape = Triangle() # Type error: Triangle is not a Circle You can just give an explicit type for the variable in cases such the above example: .. code-block:: python shape = Circle() # type: Shape # The variable s can be any Shape, # not just Circle ... shape = Triangle() # OK Complex type tests ------------------ Mypy can usually infer the types correctly when using :py:func:`isinstance ` type tests, but for other kinds of checks you may need to add an explicit type cast: .. code-block:: python def f(o: object) -> None: if type(o) is int: o = cast(int, o) g(o + 1) # This would be an error without the cast ... else: ... .. note:: Note that the :py:class:`object` type used in the above example is similar to ``Object`` in Java: it only supports operations defined for *all* objects, such as equality and :py:func:`isinstance`. The type ``Any``, in contrast, supports all operations, even if they may fail at runtime. The cast above would have been unnecessary if the type of ``o`` was ``Any``. Mypy can't infer the type of ``o`` after the :py:class:`type() ` check because it only knows about :py:func:`isinstance` (and the latter is better style anyway). We can write the above code without a cast by using :py:func:`isinstance`: .. code-block:: python def f(o: object) -> None: if isinstance(o, int): # Mypy understands isinstance checks g(o + 1) # Okay; type of o is inferred as int here ... Type inference in mypy is designed to work well in common cases, to be predictable and to let the type checker give useful error messages. More powerful type inference strategies often have complex and difficult-to-predict failure modes and could result in very confusing error messages. 
The tradeoff is that you as a programmer sometimes have to give the type checker a little help. .. _version_and_platform_checks: Python version and system platform checks ----------------------------------------- Mypy supports the ability to perform Python version checks and platform checks (e.g. Windows vs Posix), ignoring code paths that won't be run on the targeted Python version or platform. This allows you to more effectively typecheck code that supports multiple versions of Python or multiple operating systems. More specifically, mypy will understand the use of :py:data:`sys.version_info` and :py:data:`sys.platform` checks within ``if/elif/else`` statements. For example: .. code-block:: python import sys # Distinguishing between different versions of Python: if sys.version_info >= (3, 5): # Python 3.5+ specific definitions and imports elif sys.version_info[0] >= 3: # Python 3 specific definitions and imports else: # Python 2 specific definitions and imports # Distinguishing between different operating systems: if sys.platform.startswith("linux"): # Linux-specific code elif sys.platform == "darwin": # Mac-specific code elif sys.platform == "win32": # Windows-specific code else: # Other systems As a special case, you can also use one of these checks in a top-level (unindented) ``assert``; this makes mypy skip the rest of the file. Example: .. code-block:: python import sys assert sys.platform != 'win32' # The rest of this file doesn't apply to Windows. Some other expressions exhibit similar behavior; in particular, :py:data:`~typing.TYPE_CHECKING`, variables named ``MYPY``, and any variable whose name is passed to :option:`--always-true ` or :option:`--always-false `. (However, ``True`` and ``False`` are not treated specially!) .. note:: Mypy currently does not support more complex checks, and does not assign any special meaning when assigning a :py:data:`sys.version_info` or :py:data:`sys.platform` check to a variable. 
This may change in future versions of mypy. By default, mypy will use your current version of Python and your current operating system as default values for ``sys.version_info`` and ``sys.platform``. To target a different Python version, use the :option:`--python-version X.Y ` flag. For example, to verify your code typechecks if were run using Python 2, pass in :option:`--python-version 2.7 ` from the command line. Note that you do not need to have Python 2.7 installed to perform this check. To target a different operating system, use the :option:`--platform PLATFORM ` flag. For example, to verify your code typechecks if it were run in Windows, pass in :option:`--platform win32 `. See the documentation for :py:data:`sys.platform` for examples of valid platform parameters. .. _reveal-type: Displaying the type of an expression ------------------------------------ You can use ``reveal_type(expr)`` to ask mypy to display the inferred static type of an expression. This can be useful when you don't quite understand how mypy handles a particular piece of code. Example: .. code-block:: python reveal_type((1, 'hello')) # Revealed type is 'Tuple[builtins.int, builtins.str]' You can also use ``reveal_locals()`` at any line in a file to see the types of all local variables at once. Example: .. code-block:: python a = 1 b = 'one' reveal_locals() # Revealed local types are: # a: builtins.int # b: builtins.str .. note:: ``reveal_type`` and ``reveal_locals`` are only understood by mypy and don't exist in Python. If you try to run your program, you'll have to remove any ``reveal_type`` and ``reveal_locals`` calls before you can run your code. Both are always available and you don't need to import them. .. _import-cycles: Import cycles ------------- An import cycle occurs where module A imports module B and module B imports module A (perhaps indirectly, e.g. ``A -> B -> C -> A``). 
Sometimes in order to add type annotations you have to add extra imports to a module and those imports cause cycles that didn't exist before. If those cycles become a problem when running your program, there's a trick: if the import is only needed for type annotations in forward references (string literals) or comments, you can write the imports inside ``if TYPE_CHECKING:`` so that they are not executed at runtime. Example: File ``foo.py``: .. code-block:: python from typing import List, TYPE_CHECKING if TYPE_CHECKING: import bar def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]': return [arg] File ``bar.py``: .. code-block:: python from typing import List from foo import listify class BarClass: def listifyme(self) -> 'List[BarClass]': return listify(self) .. note:: The :py:data:`~typing.TYPE_CHECKING` constant defined by the :py:mod:`typing` module is ``False`` at runtime but ``True`` while type checking. Python 3.5.1 doesn't have :py:data:`~typing.TYPE_CHECKING`. An alternative is to define a constant named ``MYPY`` that has the value ``False`` at runtime. Mypy considers it to be ``True`` when type checking. Here's the above example modified to use ``MYPY``: .. code-block:: python from typing import List MYPY = False if MYPY: import bar def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]': return [arg] .. _not-generic-runtime: Using classes that are generic in stubs but not at runtime ---------------------------------------------------------- Some classes are declared as generic in stubs, but not at runtime. Examples in the standard library include :py:class:`os.PathLike` and :py:class:`queue.Queue`. Subscripting such a class will result in a runtime error: .. code-block:: python from queue import Queue class Tasks(Queue[str]): # TypeError: 'type' object is not subscriptable ... 
results: Queue[int] = Queue() # TypeError: 'type' object is not subscriptable To avoid these errors while still having precise types you can either use string literal types or :py:data:`~typing.TYPE_CHECKING`: .. code-block:: python from queue import Queue from typing import TYPE_CHECKING if TYPE_CHECKING: BaseQueue = Queue[str] # this is only processed by mypy else: BaseQueue = Queue # this is not seen by mypy but will be executed at runtime. class Tasks(BaseQueue): # OK ... results: 'Queue[int]' = Queue() # OK If you are running Python 3.7+ you can use ``from __future__ import annotations`` as a (nicer) alternative to string quotes, read more in :pep:`563`. For example: .. code-block:: python from __future__ import annotations from queue import Queue results: Queue[int] = Queue() # This works at runtime .. _silencing-linters: Silencing linters ----------------- In some cases, linters will complain about unused imports or code. In these cases, you can silence them with a comment after type comments, or on the same line as the import: .. code-block:: python # to silence complaints about unused imports from typing import List # noqa a = None # type: List[int] To silence the linter on the same line as a type comment put the linter comment *after* the type comment: .. code-block:: python a = some_complex_thing() # type: ignore # noqa Covariant subtyping of mutable protocol members is rejected ----------------------------------------------------------- Mypy rejects this because this is potentially unsafe. Consider this example: .. code-block:: python from typing_extensions import Protocol class P(Protocol): x: float def fun(arg: P) -> None: arg.x = 3.14 class C: x = 42 c = C() fun(c) # This is not safe c.x << 5 # Since this will fail! To work around this problem consider whether "mutating" is actually part of a protocol. If not, then one can use a :py:class:`@property ` in the protocol definition: .. 
code-block:: python from typing_extensions import Protocol class P(Protocol): @property def x(self) -> float: pass def fun(arg: P) -> None: ... class C: x = 42 fun(C()) # OK Dealing with conflicting names ------------------------------ Suppose you have a class with a method whose name is the same as an imported (or built-in) type, and you want to use the type in another method signature. E.g.: .. code-block:: python class Message: def bytes(self): ... def register(self, path: bytes): # error: Invalid type "mod.Message.bytes" ... The third line elicits an error because mypy sees the argument type ``bytes`` as a reference to the method by that name. Other than renaming the method, a work-around is to use an alias: .. code-block:: python bytes_ = bytes class Message: def bytes(self): ... def register(self, path: bytes_): ... Using a development mypy build ------------------------------ You can install the latest development version of mypy from source. Clone the `mypy repository on GitHub `_, and then run ``pip install`` locally: .. code-block:: text git clone --recurse-submodules https://github.com/python/mypy.git cd mypy sudo python3 -m pip install --upgrade . mypy-0.761/docs/source/conf.py0000644€tŠÔÚ€2›s®0000002113513576752246022451 0ustar jukkaDROPBOX\Domain Users00000000000000# -*- coding: utf-8 -*- # # Mypy documentation build configuration file, created by # sphinx-quickstart on Sun Sep 14 19:50:35 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
# Make the repository root importable so the mypy version can be read below.
sys.path.insert(0, os.path.abspath('../..'))

from mypy.version import __version__ as mypy_version

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings.  They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.intersphinx']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Mypy'
copyright = u'2016, Jukka Lehtosalo'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version (drop any "-dev"-style suffix after the first '-').
version = mypy_version.split('-')[0]
# The full version, including alpha/beta/rc tags.
release = mypy_version

# The language for content autogenerated by Sphinx.  Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output.  They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# Prefer the Read the Docs theme when it is installed; fall back to the
# built-in default theme otherwise.
try:
    import sphinx_rtd_theme
except ImportError:  # narrowed from a bare except: only a missing module is expected here
    html_theme = 'default'
else:
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.  If None, it defaults to
# " v documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory.  They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory.  These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer.  Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer.  Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'Mypydoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files.  List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [ ('index', 'Mypy.tex', u'Mypy Documentation', u'Jukka', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'mypy', u'Mypy Documentation', [u'Jukka Lehtosalo'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Mypy', u'Mypy Documentation', u'Jukka', 'Mypy', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False rst_prolog = '.. |...| unicode:: U+2026 .. 
ellipsis\n' intersphinx_mapping = { 'python': ('https://docs.python.org/3', None), 'six': ('https://six.readthedocs.io', None), 'attrs': ('http://www.attrs.org/en/stable', None), 'cython': ('http://docs.cython.org/en/latest', None), 'monkeytype': ('https://monkeytype.readthedocs.io/en/latest', None), 'setuptools': ('https://setuptools.readthedocs.io/en/latest', None), } mypy-0.761/docs/source/config_file.rst0000644€tŠÔÚ€2›s®0000005220313576752246024150 0ustar jukkaDROPBOX\Domain Users00000000000000.. _config-file: The mypy configuration file =========================== Mypy supports reading configuration settings from a file. By default it uses the file ``mypy.ini`` with fallback to ``setup.cfg`` in the current directory, then ``$XDG_CONFIG_HOME/mypy/config``, then ``~/.config/mypy/config``, and finally ``.mypy.ini`` in the user home directory if none of them are found; the :option:`--config-file ` command-line flag can be used to read a different file instead (see :ref:`config-file-flag`). It is important to understand that there is no merging of configuration files, as it would lead to ambiguity. The :option:`--config-file ` flag has the highest precedence and must be correct; otherwise mypy will report an error and exit. Without command line option, mypy will look for defaults, but will use only one of them. The first one to read is ``mypy.ini``, and then ``setup.cfg``. Most flags correspond closely to :ref:`command-line flags ` but there are some differences in flag names and some flags may take a different value based on the module being processed. Some flags support user home directory and environment variable expansion. To refer to the user home directory, use ``~`` at the beginning of the path. To expand environment variables use ``$VARNAME`` or ``${VARNAME}``. Config file format ****************** The configuration file format is the usual :doc:`ini file ` format. 
It should contain section names in square brackets and flag settings of the form `NAME = VALUE`. Comments start with ``#`` characters. - A section named ``[mypy]`` must be present. This specifies the global flags. The ``setup.cfg`` file is an exception to this. - Additional sections named ``[mypy-PATTERN1,PATTERN2,...]`` may be present, where ``PATTERN1``, ``PATTERN2``, etc., are comma-separated patterns of fully-qualified module names, with some components optionally replaced by the '*' character (e.g. ``foo.bar``, ``foo.bar.*``, ``foo.*.baz``). These sections specify additional flags that only apply to *modules* whose name matches at least one of the patterns. A pattern of the form ``qualified_module_name`` matches only the named module, while ``dotted_module_name.*`` matches ``dotted_module_name`` and any submodules (so ``foo.bar.*`` would match all of ``foo.bar``, ``foo.bar.baz``, and ``foo.bar.baz.quux``). Patterns may also be "unstructured" wildcards, in which stars may appear in the middle of a name (e.g ``site.*.migrations.*``). Stars match zero or more module components (so ``site.*.migrations.*`` can match ``site.migrations``). .. _config-precedence: When options conflict, the precedence order for configuration is: 1. :ref:`Inline configuration ` in the source file 2. Sections with concrete module names (``foo.bar``) 3. Sections with "unstructured" wildcard patterns (``foo.*.baz``), with sections later in the configuration file overriding sections earlier. 4. Sections with "well-structured" wildcard patterns (``foo.bar.*``), with more specific overriding more general. 5. Command line options. 6. Top-level configuration file options. The difference in precedence order between "structured" patterns (by specificity) and "unstructured" patterns (by order in the file) is unfortunate, and is subject to change in future versions. .. note:: The ``warn_unused_configs`` flag may be useful to debug misspelled section names. .. 
note:: Configuration flags are liable to change between releases. Per-module and global options ***************************** Some of the config options may be set either globally (in the ``[mypy]`` section) or on a per-module basis (in sections like ``[mypy-foo.bar]``). If you set an option both globally and for a specific module, the module configuration options take precedence. This lets you set global defaults and override them on a module-by-module basis. If multiple pattern sections match a module, :ref:`the options from the most specific section are used where they disagree `. Some other options, as specified in their description, may only be set in the global section (``[mypy]``). Inverting option values *********************** Options that take a boolean value may be inverted by adding ``no_`` to their name or by (when applicable) swapping their prefix from ``disallow`` to ``allow`` (and vice versa). Examples ******** Here is an example of a ``mypy.ini`` file. To use this config file, place it at the root of your repo and run mypy. .. code-block:: ini # Global options: [mypy] python_version = 2.7 warn_return_any = True warn_unused_configs = True # Per-module options: [mypy-mycode.foo.*] disallow_untyped_defs = True [mypy-mycode.bar] warn_return_any = False [mypy-somelibrary] ignore_missing_imports = True This config file specifies three global options in the ``[mypy]`` section. These three options will: 1. Type-check your entire project assuming it will be run using Python 2.7. (This is equivalent to using the :option:`--python-version 2.7 ` or :option:`-2 ` flag). 2. Report an error whenever a function returns a value that is inferred to have type ``Any``. 3. Report any config options that are unused by mypy. (This will help us catch typos when making changes to our config file). Next, this module specifies three per-module options. 
The first two options change how mypy type checks code in ``mycode.foo.*`` and ``mycode.bar``, which we assume here are two modules that you wrote. The final config option changes how mypy type checks ``somelibrary``, which we assume here is some 3rd party library you've installed and are importing. These options will: 1. Selectively disallow untyped function definitions only within the ``mycode.foo`` package -- that is, only for function definitions defined in the ``mycode/foo`` directory. 2. Selectively *disable* the "function is returning any" warnings within ``mycode.bar`` only. This overrides the global default we set earlier. 3. Suppress any error messages generated when your codebase tries importing the module ``somelibrary``. This is useful if ``somelibrary`` is some 3rd party library missing type hints. .. _config-file-import-discovery: Import discovery **************** For more information, see the :ref:`Import discovery ` section of the command line docs. ``mypy_path`` (string) Specifies the paths to use, after trying the paths from ``MYPYPATH`` environment variable. Useful if you'd like to keep stubs in your repo, along with the config file. Multiple paths are always separated with a ``:`` or ``,`` regardless of the platform. User home directory and environment variables will be expanded. This option may only be set in the global section (``[mypy]``). **Note:** On Windows, use UNC paths to avoid using ``:`` (e.g. ``\\127.0.0.1\X$\MyDir`` where ``X`` is the drive letter). ``files`` (comma-separated list of strings) A comma-separated list of paths which should be checked by mypy if none are given on the command line. Supports recursive file globbing using :py:mod:`glob`, where ``*`` (e.g. ``*.py``) matches files in the current directory and ``**/`` (e.g. ``**/*.py``) matches files in any directories below the current one. User home directory and environment variables will be expanded. This option may only be set in the global section (``[mypy]``). 
``namespace_packages`` (bool, default False) Enables :pep:`420` style namespace packages. See :ref:`the corresponding flag ` for more information. This option may only be set in the global section (``[mypy]``). ``ignore_missing_imports`` (bool, default False) Suppresses error messages about imports that cannot be resolved. If this option is used in a per-module section, the module name should match the name of the *imported* module, not the module containing the import statement. ``follow_imports`` (string, default ``normal``) Directs what to do with imports when the imported module is found as a ``.py`` file and not part of the files, modules and packages provided on the command line. The four possible values are ``normal``, ``silent``, ``skip`` and ``error``. For explanations see the discussion for the :ref:`--follow-imports ` command line flag. If this option is used in a per-module section, the module name should match the name of the *imported* module, not the module containing the import statement. ``follow_imports_for_stubs`` (bool, default False) Determines whether to respect the ``follow_imports`` setting even for stub (``.pyi``) files. Used in conjunction with ``follow_imports=skip``, this can be used to suppress the import of a module from ``typeshed``, replacing it with ``Any``. Used in conjunction with ``follow_imports=error``, this can be used to make any use of a particular ``typeshed`` module an error. ``python_executable`` (string) Specifies the path to the Python executable to inspect to collect a list of available :ref:`PEP 561 packages `. User home directory and environment variables will be expanded. Defaults to the executable used to run mypy. This option may only be set in the global section (``[mypy]``). ``no_silence_site_packages`` (bool, default False) Enables reporting error messages generated within :pep:`561` compliant packages. 
Those error messages are suppressed by default, since you are usually not able to control errors in 3rd party code. This option may only be set in the global section (``[mypy]``). Platform configuration ********************** ``python_version`` (string) Specifies the Python version used to parse and check the target program. The string should be in the format ``DIGIT.DIGIT`` -- for example ``2.7``. The default is the version of the Python interpreter used to run mypy. This option may only be set in the global section (``[mypy]``). ``platform`` (string) Specifies the OS platform for the target program, for example ``darwin`` or ``win32`` (meaning OS X or Windows, respectively). The default is the current platform as revealed by Python's :py:data:`sys.platform` variable. This option may only be set in the global section (``[mypy]``). ``always_true`` (comma-separated list of strings) Specifies a list of variables that mypy will treat as compile-time constants that are always true. ``always_false`` (comma-separated list of strings) Specifies a list of variables that mypy will treat as compile-time constants that are always false. Disallow dynamic typing *********************** For more information, see the :ref:`Disallow dynamic typing ` section of the command line docs. ``disallow_any_unimported`` (bool, default False) Disallows usage of types that come from unfollowed imports (anything imported from an unfollowed import is automatically given a type of ``Any``). ``disallow_any_expr`` (bool, default False) Disallows all expressions in the module that have type ``Any``. ``disallow_any_decorated`` (bool, default False) Disallows functions that have ``Any`` in their signature after decorator transformation. ``disallow_any_explicit`` (bool, default False) Disallows explicit ``Any`` in type positions such as type annotations and generic type parameters. 
``disallow_any_generics`` (bool, default False) Disallows usage of generic types that do not specify explicit type parameters. ``disallow_subclassing_any`` (bool, default False) Disallows subclassing a value of type ``Any``. Untyped definitions and calls ***************************** For more information, see the :ref:`Untyped definitions and calls ` section of the command line docs. ``disallow_untyped_calls`` (bool, default False) Disallows calling functions without type annotations from functions with type annotations. ``disallow_untyped_defs`` (bool, default False) Disallows defining functions without type annotations or with incomplete type annotations. ``disallow_incomplete_defs`` (bool, default False) Disallows defining functions with incomplete type annotations. ``check_untyped_defs`` (bool, default False) Type-checks the interior of functions without type annotations. ``disallow_untyped_decorators`` (bool, default False) Reports an error whenever a function with type annotations is decorated with a decorator without annotations. .. _config-file-none-and-optional-handling: None and Optional handling ************************** For more information, see the :ref:`None and Optional handling ` section of the command line docs. ``no_implicit_optional`` (bool, default False) Changes the treatment of arguments with a default value of ``None`` by not implicitly making their type :py:data:`~typing.Optional`. ``strict_optional`` (bool, default True) Enables or disables strict Optional checks. If False, mypy treats ``None`` as compatible with every type. **Note:** This was False by default in mypy versions earlier than 0.600. Configuring warnings ******************** For more information, see the :ref:`Configuring warnings ` section of the command line docs. ``warn_redundant_casts`` (bool, default False) Warns about casting an expression to its inferred type. This option may only be set in the global section (``[mypy]``). 
``warn_unused_ignores`` (bool, default False) Warns about unneeded ``# type: ignore`` comments. ``warn_no_return`` (bool, default True) Shows errors for missing return statements on some execution paths. ``warn_return_any`` (bool, default False) Shows a warning when returning a value with type ``Any`` from a function declared with a non- ``Any`` return type. ``warn_unreachable`` (bool, default False) Shows a warning when encountering any code inferred to be unreachable or redundant after performing type analysis. Suppressing errors ****************** Note: these configuration options are available in the config file only. There is no analog available via the command line options. ``show_none_errors`` (bool, default True) Shows errors related to strict ``None`` checking, if the global ``strict_optional`` flag is enabled. ``ignore_errors`` (bool, default False) Ignores all non-fatal errors. Miscellaneous strictness flags ****************************** ``allow_untyped_globals`` (bool, default False) Causes mypy to suppress errors caused by not being able to fully infer the types of global and class variables. ``allow_redefinition`` (bool, default False) Allows variables to be redefined with an arbitrary type, as long as the redefinition is in the same block and nesting level as the original definition. ``implicit_reexport`` (bool, default True) By default, imported values to a module are treated as exported and mypy allows other modules to import them. When false, mypy will not re-export unless the item is imported using from-as or is included in ``__all__``. Note that mypy treats stub files as if this is always disabled. For example: .. 
code-block:: python # This won't re-export the value from foo import bar # This will re-export it as bar and allow other modules to import it from foo import bar as bar # This will also re-export bar from foo import bar __all__ = ['bar'] ``strict_equality`` (bool, default False) Prohibit equality checks, identity checks, and container checks between non-overlapping types. Configuring error messages ************************** For more information, see the :ref:`Configuring error messages ` section of the command line docs. These options may only be set in the global section (``[mypy]``). ``show_error_context`` (bool, default False) Prefixes each error with the relevant context. ``show_column_numbers`` (bool, default False) Shows column numbers in error messages. ``show_error_codes`` (bool, default False) Shows error codes in error messages. See :ref:`error-codes` for more information. ``pretty`` (bool, default False) Use visually nicer output in error messages: use soft word wrap, show source code snippets, and show error location markers. ``color_output`` (bool, default True) Shows error messages with color enabled. ``error_summary`` (bool, default True) Shows a short summary line after error messages. ``show_absolute_path`` (bool, default False) Show absolute paths to files. Incremental mode **************** These options may only be set in the global section (``[mypy]``). ``incremental`` (bool, default True) Enables :ref:`incremental mode `. ``cache_dir`` (string, default ``.mypy_cache``) Specifies the location where mypy stores incremental cache info. User home directory and environment variables will be expanded. This setting will be overridden by the ``MYPY_CACHE_DIR`` environment variable. Note that the cache is only read when incremental mode is enabled but is always written to, unless the value is set to ``/dev/null`` (UNIX) or ``nul`` (Windows). ``sqlite_cache`` (bool, default False) Use an `SQLite`_ database to store the cache. 
``cache_fine_grained`` (bool, default False) Include fine-grained dependency information in the cache for the mypy daemon. ``skip_version_check`` (bool, default False) Makes mypy use incremental cache data even if it was generated by a different version of mypy. (By default, mypy will perform a version check and regenerate the cache if it was written by older versions of mypy.) ``skip_cache_mtime_checks`` (bool, default False) Skip cache internal consistency checks based on mtime. Advanced options **************** These options may only be set in the global section (``[mypy]``). ``pdb`` (bool, default False) Invokes pdb on fatal error. ``show_traceback`` (bool, default False) Shows traceback on fatal error. ``raise_exceptions`` (bool, default False) Raise exception on fatal error. ``custom_typing_module`` (string) Specifies a custom module to use as a substitute for the :py:mod:`typing` module. ``custom_typeshed_dir`` (string) Specifies an alternative directory to look for stubs instead of the default ``typeshed`` directory. User home directory and environment variables will be expanded. ``warn_incomplete_stub`` (bool, default False) Warns about missing type annotations in typeshed. This is only relevant in combination with ``disallow_untyped_defs`` or ``disallow_incomplete_defs``. Report generation ***************** If these options are set, mypy will generate a report in the specified format into the specified directory. ``any_exprs_report`` (string) Causes mypy to generate a text file report documenting how many expressions of type ``Any`` are present within your codebase. ``cobertura_xml_report`` (string) Causes mypy to generate a Cobertura XML type checking coverage report. You must install the `lxml`_ library to generate this report. ``html_report`` / ``xslt_html_report`` (string) Causes mypy to generate an HTML type checking coverage report. You must install the `lxml`_ library to generate this report. 
``linecount_report`` (string) Causes mypy to generate a text file report documenting the functions and lines that are typed and untyped within your codebase. ``linecoverage_report`` (string) Causes mypy to generate a JSON file that maps each source file's absolute filename to a list of line numbers that belong to typed functions in that file. ``lineprecision_report`` (string) Causes mypy to generate a flat text file report with per-module statistics of how many lines are typechecked etc. ``txt_report`` / ``xslt_txt_report`` (string) Causes mypy to generate a text file type checking coverage report. You must install the `lxml`_ library to generate this report. ``xml_report`` (string) Causes mypy to generate an XML type checking coverage report. You must install the `lxml`_ library to generate this report. Miscellaneous ************* These options may only be set in the global section (``[mypy]``). ``junit_xml`` (string) Causes mypy to generate a JUnit XML test result document with type checking results. This can make it easier to integrate mypy with continuous integration (CI) tools. ``scripts_are_modules`` (bool, default False) Makes script ``x`` become module ``x`` instead of ``__main__``. This is useful when checking multiple scripts in a single run. ``warn_unused_configs`` (bool, default False) Warns about per-module sections in the config file that do not match any files processed when invoking mypy. (This requires turning off incremental mode using ``incremental = False``.) ``verbosity`` (integer, default 0) Controls how much debug output will be generated. Higher numbers are more verbose. .. _lxml: https://pypi.org/project/lxml/ .. _SQLite: https://www.sqlite.org/ mypy-0.761/docs/source/duck_type_compatibility.rst0000644€tŠÔÚ€2›s®0000000350013576752246026620 0ustar jukkaDROPBOX\Domain Users00000000000000Duck type compatibility ----------------------- In Python, certain types are compatible even though they aren't subclasses of each other. 
For example, ``int`` objects are valid whenever ``float`` objects are expected. Mypy supports this idiom via *duck type compatibility*. This is supported for a small set of built-in types: * ``int`` is duck type compatible with ``float`` and ``complex``. * ``float`` is duck type compatible with ``complex``. * In Python 2, ``str`` is duck type compatible with ``unicode``. For example, mypy considers an ``int`` object to be valid whenever a ``float`` object is expected. Thus code like this is nice and clean and also behaves as expected: .. code-block:: python import math def degrees_to_radians(degrees: float) -> float: return math.pi * degrees / 180 n = 90 # Inferred type 'int' print(degrees_to_radians(n)) # Okay! You can also often use :ref:`protocol-types` to achieve a similar effect in a more principled and extensible fashion. Protocols don't apply to cases like ``int`` being compatible with ``float``, since ``float`` is not a protocol class but a regular, concrete class, and many standard library functions expect concrete instances of ``float`` (or ``int``). .. note:: Note that in Python 2 a ``str`` object with non-ASCII characters is often *not valid* when a unicode string is expected. The mypy type system does not consider a string with non-ASCII values as a separate type so some programs with this kind of error will silently pass type checking. In Python 3 ``str`` and ``bytes`` are separate, unrelated types and this kind of error is easy to detect. This a good reason for preferring Python 3 over Python 2! See :ref:`text-and-anystr` for details on how to enforce that a value must be a unicode string in a cross-compatible way. mypy-0.761/docs/source/dynamic_typing.rst0000644€tŠÔÚ€2›s®0000000502113576752246024716 0ustar jukkaDROPBOX\Domain Users00000000000000.. 
_dynamic-typing: Dynamically typed code ====================== As mentioned earlier, bodies of functions that don't have any explicit types in their function annotation are dynamically typed (operations are checked at runtime). Code outside functions is statically typed by default, and types of variables are inferred. This does usually the right thing, but you can also make any variable dynamically typed by defining it explicitly with the type ``Any``: .. code-block:: python from typing import Any s = 1 # Statically typed (type int) d: Any = 1 # Dynamically typed (type Any) s = 'x' # Type check error d = 'x' # OK Operations on Any values ------------------------ You can do anything using a value with type ``Any``, and type checker does not complain: .. code-block:: python def f(x: Any) -> int: # All of these are valid! x.foobar(1, y=2) print(x[3] + 'f') if x: x.z = x(2) open(x).read() return x Values derived from an ``Any`` value also often have the type ``Any`` implicitly, as mypy can't infer a more precise result type. For example, if you get the attribute of an ``Any`` value or call a ``Any`` value the result is ``Any``: .. code-block:: python def f(x: Any) -> None: y = x.foo() # y has type Any y.bar() # Okay as well! ``Any`` types may propagate through your program, making type checking less effective, unless you are careful. Any vs. object -------------- The type :py:class:`object` is another type that can have an instance of arbitrary type as a value. Unlike ``Any``, :py:class:`object` is an ordinary static type (it is similar to ``Object`` in Java), and only operations valid for *all* types are accepted for :py:class:`object` values. These are all valid: .. code-block:: python def f(o: object) -> None: if o: print(o) print(isinstance(o, int)) o = 2 o = 'foo' These are, however, flagged as errors, since not all objects support these operations: .. code-block:: python def f(o: object) -> None: o.foo() # Error! o + 2 # Error! open(o) # Error! 
n = 1 # type: int n = o # Error! You can use :py:func:`~typing.cast` (see chapter :ref:`casts`) or :py:func:`isinstance` to go from a general type such as :py:class:`object` to a more specific type (subtype) such as ``int``. :py:func:`~typing.cast` is not needed with dynamically typed values (values with type ``Any``). mypy-0.761/docs/source/error_code_list.rst0000644€tŠÔÚ€2›s®0000004672513576752246025076 0ustar jukkaDROPBOX\Domain Users00000000000000.. _error-code-list: Error codes enabled by default ============================== This section documents various errors codes that mypy can generate with default options. See :ref:`error-codes` for general documentation about error codes. :ref:`error-codes-optional` documents additional error codes that you can enable. Check that attribute exists [attr-defined] ------------------------------------------ Mypy checks that an attribute is defined in the target class or module when using the dot operator. This applies to both getting and setting an attribute. New attributes are defined by assignments in the class body, or assignments to ``self.x`` in methods. These assignments don't generate ``attr-defined`` errors. Example: .. code-block:: python class Resource: def __init__(self, name: str) -> None: self.name = name r = Resouce('x') print(r.name) # OK print(r.id) # Error: "Resource" has no attribute "id" [attr-defined] r.id = 5 # Error: "Resource" has no attribute "id" [attr-defined] This error code is also generated if an imported name is not defined in the module in a ``from ... import`` statement (as long as the target module can be found): .. code-block:: python # Error: Module 'os' has no attribute 'non_existent' [attr-defined] from os import non_existent A reference to a missing attribute is given the ``Any`` type. In the above example, the type of ``non_existent`` will be ``Any``, which can be important if you silence the error. 
Check that attribute exists in each union item [union-attr] ----------------------------------------------------------- If you access the attribute of a value with a union type, mypy checks that the attribute is defined for *every* type in that union. Otherwise the operation can fail at runtime. This also applies to optional types. Example: .. code-block:: python from typing import Union class Cat: def sleep(self) -> None: ... def miaow(self) -> None: ... class Dog: def sleep(self) -> None: ... def follow_me(self) -> None: ... def func(animal: Union[Cat, Dog]) -> None: # OK: 'sleep' is defined for both Cat and Dog animal.sleep() # Error: Item "Cat" of "Union[Cat, Dog]" has no attribute "follow_me" [union-attr] animal.follow_me() You can often work around these errors by using ``assert isinstance(obj, ClassName)`` or ``assert obj is not None`` to tell mypy that you know that the type is more specific than what mypy thinks. Check that name is defined [name-defined] ----------------------------------------- Mypy expects that all references to names have a corresponding definition in an active scope, such as an assignment, function definition or an import. This can catch missing definitions, missing imports, and typos. This example accidentally calls ``sort()`` instead of :py:func:`sorted`: .. code-block:: python x = sort([3, 2, 4]) # Error: Name 'sort' is not defined [name-defined] Check arguments in calls [call-arg] ----------------------------------- Mypy expects that the number and names of arguments match the called function. Note that argument type checks have a separate error code ``arg-type``. Example: .. 
code-block:: python from typing import Sequence def greet(name: str) -> None: print('hello', name) greet('jack') # OK greet('jill', 'jack') # Error: Too many arguments for "greet" [call-arg] Check argument types [arg-type] ------------------------------- Mypy checks that argument types in a call match the declared argument types in the signature of the called function (if one exists). Example: .. code-block:: python from typing import List, Optional def first(x: List[int]) -> Optional[int]: return x[0] if x else 0 t = (5, 4) # Error: Argument 1 to "first" has incompatible type "Tuple[int, int]"; # expected "List[int]" [arg-type] print(first(t)) Check calls to overloaded functions [call-overload] --------------------------------------------------- When you call an overloaded function, mypy checks that at least one of the signatures of the overload items match the argument types in the call. Example: .. code-block:: python from typing import overload, Optional @overload def inc_maybe(x: None) -> None: ... @overload def inc_maybe(x: int) -> int: ... def inc_maybe(x: Optional[int]) -> Optional[int]: if x is None: return None else: return x + 1 inc_maybe(None) # OK inc_maybe(5) # OK # Error: No overload variant of "inc_maybe" matches argument type "float" [call-overload] inc_maybe(1.2) Check validity of types [valid-type] ------------------------------------ Mypy checks that each type annotation and any expression that represents a type is a valid type. Examples of valid types include classes, union types, callable types, type aliases, and literal types. Examples of invalid types include bare integer literals, functions, variables, and modules. This example incorrectly uses the function ``log`` as a type: .. 
code-block:: python from typing import List def log(x: object) -> None: print('log:', repr(x)) # Error: Function "t.log" is not valid as a type [valid-type] def log_all(objs: List[object], f: log) -> None: for x in objs: f(x) You can use :py:data:`~typing.Callable` as the type for callable objects: .. code-block:: python from typing import List, Callable # OK def log_all(objs: List[object], f: Callable[[object], None]) -> None: for x in objs: f(x) Require annotation if variable type is unclear [var-annotated] -------------------------------------------------------------- In some cases mypy can't infer the type of a variable without an explicit annotation. Mypy treats this as an error. This typically happens when you initialize a variable with an empty collection or ``None``. If mypy can't infer the collection item type, mypy replaces any parts of the type it couldn't infer with ``Any`` and generates an error. Example with an error: .. code-block:: python class Bundle: def __init__(self) -> None: # Error: Need type annotation for 'items' # (hint: "items: List[] = ...") [var-annotated] self.items = [] reveal_type(Bundle().items) # list[Any] To address this, we add an explicit annotation: .. code-block:: python from typing import List class Bundle: def __init__(self) -> None: self.items: List[str] = [] # OK reveal_type(Bundle().items) # list[str] Check validity of overrides [override] -------------------------------------- Mypy checks that an overridden method or attribute is compatible with the base class. A method in a subclass must accept all arguments that the base class method accepts, and the return type must conform to the return type in the base class (Liskov substitution principle). Argument types can be more general is a subclass (i.e., they can vary contravariantly). The return type can be narrowed in a subclass (i.e., it can vary covariantly). 
It's okay to define additional arguments in a subclass method, as long all extra arguments have default values or can be left out (``*args``, for example). Example: .. code-block:: python from typing import Optional, Union class Base: def method(self, arg: int) -> Optional[int]: ... class Derived(Base): def method(self, arg: Union[int, str]) -> int: # OK ... class DerivedBad(Base): # Error: Argument 1 of "method" is incompatible with "Base" [override] def method(self, arg: bool) -> int: ... Check that function returns a value [return] -------------------------------------------- If a function has a non-``None`` return type, mypy expects that the function always explicitly returns a value (or raises an exception). The function should not fall off the end of the function, since this is often a bug. Example: .. code-block:: python # Error: Missing return statement [return] def show(x: int) -> int: print(x) # Error: Missing return statement [return] def pred1(x: int) -> int: if x > 0: return x - 1 # OK def pred2(x: int) -> int: if x > 0: return x - 1 else: raise ValueError('not defined for zero') Check that return value is compatible [return-value] ---------------------------------------------------- Mypy checks that the returned value is compatible with the type signature of the function. Example: .. code-block:: python def func(x: int) -> str: # Error: Incompatible return value type (got "int", expected "str") [return-value] return x + 1 Check types in assignment statement [assignment] ------------------------------------------------ Mypy checks that the assigned expression is compatible with the assignment target (or targets). Example: .. 
code-block:: python class Resource: def __init__(self, name: str) -> None: self.name = name r = Resource('A') r.name = 'B' # OK # Error: Incompatible types in assignment (expression has type "int", # variable has type "str") [assignment] r.name = 5 Check type variable values [type-var] ------------------------------------- Mypy checks that value of a type variable is compatible with a value restriction or the upper bound type. Example: .. code-block:: python from typing import TypeVar T1 = TypeVar('T1', int, float) def add(x: T1, y: T1) -> T1: return x + y add(4, 5.5) # OK # Error: Value of type variable "T1" of "add" cannot be "str" [type-var] add('x', 'y') Check uses of various operators [operator] ------------------------------------------ Mypy checks that operands support a binary or unary operation, such as ``+`` or ``~``. Indexing operations are so common that they have their own error code ``index`` (see below). Example: .. code-block:: python # Error: Unsupported operand types for + ("int" and "str") [operator] 1 + 'x' Check indexing operations [index] --------------------------------- Mypy checks that the indexed value in indexing operation such as ``x[y]`` supports indexing, and that the index expression has a valid type. Example: .. code-block:: python a = {'x': 1, 'y': 2} a['x'] # OK # Error: Invalid index type "int" for "Dict[str, int]"; expected type "str" [index] print(a[1]) # Error: Invalid index type "bytes" for "Dict[str, int]"; expected type "str" [index] a[b'x'] = 4 Check list items [list-item] ---------------------------- When constructing a list using ``[item, ...]``, mypy checks that each item is compatible with the list type that is inferred from the surrounding context. Example: .. 
code-block:: python from typing import List # Error: List item 0 has incompatible type "int"; expected "str" [list-item] a: List[str] = [0] Check dict items [dict-item] ---------------------------- When constructing a dictionary using ``{key: value, ...}`` or ``dict(key=value, ...)``, mypy checks that each key and value is compatible with the dictionary type that is inferred from the surrounding context. Example: .. code-block:: python from typing import Dict # Error: Dict entry 0 has incompatible type "str": "str"; expected "str": "int" [dict-item] d: Dict[str, int] = {'key': 'value'} Check TypedDict items [typeddict-item] -------------------------------------- When constructing a ``TypedDict`` object, mypy checks that each key and value is compatible with the ``TypedDict`` type that is inferred from the surrounding context. Example: .. code-block:: python from typing_extensions import TypedDict class Point(TypedDict): x: int y: int # Error: Incompatible types (expression has type "float", # TypedDict item "x" has type "int") [typeddict-item] p: Point = {'x': 1.2, 'y': 4} Check that type of target is known [has-type] --------------------------------------------- Mypy sometimes generates an error when it hasn't inferred any type for a variable being referenced. This can happen for references to variables that are initialized later in the source file, and for references across modules that form an import cycle. When this happens, the reference gets an implicit ``Any`` type. In this example the definitions of ``x`` and ``y`` are circular: .. code-block:: python class Problem: def set_x(self) -> None: # Error: Cannot determine type of 'y' [has-type] self.x = self.y def set_y(self) -> None: self.y = self.x To work around this error, you can add an explicit type annotation to the target variable or attribute. Sometimes you can also reorganize the code so that the definition of the variable is placed earlier than the reference to the variable in a source file. 
Untangling cyclic imports may also help. We add an explicit annotation to the ``y`` attribute to work around the issue: .. code-block:: python class Problem: def set_x(self) -> None: self.x = self.y # OK def set_y(self) -> None: self.y: int = self.x # Added annotation here Check that import target can be found [import] ---------------------------------------------- Mypy generates an error if it can't find the source code or a stub file for an imported module. Example: .. code-block:: python # Error: Cannot find implementation or library stub for module named 'acme' [import] import acme See :ref:`ignore-missing-imports` for how to work around these errors. Check that each name is defined once [no-redef] ----------------------------------------------- Mypy may generate an error if you have multiple definitions for a name in the same namespace. The reason is that this is often an error, as the second definition may overwrite the first one. Also, mypy often can't be able to determine whether references point to the first or the second definition, which would compromise type checking. If you silence this error, all references to the defined name refer to the *first* definition. Example: .. code-block:: python class A: def __init__(self, x: int) -> None: ... class A: # Error: Name 'A' already defined on line 1 [no-redef] def __init__(self, x: str) -> None: ... # Error: Argument 1 to "A" has incompatible type "str"; expected "int" # (the first definition wins!) A('x') Check that called function returns a value [func-returns-value] --------------------------------------------------------------- Mypy reports an error if you call a function with a ``None`` return type and don't ignore the return value, as this is usually (but not always) a programming error. In this example, the ``if f()`` check is always false since ``f`` returns ``None``: .. code-block:: python def f() -> None: ... 
# OK: we don't do anything with the return value f() # Error: "f" does not return a value [func-returns-value] if f(): print("not false") Check instantiation of abstract classes [abstract] -------------------------------------------------- Mypy generates an error if you try to instantiate an abstract base class (ABC). An abtract base class is a class with at least one abstract method or attribute. (See also :py:mod:`abc` module documentation) Sometimes a class is made accidentally abstract, often due to an unimplemented abstract method. In a case like this you need to provide an implementation for the method to make the class concrete (non-abstract). Example: .. code-block:: python from abc import ABCMeta, abstractmethod class Persistent(metaclass=ABCMeta): @abstractmethod def save(self) -> None: ... class Thing(Persistent): def __init__(self) -> None: ... ... # No "save" method # Error: Cannot instantiate abstract class 'Thing' with abstract attribute 'save' [abstract] t = Thing() Check the target of NewType [valid-newtype] ------------------------------------------- The target of a :py:func:`NewType ` definition must be a class type. It can't be a union type, ``Any``, or various other special types. You can also get this error if the target has been imported from a module whose source mypy cannot find, since any such definitions are treated by mypy as values with ``Any`` types. Example: .. code-block:: python from typing import NewType # The source for "acme" is not available for mypy from acme import Entity # type: ignore # Error: Argument 2 to NewType(...) must be subclassable (got "Any") [valid-newtype] UserEntity = NewType('UserEntity', Entity) To work around the issue, you can either give mypy access to the sources for ``acme`` or create a stub file for the module. See :ref:`ignore-missing-imports` for more information. 
Check the return type of __exit__ [exit-return] ----------------------------------------------- If mypy can determine that :py:meth:`__exit__ ` always returns ``False``, mypy checks that the return type is *not* ``bool``. The boolean value of the return type affects which lines mypy thinks are reachable after a ``with`` statement, since any :py:meth:`__exit__ ` method that can return ``True`` may swallow exceptions. An imprecise return type can result in mysterious errors reported near ``with`` statements. To fix this, use either ``typing_extensions.Literal[False]`` or ``None`` as the return type. Returning ``None`` is equivalent to returning ``False`` in this context, since both are treated as false values. Example: .. code-block:: python class MyContext: ... def __exit__(self, exc, value, tb) -> bool: # Error print('exit') return False This produces the following output from mypy: .. code-block:: text example.py:3: error: "bool" is invalid as return type for "__exit__" that always returns False example.py:3: note: Use "typing_extensions.Literal[False]" as the return type or change it to "None" example.py:3: note: If return type of "__exit__" implies that it may return True, the context manager may swallow exceptions You can use ``Literal[False]`` to fix the error: .. code-block:: python from typing_extensions import Literal class MyContext: ... def __exit__(self, exc, value, tb) -> Literal[False]: # OK print('exit') return False You can also use ``None``: .. code-block:: python class MyContext: ... def __exit__(self, exc, value, tb) -> None: # Also OK print('exit') Report syntax errors [syntax] ----------------------------- If the code being checked is not syntactically valid, mypy issues a syntax error. Most, but not all, syntax errors are *blocking errors*: they can't be ignored with a ``# type: ignore`` comment. 
Miscellaneous checks [misc] --------------------------- Mypy performs numerous other, less commonly failing checks that don't have specific error codes. These use the ``misc`` error code. Other than being used for multiple unrelated errors, the ``misc`` error code is not special. For example, you can ignore all errors in this category by using ``# type: ignore[misc]`` comment. Since these errors are not expected to be common, it's unlikely that you'll see two *different* errors with the ``misc`` code on a single line -- though this can certainly happen once in a while. .. note:: Future mypy versions will likely add new error codes for some errors that currently use the ``misc`` error code. mypy-0.761/docs/source/error_code_list2.rst0000644€tŠÔÚ€2›s®0000001274613576752246025154 0ustar jukkaDROPBOX\Domain Users00000000000000.. _error-codes-optional: Error codes for optional checks =============================== This section documents various errors codes that mypy generates only if you enable certain options. See :ref:`error-codes` for general documentation about error codes. :ref:`error-code-list` documents error codes that are enabled by default. .. note:: The examples in this section use :ref:`inline configuration ` to specify mypy options. You can also set the same options by using a :ref:`configuration file ` or :ref:`command-line options `. Check that type arguments exist [type-arg] ------------------------------------------ If you use :option:`--disallow-any-generics `, mypy requires that each generic type has values for each type argument. For example, the types ``List`` or ``dict`` would be rejected. You should instead use types like ``List[int]`` or ``Dict[str, int]``. Any omitted generic type arguments get implicit ``Any`` values. The type ``List`` is equivalent to ``List[Any]``, and so on. Example: .. 
code-block:: python # mypy: disallow-any-generics from typing import List # Error: Missing type parameters for generic type "List" [type-arg] def remove_dups(items: List) -> List: ... Check that every function has an annotation [no-untyped-def] ------------------------------------------------------------ If you use :option:`--disallow-untyped-defs `, mypy requires that all functions have annotations (either a Python 3 annotation or a type comment). Example: .. code-block:: python # mypy: disallow-untyped-defs def inc(x): # Error: Function is missing a type annotation [no-untyped-def] return x + 1 def inc_ok(x: int) -> int: # OK return x + 1 class Counter: # Error: Function is missing a type annotation [no-untyped-def] def __init__(self): self.value = 0 class CounterOk: # OK: An explicit "-> None" is needed if "__init__" takes no arguments def __init__(self) -> None: self.value = 0 Check that cast is not redundant [redundant-cast] ------------------------------------------------- If you use :option:`--warn-redundant-casts `, mypy will generate an error if the source type of a cast is the same as the target type. Example: .. code-block:: python # mypy: warn-redundant-casts from typing import cast Count = int def example(x: Count) -> int: # Error: Redundant cast to "int" [redundant-cast] return cast(int, x) Check that comparisons are overlapping [comparison-overlap] ----------------------------------------------------------- If you use :option:`--strict-equality `, mypy will generate an error if it thinks that a comparison operation is always true or false. These are often bugs. Sometimes mypy is too picky and the comparison can actually be useful. Instead of disabling strict equality checking everywhere, you can use ``# type: ignore[comparison-overlap]`` to ignore the issue on a particular line only. Example: .. 
code-block:: python # mypy: strict-equality def is_magic(x: bytes) -> bool: # Error: Non-overlapping equality check (left operand type: "bytes", # right operand type: "str") [comparison-overlap] return x == 'magic' We can fix the error by changing the string literal to a bytes literal: .. code-block:: python # mypy: strict-equality def is_magic(x: bytes) -> bool: return x == b'magic' # OK Check that no untyped functions are called [no-untyped-call] ------------------------------------------------------------ If you use :option:`--disallow-untyped-calls `, mypy generates an error when you call an unannotated function in an annotated function. Example: .. code-block:: python # mypy: disallow-untyped-calls def do_it() -> None: # Error: Call to untyped function "bad" in typed context [no-untyped-call] bad() def bad(): ... Check that function does not return Any value [no-any-return] ------------------------------------------------------------- If you use :option:`--warn-return-any `, mypy generates an error if you return a value with an ``Any`` type in a function that is annotated to return a non-``Any`` value. Example: .. code-block:: python # mypy: warn-return-any def fields(s): return s.split(',') def first_field(x: str) -> str: # Error: Returning Any from function declared to return "str" [no-any-return] return fields(x)[0] Check that types have no Any components due to missing imports [no-any-unimported] ---------------------------------------------------------------------------------- If you use :option:`--disallow-any-unimported `, mypy generates an error if a component of a type becomes ``Any`` because mypy couldn't resolve an import. These "stealth" ``Any`` types can be surprising and accidentally cause imprecise type checking. In this example, we assume that mypy can't find the module ``animals``, which means that ``Cat`` falls back to ``Any`` in a type annotation: .. 
code-block:: python # mypy: disallow-any-unimported from animals import Cat # type: ignore # Error: Argument 1 to "feed" becomes "Any" due to an unfollowed import [no-any-unimported] def feed(cat: Cat) -> None: ... mypy-0.761/docs/source/error_codes.rst0000644€tŠÔÚ€2›s®0000000357013576752246024215 0ustar jukkaDROPBOX\Domain Users00000000000000.. _error-codes: Error codes =========== Mypy can optionally display an error code such as ``[attr-defined]`` after each error message. Error codes serve two purposes: 1. It's possible to silence specific error codes on a line using ``# type: ignore[code]``. This way you won't accidentally ignore other, potentially more serious errors. 2. The error code can be used to find documentation about the error. The next two topics (:ref:`error-code-list` and :ref:`error-codes-optional`) document the various error codes mypy can report. Most error codes are shared between multiple related error messages. Error codes may change in future mypy releases. Displaying error codes ---------------------- Error codes are not displayed by default. Use :option:`--show-error-codes ` to display error codes. Error codes are shown inside square brackets: .. code-block:: text $ mypy --show-error-codes prog.py prog.py:1: error: "str" has no attribute "trim" [attr-defined] Silencing errors based on error codes ------------------------------------- You can use a special comment ``# type: ignore[code, ...]`` to only ignore errors with a specific error code (or codes) on a particular line. This can be used even if you have not configured mypy to show error codes. Currently it's only possible to disable arbitrary error codes on individual lines using this comment. .. note:: There are command-line flags and config file settings for enabling certain optional error codes, such as :option:`--disallow-untyped-defs `, which enables the ``no-untyped-def`` error code. This example shows how to ignore an error about an imported name mypy thinks is undefined: .. 
code-block:: python # 'foo' is defined in 'foolib', even though mypy can't see the # definition. from foolib import foo # type: ignore[attr-defined] mypy-0.761/docs/source/existing_code.rst0000644€tŠÔÚ€2›s®0000001432113576752246024527 0ustar jukkaDROPBOX\Domain Users00000000000000.. _existing-code: Using mypy with an existing codebase ==================================== This section explains how to get started using mypy with an existing, significant codebase that has little or no type annotations. If you are a beginner, you can skip this section. These steps will get you started with mypy on an existing codebase: 1. Start small -- get a clean mypy build for some files, with few annotations 2. Write a mypy runner script to ensure consistent results 3. Run mypy in Continuous Integration to prevent type errors 4. Gradually annotate commonly imported modules 5. Write annotations as you modify existing code and write new code 6. Use :doc:`monkeytype:index` or `PyAnnotate`_ to automatically annotate legacy code We discuss all of these points in some detail below, and a few optional follow-up steps. Start small ----------- If your codebase is large, pick a subset of your codebase (say, 5,000 to 50,000 lines) and run mypy only on this subset at first, *without any annotations*. This shouldn't take more than a day or two to implement, so you start enjoying benefits soon. You'll likely need to fix some mypy errors, either by inserting annotations requested by mypy or by adding ``# type: ignore`` comments to silence errors you don't want to fix now. In particular, mypy often generates errors about modules that it can't find or that don't have stub files: .. code-block:: text core/config.py:7: error: Cannot find implementation or library stub for module named 'frobnicate' core/model.py:9: error: Cannot find implementation or library stub for module named 'acme' ... This is normal, and you can easily ignore these errors. 
For example, here we ignore an error about a third-party module ``frobnicate`` that doesn't have stubs using ``# type: ignore``: .. code-block:: python import frobnicate # type: ignore ... frobnicate.initialize() # OK (but not checked) You can also use a mypy configuration file, which is convenient if there are a large number of errors to ignore. For example, to disable errors about importing ``frobnicate`` and ``acme`` everywhere in your codebase, use a config like this: .. code-block:: text [mypy-frobnicate.*] ignore_missing_imports = True [mypy-acme.*] ignore_missing_imports = True You can add multiple sections for different modules that should be ignored. If your config file is named ``mypy.ini``, this is how you run mypy: .. code-block:: text mypy --config-file mypy.ini mycode/ If you get a large number of errors, you may want to ignore all errors about missing imports. This can easily cause problems later on and hide real errors, and it's only recommended as a last resort. For more details, look :ref:`here `. Mypy follows imports by default. This can result in a few files passed on the command line causing mypy to process a large number of imported files, resulting in lots of errors you don't want to deal with at the moment. There is a config file option to disable this behavior, but since this can hide errors, it's not recommended for most users. Mypy runner script ------------------ Introduce a mypy runner script that runs mypy, so that every developer will use mypy consistently. Here are some things you may want to do in the script: * Ensure that the correct version of mypy is installed. * Specify mypy config file or command-line options. * Provide set of files to type check. You may want to implement inclusion and exclusion filters for full control of the file list. 
Continuous Integration ---------------------- Once you have a clean mypy run and a runner script for a part of your codebase, set up your Continuous Integration (CI) system to run mypy to ensure that developers won't introduce bad annotations. A simple CI script could look something like this: .. code-block:: text python3 -m pip install mypy==0.600 # Pinned version avoids surprises scripts/mypy # Runs with the correct options Annotate widely imported modules -------------------------------- Most projects have some widely imported modules, such as utilities or model classes. It's a good idea to annotate these pretty early on, since this allows code using these modules to be type checked more effectively. Since mypy supports gradual typing, it's okay to leave some of these modules unannotated. The more you annotate, the more useful mypy will be, but even a little annotation coverage is useful. Write annotations as you go --------------------------- Now you are ready to include type annotations in your development workflows. Consider adding something like these in your code style conventions: 1. Developers should add annotations for any new code. 2. It's also encouraged to write annotations when you modify existing code. This way you'll gradually increase annotation coverage in your codebase without much effort. Automate annotation of legacy code ---------------------------------- There are tools for automatically adding draft annotations based on type profiles collected at runtime. Tools include :doc:`monkeytype:index` (Python 3) and `PyAnnotate`_ (type comments only). A simple approach is to collect types from test runs. This may work well if your test coverage is good (and if your tests aren't very slow). Another approach is to enable type collection for a small, random fraction of production network requests. This clearly requires more care, as type collection could impact the reliability or the performance of your service. 
Speed up mypy runs ------------------ You can use :ref:`mypy daemon ` to get much faster incremental mypy runs. The larger your project is, the more useful this will be. If your project has at least 100,000 lines of code or so, you may also want to set up :ref:`remote caching ` for further speedups. Introduce stricter options -------------------------- Mypy is very configurable. Once you get started with static typing, you may want to explore the various strictness options mypy provides to catch more bugs. For example, you can ask mypy to require annotations for all functions in certain modules to avoid accidentally introducing code that won't be type checked. Refer to :ref:`command-line` for the details. .. _PyAnnotate: https://github.com/dropbox/pyannotate mypy-0.761/docs/source/extending_mypy.rst0000644€tŠÔÚ€2›s®0000002443113576752246024751 0ustar jukkaDROPBOX\Domain Users00000000000000.. _extending-mypy: Extending and integrating mypy ============================== .. _integrating-mypy: Integrating mypy into another Python application ************************************************ It is possible to integrate mypy into another Python 3 application by importing ``mypy.api`` and calling the ``run`` function with a parameter of type ``List[str]``, containing what normally would have been the command line arguments to mypy. Function ``run`` returns a ``Tuple[str, str, int]``, namely ``(, , )``, in which ```` is what mypy normally writes to :py:data:`sys.stdout`, ```` is what mypy normally writes to :py:data:`sys.stderr` and ``exit_status`` is the exit status mypy normally returns to the operating system. A trivial example of using the api is the following .. 
code-block:: python import sys from mypy import api result = api.run(sys.argv[1:]) if result[0]: print('\nType checking report:\n') print(result[0]) # stdout if result[1]: print('\nError report:\n') print(result[1]) # stderr print('\nExit status:', result[2]) Extending mypy using plugins **************************** Python is a highly dynamic language and has extensive metaprogramming capabilities. Many popular libraries use these to create APIs that may be more flexible and/or natural for humans, but are hard to express using static types. Extending the :pep:`484` type system to accommodate all existing dynamic patterns is impractical and often just impossible. Mypy supports a plugin system that lets you customize the way mypy type checks code. This can be useful if you want to extend mypy so it can type check code that uses a library that is difficult to express using just :pep:`484` types. The plugin system is focused on improving mypy's understanding of *semantics* of third party frameworks. There is currently no way to define new first class kinds of types. .. note:: The plugin system is experimental and prone to change. If you want to write a mypy plugin, we recommend you start by contacting the mypy core developers on `gitter `_. In particular, there are no guarantees about backwards compatibility. Backwards incompatible changes may be made without a deprecation period, but we will announce them in `the plugin API changes announcement issue `_. Configuring mypy to use plugins ******************************* Plugins are Python files that can be specified in a mypy :ref:`config file ` using one of the two formats: relative or absolute path to the plugin to the plugin file, or a module name (if the plugin is installed using ``pip install`` in the same virtual environment where mypy is running). The two formats can be mixed, for example: .. 
code-block:: ini [mypy] plugins = /one/plugin.py, other.plugin Mypy will try to import the plugins and will look for an entry point function named ``plugin``. If the plugin entry point function has a different name, it can be specified after colon: .. code-block:: ini [mypy] plugins = custom_plugin:custom_entry_point In the following sections we describe the basics of the plugin system with some examples. For more technical details, please read the docstrings in `mypy/plugin.py `_ in mypy source code. Also you can find good examples in the bundled plugins located in `mypy/plugins `_. High-level overview ******************* Every entry point function should accept a single string argument that is a full mypy version and return a subclass of ``mypy.plugin.Plugin``: .. code-block:: python from mypy.plugin import Plugin class CustomPlugin(Plugin): def get_type_analyze_hook(self, fullname: str): # see explanation below ... def plugin(version: str): # ignore version argument if the plugin works with all mypy versions. return CustomPlugin During different phases of analyzing the code (first in semantic analysis, and then in type checking) mypy calls plugin methods such as ``get_type_analyze_hook()`` on user plugins. This particular method, for example, can return a callback that mypy will use to analyze unbound types with the given full name. See the full plugin hook method list :ref:`below `. Mypy maintains a list of plugins it gets from the config file plus the default (built-in) plugin that is always enabled. Mypy calls a method once for each plugin in the list until one of the methods returns a non-``None`` value. This callback will be then used to customize the corresponding aspect of analyzing/checking the current abstract syntax tree node. The callback returned by the ``get_xxx`` method will be given a detailed current context and an API to create new nodes, new types, emit error messages, etc., and the result will be used for further processing. 
Plugin developers should ensure that their plugins work well in incremental and daemon modes. In particular, plugins should not hold global state due to caching of plugin hook results. .. _plugin_hooks: Current list of plugin hooks **************************** **get_type_analyze_hook()** customizes behaviour of the type analyzer. For example, :pep:`484` doesn't support defining variadic generic types: .. code-block:: python from lib import Vector a: Vector[int, int] b: Vector[int, int, int] When analyzing this code, mypy will call ``get_type_analyze_hook("lib.Vector")``, so the plugin can return some valid type for each variable. **get_function_hook()** is used to adjust the return type of a function call. This is a good choice if the return type of some function depends on *values* of some arguments that can't be expressed using literal types (for example a function may return an ``int`` for positive arguments and a ``float`` for negative arguments). This hook will be also called for instantiation of classes. For example: .. code-block:: python from contextlib import contextmanager from typing import TypeVar, Callable T = TypeVar('T') @contextmanager # built-in plugin can infer a precise type here def stopwatch(timer: Callable[[], T]) -> Iterator[T]: ... yield timer() **get_method_hook()** is the same as ``get_function_hook()`` but for methods instead of module level functions. **get_method_signature_hook()** is used to adjust the signature of a method. This includes special Python methods except :py:meth:`~object.__init__` and :py:meth:`~object.__new__`. For example in this code: .. code-block:: python from ctypes import Array, c_int x: Array[c_int] x[0] = 42 mypy will call ``get_method_signature_hook("ctypes.Array.__setitem__")`` so that the plugin can mimic the :py:mod:`ctypes` auto-convert behavior. **get_attribute_hook()** overrides instance member field lookups and property access (not assignments, and not method calls). 
This hook is only called for fields which already exist on the class. *Exception:* if :py:meth:`__getattr__ ` or :py:meth:`__getattribute__ ` is a method on the class, the hook is called for all fields which do not refer to methods. **get_class_decorator_hook()** can be used to update class definition for given class decorators. For example, you can add some attributes to the class to match runtime behaviour: .. code-block:: python from lib import customize @customize class UserDefined: pass var = UserDefined var.customized # mypy can understand this using a plugin **get_metaclass_hook()** is similar to above, but for metaclasses. **get_base_class_hook()** is similar to above, but for base classes. **get_dynamic_class_hook()** can be used to allow dynamic class definitions in mypy. This plugin hook is called for every assignment to a simple name where right hand side is a function call: .. code-block:: python from lib import dynamic_class X = dynamic_class('X', []) For such definition, mypy will call ``get_dynamic_class_hook("lib.dynamic_class")``. The plugin should create the corresponding ``mypy.nodes.TypeInfo`` object, and place it into a relevant symbol table. (Instances of this class represent classes in mypy and hold essential information such as qualified name, method resolution order, etc.) **get_customize_class_mro_hook()** can be used to modify class MRO (for example insert some entries there) before the class body is analyzed. **get_additional_deps()** can be used to add new dependencies for a module. It is called before semantic analysis. For example, this can be used if a library has dependencies that are dynamically loaded based on configuration information. **report_config_data()** can be used if the plugin has some sort of per-module configuration that can affect typechecking. In that case, when the configuration for a module changes, we want to invalidate mypy's cache for that module so that it can be rechecked. 
This hook should be used to report to mypy any relevant configuration data, so that mypy knows to recheck the module if the configuration changes. The hooks hould return data encodable as JSON. Notes about the semantic analyzer ********************************* Mypy 0.710 introduced a new semantic analyzer, and the old semantic analyzer was removed in mypy 0.730. Support for the new semantic analyzer required some changes to existing plugins. Here is a short summary of the most important changes: * The order of processing AST nodes is different. Code outside functions is processed first, and functions and methods are processed afterwards. * Each AST node can be processed multiple times to resolve forward references. The same plugin hook may be called multiple times, so they need to be idempotent. * The ``anal_type()`` API method returns ``None`` if some part of the type is not available yet due to forward references, for example. * When looking up symbols, you may encounter *placeholder nodes* that are used for names that haven't been fully processed yet. You'll generally want to request another semantic analysis iteration by *deferring* in that case. See the docstring at the top of `mypy/plugin.py `_ for more details. mypy-0.761/docs/source/faq.rst0000644€tŠÔÚ€2›s®0000002210513576752246022451 0ustar jukkaDROPBOX\Domain Users00000000000000Frequently Asked Questions ========================== Why have both dynamic and static typing? **************************************** Dynamic typing can be flexible, powerful, convenient and easy. But it's not always the best approach; there are good reasons why many developers choose to use statically typed languages or static typing for Python. Here are some potential benefits of mypy-style static typing: - Static typing can make programs easier to understand and maintain. Type declarations can serve as machine-checked documentation. 
This is important as code is typically read much more often than modified, and this is especially important for large and complex programs. - Static typing can help you find bugs earlier and with less testing and debugging. Especially in large and complex projects this can be a major time-saver. - Static typing can help you find difficult-to-find bugs before your code goes into production. This can improve reliability and reduce the number of security issues. - Static typing makes it practical to build very useful development tools that can improve programming productivity or software quality, including IDEs with precise and reliable code completion, static analysis tools, etc. - You can get the benefits of both dynamic and static typing in a single language. Dynamic typing can be perfect for a small project or for writing the UI of your program, for example. As your program grows, you can adapt tricky application logic to static typing to help maintenance. See also the `front page `_ of the mypy web site. Would my project benefit from static typing? ******************************************** For many projects dynamic typing is perfectly fine (we think that Python is a great language). But sometimes your projects demand bigger guns, and that's when mypy may come in handy. If some of these ring true for your projects, mypy (and static typing) may be useful: - Your project is large or complex. - Your codebase must be maintained for a long time. - Multiple developers are working on the same code. - Running tests takes a lot of time or work (type checking helps you find errors quickly early in development, reducing the number of testing iterations). - Some project members (devs or management) don't like dynamic typing, but others prefer dynamic typing and Python syntax. Mypy could be a solution that everybody finds easy to accept. - You want to future-proof your project even if currently none of the above really apply. 
The earlier you start, the easier it will be to adopt static typing. Can I use mypy to type check my existing Python code? ***************************************************** Mypy supports most Python features and idioms, and many large Python projects are using mypy successfully. Code that uses complex introspection or metaprogramming may be impractical to type check, but it should still be possible to use static typing in other parts of a codebase that are less dynamic. Will static typing make my programs run faster? *********************************************** Mypy only does static type checking and it does not improve performance. It has a minimal performance impact. In the future, there could be other tools that can compile statically typed mypy code to C modules or to efficient JVM bytecode, for example, but this is outside the scope of the mypy project. How do I type check my Python 2 code? ************************************* You can use a :pep:`comment-based function annotation syntax <484#suggested-syntax-for-python-2-7-and-straddling-code>` and use the :option:`--py2 ` command-line option to type check your Python 2 code. You'll also need to install ``typing`` for Python 2 via ``pip install typing``. Is mypy free? ************* Yes. Mypy is free software, and it can also be used for commercial and proprietary projects. Mypy is available under the MIT license. Can I use duck typing with mypy? ******************************** Mypy provides support for both `nominal subtyping `_ and `structural subtyping `_. Structural subtyping can be thought of as "static duck typing". Some argue that structural subtyping is better suited for languages with duck typing such as Python. Mypy however primarily uses nominal subtyping, leaving structural subtyping mostly opt-in (except for built-in protocols such as :py:class:`~typing.Iterable` that always support structural subtyping). Here are some reasons why: 1. 
It is easy to generate short and informative error messages when using a nominal type system. This is especially important when using type inference. 2. Python provides built-in support for nominal :py:func:`isinstance` tests and they are widely used in programs. Only limited support for structural :py:func:`isinstance` is available, and it's less type safe than nominal type tests. 3. Many programmers are already familiar with static, nominal subtyping and it has been successfully used in languages such as Java, C++ and C#. Fewer languages use structural subtyping. However, structural subtyping can also be useful. For example, a "public API" may be more flexible if it is typed with protocols. Also, using protocol types removes the necessity to explicitly declare implementations of ABCs. As a rule of thumb, we recommend using nominal classes where possible, and protocols where necessary. For more details about protocol types and structural subtyping see :ref:`protocol-types` and :pep:`544`. I like Python and I have no need for static typing ************************************************** The aim of mypy is not to convince everybody to write statically typed Python -- static typing is entirely optional, now and in the future. The goal is to give more options for Python programmers, to make Python a more competitive alternative to other statically typed languages in large projects, to improve programmer productivity, and to improve software quality. How are mypy programs different from normal Python? *************************************************** Since you use a vanilla Python implementation to run mypy programs, mypy programs are also Python programs. The type checker may give warnings for some valid Python code, but the code is still always runnable. Also, some Python features and syntax are still not supported by mypy, but this is gradually improving. The obvious difference is the availability of static type checking. 
The section :ref:`common_issues` mentions some modifications to Python code that may be required to make code type check without errors. Also, your code must make attributes explicit. Mypy supports modular, efficient type checking, and this seems to rule out type checking some language features, such as arbitrary monkey patching of methods. How is mypy different from Cython? ********************************** :doc:`Cython ` is a variant of Python that supports compilation to CPython C modules. It can give major speedups to certain classes of programs compared to CPython, and it provides static typing (though this is different from mypy). Mypy differs in the following aspects, among others: - Cython is much more focused on performance than mypy. Mypy is only about static type checking, and increasing performance is not a direct goal. - The mypy syntax is arguably simpler and more "Pythonic" (no cdef/cpdef, etc.) for statically typed code. - The mypy syntax is compatible with Python. Mypy programs are normal Python programs that can be run using any Python implementation. Cython has many incompatible extensions to Python syntax, and Cython programs generally cannot be run without first compiling them to CPython extension modules via C. Cython also has a pure Python mode, but it seems to support only a subset of Cython functionality, and the syntax is quite verbose. - Mypy has a different set of type system features. For example, mypy has genericity (parametric polymorphism), function types and bidirectional type inference, which are not supported by Cython. (Cython has fused types that are different but related to mypy generics. Mypy also has a similar feature as an extension of generics.) - The mypy type checker knows about the static types of many Python stdlib modules and can effectively type check code that uses them. - Cython supports accessing C functions directly and many features are defined in terms of translating them to C or C++. 
Mypy just uses Python semantics, and mypy does not deal with accessing C library functionality. Mypy is a cool project. Can I help? *********************************** Any help is much appreciated! `Contact `_ the developers if you would like to contribute. Any help related to development, design, publicity, documentation, testing, web site maintenance, financing, etc. can be helpful. You can learn a lot by contributing, and anybody can help, even beginners! However, some knowledge of compilers and/or type systems is essential if you want to work on mypy internals. mypy-0.761/docs/source/final_attrs.rst0000644€tŠÔÚ€2›s®0000001463213576752246024216 0ustar jukkaDROPBOX\Domain Users00000000000000.. _final_attrs: Final names, methods and classes ================================ This section introduces these related features: 1. *Final names* are variables or attributes that should not be reassigned after initialization. They are useful for declaring constants. 2. *Final methods* should not be overridden in a subclass. 3. *Final classes* should not be subclassed. All of these are only enforced by mypy, and only in annotated code. They is no runtime enforcement by the Python runtime. .. note:: These are experimental features. They might change in later versions of mypy. The *final* qualifiers are available through the ``typing_extensions`` package on PyPI. Final names ----------- You can use the ``typing_extensions.Final`` qualifier to indicate that a name or attribute should not be reassigned, redefined, or overridden. This is often useful for module and class level constants as a way to prevent unintended modification. Mypy will prevent further assignments to final names in type-checked code: .. 
code-block:: python from typing_extensions import Final RATE: Final = 3000 class Base: DEFAULT_ID: Final = 0 RATE = 300 # Error: can't assign to final attribute Base.DEFAULT_ID = 1 # Error: can't override a final attribute Another use case for final attributes is to protect certain attributes from being overridden in a subclass: .. code-block:: python from typing_extensions import Final class Window: BORDER_WIDTH: Final = 2.5 ... class ListView(Window): BORDER_WIDTH = 3 # Error: can't override a final attribute You can use :py:class:`@property ` to make an attribute read-only, but unlike ``Final``, it doesn't work with module attributes, and it doesn't prevent overriding in subclasses. Syntax variants *************** You can use ``Final`` in one of these forms: * You can provide an explicit type using the syntax ``Final[]``. Example: .. code-block:: python ID: Final[float] = 1 * You can omit the type: .. code-block:: python ID: Final = 1 Here mypy will infer type ``int`` for ``ID``. Note that unlike for generic classes this is *not* the same as ``Final[Any]``. * In class bodies and stub files you can omit the right hand side and just write ``ID: Final[float]``. * Finally, you can write ``self.id: Final = 1`` (also optionally with a type in square brackets). This is allowed *only* in :py:meth:`__init__ ` methods, so that the final instance attribute is assigned only once when an instance is created. Details of using ``Final`` ************************** These are the two main rules for defining a final name: * There can be *at most one* final declaration per module or class for a given attribute. There can't be separate class-level and instance-level constants with the same name. * There must be *exactly one* assignment to a final name. A final attribute declared in a class body without an initializer must be initialized in the :py:meth:`__init__ ` method (you can skip the initializer in stub files): .. 
code-block:: python class ImmutablePoint: x: Final[int] y: Final[int] # Error: final attribute without an initializer def __init__(self) -> None: self.x = 1 # Good ``Final`` can only be used as the outermost type in assignments or variable annotations. Using it in any other position is an error. In particular, ``Final`` can't be used in annotations for function arguments: .. code-block:: python x: List[Final[int]] = [] # Error! def fun(x: Final[List[int]]) -> None: # Error! ... ``Final`` and :py:data:`~typing.ClassVar` should not be used together. Mypy will infer the scope of a final declaration automatically depending on whether it was initialized in the class body or in :py:meth:`__init__ `. A final attribute can't be overridden by a subclass (even with another explicit final declaration). Note however that a final attribute can override a read-only property: .. code-block:: python class Base: @property def ID(self) -> int: ... class Derived(Base): ID: Final = 1 # OK Declaring a name as final only guarantees that the name wll not be re-bound to another value. It doesn't make the value immutable. You can use immutable ABCs and containers to prevent mutating such values: .. code-block:: python x: Final = ['a', 'b'] x.append('c') # OK y: Final[Sequence[str]] = ['a', 'b'] y.append('x') # Error: Sequence is immutable z: Final = ('a', 'b') # Also an option Final methods ------------- Like with attributes, sometimes it is useful to protect a method from overriding. You can use the ``typing_extensions.final`` decorator for this purpose: .. code-block:: python from typing_extensions import final class Base: @final def common_name(self) -> None: ... class Derived(Base): def common_name(self) -> None: # Error: cannot override a final method ... This ``@final`` decorator can be used with instance methods, class methods, static methods, and properties. 
For overloaded methods you should add ``@final`` on the implementation to make it final (or on the first overload in stubs): .. code-block:: python from typing import Any, overload class Base: @overload def method(self) -> None: ... @overload def method(self, arg: int) -> int: ... @final def method(self, x=None): ... Final classes ------------- You can apply the ``typing_extensions.final`` decorator to a class to indicate to mypy that it should not be subclassed: .. code-block:: python from typing_extensions import final @final class Leaf: ... class MyLeaf(Leaf): # Error: Leaf can't be subclassed ... The decorator acts as a declaration for mypy (and as documentation for humans), but it doesn't actually prevent subclassing at runtime. Here are some situations where using a final class may be useful: * A class wasn't designed to be subclassed. Perhaps subclassing would not work as expected, or subclassing would be error-prone. * Subclassing would make code harder to understand or maintain. For example, you may want to prevent unnecessarily tight coupling between base classes and subclasses. * You want to retain the freedom to arbitrarily change the class implementation in the future, and these changes might break subclasses. mypy-0.761/docs/source/generics.rst0000644€tŠÔÚ€2›s®0000005463213576752246023513 0ustar jukkaDROPBOX\Domain Users00000000000000Generics ======== This section explains how you can define your own generic classes that take one or more type parameters, similar to built-in types such as ``List[X]``. User-defined generics are a moderately advanced feature and you can get far without ever using them -- feel free to skip this section and come back later. .. _generic-classes: Defining generic classes ************************ The built-in collection classes are generic classes. Generic types have one or more type parameters, which can be arbitrary types. 
For example, ``Dict[int, str]`` has the type parameters ``int`` and ``str``, and ``List[int]`` has a type parameter ``int``. Programs can also define new generic classes. Here is a very simple generic class that represents a stack: .. code-block:: python from typing import TypeVar, Generic T = TypeVar('T') class Stack(Generic[T]): def __init__(self) -> None: # Create an empty list with items of type T self.items: List[T] = [] def push(self, item: T) -> None: self.items.append(item) def pop(self) -> T: return self.items.pop() def empty(self) -> bool: return not self.items The ``Stack`` class can be used to represent a stack of any type: ``Stack[int]``, ``Stack[Tuple[int, str]]``, etc. Using ``Stack`` is similar to built-in container types: .. code-block:: python # Construct an empty Stack[int] instance stack = Stack[int]() stack.push(2) stack.pop() stack.push('x') # Type error Type inference works for user-defined generic types as well: .. code-block:: python def process(stack: Stack[int]) -> None: ... process(Stack()) # Argument has inferred type Stack[int] Construction of instances of generic types is also type checked: .. code-block:: python class Box(Generic[T]): def __init__(self, content: T) -> None: self.content = content Box(1) # OK, inferred type is Box[int] Box[int](1) # Also OK s = 'some string' Box[int](s) # Type error Generic class internals *********************** You may wonder what happens at runtime when you index ``Stack``. Actually, indexing ``Stack`` returns essentially a copy of ``Stack`` that returns instances of the original class on instantiation: .. code-block:: python >>> print(Stack) __main__.Stack >>> print(Stack[int]) __main__.Stack[int] >>> print(Stack[int]().__class__) __main__.Stack Note that built-in types :py:class:`list`, :py:class:`dict` and so on do not support indexing in Python. This is why we have the aliases :py:class:`~typing.List`, :py:class:`~typing.Dict` and so on in the :py:mod:`typing` module. 
Indexing these aliases gives you a class that directly inherits from the target class in Python: .. code-block:: python >>> from typing import List >>> List[int] typing.List[int] >>> List[int].__bases__ (, typing.MutableSequence) Generic types could be instantiated or subclassed as usual classes, but the above examples illustrate that type variables are erased at runtime. Generic ``Stack`` instances are just ordinary Python objects, and they have no extra runtime overhead or magic due to being generic, other than a metaclass that overloads the indexing operator. .. _generic-subclasses: Defining sub-classes of generic classes *************************************** User-defined generic classes and generic classes defined in :py:mod:`typing` can be used as base classes for another classes, both generic and non-generic. For example: .. code-block:: python from typing import Generic, TypeVar, Mapping, Iterator, Dict KT = TypeVar('KT') VT = TypeVar('VT') class MyMap(Mapping[KT, VT]): # This is a generic subclass of Mapping def __getitem__(self, k: KT) -> VT: ... # Implementations omitted def __iter__(self) -> Iterator[KT]: ... def __len__(self) -> int: ... items: MyMap[str, int] # Okay class StrDict(Dict[str, str]): # This is a non-generic subclass of Dict def __str__(self) -> str: return 'StrDict({})'.format(super().__str__()) data: StrDict[int, int] # Error! StrDict is not generic data2: StrDict # OK class Receiver(Generic[T]): def accept(self, value: T) -> None: ... class AdvancedReceiver(Receiver[T]): ... .. note:: You have to add an explicit :py:class:`~typing.Mapping` base class if you want mypy to consider a user-defined class as a mapping (and :py:class:`~typing.Sequence` for sequences, etc.). This is because mypy doesn't use *structural subtyping* for these ABCs, unlike simpler protocols like :py:class:`~typing.Iterable`, which use :ref:`structural subtyping `. 
:py:class:`Generic ` can be omitted from bases if there are other base classes that include type variables, such as ``Mapping[KT, VT]`` in the above example. If you include ``Generic[...]`` in bases, then it should list all type variables present in other bases (or more, if needed). The order of type variables is defined by the following rules: * If ``Generic[...]`` is present, then the order of variables is always determined by their order in ``Generic[...]``. * If there are no ``Generic[...]`` in bases, then all type variables are collected in the lexicographic order (i.e. by first appearance). For example: .. code-block:: python from typing import Generic, TypeVar, Any T = TypeVar('T') S = TypeVar('S') U = TypeVar('U') class One(Generic[T]): ... class Another(Generic[T]): ... class First(One[T], Another[S]): ... class Second(One[T], Another[S], Generic[S, U, T]): ... x: First[int, str] # Here T is bound to int, S is bound to str y: Second[int, str, Any] # Here T is Any, S is int, and U is str .. _generic-functions: Generic functions ***************** Generic type variables can also be used to define generic functions: .. code-block:: python from typing import TypeVar, Sequence T = TypeVar('T') # Declare type variable def first(seq: Sequence[T]) -> T: # Generic function return seq[0] As with generic classes, the type variable can be replaced with any type. That means ``first`` can be used with any sequence type, and the return type is derived from the sequence item type. For example: .. code-block:: python # Assume first defined as above. s = first('foo') # s has type str. n = first([1, 2, 3]) # n has type int. Note also that a single definition of a type variable (such as ``T`` above) can be used in multiple generic functions or classes. In this example we use the same type variable in two generic functions: .. 
code-block:: python from typing import TypeVar, Sequence T = TypeVar('T') # Declare type variable def first(seq: Sequence[T]) -> T: return seq[0] def last(seq: Sequence[T]) -> T: return seq[-1] A variable cannot have a type variable in its type unless the type variable is bound in a containing generic class or function. .. _generic-methods-and-generic-self: Generic methods and generic self ******************************** You can also define generic methods — just use a type variable in the method signature that is different from class type variables. In particular, ``self`` may also be generic, allowing a method to return the most precise type known at the point of access. .. note:: This feature is experimental. Checking code with type annotations for self arguments is still not fully implemented. Mypy may disallow valid code or allow unsafe code. In this way, for example, you can typecheck chaining of setter methods: .. code-block:: python from typing import TypeVar T = TypeVar('T', bound='Shape') class Shape: def set_scale(self: T, scale: float) -> T: self.scale = scale return self class Circle(Shape): def set_radius(self, r: float) -> 'Circle': self.radius = r return self class Square(Shape): def set_width(self, w: float) -> 'Square': self.width = w return self circle = Circle().set_scale(0.5).set_radius(2.7) # type: Circle square = Square().set_scale(0.5).set_width(3.2) # type: Square Without using generic ``self``, the last two lines could not be type-checked properly. Other uses are factory methods, such as copy and deserialization. For class methods, you can also define generic ``cls``, using :py:class:`Type[T] `: .. 
code-block:: python from typing import TypeVar, Tuple, Type T = TypeVar('T', bound='Friend') class Friend: other = None # type: Friend @classmethod def make_pair(cls: Type[T]) -> Tuple[T, T]: a, b = cls(), cls() a.other = b b.other = a return a, b class SuperFriend(Friend): pass a, b = SuperFriend.make_pair() Note that when overriding a method with generic ``self``, you must either return a generic ``self`` too, or return an instance of the current class. In the latter case, you must implement this method in all future subclasses. Note also that mypy cannot always verify that the implementation of a copy or a deserialization method returns the actual type of self. Therefore you may need to silence mypy inside these methods (but not at the call site), possibly by making use of the ``Any`` type. For some advanced uses of self-types see :ref:`additional examples `. .. _variance-of-generics: Variance of generic types ************************* There are three main kinds of generic types with respect to subtype relations between them: invariant, covariant, and contravariant. Assuming that we have a pair of types ``A`` and ``B``, and ``B`` is a subtype of ``A``, these are defined as follows: * A generic class ``MyCovGen[T, ...]`` is called covariant in type variable ``T`` if ``MyCovGen[B, ...]`` is always a subtype of ``MyCovGen[A, ...]``. * A generic class ``MyContraGen[T, ...]`` is called contravariant in type variable ``T`` if ``MyContraGen[A, ...]`` is always a subtype of ``MyContraGen[B, ...]``. * A generic class ``MyInvGen[T, ...]`` is called invariant in ``T`` if neither of the above is true. Let us illustrate this by few simple examples: * :py:data:`~typing.Union` is covariant in all variables: ``Union[Cat, int]`` is a subtype of ``Union[Animal, int]``, ``Union[Dog, int]`` is also a subtype of ``Union[Animal, int]``, etc. Most immutable containers such as :py:class:`~typing.Sequence` and :py:class:`~typing.FrozenSet` are also covariant. 
* :py:data:`~typing.Callable` is an example of a type that behaves contravariantly in the types of its arguments, namely
A typical example is a type variable that can only have values ``str`` and ``bytes``: .. code-block:: python from typing import TypeVar AnyStr = TypeVar('AnyStr', str, bytes) This is actually such a common type variable that :py:data:`~typing.AnyStr` is defined in :py:mod:`typing` and we don't need to define it ourselves. We can use :py:data:`~typing.AnyStr` to define a function that can concatenate two strings or bytes objects, but it can't be called with other argument types: .. code-block:: python from typing import AnyStr def concat(x: AnyStr, y: AnyStr) -> AnyStr: return x + y concat('a', 'b') # Okay concat(b'a', b'b') # Okay concat(1, 2) # Error! Note that this is different from a union type, since combinations of ``str`` and ``bytes`` are not accepted: .. code-block:: python concat('string', b'bytes') # Error! In this case, this is exactly what we want, since it's not possible to concatenate a string and a bytes object! The type checker will reject this function: .. code-block:: python def union_concat(x: Union[str, bytes], y: Union[str, bytes]) -> Union[str, bytes]: return x + y # Error: can't concatenate str and bytes Another interesting special case is calling ``concat()`` with a subtype of ``str``: .. code-block:: python class S(str): pass ss = concat(S('foo'), S('bar')) You may expect that the type of ``ss`` is ``S``, but the type is actually ``str``: a subtype gets promoted to one of the valid values for the type variable, which in this case is ``str``. This is thus subtly different from *bounded quantification* in languages such as Java, where the return type would be ``S``. The way mypy implements this is correct for ``concat``, since ``concat`` actually returns a ``str`` instance in the above example: .. code-block:: python >>> print(type(ss)) You can also use a :py:class:`~typing.TypeVar` with a restricted set of possible values when defining a generic class. 
For example, mypy uses the type :py:class:`Pattern[AnyStr] ` for the return value of :py:func:`re.compile`, since regular expressions can be based on a string or a bytes pattern. .. _type-variable-upper-bound: Type variables with upper bounds ******************************** A type variable can also be restricted to having values that are subtypes of a specific type. This type is called the upper bound of the type variable, and is specified with the ``bound=...`` keyword argument to :py:class:`~typing.TypeVar`. .. code-block:: python from typing import TypeVar, SupportsAbs T = TypeVar('T', bound=SupportsAbs[float]) In the definition of a generic function that uses such a type variable ``T``, the type represented by ``T`` is assumed to be a subtype of its upper bound, so the function can use methods of the upper bound on values of type ``T``. .. code-block:: python def largest_in_absolute_value(*xs: T) -> T: return max(xs, key=abs) # Okay, because T is a subtype of SupportsAbs[float]. In a call to such a function, the type ``T`` must be replaced by a type that is a subtype of its upper bound. Continuing the example above, .. code-block:: python largest_in_absolute_value(-3.5, 2) # Okay, has type float. largest_in_absolute_value(5+6j, 7) # Okay, has type complex. largest_in_absolute_value('a', 'b') # Error: 'str' is not a subtype of SupportsAbs[float]. Type parameters of generic classes may also have upper bounds, which restrict the valid values for the type parameter in the same way. A type variable may not have both a value restriction (see :ref:`type-variable-value-restriction`) and an upper bound. .. _declaring-decorators: Declaring decorators ******************** One common application of type variable upper bounds is in declaring a decorator that preserves the signature of the function it decorates, regardless of that signature. Here's a complete example: .. 
code-block:: python from typing import Any, Callable, TypeVar, Tuple, cast FuncType = Callable[..., Any] F = TypeVar('F', bound=FuncType) # A decorator that preserves the signature. def my_decorator(func: F) -> F: def wrapper(*args, **kwds): print("Calling", func) return func(*args, **kwds) return cast(F, wrapper) # A decorated function. @my_decorator def foo(a: int) -> str: return str(a) # Another. @my_decorator def bar(x: float, y: float) -> Tuple[float, float, bool]: return (x, y, x > y) a = foo(12) reveal_type(a) # str b = bar(3.14, 0) reveal_type(b) # Tuple[float, float, bool] foo('x') # Type check error: incompatible type "str"; expected "int" From the final block we see that the signatures of the decorated functions ``foo()`` and ``bar()`` are the same as those of the original functions (before the decorator is applied). The bound on ``F`` is used so that calling the decorator on a non-function (e.g. ``my_decorator(1)``) will be rejected. Also note that the ``wrapper()`` function is not type-checked. Wrapper functions are typically small enough that this is not a big problem. This is also the reason for the :py:func:`~typing.cast` call in the ``return`` statement in ``my_decorator()``. See :ref:`casts`. Generic protocols ***************** Mypy supports generic protocols (see also :ref:`protocol-types`). Several :ref:`predefined protocols ` are generic, such as :py:class:`Iterable[T] `, and you can define additional generic protocols. Generic protocols mostly follow the normal rules for generic classes. Example: .. code-block:: python from typing import TypeVar from typing_extensions import Protocol T = TypeVar('T') class Box(Protocol[T]): content: T def do_stuff(one: Box[str], other: Box[bytes]) -> None: ... class StringWrapper: def __init__(self, content: str) -> None: self.content = content class BytesWrapper: def __init__(self, content: bytes) -> None: self.content = content do_stuff(StringWrapper('one'), BytesWrapper(b'other')) # OK x: Box[float] = ... 
y: Box[int] = ... x = y # Error -- Box is invariant The main difference between generic protocols and ordinary generic classes is that mypy checks that the declared variances of generic type variables in a protocol match how they are used in the protocol definition. The protocol in this example is rejected, since the type variable ``T`` is used covariantly as a return type, but the type variable is invariant: .. code-block:: python from typing import TypeVar from typing_extensions import Protocol T = TypeVar('T') class ReadOnlyBox(Protocol[T]): # Error: covariant type variable expected def content(self) -> T: ... This example correctly uses a covariant type variable: .. code-block:: python from typing import TypeVar from typing_extensions import Protocol T_co = TypeVar('T_co', covariant=True) class ReadOnlyBox(Protocol[T_co]): # OK def content(self) -> T_co: ... ax: ReadOnlyBox[float] = ... ay: ReadOnlyBox[int] = ... ax = ay # OK -- ReadOnlyBox is covariant See :ref:`variance-of-generics` for more about variance. Generic protocols can also be recursive. Example: .. code-block:: python T = TypeVar('T') class Linked(Protocol[T]): val: T def next(self) -> 'Linked[T]': ... class L: val: int ... # details omitted def next(self) -> 'L': ... # details omitted def last(seq: Linked[T]) -> T: ... # implementation omitted result = last(L()) # Inferred type of 'result' is 'int' .. _generic-type-aliases: Generic type aliases ******************** Type aliases can be generic. In this case they can be used in two ways: Subscripted aliases are equivalent to original types with substituted type variables, so the number of type arguments must match the number of free type variables in the generic type alias. Unsubscripted aliases are treated as original types with free variables replaced with ``Any``. Examples (following :pep:`PEP 484: Type aliases <484#type-aliases>`): .. 
code-block:: python from typing import TypeVar, Iterable, Tuple, Union, Callable S = TypeVar('S') TInt = Tuple[int, S] UInt = Union[S, int] CBack = Callable[..., S] def response(query: str) -> UInt[str]: # Same as Union[str, int] ... def activate(cb: CBack[S]) -> S: # Same as Callable[..., S] ... table_entry: TInt # Same as Tuple[int, Any] T = TypeVar('T', int, float, complex) Vec = Iterable[Tuple[T, T]] def inproduct(v: Vec[T]) -> T: return sum(x*y for x, y in v) def dilate(v: Vec[T], scale: T) -> Vec[T]: return ((x * scale, y * scale) for x, y in v) v1: Vec[int] = [] # Same as Iterable[Tuple[int, int]] v2: Vec = [] # Same as Iterable[Tuple[Any, Any]] v3: Vec[int, int] = [] # Error: Invalid alias, too many type arguments! Type aliases can be imported from modules just like other names. An alias can also target another alias, although building complex chains of aliases is not recommended -- this impedes code readability, thus defeating the purpose of using aliases. Example: .. code-block:: python from typing import TypeVar, Generic, Optional from example1 import AliasType from example2 import Vec # AliasType and Vec are type aliases (Vec as defined above) def fun() -> AliasType: ... T = TypeVar('T') class NewVec(Vec[T]): ... for i, j in NewVec[int](): ... OIntVec = Optional[Vec[int]] .. note:: A type alias does not define a new type. For generic type aliases this means that variance of type variables used for alias definition does not apply to aliases. A parameterized generic alias is treated simply as an original type with the corresponding type variables substituted. mypy-0.761/docs/source/getting_started.rst0000644€tŠÔÚ€2›s®0000003453013576752246025076 0ustar jukkaDROPBOX\Domain Users00000000000000.. _getting-started: Getting started =============== This chapter introduces some core concepts of mypy, including function annotations, the :py:mod:`typing` module, library stubs, and more. 
Be sure to read this chapter carefully, as the rest of the documentation may not make much sense otherwise. Installing and running mypy *************************** Mypy requires Python 3.5 or later to run. Once you've `installed Python 3 `_, install mypy using pip: .. code-block:: shell $ python3 -m pip install mypy Once mypy is installed, run it by using the ``mypy`` tool: .. code-block:: shell $ mypy program.py This command makes mypy *type check* your ``program.py`` file and print out any errors it finds. Mypy will type check your code *statically*: this means that it will check for errors without ever running your code, just like a linter. This means that you are always free to ignore the errors mypy reports and treat them as just warnings, if you so wish: mypy runs independently from Python itself. However, if you try directly running mypy on your existing Python code, it will most likely report little to no errors: you must add *type annotations* to your code to take full advantage of mypy. See the section below for details. .. note:: Although you must install Python 3 to run mypy, mypy is fully capable of type checking Python 2 code as well: just pass in the :option:`--py2 ` flag. See :ref:`python2` for more details. .. code-block:: shell $ mypy --py2 program.py Function signatures and dynamic vs static typing ************************************************ A function without type annotations is considered to be *dynamically typed* by mypy: .. code-block:: python def greeting(name): return 'Hello ' + name By default, mypy will **not** type check dynamically typed functions. This means that with a few exceptions, mypy will not report any errors with regular unannotated Python. This is the case even if you misuse the function: for example, mypy would currently not report any errors if you tried running ``greeting(3)`` or ``greeting(b"Alice")`` even though those function calls would result in errors at runtime. 
You can teach mypy to detect these kinds of bugs by adding *type annotations* (also known as *type hints*). For example, you can teach mypy that ``greeting`` both accepts and returns a string like so: .. code-block:: python def greeting(name: str) -> str: return 'Hello ' + name This function is now *statically typed*: mypy can use the provided type hints to detect incorrect usages of the ``greeting`` function. For example, it will reject the following calls since the arguments have invalid types: .. code-block:: python def greeting(name: str) -> str: return 'Hello ' + name greeting(3) # Argument 1 to "greeting" has incompatible type "int"; expected "str" greeting(b'Alice') # Argument 1 to "greeting" has incompatible type "bytes"; expected "str" Note that this is all still valid Python 3 code! The function annotation syntax shown above was added to Python :pep:`as a part of Python 3.0 <3107>`. If you are trying to type check Python 2 code, you can add type hints using a comment-based syntax instead of the Python 3 annotation syntax. See our section on :ref:`typing Python 2 code ` for more details. Being able to pick whether you want a function to be dynamically or statically typed can be very helpful. For example, if you are migrating an existing Python codebase to use static types, it's usually easier to migrate by incrementally adding type hints to your code rather than adding them all at once. Similarly, when you are prototyping a new feature, it may be convenient to initially implement the code using dynamic typing and only add type hints later once the code is more stable. Once you are finished migrating or prototyping your code, you can make mypy warn you if you add a dynamic function by mistake by using the :option:`--disallow-untyped-defs ` flag. See :ref:`command-line` for more information on configuring mypy. .. note:: The earlier stages of analysis performed by mypy may report errors even for dynamically typed functions. 
However, you should not rely on this, as this may change in the future. More function signatures ************************ Here are a few more examples of adding type hints to function signatures. If a function does not explicitly return a value, give it a return type of ``None``. Using a ``None`` result in a statically typed context results in a type check error: .. code-block:: python def p() -> None: print('hello') a = p() # Error: "p" does not return a value Make sure to remember to include ``None``: if you don't, the function will be dynamically typed. For example: .. code-block:: python def f(): 1 + 'x' # No static type error (dynamically typed) def g() -> None: 1 + 'x' # Type check error (statically typed) Arguments with default values can be annotated like so: .. code-block:: python def greeting(name: str, excited: bool = False) -> str: message = 'Hello, {}'.format(name) if excited: message += '!!!' return message ``*args`` and ``**kwargs`` arguments can be annotated like so: .. code-block:: python def stars(*args: int, **kwargs: float) -> None: # 'args' has type 'Tuple[int, ...]' (a tuple of ints) # 'kwargs' has type 'Dict[str, float]' (a dict of strs to floats) for arg in args: print(arg) for key, value in kwargs: print(key, value) The typing module ***************** So far, we've added type hints that use only basic concrete types like ``str`` and ``float``. What if we want to express more complex types, such as "a list of strings" or "an iterable of ints"? You can find many of these more complex static types inside of the :py:mod:`typing` module. For example, to indicate that some function can accept a list of strings, use the :py:class:`~typing.List` type: .. code-block:: python from typing import List def greet_all(names: List[str]) -> None: for name in names: print('Hello ' + name) names = ["Alice", "Bob", "Charlie"] ages = [10, 20, 30] greet_all(names) # Ok! 
greet_all(ages) # Error due to incompatible types The :py:class:`~typing.List` type is an example of something called a *generic type*: it can accept one or more *type parameters*. In this case, we *parameterized* :py:class:`~typing.List` by writing ``List[str]``. This lets mypy know that ``greet_all`` accepts specifically lists containing strings, and not lists containing ints or any other type. In this particular case, the type signature is perhaps a little too rigid. After all, there's no reason why this function must accept *specifically* a list -- it would run just fine if you were to pass in a tuple, a set, or any other custom iterable. You can express this idea using the :py:class:`~typing.Iterable` type instead of :py:class:`~typing.List`: .. code-block:: python from typing import Iterable def greet_all(names: Iterable[str]) -> None: for name in names: print('Hello ' + name) As another example, suppose you want to write a function that can accept *either* ints or strings, but no other types. You can express this using the :py:data:`~typing.Union` type: .. code-block:: python from typing import Union def normalize_id(user_id: Union[int, str]) -> str: if isinstance(user_id, int): return 'user-{}'.format(100000 + user_id) else: return user_id Similarly, suppose that you want the function to accept only strings or ``None``. You can again use :py:data:`~typing.Union` and use ``Union[str, None]`` -- or alternatively, use the type ``Optional[str]``. These two types are identical and interchangeable: ``Optional[str]`` is just a shorthand or *alias* for ``Union[str, None]``. It exists mostly as a convenience to help function signatures look a little cleaner: .. code-block:: python from typing import Optional def greeting(name: Optional[str] = None) -> str: # Optional[str] means the same thing as Union[str, None] if name is None: name = 'stranger' return 'Hello, ' + name The :py:mod:`typing` module contains many other useful types. 
You can find a quick overview by looking through the :ref:`mypy cheatsheets ` and a more detailed overview (including information on how to make your own generic types or your own type aliases) by looking through the :ref:`type system reference `. One final note: when adding types, the convention is to import types using the form ``from typing import Iterable`` (as opposed to doing just ``import typing`` or ``import typing as t`` or ``from typing import *``). For brevity, we often omit these :py:mod:`typing` imports in code examples, but mypy will give an error if you use types such as :py:class:`~typing.Iterable` without first importing them. Local type inference ******************** Once you have added type hints to a function (i.e. made it statically typed), mypy will automatically type check that function's body. While doing so, mypy will try and *infer* as many details as possible. We saw an example of this in the ``normalize_id`` function above -- mypy understands basic :py:func:`isinstance ` checks and so can infer that the ``user_id`` variable was of type ``int`` in the if-branch and of type ``str`` in the else-branch. Similarly, mypy was able to understand that ``name`` could not possibly be ``None`` in the ``greeting`` function above, based both on the ``name is None`` check and the variable assignment in that if statement. As another example, consider the following function. Mypy can type check this function without a problem: it will use the available context and deduce that ``output`` must be of type ``List[float]`` and that ``num`` must be of type ``float``: .. code-block:: python def nums_below(numbers: Iterable[float], limit: float) -> List[float]: output = [] for num in numbers: if num < limit: output.append(num) return output Mypy will warn you if it is unable to determine the type of some variable -- for example, when assigning an empty dictionary to some global value: .. 
code-block:: python my_global_dict = {} # Error: Need type annotation for 'my_global_dict' You can teach mypy what type ``my_global_dict`` is meant to have by giving it a type hint. For example, if you knew this variable is supposed to be a dict of ints to floats, you could annotate it using either variable annotations (introduced in Python 3.6 by :pep:`526`) or using a comment-based syntax like so: .. code-block:: python # If you're using Python 3.6+ my_global_dict: Dict[int, float] = {} # If you want compatibility with older versions of Python my_global_dict = {} # type: Dict[int, float] .. _stubs-intro: Library stubs and typeshed ************************** Mypy uses library *stubs* to type check code interacting with library modules, including the Python standard library. A library stub defines a skeleton of the public interface of the library, including classes, variables and functions, and their types. Mypy ships with stubs from the `typeshed `_ project, which contains library stubs for the Python builtins, the standard library, and selected third-party packages. For example, consider this code: .. code-block:: python x = chr(4) Without a library stub, mypy would have no way of inferring the type of ``x`` and checking that the argument to :py:func:`chr` has a valid type. Mypy complains if it can't find a stub (or a real module) for a library module that you import. Some modules ship with stubs that mypy can automatically find, or you can install a 3rd party module with additional stubs (see :ref:`installed-packages` for details). You can also :ref:`create stubs ` easily. We discuss ways of silencing complaints about missing stubs in :ref:`ignore-missing-imports`. Configuring mypy **************** Mypy supports many command line options that you can use to tweak how mypy behaves: see :ref:`command-line` for more details. 
For example, suppose you want to make sure *all* functions within your codebase are using static typing and make mypy report an error if you add a dynamically-typed function by mistake. You can make mypy do this by running mypy with the :option:`--disallow-untyped-defs ` flag. Another potentially useful flag is :option:`--strict `, which enables many (though not all) of the available strictness options -- including :option:`--disallow-untyped-defs `. This flag is mostly useful if you're starting a new project from scratch and want to maintain a high degree of type safety from day one. However, this flag will probably be too aggressive if you either plan on using many untyped third party libraries or are trying to add static types to a large, existing codebase. See :ref:`existing-code` for more suggestions on how to handle the latter case. Next steps ********** If you are in a hurry and don't want to read lots of documentation before getting started, here are some pointers to quick learning resources: * Read the :ref:`mypy cheatsheet ` (also for :ref:`Python 2 `). * Read :ref:`existing-code` if you have a significant existing codebase without many type annotations. * Read the `blog post `_ about the Zulip project's experiences with adopting mypy. * If you prefer watching talks instead of reading, here are some ideas: * Carl Meyer: `Type Checked Python in the Real World `_ (PyCon 2018) * Greg Price: `Clearer Code at Scale: Static Types at Zulip and Dropbox `_ (PyCon 2018) * Look at :ref:`solutions to common issues ` with mypy if you encounter problems. * You can ask questions about mypy in the `mypy issue tracker `_ and typing `Gitter chat `_. You can also continue reading this document and skip sections that aren't relevant for you. You don't need to read sections in order. mypy-0.761/docs/source/index.rst0000644€tŠÔÚ€2›s®0000000255513576752246023020 0ustar jukkaDROPBOX\Domain Users00000000000000.. 
Mypy documentation master file, created by sphinx-quickstart on Sun Sep 14 19:50:35 2014. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Welcome to Mypy documentation! ============================== Mypy is a static type checker for Python 3 and Python 2.7. .. toctree:: :maxdepth: 2 :caption: First steps introduction getting_started existing_code .. _overview-cheat-sheets: .. toctree:: :maxdepth: 2 :caption: Cheat sheets cheat_sheet_py3 cheat_sheet .. _overview-type-system-reference: .. toctree:: :maxdepth: 2 :caption: Type system reference builtin_types type_inference_and_annotations kinds_of_types class_basics protocols python2 dynamic_typing casts duck_type_compatibility stubs generics more_types literal_types final_attrs metaclasses .. toctree:: :maxdepth: 2 :caption: Configuring and running mypy running_mypy command_line config_file inline_config mypy_daemon installed_packages extending_mypy stubgen .. toctree:: :maxdepth: 2 :caption: Miscellaneous common_issues supported_python_features error_codes error_code_list error_code_list2 python36 additional_features faq Indices and tables ================== * :ref:`genindex` * :ref:`search` mypy-0.761/docs/source/inline_config.rst0000644€tŠÔÚ€2›s®0000000221413576752246024504 0ustar jukkaDROPBOX\Domain Users00000000000000.. _inline-config: Inline configuration ==================== Mypy supports setting per-file configuration options inside files themselves using ``# mypy:`` comments. For example: .. code-block:: python # mypy: disallow-any-generics Inline configuration comments take precedence over all other configuration mechanisms. Configuration comment format **************************** Flags correspond to :ref:`config file flags ` but allow hyphens to be substituted for underscores. Values are specified using ``=``, but ``= True`` may be omitted: .. 
code-block:: python # mypy: disallow-any-generics # mypy: always-true=FOO Multiple flags can be separated by commas or placed on separate lines. To include a comma as part of an option's value, place the value inside quotes: .. code-block:: python # mypy: disallow-untyped-defs, always-false="FOO,BAR" Like in the configuration file, options that take a boolean value may be inverted by adding ``no-`` to their name or by (when applicable) swapping their prefix from ``disallow`` to ``allow`` (and vice versa): .. code-block:: python # mypy: allow-untyped-defs, no-strict-optional mypy-0.761/docs/source/installed_packages.rst0000644€tŠÔÚ€2›s®0000001061513576752246025522 0ustar jukkaDROPBOX\Domain Users00000000000000.. _installed-packages: Using installed packages ======================== :pep:`561` specifies how to mark a package as supporting type checking. Below is a summary of how to create PEP 561 compatible packages and have mypy use them in type checking. Using PEP 561 compatible packages with mypy ******************************************* Generally, you do not need to do anything to use installed packages that support typing for the Python executable used to run mypy. Note that most packages do not support typing. Packages that do support typing should be automatically picked up by mypy and used for type checking. By default, mypy searches for packages installed for the Python executable running mypy. It is highly unlikely you want this situation if you have installed typed packages in another Python's package directory. Generally, you can use the :option:`--python-version ` flag and mypy will try to find the correct package directory. If that fails, you can use the :option:`--python-executable ` flag to point to the exact executable, and mypy will find packages installed for that Python executable. Note that mypy does not support some more advanced import features, such as zip imports and custom import hooks. 
If you do not want to use typed packages, use the :option:`--no-site-packages ` flag to disable searching. Note that stub-only packages (defined in :pep:`PEP 561: Stub-only Packages <561#stub-only-packages>`) cannot be used with ``MYPYPATH``. If you want mypy to find the package, it must be installed. For a package ``foo``, the name of the stub-only package (``foo-stubs``) is not a legal package name, so mypy will not find it, unless it is installed. Making PEP 561 compatible packages ********************************** :pep:`561` notes three main ways to distribute type information. The first is a package that has only inline type annotations in the code itself. The second is a package that ships :ref:`stub files ` with type information alongside the runtime code. The third method, also known as a "stub only package" is a package that ships type information for a package separately as stub files. If you would like to publish a library package to a package repository (e.g. PyPI) for either internal or external use in type checking, packages that supply type information via type comments or annotations in the code should put a ``py.typed`` file in their package directory. For example, with a directory structure as follows .. code-block:: text setup.py package_a/ __init__.py lib.py py.typed the ``setup.py`` might look like .. code-block:: python from distutils.core import setup setup( name="SuperPackageA", author="Me", version="0.1", package_data={"package_a": ["py.typed"]}, packages=["package_a"] ) .. note:: If you use :doc:`setuptools `, you must pass the option ``zip_safe=False`` to ``setup()``, or mypy will not be able to find the installed package. Some packages have a mix of stub files and runtime files. These packages also require a ``py.typed`` file. An example can be seen below .. code-block:: text setup.py package_b/ __init__.py lib.py lib.pyi py.typed the ``setup.py`` might look like: .. 
code-block:: python from distutils.core import setup setup( name="SuperPackageB", author="Me", version="0.1", package_data={"package_b": ["py.typed", "lib.pyi"]}, packages=["package_b"] ) In this example, both ``lib.py`` and the ``lib.pyi`` stub file exist. At runtime, the Python interpreter will use ``lib.py``, but mypy will use ``lib.pyi`` instead. If the package is stub-only (not imported at runtime), the package should have a prefix of the runtime package name and a suffix of ``-stubs``. A ``py.typed`` file is not needed for stub-only packages. For example, if we had stubs for ``package_c``, we might do the following: .. code-block:: text setup.py package_c-stubs/ __init__.pyi lib.pyi the ``setup.py`` might look like: .. code-block:: python from distutils.core import setup setup( name="SuperPackageC", author="Me", version="0.1", package_data={"package_c-stubs": ["__init__.pyi", "lib.pyi"]}, packages=["package_c-stubs"] ) mypy-0.761/docs/source/introduction.rst0000644€tŠÔÚ€2›s®0000000310213576752246024417 0ustar jukkaDROPBOX\Domain Users00000000000000Introduction ============ Mypy is a static type checker for Python 3 and Python 2.7. If you sprinkle your code with type annotations, mypy can type check your code and find common bugs. As mypy is a static analyzer, or a lint-like tool, the type annotations are just hints for mypy and don't interfere when running your program. You run your program with a standard Python interpreter, and the annotations are treated effectively as comments. Using the Python 3 function annotation syntax (using the :pep:`484` notation) or a comment-based annotation syntax for Python 2 code, you will be able to efficiently annotate your code and use mypy to check the code for common errors. Mypy has a powerful and easy-to-use type system with modern features such as type inference, generics, callable types, tuple types, union types, and structural subtyping. As a developer, you decide how to use mypy in your workflow. 
You can always escape to dynamic typing as mypy's approach to static typing doesn't restrict what you can do in your programs. Using mypy will make your programs easier to understand, debug, and maintain. This documentation provides a short introduction to mypy. It will help you get started writing statically typed code. Knowledge of Python and a statically typed object-oriented language, such as Java, are assumed. .. note:: Mypy is used in production by many companies and projects, but mypy is officially beta software. There will be occasional changes that break backward compatibility. The mypy development team tries to minimize the impact of changes to user code. mypy-0.761/docs/source/kinds_of_types.rst0000644€tŠÔÚ€2›s®0000005753413576752246024740 0ustar jukkaDROPBOX\Domain Users00000000000000Kinds of types ============== We've mostly restricted ourselves to built-in types until now. This section introduces several additional kinds of types. You are likely to need at least some of them to type check any non-trivial programs. Class types *********** Every class is also a valid type. Any instance of a subclass is also compatible with all superclasses -- it follows that every value is compatible with the :py:class:`object` type (and incidentally also the ``Any`` type, discussed below). Mypy analyzes the bodies of classes to determine which methods and attributes are available in instances. This example uses subclassing: .. code-block:: python class A: def f(self) -> int: # Type of self inferred (A) return 2 class B(A): def f(self) -> int: return 3 def g(self) -> int: return 4 def foo(a: A) -> None: print(a.f()) # 3 a.g() # Error: "A" has no attribute "g" foo(B()) # OK (B is a subclass of A) The Any type ************ A value with the ``Any`` type is dynamically typed. Mypy doesn't know anything about the possible runtime types of such value. Any operations are permitted on the value, and the operations are only checked at runtime. 
You can use ``Any`` as an "escape hatch" when you can't use a more precise type for some reason. ``Any`` is compatible with every other type, and vice versa. You can freely assign a value of type ``Any`` to a variable with a more precise type: .. code-block:: python a: Any = None s: str = '' a = 2 # OK (assign "int" to "Any") s = a # OK (assign "Any" to "str") Declared (and inferred) types are ignored (or *erased*) at runtime. They are basically treated as comments, and thus the above code does not generate a runtime error, even though ``s`` gets an ``int`` value when the program is run, while the declared type of ``s`` is actually ``str``! You need to be careful with ``Any`` types, since they let you lie to mypy, and this could easily hide bugs. If you do not define a function return value or argument types, these default to ``Any``: .. code-block:: python def show_heading(s) -> None: print('=== ' + s + ' ===') # No static type checking, as s has type Any show_heading(1) # OK (runtime error only; mypy won't generate an error) You should give a statically typed function an explicit ``None`` return type even if it doesn't return a value, as this lets mypy catch additional type errors: .. code-block:: python def wait(t: float): # Implicit Any return value print('Waiting...') time.sleep(t) if wait(2) > 1: # Mypy doesn't catch this error! ... If we had used an explicit ``None`` return type, mypy would have caught the error: .. code-block:: python def wait(t: float) -> None: print('Waiting...') time.sleep(t) if wait(2) > 1: # Error: can't compare None and int ... The ``Any`` type is discussed in more detail in section :ref:`dynamic-typing`. .. note:: A function without any types in the signature is dynamically typed. The body of a dynamically typed function is not checked statically, and local variables have implicit ``Any`` types. This makes it easier to migrate legacy Python code to mypy, as mypy won't complain about dynamically typed functions. .. 
_tuple-types: Tuple types *********** The type ``Tuple[T1, ..., Tn]`` represents a tuple with the item types ``T1``, ..., ``Tn``: .. code-block:: python def f(t: Tuple[int, str]) -> None: t = 1, 'foo' # OK t = 'foo', 1 # Type check error A tuple type of this kind has exactly a specific number of items (2 in the above example). Tuples can also be used as immutable, varying-length sequences. You can use the type ``Tuple[T, ...]`` (with a literal ``...`` -- it's part of the syntax) for this purpose. Example: .. code-block:: python def print_squared(t: Tuple[int, ...]) -> None: for n in t: print(n, n ** 2) print_squared(()) # OK print_squared((1, 3, 5)) # OK print_squared([1, 2]) # Error: only a tuple is valid .. note:: Usually it's a better idea to use ``Sequence[T]`` instead of ``Tuple[T, ...]``, as :py:class:`~typing.Sequence` is also compatible with lists and other non-tuple sequences. .. note:: ``Tuple[...]`` is valid as a base class in Python 3.6 and later, and always in stub files. In earlier Python versions you can sometimes work around this limitation by using a named tuple as a base class (see section :ref:`named-tuples`). .. _callable-types: Callable types (and lambdas) **************************** You can pass around function objects and bound methods in statically typed code. The type of a function that accepts arguments ``A1``, ..., ``An`` and returns ``Rt`` is ``Callable[[A1, ..., An], Rt]``. Example: .. code-block:: python from typing import Callable def twice(i: int, next: Callable[[int], int]) -> int: return next(next(i)) def add(i: int) -> int: return i + 1 print(twice(3, add)) # 5 You can only have positional arguments, and only ones without default values, in callable types. These cover the vast majority of uses of callable types, but sometimes this isn't quite enough. Mypy recognizes a special form ``Callable[..., T]`` (with a literal ``...``) which can be used in less typical cases. 
It is compatible with arbitrary callable objects that return a type compatible with ``T``, independent of the number, types or kinds of arguments. Mypy lets you call such callable values with arbitrary arguments, without any checking -- in this respect they are treated similar to a ``(*args: Any, **kwargs: Any)`` function signature. Example: .. code-block:: python from typing import Callable def arbitrary_call(f: Callable[..., int]) -> int: return f('x') + f(y=2) # OK arbitrary_call(ord) # No static error, but fails at runtime arbitrary_call(open) # Error: does not return an int arbitrary_call(1) # Error: 'int' is not callable In situations where more precise or complex types of callbacks are necessary one can use flexible :ref:`callback protocols `. Lambdas are also supported. The lambda argument and return value types cannot be given explicitly; they are always inferred based on context using bidirectional type inference: .. code-block:: python l = map(lambda x: x + 1, [1, 2, 3]) # Infer x as int and l as List[int] If you want to give the argument or return value types explicitly, use an ordinary, perhaps nested function definition. .. _union-types: Union types *********** Python functions often accept values of two or more different types. You can use :ref:`overloading ` to represent this, but union types are often more convenient. Use the ``Union[T1, ..., Tn]`` type constructor to construct a union type. For example, if an argument has type ``Union[int, str]``, both integers and strings are valid argument values. You can use an :py:func:`isinstance` check to narrow down a union type to a more specific type: .. code-block:: python from typing import Union def f(x: Union[int, str]) -> None: x + 1 # Error: str + int is not valid if isinstance(x, int): # Here type of x is int. x + 1 # OK else: # Here type of x is str. x + 'a' # OK f(1) # OK f('x') # OK f(1.1) # Error .. note:: Operations are valid for union types only if they are valid for *every* union item. 
This is why it's often necessary to use an :py:func:`isinstance` check to first narrow down a union type to a non-union type. This also means that it's recommended to avoid union types as function return types, since the caller may have to use :py:func:`isinstance` before doing anything interesting with the value. .. _strict_optional: Optional types and the None type ******************************** You can use the :py:data:`~typing.Optional` type modifier to define a type variant that allows ``None``, such as ``Optional[int]`` (``Optional[X]`` is the preferred shorthand for ``Union[X, None]``): .. code-block:: python from typing import Optional def strlen(s: str) -> Optional[int]: if not s: return None # OK return len(s) def strlen_invalid(s: str) -> int: if not s: return None # Error: None not compatible with int return len(s) Most operations will not be allowed on unguarded ``None`` or :py:data:`~typing.Optional` values: .. code-block:: python def my_inc(x: Optional[int]) -> int: return x + 1 # Error: Cannot add None and int Instead, an explicit ``None`` check is required. Mypy has powerful type inference that lets you use regular Python idioms to guard against ``None`` values. For example, mypy recognizes ``is None`` checks: .. code-block:: python def my_inc(x: Optional[int]) -> int: if x is None: return 0 else: # The inferred type of x is just int here. return x + 1 Mypy will infer the type of ``x`` to be ``int`` in the else block due to the check against ``None`` in the if condition. Other supported checks for guarding against a ``None`` value include ``if x is not None``, ``if x`` and ``if not x``. Additionally, mypy understands ``None`` checks within logical expressions: .. code-block:: python def concat(x: Optional[str], y: Optional[str]) -> Optional[str]: if x is not None and y is not None: # Both x and y are not None here return x + y else: return None Sometimes mypy doesn't realize that a value is never ``None``. 
This notably happens when a class instance can exist in a partially defined state, where some attribute is initialized to ``None`` during object construction, but a method assumes that the attribute is no longer ``None``. Mypy will complain about the possible ``None`` value. You can use ``assert x is not None`` to work around this in the method: .. code-block:: python class Resource: path: Optional[str] = None def initialize(self, path: str) -> None: self.path = path def read(self) -> str: # We require that the object has been initialized. assert self.path is not None with open(self.path) as f: # OK return f.read() r = Resource() r.initialize('/foo/bar') r.read() When initializing a variable as ``None``, ``None`` is usually an empty place-holder value, and the actual value has a different type. This is why you need to annotate an attribute in cases like the class ``Resource`` above: .. code-block:: python class Resource: path: Optional[str] = None ... This also works for attributes defined within methods: .. code-block:: python class Counter: def __init__(self) -> None: self.count: Optional[int] = None As a special case, you can use a non-optional type when initializing an attribute to ``None`` inside a class body *and* using a type comment, since when using a type comment, an initializer is syntactically required, and ``None`` is used as a dummy, placeholder initializer: .. code-block:: python from typing import List class Container: items = None # type: List[str] # OK (only with type comment) This is not a problem when using variable annotations, since no initializer is needed: .. code-block:: python from typing import List class Container: items: List[str] # No initializer Mypy generally uses the first assignment to a variable to infer the type of the variable. However, if you assign both a ``None`` value and a non-``None`` value in the same scope, mypy can usually do the right thing without an annotation: .. 
code-block:: python def f(i: int) -> None: n = None # Inferred type Optional[int] because of the assignment below if i > 0: n = i ... Sometimes you may get the error "Cannot determine type of <something>". In this case you should add an explicit ``Optional[...]`` annotation (or type comment). .. note:: ``None`` is a type with only one value, ``None``. ``None`` is also used as the return type for functions that don't return a value, i.e. functions that implicitly return ``None``. .. note:: The Python interpreter internally uses the name ``NoneType`` for the type of ``None``, but ``None`` is always used in type annotations. The latter is shorter and reads better. (Besides, ``NoneType`` is not even defined in the standard library.) .. note:: ``Optional[...]`` *does not* mean a function argument with a default value. However, if the default value of an argument is ``None``, you can use an optional type for the argument, but it's not enforced by default. You can use the :option:`--no-implicit-optional ` command-line option to stop treating arguments with a ``None`` default value as having an implicit ``Optional[...]`` type. It's possible that this will become the default behavior in the future. .. _no_strict_optional: Disabling strict optional checking ********************************** Mypy also has an option to treat ``None`` as a valid value for every type (in case you know Java, it's useful to think of it as similar to the Java ``null``). In this mode ``None`` is also valid for primitive types such as ``int`` and ``float``, and :py:data:`~typing.Optional` types are not required. The mode is enabled through the :option:`--no-strict-optional ` command-line option. In mypy versions before 0.600 this was the default mode. You can enable this option explicitly for backward compatibility with earlier mypy versions, in case you don't want to introduce optional types to your codebase yet. 
It will cause mypy to silently accept some buggy code, such as this example -- it's not recommended if you can avoid it: .. code-block:: python def inc(x: int) -> int: return x + 1 x = inc(None) # No error reported by mypy if strict optional mode disabled! However, making code "optional clean" can take some work! You can also use :ref:`the mypy configuration file ` to migrate your code to strict optional checking one file at a time, since there exists the :ref:`per-module flag ` ``strict_optional`` to control strict optional mode. Often it's still useful to document whether a variable can be ``None``. For example, this function accepts a ``None`` argument, but it's not obvious from its signature: .. code-block:: python def greeting(name: str) -> str: if name: return 'Hello, {}'.format(name) else: return 'Hello, stranger' print(greeting('Python')) # Okay! print(greeting(None)) # Also okay! You can still use :py:data:`Optional[t] ` to document that ``None`` is a valid argument type, even if strict ``None`` checking is not enabled: .. code-block:: python from typing import Optional def greeting(name: Optional[str]) -> str: if name: return 'Hello, {}'.format(name) else: return 'Hello, stranger' Mypy treats this as semantically equivalent to the previous example if strict optional checking is disabled, since ``None`` is implicitly valid for any type, but it's much more useful for a programmer who is reading the code. This also makes it easier to migrate to strict ``None`` checking in the future. Class name forward references ***************************** Python does not allow references to a class object before the class is defined. Thus this code does not work as expected: .. code-block:: python def f(x: A) -> None: # Error: Name A not defined ... class A: ... In cases like these you can enter the type as a string literal — this is a *forward reference*: .. code-block:: python def f(x: 'A') -> None: # OK ... class A: ... 
Of course, instead of using a string literal type, you could move the function definition after the class definition. This is not always desirable or even possible, though. Any type can be entered as a string literal, and you can combine string-literal types with non-string-literal types freely: .. code-block:: python def f(a: List['A']) -> None: ... # OK def g(n: 'int') -> None: ... # OK, though not useful class A: pass String literal types are never needed in ``# type:`` comments. String literal types must be defined (or imported) later *in the same module*. They cannot be used to leave cross-module references unresolved. (For dealing with import cycles, see :ref:`import-cycles`.) .. _type-aliases: Type aliases ************ In certain situations, type names may end up being long and painful to type: .. code-block:: python def f() -> Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]: ... When cases like this arise, you can define a type alias by simply assigning the type to a variable: .. code-block:: python AliasType = Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]] # Now we can use AliasType in place of the full name: def f() -> AliasType: ... .. note:: A type alias does not create a new type. It's just a shorthand notation for another type -- it's equivalent to the target type except for :ref:`generic aliases `. .. _named-tuples: Named tuples ************ Mypy recognizes named tuples and can type check code that defines or uses them. In this example, we can detect code trying to access a missing attribute: .. code-block:: python Point = namedtuple('Point', ['x', 'y']) p = Point(x=1, y=2) print(p.z) # Error: Point has no attribute 'z' If you use :py:func:`namedtuple ` to define your named tuple, all the items are assumed to have ``Any`` types. That is, mypy doesn't know anything about item types. You can use :py:class:`~typing.NamedTuple` to also define item types: .. 
code-block:: python from typing import NamedTuple Point = NamedTuple('Point', [('x', int), ('y', int)]) p = Point(x=1, y='x') # Argument has incompatible type "str"; expected "int" Python 3.6 introduced an alternative, class-based syntax for named tuples with types: .. code-block:: python from typing import NamedTuple class Point(NamedTuple): x: int y: int p = Point(x=1, y='x') # Argument has incompatible type "str"; expected "int" .. _type-of-class: The type of class objects ************************* (Freely after :pep:`PEP 484: The type of class objects <484#the-type-of-class-objects>`.) Sometimes you want to talk about class objects that inherit from a given class. This can be spelled as :py:class:`Type[C] ` where ``C`` is a class. In other words, when ``C`` is the name of a class, using ``C`` to annotate an argument declares that the argument is an instance of ``C`` (or of a subclass of ``C``), but using :py:class:`Type[C] ` as an argument annotation declares that the argument is a class object deriving from ``C`` (or ``C`` itself). For example, assume the following classes: .. code-block:: python class User: # Defines fields like name, email class BasicUser(User): def upgrade(self): """Upgrade to Pro""" class ProUser(User): def pay(self): """Pay bill""" Note that ``ProUser`` doesn't inherit from ``BasicUser``. Here's a function that creates an instance of one of these classes if you pass it the right class object: .. code-block:: python def new_user(user_class): user = user_class() # (Here we could write the user object to a database) return user How would we annotate this function? Without :py:class:`~typing.Type` the best we could do would be: .. code-block:: python def new_user(user_class: type) -> User: # Same implementation as before This seems reasonable, except that in the following example, mypy doesn't see that the ``buyer`` variable has type ``ProUser``: .. 
code-block:: python buyer = new_user(ProUser) buyer.pay() # Rejected, not a method on User However, using :py:class:`~typing.Type` and a type variable with an upper bound (see :ref:`type-variable-upper-bound`) we can do better: .. code-block:: python U = TypeVar('U', bound=User) def new_user(user_class: Type[U]) -> U: # Same implementation as before Now mypy will infer the correct type of the result when we call ``new_user()`` with a specific subclass of ``User``: .. code-block:: python beginner = new_user(BasicUser) # Inferred type is BasicUser beginner.upgrade() # OK .. note:: The value corresponding to :py:class:`Type[C] ` must be an actual class object that's a subtype of ``C``. Its constructor must be compatible with the constructor of ``C``. If ``C`` is a type variable, its upper bound must be a class object. For more details about ``Type[]`` see :pep:`PEP 484: The type of class objects <484#the-type-of-class-objects>`. .. _text-and-anystr: Text and AnyStr *************** Sometimes you may want to write a function which will accept only unicode strings. This can be challenging to do in a codebase intended to run in both Python 2 and Python 3 since ``str`` means something different in both versions and ``unicode`` is not a keyword in Python 3. To help solve this issue, use :py:class:`~typing.Text` which is aliased to ``unicode`` in Python 2 and to ``str`` in Python 3. This allows you to indicate that a function should accept only unicode strings in a cross-compatible way: .. code-block:: python from typing import Text def unicode_only(s: Text) -> Text: return s + u'\u2713' In other cases, you may want to write a function that will work with any kind of string but will not let you mix two different string types. To do so use :py:data:`~typing.AnyStr`: .. 
code-block:: python from typing import AnyStr def concat(x: AnyStr, y: AnyStr) -> AnyStr: return x + y concat('a', 'b') # Okay concat(b'a', b'b') # Okay concat('a', b'b') # Error: cannot mix bytes and unicode For more details, see :ref:`type-variable-value-restriction`. .. note:: How ``bytes``, ``str``, and ``unicode`` are handled between Python 2 and Python 3 may change in future versions of mypy. .. _generators: Generators ********** A basic generator that only yields values can be annotated as having a return type of either :py:class:`Iterator[YieldType] ` or :py:class:`Iterable[YieldType] `. For example: .. code-block:: python def squares(n: int) -> Iterator[int]: for i in range(n): yield i * i If you want your generator to accept values via the :py:meth:`~generator.send` method or return a value, you should use the :py:class:`Generator[YieldType, SendType, ReturnType] ` generic type instead. For example: .. code-block:: python def echo_round() -> Generator[int, float, str]: sent = yield 0 while sent >= 0: sent = yield round(sent) return 'Done' Note that unlike many other generics in the typing module, the ``SendType`` of :py:class:`~typing.Generator` behaves contravariantly, not covariantly or invariantly. If you do not plan on receiving or returning values, then set the ``SendType`` or ``ReturnType`` to ``None``, as appropriate. For example, we could have annotated the first example as the following: .. code-block:: python def squares(n: int) -> Generator[int, None, None]: for i in range(n): yield i * i This is slightly different from using ``Iterable[int]`` or ``Iterator[int]``, since generators have :py:meth:`~generator.close`, :py:meth:`~generator.send`, and :py:meth:`~generator.throw` methods that generic iterables don't. If you will call these methods on the returned generator, use the :py:class:`~typing.Generator` type instead of :py:class:`~typing.Iterable` or :py:class:`~typing.Iterator`. 
mypy-0.761/docs/source/literal_types.rst0000644€tŠÔÚ€2›s®0000001504113576752246024563 0ustar jukkaDROPBOX\Domain Users00000000000000.. _literal_types: Literal types ============= .. note:: ``Literal`` is an officially supported feature, but is highly experimental and should be considered to be in alpha stage. It is very likely that future releases of mypy will modify the behavior of literal types, either by adding new features or by tuning or removing problematic ones. Literal types let you indicate that an expression is equal to some specific primitive value. For example, if we annotate a variable with type ``Literal["foo"]``, mypy will understand that variable is not only of type ``str``, but is also equal to specifically the string ``"foo"``. This feature is primarily useful when annotating functions that behave differently based on the exact value the caller provides. For example, suppose we have a function ``fetch_data(...)`` that returns ``bytes`` if the first argument is ``True``, and ``str`` if it's ``False``. We can construct a precise type signature for this function using ``Literal[...]`` and overloads: .. code-block:: python from typing import overload, Union from typing_extensions import Literal # The first two overloads use Literal[...] so we can # have precise return types: @overload def fetch_data(raw: Literal[True]) -> bytes: ... @overload def fetch_data(raw: Literal[False]) -> str: ... # The last overload is a fallback in case the caller # provides a regular bool: @overload def fetch_data(raw: bool) -> Union[bytes, str]: ... def fetch_data(raw: bool) -> Union[bytes, str]: # Implementation is omitted ... reveal_type(fetch_data(True)) # Revealed type is 'bytes' reveal_type(fetch_data(False)) # Revealed type is 'str' # Variables declared without annotations will continue to have an # inferred type of 'bool'. 
variable = True reveal_type(fetch_data(variable)) # Revealed type is 'Union[bytes, str]' Parameterizing Literals *********************** Literal types may contain one or more literal bools, ints, strs, and bytes. However, literal types **cannot** contain arbitrary expressions: types like ``Literal[my_string.trim()]``, ``Literal[x > 3]``, or ``Literal[3j + 4]`` are all illegal. Literals containing two or more values are equivalent to the union of those values. So, ``Literal[-3, b"foo", True]`` is equivalent to ``Union[Literal[-3], Literal[b"foo"], Literal[True]]``. This makes writing more complex types involving literals a little more convenient. Literal types may also contain ``None``. Mypy will treat ``Literal[None]`` as being equivalent to just ``None``. This means that ``Literal[4, None]``, ``Union[Literal[4], None]``, and ``Optional[Literal[4]]`` are all equivalent. Literals may also contain aliases to other literal types. For example, the following program is legal: .. code-block:: python PrimaryColors = Literal["red", "blue", "yellow"] SecondaryColors = Literal["purple", "green", "orange"] AllowedColors = Literal[PrimaryColors, SecondaryColors] def paint(color: AllowedColors) -> None: ... paint("red") # Type checks! paint("turquoise") # Does not type check Literals may not contain any other kind of type or expression. This means doing ``Literal[my_instance]``, ``Literal[Any]``, ``Literal[3.14]``, or ``Literal[{"foo": 2, "bar": 5}]`` are all illegal. Future versions of mypy may relax some of these restrictions. For example, we plan on adding support for using enum values inside ``Literal[...]`` in an upcoming release. Declaring literal variables *************************** You must explicitly add an annotation to a variable to declare that it has a literal type: .. 
code-block:: python a: Literal[19] = 19 reveal_type(a) # Revealed type is 'Literal[19]' In order to preserve backwards-compatibility, variables without this annotation are **not** assumed to be literals: .. code-block:: python b = 19 reveal_type(b) # Revealed type is 'int' If you find repeating the value of the variable in the type hint to be tedious, you can instead change the variable to be ``Final`` (see :ref:`final_attrs`): .. code-block:: python from typing_extensions import Final, Literal def expects_literal(x: Literal[19]) -> None: pass c: Final = 19 reveal_type(c) # Revealed type is 'Literal[19]?' expects_literal(c) # ...and this type checks! If you do not provide an explicit type in the ``Final``, the type of ``c`` becomes *context-sensitive*: mypy will basically try "substituting" the original assigned value whenever it's used before performing type checking. This is why the revealed type of ``c`` is ``Literal[19]?``: the question mark at the end reflects this context-sensitive nature. For example, mypy will type check the above program almost as if it were written like so: .. code-block:: python from typing_extensions import Final, Literal def expects_literal(x: Literal[19]) -> None: pass reveal_type(19) expects_literal(19) This means that while changing a variable to be ``Final`` is not quite the same thing as adding an explicit ``Literal[...]`` annotation, it often leads to the same effect in practice. The main cases where the behavior of context-sensitive vs true literal types differ are when you try using those types in places that are not explicitly expecting a ``Literal[...]``. For example, compare and contrast what happens when you try appending these types to a list: .. code-block:: python from typing_extensions import Final, Literal a: Final = 19 b: Literal[19] = 19 # Mypy will choose to infer List[int] here. 
list_of_ints = [] list_of_ints.append(a) reveal_type(list_of_ints) # Revealed type is 'List[int]' # But if the variable you're appending is an explicit Literal, mypy # will infer List[Literal[19]]. list_of_lits = [] list_of_lits.append(b) reveal_type(list_of_lits) # Revealed type is 'List[Literal[19]]' Limitations *********** Mypy will not understand expressions that use variables of type ``Literal[...]`` on a deep level. For example, if you have a variable ``a`` of type ``Literal[3]`` and another variable ``b`` of type ``Literal[5]``, mypy will infer that ``a + b`` has type ``int``, **not** type ``Literal[8]``. The basic rule is that literal types are treated as just regular subtypes of whatever type the parameter has. For example, ``Literal[3]`` is treated as a subtype of ``int`` and so will inherit all of ``int``'s methods directly. This means that ``Literal[3].__add__`` accepts the same arguments and has the same return type as ``int.__add__``. mypy-0.761/docs/source/metaclasses.rst0000644€tŠÔÚ€2›s®0000000544113576752246024212 0ustar jukkaDROPBOX\Domain Users00000000000000.. _metaclasses: Metaclasses =========== A :ref:`metaclass ` is a class that describes the construction and behavior of other classes, similarly to how classes describe the construction and behavior of objects. The default metaclass is :py:class:`type`, but it's possible to use other metaclasses. Metaclasses allow one to create "a different kind of class", such as :py:class:`~enum.Enum`\s, :py:class:`~typing.NamedTuple`\s and singletons. Mypy has some special understanding of :py:class:`~abc.ABCMeta` and ``EnumMeta``. .. _defining: Defining a metaclass ******************** .. code-block:: python class M(type): pass class A(metaclass=M): pass In Python 2, the syntax for defining a metaclass is different: .. code-block:: python class A(object): __metaclass__ = M Mypy also supports using :py:func:`six.with_metaclass` and :py:func:`@six.add_metaclass ` to define a metaclass in a portable way: .. 
code-block:: python import six class A(six.with_metaclass(M)): pass @six.add_metaclass(M) class C(object): pass .. _examples: Metaclass usage example *********************** Mypy supports the lookup of attributes in the metaclass: .. code-block:: python from typing import Type, TypeVar, ClassVar T = TypeVar('T') class M(type): count: ClassVar[int] = 0 def make(cls: Type[T]) -> T: M.count += 1 return cls() class A(metaclass=M): pass a: A = A.make() # make() is looked up at M; the result is an object of type A print(A.count) class B(A): pass b: B = B.make() # metaclasses are inherited print(B.count + " objects were created") # Error: Unsupported operand types for + ("int" and "str") .. _limitations: Gotchas and limitations of metaclass support ******************************************** Note that metaclasses pose some requirements on the inheritance structure, so it's better not to combine metaclasses and class hierarchies: .. code-block:: python class M1(type): pass class M2(type): pass class A1(metaclass=M1): pass class A2(metaclass=M2): pass class B1(A1, metaclass=M2): pass # Mypy Error: Inconsistent metaclass structure for 'B1' # At runtime the above definition raises an exception # TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases # Same runtime error as in B1, but mypy does not catch it yet class B12(A1, A2): pass * Mypy does not understand dynamically-computed metaclasses, such as ``class A(metaclass=f()): ...`` * Mypy does not and cannot understand arbitrary metaclass code. * Mypy only recognizes subclasses of :py:class:`type` as potential metaclasses. mypy-0.761/docs/source/more_types.rst0000644€tŠÔÚ€2›s®0000011404613576752246024076 0ustar jukkaDROPBOX\Domain Users00000000000000More types ========== This section introduces a few additional kinds of types, including :py:data:`~typing.NoReturn`, :py:func:`NewType `, ``TypedDict``, and types for async code. 
It also discusses how to give functions more precise types using overloads. All of these are only situationally useful, so feel free to skip this section and come back when you have a need for some of them. Here's a quick summary of what's covered here: * :py:data:`~typing.NoReturn` lets you tell mypy that a function never returns normally. * :py:func:`NewType ` lets you define a variant of a type that is treated as a separate type by mypy but is identical to the original type at runtime. For example, you can have ``UserId`` as a variant of ``int`` that is just an ``int`` at runtime. * :py:func:`@overload ` lets you define a function that can accept multiple distinct signatures. This is useful if you need to encode a relationship between the arguments and the return type that would be difficult to express normally. * ``TypedDict`` lets you give precise types for dictionaries that represent objects with a fixed schema, such as ``{'id': 1, 'items': ['x']}``. * Async types let you type check programs using ``async`` and ``await``. .. _noreturn: The NoReturn type ***************** Mypy provides support for functions that never return. For example, a function that unconditionally raises an exception: .. code-block:: python from typing import NoReturn def stop() -> NoReturn: raise Exception('no way') Mypy will ensure that functions annotated as returning :py:data:`~typing.NoReturn` truly never return, either implicitly or explicitly. Mypy will also recognize that the code after calls to such functions is unreachable and will behave accordingly: .. code-block:: python def f(x: int) -> int: if x == 0: return x stop() return 'whatever works' # No error in an unreachable block In earlier Python versions you need to install ``typing_extensions`` using pip to use :py:data:`~typing.NoReturn` in your code. Python 3 command line: .. code-block:: text python3 -m pip install --upgrade typing-extensions This works for Python 2: .. 
code-block:: text pip install --upgrade typing-extensions .. _newtypes: NewTypes ******** There are situations where you may want to avoid programming errors by creating simple derived classes that are only used to distinguish certain values from base class instances. Example: .. code-block:: python class UserId(int): pass def get_by_user_id(user_id: UserId): ... However, this approach introduces some runtime overhead. To avoid this, the typing module provides a helper function :py:func:`NewType ` that creates simple unique types with almost zero runtime overhead. Mypy will treat the statement ``Derived = NewType('Derived', Base)`` as being roughly equivalent to the following definition: .. code-block:: python class Derived(Base): def __init__(self, _x: Base) -> None: ... However, at runtime, ``NewType('Derived', Base)`` will return a dummy function that simply returns its argument: .. code-block:: python def Derived(_x): return _x Mypy will require explicit casts from ``int`` where ``UserId`` is expected, while implicitly casting from ``UserId`` where ``int`` is expected. Examples: .. code-block:: python from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: ... UserId('user') # Fails type check name_by_id(42) # Fails type check name_by_id(UserId(42)) # OK num = UserId(5) + 1 # type: int :py:func:`NewType ` accepts exactly two arguments. The first argument must be a string literal containing the name of the new type and must equal the name of the variable to which the new type is assigned. The second argument must be a properly subclassable class, i.e., not a type construct like :py:data:`~typing.Union`, etc. The function returned by :py:func:`NewType ` accepts only one argument; this is equivalent to supporting only one constructor accepting an instance of the base class (see above). Example: .. 
code-block:: python from typing import NewType class PacketId: def __init__(self, major: int, minor: int) -> None: self._major = major self._minor = minor TcpPacketId = NewType('TcpPacketId', PacketId) packet = PacketId(100, 100) tcp_packet = TcpPacketId(packet) # OK tcp_packet = TcpPacketId(127, 0) # Fails in type checker and at runtime You cannot use :py:func:`isinstance` or :py:func:`issubclass` on the object returned by :py:func:`~typing.NewType`, because function objects don't support these operations. You cannot create subclasses of these objects either. .. note:: Unlike type aliases, :py:func:`NewType ` will create an entirely new and unique type when used. The intended purpose of :py:func:`NewType ` is to help you detect cases where you accidentally mixed together the old base type and the new derived type. For example, the following will successfully typecheck when using type aliases: .. code-block:: python UserId = int def name_by_id(user_id: UserId) -> str: ... name_by_id(3) # ints and UserId are synonymous But a similar example using :py:func:`NewType ` will not typecheck: .. code-block:: python from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: ... name_by_id(3) # int is not the same as UserId .. _function-overloading: Function overloading ******************** Sometimes the arguments and types in a function depend on each other in ways that can't be captured with a :py:data:`~typing.Union`. For example, suppose we want to write a function that can accept x-y coordinates. If we pass in just a single x-y coordinate, we return a ``ClickEvent`` object. However, if we pass in two x-y coordinates, we return a ``DragEvent`` object. Our first attempt at writing this function might look like this: .. 
code-block:: python from typing import Union, Optional def mouse_event(x1: int, y1: int, x2: Optional[int] = None, y2: Optional[int] = None) -> Union[ClickEvent, DragEvent]: if x2 is None and y2 is None: return ClickEvent(x1, y1) elif x2 is not None and y2 is not None: return DragEvent(x1, y1, x2, y2) else: raise TypeError("Bad arguments") While this function signature works, it's too loose: it implies ``mouse_event`` could return either object regardless of the number of arguments we pass in. It also does not prohibit a caller from passing in the wrong number of ints: mypy would treat calls like ``mouse_event(1, 2, 20)`` as being valid, for example. We can do better by using :pep:`overloading <484#function-method-overloading>` which lets us give the same function multiple type annotations (signatures) to more accurately describe the function's behavior: .. code-block:: python from typing import Union, overload # Overload *variants* for 'mouse_event'. # These variants give extra information to the type checker. # They are ignored at runtime. @overload def mouse_event(x1: int, y1: int) -> ClickEvent: ... @overload def mouse_event(x1: int, y1: int, x2: int, y2: int) -> DragEvent: ... # The actual *implementation* of 'mouse_event'. # The implementation contains the actual runtime logic. # # It may or may not have type hints. If it does, mypy # will check the body of the implementation against the # type hints. # # Mypy will also check and make sure the signature is # consistent with the provided variants. def mouse_event(x1: int, y1: int, x2: Optional[int] = None, y2: Optional[int] = None) -> Union[ClickEvent, DragEvent]: if x2 is None and y2 is None: return ClickEvent(x1, y1) elif x2 is not None and y2 is not None: return DragEvent(x1, y1, x2, y2) else: raise TypeError("Bad arguments") This allows mypy to understand calls to ``mouse_event`` much more precisely. 
For example, mypy will understand that ``mouse_event(5, 25)`` will always have a return type of ``ClickEvent`` and will report errors for calls like ``mouse_event(5, 25, 2)``. As another example, suppose we want to write a custom container class that implements the :py:meth:`__getitem__ ` method (``[]`` bracket indexing). If this method receives an integer we return a single item. If it receives a ``slice``, we return a :py:class:`~typing.Sequence` of items. We can precisely encode this relationship between the argument and the return type by using overloads like so: .. code-block:: python from typing import Sequence, TypeVar, Union, overload T = TypeVar('T') class MyList(Sequence[T]): @overload def __getitem__(self, index: int) -> T: ... @overload def __getitem__(self, index: slice) -> Sequence[T]: ... def __getitem__(self, index: Union[int, slice]) -> Union[T, Sequence[T]]: if isinstance(index, int): # Return a T here elif isinstance(index, slice): # Return a sequence of Ts here else: raise TypeError(...) .. note:: If you just need to constrain a type variable to certain types or subtypes, you can use a :ref:`value restriction `. Runtime behavior ---------------- An overloaded function must consist of two or more overload *variants* followed by an *implementation*. The variants and the implementations must be adjacent in the code: think of them as one indivisible unit. The variant bodies must all be empty; only the implementation is allowed to contain code. This is because at runtime, the variants are completely ignored: they're overridden by the final implementation function. This means that an overloaded function is still an ordinary Python function! There is no automatic dispatch handling and you must manually handle the different types in the implementation (e.g. by using ``if`` statements and :py:func:`isinstance ` checks). If you are adding an overload within a stub file, the implementation function should be omitted: stubs do not contain runtime logic. .. 
note:: While we can leave the variant body empty using the ``pass`` keyword, the more common convention is to instead use the ellipsis (``...``) literal. Type checking calls to overloads -------------------------------- When you call an overloaded function, mypy will infer the correct return type by picking the best matching variant, after taking into consideration both the argument types and arity. However, a call is never type checked against the implementation. This is why mypy will report calls like ``mouse_event(5, 25, 3)`` as being invalid even though it matches the implementation signature. If there are multiple equally good matching variants, mypy will select the variant that was defined first. For example, consider the following program: .. code-block:: python from typing import List, overload @overload def summarize(data: List[int]) -> float: ... @overload def summarize(data: List[str]) -> str: ... def summarize(data): if not data: return 0.0 elif isinstance(data[0], int): # Do int specific code else: # Do str-specific code # What is the type of 'output'? float or str? output = summarize([]) The ``summarize([])`` call matches both variants: an empty list could be either a ``List[int]`` or a ``List[str]``. In this case, mypy will break the tie by picking the first matching variant: ``output`` will have an inferred type of ``float``. The implementor is responsible for making sure ``summarize`` breaks ties in the same way at runtime. However, there are two exceptions to the "pick the first match" rule. First, if multiple variants match due to an argument being of type ``Any``, mypy will make the inferred type also be ``Any``: .. code-block:: python dynamic_var: Any = some_dynamic_function() # output2 is of type 'Any' output2 = summarize(dynamic_var) Second, if multiple variants match due to one or more of the arguments being a union, mypy will make the inferred type be the union of the matching variant returns: .. 
code-block:: python some_list: Union[List[int], List[str]] # output3 is of type 'Union[float, str]' output3 = summarize(some_list) .. note:: Due to the "pick the first match" rule, changing the order of your overload variants can change how mypy type checks your program. To minimize potential issues, we recommend that you: 1. Make sure your overload variants are listed in the same order as the runtime checks (e.g. :py:func:`isinstance ` checks) in your implementation. 2. Order your variants and runtime checks from most to least specific. (See the following section for an example). Type checking the variants -------------------------- Mypy will perform several checks on your overload variant definitions to ensure they behave as expected. First, mypy will check and make sure that no overload variant is shadowing a subsequent one. For example, consider the following function which adds together two ``Expression`` objects, and contains a special-case to handle receiving two ``Literal`` types: .. code-block:: python from typing import overload, Union class Expression: # ...snip... class Literal(Expression): # ...snip... # Warning -- the first overload variant shadows the second! @overload def add(left: Expression, right: Expression) -> Expression: ... @overload def add(left: Literal, right: Literal) -> Literal: ... def add(left: Expression, right: Expression) -> Expression: # ...snip... While this code snippet is technically type-safe, it does contain an anti-pattern: the second variant will never be selected! If we try calling ``add(Literal(3), Literal(4))``, mypy will always pick the first variant and evaluate the function call to be of type ``Expression``, not ``Literal``. This is because ``Literal`` is a subtype of ``Expression``, which means the "pick the first match" rule will always halt after considering the first overload. Because having an overload variant that can never be matched is almost certainly a mistake, mypy will report an error. 
To fix the error, we can either 1) delete the second overload or 2) swap the order of the overloads: .. code-block:: python # Everything is ok now -- the variants are correctly ordered # from most to least specific. @overload def add(left: Literal, right: Literal) -> Literal: ... @overload def add(left: Expression, right: Expression) -> Expression: ... def add(left: Expression, right: Expression) -> Expression: # ...snip... Mypy will also type check the different variants and flag any overloads that have inherently unsafely overlapping variants. For example, consider the following unsafe overload definition: .. code-block:: python from typing import overload, Union @overload def unsafe_func(x: int) -> int: ... @overload def unsafe_func(x: object) -> str: ... def unsafe_func(x: object) -> Union[int, str]: if isinstance(x, int): return 42 else: return "some string" On the surface, this function definition appears to be fine. However, it will result in a discrepancy between the inferred type and the actual runtime type when we try using it like so: .. code-block:: python some_obj: object = 42 unsafe_func(some_obj) + " danger danger" # Type checks, yet crashes at runtime! Since ``some_obj`` is of type :py:class:`object`, mypy will decide that ``unsafe_func`` must return something of type ``str`` and concludes the above will type check. But in reality, ``unsafe_func`` will return an int, causing the code to crash at runtime! To prevent these kinds of issues, mypy will detect and prohibit inherently unsafely overlapping overloads on a best-effort basis. Two variants are considered unsafely overlapping when both of the following are true: 1. All of the arguments of the first variant are compatible with the second. 2. The return type of the first variant is *not* compatible with (e.g. is not a subtype of) the second. 
So in this example, the ``int`` argument in the first variant is a subtype of the ``object`` argument in the second, yet the ``int`` return type is not a subtype of ``str``. Both conditions are true, so mypy will correctly flag ``unsafe_func`` as being unsafe. However, mypy will not detect *all* unsafe uses of overloads. For example, suppose we modify the above snippet so it calls ``summarize`` instead of ``unsafe_func``: .. code-block:: python some_list: List[str] = [] summarize(some_list) + "danger danger" # Type safe, yet crashes at runtime! We run into a similar issue here. This program type checks if we look just at the annotations on the overloads. But since ``summarize(...)`` is designed to be biased towards returning a float when it receives an empty list, this program will actually crash during runtime. The reason mypy does not flag definitions like ``summarize`` as being potentially unsafe is because if it did, it would be extremely difficult to write a safe overload. For example, suppose we define an overload with two variants that accept types ``A`` and ``B`` respectively. Even if those two types were completely unrelated, the user could still potentially trigger a runtime error similar to the ones above by passing in a value of some third type ``C`` that inherits from both ``A`` and ``B``. Thankfully, these types of situations are relatively rare. What this does mean, however, is that you should exercise caution when designing or using an overloaded function that can potentially receive values that are an instance of two seemingly unrelated types. Type checking the implementation -------------------------------- The body of an implementation is type-checked against the type hints provided on the implementation. For example, in the ``MyList`` example up above, the code in the body is checked with argument list ``index: Union[int, slice]`` and a return type of ``Union[T, Sequence[T]]``. 
If there are no annotations on the implementation, then the body is not type checked. If you want to force mypy to check the body anyways, use the :option:`--check-untyped-defs ` flag (:ref:`more details here `). The variants must also also be compatible with the implementation type hints. In the ``MyList`` example, mypy will check that the parameter type ``int`` and the return type ``T`` are compatible with ``Union[int, slice]`` and ``Union[T, Sequence]`` for the first variant. For the second variant it verifies the parameter type ``slice`` and the return type ``Sequence[T]`` are compatible with ``Union[int, slice]`` and ``Union[T, Sequence]``. .. note:: The overload semantics documented above are new as of mypy 0.620. Previously, mypy used to perform type erasure on all overload variants. For example, the ``summarize`` example from the previous section used to be illegal because ``List[str]`` and ``List[int]`` both erased to just ``List[Any]``. This restriction was removed in mypy 0.620. Mypy also previously used to select the best matching variant using a different algorithm. If this algorithm failed to find a match, it would default to returning ``Any``. The new algorithm uses the "pick the first match" rule and will fall back to returning ``Any`` only if the input arguments also contain ``Any``. .. _advanced_self: Advanced uses of self-types *************************** Normally, mypy doesn't require annotations for the first arguments of instance and class methods. However, they may be needed to have more precise static typing for certain programming patterns. Restricted methods in generic classes ------------------------------------- In generic classes some methods may be allowed to be called only for certain values of type arguments: .. 
code-block:: python T = TypeVar('T') class Tag(Generic[T]): item: T def uppercase_item(self: Tag[str]) -> str: return self.item.upper() def label(ti: Tag[int], ts: Tag[str]) -> None: ti.uppercase_item() # E: Invalid self argument "Tag[int]" to attribute function # "uppercase_item" with type "Callable[[Tag[str]], str]" ts.uppercase_item() # This is OK This pattern also allows matching on nested types in situations where the type argument is itself generic: .. code-block:: python T = TypeVar('T') S = TypeVar('S') class Storage(Generic[T]): def __init__(self, content: T) -> None: self.content = content def first_chunk(self: Storage[Sequence[S]]) -> S: return self.content[0] page: Storage[List[str]] page.first_chunk() # OK, type is "str" Storage(0).first_chunk() # Error: Invalid self argument "Storage[int]" to attribute function # "first_chunk" with type "Callable[[Storage[Sequence[S]]], S]" Finally, one can use overloads on self-type to express precise types of some tricky methods: .. code-block:: python T = TypeVar('T') class Tag(Generic[T]): @overload def export(self: Tag[str]) -> str: ... @overload def export(self, converter: Callable[[T], str]) -> str: ... def export(self, converter=None): if isinstance(self.item, str): return self.item return converter(self.item) In particular, an :py:meth:`~object.__init__` method overloaded on self-type may be useful to annotate generic class constructors where type arguments depend on constructor parameters in a non-trivial way, see e.g. :py:class:`~subprocess.Popen`. Mixin classes ------------- Using host class protocol as a self-type in mixin methods allows more code re-usability for static typing of mixin classes. For example, one can define a protocol that defines common functionality for host classes instead of adding required abstract methods to every mixin: .. code-block:: python class Lockable(Protocol): @property def lock(self) -> Lock: ... 
class AtomicCloseMixin: def atomic_close(self: Lockable) -> int: with self.lock: # perform actions class AtomicOpenMixin: def atomic_open(self: Lockable) -> int: with self.lock: # perform actions class File(AtomicCloseMixin, AtomicOpenMixin): def __init__(self) -> None: self.lock = Lock() class Bad(AtomicCloseMixin): pass f = File() b: Bad f.atomic_close() # OK b.atomic_close() # Error: Invalid self type for "atomic_close" Note that the explicit self-type is *required* to be a protocol whenever it is not a supertype of the current class. In this case mypy will check the validity of the self-type only at the call site. Precise typing of alternative constructors ------------------------------------------ Some classes may define alternative constructors. If these classes are generic, self-type allows giving them precise signatures: .. code-block:: python T = TypeVar('T') class Base(Generic[T]): Q = TypeVar('Q', bound='Base[T]') def __init__(self, item: T) -> None: self.item = item @classmethod def make_pair(cls: Type[Q], item: T) -> Tuple[Q, Q]: return cls(item), cls(item) class Sub(Base[T]): ... pair = Sub.make_pair('yes') # Type is "Tuple[Sub[str], Sub[str]]" bad = Sub[int].make_pair('no') # Error: Argument 1 to "make_pair" of "Base" # has incompatible type "str"; expected "int" .. _async-and-await: Typing async/await ****************** Mypy supports the ability to type coroutines that use the ``async/await`` syntax introduced in Python 3.5. For more information regarding coroutines and this new syntax, see :pep:`492`. Functions defined using ``async def`` are typed just like normal functions. The return type annotation should be the same as the type of the value you expect to get back when ``await``-ing the coroutine. .. 
code-block:: python import asyncio async def format_string(tag: str, count: int) -> str: return 'T-minus {} ({})'.format(count, tag) async def countdown_1(tag: str, count: int) -> str: while count > 0: my_str = await format_string(tag, count) # has type 'str' print(my_str) await asyncio.sleep(0.1) count -= 1 return "Blastoff!" loop = asyncio.get_event_loop() loop.run_until_complete(countdown_1("Millennium Falcon", 5)) loop.close() The result of calling an ``async def`` function *without awaiting* will be a value of type :py:class:`Coroutine[Any, Any, T] `, which is a subtype of :py:class:`Awaitable[T] `: .. code-block:: python my_coroutine = countdown_1("Millennium Falcon", 5) reveal_type(my_coroutine) # has type 'Coroutine[Any, Any, str]' .. note:: :ref:`reveal_type() ` displays the inferred static type of an expression. If you want to use coroutines in Python 3.4, which does not support the ``async def`` syntax, you can instead use the :py:func:`@asyncio.coroutine ` decorator to convert a generator into a coroutine. Note that we set the ``YieldType`` of the generator to be ``Any`` in the following example. This is because the exact yield type is an implementation detail of the coroutine runner (e.g. the :py:mod:`asyncio` event loop) and your coroutine shouldn't have to know or care about what precisely that type is. .. code-block:: python from typing import Any, Generator import asyncio @asyncio.coroutine def countdown_2(tag: str, count: int) -> Generator[Any, None, str]: while count > 0: print('T-minus {} ({})'.format(count, tag)) yield from asyncio.sleep(0.1) count -= 1 return "Blastoff!" loop = asyncio.get_event_loop() loop.run_until_complete(countdown_2("USS Enterprise", 5)) loop.close() As before, the result of calling a generator decorated with :py:func:`@asyncio.coroutine ` will be a value of type :py:class:`Awaitable[T] `. .. note:: At runtime, you are allowed to add the :py:func:`@asyncio.coroutine ` decorator to both functions and generators. 
This is useful when you want to mark a work-in-progress function as a coroutine, but have not yet added ``yield`` or ``yield from`` statements: .. code-block:: python import asyncio @asyncio.coroutine def serialize(obj: object) -> str: # todo: add yield/yield from to turn this into a generator return "placeholder" However, mypy currently does not support converting functions into coroutines. Support for this feature will be added in a future version, but for now, you can manually force the function to be a generator by doing something like this: .. code-block:: python from typing import Generator import asyncio @asyncio.coroutine def serialize(obj: object) -> Generator[None, None, str]: # todo: add yield/yield from to turn this into a generator if False: yield return "placeholder" You may also choose to create a subclass of :py:class:`~typing.Awaitable` instead: .. code-block:: python from typing import Any, Awaitable, Generator import asyncio class MyAwaitable(Awaitable[str]): def __init__(self, tag: str, count: int) -> None: self.tag = tag self.count = count def __await__(self) -> Generator[Any, None, str]: for i in range(n, 0, -1): print('T-minus {} ({})'.format(i, tag)) yield from asyncio.sleep(0.1) return "Blastoff!" def countdown_3(tag: str, count: int) -> Awaitable[str]: return MyAwaitable(tag, count) loop = asyncio.get_event_loop() loop.run_until_complete(countdown_3("Heart of Gold", 5)) loop.close() To create an iterable coroutine, subclass :py:class:`~typing.AsyncIterator`: .. 
code-block:: python from typing import Optional, AsyncIterator import asyncio class arange(AsyncIterator[int]): def __init__(self, start: int, stop: int, step: int) -> None: self.start = start self.stop = stop self.step = step self.count = start - step def __aiter__(self) -> AsyncIterator[int]: return self async def __anext__(self) -> int: self.count += self.step if self.count == self.stop: raise StopAsyncIteration else: return self.count async def countdown_4(tag: str, n: int) -> str: async for i in arange(n, 0, -1): print('T-minus {} ({})'.format(i, tag)) await asyncio.sleep(0.1) return "Blastoff!" loop = asyncio.get_event_loop() loop.run_until_complete(countdown_4("Serenity", 5)) loop.close() For a more concrete example, the mypy repo has a toy webcrawler that demonstrates how to work with coroutines. One version `uses async/await `_ and one `uses yield from `_. .. _typeddict: TypedDict ********* Python programs often use dictionaries with string keys to represent objects. Here is a typical example: .. code-block:: python movie = {'name': 'Blade Runner', 'year': 1982} Only a fixed set of string keys is expected (``'name'`` and ``'year'`` above), and each key has an independent value type (``str`` for ``'name'`` and ``int`` for ``'year'`` above). We've previously seen the ``Dict[K, V]`` type, which lets you declare uniform dictionary types, where every value has the same type, and arbitrary keys are supported. This is clearly not a good fit for ``movie`` above. Instead, you can use a ``TypedDict`` to give a precise type for objects like ``movie``, where the type of each dictionary value depends on the key: .. code-block:: python from typing_extensions import TypedDict Movie = TypedDict('Movie', {'name': str, 'year': int}) movie = {'name': 'Blade Runner', 'year': 1982} # type: Movie ``Movie`` is a ``TypedDict`` type with two items: ``'name'`` (with type ``str``) and ``'year'`` (with type ``int``). 
Note that we used an explicit type annotation for the ``movie`` variable. This type annotation is important -- without it, mypy will try to infer a regular, uniform :py:class:`~typing.Dict` type for ``movie``, which is not what we want here. .. note:: If you pass a ``TypedDict`` object as an argument to a function, no type annotation is usually necessary since mypy can infer the desired type based on the declared argument type. Also, if an assignment target has been previously defined, and it has a ``TypedDict`` type, mypy will treat the assigned value as a ``TypedDict``, not :py:class:`~typing.Dict`. Now mypy will recognize these as valid: .. code-block:: python name = movie['name'] # Okay; type of name is str year = movie['year'] # Okay; type of year is int Mypy will detect an invalid key as an error: .. code-block:: python director = movie['director'] # Error: 'director' is not a valid key Mypy will also reject a runtime-computed expression as a key, as it can't verify that it's a valid key. You can only use string literals as ``TypedDict`` keys. The ``TypedDict`` type object can also act as a constructor. It returns a normal :py:class:`dict` object at runtime -- a ``TypedDict`` does not define a new runtime type: .. code-block:: python toy_story = Movie(name='Toy Story', year=1995) This is equivalent to just constructing a dictionary directly using ``{ ... }`` or ``dict(key=value, ...)``. The constructor form is sometimes convenient, since it can be used without a type annotation, and it also makes the type of the object explicit. Like all types, ``TypedDict``\s can be used as components to build arbitrarily complex types. For example, you can define nested ``TypedDict``\s and containers with ``TypedDict`` items. Unlike most other types, mypy uses structural compatibility checking (or structural subtyping) with ``TypedDict``\s. 
A ``TypedDict`` object with extra items is compatible with (a subtype of) a narrower ``TypedDict``, assuming item types are compatible (*totality* also affects subtyping, as discussed below).
However, mypy still lets you use ``[]`` with a partial ``TypedDict`` -- you just need to be careful with it, as it could result in a :py:exc:`KeyError`.
code-block:: python from typing_extensions import TypedDict class Movie(TypedDict): name: str year: int The above definition is equivalent to the original ``Movie`` definition. It doesn't actually define a real class. This syntax also supports a form of inheritance -- subclasses can define additional items. However, this is primarily a notational shortcut. Since mypy uses structural compatibility with ``TypedDict``\s, inheritance is not required for compatibility. Here is an example of inheritance: .. code-block:: python class Movie(TypedDict): name: str year: int class BookBasedMovie(Movie): based_on: str Now ``BookBasedMovie`` has keys ``name``, ``year`` and ``based_on``. Mixing required and non-required items -------------------------------------- In addition to allowing reuse across ``TypedDict`` types, inheritance also allows you to mix required and non-required (using ``total=False``) items in a single ``TypedDict``. Example: .. code-block:: python class MovieBase(TypedDict): name: str year: int class Movie(MovieBase, total=False): based_on: str Now ``Movie`` has required keys ``name`` and ``year``, while ``based_on`` can be left out when constructing an object. A ``TypedDict`` with a mix of required and non-required keys, such as ``Movie`` above, will only be compatible with another ``TypedDict`` if all required keys in the other ``TypedDict`` are required keys in the first ``TypedDict``, and all non-required keys of the other ``TypedDict`` are also non-required keys in the first ``TypedDict``. mypy-0.761/docs/source/mypy_daemon.rst0000644€tŠÔÚ€2›s®0000002433613576752246024233 0ustar jukkaDROPBOX\Domain Users00000000000000.. _mypy_daemon: .. program:: dmypy Mypy daemon (mypy server) ========================= Instead of running mypy as a command-line tool, you can also run it as a long-running daemon (server) process and use a command-line client to send type-checking requests to the server. 
The command-line interface of the mypy daemon may change in future mypy releases.
Use ``dmypy --help`` for help on additional commands and command-line options not discussed here, and ``dmypy <command> --help`` for help on command-specific options.
This is useful for debugging daemon crashes, since the server traceback is not always printed by the client. This is available for the ``start``, ``restart``, and ``run`` commands. .. option:: --timeout TIMEOUT Automatically shut down server after ``TIMEOUT`` seconds of inactivity. This is available for the ``start``, ``restart``, and ``run`` commands. .. option:: --update FILE Re-check ``FILE``, or add it to the set of files being checked (and check it). This option may be repeated, and it's only available for the ``recheck`` command. By default, mypy finds and checks all files changed since the previous run and files that depend on them. However, if you use this option (and/or :option:`--remove`), mypy assumes that only the explicitly specified files have changed. This is only useful to speed up mypy if you type check a very large number of files, and use an external, fast file system watcher, such as `watchman`_ or `watchdog`_, to determine which files got edited or deleted. *Note:* This option is never required and is only available for performance tuning. .. option:: --remove FILE Remove ``FILE`` from the set of files being checked. This option may be repeated. This is only available for the ``recheck`` command. See :option:`--update` above for when this may be useful. *Note:* This option is never required and is only available for performance tuning. .. option:: --fswatcher-dump-file FILE Collect information about the current internal file state. This is only available for the ``status`` command. This will dump JSON to ``FILE`` in the format ``{path: [modification_time, size, content_hash]}``. This is useful for debugging the built-in file system watcher. *Note:* This is an internal flag and the format may change. .. option:: --perf-stats-file FILE Write performance profiling information to ``FILE``. This is only available for the ``check``, ``recheck``, and ``run`` commands. 
``dmypy suggest`` uses call sites, return statements, and other heuristics (such as looking for signatures in base classes) to infer that ``format_id()`` accepts an ``int`` argument and returns a ``str``.
option:: --no-errors Only produce suggestions that cause no errors in the checked code. By default, mypy will try to find the most precise type, even if it causes some type errors. .. option:: --no-any Only produce suggestions that don't contain ``Any`` types. By default mypy proposes the most precise signature found, even if it contains ``Any`` types. .. option:: --flex-any FRACTION Only allow some fraction of types in the suggested signature to be ``Any`` types. The fraction ranges from ``0`` (same as ``--no-any``) to ``1``. .. option:: --try-text Try also using ``unicode`` wherever ``str`` is inferred. This flag may be useful for annotating Python 2/3 straddling code. .. option:: --callsites Only find call sites for a given function instead of suggesting a type. This will produce a list with line numbers and types of actual arguments for each call: ``/path/to/file.py:line: (arg_type_1, arg_type_2, ...)``. .. option:: --use-fixme NAME Use a dummy name instead of plain ``Any`` for types that cannot be inferred. This may be useful to emphasize to a user that a given type couldn't be inferred and needs to be entered manually. .. option:: --max-guesses NUMBER Set the maximum number of types to try for a function (default: ``64``). .. TODO: Add similar sections about go to definition, find usages, and reveal type when added, and then move this to a separate file. Limitations *********** * You have to use either the :option:`--follow-imports=error ` or the :option:`--follow-imports=skip ` option because of an implementation limitation. This can be defined through the command line or through a :ref:`configuration file `. .. _watchman: https://facebook.github.io/watchman/ .. _watchdog: https://pypi.org/project/watchdog/ .. _PyAnnotate: https://github.com/dropbox/pyannotate .. 
_mypy plugin for PyCharm: https://github.com/dropbox/mypy-PyCharm-plugin mypy-0.761/docs/source/protocols.rst0000644€tŠÔÚ€2›s®0000003243313576752246023733 0ustar jukkaDROPBOX\Domain Users00000000000000.. _protocol-types: Protocols and structural subtyping ================================== Mypy supports two ways of deciding whether two classes are compatible as types: nominal subtyping and structural subtyping. *Nominal* subtyping is strictly based on the class hierarchy. If class ``D`` inherits class ``C``, it's also a subtype of ``C``, and instances of ``D`` can be used when ``C`` instances are expected. This form of subtyping is used by default in mypy, since it's easy to understand and produces clear and concise error messages, and since it matches how the native :py:func:`isinstance ` check works -- based on class hierarchy. *Structural* subtyping can also be useful. Class ``D`` is a structural subtype of class ``C`` if the former has all attributes and methods of the latter, and with compatible types. Structural subtyping can be seen as a static equivalent of duck typing, which is well known to Python programmers. Mypy provides support for structural subtyping via protocol classes described below. See :pep:`544` for the detailed specification of protocols and structural subtyping in Python. .. _predefined_protocols: Predefined protocols ******************** The :py:mod:`typing` module defines various protocol classes that correspond to common Python protocols, such as :py:class:`Iterable[T] `. If a class defines a suitable :py:meth:`__iter__ ` method, mypy understands that it implements the iterable protocol and is compatible with :py:class:`Iterable[T] `. For example, ``IntList`` below is iterable, over ``int`` values: .. 
code-block:: python from typing import Iterator, Iterable, Optional class IntList: def __init__(self, value: int, next: Optional['IntList']) -> None: self.value = value self.next = next def __iter__(self) -> Iterator[int]: current = self while current: yield current.value current = current.next def print_numbered(items: Iterable[int]) -> None: for n, x in enumerate(items): print(n + 1, x) x = IntList(3, IntList(5, None)) print_numbered(x) # OK print_numbered([4, 5]) # Also OK The subsections below introduce all built-in protocols defined in :py:mod:`typing` and the signatures of the corresponding methods you need to define to implement each protocol (the signatures can be left out, as always, but mypy won't type check unannotated methods). Iteration protocols ................... The iteration protocols are useful in many contexts. For example, they allow iteration of objects in for loops. Iterable[T] ----------- The :ref:`example above ` has a simple implementation of an :py:meth:`__iter__ ` method. .. code-block:: python def __iter__(self) -> Iterator[T] See also :py:class:`~typing.Iterable`. Iterator[T] ----------- .. code-block:: python def __next__(self) -> T def __iter__(self) -> Iterator[T] See also :py:class:`~typing.Iterator`. Collection protocols .................... Many of these are implemented by built-in container types such as :py:class:`list` and :py:class:`dict`, and these are also useful for user-defined collection objects. Sized ----- This is a type for objects that support :py:func:`len(x) `. .. code-block:: python def __len__(self) -> int See also :py:class:`~typing.Sized`. Container[T] ------------ This is a type for objects that support the ``in`` operator. .. code-block:: python def __contains__(self, x: object) -> bool See also :py:class:`~typing.Container`. Collection[T] ------------- .. 
code-block:: python def __len__(self) -> int def __iter__(self) -> Iterator[T] def __contains__(self, x: object) -> bool See also :py:class:`~typing.Collection`. One-off protocols ................. These protocols are typically only useful with a single standard library function or class. Reversible[T] ------------- This is a type for objects that support :py:func:`reversed(x) `. .. code-block:: python def __reversed__(self) -> Iterator[T] See also :py:class:`~typing.Reversible`. SupportsAbs[T] -------------- This is a type for objects that support :py:func:`abs(x) `. ``T`` is the type of value returned by :py:func:`abs(x) `. .. code-block:: python def __abs__(self) -> T See also :py:class:`~typing.SupportsAbs`. SupportsBytes ------------- This is a type for objects that support :py:class:`bytes(x) `. .. code-block:: python def __bytes__(self) -> bytes See also :py:class:`~typing.SupportsBytes`. .. _supports-int-etc: SupportsComplex --------------- This is a type for objects that support :py:class:`complex(x) `. Note that no arithmetic operations are supported. .. code-block:: python def __complex__(self) -> complex See also :py:class:`~typing.SupportsComplex`. SupportsFloat ------------- This is a type for objects that support :py:class:`float(x) `. Note that no arithmetic operations are supported. .. code-block:: python def __float__(self) -> float See also :py:class:`~typing.SupportsFloat`. SupportsInt ----------- This is a type for objects that support :py:class:`int(x) `. Note that no arithmetic operations are supported. .. code-block:: python def __int__(self) -> int See also :py:class:`~typing.SupportsInt`. SupportsRound[T] ---------------- This is a type for objects that support :py:func:`round(x) `. .. code-block:: python def __round__(self) -> T See also :py:class:`~typing.SupportsRound`. Async protocols ............... These protocols can be useful in async code. See :ref:`async-and-await` for more information. Awaitable[T] ------------ .. 
code-block:: python def __await__(self) -> Generator[Any, None, T] See also :py:class:`~typing.Awaitable`. AsyncIterable[T] ---------------- .. code-block:: python def __aiter__(self) -> AsyncIterator[T] See also :py:class:`~typing.AsyncIterable`. AsyncIterator[T] ---------------- .. code-block:: python def __anext__(self) -> Awaitable[T] def __aiter__(self) -> AsyncIterator[T] See also :py:class:`~typing.AsyncIterator`. Context manager protocols ......................... There are two protocols for context managers -- one for regular context managers and one for async ones. These allow defining objects that can be used in ``with`` and ``async with`` statements. ContextManager[T] ----------------- .. code-block:: python def __enter__(self) -> T def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Optional[bool] See also :py:class:`~typing.ContextManager`. AsyncContextManager[T] ---------------------- .. code-block:: python def __aenter__(self) -> Awaitable[T] def __aexit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Awaitable[Optional[bool]] See also :py:class:`~typing.AsyncContextManager`. Simple user-defined protocols ***************************** You can define your own protocol class by inheriting the special ``Protocol`` class: .. code-block:: python from typing import Iterable from typing_extensions import Protocol class SupportsClose(Protocol): def close(self) -> None: ... # Empty method body (explicit '...') class Resource: # No SupportsClose base class! # ... some methods ... def close(self) -> None: self.resource.release() def close_all(items: Iterable[SupportsClose]) -> None: for item in items: item.close() close_all([Resource(), open('some/file')]) # Okay! ``Resource`` is a subtype of the ``SupportsClose`` protocol since it defines a compatible ``close`` method. 
Regular file objects returned by :py:func:`open` are similarly compatible with the protocol, as they support ``close()``. .. note:: The ``Protocol`` base class is provided in the ``typing_extensions`` package for Python 2.7 and 3.4-3.7. Starting with Python 3.8, ``Protocol`` is included in the ``typing`` module. Defining subprotocols and subclassing protocols *********************************************** You can also define subprotocols. Existing protocols can be extended and merged using multiple inheritance. Example: .. code-block:: python # ... continuing from the previous example class SupportsRead(Protocol): def read(self, amount: int) -> bytes: ... class TaggedReadableResource(SupportsClose, SupportsRead, Protocol): label: str class AdvancedResource(Resource): def __init__(self, label: str) -> None: self.label = label def read(self, amount: int) -> bytes: # some implementation ... resource: TaggedReadableResource resource = AdvancedResource('handle with care') # OK Note that inheriting from an existing protocol does not automatically turn the subclass into a protocol -- it just creates a regular (non-protocol) class or ABC that implements the given protocol (or protocols). The ``Protocol`` base class must always be explicitly present if you are defining a protocol: .. code-block:: python class NotAProtocol(SupportsClose): # This is NOT a protocol new_attr: int class Concrete: new_attr: int = 0 def close(self) -> None: ... # Error: nominal subtyping used by default x: NotAProtocol = Concrete() # Error! You can also include default implementations of methods in protocols. If you explicitly subclass these protocols you can inherit these default implementations. Explicitly including a protocol as a base class is also a way of documenting that your class implements a particular protocol, and it forces mypy to verify that your class implementation is actually compatible with the protocol. .. 
note:: You can use Python 3.6 variable annotations (:pep:`526`) to declare protocol attributes. On Python 2.7 and earlier Python 3 versions you can use type comments and properties. Recursive protocols ******************* Protocols can be recursive (self-referential) and mutually recursive. This is useful for declaring abstract recursive collections such as trees and linked lists: .. code-block:: python from typing import TypeVar, Optional from typing_extensions import Protocol class TreeLike(Protocol): value: int @property def left(self) -> Optional['TreeLike']: ... @property def right(self) -> Optional['TreeLike']: ... class SimpleTree: def __init__(self, value: int) -> None: self.value = value self.left: Optional['SimpleTree'] = None self.right: Optional['SimpleTree'] = None root: TreeLike = SimpleTree(0) # OK Using isinstance() with protocols ********************************* You can use a protocol class with :py:func:`isinstance` if you decorate it with the ``@runtime_checkable`` class decorator. The decorator adds support for basic runtime structural checks: .. code-block:: python from typing_extensions import Protocol, runtime_checkable @runtime_checkable class Portable(Protocol): handles: int class Mug: def __init__(self) -> None: self.handles = 1 mug = Mug() if isinstance(mug, Portable): use(mug.handles) # Works statically and at runtime :py:func:`isinstance` also works with the :ref:`predefined protocols ` in :py:mod:`typing` such as :py:class:`~typing.Iterable`. .. note:: :py:func:`isinstance` with protocols is not completely safe at runtime. For example, signatures of methods are not checked. The runtime implementation only checks that all protocol members are defined. .. _callback_protocols: Callback protocols ****************** Protocols can be used to define flexible callback types that are hard (or even impossible) to express using the :py:data:`Callable[...] ` syntax, such as variadic, overloaded, and complex generic callbacks. 
They are defined with a special :py:meth:`__call__ ` member: .. code-block:: python from typing import Optional, Iterable, List from typing_extensions import Protocol class Combiner(Protocol): def __call__(self, *vals: bytes, maxlen: Optional[int] = None) -> List[bytes]: ... def batch_proc(data: Iterable[bytes], cb_results: Combiner) -> bytes: for item in data: ... def good_cb(*vals: bytes, maxlen: Optional[int] = None) -> List[bytes]: ... def bad_cb(*vals: bytes, maxitems: Optional[int]) -> List[bytes]: ... batch_proc([], good_cb) # OK batch_proc([], bad_cb) # Error! Argument 2 has incompatible type because of # different name and kind in the callback Callback protocols and :py:data:`~typing.Callable` types can be used interchangeably. Keyword argument names in :py:meth:`__call__ ` methods must be identical, unless a double underscore prefix is used. For example: .. code-block:: python from typing import Callable, TypeVar from typing_extensions import Protocol T = TypeVar('T') class Copy(Protocol): def __call__(self, __origin: T) -> T: ... copy_a: Callable[[T], T] copy_b: Copy copy_a = copy_b # OK copy_b = copy_a # Also OK mypy-0.761/docs/source/python2.rst0000644€tŠÔÚ€2›s®0000001117113576752246023306 0ustar jukkaDROPBOX\Domain Users00000000000000.. _python2: Type checking Python 2 code =========================== For code that needs to be Python 2.7 compatible, function type annotations are given in comments, since the function annotation syntax was introduced in Python 3. The comment-based syntax is specified in :pep:`484`. Run mypy in Python 2 mode by using the :option:`--py2 ` option:: $ mypy --py2 program.py To run your program, you must have the ``typing`` module in your Python 2 module search path. Use ``pip install typing`` to install the module. This also works for Python 3 versions prior to 3.5 that don't include :py:mod:`typing` in the standard library. The example below illustrates the Python 2 function type annotation syntax. 
This syntax is also valid in Python 3 mode: .. code-block:: python from typing import List def hello(): # type: () -> None print 'hello' class Example: def method(self, lst, opt=0, *args, **kwargs): # type: (List[str], int, *str, **bool) -> int """Docstring comes after type comment.""" ... It's worth going through these details carefully to avoid surprises: - You don't provide an annotation for the ``self`` / ``cls`` variable of methods. - Docstring always comes *after* the type comment. - For ``*args`` and ``**kwargs`` the type should be prefixed with ``*`` or ``**``, respectively (except when using the multi-line annotation syntax described below). Again, the above example illustrates this. - Things like ``Any`` must be imported from ``typing``, even if they are only used in comments. - In Python 2 mode ``str`` is implicitly promoted to ``unicode``, similar to how ``int`` is compatible with ``float``. This is unlike ``bytes`` and ``str`` in Python 3, which are incompatible. ``bytes`` in Python 2 is equivalent to ``str``. (This might change in the future.) .. _multi_line_annotation: Multi-line Python 2 function annotations ---------------------------------------- Mypy also supports a multi-line comment annotation syntax. You can provide a separate annotation for each argument using the variable annotation syntax. When using the single-line annotation syntax described above, functions with long argument lists tend to result in overly long type comments and it's often tricky to see which argument type corresponds to which argument. The alternative, multi-line annotation syntax makes long annotations easier to read and write. Here is an example (from :pep:`484`): .. code-block:: python def send_email(address, # type: Union[str, List[str]] sender, # type: str cc, # type: Optional[List[str]] bcc, # type: Optional[List[str]] subject='', body=None # type: List[str] ): # type: (...) -> bool """Send an email message. 
Return True if successful.""" You write a separate annotation for each function argument on the same line as the argument. Each annotation must be on a separate line. If you leave out an annotation for an argument, it defaults to ``Any``. You provide a return type annotation in the body of the function using the form ``# type: (...) -> rt``, where ``rt`` is the return type. Note that the return type annotation contains literal three dots. When using multi-line comments, you do not need to prefix the types of your ``*arg`` and ``**kwarg`` parameters with ``*`` or ``**``. For example, here is how you would annotate the first example using multi-line comments: .. code-block:: python from typing import List class Example: def method(self, lst, # type: List[str] opt=0, # type: int *args, # type: str **kwargs # type: bool ): # type: (...) -> int """Docstring comes after type comment.""" ... Additional notes ---------------- - You should include types for arguments with default values in the annotation. The ``opt`` argument of ``method`` in the example at the beginning of this section is an example of this. - The annotation can be on the same line as the function header or on the following line. - Variables use a comment-based type syntax (explained in :ref:`explicit-var-types`). - You don't need to use string literal escapes for forward references within comments (string literal escapes are explained later). - Mypy uses a separate set of library stub files in `typeshed `_ for Python 2. Library support may vary between Python 2 and Python 3. mypy-0.761/docs/source/python36.rst0000644€tŠÔÚ€2›s®0000000403113576752246023372 0ustar jukkaDROPBOX\Domain Users00000000000000.. _python-36: New features in Python 3.6 ========================== Mypy has supported all language features new in Python 3.6 starting with mypy 0.510. This section introduces Python 3.6 features that interact with type checking. 
Syntax for variable annotations (:pep:`526`) -------------------------------------------- Python 3.6 introduced a new syntax for variable annotations (in global, class and local scopes). There are two variants of the syntax, with or without an initializer expression: .. code-block:: python from typing import Optional foo: Optional[int] # No initializer bar: List[str] = [] # Initializer .. _class-var: You can also mark names intended to be used as class variables with :py:data:`~typing.ClassVar`. In a pinch you can also use :py:data:`~typing.ClassVar` in ``# type`` comments. Example: .. code-block:: python from typing import ClassVar class C: x: int # Instance variable y: ClassVar[int] # Class variable z = None # type: ClassVar[int] def foo(self) -> None: self.x = 0 # OK self.y = 0 # Error: Cannot assign to class variable "y" via instance C.y = 0 # This is OK .. _async_generators_and_comprehensions: Asynchronous generators (:pep:`525`) and comprehensions (:pep:`530`) -------------------------------------------------------------------- Python 3.6 allows coroutines defined with ``async def`` (:pep:`492`) to be generators, i.e. contain ``yield`` expressions. It also introduced a syntax for asynchronous comprehensions. This example uses the :py:class:`~typing.AsyncIterator` type to define an async generator: .. code-block:: python from typing import AsyncIterator async def gen() -> AsyncIterator[bytes]: lst = [b async for b in gen()] # Inferred type is "List[bytes]" yield 'no way' # Error: Incompatible types (got "str", expected "bytes") New named tuple syntax ---------------------- Python 3.6 supports an alternative, class-based syntax for named tuples. See :ref:`named-tuples` for the details. mypy-0.761/docs/source/running_mypy.rst0000644€tŠÔÚ€2›s®0000004152113576752246024443 0ustar jukkaDROPBOX\Domain Users00000000000000.. 
_running-mypy: Running mypy and managing imports ================================= The :ref:`getting-started` page should have already introduced you to the basics of how to run mypy -- pass in the files and directories you want to type check via the command line:: $ mypy foo.py bar.py some_directory This page discusses in more detail how exactly to specify what files you want mypy to type check, how mypy discovers imported modules, and recommendations on how to handle any issues you may encounter along the way. If you are interested in learning about how to configure the actual way mypy type checks your code, see our :ref:`command-line` guide. .. _specifying-code-to-be-checked: Specifying code to be checked ***************************** Mypy lets you specify what files it should type check in several different ways. 1. First, you can pass in paths to Python files and directories you want to type check. For example:: $ mypy file_1.py foo/file_2.py file_3.pyi some/directory The above command tells mypy it should type check all of the provided files together. In addition, mypy will recursively type check the entire contents of any provided directories. For more details about how exactly this is done, see :ref:`Mapping file paths to modules `. 2. Second, you can use the :option:`-m ` flag (long form: :option:`--module `) to specify a module name to be type checked. The name of a module is identical to the name you would use to import that module within a Python program. For example, running:: $ mypy -m html.parser ...will type check the module ``html.parser`` (this happens to be a library stub). Mypy will use an algorithm very similar to the one Python uses to find where modules and imports are located on the file system. For more details, see :ref:`finding-imports`. 3. Third, you can use the :option:`-p ` (long form: :option:`--package `) flag to specify a package to be (recursively) type checked. 
This flag is almost identical to the :option:`-m ` flag except that if you give it a package name, mypy will recursively type check all submodules and subpackages of that package. For example, running:: $ mypy -p html ...will type check the entire ``html`` package (of library stubs). In contrast, if we had used the :option:`-m ` flag, mypy would have type checked just ``html``'s ``__init__.py`` file and anything imported from there. Note that we can specify multiple packages and modules on the command line. For example:: $ mypy --package p.a --package p.b --module c 4. Fourth, you can also instruct mypy to directly type check small strings as programs by using the :option:`-c ` (long form: :option:`--command `) flag. For example:: $ mypy -c 'x = [1, 2]; print(x())' ...will type check the above string as a mini-program (and in this case, will report that ``List[int]`` is not callable). Reading a list of files from a file *********************************** Finally, any command-line argument starting with ``@`` reads additional command-line arguments from the file following the ``@`` character. This is primarily useful if you have a file containing a list of files that you want to be type-checked: instead of using shell syntax like:: $ mypy $(cat file_of_files.txt) you can use this instead:: $ mypy @file_of_files.txt This file can technically also contain any command line flag, not just file paths. However, if you want to configure many different flags, the recommended approach is to use a :ref:`configuration file ` instead. How mypy handles imports ************************ When mypy encounters an ``import`` statement, it will first :ref:`attempt to locate ` that module or type stubs for that module in the file system. Mypy will then type check the imported module. There are three different outcomes of this process: 1. Mypy is unable to follow the import: the module either does not exist, or is a third party library that does not use type hints. 2. 
Mypy is able to follow and type check the import, but you did not want mypy to type check that module at all. 3. Mypy is able to successfully both follow and type check the module, and you want mypy to type check that module. The third outcome is what mypy will do in the ideal case. The following sections will discuss what to do in the other two cases. .. _ignore-missing-imports: Missing imports --------------- When you import a module, mypy may report that it is unable to follow the import. This can cause a lot of errors that look like the following:: main.py:1: error: No library stub file for standard library module 'antigravity' main.py:2: error: No library stub file for module 'flask' main.py:3: error: Cannot find implementation or library stub for module named 'this_module_does_not_exist' There are several different things you can try doing, depending on the exact nature of the module. If the module is a part of your own codebase, try: 1. Making sure your import does not contain a typo. 2. Reading the :ref:`finding-imports` section below to make sure you understand how exactly mypy searches for and finds modules and modify how you're invoking mypy accordingly. 3. Adding the directory containing that module to either the ``MYPYPATH`` environment variable or the ``mypy_path`` :ref:`config file option `. Note: if the module you are trying to import is actually a *submodule* of some package, you should add the directory containing the *entire* package to ``MYPYPATH``. For example, suppose you are trying to add the module ``foo.bar.baz``, which is located at ``~/foo-project/src/foo/bar/baz.py``. In this case, you should add ``~/foo-project/src`` to ``MYPYPATH``. If the module is a third party library, you must make sure that there are type hints available for that library. 
Mypy by default will not attempt to infer the types of any 3rd party libraries you may have installed unless they either have declared themselves to be :ref:`PEP 561 compliant stub package ` or have registered themselves on `typeshed `_, the repository of types for the standard library and some 3rd party libraries. If you are getting an import-related error, this means the library you are trying to use has done neither of these things. In that case, you can try: 1. Searching to see if there is a :ref:`PEP 561 compliant stub package `. corresponding to your third party library. Stub packages let you install type hints independently from the library itself. 2. :ref:`Writing your own stub files ` containing type hints for the library. You can point mypy at your type hints either by passing them in via the command line, by adding the location to the ``MYPYPATH`` environment variable, or by using the ``mypy_path`` :ref:`config file option `. Note that if you decide to write your own stub files, they don't need to be complete! A good strategy is to add stubs for just the parts of the library you need and iterate on them over time. If you want to share your work, you can try contributing your stubs back to the library -- see our documentation on creating :ref:`PEP 561 compliant packages `. If the module is a third party library, but you cannot find any existing type hints nor have time to write your own, you can *silence* the errors: 1. To silence a *single* missing import error, add a ``# type: ignore`` at the end of the line containing the import. 2. To silence *all* missing import imports errors from a single library, add a section to your :ref:`mypy config file ` for that library setting ``ignore_missing_imports`` to True. For example, suppose your codebase makes heavy use of an (untyped) library named ``foobar``. 
You can silence all import errors associated with that library and that library alone by adding the following section to your config file:: [mypy-foobar] ignore_missing_imports = True Note: this option is equivalent to adding a ``# type: ignore`` to every import of ``foobar`` in your codebase. For more information, see the documentation about configuring :ref:`import discovery ` in config files. 3. To silence *all* missing import errors for *all* libraries in your codebase, invoke mypy with the :option:`--ignore-missing-imports ` command line flag or set the ``ignore_missing_imports`` :ref:`config file option ` to True in the *global* section of your mypy config file:: [mypy] ignore_missing_imports = True We recommend using this approach only as a last resort: it's equivalent to adding a ``# type: ignore`` to all unresolved imports in your codebase. If the module is a part of the standard library, try: 1. Updating mypy and re-running it. It's possible type hints for that corner of the standard library were added in a later version of mypy. 2. Filing a bug report on `typeshed `_, the repository of type hints for the standard library that comes bundled with mypy. You can expedite this process by also submitting a pull request fixing the bug. Changes to typeshed will come bundled with mypy the next time it's released. In the meantime, you can add a ``# type: ignore`` to silence any relevant errors. After upgrading, we recommend running mypy using the :option:`--warn-unused-ignores ` flag to help you find any ``# type: ignore`` annotations you no longer need. .. _follow-imports: Following imports ----------------- Mypy is designed to :ref:`doggedly follow all imports `, even if the imported module is not a file you explicitly wanted mypy to check. For example, suppose we have two modules ``mycode.foo`` and ``mycode.bar``: the former has type hints and the latter does not. We run ``mypy -m mycode.foo`` and mypy discovers that ``mycode.foo`` imports ``mycode.bar``. 
How do we want mypy to type check ``mycode.bar``? We can configure the desired behavior by using the :option:`--follow-imports ` flag. This flag accepts one of four string values: - ``normal`` (the default) follows all imports normally and type checks all top level code (as well as the bodies of all functions and methods with at least one type annotation in the signature). - ``silent`` behaves in the same way as ``normal`` but will additionally *suppress* any error messages. - ``skip`` will *not* follow imports and instead will silently replace the module (and *anything imported from it*) with an object of type ``Any``. - ``error`` behaves in the same way as ``skip`` but is not quite as silent -- it will flag the import as an error, like this:: main.py:1: note: Import of 'mycode.bar' ignored main.py:1: note: (Using --follow-imports=error, module not passed on command line) If you are starting a new codebase and plan on using type hints from the start, we recommend you use either :option:`--follow-imports=normal ` (the default) or :option:`--follow-imports=error `. Either option will help make sure you are not skipping checking any part of your codebase by accident. If you are planning on adding type hints to a large, existing code base, we recommend you start by trying to make your entire codebase (including files that do not use type hints) pass under :option:`--follow-imports=normal `. This is usually not too difficult to do: mypy is designed to report as few error messages as possible when it is looking at unannotated code. If doing this is intractable, we recommend passing mypy just the files you want to type check and use :option:`--follow-imports=silent `. Even if mypy is unable to perfectly type check a file, it can still glean some useful information by parsing it (for example, understanding what methods a given object has). See :ref:`existing-code` for more recommendations. 
We do not recommend using ``skip`` unless you know what you are doing: while this option can be quite powerful, it can also cause many hard-to-debug errors. .. _mapping-paths-to-modules: Mapping file paths to modules ***************************** One of the main ways you can tell mypy what files to type check is by providing mypy the paths to those files. For example:: $ mypy file_1.py foo/file_2.py file_3.pyi some/directory This section describes how exactly mypy maps the provided paths to modules to type check. - Files ending in ``.py`` (and stub files ending in ``.pyi``) are checked as Python modules. - Files not ending in ``.py`` or ``.pyi`` are assumed to be Python scripts and checked as such. - Directories representing Python packages (i.e. containing a ``__init__.py[i]`` file) are checked as Python packages; all submodules and subpackages will be checked (subpackages must themselves have a ``__init__.py[i]`` file). - Directories that don't represent Python packages (i.e. not directly containing an ``__init__.py[i]`` file) are checked as follows: - All ``*.py[i]`` files contained directly therein are checked as toplevel Python modules; - All packages contained directly therein (i.e. immediate subdirectories with an ``__init__.py[i]`` file) are checked as toplevel Python packages. One more thing about checking modules and packages: if the directory *containing* a module or package specified on the command line has an ``__init__.py[i]`` file, mypy assigns these an absolute module name by crawling up the path until no ``__init__.py[i]`` file is found. For example, suppose we run the command ``mypy foo/bar/baz.py`` where ``foo/bar/__init__.py`` exists but ``foo/__init__.py`` does not. Then the module name assumed is ``bar.baz`` and the directory ``foo`` is added to mypy's module search path. On the other hand, if ``foo/bar/__init__.py`` did not exist, ``foo/bar`` would be added to the module search path instead, and the module name assumed is just ``baz``. 
If a script (a file not ending in ``.py[i]``) is processed, the module name assumed is ``__main__`` (matching the behavior of the Python interpreter), unless :option:`--scripts-are-modules ` is passed. .. _finding-imports: How imports are found ********************* When mypy encounters an ``import`` statement or receives module names from the command line via the :option:`--module ` or :option:`--package ` flags, mypy tries to find the module on the file system similar to the way Python finds it. However, there are some differences. First, mypy has its own search path. This is computed from the following items: - The ``MYPYPATH`` environment variable (a colon-separated list of directories). - The ``mypy_path`` :ref:`config file option `. - The directories containing the sources given on the command line (see below). - The installed packages marked as safe for type checking (see :ref:`PEP 561 support `) - The relevant directories of the `typeshed `_ repo. .. note:: You cannot point to a :pep:`561` package via the ``MYPYPATH``, it must be installed (see :ref:`PEP 561 support `) For sources given on the command line, the path is adjusted by crawling up from the given file or package to the nearest directory that does not contain an ``__init__.py`` or ``__init__.pyi`` file. If the given path is relative, it will only crawl as far as the current working directory. Second, mypy searches for stub files in addition to regular Python files and packages. The rules for searching for a module ``foo`` are as follows: - The search looks in each of the directories in the search path (see above) until a match is found. - If a package named ``foo`` is found (i.e. a directory ``foo`` containing an ``__init__.py`` or ``__init__.pyi`` file) that's a match. - If a stub file named ``foo.pyi`` is found, that's a match. - If a Python module named ``foo.py`` is found, that's a match. 
These matches are tried in order, so that if multiple matches are found in the same directory on the search path (e.g. a package and a Python file, or a stub file and a Python file) the first one in the above list wins. In particular, if a Python file and a stub file are both present in the same directory on the search path, only the stub file is used. (However, if the files are in different directories, the one found in the earlier directory is used.) mypy-0.761/docs/source/stubgen.rst0000644€tŠÔÚ€2›s®0000001416213576752246023355 0ustar jukkaDROPBOX\Domain Users00000000000000.. _stugen: .. program:: stubgen Automatic stub generation (stubgen) =================================== A stub file (see :pep:`484`) contains only type hints for the public interface of a module, with empty function bodies. Mypy can use a stub file instead of the real implementation to provide type information for the module. They are useful for third-party modules whose authors have not yet added type hints (and when no stubs are available in typeshed) and C extension modules (which mypy can't directly process). Mypy includes the ``stubgen`` tool that can automatically generate stub files (``.pyi`` files) for Python modules and C extension modules. For example, consider this source file: .. code-block:: python from other_module import dynamic BORDER_WIDTH = 15 class Window: parent = dynamic() def __init__(self, width, height): self.width = width self.height = height def create_empty() -> Window: return Window(0, 0) Stubgen can generate this stub file based on the above file: .. code-block:: python from typing import Any BORDER_WIDTH: int = ... class Window: parent: Any = ... width: Any = ... height: Any = ... def __init__(self, width, height) -> None: ... def create_empty() -> Window: ... Stubgen generates *draft* stubs. The auto-generated stub files often require some manual updates, and most types will default to ``Any``. 
The stubs will be much more useful if you add more precise type annotations, at least for the most commonly used functionality. The rest of this section documents the command line interface of stubgen. Run ``stubgen --help`` for a quick summary of options. .. note:: The command-line flags may change between releases. Specifying what to stub *********************** You can give stubgen paths of the source files for which you want to generate stubs:: $ stubgen foo.py bar.py This generates stubs ``out/foo.pyi`` and ``out/bar.pyi``. The default output directory ``out`` can be overridden with :option:`-o DIR <-o>`. You can also pass directories, and stubgen will recursively search them for any ``.py`` files and generate stubs for all of them:: $ stubgen my_pkg_dir Alternatively, you can give module or package names using the ``-m`` or ``-p`` options:: $ stubgen -m foo -m bar -p my_pkg_dir Details of the options: .. option:: -m MODULE, --module MODULE Generate a stub file for the given module. This flag may be repeated multiple times. Stubgen *will not* recursively generate stubs for any submodules of the provided module. .. option:: -p PACKAGE, --package PACKAGE Generate stubs for the given package. This flag maybe repeated multiple times. Stubgen *will* recursively generate stubs for all submodules of the provided package. This flag is identical to :option:`--module` apart from this behavior. .. note:: You can't mix paths and :option:`-m`/:option:`-p` options in the same stubgen invocation. Stubgen applies heuristics to avoid generating stubs for submodules that include tests or vendored third-party packages. Specifying how to generate stubs ******************************** By default stubgen will try to import the target modules and packages. This allows stubgen to use runtime introspection to generate stubs for C extension modules and to improve the quality of the generated stubs. 
By default, stubgen will also use mypy to perform light-weight semantic analysis of any Python modules. Use the following flags to alter the default behavior: .. option:: --no-import Don't try to import modules. Instead only use mypy's normal search mechanism to find sources. This does not support C extension modules. This flag also disables runtime introspection functionality, which mypy uses to find the value of ``__all__``. As result the set of exported imported names in stubs may be incomplete. This flag is generally only useful when importing a module causes unwanted side effects, such as the running of tests. Stubgen tries to skip test modules even without this option, but this does not always work. .. option:: --parse-only Don't perform semantic analysis of source files. This may generate worse stubs -- in particular, some module, class, and function aliases may be represented as variables with the ``Any`` type. This is generally only useful if semantic analysis causes a critical mypy error. .. option:: --doc-dir PATH Try to infer better signatures by parsing .rst documentation in ``PATH``. This may result in better stubs, but currently it only works for C extension modules. Additional flags **************** .. option:: --py2 Run stubgen in Python 2 mode (the default is Python 3 mode). .. option:: --ignore-errors If an exception was raised during stub generation, continue to process any remaining modules instead of immediately failing with an error. .. option:: --include-private Include definitions that are considered private in stubs (with names such as ``_foo`` with single leading underscore and no trailing underscores). .. option:: --export-less Don't export all names imported from other modules within the same package. Instead, only export imported names that are not referenced in the module that contains the import. .. option:: --search-path PATH Specify module search directories, separated by colons (only used if :option:`--no-import` is given). .. 
option:: --python-executable PATH Use Python interpreter at ``PATH`` for importing modules and runtime introspection. This has no effect with :option:`--no-import`, and this only works in Python 2 mode. In Python 3 mode the Python interpreter used to run stubgen will always be used. .. option:: -o PATH, --output PATH Change the output directory. By default the stubs are written in the ``./out`` directory. The output directory will be created if it doesn't exist. Existing stubs in the output directory will be overwritten without warning. .. option:: -v, --verbose Produce more verbose output. .. option:: -q, --quiet Produce less verbose output. mypy-0.761/docs/source/stubs.rst0000644€tŠÔÚ€2›s®0000001250413576752246023044 0ustar jukkaDROPBOX\Domain Users00000000000000.. _stub-files: Stub files ========== Mypy uses stub files stored in the `typeshed `_ repository to determine the types of standard library and third-party library functions, classes, and other definitions. You can also create your own stubs that will be used to type check your code. The basic properties of stubs were introduced back in :ref:`stubs-intro`. Creating a stub *************** Here is an overview of how to create a stub file: * Write a stub file for the library (or an arbitrary module) and store it as a ``.pyi`` file in the same directory as the library module. * Alternatively, put your stubs (``.pyi`` files) in a directory reserved for stubs (e.g., :file:`myproject/stubs`). In this case you have to set the environment variable ``MYPYPATH`` to refer to the directory. For example:: $ export MYPYPATH=~/work/myproject/stubs Use the normal Python file name conventions for modules, e.g. :file:`csv.pyi` for module ``csv``. Use a subdirectory with :file:`__init__.pyi` for packages. Note that :pep:`561` stub-only packages must be installed, and may not be pointed at through the ``MYPYPATH`` (see :ref:`PEP 561 support `). 
If a directory contains both a ``.py`` and a ``.pyi`` file for the same module, the ``.pyi`` file takes precedence. This way you can easily add annotations for a module even if you don't want to modify the source code. This can be useful, for example, if you use 3rd party open source libraries in your program (and there are no stubs in typeshed yet). That's it! Now you can access the module in mypy programs and type check code that uses the library. If you write a stub for a library module, consider making it available for other programmers that use mypy by contributing it back to the typeshed repo. There is more information about creating stubs in the `mypy wiki `_. The following sections explain the kinds of type annotations you can use in your programs and stub files. .. note:: You may be tempted to point ``MYPYPATH`` to the standard library or to the :file:`site-packages` directory where your 3rd party packages are installed. This is almost always a bad idea -- you will likely get tons of error messages about code you didn't write and that mypy can't analyze all that well yet, and in the worst case scenario mypy may crash due to some construct in a 3rd party package that it didn't expect. Stub file syntax **************** Stub files are written in normal Python 3 syntax, but generally leaving out runtime logic like variable initializers, function bodies, and default arguments. If it is not possible to completely leave out some piece of runtime logic, the recommended convention is to replace or elide them with ellipsis expressions (``...``). Each ellipsis below is literally written in the stub file as three dots: .. code-block:: python # Variables with annotations do not need to be assigned a value. # So by convention, we omit them in the stub file. x: int # Function bodies cannot be completely removed. By convention, # we replace them with `...` instead of the `pass` statement. def func_1(code: str) -> int: ... # We can do the same with default arguments. 
def func_2(a: int, b: int = ...) -> int: ... .. note:: The ellipsis ``...`` is also used with a different meaning in :ref:`callable types ` and :ref:`tuple types `. .. note:: It is always legal to use Python 3 syntax in stub files, even when writing Python 2 code. The example above is a valid stub file for both Python 2 and 3. Using stub file syntax at runtime ********************************* You may also occasionally need to elide actual logic in regular Python code -- for example, when writing methods in :ref:`overload variants ` or :ref:`custom protocols `. The recommended style is to use ellipses to do so, just like in stub files. It is also considered stylistically acceptable to throw a :py:exc:`NotImplementedError` in cases where the user of the code may accidentally call functions with no actual logic. You can also elide default arguments as long as the function body also contains no runtime logic: the function body only contains a single ellipsis, the pass statement, or a ``raise NotImplementedError()``. It is also acceptable for the function body to contain a docstring. For example: .. code-block:: python from typing import List from typing_extensions import Protocol class Resource(Protocol): def ok_1(self, foo: List[str] = ...) -> None: ... def ok_2(self, foo: List[str] = ...) -> None: raise NotImplementedError() def ok_3(self, foo: List[str] = ...) -> None: """Some docstring""" pass # Error: Incompatible default for argument "foo" (default has # type "ellipsis", argument has type "List[str]") def not_ok(self, foo: List[str] = ...) -> None: print(foo) .. note:: Ellipsis expressions are legal syntax in Python 3 only. This means it is not possible to elide default arguments in Python 2 code. You can still elide function bodies in Python 2 by using either the ``pass`` statement or by throwing a :py:exc:`NotImplementedError`. 
mypy-0.761/docs/source/supported_python_features.rst0000644€tŠÔÚ€2›s®0000000165113576752246027231 0ustar jukkaDROPBOX\Domain Users00000000000000Supported Python features ========================= A list of unsupported Python features is maintained in the mypy wiki: - `Unsupported Python features `_ Runtime definition of methods and functions ******************************************* By default, mypy will complain if you add a function to a class or module outside its definition -- but only if this is visible to the type checker. This only affects static checking, as mypy performs no additional type checking at runtime. You can easily work around this. For example, you can use dynamically typed code or values with ``Any`` types, or you can use :py:func:`setattr` or other introspection features. However, you need to be careful if you decide to do this. If used indiscriminately, you may have difficulty using static typing effectively, since the type checker cannot see functions defined at runtime. mypy-0.761/docs/source/type_inference_and_annotations.rst0000644€tŠÔÚ€2›s®0000001550513576752246030146 0ustar jukkaDROPBOX\Domain Users00000000000000Type inference and type annotations =================================== Type inference ************** Mypy considers the initial assignment as the definition of a variable. If you do not explicitly specify the type of the variable, mypy infers the type based on the static type of the value expression: .. code-block:: python i = 1 # Infer type "int" for i l = [1, 2] # Infer type "List[int]" for l Type inference is not used in dynamically typed functions (those without a function type annotation) — every local variable type defaults to ``Any`` in such functions. ``Any`` is discussed later in more detail. .. _explicit-var-types: Explicit types for variables **************************** You can override the inferred type of a variable by using a variable type annotation: .. 
code-block:: python from typing import Union x: Union[int, str] = 1 Without the type annotation, the type of ``x`` would be just ``int``. We use an annotation to give it a more general type ``Union[int, str]`` (this type means that the value can be either an ``int`` or a ``str``). Mypy checks that the type of the initializer is compatible with the declared type. The following example is not valid, since the initializer is a floating point number, and this is incompatible with the declared type: .. code-block:: python x: Union[int, str] = 1.1 # Error! The variable annotation syntax is available starting from Python 3.6. In earlier Python versions, you can use a special comment after an assignment statement to declare the type of a variable: .. code-block:: python x = 1 # type: Union[int, str] We'll use both syntax variants in examples. The syntax variants are mostly interchangeable, but the variable annotation syntax allows defining the type of a variable without initialization, which is not possible with the comment syntax: .. code-block:: python x: str # Declare type of 'x' without initialization .. note:: The best way to think about this is that the type annotation sets the type of the variable, not the type of the expression. To force the type of an expression you can use :py:func:`cast(\, \) `. Explicit types for collections ****************************** The type checker cannot always infer the type of a list or a dictionary. This often arises when creating an empty list or dictionary and assigning it to a new variable that doesn't have an explicit variable type. Here is an example where mypy can't infer the type without some help: .. code-block:: python l = [] # Error: Need type annotation for 'l' In these cases you can give the type explicitly using a type annotation: .. 
code-block:: python l: List[int] = [] # Create empty list with type List[int] d: Dict[str, int] = {} # Create empty dictionary (str -> int) Similarly, you can also give an explicit type when creating an empty set: .. code-block:: python s: Set[int] = set() Compatibility of container types ******************************** The following program generates a mypy error, since ``List[int]`` is not compatible with ``List[object]``: .. code-block:: python def f(l: List[object], k: List[int]) -> None: l = k # Type check error: incompatible types in assignment The reason why the above assignment is disallowed is that allowing the assignment could result in non-int values stored in a list of ``int``: .. code-block:: python def f(l: List[object], k: List[int]) -> None: l = k l.append('x') print(k[-1]) # Ouch; a string in List[int] Other container types like :py:class:`~typing.Dict` and :py:class:`~typing.Set` behave similarly. We will discuss how you can work around this in :ref:`variance`. You can still run the above program; it prints ``x``. This illustrates the fact that static types are used during type checking, but they do not affect the runtime behavior of programs. You can run programs with type check failures, which is often very handy when performing a large refactoring. Thus you can always 'work around' the type system, and it doesn't really limit what you can do in your program. Context in type inference ************************* Type inference is *bidirectional* and takes context into account. For example, the following is valid: .. code-block:: python def f(l: List[object]) -> None: l = [1, 2] # Infer type List[object] for [1, 2], not List[int] In an assignment, the type context is determined by the assignment target. In this case this is ``l``, which has the type ``List[object]``. The value expression ``[1, 2]`` is type checked in this context and given the type ``List[object]``. 
In the previous example we introduced a new variable ``l``, and here the type context was empty. Declared argument types are also used for type context. In this program mypy knows that the empty list ``[]`` should have type ``List[int]`` based on the declared type of ``arg`` in ``foo``: .. code-block:: python def foo(arg: List[int]) -> None: print('Items:', ''.join(str(a) for a in arg)) foo([]) # OK However, context only works within a single statement. Here mypy requires an annotation for the empty list, since the context would only be available in the following statement: .. code-block:: python def foo(arg: List[int]) -> None: print('Items:', ', '.join(arg)) a = [] # Error: Need type annotation for 'a' foo(a) Working around the issue is easy by adding a type annotation: .. code-block:: Python ... a: List[int] = [] # OK foo(a) Declaring multiple variable types at a time ******************************************* You can declare more than a single variable at a time, but only with a type comment. In order to nicely work with multiple assignment, you must give each variable a type separately: .. code-block:: python i, found = 0, False # type: int, bool You can optionally use parentheses around the types, assignment targets and assigned expression: .. code-block:: python i, found = 0, False # type: (int, bool) # OK (i, found) = 0, False # type: int, bool # OK i, found = (0, False) # type: int, bool # OK (i, found) = (0, False) # type: (int, bool) # OK Starred expressions ******************* In most cases, mypy can infer the type of starred expressions from the right-hand side of an assignment, but not always: .. code-block:: python a, *bs = 1, 2, 3 # OK p, q, *rs = 1, 2 # Error: Type of rs cannot be inferred On first line, the type of ``bs`` is inferred to be ``List[int]``. However, on the second line, mypy cannot infer the type of ``rs``, because there is no right-hand side value for ``rs`` to infer the type from. 
In cases like these, the starred expression needs to be annotated with a starred type: .. code-block:: python p, q, *rs = 1, 2 # type: int, int, List[int] Here, the type of ``rs`` is set to ``List[int]``. mypy-0.761/mypy/0000755€tŠÔÚ€2›s®0000000000013576752266017720 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/__init__.py0000644€tŠÔÚ€2›s®0000000004513576752246022026 0ustar jukkaDROPBOX\Domain Users00000000000000# This page intentionally left blank mypy-0.761/mypy/__main__.py0000644€tŠÔÚ€2›s®0000000033413576752246022010 0ustar jukkaDROPBOX\Domain Users00000000000000"""Mypy type checker command line tool.""" import sys from mypy.main import main def console_entry() -> None: main(None, sys.stdout, sys.stderr) if __name__ == '__main__': main(None, sys.stdout, sys.stderr) mypy-0.761/mypy/api.py0000644€tŠÔÚ€2›s®0000000425013576752246021042 0ustar jukkaDROPBOX\Domain Users00000000000000"""This module makes it possible to use mypy as part of a Python application. Since mypy still changes, the API was kept utterly simple and non-intrusive. It just mimics command line activation without starting a new interpreter. So the normal docs about the mypy command line apply. Changes in the command line version of mypy will be immediately useable. Just import this module and then call the 'run' function with a parameter of type List[str], containing what normally would have been the command line arguments to mypy. Function 'run' returns a Tuple[str, str, int], namely (, , ), in which is what mypy normally writes to sys.stdout, is what mypy normally writes to sys.stderr and exit_status is the exit status mypy normally returns to the operating system. Any pretty formatting is left to the caller. The 'run_dmypy' function is similar, but instead mimics invocation of dmypy. Note that these APIs don't support incremental generation of error messages. 
Trivial example of code using this module: import sys from mypy import api result = api.run(sys.argv[1:]) if result[0]: print('\nType checking report:\n') print(result[0]) # stdout if result[1]: print('\nError report:\n') print(result[1]) # stderr print('\nExit status:', result[2]) """ from io import StringIO from typing import List, Tuple, TextIO, Callable def _run(main_wrapper: Callable[[TextIO, TextIO], None]) -> Tuple[str, str, int]: stdout = StringIO() stderr = StringIO() try: main_wrapper(stdout, stderr) exit_status = 0 except SystemExit as system_exit: exit_status = system_exit.code return stdout.getvalue(), stderr.getvalue(), exit_status def run(args: List[str]) -> Tuple[str, str, int]: # Lazy import to avoid needing to import all of mypy to call run_dmypy from mypy.main import main return _run(lambda stdout, stderr: main(None, args=args, stdout=stdout, stderr=stderr)) def run_dmypy(args: List[str]) -> Tuple[str, str, int]: from mypy.dmypy.client import main return _run(lambda stdout, stderr: main(args)) mypy-0.761/mypy/applytype.py0000644€tŠÔÚ€2›s®0000000712013576752246022317 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Sequence, Optional, Callable import mypy.subtypes import mypy.sametypes from mypy.expandtype import expand_type from mypy.types import ( Type, TypeVarId, TypeVarType, CallableType, AnyType, PartialType, get_proper_types ) from mypy.nodes import Context def apply_generic_arguments( callable: CallableType, orig_types: Sequence[Optional[Type]], report_incompatible_typevar_value: Callable[[CallableType, Type, str, Context], None], context: Context, skip_unsatisfied: bool = False) -> CallableType: """Apply generic type arguments to a callable type. For example, applying [int] to 'def [T] (T) -> T' results in 'def (int) -> int'. Note that each type can be None; in this case, it will not be applied. 
def apply_generic_arguments(
        callable: CallableType,
        orig_types: Sequence[Optional[Type]],
        report_incompatible_typevar_value: Callable[[CallableType, Type, str, Context], None],
        context: Context,
        skip_unsatisfied: bool = False) -> CallableType:
    """Apply generic type arguments to a callable type.

    For example, applying [int] to 'def [T] (T) -> T' results in
    'def (int) -> int'.

    Note that each type can be None; in this case, it will not be applied.

    If `skip_unsatisfied` is True, then just skip the types that don't satisfy type variable
    bound or constraints, instead of giving an error.
    """
    tvars = callable.variables
    # Every type variable of the callable must have a (possibly None) entry.
    assert len(tvars) == len(orig_types)
    # Check that inferred type variable values are compatible with allowed
    # values and bounds.  Also, promote subtype values to allowed values.
    types = get_proper_types(orig_types)
    for i, type in enumerate(types):
        assert not isinstance(type, PartialType), "Internal error: must never apply partial type"
        values = get_proper_types(callable.variables[i].values)
        if type is None:
            # No inferred value for this variable -- leave it unapplied.
            continue
        if values:
            # The type variable has an explicit list of allowed values.
            if isinstance(type, AnyType):
                continue
            if isinstance(type, TypeVarType) and type.values:
                # Allow substituting T1 for T if every allowed value of T1
                # is also a legal value of T.
                if all(any(mypy.sametypes.is_same_type(v, v1)
                           for v in values)
                       for v1 in type.values):
                    continue
            # Collect the allowed values that the inferred type is a subtype of.
            matching = []
            for value in values:
                if mypy.subtypes.is_subtype(type, value):
                    matching.append(value)
            if matching:
                best = matching[0]
                # If there are more than one matching value, we select the narrowest
                for match in matching[1:]:
                    if mypy.subtypes.is_subtype(match, best):
                        best = match
                types[i] = best
            else:
                if skip_unsatisfied:
                    types[i] = None
                else:
                    report_incompatible_typevar_value(callable, type,
                                                      callable.variables[i].name, context)
        else:
            # No value restriction -- check against the upper bound instead.
            upper_bound = callable.variables[i].upper_bound
            if not mypy.subtypes.is_subtype(type, upper_bound):
                if skip_unsatisfied:
                    types[i] = None
                else:
                    report_incompatible_typevar_value(callable, type,
                                                      callable.variables[i].name, context)

    # Create a map from type variable id to target type.
    id_to_type = {}  # type: Dict[TypeVarId, Type]
    for i, tv in enumerate(tvars):
        typ = types[i]
        if typ:
            id_to_type[tv.id] = typ

    # Apply arguments to argument types.
    arg_types = [expand_type(at, id_to_type) for at in callable.arg_types]

    # The callable may retain some type vars if only some were applied.
    remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type]

    return callable.copy_modified(
        arg_types=arg_types,
        ret_type=expand_type(callable.ret_type, id_to_type),
        variables=remaining_tvars,
    )
def map_actuals_to_formals(actual_kinds: List[int],
                           actual_names: Optional[Sequence[Optional[str]]],
                           formal_kinds: List[int],
                           formal_names: Sequence[Optional[str]],
                           actual_arg_type: Callable[[int], Type]) -> List[List[int]]:
    """Calculate mapping between actual (caller) args and formals.

    The result contains a list of caller argument indexes mapping to each
    callee argument index, indexed by callee index.

    The caller_arg_type argument should evaluate to the type of the actual
    argument type with the given index.
    """
    nformals = len(formal_kinds)
    formal_to_actual = [[] for i in range(nformals)]  # type: List[List[int]]
    fi = 0  # Index of the next formal that a positional actual can fill.
    for ai, actual_kind in enumerate(actual_kinds):
        if actual_kind == nodes.ARG_POS:
            if fi < nformals:
                if formal_kinds[fi] in [nodes.ARG_POS, nodes.ARG_OPT,
                                        nodes.ARG_NAMED, nodes.ARG_NAMED_OPT]:
                    formal_to_actual[fi].append(ai)
                    fi += 1
                elif formal_kinds[fi] == nodes.ARG_STAR:
                    # A *args formal absorbs all remaining positionals; don't advance fi.
                    formal_to_actual[fi].append(ai)
        elif actual_kind == nodes.ARG_STAR:
            # We need to know the actual type to map varargs.
            actualt = get_proper_type(actual_arg_type(ai))
            if isinstance(actualt, TupleType):
                # A tuple actual maps to a fixed number of formals.
                for _ in range(len(actualt.items)):
                    if fi < nformals:
                        if formal_kinds[fi] != nodes.ARG_STAR2:
                            formal_to_actual[fi].append(ai)
                        else:
                            break
                        if formal_kinds[fi] != nodes.ARG_STAR:
                            fi += 1
            else:
                # Assume that it is an iterable (if it isn't, there will be
                # an error later).
                while fi < nformals:
                    if formal_kinds[fi] in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT,
                                            nodes.ARG_STAR2):
                        break
                    else:
                        formal_to_actual[fi].append(ai)
                    if formal_kinds[fi] == nodes.ARG_STAR:
                        break
                    fi += 1
        elif actual_kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT):
            assert actual_names is not None, "Internal error: named kinds without names given"
            name = actual_names[ai]
            if name in formal_names:
                formal_to_actual[formal_names.index(name)].append(ai)
            elif nodes.ARG_STAR2 in formal_kinds:
                # No matching formal name -- fall into the **kwargs formal.
                formal_to_actual[formal_kinds.index(nodes.ARG_STAR2)].append(ai)
        else:
            assert actual_kind == nodes.ARG_STAR2
            actualt = get_proper_type(actual_arg_type(ai))
            if isinstance(actualt, TypedDictType):
                # The keys of a TypedDict **actual are statically known.
                for name, value in actualt.items.items():
                    if name in formal_names:
                        formal_to_actual[formal_names.index(name)].append(ai)
                    elif nodes.ARG_STAR2 in formal_kinds:
                        formal_to_actual[formal_kinds.index(nodes.ARG_STAR2)].append(ai)
            else:
                # We don't exactly know which **kwargs are provided by the
                # caller. Assume that they will fill the remaining arguments.
                for fi in range(nformals):
                    # TODO: If there are also tuple varargs, we might be missing some potential
                    #       matches if the tuple was short enough to not match everything.
                    no_certain_match = (
                        not formal_to_actual[fi]
                        or actual_kinds[formal_to_actual[fi][0]] == nodes.ARG_STAR)
                    if ((formal_names[fi]
                            and no_certain_match
                            and formal_kinds[fi] != nodes.ARG_STAR) or
                            formal_kinds[fi] == nodes.ARG_STAR2):
                        formal_to_actual[fi].append(ai)
    return formal_to_actual


def map_formals_to_actuals(actual_kinds: List[int],
                           actual_names: Optional[Sequence[Optional[str]]],
                           formal_kinds: List[int],
                           formal_names: List[Optional[str]],
                           actual_arg_type: Callable[[int], Type]) -> List[List[int]]:
    """Calculate the reverse mapping of map_actuals_to_formals."""
    formal_to_actual = map_actuals_to_formals(actual_kinds,
                                              actual_names,
                                              formal_kinds,
                                              formal_names,
                                              actual_arg_type)
    # Now reverse the mapping.
    actual_to_formal = [[] for _ in actual_kinds]  # type: List[List[int]]
    for formal, actuals in enumerate(formal_to_actual):
        for actual in actuals:
            actual_to_formal[actual].append(formal)
    return actual_to_formal


class ArgTypeExpander:
    """Utility class for mapping actual argument types to formal arguments.

    One of the main responsibilities is to expand caller tuple *args and
    TypedDict **kwargs, and to keep track of which tuple/TypedDict items
    have already been consumed.

    Example:

       def f(x: int, *args: str) -> None: ...
       f(*(1, 'x', 1.1))

    We'd call expand_actual_type three times:

      1. The first call would provide 'int' as the actual type of 'x' (from '1').
      2. The second call would provide 'str' as one of the actual types for '*args'.
      3. The third call would provide 'float' as one of the actual types for '*args'.

    A single instance can process all the arguments for a single call. Each call
    needs a separate instance since instances have per-call state.
    """

    def __init__(self) -> None:
        # Next tuple *args index to use.
        self.tuple_index = 0
self.kwargs_used = set() # type: Set[str] def expand_actual_type(self, actual_type: Type, actual_kind: int, formal_name: Optional[str], formal_kind: int) -> Type: """Return the actual (caller) type(s) of a formal argument with the given kinds. If the actual argument is a tuple *args, return the next individual tuple item that maps to the formal arg. If the actual argument is a TypedDict **kwargs, return the next matching typed dict value type based on formal argument name and kind. This is supposed to be called for each formal, in order. Call multiple times per formal if multiple actuals map to a formal. """ actual_type = get_proper_type(actual_type) if actual_kind == nodes.ARG_STAR: if isinstance(actual_type, Instance): if actual_type.type.fullname == 'builtins.list': # List *arg. return actual_type.args[0] elif actual_type.args: # TODO: Try to map type arguments to Iterable return actual_type.args[0] else: return AnyType(TypeOfAny.from_error) elif isinstance(actual_type, TupleType): # Get the next tuple item of a tuple *arg. if self.tuple_index >= len(actual_type.items): # Exhausted a tuple -- continue to the next *args. self.tuple_index = 1 else: self.tuple_index += 1 return actual_type.items[self.tuple_index - 1] else: return AnyType(TypeOfAny.from_error) elif actual_kind == nodes.ARG_STAR2: if isinstance(actual_type, TypedDictType): if formal_kind != nodes.ARG_STAR2 and formal_name in actual_type.items: # Lookup type based on keyword argument name. assert formal_name is not None else: # Pick an arbitrary item if no specified keyword is expected. formal_name = (set(actual_type.items.keys()) - self.kwargs_used).pop() self.kwargs_used.add(formal_name) return actual_type.items[formal_name] elif (isinstance(actual_type, Instance) and (actual_type.type.fullname == 'builtins.dict')): # Dict **arg. # TODO: Handle arbitrary Mapping return actual_type.args[1] else: return AnyType(TypeOfAny.from_error) else: # No translation for other kinds -- 1:1 mapping. 
class Frame:
    """A snapshot of type information at one point in a program's execution.

    A Frame records the types that expressions are known to have at that
    point -- arising either from assignments to those expressions or from
    the result of isinstance checks -- and whether the point is reachable
    at all.

    Pushing a new Frame onto the stack does not copy this information, so
    a given Frame only knows about types that were assigned while it was
    the active frame.
    """

    def __init__(self) -> None:
        # Known types, keyed by literal_hash of the expression.
        self.types = {}  # type: Dict[Key, Type]
        # True once this point is known to be unreachable.
        self.unreachable = False
        # Set only when entering a frame where reachability of contained
        # statements cannot be determined accurately.
        #
        # Long-term, mypy should be improved so this field is unnecessary.
        self.suppress_unreachable_warnings = False
class ConditionalTypeBinder:
    """Keep track of conditional types of variables.

    NB: Variables are tracked by literal expression, so it is possible
    to confuse the binder; for example,

    ```
    class A:
        a = None  # type: Union[int, str]
    x = A()
    lst = [x]
    reveal_type(x.a)      # Union[int, str]
    x.a = 1
    reveal_type(x.a)      # int
    reveal_type(lst[0].a) # Union[int, str]
    lst[0].a = 'a'
    reveal_type(x.a)      # int
    reveal_type(lst[0].a) # str
    ```
    """

    # Stored assignments for situations with tuple/list lvalue and rvalue of union type.
    # This maps an expression to a list of bound types for every item in the union type.
    type_assignments = None  # type: Optional[Assigns]

    def __init__(self) -> None:
        # The stack of frames currently used.  These map
        # literal_hash(expr) -- literals like 'foo.bar' --
        # to types. The last element of this list is the
        # top-most, current frame. Each earlier element
        # records the state as of when that frame was last
        # on top of the stack.
        self.frames = [Frame()]

        # For frames higher in the stack, we record the set of
        # Frames that can escape there, either by falling off
        # the end of the frame or by a loop control construct
        # or raised exception. The last element of self.frames
        # has no corresponding element in this list.
        self.options_on_return = []  # type: List[List[Frame]]

        # Maps literal_hash(expr) to get_declaration(expr)
        # for every expr stored in the binder
        self.declarations = {}  # type: Dict[Key, Optional[Type]]

        # Set of other keys to invalidate if a key is changed, e.g. x -> {x.a, x[0]}
        # Whenever a new key (e.g. x.a.b) is added, we update this
        self.dependencies = {}  # type: Dict[Key, Set[Key]]

        # Whether the last pop changed the newly top frame on exit
        self.last_pop_changed = False

        # Indexes of frames that a 'try', 'break' or 'continue' can jump to.
        self.try_frames = set()  # type: Set[int]
        self.break_frames = []  # type: List[int]
        self.continue_frames = []  # type: List[int]

    def _add_dependencies(self, key: Key, value: Optional[Key] = None) -> None:
        # Register 'value' (the root key) for invalidation whenever 'key'
        # or any of its subkeys changes.  The initial call passes no value,
        # so the root key itself is not added as its own dependency.
        if value is None:
            value = key
        else:
            self.dependencies.setdefault(key, set()).add(value)
        for elt in subkeys(key):
            self._add_dependencies(elt, value)

    def push_frame(self) -> Frame:
        """Push a new frame into the binder."""
        f = Frame()
        self.frames.append(f)
        self.options_on_return.append([])
        return f

    def _put(self, key: Key, type: Type, index: int = -1) -> None:
        # Record a type for 'key' in the frame at 'index' (default: topmost).
        self.frames[index].types[key] = type

    def _get(self, key: Key, index: int = -1) -> Optional[Type]:
        # Look up 'key', searching from the frame at 'index' down the stack.
        if index < 0:
            index += len(self.frames)
        for i in range(index, -1, -1):
            if key in self.frames[i].types:
                return self.frames[i].types[key]
        return None

    def put(self, expr: Expression, typ: Type) -> None:
        """Record the known type of a trackable (literal) expression."""
        if not isinstance(expr, (IndexExpr, MemberExpr, NameExpr)):
            return
        if not literal(expr):
            return
        key = literal_hash(expr)
        assert key is not None, 'Internal error: binder tried to put non-literal'
        if key not in self.declarations:
            self.declarations[key] = get_declaration(expr)
            self._add_dependencies(key)
        self._put(key, typ)

    def unreachable(self) -> None:
        self.frames[-1].unreachable = True

    def suppress_unreachable_warnings(self) -> None:
        self.frames[-1].suppress_unreachable_warnings = True

    def get(self, expr: Expression) -> Optional[Type]:
        """Return the currently bound type of expr, if any."""
        key = literal_hash(expr)
        assert key is not None, 'Internal error: binder tried to get non-literal'
        return self._get(key)

    def is_unreachable(self) -> bool:
        # TODO: Copy the value of unreachable into new frames to avoid
        #       this traversal on every statement?
        return any(f.unreachable for f in self.frames)
    def is_unreachable_warning_suppressed(self) -> bool:
        # TODO: See todo in 'is_unreachable'
        return any(f.suppress_unreachable_warnings for f in self.frames)

    def cleanse(self, expr: Expression) -> None:
        """Remove all references to a Node from the binder."""
        key = literal_hash(expr)
        assert key is not None, 'Internal error: binder tried cleanse non-literal'
        self._cleanse_key(key)

    def _cleanse_key(self, key: Key) -> None:
        """Remove all references to a key from the binder."""
        for frame in self.frames:
            if key in frame.types:
                del frame.types[key]

    def update_from_options(self, frames: List[Frame]) -> bool:
        """Update the frame to reflect that each key will be updated
        as in one of the frames.  Return whether any item changes.

        If a key is declared as AnyType, only update it if all the
        options are the same.
        """
        # Unreachable frames can't contribute any type information.
        frames = [f for f in frames if not f.unreachable]
        changed = False
        keys = set(key for f in frames for key in f.types)

        for key in keys:
            current_value = self._get(key)
            resulting_values = [f.types.get(key, current_value) for f in frames]
            if any(x is None for x in resulting_values):
                # We didn't know anything about key before
                # (current_value must be None), and we still don't
                # know anything about key in at least one possible frame.
                continue

            type = resulting_values[0]
            assert type is not None
            declaration_type = get_proper_type(self.declarations.get(key))
            if isinstance(declaration_type, AnyType):
                # At this point resulting values can't contain None, see continue above
                if not all(is_same_type(type, cast(Type, t))
                           for t in resulting_values[1:]):
                    type = AnyType(TypeOfAny.from_another_any, source_any=declaration_type)
            else:
                # Join the types from all frames into a single result type.
                for other in resulting_values[1:]:
                    assert other is not None
                    type = join_simple(self.declarations[key], type, other)
            if current_value is None or not is_same_type(type, current_value):
                self._put(key, type)
                changed = True

        self.frames[-1].unreachable = not frames
        return changed
    def pop_frame(self, can_skip: bool, fall_through: int) -> Frame:
        """Pop a frame and return it.

        See frame_context() for documentation of fall_through.
        """
        if fall_through > 0:
            self.allow_jump(-fall_through)

        result = self.frames.pop()
        options = self.options_on_return.pop()

        if can_skip:
            # The state before entering the frame is also a possible outcome.
            options.insert(0, self.frames[-1])

        self.last_pop_changed = self.update_from_options(options)

        return result

    @contextmanager
    def accumulate_type_assignments(self) -> 'Iterator[Assigns]':
        """Push a new map to collect assigned types in multiassign from union.

        If this map is not None, actual binding is deferred until all items
        in the union are processed (a union of collected items is later bound
        manually by the caller).
        """
        old_assignments = None
        if self.type_assignments is not None:
            # Support nesting: remember the outer accumulator.
            old_assignments = self.type_assignments
        self.type_assignments = defaultdict(list)
        yield self.type_assignments
        self.type_assignments = old_assignments
    def assign_type(self, expr: Expression,
                    type: Type,
                    declared_type: Optional[Type],
                    restrict_any: bool = False) -> None:
        """Narrow the binder's type for expr as a result of an assignment."""
        # We should erase last known value in binder, because if we are using it,
        # it means that the target is not final, and therefore can't hold a literal.
        type = remove_instance_last_known_values(type)

        type = get_proper_type(type)
        declared_type = get_proper_type(declared_type)

        if self.type_assignments is not None:
            # We are in a multiassign from union, defer the actual binding,
            # just collect the types.
            self.type_assignments[expr].append((type, declared_type))
            return
        if not isinstance(expr, (IndexExpr, MemberExpr, NameExpr)):
            return None
        if not literal(expr):
            return
        self.invalidate_dependencies(expr)

        if declared_type is None:
            # Not sure why this happens.  It seems to mainly happen in
            # member initialization.
            return
        if not is_subtype(type, declared_type):
            # Pretty sure this is only happens when there's a type error.

            # Ideally this function wouldn't be called if the
            # expression has a type error, though -- do other kinds of
            # errors cause this function to get called at invalid
            # times?
            return

        enclosing_type = get_proper_type(self.most_recent_enclosing_type(expr, type))
        if isinstance(enclosing_type, AnyType) and not restrict_any:
            # If x is Any and y is int, after x = y we do not infer that x is int.
            # This could be changed.
            # Instead, since we narrowed type from Any in a recent frame (probably an
            # isinstance check), but now it is reassigned, we broaden back
            # to Any (which is the most recent enclosing type)
            self.put(expr, enclosing_type)
        # As a special case, when assigning Any to a variable with a
        # declared Optional type that has been narrowed to None,
        # replace all the Nones in the declared Union type with Any.
        # This overrides the normal behavior of ignoring Any assignments to variables
        # in order to prevent false positives.
        # (See discussion in #3526)
        elif (isinstance(type, AnyType)
              and isinstance(declared_type, UnionType)
              and any(isinstance(get_proper_type(item), NoneType)
                      for item in declared_type.items)
              and isinstance(get_proper_type(self.most_recent_enclosing_type(expr, NoneType())),
                             NoneType)):
            # Replace any Nones in the union type with Any
            new_items = [type if isinstance(get_proper_type(item), NoneType) else item
                         for item in declared_type.items]
            self.put(expr, UnionType(new_items))
        elif (isinstance(type, AnyType)
              and not (isinstance(declared_type, UnionType)
                       and any(isinstance(get_proper_type(item), AnyType)
                               for item in declared_type.items))):
            # Assigning an Any value doesn't affect the type to avoid false negatives, unless
            # there is an Any item in a declared union type.
            self.put(expr, declared_type)
        else:
            self.put(expr, type)

        for i in self.try_frames:
            # XXX This should probably not copy the entire frame, but
            # just copy this variable into a single stored frame.
            self.allow_jump(i)
""" key = literal_hash(expr) assert key is not None for dep in self.dependencies.get(key, set()): self._cleanse_key(dep) def most_recent_enclosing_type(self, expr: BindableExpression, type: Type) -> Optional[Type]: type = get_proper_type(type) if isinstance(type, AnyType): return get_declaration(expr) key = literal_hash(expr) assert key is not None enclosers = ([get_declaration(expr)] + [f.types[key] for f in self.frames if key in f.types and is_subtype(type, f.types[key])]) return enclosers[-1] def allow_jump(self, index: int) -> None: # self.frames and self.options_on_return have different lengths # so make sure the index is positive if index < 0: index += len(self.options_on_return) frame = Frame() for f in self.frames[index + 1:]: frame.types.update(f.types) if f.unreachable: frame.unreachable = True self.options_on_return[index].append(frame) def handle_break(self) -> None: self.allow_jump(self.break_frames[-1]) self.unreachable() def handle_continue(self) -> None: self.allow_jump(self.continue_frames[-1]) self.unreachable() @contextmanager def frame_context(self, *, can_skip: bool, fall_through: int = 1, break_frame: int = 0, continue_frame: int = 0, try_frame: bool = False) -> Iterator[Frame]: """Return a context manager that pushes/pops frames on enter/exit. If can_skip is True, control flow is allowed to bypass the newly-created frame. If fall_through > 0, then it will allow control flow that falls off the end of the frame to escape to its ancestor `fall_through` levels higher. Otherwise control flow ends at the end of the frame. If break_frame > 0, then 'break' statements within this frame will jump out to the frame break_frame levels higher than the frame created by this call to frame_context. Similarly for continue_frame and 'continue' statements. If try_frame is true, then execution is allowed to jump at any point within the newly created frame (or its descendants) to its parent (i.e., to the frame that was on top before this call to frame_context). 
    @contextmanager
    def frame_context(self, *, can_skip: bool, fall_through: int = 1,
                      break_frame: int = 0, continue_frame: int = 0,
                      try_frame: bool = False) -> Iterator[Frame]:
        """Return a context manager that pushes/pops frames on enter/exit.

        If can_skip is True, control flow is allowed to bypass the
        newly-created frame.

        If fall_through > 0, then it will allow control flow that falls
        off the end of the frame to escape to its ancestor `fall_through`
        levels higher. Otherwise control flow ends at the end of the frame.

        If break_frame > 0, then 'break' statements within this frame
        will jump out to the frame break_frame levels higher than the
        frame created by this call to frame_context. Similarly for
        continue_frame and 'continue' statements.

        If try_frame is true, then execution is allowed to jump at any
        point within the newly created frame (or its descendants) to its
        parent (i.e., to the frame that was on top before this call to
        frame_context).

        After the context manager exits, self.last_pop_changed indicates
        whether any types changed in the newly-topmost frame as a result
        of popping this frame.
        """
        assert len(self.frames) > 1

        if break_frame:
            self.break_frames.append(len(self.frames) - break_frame)
        if continue_frame:
            self.continue_frames.append(len(self.frames) - continue_frame)
        if try_frame:
            self.try_frames.add(len(self.frames) - 1)

        new_frame = self.push_frame()
        if try_frame:
            # An exception may occur immediately
            self.allow_jump(-1)
        yield new_frame
        self.pop_frame(can_skip, fall_through)

        if break_frame:
            self.break_frames.pop()
        if continue_frame:
            self.continue_frames.pop()
        if try_frame:
            self.try_frames.remove(len(self.frames) - 1)

    @contextmanager
    def top_frame_context(self) -> Iterator[Frame]:
        """A variant of frame_context for use at the top level of
        a namespace (module, function, or class).
        """
        assert len(self.frames) == 1
        yield self.push_frame()
        self.pop_frame(True, 0)


def get_declaration(expr: BindableExpression) -> Optional[Type]:
    """Return the declared type of the variable expr refers to, if known.

    Partial types are not considered declarations; None is returned for them.
    """
    if isinstance(expr, RefExpr) and isinstance(expr.node, Var):
        type = get_proper_type(expr.node.type)
        if not isinstance(type, PartialType):
            return type
    return None
MYPYC = False if MYPYC: Bogus = FlexibleAlias[T, Any] else: Bogus = FlexibleAlias[T, T] mypy-0.761/mypy/build.py0000644€tŠÔÚ€2›s®0000041574113576752246021403 0ustar jukkaDROPBOX\Domain Users00000000000000"""Facilities to analyze entire programs, including imported modules. Parse and analyze the source files of a program in the correct order (based on file dependencies), and collect the results. This module only directs a build, which is performed in multiple passes per file. The individual passes are implemented in separate modules. The function build() is the main interface to this module. """ # TODO: More consistent terminology, e.g. path/fnam, module/id, state/file import contextlib import errno import gc import hashlib import json import os import pathlib import re import stat import sys import time import types from typing import (AbstractSet, Any, Dict, Iterable, Iterator, List, Sequence, Mapping, NamedTuple, Optional, Set, Tuple, Union, Callable, TextIO) from typing_extensions import ClassVar, Final, TYPE_CHECKING from mypy_extensions import TypedDict from mypy.nodes import MypyFile, ImportBase, Import, ImportFrom, ImportAll, SymbolTable from mypy.semanal_pass1 import SemanticAnalyzerPreAnalysis from mypy.semanal import SemanticAnalyzer import mypy.semanal_main from mypy.checker import TypeChecker from mypy.indirection import TypeIndirectionVisitor from mypy.errors import Errors, CompileError, ErrorInfo, report_internal_error from mypy.util import ( DecodeError, decode_python_encoding, is_sub_path, get_mypy_comments, module_prefix, read_py_file ) if TYPE_CHECKING: from mypy.report import Reports # Avoid unconditional slow import from mypy import moduleinfo from mypy.fixup import fixup_module from mypy.modulefinder import BuildSource, compute_search_paths, FindModuleCache, SearchPaths from mypy.nodes import Expression from mypy.options import Options from mypy.parse import parse from mypy.stats import dump_type_stats from mypy.types import Type from 
mypy.version import __version__ from mypy.plugin import Plugin, ChainedPlugin, ReportConfigContext from mypy.plugins.default import DefaultPlugin from mypy.fscache import FileSystemCache from mypy.metastore import MetadataStore, FilesystemMetadataStore, SqliteMetadataStore from mypy.typestate import TypeState, reset_global_state from mypy.renaming import VariableRenameVisitor from mypy.config_parser import parse_mypy_comments from mypy.freetree import free_tree from mypy import errorcodes as codes # Switch to True to produce debug output related to fine-grained incremental # mode only that is useful during development. This produces only a subset of # output compared to --verbose output. We use a global flag to enable this so # that it's easy to enable this when running tests. DEBUG_FINE_GRAINED = False # type: Final Graph = Dict[str, 'State'] # TODO: Get rid of BuildResult. We might as well return a BuildManager. class BuildResult: """The result of a successful build. Attributes: manager: The build manager. files: Dictionary from module name to related AST node. types: Dictionary from parse tree node to its inferred type. used_cache: Whether the build took advantage of a pre-existing cache errors: List of error messages. 
""" def __init__(self, manager: 'BuildManager', graph: Graph) -> None: self.manager = manager self.graph = graph self.files = manager.modules self.types = manager.all_types # Non-empty if export_types True in options self.used_cache = manager.cache_enabled self.errors = [] # type: List[str] # Filled in by build if desired class BuildSourceSet: """Efficiently test a file's membership in the set of build sources.""" def __init__(self, sources: List[BuildSource]) -> None: self.source_text_present = False self.source_modules = set() # type: Set[str] self.source_paths = set() # type: Set[str] for source in sources: if source.text is not None: self.source_text_present = True elif source.path: self.source_paths.add(source.path) else: self.source_modules.add(source.module) def is_source(self, file: MypyFile) -> bool: if file.path and file.path in self.source_paths: return True elif file._fullname in self.source_modules: return True elif self.source_text_present: return True else: return False def build(sources: List[BuildSource], options: Options, alt_lib_path: Optional[str] = None, flush_errors: Optional[Callable[[List[str], bool], None]] = None, fscache: Optional[FileSystemCache] = None, stdout: Optional[TextIO] = None, stderr: Optional[TextIO] = None, extra_plugins: Optional[Sequence[Plugin]] = None, ) -> BuildResult: """Analyze a program. A single call to build performs parsing, semantic analysis and optionally type checking for the program *and* all imported modules, recursively. Return BuildResult if successful or only non-blocking errors were found; otherwise raise CompileError. If a flush_errors callback is provided, all error messages will be passed to it and the errors and messages fields of BuildResult and CompileError (respectively) will be empty. Otherwise those fields will report any error messages. 
def _build(sources: List[BuildSource],
           options: Options,
           alt_lib_path: Optional[str],
           flush_errors: Callable[[List[str], bool], None],
           fscache: Optional[FileSystemCache],
           stdout: TextIO,
           stderr: TextIO,
           extra_plugins: Sequence[Plugin],
           ) -> BuildResult:
    """Perform the actual build; see build() for the public entry point."""
    # This seems the most reasonable place to tune garbage collection.
    gc.set_threshold(150 * 1000)

    data_dir = default_data_dir()
    fscache = fscache or FileSystemCache()

    search_paths = compute_search_paths(sources, options, data_dir, alt_lib_path)

    reports = None
    if options.report_dirs:
        # Import lazily to avoid slowing down startup.
        from mypy.report import Reports  # noqa
        reports = Reports(data_dir, options.report_dirs)

    source_set = BuildSourceSet(sources)
    cached_read = fscache.read
    errors = Errors(options.show_error_context,
                    options.show_column_numbers,
                    options.show_error_codes,
                    options.pretty,
                    lambda path: read_py_file(path, cached_read, options.python_version),
                    options.show_absolute_path)
    plugin, snapshot = load_plugins(options, errors, stdout, extra_plugins)

    # Construct a build manager object to hold state during the build.
    #
    # Ignore current directory prefix in error messages.
    manager = BuildManager(data_dir, search_paths,
                           ignore_prefix=os.getcwd(),
                           source_set=source_set,
                           reports=reports,
                           options=options,
                           version_id=__version__,
                           plugin=plugin,
                           plugins_snapshot=snapshot,
                           errors=errors,
                           flush_errors=flush_errors,
                           fscache=fscache,
                           stdout=stdout,
                           stderr=stderr)
    manager.trace(repr(options))

    reset_global_state()
    try:
        graph = dispatch(sources, manager, stdout)
        if not options.fine_grained_incremental:
            TypeState.reset_all_subtype_caches()
        return BuildResult(manager, graph)
    finally:
        # Always commit the cache and report stats, even on CompileError.
        t0 = time.time()
        manager.metastore.commit()
        manager.add_stats(cache_commit_time=time.time() - t0)
        manager.log("Build finished in %.3f seconds with %d modules, and %d errors" %
                    (time.time() - manager.start_time,
                     len(manager.modules),
                     manager.errors.num_messages()))
        manager.dump_stats()
        if reports is not None:
            # Finish the HTML or XML reports even if CompileError was raised.
            reports.finish()


def default_data_dir() -> str:
    """Returns directory containing typeshed directory."""
    return os.path.dirname(__file__)


def normpath(path: str, options: Options) -> str:
    """Convert path to absolute; but to relative in bazel mode.

    (Bazel's distributed cache doesn't like filesystem metadata to
    end up in output files.)
    """
    # TODO: Could we always use relpath?  (A worry in non-bazel
    # mode would be that a moved file may change its full module
    # name without changing its size, mtime or hash.)
    if options.bazel:
        return os.path.relpath(path)
    else:
        return os.path.abspath(path)
if options.bazel: return os.path.relpath(path) else: return os.path.abspath(path) CacheMeta = NamedTuple('CacheMeta', [('id', str), ('path', str), ('mtime', int), ('size', int), ('hash', str), ('dependencies', List[str]), # names of imported modules ('data_mtime', int), # mtime of data_json ('data_json', str), # path of .data.json ('suppressed', List[str]), # dependencies that weren't imported ('options', Optional[Dict[str, object]]), # build options # dep_prios and dep_lines are in parallel with # dependencies + suppressed. ('dep_prios', List[int]), ('dep_lines', List[int]), ('interface_hash', str), # hash representing the public interface ('version_id', str), # mypy version for cache invalidation ('ignore_all', bool), # if errors were ignored ('plugin_data', Any), # config data from plugins ]) # NOTE: dependencies + suppressed == all reachable imports; # suppressed contains those reachable imports that were prevented by # silent mode or simply not found. # Metadata for the fine-grained dependencies file associated with a module. FgDepMeta = TypedDict('FgDepMeta', {'path': str, 'mtime': int}) def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta: """Build a CacheMeta object from a json metadata dictionary Args: meta: JSON metadata read from the metadata cache file data_json: Path to the .data.json file containing the AST trees """ sentinel = None # type: Any # Values to be validated by the caller return CacheMeta( meta.get('id', sentinel), meta.get('path', sentinel), int(meta['mtime']) if 'mtime' in meta else sentinel, meta.get('size', sentinel), meta.get('hash', sentinel), meta.get('dependencies', []), int(meta['data_mtime']) if 'data_mtime' in meta else sentinel, data_json, meta.get('suppressed', []), meta.get('options'), meta.get('dep_prios', []), meta.get('dep_lines', []), meta.get('interface_hash', ''), meta.get('version_id', sentinel), meta.get('ignore_all', True), meta.get('plugin_data', None), ) # Priorities used for imports. 
# (Here, top-level includes inside a class.)
# These are used to determine a more predictable order in which the
# nodes in an import cycle are processed.
PRI_HIGH = 5  # type: Final  # top-level "from X import blah"
PRI_MED = 10  # type: Final  # top-level "import X"
PRI_LOW = 20  # type: Final  # either form inside a function
PRI_MYPY = 25  # type: Final  # inside "if MYPY" or "if TYPE_CHECKING"
PRI_INDIRECT = 30  # type: Final  # an indirect dependency
PRI_ALL = 99  # type: Final  # include all priorities


def import_priority(imp: ImportBase, toplevel_priority: int) -> int:
    """Compute import priority from an import node."""
    if not imp.is_top_level:
        # Inside a function
        return PRI_LOW
    if imp.is_mypy_only:
        # Inside "if MYPY" or "if typing.TYPE_CHECKING"
        return max(PRI_MYPY, toplevel_priority)
    # A regular import; priority determined by argument.
    return toplevel_priority


def load_plugins_from_config(
    options: Options, errors: Errors, stdout: TextIO
) -> Tuple[List[Plugin], Dict[str, str]]:
    """Load all configured plugins.

    Return a list of all the loaded plugins from the config file.
    The second return value is a snapshot of versions/hashes of loaded user
    plugins (for cache validation).
    """
    import importlib
    snapshot = {}  # type: Dict[str, str]

    if not options.config_file:
        return [], snapshot

    line = find_config_file_line_number(options.config_file, 'mypy', 'plugins')
    if line == -1:
        line = 1  # We need to pick some line number that doesn't look too confusing

    def plugin_error(message: str) -> None:
        # Report a blocking error attributed to the config file's plugins line.
        errors.report(line, 0, message)
        errors.raise_error(use_stdout=False)

    custom_plugins = []  # type: List[Plugin]
    errors.set_file(options.config_file, None)
    for plugin_path in options.plugins:
        func_name = 'plugin'
        plugin_dir = None  # type: Optional[str]
        # An entry may be "path/to/plugin.py:entry_point_name".
        if ':' in os.path.basename(plugin_path):
            plugin_path, func_name = plugin_path.rsplit(':', 1)
        if plugin_path.endswith('.py'):
            # Plugin paths can be relative to the config file location.
            plugin_path = os.path.join(os.path.dirname(options.config_file), plugin_path)
            if not os.path.isfile(plugin_path):
                plugin_error("Can't find plugin '{}'".format(plugin_path))
            # Use an absolute path to avoid populating the cache entry
            # for 'tmp' during tests, since it will be different in
            # different tests.
            plugin_dir = os.path.abspath(os.path.dirname(plugin_path))
            fnam = os.path.basename(plugin_path)
            module_name = fnam[:-3]
            sys.path.insert(0, plugin_dir)
        elif re.search(r'[\\/]', plugin_path):
            fnam = os.path.basename(plugin_path)
            plugin_error("Plugin '{}' does not have a .py extension".format(fnam))
        else:
            module_name = plugin_path

        try:
            module = importlib.import_module(module_name)
        except Exception as exc:
            plugin_error("Error importing plugin '{}': {}".format(plugin_path, exc))
        finally:
            # Undo the sys.path manipulation even if the import failed.
            if plugin_dir is not None:
                assert sys.path[0] == plugin_dir
                del sys.path[0]

        if not hasattr(module, func_name):
            plugin_error('Plugin \'{}\' does not define entry point function "{}"'.format(
                plugin_path, func_name))

        try:
            plugin_type = getattr(module, func_name)(__version__)
        except Exception:
            print('Error calling the plugin(version) entry point of {}\n'.format(plugin_path),
                  file=stdout)
            raise  # Propagate to display traceback

        if not isinstance(plugin_type, type):
            plugin_error(
                'Type object expected as the return value of "plugin"; got {!r} (in {})'.format(
                    plugin_type, plugin_path))
        if not issubclass(plugin_type, Plugin):
            plugin_error(
                'Return value of "plugin" must be a subclass of "mypy.plugin.Plugin" '
                '(in {})'.format(plugin_path))
        try:
            custom_plugins.append(plugin_type(options))
            snapshot[module_name] = take_module_snapshot(module)
        except Exception:
            print('Error constructing plugin instance of {}\n'.format(plugin_type.__name__),
                  file=stdout)
            raise  # Propagate to display traceback
    return custom_plugins, snapshot


def load_plugins(options: Options,
                 errors: Errors,
                 stdout: TextIO,
                 extra_plugins: Sequence[Plugin],
                 ) -> Tuple[Plugin, Dict[str, str]]:
    """Load all configured plugins.

    Return a plugin that encapsulates all plugins chained together. Always
    at least include the default plugin (it's last in the chain).
    The second return value is a snapshot of versions/hashes of loaded user
    plugins (for cache validation).
    """
    custom_plugins, snapshot = load_plugins_from_config(options, errors, stdout)

    custom_plugins += extra_plugins

    default_plugin = DefaultPlugin(options)  # type: Plugin
    if not custom_plugins:
        return default_plugin, snapshot

    # Custom plugins take precedence over the default plugin.
    return ChainedPlugin(options, custom_plugins + [default_plugin]), snapshot


def take_module_snapshot(module: types.ModuleType) -> str:
    """Take plugin module snapshot by recording its version and hash.

    We record _both_ hash and the version to detect more possible changes
    (e.g. if there is a change in modules imported by a plugin).
    """
    if hasattr(module, '__file__'):
        with open(module.__file__, 'rb') as f:
            digest = hashlib.md5(f.read()).hexdigest()
    else:
        # E.g. a namespace package or built-in module without a file.
        digest = 'unknown'
    ver = getattr(module, '__version__', 'none')
    return '{}:{}'.format(ver, digest)


def find_config_file_line_number(path: str, section: str, setting_name: str) -> int:
    """Return the approximate location of setting_name within mypy config file.

    Return -1 if can't determine the line unambiguously.
    """
    in_desired_section = False
    try:
        results = []
        with open(path) as f:
            for i, line in enumerate(f):
                line = line.strip()
                if line.startswith('[') and line.endswith(']'):
                    current_section = line[1:-1].strip()
                    in_desired_section = (current_section == section)
                elif in_desired_section and re.match(r'{}\s*='.format(setting_name), line):
                    results.append(i + 1)
        if len(results) == 1:
            return results[0]
    except OSError:
        # Best effort: a missing/unreadable config file yields -1.
        pass
    return -1


class BuildManager:
    """This class holds shared state for building a mypy program.

    It is used to coordinate parsing, import processing, semantic
    analysis and type checking.  The actual build steps are carried
    out by dispatch().

    Attributes:
      data_dir:        Mypy data directory (contains stubs)
      search_paths:    SearchPaths instance indicating where to look for modules
      modules:         Mapping of module ID to MypyFile (shared by the passes)
      semantic_analyzer:
                       Semantic analyzer, pass 2
      semantic_analyzer_pass3:
                       Semantic analyzer, pass 3
      all_types:       Map {Expression: Type} from all modules (enabled by export_types)
      options:         Build options
      missing_modules: Set of modules that could not be imported encountered so far
      stale_modules:   Set of modules that needed to be rechecked (only used by tests)
      fg_deps_meta:    Metadata for fine-grained dependencies caches associated with modules
      fg_deps:         A fine-grained dependency map
      version_id:      The current mypy version (based on commit id when possible)
      plugin:          Active mypy plugin(s)
      plugins_snapshot:
                       Snapshot of currently active user plugins (versions and hashes)
      old_plugins_snapshot:
                       Plugins snapshot from previous incremental run (or None in
                       non-incremental mode and if cache was not found)
      errors:          Used for reporting all errors
      flush_errors:    A function for processing errors after each SCC
      cache_enabled:   Whether cache is being read. This is set based on options,
                       but is disabled if fine-grained cache loading fails
                       and after an initial fine-grained load. This doesn't
                       determine whether we write cache files or not.
      quickstart_state:
                       A cache of filename -> mtime/size/hash info used to
                       avoid needing to hash source files when using a cache
                       with mismatching mtimes
      stats:           Dict with various instrumentation numbers, it is used
                       not only for debugging, but also required for correctness,
                       in particular to check consistency of the fine-grained
                       dependency cache.
      fscache:         A file system cacher
    """

    def __init__(self, data_dir: str,
                 search_paths: SearchPaths,
                 ignore_prefix: str,
                 source_set: BuildSourceSet,
                 reports: 'Optional[Reports]',
                 options: Options,
                 version_id: str,
                 plugin: Plugin,
                 plugins_snapshot: Dict[str, str],
                 errors: Errors,
                 flush_errors: Callable[[List[str], bool], None],
                 fscache: FileSystemCache,
                 stdout: TextIO,
                 stderr: TextIO,
                 ) -> None:
        self.stats = {}  # type: Dict[str, Any]  # Values are ints or floats
        self.stdout = stdout
        self.stderr = stderr
        self.start_time = time.time()
        self.data_dir = data_dir
        self.errors = errors
        self.errors.set_ignore_prefix(ignore_prefix)
        self.search_paths = search_paths
        self.source_set = source_set
        self.reports = reports
        self.options = options
        self.version_id = version_id
        self.modules = {}  # type: Dict[str, MypyFile]
        self.missing_modules = set()  # type: Set[str]
        self.fg_deps_meta = {}  # type: Dict[str, FgDepMeta]
        # fg_deps holds the dependencies of every module that has been
        # processed. We store this in BuildManager so that we can compute
        # dependencies as we go, which allows us to free ASTs and type information,
        # saving a ton of memory on net.
        self.fg_deps = {}  # type: Dict[str, Set[str]]
        # Always convert the plugin to a ChainedPlugin so that it can be manipulated if needed
        if not isinstance(plugin, ChainedPlugin):
            plugin = ChainedPlugin(options, [plugin])
        self.plugin = plugin
        # Set of namespaces (module or class) that are being populated during semantic
        # analysis and may have missing definitions.
        self.incomplete_namespaces = set()  # type: Set[str]
        self.semantic_analyzer = SemanticAnalyzer(
            self.modules,
            self.missing_modules,
            self.incomplete_namespaces,
            self.errors,
            self.plugin)
        self.all_types = {}  # type: Dict[Expression, Type]  # Enabled by export_types
        self.indirection_detector = TypeIndirectionVisitor()
        self.stale_modules = set()  # type: Set[str]
        self.rechecked_modules = set()  # type: Set[str]
        self.flush_errors = flush_errors
        has_reporters = reports is not None and reports.reporters
        self.cache_enabled = (options.incremental
                              and (not options.fine_grained_incremental
                                   or options.use_fine_grained_cache)
                              and not has_reporters)
        self.fscache = fscache
        self.find_module_cache = FindModuleCache(self.search_paths, self.fscache, self.options)
        self.metastore = create_metastore(options)

        # a mapping from source files to their corresponding shadow files
        # for efficient lookup
        self.shadow_map = {}  # type: Dict[str, str]
        if self.options.shadow_file is not None:
            self.shadow_map = {source_file: shadow_file
                               for (source_file, shadow_file)
                               in self.options.shadow_file}
        # a mapping from each file being typechecked to its possible shadow file
        self.shadow_equivalence_map = {}  # type: Dict[str, Optional[str]]
        self.plugin = plugin
        self.plugins_snapshot = plugins_snapshot
        self.old_plugins_snapshot = read_plugins_snapshot(self)
        self.quickstart_state = read_quickstart_file(options, self.stdout)
        # Fine grained targets (module top levels and top level functions) processed by
        # the semantic analyzer, used only for testing. Currently used only by the new
        # semantic analyzer.
        self.processed_targets = []  # type: List[str]

    def dump_stats(self) -> None:
        # Print instrumentation numbers when --dump-build-stats was given.
        if self.options.dump_build_stats:
            print("Stats:")
            for key, value in sorted(self.stats_summary().items()):
                print("{:24}{}".format(key + ":", value))

    def use_fine_grained_cache(self) -> bool:
        return self.cache_enabled and self.options.use_fine_grained_cache

    def maybe_swap_for_shadow_path(self, path: str) -> str:
        # Return the shadow file for path, if one was configured; else path.
        if not self.shadow_map:
            return path

        path = normpath(path, self.options)

        # Cache the (possibly negative) lookup so samefile() runs once per path.
        previously_checked = path in self.shadow_equivalence_map
        if not previously_checked:
            for source, shadow in self.shadow_map.items():
                if self.fscache.samefile(path, source):
                    self.shadow_equivalence_map[path] = shadow
                    break
                else:
                    self.shadow_equivalence_map[path] = None

        shadow_file = self.shadow_equivalence_map.get(path)
        return shadow_file if shadow_file else path

    def get_stat(self, path: str) -> os.stat_result:
        return self.fscache.stat(self.maybe_swap_for_shadow_path(path))

    def getmtime(self, path: str) -> int:
        """Return a file's mtime; but 0 in bazel mode.

        (Bazel's distributed cache doesn't like filesystem metadata to
        end up in output files.)
        """
        if self.options.bazel:
            return 0
        else:
            return int(self.metastore.getmtime(path))

    def all_imported_modules_in_file(self,
                                     file: MypyFile) -> List[Tuple[int, str, int]]:
        """Find all reachable import statements in a file.

        Return list of tuples (priority, module id, import line number)
        for all modules imported in file; lower numbers == higher priority.

        Can generate blocking errors on bogus relative imports.
        """

        def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
            """Function to correct for relative imports."""
            file_id = file.fullname
            rel = imp.relative
            if rel == 0:
                return imp.id
            if os.path.basename(file.path).startswith('__init__.'):
                rel -= 1
            if rel != 0:
                file_id = ".".join(file_id.split(".")[:-rel])
            new_id = file_id + "." + imp.id if imp.id else file_id

            if not new_id:
                self.errors.set_file(file.path, file.name)
                self.errors.report(imp.line, 0,
                                   "No parent module -- cannot perform relative import",
                                   blocker=True)

            return new_id

        res = []  # type: List[Tuple[int, str, int]]
        for imp in file.imports:
            if not imp.is_unreachable:
                if isinstance(imp, Import):
                    pri = import_priority(imp, PRI_MED)
                    ancestor_pri = import_priority(imp, PRI_LOW)
                    for id, _ in imp.ids:
                        # We append the target (e.g. foo.bar.baz)
                        # before the ancestors (e.g. foo and foo.bar)
                        # so that, if FindModuleCache finds the target
                        # module in a package marked with py.typed
                        # underneath a namespace package installed in
                        # site-packages, (gasp), that cache's
                        # knowledge of the ancestors can be primed
                        # when it is asked to find the target.
                        res.append((pri, id, imp.line))
                        ancestor_parts = id.split(".")[:-1]
                        ancestors = []
                        for part in ancestor_parts:
                            ancestors.append(part)
                            res.append((ancestor_pri, ".".join(ancestors), imp.line))
                elif isinstance(imp, ImportFrom):
                    cur_id = correct_rel_imp(imp)
                    pos = len(res)
                    all_are_submodules = True
                    # Also add any imported names that are submodules.
                    pri = import_priority(imp, PRI_MED)
                    for name, __ in imp.names:
                        sub_id = cur_id + '.' + name
                        if self.is_module(sub_id):
                            res.append((pri, sub_id, imp.line))
                        else:
                            all_are_submodules = False
                    # Add cur_id as a dependency, even if all of the
                    # imports are submodules. Processing import from will try
                    # to look through cur_id, so we should depend on it.
                    # As a workaround for some bugs in cycle handling (#4498),
                    # if all of the imports are submodules, do the import at a lower
                    # priority.
                    pri = import_priority(imp, PRI_HIGH if not all_are_submodules else PRI_LOW)
                    res.insert(pos, ((pri, cur_id, imp.line)))
                elif isinstance(imp, ImportAll):
                    pri = import_priority(imp, PRI_HIGH)
                    res.append((pri, correct_rel_imp(imp), imp.line))

        return res

    def is_module(self, id: str) -> bool:
        """Is there a file in the file system corresponding to module id?"""
        return find_module_simple(id, self) is not None

    def parse_file(self, id: str, path: str, source: str,
                   ignore_errors: bool) -> MypyFile:
        """Parse the source of a file with the given name.

        Raise CompileError if there is a parse error.
        """
        t0 = time.time()
        tree = parse(source, path, id, self.errors, options=self.options)
        tree._fullname = id
        self.add_stats(files_parsed=1,
                       modules_parsed=int(not tree.is_stub),
                       stubs_parsed=int(tree.is_stub),
                       parse_time=time.time() - t0)

        if self.errors.is_blockers():
            self.log("Bailing due to parse errors")
            self.errors.raise_error()

        self.errors.set_file_ignored_lines(path, tree.ignored_lines, ignore_errors)
        return tree

    def load_fine_grained_deps(self, id: str) -> Dict[str, Set[str]]:
        """Load cached fine-grained dependencies for a module (empty if none cached)."""
        t0 = time.time()
        if id in self.fg_deps_meta:
            # TODO: Assert deps file wasn't changed.
            deps = json.loads(self.metastore.read(self.fg_deps_meta[id]['path']))
        else:
            deps = {}
        # JSON stores the target lists; convert them back to sets.
        val = {k: set(v) for k, v in deps.items()}
        self.add_stats(load_fg_deps_time=time.time() - t0)
        return val

    def report_file(self,
                    file: MypyFile,
                    type_map: Dict[Expression, Type],
                    options: Options) -> None:
        if self.reports is not None and self.source_set.is_source(file):
            self.reports.file(file, self.modules, type_map, options)

    def verbosity(self) -> int:
        return self.options.verbosity

    def log(self, *message: str) -> None:
        if self.verbosity() >= 1:
            if message:
                print('LOG: ', *message, file=self.stderr)
            else:
                print(file=self.stderr)
            self.stderr.flush()

    def log_fine_grained(self, *message: str) -> None:
        import mypy.build
        if self.verbosity() >= 1:
            self.log('fine-grained:', *message)
        elif mypy.build.DEBUG_FINE_GRAINED:
            # Output log in a simplified format that is quick to browse.
            if message:
                print(*message, file=self.stderr)
            else:
                print(file=self.stderr)
            self.stderr.flush()

    def trace(self, *message: str) -> None:
        if self.verbosity() >= 2:
            print('TRACE:', *message, file=self.stderr)
            self.stderr.flush()

    def add_stats(self, **kwds: Any) -> None:
        # Accumulate (sum) each named counter into self.stats.
        for key, value in kwds.items():
            if key in self.stats:
                self.stats[key] += value
            else:
                self.stats[key] = value

    def stats_summary(self) -> Mapping[str, object]:
        return self.stats


def deps_to_json(x: Dict[str, Set[str]]) -> str:
    """Serialize a fine-grained dependency map to JSON (sets become lists)."""
    return json.dumps({k: list(v) for k, v in x.items()})


# File for storing metadata about all the fine-grained dependency caches
DEPS_META_FILE = '@deps.meta.json'  # type: Final
# File for storing fine-grained dependencies that didn't have a parent in the build
DEPS_ROOT_FILE = '@root.deps.json'  # type: Final

# The name of the fake module used to store fine-grained dependencies that
# have no other place to go.
FAKE_ROOT_MODULE = '@root'  # type: Final


def write_deps_cache(rdeps: Dict[str, Dict[str, Set[str]]],
                     manager: BuildManager, graph: Graph) -> None:
    """Write cache files for fine-grained dependencies.

    Serialize fine-grained dependencies map for fine grained mode.

    Dependencies on some module 'm' is stored in the dependency cache
    file m.deps.json.  This entails some spooky action at a distance:
    if module 'n' depends on 'm', that produces entries in m.deps.json.
    When there is a dependency on a module that does not exist in the
    build, it is stored with its first existing parent module. If no
    such module exists, it is stored with the fake module FAKE_ROOT_MODULE.

    This means that the validity of the fine-grained dependency caches
    are a global property, so we store validity checking information for
    fine-grained dependencies in a global cache file:
     * We take a snapshot of current sources to later check consistency
       between the fine-grained dependency cache and module cache metadata
     * We store the mtime of all of the dependency files to verify they
       haven't changed
    """
    metastore = manager.metastore

    error = False

    fg_deps_meta = manager.fg_deps_meta.copy()

    for id in rdeps:
        if id != FAKE_ROOT_MODULE:
            _, _, deps_json = get_cache_names(id, graph[id].xpath, manager.options)
        else:
            deps_json = DEPS_ROOT_FILE
        assert deps_json
        manager.log("Writing deps cache", deps_json)
        if not manager.metastore.write(deps_json, deps_to_json(rdeps[id])):
            manager.log("Error writing fine-grained deps JSON file {}".format(deps_json))
            error = True
        else:
            fg_deps_meta[id] = {'path': deps_json, 'mtime': manager.getmtime(deps_json)}

    meta_snapshot = {}  # type: Dict[str, str]
    for id, st in graph.items():
        # If we didn't parse a file (so it doesn't have a
        # source_hash), then it must be a module with a fresh cache,
        # so use the hash from that.
        if st.source_hash:
            hash = st.source_hash
        else:
            assert st.meta, "Module must be either parsed or cached"
            hash = st.meta.hash
        meta_snapshot[id] = hash

    meta = {'snapshot': meta_snapshot, 'deps_meta': fg_deps_meta}

    if not metastore.write(DEPS_META_FILE, json.dumps(meta)):
        manager.log("Error writing fine-grained deps meta JSON file {}".format(DEPS_META_FILE))
        error = True

    if error:
        manager.errors.set_file(_cache_dir_prefix(manager.options), None)
        manager.errors.report(0, 0, "Error writing fine-grained dependencies cache",
                              blocker=True)


def invert_deps(deps: Dict[str, Set[str]],
                graph: Graph) -> Dict[str, Dict[str, Set[str]]]:
    """Splits fine-grained dependencies based on the module of the trigger.

    Returns a dictionary from module ids to all dependencies on that
    module. Dependencies not associated with a module in the build will be
    associated with the nearest parent module that is in the build, or the
    fake module FAKE_ROOT_MODULE if none are.
    """
    # Lazy import to speed up startup
    from mypy.server.target import trigger_to_target

    # Prepopulate the map for all the modules that have been processed,
    # so that we always generate files for processed modules (even if
    # there aren't any dependencies to them.)
    rdeps = {id: {} for id, st in graph.items() if st.tree}  # type: Dict[str, Dict[str, Set[str]]]
    for trigger, targets in deps.items():
        module = module_prefix(graph, trigger_to_target(trigger))
        if not module or not graph[module].tree:
            module = FAKE_ROOT_MODULE

        mod_rdeps = rdeps.setdefault(module, {})
        mod_rdeps.setdefault(trigger, set()).update(targets)

    return rdeps


def generate_deps_for_cache(manager: BuildManager,
                            graph: Graph) -> Dict[str, Dict[str, Set[str]]]:
    """Generate fine-grained dependencies into a form suitable for serializing.

    This does a couple things:
    1. Splits fine-grained deps based on the module of the trigger
    2. For each module we generated fine-grained deps for, load any previous
       deps and merge them in.

    Returns a dictionary from module ids to all dependencies on that
    module. Dependencies not associated with a module in the build will be
    associated with the nearest parent module that is in the build, or the
    fake module FAKE_ROOT_MODULE if none are.
    """
    from mypy.server.deps import merge_dependencies  # Lazy import to speed up startup

    # Split the dependencies out based on the module that is depended on.
    rdeps = invert_deps(manager.fg_deps, graph)

    # We can't just clobber existing dependency information, so we
    # load the deps for every module we've generated new dependencies
    # to and merge the new deps into them.
    for module, mdeps in rdeps.items():
        old_deps = manager.load_fine_grained_deps(module)
        merge_dependencies(old_deps, mdeps)

    return rdeps


PLUGIN_SNAPSHOT_FILE = '@plugins_snapshot.json'  # type: Final


def write_plugins_snapshot(manager: BuildManager) -> None:
    """Write snapshot of versions and hashes of currently active plugins."""
    if not manager.metastore.write(PLUGIN_SNAPSHOT_FILE,
                                   json.dumps(manager.plugins_snapshot)):
        manager.errors.set_file(_cache_dir_prefix(manager.options), None)
        manager.errors.report(0, 0, "Error writing plugins snapshot",
                              blocker=True)


def read_plugins_snapshot(manager: BuildManager) -> Optional[Dict[str, str]]:
    """Read cached snapshot of versions and hashes of plugins from previous run."""
    snapshot = _load_json_file(PLUGIN_SNAPSHOT_FILE, manager,
                               log_sucess='Plugins snapshot ',
                               log_error='Could not load plugins snapshot: ')
    if snapshot is None:
        return None
    if not isinstance(snapshot, dict):
        manager.log('Could not load plugins snapshot: cache is not a dict: {}'
                    .format(type(snapshot)))
        return None
    return snapshot


def read_quickstart_file(options: Options,
                         stdout: TextIO,
                         ) -> Optional[Dict[str, Tuple[float, int, str]]]:
    """Read the quickstart file, mapping file name -> (mtime, size, hash).

    Returns None when no quickstart file was configured or it could not be read.
    """
    quickstart = None  # type: Optional[Dict[str, Tuple[float, int, str]]]
    if options.quickstart_file:
        # This is very "best effort". If the file is missing or malformed,
        # just ignore it.
        raw_quickstart = {}  # type: Dict[str, Any]
        try:
            with open(options.quickstart_file, "r") as f:
                raw_quickstart = json.load(f)

            quickstart = {}
            for file, (x, y, z) in raw_quickstart.items():
                quickstart[file] = (x, y, z)
        except Exception as e:
            print("Warning: Failed to load quickstart file: {}\n".format(str(e)), file=stdout)
    return quickstart


def read_deps_cache(manager: BuildManager,
                    graph: Graph) -> Optional[Dict[str, FgDepMeta]]:
    """Read and validate the fine-grained dependencies cache.

    See the write_deps_cache documentation for more information on
    the details of the cache.

    Returns None if the cache was invalid in some way.
    """
    deps_meta = _load_json_file(DEPS_META_FILE, manager,
                                log_sucess='Deps meta ',
                                log_error='Could not load fine-grained dependency metadata: ')
    if deps_meta is None:
        return None
    meta_snapshot = deps_meta['snapshot']
    # Take a snapshot of the source hashes from all of the metas we found.
    # (Including the ones we rejected because they were out of date.)
    # We use this to verify that they match up with the proto_deps.
    current_meta_snapshot = {id: st.meta_source_hash for id, st in graph.items()
                             if st.meta_source_hash is not None}

    common = set(meta_snapshot.keys()) & set(current_meta_snapshot.keys())
    if any(meta_snapshot[id] != current_meta_snapshot[id] for id in common):
        # TODO: invalidate also if options changed (like --strict-optional)?
        manager.log('Fine-grained dependencies cache inconsistent, ignoring')
        return None

    module_deps_metas = deps_meta['deps_meta']
    if not manager.options.skip_cache_mtime_checks:
        for id, meta in module_deps_metas.items():
            try:
                matched = manager.getmtime(meta['path']) == meta['mtime']
            except FileNotFoundError:
                matched = False
            if not matched:
                manager.log('Invalid or missing fine-grained deps cache: {}'.format(meta['path']))
                return None

    return module_deps_metas


def _load_json_file(file: str, manager: BuildManager,
                    log_sucess: str, log_error: str) -> Optional[Dict[str, Any]]:
    """A simple helper to read a JSON file with logging."""
    t0 = time.time()
    try:
        data = manager.metastore.read(file)
    except IOError:
        manager.log(log_error + file)
        return None
    manager.add_stats(metastore_read_time=time.time() - t0)
    # Only bother to compute the log message if we are logging it, since it could be big
    if manager.verbosity() >= 2:
        manager.trace(log_sucess + data.rstrip())
    try:
        result = json.loads(data)
    except ValueError:  # TODO: JSONDecodeError in 3.5
        manager.errors.set_file(file, None)
        manager.errors.report(-1, -1,
                              "Error reading JSON file;"
                              " you likely have a bad cache.\n"
                              "Try removing the {cache_dir} directory"
                              " and run mypy again.".format(
                                  cache_dir=manager.options.cache_dir
                              ),
                              blocker=True)
        return None
    else:
        return result


def _cache_dir_prefix(options: Options) -> str:
    """Get current cache directory (or file if id is given)."""
    if options.bazel:
        # This is needed so the cache map works.
        return os.curdir
    cache_dir = options.cache_dir
    pyversion = options.python_version
    base = os.path.join(cache_dir, '%d.%d' % pyversion)
    return base


def create_metastore(options: Options) -> MetadataStore:
    """Create the appropriate metadata store."""
    if options.sqlite_cache:
        return SqliteMetadataStore(_cache_dir_prefix(options))
    else:
        return FilesystemMetadataStore(_cache_dir_prefix(options))


def get_cache_names(id: str, path: str, options: Options) -> Tuple[str, str, Optional[str]]:
    """Return the file names for the cache files.

    Args:
      id: module ID
      path: module path
      cache_dir: cache directory
      pyversion: Python version (major, minor)

    Returns:
      A tuple with the file names to be used for the meta JSON, the
      data JSON, and the fine-grained deps JSON, respectively.
    """
    if options.cache_map:
        pair = options.cache_map.get(normpath(path, options))
    else:
        pair = None
    if pair is not None:
        # The cache map paths were specified relative to the base directory,
        # but the filesystem metastore APIs operates relative to the cache
        # prefix directory.
        # Solve this by rewriting the paths as relative to the root dir.
        # This only makes sense when using the filesystem backed cache.
        root = _cache_dir_prefix(options)
        return (os.path.relpath(pair[0], root), os.path.relpath(pair[1], root), None)
    prefix = os.path.join(*id.split('.'))
    is_package = os.path.basename(path).startswith('__init__.py')
    if is_package:
        prefix = os.path.join(prefix, '__init__')

    deps_json = None
    if options.cache_fine_grained:
        deps_json = prefix + '.deps.json'
    return (prefix + '.meta.json', prefix + '.data.json', deps_json)


def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[CacheMeta]:
    """Find cache data for a module.

    Args:
      id: module ID
      path: module path
      manager: the build manager (for pyversion, log/trace, and build options)

    Returns:
      A CacheMeta instance if the cache data was found and appears
      valid; otherwise None.
    """
    # TODO: May need to take more build options into account
    meta_json, data_json, _ = get_cache_names(id, path, manager.options)
    manager.trace('Looking for {} at {}'.format(id, meta_json))
    t0 = time.time()
    meta = _load_json_file(meta_json, manager,
                           log_sucess='Meta {} '.format(id),
                           log_error='Could not load cache for {}: '.format(id))
    t1 = time.time()
    if meta is None:
        return None
    if not isinstance(meta, dict):
        manager.log('Could not load cache for {}: meta cache is not a dict: {}'
                    .format(id, repr(meta)))
        return None
    m = cache_meta_from_dict(meta, data_json)
    t2 = time.time()
    manager.add_stats(load_meta_time=t2 - t0,
                      load_meta_load_time=t1 - t0,
                      load_meta_from_dict_time=t2 - t1)

    # Don't check for path match, that is dealt with in validate_meta().
    if (m.id != id or
            m.mtime is None or m.size is None or
            m.dependencies is None or m.data_mtime is None):
        manager.log('Metadata abandoned for {}: attributes are missing'.format(id))
        return None

    # Ignore cache if generated by an older mypy version.
    if ((m.version_id != manager.version_id and not manager.options.skip_version_check)
            or m.options is None
            or len(m.dependencies) + len(m.suppressed) != len(m.dep_prios)
            or len(m.dependencies) + len(m.suppressed) != len(m.dep_lines)):
        manager.log('Metadata abandoned for {}: new attributes are missing'.format(id))
        return None

    # Ignore cache if (relevant) options aren't the same.
    # Note that it's fine to mutilate cached_options since it's only used here.
    cached_options = m.options
    current_options = manager.options.clone_for_module(id).select_options_affecting_cache()
    if manager.options.skip_version_check:
        # When we're lax about version we're also lax about platform.
        cached_options['platform'] = current_options['platform']
    if 'debug_cache' in cached_options:
        # Older versions included debug_cache, but it's silly to compare it.
        del cached_options['debug_cache']
    if cached_options != current_options:
        manager.log('Metadata abandoned for {}: options differ'.format(id))
        if manager.options.verbosity >= 2:
            for key in sorted(set(cached_options) | set(current_options)):
                if cached_options.get(key) != current_options.get(key):
                    manager.trace('    {}: {} != {}'
                                  .format(key, cached_options.get(key),
                                          current_options.get(key)))
        return None
    if manager.old_plugins_snapshot and manager.plugins_snapshot:
        # Check if plugins are still the same.
        if manager.plugins_snapshot != manager.old_plugins_snapshot:
            manager.log('Metadata abandoned for {}: plugins differ'.format(id))
            return None
    # So that plugins can return data with tuples in it without
    # things silently always invalidating modules, we round-trip
    # the config data. This isn't beautiful.
    plugin_data = json.loads(json.dumps(
        manager.plugin.report_config_data(ReportConfigContext(id, path, is_check=True))
    ))
    if m.plugin_data != plugin_data:
        manager.log('Metadata abandoned for {}: plugin configuration differs'.format(id))
        return None

    manager.add_stats(fresh_metas=1)
    return m


def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
                  ignore_all: bool, manager: BuildManager) -> Optional[CacheMeta]:
    '''Checks whether the cached AST of this module can be used.

    Returns:
      None, if the cached AST is unusable.
      Original meta, if mtime/size matched.
      Meta with mtime updated to match source file, if hash/size matched but mtime/path didn't.
    '''
    # This requires two steps. The first one is obvious: we check that the module source file
    # contents is the same as it was when the cache data file was created. The second one is not
    # too obvious: we check that the cache data file mtime has not changed; it is needed because
    # we use cache data file mtime to propagate information about changes in the dependencies.
if meta is None: manager.log('Metadata not found for {}'.format(id)) return None if meta.ignore_all and not ignore_all: manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id)) return None t0 = time.time() bazel = manager.options.bazel assert path is not None, "Internal error: meta was provided without a path" if not manager.options.skip_cache_mtime_checks: # Check data_json; assume if its mtime matches it's good. # TODO: stat() errors data_mtime = manager.getmtime(meta.data_json) if data_mtime != meta.data_mtime: manager.log('Metadata abandoned for {}: data cache is modified'.format(id)) return None if bazel: # Normalize path under bazel to make sure it isn't absolute path = normpath(path, manager.options) try: st = manager.get_stat(path) except OSError: return None if not stat.S_ISREG(st.st_mode): manager.log('Metadata abandoned for {}: file {} does not exist'.format(id, path)) return None manager.add_stats(validate_stat_time=time.time() - t0) # When we are using a fine-grained cache, we want our initial # build() to load all of the cache information and then do a # fine-grained incremental update to catch anything that has # changed since the cache was generated. We *don't* want to do a # coarse-grained incremental rebuild, so we accept the cache # metadata even if it doesn't match the source file. # # We still *do* the mtime/md5 checks, however, to enable # fine-grained mode to take advantage of the mtime-updating # optimization when mtimes differ but md5s match. There is # essentially no extra time cost to computing the hash here, since # it will be cached and will be needed for finding changed files # later anyways. fine_grained_cache = manager.use_fine_grained_cache() size = st.st_size # Bazel ensures the cache is valid. if size != meta.size and not bazel and not fine_grained_cache: manager.log('Metadata abandoned for {}: file {} has different size'.format(id, path)) return None # Bazel ensures the cache is valid. 
mtime = 0 if bazel else int(st.st_mtime) if not bazel and (mtime != meta.mtime or path != meta.path): if manager.quickstart_state and path in manager.quickstart_state: # If the mtime and the size of the file recorded in the quickstart dump matches # what we see on disk, we know (assume) that the hash matches the quickstart # data as well. If that hash matches the hash in the metadata, then we know # the file is up to date even though the mtime is wrong, without needing to hash it. qmtime, qsize, qhash = manager.quickstart_state[path] if int(qmtime) == mtime and qsize == size and qhash == meta.hash: manager.log('Metadata fresh (by quickstart) for {}: file {}'.format(id, path)) meta = meta._replace(mtime=mtime, path=path) return meta t0 = time.time() try: source_hash = manager.fscache.md5(path) except (OSError, UnicodeDecodeError, DecodeError): return None manager.add_stats(validate_hash_time=time.time() - t0) if source_hash != meta.hash: if fine_grained_cache: manager.log('Using stale metadata for {}: file {}'.format(id, path)) return meta else: manager.log('Metadata abandoned for {}: file {} has different hash'.format( id, path)) return None else: t0 = time.time() # Optimization: update mtime and path (otherwise, this mismatch will reappear). meta = meta._replace(mtime=mtime, path=path) # Construct a dict we can pass to json.dumps() (compare to write_cache()). 
meta_dict = { 'id': id, 'path': path, 'mtime': mtime, 'size': size, 'hash': source_hash, 'data_mtime': meta.data_mtime, 'dependencies': meta.dependencies, 'suppressed': meta.suppressed, 'options': (manager.options.clone_for_module(id) .select_options_affecting_cache()), 'dep_prios': meta.dep_prios, 'dep_lines': meta.dep_lines, 'interface_hash': meta.interface_hash, 'version_id': manager.version_id, 'ignore_all': meta.ignore_all, 'plugin_data': meta.plugin_data, } if manager.options.debug_cache: meta_str = json.dumps(meta_dict, indent=2, sort_keys=True) else: meta_str = json.dumps(meta_dict) meta_json, _, _ = get_cache_names(id, path, manager.options) manager.log('Updating mtime for {}: file {}, meta {}, mtime {}' .format(id, path, meta_json, meta.mtime)) t1 = time.time() manager.metastore.write(meta_json, meta_str) # Ignore errors, just an optimization. manager.add_stats(validate_update_time=time.time() - t1, validate_munging_time=t1 - t0) return meta # It's a match on (id, path, size, hash, mtime). manager.log('Metadata fresh for {}: file {}'.format(id, path)) return meta def compute_hash(text: str) -> str: # We use md5 instead of the builtin hash(...) function because the output of hash(...) # can differ between runs due to hash randomization (enabled by default in Python 3.3). # See the note in https://docs.python.org/3/reference/datamodel.html#object.__hash__. return hashlib.md5(text.encode('utf-8')).hexdigest() def json_dumps(obj: Any, debug_cache: bool) -> str: if debug_cache: return json.dumps(obj, indent=2, sort_keys=True) else: return json.dumps(obj, sort_keys=True) def write_cache(id: str, path: str, tree: MypyFile, dependencies: List[str], suppressed: List[str], dep_prios: List[int], dep_lines: List[int], old_interface_hash: str, source_hash: str, ignore_all: bool, manager: BuildManager) -> Tuple[str, Optional[CacheMeta]]: """Write cache files for a module. 
Note that this mypy's behavior is still correct when any given write_cache() call is replaced with a no-op, so error handling code that bails without writing anything is okay. Args: id: module ID path: module path tree: the fully checked module data dependencies: module IDs on which this module depends suppressed: module IDs which were suppressed as dependencies dep_prios: priorities (parallel array to dependencies) dep_lines: import line locations (parallel array to dependencies) old_interface_hash: the hash from the previous version of the data cache file source_hash: the hash of the source code ignore_all: the ignore_all flag for this module manager: the build manager (for pyversion, log/trace) Returns: A tuple containing the interface hash and CacheMeta corresponding to the metadata that was written (the latter may be None if the cache could not be written). """ metastore = manager.metastore # For Bazel we use relative paths and zero mtimes. bazel = manager.options.bazel # Obtain file paths. meta_json, data_json, _ = get_cache_names(id, path, manager.options) manager.log('Writing {} {} {} {}'.format( id, path, meta_json, data_json)) # Update tree.path so that in bazel mode it's made relative (since # sometimes paths leak out). if bazel: tree.path = path # Serialize data and analyze interface data = tree.serialize() data_str = json_dumps(data, manager.options.debug_cache) interface_hash = compute_hash(data_str) plugin_data = manager.plugin.report_config_data(ReportConfigContext(id, path, is_check=False)) # Obtain and set up metadata try: st = manager.get_stat(path) except OSError as err: manager.log("Cannot get stat for {}: {}".format(path, err)) # Remove apparently-invalid cache files. # (This is purely an optimization.) for filename in [data_json, meta_json]: try: os.remove(filename) except OSError: pass # Still return the interface hash we computed. 
return interface_hash, None # Write data cache file, if applicable # Note that for Bazel we don't record the data file's mtime. if old_interface_hash == interface_hash: # If the interface is unchanged, the cached data is guaranteed # to be equivalent, and we only need to update the metadata. data_mtime = manager.getmtime(data_json) manager.trace("Interface for {} is unchanged".format(id)) else: manager.trace("Interface for {} has changed".format(id)) if not metastore.write(data_json, data_str): # Most likely the error is the replace() call # (see https://github.com/python/mypy/issues/3215). manager.log("Error writing data JSON file {}".format(data_json)) # Let's continue without writing the meta file. Analysis: # If the replace failed, we've changed nothing except left # behind an extraneous temporary file; if the replace # worked but the getmtime() call failed, the meta file # will be considered invalid on the next run because the # data_mtime field won't match the data file's mtime. # Both have the effect of slowing down the next run a # little bit due to an out-of-date cache file. return interface_hash, None data_mtime = manager.getmtime(data_json) mtime = 0 if bazel else int(st.st_mtime) size = st.st_size # Note that the options we store in the cache are the options as # specified by the command line/config file and *don't* reflect # updates made by inline config directives in the file. This is # important, or otherwise the options would never match when # verifying the cache. 
options = manager.options.clone_for_module(id) assert source_hash is not None meta = {'id': id, 'path': path, 'mtime': mtime, 'size': size, 'hash': source_hash, 'data_mtime': data_mtime, 'dependencies': dependencies, 'suppressed': suppressed, 'options': options.select_options_affecting_cache(), 'dep_prios': dep_prios, 'dep_lines': dep_lines, 'interface_hash': interface_hash, 'version_id': manager.version_id, 'ignore_all': ignore_all, 'plugin_data': plugin_data, } # Write meta cache file meta_str = json_dumps(meta, manager.options.debug_cache) if not metastore.write(meta_json, meta_str): # Most likely the error is the replace() call # (see https://github.com/python/mypy/issues/3215). # The next run will simply find the cache entry out of date. manager.log("Error writing meta JSON file {}".format(meta_json)) return interface_hash, cache_meta_from_dict(meta, data_json) def delete_cache(id: str, path: str, manager: BuildManager) -> None: """Delete cache files for a module. The cache files for a module are deleted when mypy finds errors there. This avoids inconsistent states with cache files from different mypy runs, see #4043 for an example. """ # We don't delete .deps files on errors, since the dependencies # are mostly generated from other files and the metadata is # tracked separately. meta_path, data_path, _ = get_cache_names(id, path, manager.options) cache_paths = [meta_path, data_path] manager.log('Deleting {} {} {}'.format(id, path, " ".join(x for x in cache_paths if x))) for filename in cache_paths: try: manager.metastore.remove(filename) except OSError as e: if e.errno != errno.ENOENT: manager.log("Error deleting cache file {}: {}".format(filename, e.strerror)) """Dependency manager. Design ====== Ideally ------- A. Collapse cycles (each SCC -- strongly connected component -- becomes one "supernode"). B. Topologically sort nodes based on dependencies. C. Process from leaves towards roots. Wrinkles -------- a. 
Need to parse source modules to determine dependencies. b. Processing order for modules within an SCC. c. Must order mtimes of files to decide whether to re-process; depends on clock never resetting. d. from P import M; checks filesystem whether module P.M exists in filesystem. e. Race conditions, where somebody modifies a file while we're processing. Solved by using a FileSystemCache. Steps ----- 1. For each explicitly given module find the source file location. 2. For each such module load and check the cache metadata, and decide whether it's valid. 3. Now recursively (or iteratively) find dependencies and add those to the graph: - for cached nodes use the list of dependencies from the cache metadata (this will be valid even if we later end up re-parsing the same source); - for uncached nodes parse the file and process all imports found, taking care of (a) above. Step 3 should also address (d) above. Once step 3 terminates we have the entire dependency graph, and for each module we've either loaded the cache metadata or parsed the source code. (However, we may still need to parse those modules for which we have cache metadata but that depend, directly or indirectly, on at least one module for which the cache metadata is stale.) Now we can execute steps A-C from the first section. Finding SCCs for step A shouldn't be hard; there's a recipe here: http://code.activestate.com/recipes/578507/. There's also a plethora of topsort recipes, e.g. http://code.activestate.com/recipes/577413/. For single nodes, processing is simple. If the node was cached, we deserialize the cache data and fix up cross-references. Otherwise, we do semantic analysis followed by type checking. We also handle (c) above; if a module has valid cache data *but* any of its dependencies was processed from source, then the module should be processed from source. 
A relatively simple optimization (outside SCCs) we might do in the future is as follows: if a node's cache data is valid, but one or more of its dependencies are out of date so we have to re-parse the node from source, once we have fully type-checked the node, we can decide whether its symbol table actually changed compared to the cache data (by reading the cache data and comparing it to the data we would be writing). If there is no change we can declare the node up to date, and any node that depends (and for which we have cached data, and whose other dependencies are up to date) on it won't need to be re-parsed from source. Import cycles ------------- Finally we have to decide how to handle (c), import cycles. Here we'll need a modified version of the original state machine (build.py), but we only need to do this per SCC, and we won't have to deal with changes to the list of nodes while we're processing it. If all nodes in the SCC have valid cache metadata and all dependencies outside the SCC are still valid, we can proceed as follows: 1. Load cache data for all nodes in the SCC. 2. Fix up cross-references for all nodes in the SCC. Otherwise, the simplest (but potentially slow) way to proceed is to invalidate all cache data in the SCC and re-parse all nodes in the SCC from source. We can do this as follows: 1. Parse source for all nodes in the SCC. 2. Semantic analysis for all nodes in the SCC. 3. Type check all nodes in the SCC. (If there are more passes the process is the same -- each pass should be done for all nodes before starting the next pass for any nodes in the SCC.) We could process the nodes in the SCC in any order. For sentimental reasons, I've decided to process them in the reverse order in which we encountered them when originally constructing the graph. That's how the old build.py deals with cycles, and at least this reproduces the previous implementation more accurately. 
Can we do better than re-parsing all nodes in the SCC when any of its dependencies are out of date? It's doubtful. The optimization mentioned at the end of the previous section would require re-parsing and type-checking a node and then comparing its symbol table to the cached data; but because the node is part of a cycle we can't technically type-check it until the semantic analysis of all other nodes in the cycle has completed. (This is an important issue because Dropbox has a very large cycle in production code. But I'd like to deal with it later.) Additional wrinkles ------------------- During implementation more wrinkles were found. - When a submodule of a package (e.g. x.y) is encountered, the parent package (e.g. x) must also be loaded, but it is not strictly a dependency. See State.add_ancestors() below. """ class ModuleNotFound(Exception): """Control flow exception to signal that a module was not found.""" class State: """The state for a module. The source is only used for the -c command line option; in that case path is None. Otherwise source is None and path isn't. """ manager = None # type: BuildManager order_counter = 0 # type: ClassVar[int] order = None # type: int # Order in which modules were encountered id = None # type: str # Fully qualified module name path = None # type: Optional[str] # Path to module source xpath = None # type: str # Path or '' source = None # type: Optional[str] # Module source code source_hash = None # type: Optional[str] # Hash calculated based on the source code meta_source_hash = None # type: Optional[str] # Hash of the source given in the meta, if any meta = None # type: Optional[CacheMeta] data = None # type: Optional[str] tree = None # type: Optional[MypyFile] # We keep both a list and set of dependencies. A set because it makes it efficient to # prevent duplicates and the list because I am afraid of changing the order of # iteration over dependencies. # They should be managed with add_dependency and suppress_dependency. 
dependencies = None # type: List[str] # Modules directly imported by the module dependencies_set = None # type: Set[str] # The same but as a set for deduplication purposes suppressed = None # type: List[str] # Suppressed/missing dependencies suppressed_set = None # type: Set[str] # Suppressed/missing dependencies priorities = None # type: Dict[str, int] # Map each dependency to the line number where it is first imported dep_line_map = None # type: Dict[str, int] # Parent package, its parent, etc. ancestors = None # type: Optional[List[str]] # List of (path, line number) tuples giving context for import import_context = None # type: List[Tuple[str, int]] # The State from which this module was imported, if any caller_state = None # type: Optional[State] # If caller_state is set, the line number in the caller where the import occurred caller_line = 0 # If True, indicate that the public interface of this module is unchanged externally_same = True # Contains a hash of the public interface in incremental mode interface_hash = "" # type: str # Options, specialized for this file options = None # type: Options # Whether to ignore all errors ignore_all = False # Whether the module has an error or any of its dependencies have one. transitive_error = False # Errors reported before semantic analysis, to allow fine-grained # mode to keep reporting them. early_errors = None # type: List[ErrorInfo] # Type checker used for checking this file. Use type_checker() for # access and to construct this on demand. _type_checker = None # type: Optional[TypeChecker] fine_grained_deps_loaded = False def __init__(self, id: Optional[str], path: Optional[str], source: Optional[str], manager: BuildManager, caller_state: 'Optional[State]' = None, caller_line: int = 0, ancestor_for: 'Optional[State]' = None, root_source: bool = False, # If `temporary` is True, this State is being created to just # quickly parse/load the tree, without an intention to further # process it. 
With this flag, any changes to external state as well # as error reporting should be avoided. temporary: bool = False, ) -> None: if not temporary: assert id or path or source is not None, "Neither id, path nor source given" self.manager = manager State.order_counter += 1 self.order = State.order_counter self.caller_state = caller_state self.caller_line = caller_line if caller_state: self.import_context = caller_state.import_context[:] self.import_context.append((caller_state.xpath, caller_line)) else: self.import_context = [] self.id = id or '__main__' self.options = manager.options.clone_for_module(self.id) self.early_errors = [] self._type_checker = None if not path and source is None: assert id is not None try: path, follow_imports = find_module_and_diagnose( manager, id, self.options, caller_state, caller_line, ancestor_for, root_source, skip_diagnose=temporary) except ModuleNotFound: if not temporary: manager.missing_modules.add(id) raise if follow_imports == 'silent': self.ignore_all = True self.path = path self.xpath = path or '' if path and source is None and self.manager.fscache.isdir(path): source = '' self.source = source if path and source is None and self.manager.cache_enabled: self.meta = find_cache_meta(self.id, path, manager) # TODO: Get mtime if not cached. if self.meta is not None: self.interface_hash = self.meta.interface_hash self.meta_source_hash = self.meta.hash self.add_ancestors() t0 = time.time() self.meta = validate_meta(self.meta, self.id, self.path, self.ignore_all, manager) self.manager.add_stats(validate_meta_time=time.time() - t0) if self.meta: # Make copies, since we may modify these and want to # compare them to the originals later. 
self.dependencies = list(self.meta.dependencies) self.dependencies_set = set(self.dependencies) self.suppressed = list(self.meta.suppressed) self.suppressed_set = set(self.suppressed) all_deps = self.dependencies + self.suppressed assert len(all_deps) == len(self.meta.dep_prios) self.priorities = {id: pri for id, pri in zip(all_deps, self.meta.dep_prios)} assert len(all_deps) == len(self.meta.dep_lines) self.dep_line_map = {id: line for id, line in zip(all_deps, self.meta.dep_lines)} if temporary: self.load_tree(temporary=True) if not manager.use_fine_grained_cache(): # Special case: if there were a previously missing package imported here # and it is not present, then we need to re-calculate dependencies. # This is to support patterns like this: # from missing_package import missing_module # type: ignore # At first mypy doesn't know that `missing_module` is a module # (it may be a variable, a class, or a function), so it is not added to # suppressed dependencies. Therefore, when the package with module is added, # we need to re-calculate dependencies. # NOTE: see comment below for why we skip this in fine grained mode. if exist_added_packages(self.suppressed, manager, self.options): self.parse_file() # This is safe because the cache is anyway stale. self.compute_dependencies() else: # When doing a fine-grained cache load, pretend we only # know about modules that have cache information and defer # handling new modules until the fine-grained update. if manager.use_fine_grained_cache(): manager.log("Deferring module to fine-grained update %s (%s)" % (path, id)) raise ModuleNotFound # Parse the file (and then some) to get the dependencies. self.parse_file() self.compute_dependencies() @property def xmeta(self) -> CacheMeta: assert self.meta, "missing meta on allegedly fresh module" return self.meta def add_ancestors(self) -> None: if self.path is not None: _, name = os.path.split(self.path) base, _ = os.path.splitext(name) if '.' 
in base: # This is just a weird filename, don't add anything self.ancestors = [] return # All parent packages are new ancestors. ancestors = [] parent = self.id while '.' in parent: parent, _ = parent.rsplit('.', 1) ancestors.append(parent) self.ancestors = ancestors def is_fresh(self) -> bool: """Return whether the cache data for this file is fresh.""" # NOTE: self.dependencies may differ from # self.meta.dependencies when a dependency is dropped due to # suppression by silent mode. However when a suppressed # dependency is added back we find out later in the process. return (self.meta is not None and self.is_interface_fresh() and self.dependencies == self.meta.dependencies) def is_interface_fresh(self) -> bool: return self.externally_same def mark_as_rechecked(self) -> None: """Marks this module as having been fully re-analyzed by the type-checker.""" self.manager.rechecked_modules.add(self.id) def mark_interface_stale(self, *, on_errors: bool = False) -> None: """Marks this module as having a stale public interface, and discards the cache data.""" self.externally_same = False if not on_errors: self.manager.stale_modules.add(self.id) def check_blockers(self) -> None: """Raise CompileError if a blocking error is detected.""" if self.manager.errors.is_blockers(): self.manager.log("Bailing due to blocking errors") self.manager.errors.raise_error() @contextlib.contextmanager def wrap_context(self, check_blockers: bool = True) -> Iterator[None]: """Temporarily change the error import context to match this state. Also report an internal error if an unexpected exception was raised and raise an exception on a blocking error, unless check_blockers is False. Skipping blocking error reporting is used in the semantic analyzer so that we can report all blocking errors for a file (across multiple targets) to maintain backward compatibility. 
""" save_import_context = self.manager.errors.import_context() self.manager.errors.set_import_context(self.import_context) try: yield except CompileError: raise except Exception as err: report_internal_error(err, self.path, 0, self.manager.errors, self.options, self.manager.stdout, self.manager.stderr) self.manager.errors.set_import_context(save_import_context) # TODO: Move this away once we've removed the old semantic analyzer? if check_blockers: self.check_blockers() def load_fine_grained_deps(self) -> Dict[str, Set[str]]: return self.manager.load_fine_grained_deps(self.id) def load_tree(self, temporary: bool = False) -> None: assert self.meta is not None, "Internal error: this method must be called only" \ " for cached modules" t0 = time.time() raw = self.manager.metastore.read(self.meta.data_json) t1 = time.time() data = json.loads(raw) t2 = time.time() # TODO: Assert data file wasn't changed. self.tree = MypyFile.deserialize(data) t3 = time.time() self.manager.add_stats(data_read_time=t1 - t0, data_json_load_time=t2 - t1, deserialize_time=t3 - t2) if not temporary: self.manager.modules[self.id] = self.tree self.manager.add_stats(fresh_trees=1) def fix_cross_refs(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" # We need to set allow_missing when doing a fine grained cache # load because we need to gracefully handle missing modules. fixup_module(self.tree, self.manager.modules, self.options.use_fine_grained_cache) def fix_suppressed_dependencies(self, graph: Graph) -> None: """Corrects whether dependencies are considered stale in silent mode. This method is a hack to correct imports in silent mode + incremental mode. In particular, the problem is that when running mypy with a cold cache, the `parse_file(...)` function is called *at the start* of the `load_graph(...)` function. Note that load_graph will mark some dependencies as suppressed if they weren't specified on the command line in silent mode. 
However, if the interface for a module is changed, parse_file will be called within `process_stale_scc` -- *after* load_graph is finished, wiping out the changes load_graph previously made. This method is meant to be run after parse_file finishes in process_stale_scc and will recompute what modules should be considered suppressed in silent mode. """ # TODO: See if it's possible to move this check directly into parse_file in some way. # TODO: Find a way to write a test case for this fix. # TODO: I suspect that splitting compute_dependencies() out from parse_file # obviates the need for this but lacking a test case for the problem this fixed... silent_mode = (self.options.ignore_missing_imports or self.options.follow_imports == 'skip') if not silent_mode: return new_suppressed = [] new_dependencies = [] entry_points = self.manager.source_set.source_modules for dep in self.dependencies + self.suppressed: ignored = dep in self.suppressed_set and dep not in entry_points if ignored or dep not in graph: new_suppressed.append(dep) else: new_dependencies.append(dep) self.dependencies = new_dependencies self.dependencies_set = set(new_dependencies) self.suppressed = new_suppressed self.suppressed_set = set(new_suppressed) # Methods for processing modules from source code. def parse_file(self) -> None: """Parse file and run first pass of semantic analysis. Everything done here is local to the file. Don't depend on imported modules in any way. Also record module dependencies based on imports. """ if self.tree is not None: # The file was already parsed (in __init__()). return manager = self.manager modules = manager.modules manager.log("Parsing %s (%s)" % (self.xpath, self.id)) with self.wrap_context(): source = self.source self.source = None # We won't need it again. 
        if self.path and source is None:
            try:
                path = manager.maybe_swap_for_shadow_path(self.path)
                source = decode_python_encoding(manager.fscache.read(path),
                                                manager.options.python_version)
                self.source_hash = manager.fscache.md5(path)
            except IOError as ioerr:
                # ioerr.strerror differs for os.stat failures between Windows and
                # other systems, but os.strerror(ioerr.errno) does not, so we use that.
                # (We want the error messages to be platform-independent so that the
                # tests have predictable output.)
                raise CompileError([
                    "mypy: can't read file '{}': {}".format(
                        self.path, os.strerror(ioerr.errno))],
                    module_with_blocker=self.id)
            except (UnicodeDecodeError, DecodeError) as decodeerr:
                if self.path.endswith('.pyd'):
                    err = "mypy: stubgen does not support .pyd files: '{}'".format(self.path)
                else:
                    err = "mypy: can't decode file '{}': {}".format(self.path, str(decodeerr))
                raise CompileError([err], module_with_blocker=self.id)
        else:
            assert source is not None
            self.source_hash = compute_hash(source)

        self.parse_inline_configuration(source)
        self.tree = manager.parse_file(self.id, self.xpath, source,
                                       self.ignore_all or self.options.ignore_errors)

        modules[self.id] = self.tree

        # Make a copy of any errors produced during parse time so that
        # fine-grained mode can repeat them when the module is
        # reprocessed.
        self.early_errors = list(manager.errors.error_info_map.get(self.xpath, []))

        self.semantic_analysis_pass1()

        self.check_blockers()

    def parse_inline_configuration(self, source: str) -> None:
        """Check for inline mypy: options directive and parse them."""
        flags = get_mypy_comments(source)
        if flags:
            changes, config_errors = parse_mypy_comments(flags, self.options)
            self.options = self.options.apply_changes(changes)
            self.manager.errors.set_file(self.xpath, self.id)
            for lineno, error in config_errors:
                self.manager.errors.report(lineno, 0, error)

    def semantic_analysis_pass1(self) -> None:
        """Perform pass 1 of semantic analysis, which happens immediately after parsing.

        This pass can't assume that any other modules have been processed yet.
        """
        options = self.options
        assert self.tree is not None
        # Do the first pass of semantic analysis: analyze the reachability
        # of blocks and import statements. We must do this before
        # processing imports, since this may mark some import statements as
        # unreachable.
        #
        # TODO: This should not be considered as a semantic analysis
        # pass -- it's an independent pass.
        analyzer = SemanticAnalyzerPreAnalysis()
        with self.wrap_context():
            analyzer.visit_file(self.tree, self.xpath, self.id, options)
        # TODO: Do this while constructing the AST?
        self.tree.names = SymbolTable()
        if options.allow_redefinition:
            # Perform renaming across the AST to allow variable redefinitions
            self.tree.accept(VariableRenameVisitor())

    def add_dependency(self, dep: str) -> None:
        """Record dep as a (non-suppressed) dependency, preserving insertion order.

        Keeps the list/set pairs (dependencies/dependencies_set and
        suppressed/suppressed_set) consistent: adding a dependency removes it
        from the suppressed pair if present.
        """
        if dep not in self.dependencies_set:
            self.dependencies.append(dep)
            self.dependencies_set.add(dep)
        if dep in self.suppressed_set:
            self.suppressed.remove(dep)
            self.suppressed_set.remove(dep)

    def suppress_dependency(self, dep: str) -> None:
        """Move dep to the suppressed set -- the inverse of add_dependency()."""
        if dep in self.dependencies_set:
            self.dependencies.remove(dep)
            self.dependencies_set.remove(dep)
        if dep not in self.suppressed_set:
            self.suppressed.append(dep)
            self.suppressed_set.add(dep)

    def compute_dependencies(self) -> None:
        """Compute a module's dependencies after parsing it.

        This is used when we parse a file that we didn't have
        up-to-date cache information for. When we have an up-to-date
        cache, we just use the cached info.
        """
        manager = self.manager
        assert self.tree is not None

        # Compute (direct) dependencies.
        # Add all direct imports (this is why we needed the first pass).
        # Also keep track of each dependency's source line.
        # Missing dependencies will be moved from dependencies to
        # suppressed when they fail to be loaded in load_graph.

        self.dependencies = []
        self.dependencies_set = set()
        self.suppressed = []
        self.suppressed_set = set()
        self.priorities = {}  # id -> priority
        self.dep_line_map = {}  # id -> line
        dep_entries = (manager.all_imported_modules_in_file(self.tree) +
                       self.manager.plugin.get_additional_deps(self.tree))
        for pri, id, line in dep_entries:
            # A module can be imported multiple times; keep the highest priority
            # (numerically smallest) seen so far.
            self.priorities[id] = min(pri, self.priorities.get(id, PRI_ALL))
            if id == self.id:
                continue
            self.add_dependency(id)
            if id not in self.dep_line_map:
                self.dep_line_map[id] = line

        # Every module implicitly depends on builtins.
        if self.id != 'builtins':
            self.add_dependency('builtins')

        self.check_blockers()  # Can fail due to bogus relative imports

    def type_check_first_pass(self) -> None:
        """Run the first type-checking pass (no-op when only doing semantic analysis)."""
        if self.options.semantic_analysis_only:
            return
        with self.wrap_context():
            self.type_checker().check_first_pass()

    def type_checker(self) -> TypeChecker:
        """Return the lazily-created TypeChecker for this module."""
        if not self._type_checker:
            assert self.tree is not None, "Internal error: must be called on parsed file only"
            manager = self.manager
            self._type_checker = TypeChecker(manager.errors, manager.modules, self.options,
                                             self.tree, self.xpath, manager.plugin)
        return self._type_checker

    def type_map(self) -> Dict[Expression, Type]:
        """Return the expression -> type map produced by type checking."""
        return self.type_checker().type_map

    def type_check_second_pass(self) -> bool:
        """Run one more type-checking pass; return True if another pass is needed."""
        if self.options.semantic_analysis_only:
            return False
        with self.wrap_context():
            return self.type_checker().check_second_pass()

    def finish_passes(self) -> None:
        """Finalize a fully type-checked module: export types, write reports, free state."""
        assert self.tree is not None, "Internal error: method must be called on parsed file only"
        manager = self.manager
        if self.options.semantic_analysis_only:
            return
        with self.wrap_context():
            # Some tests (and tools) want to look at the set of all types.
            options = manager.options
            if options.export_types:
                manager.all_types.update(self.type_map())

            # We should always patch indirect dependencies, even in full (non-incremental) builds,
            # because the cache still may be written, and it must be correct.
            self._patch_indirect_dependencies(self.type_checker().module_refs, self.type_map())

            if self.options.dump_inference_stats:
                dump_type_stats(self.tree, self.xpath, modules=self.manager.modules,
                                inferred=True, typemap=self.type_map())
            manager.report_file(self.tree, self.type_map(), self.options)

            self.update_fine_grained_deps(self.manager.fg_deps)
            self.free_state()
            if not manager.options.fine_grained_incremental and not manager.options.preserve_asts:
                free_tree(self.tree)

    def free_state(self) -> None:
        """Release the type checker to free memory once it is no longer needed."""
        if self._type_checker:
            self._type_checker.reset()
            self._type_checker = None

    def _patch_indirect_dependencies(self,
                                     module_refs: Set[str],
                                     type_map: Dict[Expression, Type]) -> None:
        """Add dependencies on modules referenced only through types/module refs.

        Modules discovered via the indirection detector or module_refs that
        aren't already valid references become PRI_INDIRECT dependencies
        (or suppressed dependencies, if the module is known to be missing).
        """
        types = set(type_map.values())
        assert None not in types
        valid = self.valid_references()

        encountered = self.manager.indirection_detector.find_modules(types) | module_refs
        extra = encountered - valid

        for dep in sorted(extra):
            if dep not in self.manager.modules:
                continue
            if dep not in self.suppressed_set and dep not in self.manager.missing_modules:
                self.add_dependency(dep)
                self.priorities[dep] = PRI_INDIRECT
            elif dep not in self.suppressed_set and dep in self.manager.missing_modules:
                self.suppress_dependency(dep)

    def compute_fine_grained_deps(self) -> Dict[str, Set[str]]:
        """Compute fine-grained (target-level) dependencies for this module."""
        assert self.tree is not None
        if '/typeshed/' in self.xpath or self.xpath.startswith('typeshed/'):
            # We don't track changes to typeshed -- the assumption is that they are only changed
            # as part of mypy updates, which will invalidate everything anyway.
            #
            # TODO: Not a reliable test, as we could have a package named typeshed.
            # TODO: Consider relaxing this -- maybe allow some typeshed changes to be tracked.
            return {}
        from mypy.server.deps import get_dependencies  # Lazy import to speed up startup
        return get_dependencies(target=self.tree,
                                type_map=self.type_map(),
                                python_version=self.options.python_version,
                                options=self.manager.options)

    def update_fine_grained_deps(self, deps: Dict[str, Set[str]]) -> None:
        """Merge this module's fine-grained deps into the shared deps map (in place)."""
        options = self.manager.options
        if options.cache_fine_grained or options.fine_grained_incremental:
            from mypy.server.deps import merge_dependencies  # Lazy import to speed up startup
            merge_dependencies(self.compute_fine_grained_deps(), deps)
            TypeState.update_protocol_deps(deps)

    def valid_references(self) -> Set[str]:
        """Return the set of module ids this module may legitimately reference."""
        assert self.ancestors is not None
        valid_refs = set(self.dependencies + self.suppressed + self.ancestors)
        valid_refs.add(self.id)

        # 'os.path' is commonly reached via the 'os' module attribute.
        if "os" in valid_refs:
            valid_refs.add("os.path")

        return valid_refs

    def write_cache(self) -> None:
        """Serialize this module's cache data, updating the interface hash.

        Skipped when there is no path, caching is disabled (cache_dir is
        os.devnull), or in fine-grained incremental mode. On (transitive)
        errors, the stale cache entry is deleted instead.
        """
        assert self.tree is not None, "Internal error: method must be called on parsed file only"
        # We don't support writing cache files in fine-grained incremental mode.
        if (not self.path
                or self.options.cache_dir == os.devnull
                or self.options.fine_grained_incremental):
            return
        is_errors = self.transitive_error
        if is_errors:
            # A module with errors must not be served from cache later; remove
            # any stale entry and mark the interface stale.
            delete_cache(self.id, self.path, self.manager)
            self.meta = None
            self.mark_interface_stale(on_errors=True)
            return
        dep_prios = self.dependency_priorities()
        dep_lines = self.dependency_lines()
        assert self.source_hash is not None
        assert len(set(self.dependencies)) == len(self.dependencies), (
            "Duplicates in dependencies list for {} ({})".format(self.id, self.dependencies))
        new_interface_hash, self.meta = write_cache(
            self.id, self.path, self.tree,
            list(self.dependencies), list(self.suppressed),
            dep_prios, dep_lines, self.interface_hash, self.source_hash, self.ignore_all,
            self.manager)
        if new_interface_hash == self.interface_hash:
            self.manager.log("Cached module {} has same interface".format(self.id))
        else:
            self.manager.log("Cached module {} has changed interface".format(self.id))
            self.mark_interface_stale()
            self.interface_hash = new_interface_hash

    def verify_dependencies(self, suppressed_only: bool = False) -> None:
        """Report errors for import targets in modules that don't exist.

        If suppressed_only is set, only check suppressed dependencies.
        """
        manager = self.manager
        assert self.ancestors is not None
        if suppressed_only:
            all_deps = self.suppressed
        else:
            # Strip out indirect dependencies. See comment in build.load_graph().
            dependencies = [dep for dep in self.dependencies
                            if self.priorities.get(dep) != PRI_INDIRECT]
            all_deps = dependencies + self.suppressed + self.ancestors
        for dep in all_deps:
            if dep in manager.modules:
                continue
            options = manager.options.clone_for_module(dep)
            if options.ignore_missing_imports:
                continue
            line = self.dep_line_map.get(dep, 1)
            try:
                if dep in self.ancestors:
                    state, ancestor = None, self  # type: (Optional[State], Optional[State])
                else:
                    state, ancestor = self, None
                # Called just for its side effects of producing diagnostics.
                find_module_and_diagnose(
                    manager, dep, options,
                    caller_state=state, caller_line=line,
                    ancestor_for=ancestor)
            except (ModuleNotFound, CompileError):
                # Swallow up any ModuleNotFounds or CompileErrors while generating
                # a diagnostic. CompileErrors may get generated in
                # fine-grained mode when an __init__.py is deleted, if a module
                # that was in that package has targets reprocessed before
                # it is renamed.
                pass

    def dependency_priorities(self) -> List[int]:
        """Return priorities aligned with dependencies + suppressed (PRI_HIGH default)."""
        return [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies + self.suppressed]

    def dependency_lines(self) -> List[int]:
        """Return import line numbers aligned with dependencies + suppressed (1 default)."""
        return [self.dep_line_map.get(dep, 1) for dep in self.dependencies + self.suppressed]

    def generate_unused_ignore_notes(self) -> None:
        """Emit warnings for '# type: ignore' comments that suppressed nothing."""
        if self.options.warn_unused_ignores:
            # If this file was initially loaded from the cache, it may have suppressed
            # dependencies due to imports with ignores on them. We need to generate
            # those errors to avoid spuriously flagging them as unused ignores.
            if self.meta:
                self.verify_dependencies(suppressed_only=True)
            self.manager.errors.generate_unused_ignore_errors(self.xpath)


# Module import and diagnostic glue


def find_module_and_diagnose(manager: BuildManager,
                             id: str,
                             options: Options,
                             caller_state: 'Optional[State]' = None,
                             caller_line: int = 0,
                             ancestor_for: 'Optional[State]' = None,
                             root_source: bool = False,
                             skip_diagnose: bool = False) -> Tuple[str, str]:
    """Find a module by name, respecting follow_imports and producing diagnostics.

    If the module is not found, then the ModuleNotFound exception is
    raised.
    Args:
      id: module to find
      options: the options for the module being loaded
      caller_state: the state of the importing module, if applicable
      caller_line: the line number of the import
      ancestor_for: the child module this is an ancestor of, if applicable
      root_source: whether this source was specified on the command line
      skip_diagnose: skip any error diagnosis and reporting (but ModuleNotFound is
          still raised if the module is missing)

    The specified value of follow_imports for a module can be overridden
    if the module is specified on the command line or if it is a stub,
    so we compute and return the "effective" follow_imports of the module.

    Returns a tuple containing (file path, target's effective follow_imports setting)
    """
    file_id = id
    if id == 'builtins' and options.python_version[0] == 2:
        # The __builtin__ module is called internally by mypy
        # 'builtins' in Python 2 mode (similar to Python 3),
        # but the stub file is __builtin__.pyi.  The reason is
        # that a lot of code hard-codes 'builtins.x' and it's
        # easier to work it around like this.  It also means
        # that the implementation can mostly ignore the
        # difference and just assume 'builtins' everywhere,
        # which simplifies code.
        file_id = '__builtin__'
    path = find_module_simple(file_id, manager)
    if path:
        # For non-stubs, look at options.follow_imports:
        # - normal (default) -> fully analyze
        # - silent -> analyze but silence errors
        # - skip -> don't analyze, make the type Any
        follow_imports = options.follow_imports
        if (root_source  # Honor top-level modules
                or (not path.endswith('.py')  # Stubs are always normal
                    and not options.follow_imports_for_stubs)  # except when they aren't
                or id in mypy.semanal_main.core_modules):  # core is always normal
            follow_imports = 'normal'
        if skip_diagnose:
            pass
        elif follow_imports == 'silent':
            # Still import it, but silence non-blocker errors.
            manager.log("Silencing %s (%s)" % (path, id))
        elif follow_imports == 'skip' or follow_imports == 'error':
            # In 'error' mode, produce special error messages.
            if id not in manager.missing_modules:
                manager.log("Skipping %s (%s)" % (path, id))
            if follow_imports == 'error':
                if ancestor_for:
                    skipping_ancestor(manager, id, path, ancestor_for)
                else:
                    skipping_module(manager, caller_line, caller_state,
                                    id, path)
            raise ModuleNotFound
        if not manager.options.no_silence_site_packages:
            for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path:
                if is_sub_path(path, dir):
                    # Silence errors in site-package dirs and typeshed
                    follow_imports = 'silent'
        return (path, follow_imports)
    else:
        # Could not find a module. Typically the reason is a
        # misspelled module name, missing stub, module not in
        # search path or the module has not been installed.
        if skip_diagnose:
            raise ModuleNotFound
        if caller_state:
            if not (options.ignore_missing_imports or in_partial_package(id, manager)):
                module_not_found(manager, caller_line, caller_state, id)
            raise ModuleNotFound
        elif root_source:
            # If we can't find a root source it's always fatal.
            # TODO: This might hide non-fatal errors from
            #       root sources processed earlier.
            raise CompileError(["mypy: can't find module '%s'" % id])
        else:
            raise ModuleNotFound


def exist_added_packages(suppressed: List[str],
                         manager: BuildManager, options: Options) -> bool:
    """Find if there are any newly added packages that were previously suppressed.

    Exclude everything not in build for follow-imports=skip.
    """
    for dep in suppressed:
        if dep in manager.source_set.source_modules:
            # We don't need to add any special logic for this. If a module
            # is added to build, importers will be invalidated by normal mechanism.
            continue
        path = find_module_simple(dep, manager)
        if not path:
            continue
        if (options.follow_imports == 'skip' and
                (not path.endswith('.pyi') or options.follow_imports_for_stubs)):
            continue
        if '__init__.py' in path:
            # It is better to have a bit lenient test, this will only slightly reduce
            # performance, while having a too strict test may affect correctness.
            return True
    return False


def find_module_simple(id: str, manager: BuildManager) -> Optional[str]:
    """Find a filesystem path for module `id` or `None` if not found."""
    t0 = time.time()
    x = manager.find_module_cache.find_module(id)
    manager.add_stats(find_module_time=time.time() - t0,
                      find_module_calls=1)
    return x


def in_partial_package(id: str, manager: BuildManager) -> bool:
    """Check if a missing module can potentially be a part of a package.

    This checks if there is any existing parent __init__.pyi stub that
    defines a module-level __getattr__ (a.k.a. partial stub package).
    """
    # Walk up the dotted name, checking each ancestor package.
    while '.' in id:
        parent, _ = id.rsplit('.', 1)
        if parent in manager.modules:
            parent_mod = manager.modules[parent]  # type: Optional[MypyFile]
        else:
            # Parent is not in build, try quickly if we can find it.
            try:
                parent_st = State(id=parent, path=None, source=None, manager=manager,
                                  temporary=True)
            except (ModuleNotFound, CompileError):
                parent_mod = None
            else:
                parent_mod = parent_st.tree
        if parent_mod is not None:
            if parent_mod.is_partial_stub_package:
                return True
            else:
                # Bail out soon, complete subpackage found
                return False
        id = parent
    return False


def module_not_found(manager: BuildManager, line: int, caller_state: State,
                     target: str) -> None:
    """Report an appropriate error for a module that could not be found.

    The message depends on whether the target is builtins, a stdlib
    module (per python_version), a known third-party module, or unknown.
    Errors are reported in the caller's import context.
    """
    errors = manager.errors
    save_import_context = errors.import_context()
    errors.set_import_context(caller_state.import_context)
    errors.set_file(caller_state.xpath, caller_state.id)
    stub_msg = "(Stub files are from https://github.com/python/typeshed)"
    if target == 'builtins':
        errors.report(line, 0, "Cannot find 'builtins' module. Typeshed appears broken!",
                      blocker=True)
        errors.raise_error()
    elif ((manager.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(target))
          or (manager.options.python_version[0] >= 3
              and moduleinfo.is_py3_std_lib_module(target))):
        errors.report(
            line, 0, "No library stub file for standard library module '{}'".format(target),
            code=codes.IMPORT)
        errors.report(line, 0, stub_msg, severity='note', only_once=True, code=codes.IMPORT)
    elif moduleinfo.is_third_party_module(target):
        errors.report(line, 0, "No library stub file for module '{}'".format(target),
                      code=codes.IMPORT)
        errors.report(line, 0, stub_msg, severity='note', only_once=True, code=codes.IMPORT)
    else:
        note = "See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports"
        errors.report(
            line,
            0,
            "Cannot find implementation or library stub for module named '{}'".format(target),
            code=codes.IMPORT
        )
        errors.report(line, 0, note, severity='note', only_once=True, code=codes.IMPORT)
    errors.set_import_context(save_import_context)


def skipping_module(manager: BuildManager, line: int, caller_state: Optional[State],
                    id: str, path: str) -> None:
    """Produce an error for an import ignored due to --follow_imports=error"""
    assert caller_state, (id, path)
    save_import_context = manager.errors.import_context()
    manager.errors.set_import_context(caller_state.import_context)
    manager.errors.set_file(caller_state.xpath, caller_state.id)
    manager.errors.report(line, 0,
                          "Import of '%s' ignored" % (id,),
                          severity='error')
    manager.errors.report(line, 0,
                          "(Using --follow-imports=error, module not passed on command line)",
                          severity='note', only_once=True)
    manager.errors.set_import_context(save_import_context)


def skipping_ancestor(manager: BuildManager, id: str, path: str, ancestor_for: 'State') -> None:
    """Produce an error for an ancestor ignored due to --follow_imports=error"""
    # TODO: Read the path (the __init__.py file) and return
    # immediately if it's empty or only contains comments.
    # But beware, some package may be the ancestor of many modules,
    # so we'd need to cache the decision.
    manager.errors.set_import_context([])
    manager.errors.set_file(ancestor_for.xpath, ancestor_for.id)
    manager.errors.report(-1, -1, "Ancestor package '%s' ignored" % (id,),
                          severity='error', only_once=True)
    manager.errors.report(-1, -1,
                          "(Using --follow-imports=error, submodule passed on command line)",
                          severity='note', only_once=True)


def log_configuration(manager: BuildManager) -> None:
    """Output useful configuration information to LOG and TRACE"""
    manager.log()
    configuration_vars = (
        ("Mypy Version", __version__),
        ("Config File", (manager.options.config_file or "Default")),
        ("Configured Executable", manager.options.python_executable),
        ("Current Executable", sys.executable),
        ("Cache Dir", manager.options.cache_dir),
        ("Compiled", str(not __file__.endswith(".py"))),
    )

    for conf_name, conf_value in configuration_vars:
        manager.log("{:24}{}".format(conf_name + ":", conf_value))

    # Complete list of searched paths can get very long, put them under TRACE
    for path_type, paths in manager.search_paths._asdict().items():
        if not paths:
            manager.trace("No %s" % path_type)
            continue

        manager.trace("%s:" % path_type)

        for pth in paths:
            manager.trace(" %s" % pth)


# The driver


def dispatch(sources: List[BuildSource],
             manager: BuildManager,
             stdout: TextIO,
             ) -> Graph:
    """Top-level build driver: load the graph, process it, and write caches.

    Returns the final Graph. May recurse once (with caching disabled) if the
    fine-grained dependency cache turns out to be unusable.
    """
    log_configuration(manager)

    t0 = time.time()
    graph = load_graph(sources, manager)

    # This is a kind of unfortunate hack to work around some of fine-grained's
    # fragility: if we have loaded less than 50% of the specified files from
    # cache in fine-grained cache mode, load the graph again honestly.
    # In this case, we just turn the cache off entirely, so we don't need
    # to worry about some files being loaded and some from cache and so
    # that fine-grained mode never *writes* to the cache.
    if manager.use_fine_grained_cache() and len(graph) < 0.50 * len(sources):
        manager.log("Redoing load_graph without cache because too much was missing")
        manager.cache_enabled = False
        graph = load_graph(sources, manager)

    t1 = time.time()
    manager.add_stats(graph_size=len(graph),
                      stubs_found=sum(g.path is not None and g.path.endswith('.pyi')
                                      for g in graph.values()),
                      graph_load_time=(t1 - t0),
                      fm_cache_size=len(manager.find_module_cache.results),
                      )
    if not graph:
        print("Nothing to do?!", file=stdout)
        return graph
    manager.log("Loaded graph with %d nodes (%.3f sec)" % (len(graph), t1 - t0))
    if manager.options.dump_graph:
        dump_graph(graph, stdout)
        return graph

    # Fine grained dependencies that didn't have an associated module in the build
    # are serialized separately, so we read them after we load the graph.
    # We need to read them both for running in daemon mode and if we are generating
    # a fine-grained cache (so that we can properly update them incrementally).
    # The `read_deps_cache` will also validate
    # the deps cache against the loaded individual cache files.
    if manager.options.cache_fine_grained or manager.use_fine_grained_cache():
        t2 = time.time()
        fg_deps_meta = read_deps_cache(manager, graph)
        manager.add_stats(load_fg_deps_time=time.time() - t2)
        if fg_deps_meta is not None:
            manager.fg_deps_meta = fg_deps_meta
        elif manager.stats.get('fresh_metas', 0) > 0:
            # Clear the stats so we don't infinite loop because of positive fresh_metas
            manager.stats.clear()
            # There were some cache files read, but no fine-grained dependencies loaded.
            manager.log("Error reading fine-grained dependencies cache -- aborting cache load")
            manager.cache_enabled = False
            manager.log("Falling back to full run -- reloading graph...")
            return dispatch(sources, manager, stdout)

    # If we are loading a fine-grained incremental mode cache, we
    # don't want to do a real incremental reprocess of the
    # graph---we'll handle it all later.
    if not manager.use_fine_grained_cache():
        process_graph(graph, manager)
        # Update plugins snapshot.
        write_plugins_snapshot(manager)
        manager.old_plugins_snapshot = manager.plugins_snapshot
        if manager.options.cache_fine_grained or manager.options.fine_grained_incremental:
            # If we are running a daemon or are going to write cache for further fine grained use,
            # then we need to collect fine grained protocol dependencies.
            # Since these are a global property of the program, they are calculated after we
            # processed the whole graph.
            TypeState.add_all_protocol_deps(manager.fg_deps)
            if not manager.options.fine_grained_incremental:
                rdeps = generate_deps_for_cache(manager, graph)
                write_deps_cache(rdeps, manager, graph)

    if manager.options.dump_deps:
        # This speeds up startup a little when not using the daemon mode.
        from mypy.server.deps import dump_all_dependencies
        dump_all_dependencies(manager.modules, manager.all_types,
                              manager.options.python_version, manager.options)
    return graph


class NodeInfo:
    """Some info about a node in the graph of SCCs."""

    def __init__(self, index: int, scc: List[str]) -> None:
        # Unique identifier used in the JSON dump.
        self.node_id = "n%d" % index
        self.scc = scc
        self.sizes = {}  # type: Dict[str, int]  # mod -> size in bytes
        self.deps = {}  # type: Dict[str, int]  # node_id -> pri

    def dumps(self) -> str:
        """Convert to JSON string."""
        total_size = sum(self.sizes.values())
        return "[%s, %s, %s,\n %s,\n %s]" % (json.dumps(self.node_id),
                                             json.dumps(total_size),
                                             json.dumps(self.scc),
                                             json.dumps(self.sizes),
                                             json.dumps(self.deps))


def dump_graph(graph: Graph, stdout: Optional[TextIO] = None) -> None:
    """Dump the graph as a JSON string to stdout.

    This copies some of the work by process_graph()
    (sorted_components() and order_ascc()).
    """
    stdout = stdout or sys.stdout
    nodes = []
    sccs = sorted_components(graph)
    for i, ascc in enumerate(sccs):
        scc = order_ascc(graph, ascc)
        node = NodeInfo(i, scc)
        nodes.append(node)
    inv_nodes = {}  # module -> node_id
    for node in nodes:
        for mod in node.scc:
            inv_nodes[mod] = node.node_id
    for node in nodes:
        for mod in node.scc:
            state = graph[mod]
            size = 0
            if state.path:
                try:
                    size = os.path.getsize(state.path)
                except os.error:
                    pass
            node.sizes[mod] = size
            for dep in state.dependencies:
                if dep in state.priorities:
                    pri = state.priorities[dep]
                    if dep in inv_nodes:
                        dep_id = inv_nodes[dep]
                        # Keep the highest-priority (smallest) edge between two SCCs.
                        if (dep_id != node.node_id and
                                (dep_id not in node.deps or pri < node.deps[dep_id])):
                            node.deps[dep_id] = pri
    print("[" + ",\n ".join(node.dumps() for node in nodes) + "\n]", file=stdout)


def load_graph(sources: List[BuildSource], manager: BuildManager,
               old_graph: Optional[Graph] = None,
               new_modules: Optional[List[State]] = None) -> Graph:
    """Given some source files, load the full dependency graph.

    If an old_graph is passed in, it is used as the starting point and
    modified during graph loading.

    If a new_modules is passed in, any modules that are loaded are added to the list.
    This is an argument and not a return value so that the caller can access it
    even if load_graph fails.

    As this may need to parse files, this can raise CompileError in case
    there are syntax errors.
    """

    graph = old_graph if old_graph is not None else {}  # type: Graph

    # The deque is used to implement breadth-first traversal.
    # TODO: Consider whether to go depth-first instead.  This may
    # affect the order in which we process files within import cycles.
    new = new_modules if new_modules is not None else []
    entry_points = set()  # type: Set[str]
    # Seed the graph with the initial root sources.
    for bs in sources:
        try:
            st = State(id=bs.module, path=bs.path, source=bs.text,
                       manager=manager, root_source=True)
        except ModuleNotFound:
            continue
        if st.id in graph:
            manager.errors.set_file(st.xpath, st.id)
            manager.errors.report(
                -1, -1,
                "Duplicate module named '%s' (also at '%s')" % (st.id, graph[st.id].xpath)
            )
            # A different directory nesting depth hints at a missing __init__.py
            # making the same file reachable under two module names.
            p1 = len(pathlib.PurePath(st.xpath).parents)
            p2 = len(pathlib.PurePath(graph[st.id].xpath).parents)

            if p1 != p2:
                manager.errors.report(
                    -1, -1,
                    "Are you missing an __init__.py?"
                )

            manager.errors.raise_error()
        graph[st.id] = st
        new.append(st)
        entry_points.add(bs.module)

    # Collect dependencies.  We go breadth-first.
    # More nodes might get added to new as we go, but that's fine.
    for st in new:
        assert st.ancestors is not None
        # Strip out indirect dependencies.  These will be dealt with
        # when they show up as direct dependencies, and there's a
        # scenario where they hurt:
        # - Suppose A imports B and B imports C.
        # - Suppose on the next round:
        #   - C is deleted;
        #   - B is updated to remove the dependency on C;
        #   - A is unchanged.
        # - In this case A's cached *direct* dependencies are still valid
        #   (since direct dependencies reflect the imports found in the source)
        #   but A's cached *indirect* dependency on C is wrong.
        dependencies = [dep for dep in st.dependencies if st.priorities.get(dep) != PRI_INDIRECT]
        if not manager.use_fine_grained_cache():
            # TODO: Ideally we could skip here modules that appeared in st.suppressed
            # because they are not in build with `follow-imports=skip`.
            # This way we could avoid overhead of cloning options in `State.__init__()`
            # below to get the option value. This is quite minor performance loss however.
            added = [dep for dep in st.suppressed if find_module_simple(dep, manager)]
        else:
            # During initial loading we don't care about newly added modules,
            # they will be taken care of during fine grained update. See also
            # comment about this in `State.__init__()`.
            added = []
        for dep in st.ancestors + dependencies + st.suppressed:
            ignored = dep in st.suppressed_set and dep not in entry_points
            if ignored and dep not in added:
                manager.missing_modules.add(dep)
            elif dep not in graph:
                try:
                    if dep in st.ancestors:
                        # TODO: Why not 'if dep not in st.dependencies' ?
                        # Ancestors don't have import context.
                        newst = State(id=dep, path=None, source=None, manager=manager,
                                      ancestor_for=st)
                    else:
                        newst = State(id=dep, path=None, source=None, manager=manager,
                                      caller_state=st, caller_line=st.dep_line_map.get(dep, 1))
                except ModuleNotFound:
                    if dep in st.dependencies_set:
                        st.suppress_dependency(dep)
                else:
                    assert newst.id not in graph, newst.id
                    graph[newst.id] = newst
                    new.append(newst)
            if dep in graph and dep in st.suppressed_set:
                # Previously suppressed file is now visible
                st.add_dependency(dep)
    manager.plugin.set_modules(manager.modules)
    return graph


def process_graph(graph: Graph, manager: BuildManager) -> None:
    """Process everything in dependency order."""
    sccs = sorted_components(graph)
    manager.log("Found %d SCCs; largest has %d nodes" %
                (len(sccs), max(len(scc) for scc in sccs)))

    fresh_scc_queue = []  # type: List[List[str]]

    # We're processing SCCs from leaves (those without further
    # dependencies) to roots (those from which everything else can be
    # reached).
    for ascc in sccs:
        # Order the SCC's nodes using a heuristic.
        # Note that ascc is a set, and scc is a list.
        scc = order_ascc(graph, ascc)
        # If builtins is in the list, move it last.  (This is a bit of
        # a hack, but it's necessary because the builtins module is
        # part of a small cycle involving at least {builtins, abc,
        # typing}.  Of these, builtins must be processed last or else
        # some builtin objects will be incompletely processed.)
        if 'builtins' in ascc:
            scc.remove('builtins')
            scc.append('builtins')
        if manager.options.verbosity >= 2:
            for id in scc:
                manager.trace("Priorities for %s:" % id,
                              " ".join("%s:%d" % (x, graph[id].priorities[x])
                                       for x in graph[id].dependencies
                                       if x in ascc and x in graph[id].priorities))
        # Because the SCCs are presented in topological sort order, we
        # don't need to look at dependencies recursively for staleness
        # -- the immediate dependencies are sufficient.
        stale_scc = {id for id in scc if not graph[id].is_fresh()}
        fresh = not stale_scc
        deps = set()
        for id in scc:
            deps.update(graph[id].dependencies)
        deps -= ascc
        stale_deps = {id for id in deps if id in graph and not graph[id].is_interface_fresh()}
        fresh = fresh and not stale_deps
        undeps = set()
        if fresh:
            # Check if any dependencies that were suppressed according
            # to the cache have been added back in this run.
            # NOTE: Newly suppressed dependencies are handled by is_fresh().
            for id in scc:
                undeps.update(graph[id].suppressed)
            undeps &= graph.keys()
            if undeps:
                fresh = False
        if fresh:
            # All cache files are fresh.  Check that no dependency's
            # cache file is newer than any scc node's cache file.
            oldest_in_scc = min(graph[id].xmeta.data_mtime for id in scc)
            viable = {id for id in stale_deps if graph[id].meta is not None}
            newest_in_deps = 0 if not viable else max(graph[dep].xmeta.data_mtime
                                                      for dep in viable)
            if manager.options.verbosity >= 3:  # Dump all mtimes for extreme debugging.
                all_ids = sorted(ascc | viable, key=lambda id: graph[id].xmeta.data_mtime)
                for id in all_ids:
                    if id in scc:
                        if graph[id].xmeta.data_mtime < newest_in_deps:
                            key = "*id:"
                        else:
                            key = "id:"
                    else:
                        if graph[id].xmeta.data_mtime > oldest_in_scc:
                            key = "+dep:"
                        else:
                            key = "dep:"
                    manager.trace(" %5s %.0f %s" % (key, graph[id].xmeta.data_mtime, id))
            # If equal, give the benefit of the doubt, due to 1-sec time granularity
            # (on some platforms).
            if oldest_in_scc < newest_in_deps:
                fresh = False
                fresh_msg = "out of date by %.0f seconds" % (newest_in_deps - oldest_in_scc)
            else:
                fresh_msg = "fresh"
        elif undeps:
            fresh_msg = "stale due to changed suppression (%s)" % " ".join(sorted(undeps))
        elif stale_scc:
            fresh_msg = "inherently stale"
            if stale_scc != ascc:
                fresh_msg += " (%s)" % " ".join(sorted(stale_scc))
            if stale_deps:
                fresh_msg += " with stale deps (%s)" % " ".join(sorted(stale_deps))
        else:
            fresh_msg = "stale due to deps (%s)" % " ".join(sorted(stale_deps))

        # Initialize transitive_error for all SCC members from union
        # of transitive_error of dependencies.
        if any(graph[dep].transitive_error for dep in deps if dep in graph):
            for id in scc:
                graph[id].transitive_error = True

        scc_str = " ".join(scc)
        if fresh:
            manager.trace("Queuing %s SCC (%s)" % (fresh_msg, scc_str))
            fresh_scc_queue.append(scc)
        else:
            if len(fresh_scc_queue) > 0:
                manager.log("Processing {} queued fresh SCCs".format(len(fresh_scc_queue)))
                # Defer processing fresh SCCs until we actually run into a stale SCC
                # and need the earlier modules to be loaded.
                #
                # Note that `process_graph` may end with us not having processed every
                # single fresh SCC. This is intentional -- we don't need those modules
                # loaded if there are no more stale SCCs to be rechecked.
                #
                # Also note we shouldn't have to worry about transitive_error here,
                # since modules with transitive errors aren't written to the cache,
                # and if any dependencies were changed, this SCC would be stale.
                # (Also, in quick_and_dirty mode we don't care about transitive errors.)
                #
                # TODO: see if it's possible to determine if we need to process only a
                # _subset_ of the past SCCs instead of having to process them all.
                for prev_scc in fresh_scc_queue:
                    process_fresh_modules(graph, prev_scc, manager)
                fresh_scc_queue = []
            size = len(scc)
            if size == 1:
                manager.log("Processing SCC singleton (%s) as %s" % (scc_str, fresh_msg))
            else:
                manager.log("Processing SCC of size %d (%s) as %s" % (size, scc_str, fresh_msg))
            process_stale_scc(graph, scc, manager)

    sccs_left = len(fresh_scc_queue)
    nodes_left = sum(len(scc) for scc in fresh_scc_queue)
    manager.add_stats(sccs_left=sccs_left, nodes_left=nodes_left)
    if sccs_left:
        manager.log("{} fresh SCCs ({} nodes) left in queue (and will remain unprocessed)"
                    .format(sccs_left, nodes_left))
        manager.trace(str(fresh_scc_queue))
    else:
        manager.log("No fresh SCCs left in queue")


def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_ALL) -> List[str]:
    """Come up with the ideal processing order within an SCC.

    Using the priorities assigned by all_imported_modules_in_file(),
    try to reduce the cycle to a DAG, by omitting arcs representing
    dependencies of lower priority.

    In the simplest case, if we have A <--> B where A has a top-level
    "import B" (medium priority) but B only has the reverse "import A"
    inside a function (low priority), we turn the cycle into a DAG by
    dropping the B --> A arc, which leaves only A --> B.

    If all arcs have the same priority, we fall back to sorting by
    reverse global order (the order in which modules were first
    encountered).

    The algorithm is recursive, as follows: when arcs of different
    priorities are present, drop all arcs of the lowest priority,
    identify SCCs in the resulting graph, and apply the algorithm to
    each SCC thus found.  The recursion is bounded because at each
    recursion the spread in priorities is (at least) one less.

    In practice there are only a few priority levels (less than a
    dozen) and in the worst case we just carry out the same algorithm
    for finding SCCs N times.
    Thus the complexity is no worse than the complexity of the original
    SCC-finding algorithm -- see strongly_connected_components() below
    for a reference.
    """
    if len(ascc) == 1:
        return [s for s in ascc]
    pri_spread = set()
    for id in ascc:
        state = graph[id]
        for dep in state.dependencies:
            if dep in ascc:
                pri = state.priorities.get(dep, PRI_HIGH)
                if pri < pri_max:
                    pri_spread.add(pri)
    if len(pri_spread) == 1:
        # Filtered dependencies are uniform -- order by global order.
        return sorted(ascc, key=lambda id: -graph[id].order)
    pri_max = max(pri_spread)
    sccs = sorted_components(graph, ascc, pri_max)
    # The recursion is bounded by the len(pri_spread) check above.
    return [s for ss in sccs for s in order_ascc(graph, ss, pri_max)]


def process_fresh_modules(graph: Graph, modules: List[str], manager: BuildManager) -> None:
    """Process the modules in one group of modules from their cached data.

    This can be used to process an SCC of modules.
    This involves loading the tree from JSON and then doing various cleanups.
    """
    t0 = time.time()
    for id in modules:
        graph[id].load_tree()
    t1 = time.time()
    for id in modules:
        graph[id].fix_cross_refs()
    t2 = time.time()
    manager.add_stats(process_fresh_time=t2 - t0, load_tree_time=t1 - t0)


def process_stale_scc(graph: Graph, scc: List[str], manager: BuildManager) -> None:
    """Process the modules in one SCC from source code.

    Exception: If quick_and_dirty is set, use the cache for fresh modules.
    """
    stale = scc
    for id in stale:
        # We may already have parsed the module, or not.
        # If the former, parse_file() is a no-op.
        graph[id].parse_file()
        graph[id].fix_suppressed_dependencies(graph)
    if 'typing' in scc:
        # For historical reasons we need to manually add typing aliases
        # for built-in generic collections, see docstring of
        # SemanticAnalyzerPass2.add_builtin_aliases for details.
        typing_mod = graph['typing'].tree
        assert typing_mod, "The typing module was not parsed"
    mypy.semanal_main.semantic_analysis_for_scc(graph, scc, manager.errors)

    # Track what modules aren't yet done so we can finish them as soon
    # as possible, saving memory.
    unfinished_modules = set(stale)
    for id in stale:
        graph[id].type_check_first_pass()
        if not graph[id].type_checker().deferred_nodes:
            unfinished_modules.discard(id)
            graph[id].finish_passes()

    # Iterate second passes until every module's deferred nodes are resolved.
    while unfinished_modules:
        for id in stale:
            if id not in unfinished_modules:
                continue
            if not graph[id].type_check_second_pass():
                unfinished_modules.discard(id)
                graph[id].finish_passes()
    for id in stale:
        graph[id].generate_unused_ignore_notes()
    if any(manager.errors.is_errors_for_file(graph[id].xpath) for id in stale):
        for id in stale:
            graph[id].transitive_error = True
    for id in stale:
        manager.flush_errors(manager.errors.file_messages(graph[id].xpath), False)
        graph[id].write_cache()
        graph[id].mark_as_rechecked()


def sorted_components(graph: Graph,
                      vertices: Optional[AbstractSet[str]] = None,
                      pri_max: int = PRI_ALL) -> List[AbstractSet[str]]:
    """Return the graph's SCCs, topologically sorted by dependencies.

    The sort order is from leaves (nodes without dependencies) to
    roots (nodes on which no other nodes depend).

    This works for a subset of the full dependency graph too;
    dependencies that aren't present in graph.keys() are ignored.
    """
    # Compute SCCs.
    if vertices is None:
        vertices = set(graph)
    edges = {id: deps_filtered(graph, vertices, id, pri_max) for id in vertices}
    sccs = list(strongly_connected_components(vertices, edges))
    # Topsort.
    sccsmap = {id: frozenset(scc) for scc in sccs for id in scc}
    data = {}  # type: Dict[AbstractSet[str], Set[AbstractSet[str]]]
    for scc in sccs:
        deps = set()  # type: Set[AbstractSet[str]]
        for id in scc:
            deps.update(sccsmap[x] for x in deps_filtered(graph, vertices, id, pri_max))
        data[frozenset(scc)] = deps
    res = []
    for ready in topsort(data):
        # Sort the sets in ready by reversed smallest State.order.
        # Examples:
        #
        # - If ready is [{x}, {y}], x.order == 1, y.order == 2, we get
        #   [{y}, {x}].
        #
        # - If ready is [{a, b}, {c, d}], a.order == 1, b.order == 3,
        #   c.order == 2, d.order == 4, the sort keys become [1, 2]
        #   and the result is [{c, d}, {a, b}].
        res.extend(sorted(ready,
                          key=lambda scc: -min(graph[id].order for id in scc)))
    return res


def deps_filtered(graph: Graph, vertices: AbstractSet[str], id: str, pri_max: int) -> List[str]:
    """Filter dependencies for id with pri < pri_max."""
    if id not in vertices:
        return []
    state = graph[id]
    return [dep
            for dep in state.dependencies
            if dep in vertices and state.priorities.get(dep, PRI_HIGH) < pri_max]


def strongly_connected_components(vertices: AbstractSet[str],
                                  edges: Dict[str, List[str]]) -> Iterator[Set[str]]:
    """Compute Strongly Connected Components of a directed graph.

    Args:
      vertices: the labels for the vertices
      edges: for each vertex, gives the target vertices of its outgoing edges

    Returns:
      An iterator yielding strongly connected components, each
      represented as a set of vertices.  Each input vertex will occur
      exactly once; vertices not part of a SCC are returned as
      singleton sets.

    From http://code.activestate.com/recipes/578507/.
    """
    # Path-based SCC algorithm (iterative DFS via generator recursion).
    identified = set()  # type: Set[str]
    stack = []  # type: List[str]
    index = {}  # type: Dict[str, int]
    boundaries = []  # type: List[int]

    def dfs(v: str) -> Iterator[Set[str]]:
        index[v] = len(stack)
        stack.append(v)
        boundaries.append(index[v])

        for w in edges[v]:
            if w not in index:
                yield from dfs(w)
            elif w not in identified:
                # Merge path segments that can reach w back into one
                # candidate component.
                while index[w] < boundaries[-1]:
                    boundaries.pop()

        if boundaries[-1] == index[v]:
            # v is the root of a component: everything above it on the
            # stack forms one SCC.
            boundaries.pop()
            scc = set(stack[index[v]:])
            del stack[index[v]:]
            identified.update(scc)
            yield scc

    for v in vertices:
        if v not in index:
            yield from dfs(v)


def topsort(data: Dict[AbstractSet[str],
                       Set[AbstractSet[str]]]) -> Iterable[Set[AbstractSet[str]]]:
    """Topological sort.

    Args:
      data: A map from SCCs (represented as frozen sets of strings) to
            sets of SCCs, its dependencies.  NOTE: This data structure
            is modified in place -- for normalization purposes,
            self-dependencies are removed and entries representing
            orphans are added.

    Returns:
      An iterator yielding sets of SCCs that have an equivalent
      ordering.  NOTE: The algorithm doesn't care about the internal
      structure of SCCs.

    Example:
      Suppose the input has the following structure:

        {A: {B, C}, B: {D}, C: {D}}

      This is normalized to:

        {A: {B, C}, B: {D}, C: {D}, D: {}}

      The algorithm will yield the following values:

        {D}
        {B, C}
        {A}

    From http://code.activestate.com/recipes/577413/.
    """
    # TODO: Use a faster algorithm?
    for k, v in data.items():
        v.discard(k)  # Ignore self dependencies.
    for item in set.union(*data.values()) - set(data.keys()):
        data[item] = set()
    while True:
        ready = {item for item, dep in data.items() if not dep}
        if not ready:
            break
        yield ready
        # Remove the emitted layer and its satisfied dependencies.
        data = {item: (dep - ready)
                for item, dep in data.items()
                if item not in ready}
    assert not data, "A cyclic dependency exists amongst %r" % data
mypy-0.761/mypy/checker.py0000644€tŠÔÚ€2›s®0000073712313576752246021705 0ustar jukkaDROPBOX\Domain Users00000000000000"""Mypy type checker."""

import itertools
import fnmatch
from contextlib import contextmanager

from typing import (
    Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple, Iterator,
    Sequence, Mapping,
)
from typing_extensions import Final

from mypy.errors import Errors, report_internal_error
from mypy.nodes import (
    SymbolTable, Statement, MypyFile, Var, Expression, Lvalue, Node,
    OverloadedFuncDef, FuncDef, FuncItem, FuncBase, TypeInfo,
    ClassDef, Block, AssignmentStmt, NameExpr, MemberExpr, IndexExpr,
    TupleExpr, ListExpr, ExpressionStmt, ReturnStmt, IfStmt,
    WhileStmt, OperatorAssignmentStmt, WithStmt, AssertStmt,
    RaiseStmt, TryStmt, ForStmt, DelStmt, CallExpr, IntExpr, StrExpr,
    UnicodeExpr, OpExpr, UnaryExpr, LambdaExpr, TempNode, SymbolTableNode,
    Context, Decorator, PrintStmt, BreakStmt, PassStmt, ContinueStmt,
    ComparisonExpr, StarExpr, EllipsisExpr, RefExpr, PromoteExpr,
Import, ImportFrom, ImportAll, ImportBase, TypeAlias, ARG_POS, ARG_STAR, LITERAL_TYPE, MDEF, GDEF, CONTRAVARIANT, COVARIANT, INVARIANT, TypeVarExpr, AssignmentExpr, is_final_node, ARG_NAMED) from mypy import nodes from mypy.literals import literal, literal_hash from mypy.typeanal import has_any_from_unimported_type, check_for_explicit_any from mypy.types import ( Type, AnyType, CallableType, FunctionLike, Overloaded, TupleType, TypedDictType, Instance, NoneType, strip_type, TypeType, TypeOfAny, UnionType, TypeVarId, TypeVarType, PartialType, DeletedType, UninhabitedType, TypeVarDef, is_named_instance, union_items, TypeQuery, LiteralType, is_optional, remove_optional, TypeTranslator, StarType, get_proper_type, ProperType, get_proper_types, is_literal_type, TypeAliasType) from mypy.sametypes import is_same_type from mypy.messages import ( MessageBuilder, make_inferred_type_note, append_invariance_notes, format_type, format_type_bare, format_type_distinctly, ) import mypy.checkexpr from mypy.checkmember import ( analyze_member_access, analyze_descriptor_access, type_object_type, ) from mypy.typeops import ( map_type_from_supertype, bind_self, erase_to_bound, make_simplified_union, erase_def_to_union_or_bound, erase_to_union_or_bound, coerce_to_literal, try_getting_str_literals_from_type, try_getting_int_literals_from_type, tuple_fallback, is_singleton_type, try_expanding_enum_to_union, true_only, false_only, function_type, TypeVarExtractor, ) from mypy import message_registry from mypy.subtypes import ( is_subtype, is_equivalent, is_proper_subtype, is_more_precise, restrict_subtype_away, is_subtype_ignoring_tvars, is_callable_compatible, unify_generic_callable, find_member ) from mypy.constraints import SUPERTYPE_OF from mypy.maptype import map_instance_to_supertype from mypy.typevars import fill_typevars, has_no_typevars, fill_typevars_with_any from mypy.semanal import set_callable_name, refers_to_fullname from mypy.mro import calculate_mro from mypy.erasetype import 
    erase_typevars, remove_instance_last_known_values
from mypy.expandtype import expand_type, expand_type_by_instance
from mypy.visitor import NodeVisitor
from mypy.join import join_types
from mypy.treetransform import TransformVisitor
from mypy.binder import ConditionalTypeBinder, get_declaration
from mypy.meet import is_overlapping_erased_types, is_overlapping_types
from mypy.options import Options
from mypy.plugin import Plugin, CheckerPluginInterface
from mypy.sharedparse import BINARY_MAGIC_METHODS
from mypy.scope import Scope
from mypy import state, errorcodes as codes
from mypy.traverser import has_return_statement, all_return_statements
from mypy.errorcodes import ErrorCode

T = TypeVar('T')

DEFAULT_LAST_PASS = 1  # type: Final  # Pass numbers start at 0

# Nodes that may be deferred for another type-checking pass.
DeferredNodeType = Union[FuncDef, LambdaExpr, OverloadedFuncDef, Decorator]
FineGrainedDeferredNodeType = Union[FuncDef, MypyFile, OverloadedFuncDef]

# A node which is postponed to be processed during the next pass.
# In normal mode one can defer functions and methods (also decorated and/or overloaded)
# and lambda expressions. Nested functions can't be deferred -- only top-level functions
# and methods of classes not defined within a function can be deferred.
DeferredNode = NamedTuple(
    'DeferredNode',
    [
        ('node', DeferredNodeType),
        ('active_typeinfo', Optional[TypeInfo]),  # And its TypeInfo (for semantic analysis
                                                  # self type handling)
    ])

# Same as above, but for fine-grained mode targets. Only top-level functions/methods
# and module top levels are allowed as such.
FineGrainedDeferredNode = NamedTuple(
    'FineGrainedDeferredNode',
    [
        ('node', FineGrainedDeferredNodeType),
        ('active_typeinfo', Optional[TypeInfo]),
    ])

# Data structure returned by find_isinstance_check representing
# information learned from the truth or falsehood of a condition.  The
# dict maps nodes representing expressions like 'a[0].x' to their
# refined types under the assumption that the condition has a
# particular truth value.  A value of None means that the condition can
# never have that truth value.

# NB: The keys of this dict are nodes in the original source program,
# which are compared by reference equality--effectively, being *the
# same* expression of the program, not just two identical expressions
# (such as two references to the same variable).  TODO: it would
# probably be better to have the dict keyed by the nodes' literal_hash
# field instead.
TypeMap = Optional[Dict[Expression, Type]]

# An object that represents either a precise type or a type with an upper bound;
# it is important for correct type inference with isinstance.
TypeRange = NamedTuple(
    'TypeRange',
    [
        ('item', Type),
        ('is_upper_bound', bool),  # False => precise type
    ])

# Keeps track of partial types in a single scope. In fine-grained incremental
# mode partial types initially defined at the top level cannot be completed in
# a function, and we use the 'is_function' attribute to enforce this.
PartialTypeScope = NamedTuple('PartialTypeScope', [('map', Dict[Var, Context]),
                                                   ('is_function', bool),
                                                   ('is_local', bool),
                                                   ])


class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
    """Mypy type checker.

    Type check mypy source files that have been semantically analyzed.

    You must create a separate instance for each source file.
    """

    # Are we type checking a stub?
is_stub = False # Error message reporter errors = None # type: Errors # Utility for generating messages msg = None # type: MessageBuilder # Types of type checked nodes type_map = None # type: Dict[Expression, Type] # Helper for managing conditional types binder = None # type: ConditionalTypeBinder # Helper for type checking expressions expr_checker = None # type: mypy.checkexpr.ExpressionChecker tscope = None # type: Scope scope = None # type: CheckerScope # Stack of function return types return_types = None # type: List[Type] # Flags; true for dynamically typed functions dynamic_funcs = None # type: List[bool] # Stack of collections of variables with partial types partial_types = None # type: List[PartialTypeScope] # Vars for which partial type errors are already reported # (to avoid logically duplicate errors with different error context). partial_reported = None # type: Set[Var] globals = None # type: SymbolTable modules = None # type: Dict[str, MypyFile] # Nodes that couldn't be checked because some types weren't available. We'll run # another pass and try these again. deferred_nodes = None # type: List[DeferredNode] # Type checking pass number (0 = first pass) pass_num = 0 # Last pass number to take last_pass = DEFAULT_LAST_PASS # Have we deferred the current function? If yes, don't infer additional # types during this pass within the function. current_node_deferred = False # Is this file a typeshed stub? is_typeshed_stub = False # Should strict Optional-related errors be suppressed in this file? suppress_none_errors = False # TODO: Get it from options instead options = None # type: Options # Used for collecting inferred attribute types so that they can be checked # for consistency. 
inferred_attribute_types = None # type: Optional[Dict[Var, Type]] # Don't infer partial None types if we are processing assignment from Union no_partial_types = False # type: bool # The set of all dependencies (suppressed or not) that this module accesses, either # directly or indirectly. module_refs = None # type: Set[str] # Plugin that provides special type checking rules for specific library # functions such as open(), etc. plugin = None # type: Plugin def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Options, tree: MypyFile, path: str, plugin: Plugin) -> None: """Construct a type checker. Use errors to report type check errors. """ self.errors = errors self.modules = modules self.options = options self.tree = tree self.path = path self.msg = MessageBuilder(errors, modules) self.plugin = plugin self.expr_checker = mypy.checkexpr.ExpressionChecker(self, self.msg, self.plugin) self.tscope = Scope() self.scope = CheckerScope(tree) self.binder = ConditionalTypeBinder() self.globals = tree.names self.return_types = [] self.dynamic_funcs = [] self.partial_types = [] self.partial_reported = set() self.deferred_nodes = [] self.type_map = {} self.module_refs = set() self.pass_num = 0 self.current_node_deferred = False self.is_stub = tree.is_stub self.is_typeshed_stub = errors.is_typeshed_file(path) self.inferred_attribute_types = None if options.strict_optional_whitelist is None: self.suppress_none_errors = not options.show_none_errors else: self.suppress_none_errors = not any(fnmatch.fnmatch(path, pattern) for pattern in options.strict_optional_whitelist) # If True, process function definitions. If False, don't. This is used # for processing module top levels in fine-grained incremental mode. self.recurse_into_functions = True # This internal flag is used to track whether we a currently type-checking # a final declaration (assignment), so that some errors should be suppressed. 
        # Should not be set manually, use get_final_context/enter_final_context instead.
        # NOTE: we use the context manager to avoid "threading" an additional `is_final_def`
        # argument through various `checker` and `checkmember` functions.
        self._is_final_def = False

    @property
    def type_context(self) -> List[Optional[Type]]:
        # Delegate to the expression checker's type context stack.
        return self.expr_checker.type_context

    def reset(self) -> None:
        """Cleanup stale state that might be left over from a typechecking run.

        This allows us to reuse TypeChecker objects in fine-grained
        incremental mode.
        """
        # TODO: verify this is still actually worth it over creating new checkers
        self.partial_reported.clear()
        self.module_refs.clear()
        self.binder = ConditionalTypeBinder()
        self.type_map.clear()

        assert self.inferred_attribute_types is None
        assert self.partial_types == []
        assert self.deferred_nodes == []
        assert len(self.scope.stack) == 1
        assert self.partial_types == []

    def check_first_pass(self) -> None:
        """Type check the entire file, but defer functions with unresolved references.

        Unresolved references are forward references to variables
        whose types haven't been inferred yet.  They may occur later
        in the same file or in a different file that's being processed
        later (usually due to an import cycle).

        Deferred functions will be processed by check_second_pass().
        """
        self.recurse_into_functions = True
        with state.strict_optional_set(self.options.strict_optional):
            self.errors.set_file(self.path, self.tree.fullname, scope=self.tscope)
            self.tscope.enter_file(self.tree.fullname)
            with self.enter_partial_types():
                with self.binder.top_frame_context():
                    for d in self.tree.defs:
                        self.accept(d)

            assert not self.current_node_deferred

            # Verify that a module-level __all__, if present, is a
            # sequence of strings.
            all_ = self.globals.get('__all__')
            if all_ is not None and all_.type is not None:
                all_node = all_.node
                assert all_node is not None
                seq_str = self.named_generic_type('typing.Sequence',
                                                  [self.named_type('builtins.str')])
                if self.options.python_version[0] < 3:
                    seq_str = self.named_generic_type('typing.Sequence',
                                                      [self.named_type('builtins.unicode')])
                if not is_subtype(all_.type, seq_str):
                    str_seq_s, all_s = format_type_distinctly(seq_str, all_.type)
                    self.fail(message_registry.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s),
                              all_node)

            self.tscope.leave()

    def check_second_pass(self,
                          todo: Optional[Sequence[Union[DeferredNode,
                                                        FineGrainedDeferredNode]]] = None
                          ) -> bool:
        """Run second or following pass of type checking.

        This goes through deferred nodes, returning True if there were any.
        """
        self.recurse_into_functions = True
        with state.strict_optional_set(self.options.strict_optional):
            if not todo and not self.deferred_nodes:
                return False
            self.errors.set_file(self.path, self.tree.fullname, scope=self.tscope)
            self.tscope.enter_file(self.tree.fullname)
            self.pass_num += 1
            if not todo:
                todo = self.deferred_nodes
            else:
                assert not self.deferred_nodes
            self.deferred_nodes = []
            done = set()  # type: Set[Union[DeferredNodeType, FineGrainedDeferredNodeType]]
            for node, active_typeinfo in todo:
                if node in done:
                    continue
                # This is useful for debugging:
                # print("XXX in pass %d, class %s, function %s" %
                #       (self.pass_num, type_name, node.fullname or node.name))
                done.add(node)
                with self.tscope.class_scope(active_typeinfo) if active_typeinfo \
                        else nothing():
                    with self.scope.push_class(active_typeinfo) if active_typeinfo \
                            else nothing():
                        self.check_partial(node)
            self.tscope.leave()
            return True

    def check_partial(self, node: Union[DeferredNodeType,
                                        FineGrainedDeferredNodeType]) -> None:
        # Dispatch a single deferred node to the appropriate checker entry point.
        if isinstance(node, MypyFile):
            self.check_top_level(node)
        else:
            self.recurse_into_functions = True
            if isinstance(node, LambdaExpr):
                self.expr_checker.accept(node)
            else:
                self.accept(node)

    def check_top_level(self, node: MypyFile) -> None:
        """Check only the top-level of a module, skipping function definitions."""
        self.recurse_into_functions = False
        with self.enter_partial_types():
            with self.binder.top_frame_context():
                for d in node.defs:
                    d.accept(self)

        assert not self.current_node_deferred
        # TODO: Handle __all__

    def defer_node(self, node: DeferredNodeType, enclosing_class: Optional[TypeInfo]) -> None:
        """Defer a node for processing during next type-checking pass.

        Args:
            node: function/method being deferred
            enclosing_class: for methods, the class where the method is defined

        NOTE: this can't handle nested functions/methods.
        """
        # We don't freeze the entire scope since only top-level functions and methods
        # can be deferred. Only module/class level scope information is needed.
        # Module-level scope information is preserved in the TypeChecker instance.
        self.deferred_nodes.append(DeferredNode(node, enclosing_class))

    def handle_cannot_determine_type(self, name: str, context: Context) -> None:
        # Called when the type of `name` can't be determined yet; either defer
        # the enclosing function for another pass or report an error.
        node = self.scope.top_non_lambda_function()
        if self.pass_num < self.last_pass and isinstance(node, FuncDef):
            # Don't report an error yet. Just defer. Note that we don't defer
            # lambdas because they are coupled to the surrounding function
            # through the binder and the inferred type of the lambda, so it
            # would get messy.
            enclosing_class = self.scope.enclosing_class()
            self.defer_node(node, enclosing_class)
            # Set a marker so that we won't infer additional types in this
            # function. Any inferred types could be bogus, because there's at
            # least one type that we don't know.
            self.current_node_deferred = True
        else:
            self.msg.cannot_determine_type(name, context)

    def accept(self, stmt: Statement) -> None:
        """Type check a node in the given type context."""
        try:
            stmt.accept(self)
        except Exception as err:
            # Convert unexpected crashes into an internal-error report with
            # source position information.
            report_internal_error(err, self.errors.file, stmt.line, self.errors,
                                  self.options)

    def accept_loop(self, body: Statement, else_body: Optional[Statement] = None, *,
                    exit_condition: Optional[Expression] = None) -> None:
        """Repeatedly type check a loop body until the frame doesn't change.

        If exit_condition is set, assume it must be False on exit from the loop.

        Then check the else_body.
        """
        # The outer frame accumulates the results of all iterations
        with self.binder.frame_context(can_skip=False):
            while True:
                with self.binder.frame_context(can_skip=True,
                                               break_frame=2, continue_frame=1):
                    self.accept(body)
                if not self.binder.last_pop_changed:
                    # Fixed point reached -- types no longer change.
                    break
            if exit_condition:
                _, else_map = self.find_isinstance_check(exit_condition)
                self.push_type_map(else_map)
            if else_body:
                self.accept(else_body)

    #
    # Definitions
    #

    def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
        if not self.recurse_into_functions:
            return
        with self.tscope.function_scope(defn):
            self._visit_overloaded_func_def(defn)

    def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
        # Check every overload item, the implementation, override legality and
        # overlap between alternatives.
        num_abstract = 0
        if not defn.items:
            # In this case we have already complained about none of these being
            # valid overloads.
            return None
        if len(defn.items) == 1:
            self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, defn)

        if defn.is_property:
            # HACK: Infer the type of the property.
            self.visit_decorator(cast(Decorator, defn.items[0]))
        for fdef in defn.items:
            assert isinstance(fdef, Decorator)
            self.check_func_item(fdef.func, name=fdef.func.name)
            if fdef.func.is_abstract:
                num_abstract += 1
        if num_abstract not in (0, len(defn.items)):
            self.fail(message_registry.INCONSISTENT_ABSTRACT_OVERLOAD, defn)
        if defn.impl:
            defn.impl.accept(self)
        if defn.info:
            self.check_method_override(defn)
            self.check_inplace_operator_method(defn)
        if not defn.is_property:
            self.check_overlapping_overloads(defn)
        return None

    def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None:
        # At this point we should have set the impl already, and all remaining
        # items are decorators

        # Compute some info about the implementation (if it exists) for use below
        impl_type = None  # type: Optional[CallableType]
        if defn.impl:
            if isinstance(defn.impl, FuncDef):
                inner_type = defn.impl.type  # type: Optional[Type]
            elif isinstance(defn.impl, Decorator):
                inner_type = defn.impl.var.type
            else:
                assert False, "Impl isn't the right type"
            # This can happen if we've got an overload with a different
            # decorator or if the implementation is untyped -- we gave up on the types.
            inner_type = get_proper_type(inner_type)
            if inner_type is not None and not isinstance(inner_type, AnyType):
                assert isinstance(inner_type, CallableType)
                impl_type = inner_type

        is_descriptor_get = defn.info and defn.name == "__get__"
        for i, item in enumerate(defn.items):
            # TODO overloads involving decorators
            assert isinstance(item, Decorator)
            sig1 = self.function_type(item.func)
            assert isinstance(sig1, CallableType)

            # Compare this alternative against every later one.
            for j, item2 in enumerate(defn.items[i + 1:]):
                assert isinstance(item2, Decorator)
                sig2 = self.function_type(item2.func)
                assert isinstance(sig2, CallableType)

                if not are_argument_counts_overlapping(sig1, sig2):
                    continue

                if overload_can_never_match(sig1, sig2):
                    self.msg.overloaded_signature_will_never_match(
                        i + 1, i + j + 2, item2.func)
                elif not is_descriptor_get:
                    # Note: we force mypy to check overload signatures in strict-optional mode
                    # so we don't incorrectly report errors when a user tries typing an overload
                    # that happens to have a 'if the argument is None' fallback.
                    #
                    # For example, the following is fine in strict-optional mode but would throw
                    # the unsafe overlap error when strict-optional is disabled:
                    #
                    #     @overload
                    #     def foo(x: None) -> int: ...
                    #     @overload
                    #     def foo(x: str) -> str: ...
                    #
                    # See Python 2's map function for a concrete example of this kind of overload.
                    with state.strict_optional_set(True):
                        if is_unsafe_overlapping_overload_signatures(sig1, sig2):
                            self.msg.overloaded_signatures_overlap(
                                i + 1, i + j + 2, item.func)

            if impl_type is not None:
                assert defn.impl is not None

                # We perform a unification step that's very similar to what
                # 'is_callable_compatible' would have done if we had set
                # 'unify_generics' to True -- the only difference is that
                # we check and see if the impl_type's return value is a
                # *supertype* of the overload alternative, not a *subtype*.
                #
                # This is to match the direction the implementation's return
                # needs to be compatible in.
                if impl_type.variables:
                    impl = unify_generic_callable(impl_type, sig1,
                                                  ignore_return=False,
                                                  return_constraint_direction=SUPERTYPE_OF)
                    if impl is None:
                        self.msg.overloaded_signatures_typevar_specific(i + 1, defn.impl)
                        continue
                else:
                    impl = impl_type

                # Prevent extra noise from inconsistent use of @classmethod by copying
                # the first arg from the method being checked against.
                if sig1.arg_types and defn.info:
                    impl = impl.copy_modified(arg_types=[sig1.arg_types[0]]
                                              + impl.arg_types[1:])

                # Is the overload alternative's arguments subtypes of the implementation's?
                if not is_callable_compatible(impl, sig1,
                                              is_compat=is_subtype_no_promote,
                                              ignore_return=True):
                    self.msg.overloaded_signatures_arg_specific(i + 1, defn.impl)

                # Is the overload alternative's return type a subtype of the implementation's?
                if not is_subtype_no_promote(sig1.ret_type, impl.ret_type):
                    self.msg.overloaded_signatures_ret_specific(i + 1, defn.impl)

    # Here's the scoop about generators and coroutines.
    #
    # There are two kinds of generators: classic generators (functions
    # with `yield` or `yield from` in the body) and coroutines
    # (functions declared with `async def`).  The latter are specified
    # in PEP 492 and only available in Python >= 3.5.
    #
    # Classic generators can be parameterized with three types:
    # - ty is the Yield type (the type of y in `yield y`)
    # - tc is the type reCeived by yield (the type of c in `c = yield`).
    # - tr is the Return type (the type of r in `return r`)
    #
    # A classic generator must define a return type that's either
    # `Generator[ty, tc, tr]`, Iterator[ty], or Iterable[ty] (or
    # object or Any).  If tc/tr are not given, both are None.
    #
    # A coroutine must define a return type corresponding to tr; the
    # other two are unconstrained.  The "external" return type (seen
    # by the caller) is Awaitable[tr].
    #
    # In addition, there's the synthetic type AwaitableGenerator: it
    # inherits from both Awaitable and Generator and can be used both
    # in `yield from` and in `await`.  This type is set automatically
    # for functions decorated with `@types.coroutine` or
    # `@asyncio.coroutine`.  Its single parameter corresponds to tr.
    #
    # PEP 525 adds a new type, the asynchronous generator, which was
    # first released in Python 3.6.  Async generators are `async def`
    # functions that can also `yield` values.  They can be parameterized
    # with two types, ty and tc, because they cannot return a value.
    #
    # There are several useful methods, each taking a type t and a
    # flag c indicating whether it's for a generator or coroutine:
    #
    # - is_generator_return_type(t, c) returns whether t is a Generator,
    #   Iterator, Iterable (if not c), or Awaitable (if c), or
    #   AwaitableGenerator (regardless of c).
    # - is_async_generator_return_type(t) returns whether t is an
    #   AsyncGenerator.
    # - get_generator_yield_type(t, c) returns ty.
    # - get_generator_receive_type(t, c) returns tc.
    # - get_generator_return_type(t, c) returns tr.

    def is_generator_return_type(self, typ: Type, is_coroutine: bool) -> bool:
        """Is `typ` a valid type for a generator/coroutine?

        True if `typ` is a *supertype* of Generator or Awaitable.
        Also true it it's *exactly* AwaitableGenerator (modulo type parameters).
        """
        typ = get_proper_type(typ)
        if is_coroutine:
            # This means we're in Python 3.5 or later.
            at = self.named_generic_type('typing.Awaitable',
                                         [AnyType(TypeOfAny.special_form)])
            if is_subtype(at, typ):
                return True
        else:
            any_type = AnyType(TypeOfAny.special_form)
            gt = self.named_generic_type('typing.Generator',
                                         [any_type, any_type, any_type])
            if is_subtype(gt, typ):
                return True
        return isinstance(typ, Instance) and typ.type.fullname == 'typing.AwaitableGenerator'

    def is_async_generator_return_type(self, typ: Type) -> bool:
        """Is `typ` a valid type for an async generator?

        True if `typ` is a supertype of AsyncGenerator.
""" try: any_type = AnyType(TypeOfAny.special_form) agt = self.named_generic_type('typing.AsyncGenerator', [any_type, any_type]) except KeyError: # we're running on a version of typing that doesn't have AsyncGenerator yet return False return is_subtype(agt, typ) def get_generator_yield_type(self, return_type: Type, is_coroutine: bool) -> Type: """Given the declared return type of a generator (t), return the type it yields (ty).""" return_type = get_proper_type(return_type) if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) elif (not self.is_generator_return_type(return_type, is_coroutine) and not self.is_async_generator_return_type(return_type)): # If the function doesn't have a proper Generator (or # Awaitable) return type, anything is permissible. return AnyType(TypeOfAny.from_error) elif not isinstance(return_type, Instance): # Same as above, but written as a separate branch so the typechecker can understand. return AnyType(TypeOfAny.from_error) elif return_type.type.fullname == 'typing.Awaitable': # Awaitable: ty is Any. return AnyType(TypeOfAny.special_form) elif return_type.args: # AwaitableGenerator, Generator, AsyncGenerator, Iterator, or Iterable; ty is args[0]. ret_type = return_type.args[0] # TODO not best fix, better have dedicated yield token return ret_type else: # If the function's declared supertype of Generator has no type # parameters (i.e. is `object`), then the yielded values can't # be accessed so any type is acceptable. IOW, ty is Any. 
# (However, see https://github.com/python/mypy/issues/1933) return AnyType(TypeOfAny.special_form) def get_generator_receive_type(self, return_type: Type, is_coroutine: bool) -> Type: """Given a declared generator return type (t), return the type its yield receives (tc).""" return_type = get_proper_type(return_type) if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) elif (not self.is_generator_return_type(return_type, is_coroutine) and not self.is_async_generator_return_type(return_type)): # If the function doesn't have a proper Generator (or # Awaitable) return type, anything is permissible. return AnyType(TypeOfAny.from_error) elif not isinstance(return_type, Instance): # Same as above, but written as a separate branch so the typechecker can understand. return AnyType(TypeOfAny.from_error) elif return_type.type.fullname == 'typing.Awaitable': # Awaitable, AwaitableGenerator: tc is Any. return AnyType(TypeOfAny.special_form) elif (return_type.type.fullname in ('typing.Generator', 'typing.AwaitableGenerator') and len(return_type.args) >= 3): # Generator: tc is args[1]. return return_type.args[1] elif return_type.type.fullname == 'typing.AsyncGenerator' and len(return_type.args) >= 2: return return_type.args[1] else: # `return_type` is a supertype of Generator, so callers won't be able to send it # values. IOW, tc is None. return NoneType() def get_coroutine_return_type(self, return_type: Type) -> Type: return_type = get_proper_type(return_type) if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) assert isinstance(return_type, Instance), "Should only be called on coroutine functions." # Note: return type is the 3rd type parameter of Coroutine. 
        return return_type.args[2]

    def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Type:
        """Given the declared return type of a generator (t), return the type it returns (tr)."""
        return_type = get_proper_type(return_type)

        if isinstance(return_type, AnyType):
            return AnyType(TypeOfAny.from_another_any, source_any=return_type)
        elif not self.is_generator_return_type(return_type, is_coroutine):
            # If the function doesn't have a proper Generator (or
            # Awaitable) return type, anything is permissible.
            return AnyType(TypeOfAny.from_error)
        elif not isinstance(return_type, Instance):
            # Same as above, but written as a separate branch so the typechecker can understand.
            return AnyType(TypeOfAny.from_error)
        elif return_type.type.fullname == 'typing.Awaitable' and len(return_type.args) == 1:
            # Awaitable: tr is args[0].
            return return_type.args[0]
        elif (return_type.type.fullname in ('typing.Generator', 'typing.AwaitableGenerator')
                and len(return_type.args) >= 3):
            # AwaitableGenerator, Generator: tr is args[2].
            return return_type.args[2]
        else:
            # Supertype of Generator (Iterator, Iterable, object): tr is any.
            return AnyType(TypeOfAny.special_form)

    def visit_func_def(self, defn: FuncDef) -> None:
        if not self.recurse_into_functions:
            return
        with self.tscope.function_scope(defn):
            self._visit_func_def(defn)

    def _visit_func_def(self, defn: FuncDef) -> None:
        """Type check a function definition."""
        self.check_func_item(defn, name=defn.name)
        if defn.info:
            if not defn.is_dynamic() and not defn.is_overload and not defn.is_decorated:
                # If the definition is the implementation for an
                # overload, the legality of the override has already
                # been typechecked, and decorated methods will be
                # checked when the decorator is.
                self.check_method_override(defn)
            self.check_inplace_operator_method(defn)
        if defn.original_def:
            # Override previous definition.
            new_type = self.function_type(defn)
            if isinstance(defn.original_def, FuncDef):
                # Function definition overrides function definition.
                if not is_same_type(new_type, self.function_type(defn.original_def)):
                    self.msg.incompatible_conditional_function_def(defn)
            else:
                # Function definition overrides a variable initialized via assignment or a
                # decorated function.
                orig_type = defn.original_def.type
                if orig_type is None:
                    # XXX This can be None, as happens in
                    # test_testcheck_TypeCheckSuite.testRedefinedFunctionInTryWithElse
                    self.msg.note("Internal mypy error checking function redefinition", defn)
                    return
                if isinstance(orig_type, PartialType):
                    if orig_type.type is None:
                        # Ah this is a partial type. Give it the type of the function.
                        orig_def = defn.original_def
                        if isinstance(orig_def, Decorator):
                            var = orig_def.var
                        else:
                            var = orig_def
                        partial_types = self.find_partial_types(var)
                        if partial_types is not None:
                            var.type = new_type
                            del partial_types[var]
                        else:
                            # Trying to redefine something like partial empty list as function.
                            self.fail(message_registry.INCOMPATIBLE_REDEFINITION, defn)
                    else:
                        # TODO: Update conditional type binder.
                        self.check_subtype(new_type, orig_type, defn,
                                           message_registry.INCOMPATIBLE_REDEFINITION,
                                           'redefinition with type',
                                           'original type')

    def check_func_item(self, defn: FuncItem,
                        type_override: Optional[CallableType] = None,
                        name: Optional[str] = None) -> None:
        """Type check a function.

        If type_override is provided, use it as the function type.
        """
        self.dynamic_funcs.append(defn.is_dynamic() and not type_override)

        with self.enter_partial_types(is_function=True):
            typ = self.function_type(defn)
            if type_override:
                typ = type_override.copy_modified(line=typ.line, column=typ.column)
            if isinstance(typ, CallableType):
                with self.enter_attribute_inference_context():
                    self.check_func_def(defn, typ, name)
            else:
                raise RuntimeError('Not supported')

        self.dynamic_funcs.pop()
        self.current_node_deferred = False

        if name == '__exit__':
            self.check__exit__return_type(defn)

    @contextmanager
    def enter_attribute_inference_context(self) -> Iterator[None]:
        # Collect inferred attribute types in a fresh dict for the duration of
        # the context, restoring the previous collection afterwards.
        old_types = self.inferred_attribute_types
        self.inferred_attribute_types = {}
        yield None
        self.inferred_attribute_types = old_types

    def check_func_def(self, defn: FuncItem, typ: CallableType, name: Optional[str]) -> None:
        """Type check a function definition."""
        # (fragment: this definition continues past the end of this view)
        # Expand type variables with value restrictions to ordinary types.
        expanded = self.expand_typevars(defn, typ)
        for item, typ in expanded:
            old_binder = self.binder
            self.binder = ConditionalTypeBinder()
            with self.binder.top_frame_context():
                defn.expanded.append(item)

                # We may be checking a function definition or an anonymous
                # function. In the first case, set up another reference with the
                # precise type.
                if isinstance(item, FuncDef):
                    fdef = item
                    # Check if __init__ has an invalid, non-None return type.
if (fdef.info and fdef.name in ('__init__', '__init_subclass__') and not isinstance(get_proper_type(typ.ret_type), NoneType) and not self.dynamic_funcs[-1]): self.fail(message_registry.MUST_HAVE_NONE_RETURN_TYPE.format(fdef.name), item) # Check validity of __new__ signature if fdef.info and fdef.name == '__new__': self.check___new___signature(fdef, typ) self.check_for_missing_annotations(fdef) if self.options.disallow_any_unimported: if fdef.type and isinstance(fdef.type, CallableType): ret_type = fdef.type.ret_type if has_any_from_unimported_type(ret_type): self.msg.unimported_type_becomes_any("Return type", ret_type, fdef) for idx, arg_type in enumerate(fdef.type.arg_types): if has_any_from_unimported_type(arg_type): prefix = "Argument {} to \"{}\"".format(idx + 1, fdef.name) self.msg.unimported_type_becomes_any(prefix, arg_type, fdef) check_for_explicit_any(fdef.type, self.options, self.is_typeshed_stub, self.msg, context=fdef) if name: # Special method names if defn.info and self.is_reverse_op_method(name): self.check_reverse_op_method(item, typ, name, defn) elif name in ('__getattr__', '__getattribute__'): self.check_getattr_method(typ, defn, name) elif name == '__setattr__': self.check_setattr_method(typ, defn) # Refuse contravariant return type variable if isinstance(typ.ret_type, TypeVarType): if typ.ret_type.variance == CONTRAVARIANT: self.fail(message_registry.RETURN_TYPE_CANNOT_BE_CONTRAVARIANT, typ.ret_type) # Check that Generator functions have the appropriate return type. if defn.is_generator: if defn.is_async_generator: if not self.is_async_generator_return_type(typ.ret_type): self.fail(message_registry.INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR, typ) else: if not self.is_generator_return_type(typ.ret_type, defn.is_coroutine): self.fail(message_registry.INVALID_RETURN_TYPE_FOR_GENERATOR, typ) # Python 2 generators aren't allowed to return values. 
orig_ret_type = get_proper_type(typ.ret_type) if (self.options.python_version[0] == 2 and isinstance(orig_ret_type, Instance) and orig_ret_type.type.fullname == 'typing.Generator'): if not isinstance(get_proper_type(orig_ret_type.args[2]), (NoneType, AnyType)): self.fail(message_registry.INVALID_GENERATOR_RETURN_ITEM_TYPE, typ) # Fix the type if decorated with `@types.coroutine` or `@asyncio.coroutine`. if defn.is_awaitable_coroutine: # Update the return type to AwaitableGenerator. # (This doesn't exist in typing.py, only in typing.pyi.) t = typ.ret_type c = defn.is_coroutine ty = self.get_generator_yield_type(t, c) tc = self.get_generator_receive_type(t, c) if c: tr = self.get_coroutine_return_type(t) else: tr = self.get_generator_return_type(t, c) ret_type = self.named_generic_type('typing.AwaitableGenerator', [ty, tc, tr, t]) typ = typ.copy_modified(ret_type=ret_type) defn.type = typ # Push return type. self.return_types.append(typ.ret_type) # Store argument types. for i in range(len(typ.arg_types)): arg_type = typ.arg_types[i] with self.scope.push_function(defn): # We temporary push the definition to get the self type as # visible from *inside* of this function/method. ref_type = self.scope.active_self_type() # type: Optional[Type] if (isinstance(defn, FuncDef) and ref_type is not None and i == 0 and not defn.is_static and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]): isclass = defn.is_class or defn.name in ('__new__', '__init_subclass__') if isclass: ref_type = mypy.types.TypeType.make_normalized(ref_type) erased = get_proper_type(erase_to_bound(arg_type)) if not is_subtype_ignoring_tvars(ref_type, erased): note = None if (isinstance(erased, Instance) and erased.type.is_protocol or isinstance(erased, TypeType) and isinstance(erased.item, Instance) and erased.item.type.is_protocol): # We allow the explicit self-type to be not a supertype of # the current class if it is a protocol. 
For such cases # the consistency check will be performed at call sites. msg = None elif typ.arg_names[i] in {'self', 'cls'}: if (self.options.python_version[0] < 3 and is_same_type(erased, arg_type) and not isclass): msg = message_registry.INVALID_SELF_TYPE_OR_EXTRA_ARG note = '(Hint: typically annotations omit the type for self)' else: msg = message_registry.ERASED_SELF_TYPE_NOT_SUPERTYPE.format( erased, ref_type) else: msg = message_registry.MISSING_OR_INVALID_SELF_TYPE if msg: self.fail(msg, defn) if note: self.note(note, defn) elif isinstance(arg_type, TypeVarType): # Refuse covariant parameter type variables # TODO: check recursively for inner type variables if ( arg_type.variance == COVARIANT and defn.name not in ('__init__', '__new__') ): ctx = arg_type # type: Context if ctx.line < 0: ctx = typ self.fail(message_registry.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, ctx) if typ.arg_kinds[i] == nodes.ARG_STAR: # builtins.tuple[T] is typing.Tuple[T, ...] arg_type = self.named_generic_type('builtins.tuple', [arg_type]) elif typ.arg_kinds[i] == nodes.ARG_STAR2: arg_type = self.named_generic_type('builtins.dict', [self.str_type(), arg_type]) item.arguments[i].variable.type = arg_type # Type check initialization expressions. body_is_trivial = self.is_trivial_body(defn.body) self.check_default_args(item, body_is_trivial) # Type check body in a new scope. with self.binder.top_frame_context(): with self.scope.push_function(defn): # We suppress reachability warnings when we use TypeVars with value # restrictions: we only want to report a warning if a certain statement is # marked as being suppressed in *all* of the expansions, but we currently # have no good way of doing this. 
# # TODO: Find a way of working around this limitation if len(expanded) >= 2: self.binder.suppress_unreachable_warnings() self.accept(item.body) unreachable = self.binder.is_unreachable() if (self.options.warn_no_return and not unreachable): if (defn.is_generator or is_named_instance(self.return_types[-1], 'typing.AwaitableGenerator')): return_type = self.get_generator_return_type(self.return_types[-1], defn.is_coroutine) elif defn.is_coroutine: return_type = self.get_coroutine_return_type(self.return_types[-1]) else: return_type = self.return_types[-1] return_type = get_proper_type(return_type) if not isinstance(return_type, (NoneType, AnyType)) and not body_is_trivial: # Control flow fell off the end of a function that was # declared to return a non-None type and is not # entirely pass/Ellipsis/raise NotImplementedError. if isinstance(return_type, UninhabitedType): # This is a NoReturn function self.msg.fail(message_registry.INVALID_IMPLICIT_RETURN, defn) else: self.msg.fail(message_registry.MISSING_RETURN_STATEMENT, defn, code=codes.RETURN) self.return_types.pop() self.binder = old_binder def check_default_args(self, item: FuncItem, body_is_trivial: bool) -> None: for arg in item.arguments: if arg.initializer is None: continue if body_is_trivial and isinstance(arg.initializer, EllipsisExpr): continue name = arg.variable.name msg = 'Incompatible default for ' if name.startswith('__tuple_arg_'): msg += "tuple argument {}".format(name[12:]) else: msg += 'argument "{}"'.format(name) self.check_simple_assignment( arg.variable.type, arg.initializer, context=arg.initializer, msg=msg, lvalue_name='argument', rvalue_name='default', code=codes.ASSIGNMENT) def is_forward_op_method(self, method_name: str) -> bool: if self.options.python_version[0] == 2 and method_name == '__div__': return True else: return method_name in nodes.reverse_op_methods def is_reverse_op_method(self, method_name: str) -> bool: if self.options.python_version[0] == 2 and method_name == '__rdiv__': 
return True else: return method_name in nodes.reverse_op_method_set def check_for_missing_annotations(self, fdef: FuncItem) -> None: # Check for functions with unspecified/not fully specified types. def is_unannotated_any(t: Type) -> bool: if not isinstance(t, ProperType): return False return isinstance(t, AnyType) and t.type_of_any == TypeOfAny.unannotated has_explicit_annotation = (isinstance(fdef.type, CallableType) and any(not is_unannotated_any(t) for t in fdef.type.arg_types + [fdef.type.ret_type])) show_untyped = not self.is_typeshed_stub or self.options.warn_incomplete_stub check_incomplete_defs = self.options.disallow_incomplete_defs and has_explicit_annotation if show_untyped and (self.options.disallow_untyped_defs or check_incomplete_defs): if fdef.type is None and self.options.disallow_untyped_defs: if (not fdef.arguments or (len(fdef.arguments) == 1 and (fdef.arg_names[0] == 'self' or fdef.arg_names[0] == 'cls'))): self.fail(message_registry.RETURN_TYPE_EXPECTED, fdef, code=codes.NO_UNTYPED_DEF) if not has_return_statement(fdef) and not fdef.is_generator: self.note('Use "-> None" if function does not return a value', fdef, code=codes.NO_UNTYPED_DEF) else: self.fail(message_registry.FUNCTION_TYPE_EXPECTED, fdef, code=codes.NO_UNTYPED_DEF) elif isinstance(fdef.type, CallableType): ret_type = get_proper_type(fdef.type.ret_type) if is_unannotated_any(ret_type): self.fail(message_registry.RETURN_TYPE_EXPECTED, fdef, code=codes.NO_UNTYPED_DEF) elif fdef.is_generator: if is_unannotated_any(self.get_generator_return_type(ret_type, fdef.is_coroutine)): self.fail(message_registry.RETURN_TYPE_EXPECTED, fdef, code=codes.NO_UNTYPED_DEF) elif fdef.is_coroutine and isinstance(ret_type, Instance): if is_unannotated_any(self.get_coroutine_return_type(ret_type)): self.fail(message_registry.RETURN_TYPE_EXPECTED, fdef, code=codes.NO_UNTYPED_DEF) if any(is_unannotated_any(t) for t in fdef.type.arg_types): self.fail(message_registry.ARGUMENT_TYPE_EXPECTED, fdef, 
code=codes.NO_UNTYPED_DEF) def check___new___signature(self, fdef: FuncDef, typ: CallableType) -> None: self_type = fill_typevars_with_any(fdef.info) bound_type = bind_self(typ, self_type, is_classmethod=True) # Check that __new__ (after binding cls) returns an instance # type (or any). if not isinstance(get_proper_type(bound_type.ret_type), (AnyType, Instance, TupleType)): self.fail( message_registry.NON_INSTANCE_NEW_TYPE.format( format_type(bound_type.ret_type)), fdef) else: # And that it returns a subtype of the class self.check_subtype( bound_type.ret_type, self_type, fdef, message_registry.INVALID_NEW_TYPE, 'returns', 'but must return a subtype of' ) def is_trivial_body(self, block: Block) -> bool: """Returns 'true' if the given body is "trivial" -- if it contains just a "pass", "..." (ellipsis), or "raise NotImplementedError()". A trivial body may also start with a statement containing just a string (e.g. a docstring). Note: functions that raise other kinds of exceptions do not count as "trivial". We use this function to help us determine when it's ok to relax certain checks on body, but functions that raise arbitrary exceptions are more likely to do non-trivial work. For example: def halt(self, reason: str = ...) -> NoReturn: raise MyCustomError("Fatal error: " + reason, self.line, self.context) A function that raises just NotImplementedError is much less likely to be this complex. """ body = block.body # Skip a docstring if (body and isinstance(body[0], ExpressionStmt) and isinstance(body[0].expr, (StrExpr, UnicodeExpr))): body = block.body[1:] if len(body) == 0: # There's only a docstring (or no body at all). 
return True elif len(body) > 1: return False stmt = body[0] if isinstance(stmt, RaiseStmt): expr = stmt.expr if expr is None: return False if isinstance(expr, CallExpr): expr = expr.callee return (isinstance(expr, NameExpr) and expr.fullname == 'builtins.NotImplementedError') return (isinstance(stmt, PassStmt) or (isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr))) def check_reverse_op_method(self, defn: FuncItem, reverse_type: CallableType, reverse_name: str, context: Context) -> None: """Check a reverse operator method such as __radd__.""" # Decides whether it's worth calling check_overlapping_op_methods(). # This used to check for some very obscure scenario. It now # just decides whether it's worth calling # check_overlapping_op_methods(). assert defn.info # First check for a valid signature method_type = CallableType([AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)], [nodes.ARG_POS, nodes.ARG_POS], [None, None], AnyType(TypeOfAny.special_form), self.named_type('builtins.function')) if not is_subtype(reverse_type, method_type): self.msg.invalid_signature(reverse_type, context) return if reverse_name in ('__eq__', '__ne__'): # These are defined for all objects => can't cause trouble. return # With 'Any' or 'object' return type we are happy, since any possible # return value is valid. 
ret_type = get_proper_type(reverse_type.ret_type) if isinstance(ret_type, AnyType): return if isinstance(ret_type, Instance): if ret_type.type.fullname == 'builtins.object': return if reverse_type.arg_kinds[0] == ARG_STAR: reverse_type = reverse_type.copy_modified(arg_types=[reverse_type.arg_types[0]] * 2, arg_kinds=[ARG_POS] * 2, arg_names=[reverse_type.arg_names[0], "_"]) assert len(reverse_type.arg_types) >= 2 if self.options.python_version[0] == 2 and reverse_name == '__rdiv__': forward_name = '__div__' else: forward_name = nodes.normal_from_reverse_op[reverse_name] forward_inst = get_proper_type(reverse_type.arg_types[1]) if isinstance(forward_inst, TypeVarType): forward_inst = get_proper_type(forward_inst.upper_bound) elif isinstance(forward_inst, TupleType): forward_inst = tuple_fallback(forward_inst) elif isinstance(forward_inst, (FunctionLike, TypedDictType, LiteralType)): forward_inst = forward_inst.fallback if isinstance(forward_inst, TypeType): item = forward_inst.item if isinstance(item, Instance): opt_meta = item.type.metaclass_type if opt_meta is not None: forward_inst = opt_meta if not (isinstance(forward_inst, (Instance, UnionType)) and forward_inst.has_readable_member(forward_name)): return forward_base = reverse_type.arg_types[1] forward_type = self.expr_checker.analyze_external_member_access(forward_name, forward_base, context=defn) self.check_overlapping_op_methods(reverse_type, reverse_name, defn.info, forward_type, forward_name, forward_base, context=defn) def check_overlapping_op_methods(self, reverse_type: CallableType, reverse_name: str, reverse_class: TypeInfo, forward_type: Type, forward_name: str, forward_base: Type, context: Context) -> None: """Check for overlapping method and reverse method signatures. This function assumes that: - The reverse method has valid argument count and kinds. - If the reverse operator method accepts some argument of type X, the forward operator method also belong to class X. 
For example, if we have the reverse operator `A.__radd__(B)`, then the corresponding forward operator must have the type `B.__add__(...)`. """ # Note: Suppose we have two operator methods "A.__rOP__(B) -> R1" and # "B.__OP__(C) -> R2". We check if these two methods are unsafely overlapping # by using the following algorithm: # # 1. Rewrite "B.__OP__(C) -> R1" to "temp1(B, C) -> R1" # # 2. Rewrite "A.__rOP__(B) -> R2" to "temp2(B, A) -> R2" # # 3. Treat temp1 and temp2 as if they were both variants in the same # overloaded function. (This mirrors how the Python runtime calls # operator methods: we first try __OP__, then __rOP__.) # # If the first signature is unsafely overlapping with the second, # report an error. # # 4. However, if temp1 shadows temp2 (e.g. the __rOP__ method can never # be called), do NOT report an error. # # This behavior deviates from how we handle overloads -- many of the # modules in typeshed seem to define __OP__ methods that shadow the # corresponding __rOP__ method. # # Note: we do not attempt to handle unsafe overlaps related to multiple # inheritance. (This is consistent with how we handle overloads: we also # do not try checking unsafe overlaps due to multiple inheritance there.) 
for forward_item in union_items(forward_type): if isinstance(forward_item, CallableType): if self.is_unsafe_overlapping_op(forward_item, forward_base, reverse_type): self.msg.operator_method_signatures_overlap( reverse_class, reverse_name, forward_base, forward_name, context) elif isinstance(forward_item, Overloaded): for item in forward_item.items(): if self.is_unsafe_overlapping_op(item, forward_base, reverse_type): self.msg.operator_method_signatures_overlap( reverse_class, reverse_name, forward_base, forward_name, context) elif not isinstance(forward_item, AnyType): self.msg.forward_operator_not_callable(forward_name, context) def is_unsafe_overlapping_op(self, forward_item: CallableType, forward_base: Type, reverse_type: CallableType) -> bool: # TODO: check argument kinds? if len(forward_item.arg_types) < 1: # Not a valid operator method -- can't succeed anyway. return False # Erase the type if necessary to make sure we don't have a single # TypeVar in forward_tweaked. (Having a function signature containing # just a single TypeVar can lead to unpredictable behavior.) forward_base_erased = forward_base if isinstance(forward_base, TypeVarType): forward_base_erased = erase_to_bound(forward_base) # Construct normalized function signatures corresponding to the # operator methods. The first argument is the left operand and the # second operand is the right argument -- we switch the order of # the arguments of the reverse method. 
forward_tweaked = forward_item.copy_modified( arg_types=[forward_base_erased, forward_item.arg_types[0]], arg_kinds=[nodes.ARG_POS] * 2, arg_names=[None] * 2, ) reverse_tweaked = reverse_type.copy_modified( arg_types=[reverse_type.arg_types[1], reverse_type.arg_types[0]], arg_kinds=[nodes.ARG_POS] * 2, arg_names=[None] * 2, ) reverse_base_erased = reverse_type.arg_types[0] if isinstance(reverse_base_erased, TypeVarType): reverse_base_erased = erase_to_bound(reverse_base_erased) if is_same_type(reverse_base_erased, forward_base_erased): return False elif is_subtype(reverse_base_erased, forward_base_erased): first = reverse_tweaked second = forward_tweaked else: first = forward_tweaked second = reverse_tweaked return is_unsafe_overlapping_overload_signatures(first, second) def check_inplace_operator_method(self, defn: FuncBase) -> None: """Check an inplace operator method such as __iadd__. They cannot arbitrarily overlap with __add__. """ method = defn.name if method not in nodes.inplace_operator_methods: return typ = bind_self(self.function_type(defn)) cls = defn.info other_method = '__' + method[3:] if cls.has_readable_member(other_method): instance = fill_typevars(cls) typ2 = get_proper_type(self.expr_checker.analyze_external_member_access( other_method, instance, defn)) fail = False if isinstance(typ2, FunctionLike): if not is_more_general_arg_prefix(typ, typ2): fail = True else: # TODO overloads fail = True if fail: self.msg.signatures_incompatible(method, other_method, defn) def check_getattr_method(self, typ: Type, context: Context, name: str) -> None: if len(self.scope.stack) == 1: # module scope if name == '__getattribute__': self.msg.fail(message_registry.MODULE_LEVEL_GETATTRIBUTE, context) return # __getattr__ is fine at the module level as of Python 3.7 (PEP 562). We could # show an error for Python < 3.7, but that would be annoying in code that supports # both 3.7 and older versions. 
method_type = CallableType([self.named_type('builtins.str')], [nodes.ARG_POS], [None], AnyType(TypeOfAny.special_form), self.named_type('builtins.function')) elif self.scope.active_class(): method_type = CallableType([AnyType(TypeOfAny.special_form), self.named_type('builtins.str')], [nodes.ARG_POS, nodes.ARG_POS], [None, None], AnyType(TypeOfAny.special_form), self.named_type('builtins.function')) else: return if not is_subtype(typ, method_type): self.msg.invalid_signature_for_special_method(typ, context, name) def check_setattr_method(self, typ: Type, context: Context) -> None: if not self.scope.active_class(): return method_type = CallableType([AnyType(TypeOfAny.special_form), self.named_type('builtins.str'), AnyType(TypeOfAny.special_form)], [nodes.ARG_POS, nodes.ARG_POS, nodes.ARG_POS], [None, None, None], NoneType(), self.named_type('builtins.function')) if not is_subtype(typ, method_type): self.msg.invalid_signature_for_special_method(typ, context, '__setattr__') def expand_typevars(self, defn: FuncItem, typ: CallableType) -> List[Tuple[FuncItem, CallableType]]: # TODO use generator subst = [] # type: List[List[Tuple[TypeVarId, Type]]] tvars = typ.variables or [] tvars = tvars[:] if defn.info: # Class type variables tvars += defn.info.defn.type_vars or [] for tvar in tvars: if tvar.values: subst.append([(tvar.id, value) for value in tvar.values]) # Make a copy of the function to check for each combination of # value restricted type variables. (Except when running mypyc, # where we need one canonical version of the function.) 
if subst and not self.options.mypyc: result = [] # type: List[Tuple[FuncItem, CallableType]] for substitutions in itertools.product(*subst): mapping = dict(substitutions) expanded = cast(CallableType, expand_type(typ, mapping)) result.append((expand_func(defn, mapping), expanded)) return result else: return [(defn, typ)] def check_method_override(self, defn: Union[FuncDef, OverloadedFuncDef, Decorator]) -> None: """Check if function definition is compatible with base classes. This may defer the method if a signature is not available in at least one base class. """ # Check against definitions in base classes. for base in defn.info.mro[1:]: if self.check_method_or_accessor_override_for_base(defn, base): # Node was deferred, we will have another attempt later. return def check_method_or_accessor_override_for_base(self, defn: Union[FuncDef, OverloadedFuncDef, Decorator], base: TypeInfo) -> bool: """Check if method definition is compatible with a base class. Return True if the node was deferred because one of the corresponding superclass nodes is not ready. """ if base: name = defn.name base_attr = base.names.get(name) if base_attr: # First, check if we override a final (always an error, even with Any types). if is_final_node(base_attr.node): self.msg.cant_override_final(name, base.name, defn) # Second, final can't override anything writeable independently of types. if defn.is_final: self.check_no_writable(name, base_attr.node, defn) # Check the type of override. if name not in ('__init__', '__new__', '__init_subclass__'): # Check method override # (__init__, __new__, __init_subclass__ are special). if self.check_method_override_for_base_with_name(defn, name, base): return True if name in nodes.inplace_operator_methods: # Figure out the name of the corresponding operator method. method = '__' + name[3:] # An inplace operator method such as __iadd__ might not be # always introduced safely if a base class defined __add__. 
# TODO can't come up with an example where this is # necessary; now it's "just in case" return self.check_method_override_for_base_with_name(defn, method, base) return False def check_method_override_for_base_with_name( self, defn: Union[FuncDef, OverloadedFuncDef, Decorator], name: str, base: TypeInfo) -> bool: """Check if overriding an attribute `name` of `base` with `defn` is valid. Return True if the supertype node was not analysed yet, and `defn` was deferred. """ base_attr = base.names.get(name) if base_attr: # The name of the method is defined in the base class. # Point errors at the 'def' line (important for backward compatibility # of type ignores). if not isinstance(defn, Decorator): context = defn else: context = defn.func # Construct the type of the overriding method. if isinstance(defn, (FuncDef, OverloadedFuncDef)): typ = self.function_type(defn) # type: Type override_class_or_static = defn.is_class or defn.is_static override_class = defn.is_class else: assert defn.var.is_ready assert defn.var.type is not None typ = defn.var.type override_class_or_static = defn.func.is_class or defn.func.is_static override_class = defn.func.is_class typ = get_proper_type(typ) if isinstance(typ, FunctionLike) and not is_static(context): typ = bind_self(typ, self.scope.active_self_type(), is_classmethod=override_class) # Map the overridden method type to subtype context so that # it can be checked for compatibility. original_type = get_proper_type(base_attr.type) original_node = base_attr.node if original_type is None: if self.pass_num < self.last_pass: # If there are passes left, defer this node until next pass, # otherwise try reconstructing the method type from available information. 
self.defer_node(defn, defn.info) return True elif isinstance(original_node, (FuncDef, OverloadedFuncDef)): original_type = self.function_type(original_node) elif isinstance(original_node, Decorator): original_type = self.function_type(original_node.func) else: assert False, str(base_attr.node) if isinstance(original_node, (FuncDef, OverloadedFuncDef)): original_class_or_static = original_node.is_class or original_node.is_static elif isinstance(original_node, Decorator): fdef = original_node.func original_class_or_static = fdef.is_class or fdef.is_static else: original_class_or_static = False # a variable can't be class or static if isinstance(original_type, AnyType) or isinstance(typ, AnyType): pass elif isinstance(original_type, FunctionLike) and isinstance(typ, FunctionLike): original = self.bind_and_map_method(base_attr, original_type, defn.info, base) # Check that the types are compatible. # TODO overloaded signatures self.check_override(typ, original, defn.name, name, base.name, original_class_or_static, override_class_or_static, context) elif is_equivalent(original_type, typ): # Assume invariance for a non-callable attribute here. Note # that this doesn't affect read-only properties which can have # covariant overrides. # # TODO: Allow covariance for read-only attributes? pass else: self.msg.signature_incompatible_with_supertype( defn.name, name, base.name, context) return False def bind_and_map_method(self, sym: SymbolTableNode, typ: FunctionLike, sub_info: TypeInfo, super_info: TypeInfo) -> FunctionLike: """Bind self-type and map type variables for a method. 
Arguments: sym: a symbol that points to method definition typ: method type on the definition sub_info: class where the method is used super_info: class where the method was defined """ if (isinstance(sym.node, (FuncDef, OverloadedFuncDef, Decorator)) and not is_static(sym.node)): if isinstance(sym.node, Decorator): is_class_method = sym.node.func.is_class else: is_class_method = sym.node.is_class bound = bind_self(typ, self.scope.active_self_type(), is_class_method) else: bound = typ return cast(FunctionLike, map_type_from_supertype(bound, sub_info, super_info)) def get_op_other_domain(self, tp: FunctionLike) -> Optional[Type]: if isinstance(tp, CallableType): if tp.arg_kinds and tp.arg_kinds[0] == ARG_POS: return tp.arg_types[0] return None elif isinstance(tp, Overloaded): raw_items = [self.get_op_other_domain(it) for it in tp.items()] items = [it for it in raw_items if it] if items: return make_simplified_union(items) return None else: assert False, "Need to check all FunctionLike subtypes here" def check_override(self, override: FunctionLike, original: FunctionLike, name: str, name_in_super: str, supertype: str, original_class_or_static: bool, override_class_or_static: bool, node: Context) -> None: """Check a method override with given signatures. Arguments: override: The signature of the overriding method. original: The signature of the original supertype method. name: The name of the subtype. This and the next argument are only used for generating error messages. supertype: The name of the supertype. """ # Use boolean variable to clarify code. fail = False op_method_wider_note = False if not is_subtype(override, original, ignore_pos_arg_names=True): fail = True elif isinstance(override, Overloaded) and self.is_forward_op_method(name): # Operator method overrides cannot extend the domain, as # this could be unsafe with reverse operator methods. 
original_domain = self.get_op_other_domain(original) override_domain = self.get_op_other_domain(override) if (original_domain and override_domain and not is_subtype(override_domain, original_domain)): fail = True op_method_wider_note = True if isinstance(original, FunctionLike) and isinstance(override, FunctionLike): if original_class_or_static and not override_class_or_static: fail = True if is_private(name): fail = False if fail: emitted_msg = False if (isinstance(override, CallableType) and isinstance(original, CallableType) and len(override.arg_types) == len(original.arg_types) and override.min_args == original.min_args): # Give more detailed messages for the common case of both # signatures having the same number of arguments and no # overloads. # override might have its own generic function type # variables. If an argument or return type of override # does not have the correct subtyping relationship # with the original type even after these variables # are erased, then it is definitely an incompatibility. 
override_ids = override.type_var_ids() type_name = None if isinstance(override.definition, FuncDef): type_name = override.definition.info.name def erase_override(t: Type) -> Type: return erase_typevars(t, ids_to_erase=override_ids) for i in range(len(override.arg_types)): if not is_subtype(original.arg_types[i], erase_override(override.arg_types[i])): arg_type_in_super = original.arg_types[i] self.msg.argument_incompatible_with_supertype( i + 1, name, type_name, name_in_super, arg_type_in_super, supertype, node ) emitted_msg = True if not is_subtype(erase_override(override.ret_type), original.ret_type): self.msg.return_type_incompatible_with_supertype( name, name_in_super, supertype, original.ret_type, override.ret_type, node) emitted_msg = True elif isinstance(override, Overloaded) and isinstance(original, Overloaded): # Give a more detailed message in the case where the user is trying to # override an overload, and the subclass's overload is plausible, except # that the order of the variants are wrong. # # For example, if the parent defines the overload f(int) -> int and f(str) -> str # (in that order), and if the child swaps the two and does f(str) -> str and # f(int) -> int order = [] for child_variant in override.items(): for i, parent_variant in enumerate(original.items()): if is_subtype(child_variant, parent_variant): order.append(i) break if len(order) == len(original.items()) and order != sorted(order): self.msg.overload_signature_incompatible_with_supertype( name, name_in_super, supertype, override, node) emitted_msg = True if not emitted_msg: # Fall back to generic incompatibility message. self.msg.signature_incompatible_with_supertype( name, name_in_super, supertype, node) if op_method_wider_note: self.note("Overloaded operator methods can't have wider argument types" " in overrides", node, code=codes.OVERRIDE) def check__exit__return_type(self, defn: FuncItem) -> None: """Generate error if the return type of __exit__ is problematic. 
If __exit__ always returns False but the return type is declared as bool, mypy thinks that a with statement may "swallow" exceptions even though this is not the case, resulting in invalid reachability inference. """ if not defn.type or not isinstance(defn.type, CallableType): return ret_type = get_proper_type(defn.type.ret_type) if not has_bool_item(ret_type): return returns = all_return_statements(defn) if not returns: return if all(isinstance(ret.expr, NameExpr) and ret.expr.fullname == 'builtins.False' for ret in returns): self.msg.incorrect__exit__return(defn) def visit_class_def(self, defn: ClassDef) -> None: """Type check a class definition.""" typ = defn.info for base in typ.mro[1:]: if base.is_final: self.fail(message_registry.CANNOT_INHERIT_FROM_FINAL.format(base.name), defn) with self.tscope.class_scope(defn.info), self.enter_partial_types(is_class=True): old_binder = self.binder self.binder = ConditionalTypeBinder() with self.binder.top_frame_context(): with self.scope.push_class(defn.info): self.accept(defn.defs) self.binder = old_binder if not (defn.info.typeddict_type or defn.info.tuple_type or defn.info.is_enum): # If it is not a normal class (not a special form) check class keywords. self.check_init_subclass(defn) if not defn.has_incompatible_baseclass: # Otherwise we've already found errors; more errors are not useful self.check_multiple_inheritance(typ) if defn.decorators: sig = type_object_type(defn.info, self.named_type) # type: Type # Decorators are applied in reverse order. for decorator in reversed(defn.decorators): if (isinstance(decorator, CallExpr) and isinstance(decorator.analyzed, PromoteExpr)): # _promote is a special type checking related construct. continue dec = self.expr_checker.accept(decorator) temp = self.temp_node(sig, context=decorator) fullname = None if isinstance(decorator, RefExpr): fullname = decorator.fullname # TODO: Figure out how to have clearer error messages. # (e.g. 
"class decorator must be a function that accepts a type." sig, _ = self.expr_checker.check_call(dec, [temp], [nodes.ARG_POS], defn, callable_name=fullname) # TODO: Apply the sig to the actual TypeInfo so we can handle decorators # that completely swap out the type. (e.g. Callable[[Type[A]], Type[B]]) if typ.is_protocol and typ.defn.type_vars: self.check_protocol_variance(defn) def check_init_subclass(self, defn: ClassDef) -> None: """Check that keywords in a class definition are valid arguments for __init_subclass__(). In this example: 1 class Base: 2 def __init_subclass__(cls, thing: int): 3 pass 4 class Child(Base, thing=5): 5 def __init_subclass__(cls): 6 pass 7 Child() Base.__init_subclass__(thing=5) is called at line 4. This is what we simulate here. Child.__init_subclass__ is never called. """ if (defn.info.metaclass_type and defn.info.metaclass_type.type.fullname not in ('builtins.type', 'abc.ABCMeta')): # We can't safely check situations when both __init_subclass__ and a custom # metaclass are present. return # At runtime, only Base.__init_subclass__ will be called, so # we skip the current class itself. 
for base in defn.info.mro[1:]: if '__init_subclass__' not in base.names: continue name_expr = NameExpr(defn.name) name_expr.node = base callee = MemberExpr(name_expr, '__init_subclass__') args = list(defn.keywords.values()) arg_names = list(defn.keywords.keys()) # type: List[Optional[str]] # 'metaclass' keyword is consumed by the rest of the type machinery, # and is never passed to __init_subclass__ implementations if 'metaclass' in arg_names: idx = arg_names.index('metaclass') arg_names.pop(idx) args.pop(idx) arg_kinds = [ARG_NAMED] * len(args) call_expr = CallExpr(callee, args, arg_kinds, arg_names) call_expr.line = defn.line call_expr.column = defn.column call_expr.end_line = defn.end_line self.expr_checker.accept(call_expr, allow_none_return=True, always_allow_any=True) # We are only interested in the first Base having __init_subclass__, # all other bases have already been checked. break def check_protocol_variance(self, defn: ClassDef) -> None: """Check that protocol definition is compatible with declared variances of type variables. Note that we also prohibit declaring protocol classes as invariant if they are actually covariant/contravariant, since this may break transitivity of subtyping, see PEP 544. 
""" info = defn.info object_type = Instance(info.mro[-1], []) tvars = info.defn.type_vars for i, tvar in enumerate(tvars): up_args = [object_type if i == j else AnyType(TypeOfAny.special_form) for j, _ in enumerate(tvars)] # type: List[Type] down_args = [UninhabitedType() if i == j else AnyType(TypeOfAny.special_form) for j, _ in enumerate(tvars)] # type: List[Type] up, down = Instance(info, up_args), Instance(info, down_args) # TODO: add advanced variance checks for recursive protocols if is_subtype(down, up, ignore_declared_variance=True): expected = COVARIANT elif is_subtype(up, down, ignore_declared_variance=True): expected = CONTRAVARIANT else: expected = INVARIANT if expected != tvar.variance: self.msg.bad_proto_variance(tvar.variance, tvar.name, expected, defn) def check_multiple_inheritance(self, typ: TypeInfo) -> None: """Check for multiple inheritance related errors.""" if len(typ.bases) <= 1: # No multiple inheritance. return # Verify that inherited attributes are compatible. mro = typ.mro[1:] for i, base in enumerate(mro): # Attributes defined in both the type and base are skipped. # Normal checks for attribute compatibility should catch any problems elsewhere. non_overridden_attrs = base.names.keys() - typ.names.keys() for name in non_overridden_attrs: if is_private(name): continue for base2 in mro[i + 1:]: # We only need to check compatibility of attributes from classes not # in a subclass relationship. For subclasses, normal (single inheritance) # checks suffice (these are implemented elsewhere). 
if name in base2.names and base2 not in base.mro: self.check_compatibility(name, base, base2, typ) def determine_type_of_class_member(self, sym: SymbolTableNode) -> Optional[Type]: if sym.type is not None: return sym.type if isinstance(sym.node, FuncBase): return self.function_type(sym.node) if isinstance(sym.node, TypeInfo): # nested class return type_object_type(sym.node, self.named_type) if isinstance(sym.node, TypeVarExpr): # Use of TypeVars is rejected in an expression/runtime context, so # we don't need to check supertype compatibility for them. return AnyType(TypeOfAny.special_form) return None def check_compatibility(self, name: str, base1: TypeInfo, base2: TypeInfo, ctx: TypeInfo) -> None: """Check if attribute name in base1 is compatible with base2 in multiple inheritance. Assume base1 comes before base2 in the MRO, and that base1 and base2 don't have a direct subclass relationship (i.e., the compatibility requirement only derives from multiple inheritance). This check verifies that a definition taken from base1 (and mapped to the current class ctx), is type compatible with the definition taken from base2 (also mapped), so that unsafe subclassing like this can be detected: class A(Generic[T]): def foo(self, x: T) -> None: ... class B: def foo(self, x: str) -> None: ... class C(B, A[int]): ... # this is unsafe because... x: A[int] = C() x.foo # ...runtime type is (str) -> None, while static type is (int) -> None """ if name in ('__init__', '__new__', '__init_subclass__'): # __init__ and friends can be incompatible -- it's a special case. 
return first = base1.names[name] second = base2.names[name] first_type = get_proper_type(self.determine_type_of_class_member(first)) second_type = get_proper_type(self.determine_type_of_class_member(second)) if (isinstance(first_type, FunctionLike) and isinstance(second_type, FunctionLike)): if first_type.is_type_obj() and second_type.is_type_obj(): # For class objects only check the subtype relationship of the classes, # since we allow incompatible overrides of '__init__'/'__new__' ok = is_subtype(left=fill_typevars_with_any(first_type.type_object()), right=fill_typevars_with_any(second_type.type_object())) else: # First bind/map method types when necessary. first_sig = self.bind_and_map_method(first, first_type, ctx, base1) second_sig = self.bind_and_map_method(second, second_type, ctx, base2) ok = is_subtype(first_sig, second_sig, ignore_pos_arg_names=True) elif first_type and second_type: ok = is_equivalent(first_type, second_type) if not ok: second_node = base2[name].node if isinstance(second_node, Decorator) and second_node.func.is_property: ok = is_subtype(first_type, cast(CallableType, second_type).ret_type) else: if first_type is None: self.msg.cannot_determine_type_in_base(name, base1.name, ctx) if second_type is None: self.msg.cannot_determine_type_in_base(name, base2.name, ctx) ok = True # Final attributes can never be overridden, but can override # non-final read-only attributes. if is_final_node(second.node): self.msg.cant_override_final(name, base2.name, ctx) if is_final_node(first.node): self.check_no_writable(name, second.node, ctx) # __slots__ is special and the type can vary across class hierarchy. 
if name == '__slots__': ok = True if not ok: self.msg.base_class_definitions_incompatible(name, base1, base2, ctx) def visit_import_from(self, node: ImportFrom) -> None: self.check_import(node) def visit_import_all(self, node: ImportAll) -> None: self.check_import(node) def visit_import(self, s: Import) -> None: pass def check_import(self, node: ImportBase) -> None: for assign in node.assignments: lvalue = assign.lvalues[0] lvalue_type, _, __ = self.check_lvalue(lvalue) if lvalue_type is None: # TODO: This is broken. lvalue_type = AnyType(TypeOfAny.special_form) message = '{} "{}"'.format(message_registry.INCOMPATIBLE_IMPORT_OF, cast(NameExpr, assign.rvalue).name) self.check_simple_assignment(lvalue_type, assign.rvalue, node, msg=message, lvalue_name='local name', rvalue_name='imported name') # # Statements # def visit_block(self, b: Block) -> None: if b.is_unreachable: # This block was marked as being unreachable during semantic analysis. # It turns out any blocks marked in this way are *intentionally* marked # as unreachable -- so we don't display an error. self.binder.unreachable() return for s in b.body: if self.binder.is_unreachable(): if (self.options.warn_unreachable and not self.binder.is_unreachable_warning_suppressed() and not self.is_raising_or_empty(s)): self.msg.unreachable_statement(s) break self.accept(s) def is_raising_or_empty(self, s: Statement) -> bool: """Returns 'true' if the given statement either throws an error of some kind or is a no-op. We use this function mostly while handling the '--warn-unreachable' flag. When that flag is present, we normally report an error on any unreachable statement. But if that statement is just something like a 'pass' or a just-in-case 'assert False', reporting an error would be annoying. 
""" if isinstance(s, AssertStmt) and is_false_literal(s.expr): return True elif isinstance(s, (RaiseStmt, PassStmt)): return True elif isinstance(s, ExpressionStmt): if isinstance(s.expr, EllipsisExpr): return True elif isinstance(s.expr, CallExpr): self.expr_checker.msg.disable_errors() typ = get_proper_type(self.expr_checker.accept( s.expr, allow_none_return=True, always_allow_any=True)) self.expr_checker.msg.enable_errors() if isinstance(typ, UninhabitedType): return True return False def visit_assignment_stmt(self, s: AssignmentStmt) -> None: """Type check an assignment statement. Handle all kinds of assignment statements (simple, indexed, multiple). """ with self.enter_final_context(s.is_final_def): self.check_assignment(s.lvalues[-1], s.rvalue, s.type is None, s.new_syntax) if s.is_alias_def: # We do this mostly for compatibility with old semantic analyzer. # TODO: should we get rid of this? self.store_type(s.lvalues[-1], self.expr_checker.accept(s.rvalue)) if (s.type is not None and self.options.disallow_any_unimported and has_any_from_unimported_type(s.type)): if isinstance(s.lvalues[-1], TupleExpr): # This is a multiple assignment. Instead of figuring out which type is problematic, # give a generic error message. self.msg.unimported_type_becomes_any("A type on this line", AnyType(TypeOfAny.special_form), s) else: self.msg.unimported_type_becomes_any("Type of variable", s.type, s) check_for_explicit_any(s.type, self.options, self.is_typeshed_stub, self.msg, context=s) if len(s.lvalues) > 1: # Chained assignment (e.g. x = y = ...). # Make sure that rvalue type will not be reinferred. 
if s.rvalue not in self.type_map: self.expr_checker.accept(s.rvalue) rvalue = self.temp_node(self.type_map[s.rvalue], s) for lv in s.lvalues[:-1]: with self.enter_final_context(s.is_final_def): self.check_assignment(lv, rvalue, s.type is None) self.check_final(s) if (s.is_final_def and s.type and not has_no_typevars(s.type) and self.scope.active_class() is not None): self.fail(message_registry.DEPENDENT_FINAL_IN_CLASS_BODY, s) def check_assignment(self, lvalue: Lvalue, rvalue: Expression, infer_lvalue_type: bool = True, new_syntax: bool = False) -> None: """Type check a single assignment: lvalue = rvalue.""" if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): self.check_assignment_to_multiple_lvalues(lvalue.items, rvalue, rvalue, infer_lvalue_type) else: self.try_infer_partial_generic_type_from_assignment(lvalue, rvalue) lvalue_type, index_lvalue, inferred = self.check_lvalue(lvalue) # If we're assigning to __getattr__ or similar methods, check that the signature is # valid. if isinstance(lvalue, NameExpr) and lvalue.node: name = lvalue.node.name if name in ('__setattr__', '__getattribute__', '__getattr__'): # If an explicit type is given, use that. if lvalue_type: signature = lvalue_type else: signature = self.expr_checker.accept(rvalue) if signature: if name == '__setattr__': self.check_setattr_method(signature, lvalue) else: self.check_getattr_method(signature, lvalue, name) # Defer PartialType's super type checking. if (isinstance(lvalue, RefExpr) and not (isinstance(lvalue_type, PartialType) and lvalue_type.type is None)): if self.check_compatibility_all_supers(lvalue, lvalue_type, rvalue): # We hit an error on this line; don't check for any others return if lvalue_type: if isinstance(lvalue_type, PartialType) and lvalue_type.type is None: # Try to infer a proper type for a variable with a partial None type. 
rvalue_type = self.expr_checker.accept(rvalue) if isinstance(get_proper_type(rvalue_type), NoneType): # This doesn't actually provide any additional information -- multiple # None initializers preserve the partial None type. return if is_valid_inferred_type(rvalue_type): var = lvalue_type.var partial_types = self.find_partial_types(var) if partial_types is not None: if not self.current_node_deferred: # Partial type can't be final, so strip any literal values. rvalue_type = remove_instance_last_known_values(rvalue_type) inferred_type = make_simplified_union( [rvalue_type, NoneType()]) self.set_inferred_type(var, lvalue, inferred_type) else: var.type = None del partial_types[var] lvalue_type = var.type else: # Try to infer a partial type. No need to check the return value, as # an error will be reported elsewhere. self.infer_partial_type(lvalue_type.var, lvalue, rvalue_type) # Handle None PartialType's super type checking here, after it's resolved. if (isinstance(lvalue, RefExpr) and self.check_compatibility_all_supers(lvalue, lvalue_type, rvalue)): # We hit an error on this line; don't check for any others return elif (is_literal_none(rvalue) and isinstance(lvalue, NameExpr) and isinstance(lvalue.node, Var) and lvalue.node.is_initialized_in_class and not new_syntax): # Allow None's to be assigned to class variables with non-Optional types. rvalue_type = lvalue_type elif (isinstance(lvalue, MemberExpr) and lvalue.kind is None): # Ignore member access to modules instance_type = self.expr_checker.accept(lvalue.expr) rvalue_type, lvalue_type, infer_lvalue_type = self.check_member_assignment( instance_type, lvalue_type, rvalue, context=rvalue) else: rvalue_type = self.check_simple_assignment(lvalue_type, rvalue, context=rvalue, code=codes.ASSIGNMENT) # Special case: only non-abstract non-protocol classes can be assigned to # variables with explicit type Type[A], where A is protocol or abstract. 
rvalue_type = get_proper_type(rvalue_type) lvalue_type = get_proper_type(lvalue_type) if (isinstance(rvalue_type, CallableType) and rvalue_type.is_type_obj() and (rvalue_type.type_object().is_abstract or rvalue_type.type_object().is_protocol) and isinstance(lvalue_type, TypeType) and isinstance(lvalue_type.item, Instance) and (lvalue_type.item.type.is_abstract or lvalue_type.item.type.is_protocol)): self.msg.concrete_only_assign(lvalue_type, rvalue) return if rvalue_type and infer_lvalue_type and not isinstance(lvalue_type, PartialType): # Don't use type binder for definitions of special forms, like named tuples. if not (isinstance(lvalue, NameExpr) and lvalue.is_special_form): self.binder.assign_type(lvalue, rvalue_type, lvalue_type, False) elif index_lvalue: self.check_indexed_assignment(index_lvalue, rvalue, lvalue) if inferred: rvalue_type = self.expr_checker.accept(rvalue) if not inferred.is_final: rvalue_type = remove_instance_last_known_values(rvalue_type) self.infer_variable_type(inferred, lvalue, rvalue_type, rvalue) def try_infer_partial_generic_type_from_assignment(self, lvalue: Lvalue, rvalue: Expression) -> None: """Try to infer a precise type for partial generic type from assignment. Example where this happens: x = [] if foo(): x = [1] # Infer List[int] as type of 'x' """ var = None if (isinstance(lvalue, NameExpr) and isinstance(lvalue.node, Var) and isinstance(lvalue.node.type, PartialType)): var = lvalue.node elif isinstance(lvalue, MemberExpr): var = self.expr_checker.get_partial_self_var(lvalue) if var is not None: typ = var.type assert isinstance(typ, PartialType) if typ.type is None: return # TODO: some logic here duplicates the None partial type counterpart # inlined in check_assignment(), see # 8043. 
partial_types = self.find_partial_types(var) if partial_types is None: return rvalue_type = self.expr_checker.accept(rvalue) rvalue_type = get_proper_type(rvalue_type) if isinstance(rvalue_type, Instance): if rvalue_type.type == typ.type and is_valid_inferred_type(rvalue_type): var.type = rvalue_type del partial_types[var] elif isinstance(rvalue_type, AnyType): var.type = fill_typevars_with_any(typ.type) del partial_types[var] def check_compatibility_all_supers(self, lvalue: RefExpr, lvalue_type: Optional[Type], rvalue: Expression) -> bool: lvalue_node = lvalue.node # Check if we are a class variable with at least one base class if (isinstance(lvalue_node, Var) and lvalue.kind in (MDEF, None) and # None for Vars defined via self len(lvalue_node.info.bases) > 0): for base in lvalue_node.info.mro[1:]: tnode = base.names.get(lvalue_node.name) if tnode is not None: if not self.check_compatibility_classvar_super(lvalue_node, base, tnode.node): # Show only one error per variable break if not self.check_compatibility_final_super(lvalue_node, base, tnode.node): # Show only one error per variable break direct_bases = lvalue_node.info.direct_base_classes() last_immediate_base = direct_bases[-1] if direct_bases else None for base in lvalue_node.info.mro[1:]: # Only check __slots__ against the 'object' # If a base class defines a Tuple of 3 elements, a child of # this class should not be allowed to define it as a Tuple of # anything other than 3 elements. The exception to this rule # is __slots__, where it is allowed for any child class to # redefine it. 
if lvalue_node.name == "__slots__" and base.fullname != "builtins.object": continue if is_private(lvalue_node.name): continue base_type, base_node = self.lvalue_type_from_base(lvalue_node, base) if base_type: assert base_node is not None if not self.check_compatibility_super(lvalue, lvalue_type, rvalue, base, base_type, base_node): # Only show one error per variable; even if other # base classes are also incompatible return True if base is last_immediate_base: # At this point, the attribute was found to be compatible with all # immediate parents. break return False def check_compatibility_super(self, lvalue: RefExpr, lvalue_type: Optional[Type], rvalue: Expression, base: TypeInfo, base_type: Type, base_node: Node) -> bool: lvalue_node = lvalue.node assert isinstance(lvalue_node, Var) # Do not check whether the rvalue is compatible if the # lvalue had a type defined; this is handled by other # parts, and all we have to worry about in that case is # that lvalue is compatible with the base class. compare_node = None if lvalue_type: compare_type = lvalue_type compare_node = lvalue.node else: compare_type = self.expr_checker.accept(rvalue, base_type) if isinstance(rvalue, NameExpr): compare_node = rvalue.node if isinstance(compare_node, Decorator): compare_node = compare_node.func base_type = get_proper_type(base_type) compare_type = get_proper_type(compare_type) if compare_type: if (isinstance(base_type, CallableType) and isinstance(compare_type, CallableType)): base_static = is_node_static(base_node) compare_static = is_node_static(compare_node) # In case compare_static is unknown, also check # if 'definition' is set. 
The most common case for # this is with TempNode(), where we lose all # information about the real rvalue node (but only get # the rvalue type) if compare_static is None and compare_type.definition: compare_static = is_node_static(compare_type.definition) # Compare against False, as is_node_static can return None if base_static is False and compare_static is False: # Class-level function objects and classmethods become bound # methods: the former to the instance, the latter to the # class base_type = bind_self(base_type, self.scope.active_self_type()) compare_type = bind_self(compare_type, self.scope.active_self_type()) # If we are a static method, ensure to also tell the # lvalue it now contains a static method if base_static and compare_static: lvalue_node.is_staticmethod = True return self.check_subtype(compare_type, base_type, rvalue, message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, 'expression has type', 'base class "%s" defined the type as' % base.name, code=codes.ASSIGNMENT) return True def lvalue_type_from_base(self, expr_node: Var, base: TypeInfo) -> Tuple[Optional[Type], Optional[Node]]: """For a NameExpr that is part of a class, walk all base classes and try to find the first class that defines a Type for the same name.""" expr_name = expr_node.name base_var = base.names.get(expr_name) if base_var: base_node = base_var.node base_type = base_var.type if isinstance(base_node, Decorator): base_node = base_node.func base_type = base_node.type if base_type: if not has_no_typevars(base_type): self_type = self.scope.active_self_type() assert self_type is not None, "Internal error: base lookup outside class" if isinstance(self_type, TupleType): instance = tuple_fallback(self_type) else: instance = self_type itype = map_instance_to_supertype(instance, base) base_type = expand_type_by_instance(base_type, itype) base_type = get_proper_type(base_type) if isinstance(base_type, CallableType) and isinstance(base_node, FuncDef): # If we are a property, return the Type 
of the return # value, not the Callable if base_node.is_property: base_type = get_proper_type(base_type.ret_type) if isinstance(base_type, FunctionLike) and isinstance(base_node, OverloadedFuncDef): # Same for properties with setter if base_node.is_property: base_type = base_type.items()[0].ret_type return base_type, base_node return None, None def check_compatibility_classvar_super(self, node: Var, base: TypeInfo, base_node: Optional[Node]) -> bool: if not isinstance(base_node, Var): return True if node.is_classvar and not base_node.is_classvar: self.fail(message_registry.CANNOT_OVERRIDE_INSTANCE_VAR.format(base.name), node) return False elif not node.is_classvar and base_node.is_classvar: self.fail(message_registry.CANNOT_OVERRIDE_CLASS_VAR.format(base.name), node) return False return True def check_compatibility_final_super(self, node: Var, base: TypeInfo, base_node: Optional[Node]) -> bool: """Check if an assignment overrides a final attribute in a base class. This only checks situations where either a node in base class is not a variable but a final method, or where override is explicitly declared as final. In these cases we give a more detailed error message. In addition, we check that a final variable doesn't override writeable attribute, which is not safe. Other situations are checked in `check_final()`. """ if not isinstance(base_node, (Var, FuncBase, Decorator)): return True if base_node.is_final and (node.is_final or not isinstance(base_node, Var)): # Give this error only for explicit override attempt with `Final`, or # if we are overriding a final method with variable. # Other override attempts will be flagged as assignment to constant # in `check_final()`. 
self.msg.cant_override_final(node.name, base.name, node) return False if node.is_final: self.check_no_writable(node.name, base_node, node) return True def check_no_writable(self, name: str, base_node: Optional[Node], ctx: Context) -> None: """Check that a final variable doesn't override writeable attribute. This is done to prevent situations like this: class C: attr = 1 class D(C): attr: Final = 2 x: C = D() x.attr = 3 # Oops! """ if isinstance(base_node, Var): ok = False elif isinstance(base_node, OverloadedFuncDef) and base_node.is_property: first_item = cast(Decorator, base_node.items[0]) ok = not first_item.var.is_settable_property else: ok = True if not ok: self.msg.final_cant_override_writable(name, ctx) def get_final_context(self) -> bool: """Check whether we a currently checking a final declaration.""" return self._is_final_def @contextmanager def enter_final_context(self, is_final_def: bool) -> Iterator[None]: """Store whether the current checked assignment is a final declaration.""" old_ctx = self._is_final_def self._is_final_def = is_final_def try: yield finally: self._is_final_def = old_ctx def check_final(self, s: Union[AssignmentStmt, OperatorAssignmentStmt, AssignmentExpr]) -> None: """Check if this assignment does not assign to a final attribute. This function performs the check only for name assignments at module and class scope. The assignments to `obj.attr` and `Cls.attr` are checked in checkmember.py. """ if isinstance(s, AssignmentStmt): lvs = self.flatten_lvalues(s.lvalues) elif isinstance(s, AssignmentExpr): lvs = [s.target] else: lvs = [s.lvalue] is_final_decl = s.is_final_def if isinstance(s, AssignmentStmt) else False if is_final_decl and self.scope.active_class(): lv = lvs[0] assert isinstance(lv, RefExpr) assert isinstance(lv.node, Var) if (lv.node.final_unset_in_class and not lv.node.final_set_in_init and not self.is_stub and # It is OK to skip initializer in stub files. 
# Avoid extra error messages, if there is no type in Final[...], # then we already reported the error about missing r.h.s. isinstance(s, AssignmentStmt) and s.type is not None): self.msg.final_without_value(s) for lv in lvs: if isinstance(lv, RefExpr) and isinstance(lv.node, Var): name = lv.node.name cls = self.scope.active_class() if cls is not None: # Theses additional checks exist to give more error messages # even if the final attribute was overridden with a new symbol # (which is itself an error)... for base in cls.mro[1:]: sym = base.names.get(name) # We only give this error if base node is variable, # overriding final method will be caught in # `check_compatibility_final_super()`. if sym and isinstance(sym.node, Var): if sym.node.is_final and not is_final_decl: self.msg.cant_assign_to_final(name, sym.node.info is None, s) # ...but only once break if lv.node.is_final and not is_final_decl: self.msg.cant_assign_to_final(name, lv.node.info is None, s) def check_assignment_to_multiple_lvalues(self, lvalues: List[Lvalue], rvalue: Expression, context: Context, infer_lvalue_type: bool = True) -> None: if isinstance(rvalue, TupleExpr) or isinstance(rvalue, ListExpr): # Recursively go into Tuple or List expression rhs instead of # using the type of rhs, because this allowed more fine grained # control in cases like: a, b = [int, str] where rhs would get # type List[object] rvalues = rvalue.items if self.check_rvalue_count_in_assignment(lvalues, len(rvalues), context): star_index = next((i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues)) left_lvs = lvalues[:star_index] star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None right_lvs = lvalues[star_index + 1:] left_rvs, star_rvs, right_rvs = self.split_around_star( rvalues, star_index, len(lvalues)) lr_pairs = list(zip(left_lvs, left_rvs)) if star_lv: rv_list = ListExpr(star_rvs) rv_list.set_line(rvalue.get_line()) lr_pairs.append((star_lv.expr, rv_list)) 
lr_pairs.extend(zip(right_lvs, right_rvs)) for lv, rv in lr_pairs: self.check_assignment(lv, rv, infer_lvalue_type) else: self.check_multi_assignment(lvalues, rvalue, context, infer_lvalue_type) def check_rvalue_count_in_assignment(self, lvalues: List[Lvalue], rvalue_count: int, context: Context) -> bool: if any(isinstance(lvalue, StarExpr) for lvalue in lvalues): if len(lvalues) - 1 > rvalue_count: self.msg.wrong_number_values_to_unpack(rvalue_count, len(lvalues) - 1, context) return False elif rvalue_count != len(lvalues): self.msg.wrong_number_values_to_unpack(rvalue_count, len(lvalues), context) return False return True def check_multi_assignment(self, lvalues: List[Lvalue], rvalue: Expression, context: Context, infer_lvalue_type: bool = True, rv_type: Optional[Type] = None, undefined_rvalue: bool = False) -> None: """Check the assignment of one rvalue to a number of lvalues.""" # Infer the type of an ordinary rvalue expression. # TODO: maybe elsewhere; redundant. rvalue_type = get_proper_type(rv_type or self.expr_checker.accept(rvalue)) if isinstance(rvalue_type, UnionType): # If this is an Optional type in non-strict Optional code, unwrap it. 
relevant_items = rvalue_type.relevant_items() if len(relevant_items) == 1: rvalue_type = get_proper_type(relevant_items[0]) if isinstance(rvalue_type, AnyType): for lv in lvalues: if isinstance(lv, StarExpr): lv = lv.expr temp_node = self.temp_node(AnyType(TypeOfAny.from_another_any, source_any=rvalue_type), context) self.check_assignment(lv, temp_node, infer_lvalue_type) elif isinstance(rvalue_type, TupleType): self.check_multi_assignment_from_tuple(lvalues, rvalue, rvalue_type, context, undefined_rvalue, infer_lvalue_type) elif isinstance(rvalue_type, UnionType): self.check_multi_assignment_from_union(lvalues, rvalue, rvalue_type, context, infer_lvalue_type) else: self.check_multi_assignment_from_iterable(lvalues, rvalue_type, context, infer_lvalue_type) def check_multi_assignment_from_union(self, lvalues: List[Expression], rvalue: Expression, rvalue_type: UnionType, context: Context, infer_lvalue_type: bool) -> None: """Check assignment to multiple lvalue targets when rvalue type is a Union[...]. For example: t: Union[Tuple[int, int], Tuple[str, str]] x, y = t reveal_type(x) # Union[int, str] The idea in this case is to process the assignment for every item of the union. Important note: the types are collected in two places, 'union_types' contains inferred types for first assignments, 'assignments' contains the narrowed types for binder. """ self.no_partial_types = True transposed = tuple([] for _ in self.flatten_lvalues(lvalues)) # type: Tuple[List[Type], ...] # Notify binder that we want to defer bindings and instead collect types. with self.binder.accumulate_type_assignments() as assignments: for item in rvalue_type.items: # Type check the assignment separately for each union item and collect # the inferred lvalue types for each union item. 
self.check_multi_assignment(lvalues, rvalue, context, infer_lvalue_type=infer_lvalue_type, rv_type=item, undefined_rvalue=True) for t, lv in zip(transposed, self.flatten_lvalues(lvalues)): t.append(self.type_map.pop(lv, AnyType(TypeOfAny.special_form))) union_types = tuple(make_simplified_union(col) for col in transposed) for expr, items in assignments.items(): # Bind a union of types collected in 'assignments' to every expression. if isinstance(expr, StarExpr): expr = expr.expr # TODO: See todo in binder.py, ConditionalTypeBinder.assign_type # It's unclear why the 'declared_type' param is sometimes 'None' clean_items = [] # type: List[Tuple[Type, Type]] for type, declared_type in items: assert declared_type is not None clean_items.append((type, declared_type)) types, declared_types = zip(*clean_items) self.binder.assign_type(expr, make_simplified_union(list(types)), make_simplified_union(list(declared_types)), False) for union, lv in zip(union_types, self.flatten_lvalues(lvalues)): # Properly store the inferred types. _1, _2, inferred = self.check_lvalue(lv) if inferred: self.set_inferred_type(inferred, lv, union) else: self.store_type(lv, union) self.no_partial_types = False def flatten_lvalues(self, lvalues: List[Expression]) -> List[Expression]: res = [] # type: List[Expression] for lv in lvalues: if isinstance(lv, (TupleExpr, ListExpr)): res.extend(self.flatten_lvalues(lv.items)) if isinstance(lv, StarExpr): # Unwrap StarExpr, since it is unwrapped by other helpers. 
lv = lv.expr res.append(lv) return res def check_multi_assignment_from_tuple(self, lvalues: List[Lvalue], rvalue: Expression, rvalue_type: TupleType, context: Context, undefined_rvalue: bool, infer_lvalue_type: bool = True) -> None: if self.check_rvalue_count_in_assignment(lvalues, len(rvalue_type.items), context): star_index = next((i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues)) left_lvs = lvalues[:star_index] star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None right_lvs = lvalues[star_index + 1:] if not undefined_rvalue: # Infer rvalue again, now in the correct type context. lvalue_type = self.lvalue_type_for_inference(lvalues, rvalue_type) reinferred_rvalue_type = get_proper_type(self.expr_checker.accept(rvalue, lvalue_type)) if isinstance(reinferred_rvalue_type, UnionType): # If this is an Optional type in non-strict Optional code, unwrap it. relevant_items = reinferred_rvalue_type.relevant_items() if len(relevant_items) == 1: reinferred_rvalue_type = get_proper_type(relevant_items[0]) if isinstance(reinferred_rvalue_type, UnionType): self.check_multi_assignment_from_union(lvalues, rvalue, reinferred_rvalue_type, context, infer_lvalue_type) return if isinstance(reinferred_rvalue_type, AnyType) and self.current_node_deferred: # Doing more inference in deferred nodes can be hard, so give up for now. 
return assert isinstance(reinferred_rvalue_type, TupleType) rvalue_type = reinferred_rvalue_type left_rv_types, star_rv_types, right_rv_types = self.split_around_star( rvalue_type.items, star_index, len(lvalues)) for lv, rv_type in zip(left_lvs, left_rv_types): self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type) if star_lv: list_expr = ListExpr([self.temp_node(rv_type, context) for rv_type in star_rv_types]) list_expr.set_line(context.get_line()) self.check_assignment(star_lv.expr, list_expr, infer_lvalue_type) for lv, rv_type in zip(right_lvs, right_rv_types): self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type) def lvalue_type_for_inference(self, lvalues: List[Lvalue], rvalue_type: TupleType) -> Type: star_index = next((i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues)) left_lvs = lvalues[:star_index] star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None right_lvs = lvalues[star_index + 1:] left_rv_types, star_rv_types, right_rv_types = self.split_around_star( rvalue_type.items, star_index, len(lvalues)) type_parameters = [] # type: List[Type] def append_types_for_inference(lvs: List[Expression], rv_types: List[Type]) -> None: for lv, rv_type in zip(lvs, rv_types): sub_lvalue_type, index_expr, inferred = self.check_lvalue(lv) if sub_lvalue_type and not isinstance(sub_lvalue_type, PartialType): type_parameters.append(sub_lvalue_type) else: # index lvalue # TODO Figure out more precise type context, probably # based on the type signature of the _set method. 
type_parameters.append(rv_type) append_types_for_inference(left_lvs, left_rv_types) if star_lv: sub_lvalue_type, index_expr, inferred = self.check_lvalue(star_lv.expr) if sub_lvalue_type and not isinstance(sub_lvalue_type, PartialType): type_parameters.extend([sub_lvalue_type] * len(star_rv_types)) else: # index lvalue # TODO Figure out more precise type context, probably # based on the type signature of the _set method. type_parameters.extend(star_rv_types) append_types_for_inference(right_lvs, right_rv_types) return TupleType(type_parameters, self.named_type('builtins.tuple')) def split_around_star(self, items: List[T], star_index: int, length: int) -> Tuple[List[T], List[T], List[T]]: """Splits a list of items in three to match another list of length 'length' that contains a starred expression at 'star_index' in the following way: star_index = 2, length = 5 (i.e., [a,b,*,c,d]), items = [1,2,3,4,5,6,7] returns in: ([1,2], [3,4,5], [6,7]) """ nr_right_of_star = length - star_index - 1 right_index = -nr_right_of_star if nr_right_of_star != 0 else len(items) left = items[:star_index] star = items[star_index:right_index] right = items[right_index:] return left, star, right def type_is_iterable(self, type: Type) -> bool: type = get_proper_type(type) if isinstance(type, CallableType) and type.is_type_obj(): type = type.fallback return is_subtype(type, self.named_generic_type('typing.Iterable', [AnyType(TypeOfAny.special_form)])) def check_multi_assignment_from_iterable(self, lvalues: List[Lvalue], rvalue_type: Type, context: Context, infer_lvalue_type: bool = True) -> None: rvalue_type = get_proper_type(rvalue_type) if self.type_is_iterable(rvalue_type) and isinstance(rvalue_type, Instance): item_type = self.iterable_item_type(rvalue_type) for lv in lvalues: if isinstance(lv, StarExpr): items_type = self.named_generic_type('builtins.list', [item_type]) self.check_assignment(lv.expr, self.temp_node(items_type, context), infer_lvalue_type) else: self.check_assignment(lv, 
self.temp_node(item_type, context), infer_lvalue_type) else: self.msg.type_not_iterable(rvalue_type, context) def check_lvalue(self, lvalue: Lvalue) -> Tuple[Optional[Type], Optional[IndexExpr], Optional[Var]]: lvalue_type = None index_lvalue = None inferred = None if self.is_definition(lvalue): if isinstance(lvalue, NameExpr): inferred = cast(Var, lvalue.node) assert isinstance(inferred, Var) else: assert isinstance(lvalue, MemberExpr) self.expr_checker.accept(lvalue.expr) inferred = lvalue.def_var elif isinstance(lvalue, IndexExpr): index_lvalue = lvalue elif isinstance(lvalue, MemberExpr): lvalue_type = self.expr_checker.analyze_ordinary_member_access(lvalue, True) self.store_type(lvalue, lvalue_type) elif isinstance(lvalue, NameExpr): lvalue_type = self.expr_checker.analyze_ref_expr(lvalue, lvalue=True) self.store_type(lvalue, lvalue_type) elif isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): types = [self.check_lvalue(sub_expr)[0] or # This type will be used as a context for further inference of rvalue, # we put Uninhabited if there is no information available from lvalue. UninhabitedType() for sub_expr in lvalue.items] lvalue_type = TupleType(types, self.named_type('builtins.tuple')) elif isinstance(lvalue, StarExpr): typ, _, _ = self.check_lvalue(lvalue.expr) lvalue_type = StarType(typ) if typ else None else: lvalue_type = self.expr_checker.accept(lvalue) return lvalue_type, index_lvalue, inferred def is_definition(self, s: Lvalue) -> bool: if isinstance(s, NameExpr): if s.is_inferred_def: return True # If the node type is not defined, this must the first assignment # that we process => this is a definition, even though the semantic # analyzer did not recognize this as such. This can arise in code # that uses isinstance checks, if type checking of the primary # definition is skipped due to an always False type check. 
node = s.node if isinstance(node, Var): return node.type is None elif isinstance(s, MemberExpr): return s.is_inferred_def return False def infer_variable_type(self, name: Var, lvalue: Lvalue, init_type: Type, context: Context) -> None: """Infer the type of initialized variables from initializer type.""" init_type = get_proper_type(init_type) if isinstance(init_type, DeletedType): self.msg.deleted_as_rvalue(init_type, context) elif not is_valid_inferred_type(init_type) and not self.no_partial_types: # We cannot use the type of the initialization expression for full type # inference (it's not specific enough), but we might be able to give # partial type which will be made more specific later. A partial type # gets generated in assignment like 'x = []' where item type is not known. if not self.infer_partial_type(name, lvalue, init_type): self.msg.need_annotation_for_var(name, context, self.options.python_version) self.set_inference_error_fallback_type(name, lvalue, init_type) elif (isinstance(lvalue, MemberExpr) and self.inferred_attribute_types is not None and lvalue.def_var and lvalue.def_var in self.inferred_attribute_types and not is_same_type(self.inferred_attribute_types[lvalue.def_var], init_type)): # Multiple, inconsistent types inferred for an attribute. self.msg.need_annotation_for_var(name, context, self.options.python_version) name.type = AnyType(TypeOfAny.from_error) else: # Infer type of the target. # Make the type more general (strip away function names etc.). 
init_type = strip_type(init_type) self.set_inferred_type(name, lvalue, init_type) def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool: init_type = get_proper_type(init_type) if isinstance(init_type, NoneType): partial_type = PartialType(None, name) elif isinstance(init_type, Instance): fullname = init_type.type.fullname if (isinstance(lvalue, (NameExpr, MemberExpr)) and (fullname == 'builtins.list' or fullname == 'builtins.set' or fullname == 'builtins.dict' or fullname == 'collections.OrderedDict') and all(isinstance(t, (NoneType, UninhabitedType)) for t in get_proper_types(init_type.args))): partial_type = PartialType(init_type.type, name) else: return False else: return False self.set_inferred_type(name, lvalue, partial_type) self.partial_types[-1].map[name] = lvalue return True def set_inferred_type(self, var: Var, lvalue: Lvalue, type: Type) -> None: """Store inferred variable type. Store the type to both the variable node and the expression node that refers to the variable (lvalue). If var is None, do nothing. """ if var and not self.current_node_deferred: var.type = type var.is_inferred = True if isinstance(lvalue, MemberExpr) and self.inferred_attribute_types is not None: # Store inferred attribute type so that we can check consistency afterwards. if lvalue.def_var is not None: self.inferred_attribute_types[lvalue.def_var] = type self.store_type(lvalue, type) def set_inference_error_fallback_type(self, var: Var, lvalue: Lvalue, type: Type) -> None: """Store best known type for variable if type inference failed. If a program ignores error on type inference error, the variable should get some inferred type so that if can used later on in the program. Example: x = [] # type: ignore x.append(1) # Should be ok! We implement this here by giving x a valid type (replacing inferred with Any). 
""" fallback = self.inference_error_fallback_type(type) self.set_inferred_type(var, lvalue, fallback) def inference_error_fallback_type(self, type: Type) -> Type: fallback = type.accept(SetNothingToAny()) # Type variables may leak from inference, see https://github.com/python/mypy/issues/5738, # we therefore need to erase them. return erase_typevars(fallback) def check_simple_assignment(self, lvalue_type: Optional[Type], rvalue: Expression, context: Context, msg: str = message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, lvalue_name: str = 'variable', rvalue_name: str = 'expression', *, code: Optional[ErrorCode] = None) -> Type: if self.is_stub and isinstance(rvalue, EllipsisExpr): # '...' is always a valid initializer in a stub. return AnyType(TypeOfAny.special_form) else: lvalue_type = get_proper_type(lvalue_type) always_allow_any = lvalue_type is not None and not isinstance(lvalue_type, AnyType) rvalue_type = self.expr_checker.accept(rvalue, lvalue_type, always_allow_any=always_allow_any) rvalue_type = get_proper_type(rvalue_type) if isinstance(rvalue_type, DeletedType): self.msg.deleted_as_rvalue(rvalue_type, context) if isinstance(lvalue_type, DeletedType): self.msg.deleted_as_lvalue(lvalue_type, context) elif lvalue_type: self.check_subtype(rvalue_type, lvalue_type, context, msg, '{} has type'.format(rvalue_name), '{} has type'.format(lvalue_name), code=code) return rvalue_type def check_member_assignment(self, instance_type: Type, attribute_type: Type, rvalue: Expression, context: Context) -> Tuple[Type, Type, bool]: """Type member assignment. This defers to check_simple_assignment, unless the member expression is a descriptor, in which case this checks descriptor semantics as well. Return the inferred rvalue_type, inferred lvalue_type, and whether to use the binder for this assignment. Note: this method exists here and not in checkmember.py, because we need to take care about interaction between binder and __set__(). 
""" instance_type = get_proper_type(instance_type) attribute_type = get_proper_type(attribute_type) # Descriptors don't participate in class-attribute access if ((isinstance(instance_type, FunctionLike) and instance_type.is_type_obj()) or isinstance(instance_type, TypeType)): rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context, code=codes.ASSIGNMENT) return rvalue_type, attribute_type, True if not isinstance(attribute_type, Instance): # TODO: support __set__() for union types. rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context, code=codes.ASSIGNMENT) return rvalue_type, attribute_type, True get_type = analyze_descriptor_access( instance_type, attribute_type, self.named_type, self.msg, context, chk=self) if not attribute_type.type.has_readable_member('__set__'): # If there is no __set__, we type-check that the assigned value matches # the return type of __get__. This doesn't match the python semantics, # (which allow you to override the descriptor with any value), but preserves # the type of accessing the attribute (even after the override). rvalue_type = self.check_simple_assignment(get_type, rvalue, context, code=codes.ASSIGNMENT) return rvalue_type, get_type, True dunder_set = attribute_type.type.get_method('__set__') if dunder_set is None: self.msg.fail(message_registry.DESCRIPTOR_SET_NOT_CALLABLE.format(attribute_type), context) return AnyType(TypeOfAny.from_error), get_type, False function = function_type(dunder_set, self.named_type('builtins.function')) bound_method = bind_self(function, attribute_type) typ = map_instance_to_supertype(attribute_type, dunder_set.info) dunder_set_type = expand_type_by_instance(bound_method, typ) # Here we just infer the type, the result should be type-checked like a normal assignment. # For this we use the rvalue as type context. 
self.msg.disable_errors() _, inferred_dunder_set_type = self.expr_checker.check_call( dunder_set_type, [TempNode(instance_type, context=context), rvalue], [nodes.ARG_POS, nodes.ARG_POS], context) self.msg.enable_errors() # And now we type check the call second time, to show errors related # to wrong arguments count, etc. self.expr_checker.check_call( dunder_set_type, [TempNode(instance_type, context=context), TempNode(AnyType(TypeOfAny.special_form), context=context)], [nodes.ARG_POS, nodes.ARG_POS], context) # should be handled by get_method above assert isinstance(inferred_dunder_set_type, CallableType) # type: ignore if len(inferred_dunder_set_type.arg_types) < 2: # A message already will have been recorded in check_call return AnyType(TypeOfAny.from_error), get_type, False set_type = inferred_dunder_set_type.arg_types[1] # Special case: if the rvalue_type is a subtype of both '__get__' and '__set__' types, # and '__get__' type is narrower than '__set__', then we invoke the binder to narrow type # by this assignment. Technically, this is not safe, but in practice this is # what a user expects. rvalue_type = self.check_simple_assignment(set_type, rvalue, context, code=codes.ASSIGNMENT) infer = is_subtype(rvalue_type, get_type) and is_subtype(get_type, set_type) return rvalue_type if infer else set_type, get_type, infer def check_indexed_assignment(self, lvalue: IndexExpr, rvalue: Expression, context: Context) -> None: """Type check indexed assignment base[index] = rvalue. The lvalue argument is the base[index] expression. 
""" self.try_infer_partial_type_from_indexed_assignment(lvalue, rvalue) basetype = get_proper_type(self.expr_checker.accept(lvalue.base)) if isinstance(basetype, TypedDictType): item_type = self.expr_checker.visit_typeddict_index_expr(basetype, lvalue.index) method_type = CallableType( arg_types=[self.named_type('builtins.str'), item_type], arg_kinds=[ARG_POS, ARG_POS], arg_names=[None, None], ret_type=NoneType(), fallback=self.named_type('builtins.function') ) # type: Type else: method_type = self.expr_checker.analyze_external_member_access( '__setitem__', basetype, context) lvalue.method_type = method_type self.expr_checker.check_method_call( '__setitem__', basetype, method_type, [lvalue.index, rvalue], [nodes.ARG_POS, nodes.ARG_POS], context) def try_infer_partial_type_from_indexed_assignment( self, lvalue: IndexExpr, rvalue: Expression) -> None: # TODO: Should we share some of this with try_infer_partial_type? var = None if isinstance(lvalue.base, RefExpr) and isinstance(lvalue.base.node, Var): var = lvalue.base.node elif isinstance(lvalue.base, MemberExpr): var = self.expr_checker.get_partial_self_var(lvalue.base) if isinstance(var, Var): if isinstance(var.type, PartialType): type_type = var.type.type if type_type is None: return # The partial type is None. partial_types = self.find_partial_types(var) if partial_types is None: return typename = type_type.fullname if typename == 'builtins.dict' or typename == 'collections.OrderedDict': # TODO: Don't infer things twice. 
key_type = self.expr_checker.accept(lvalue.index) value_type = self.expr_checker.accept(rvalue) if (is_valid_inferred_type(key_type) and is_valid_inferred_type(value_type)): if not self.current_node_deferred: var.type = self.named_generic_type(typename, [key_type, value_type]) del partial_types[var] def visit_expression_stmt(self, s: ExpressionStmt) -> None: self.expr_checker.accept(s.expr, allow_none_return=True, always_allow_any=True) def visit_return_stmt(self, s: ReturnStmt) -> None: """Type check a return statement.""" self.check_return_stmt(s) self.binder.unreachable() def check_return_stmt(self, s: ReturnStmt) -> None: defn = self.scope.top_function() if defn is not None: if defn.is_generator: return_type = self.get_generator_return_type(self.return_types[-1], defn.is_coroutine) elif defn.is_coroutine: return_type = self.get_coroutine_return_type(self.return_types[-1]) else: return_type = self.return_types[-1] return_type = get_proper_type(return_type) if isinstance(return_type, UninhabitedType): self.fail(message_registry.NO_RETURN_EXPECTED, s) return if s.expr: is_lambda = isinstance(self.scope.top_function(), LambdaExpr) declared_none_return = isinstance(return_type, NoneType) declared_any_return = isinstance(return_type, AnyType) # This controls whether or not we allow a function call that # returns None as the expression of this return statement. # E.g. `return f()` for some `f` that returns None. We allow # this only if we're in a lambda or in a function that returns # `None` or `Any`. allow_none_func_call = is_lambda or declared_none_return or declared_any_return # Return with a value. typ = get_proper_type(self.expr_checker.accept( s.expr, return_type, allow_none_return=allow_none_func_call)) if defn.is_async_generator: self.fail(message_registry.RETURN_IN_ASYNC_GENERATOR, s) return # Returning a value of type Any is always fine. 
if isinstance(typ, AnyType): # (Unless you asked to be warned in that case, and the # function is not declared to return Any) if (self.options.warn_return_any and not self.current_node_deferred and not is_proper_subtype(AnyType(TypeOfAny.special_form), return_type) and not (defn.name in BINARY_MAGIC_METHODS and is_literal_not_implemented(s.expr)) and not (isinstance(return_type, Instance) and return_type.type.fullname == 'builtins.object')): self.msg.incorrectly_returning_any(return_type, s) return # Disallow return expressions in functions declared to return # None, subject to two exceptions below. if declared_none_return: # Lambdas are allowed to have None returns. # Functions returning a value of type None are allowed to have a None return. if is_lambda or isinstance(typ, NoneType): return self.fail(message_registry.NO_RETURN_VALUE_EXPECTED, s, code=codes.RETURN_VALUE) else: self.check_subtype( subtype_label='got', subtype=typ, supertype_label='expected', supertype=return_type, context=s.expr, outer_context=s, msg=message_registry.INCOMPATIBLE_RETURN_VALUE_TYPE, code=codes.RETURN_VALUE) else: # Empty returns are valid in Generators with Any typed returns, but not in # coroutines. if (defn.is_generator and not defn.is_coroutine and isinstance(return_type, AnyType)): return if isinstance(return_type, (NoneType, AnyType)): return if self.in_checked_function(): self.fail(message_registry.RETURN_VALUE_EXPECTED, s, code=codes.RETURN_VALUE) def visit_if_stmt(self, s: IfStmt) -> None: """Type check an if statement.""" # This frame records the knowledge from previous if/elif clauses not being taken. # Fall-through to the original frame is handled explicitly in each block. 
with self.binder.frame_context(can_skip=False, fall_through=0): for e, b in zip(s.expr, s.body): t = get_proper_type(self.expr_checker.accept(e)) if isinstance(t, DeletedType): self.msg.deleted_as_rvalue(t, s) if_map, else_map = self.find_isinstance_check(e) # XXX Issue a warning if condition is always False? with self.binder.frame_context(can_skip=True, fall_through=2): self.push_type_map(if_map) self.accept(b) # XXX Issue a warning if condition is always True? self.push_type_map(else_map) with self.binder.frame_context(can_skip=False, fall_through=2): if s.else_body: self.accept(s.else_body) def visit_while_stmt(self, s: WhileStmt) -> None: """Type check a while statement.""" if_stmt = IfStmt([s.expr], [s.body], None) if_stmt.set_line(s.get_line(), s.get_column()) self.accept_loop(if_stmt, s.else_body, exit_condition=s.expr) def visit_operator_assignment_stmt(self, s: OperatorAssignmentStmt) -> None: """Type check an operator assignment statement, e.g. x += 1.""" if isinstance(s.lvalue, MemberExpr): # Special case, some additional errors may be given for # assignments to read-only or final attributes. 
lvalue_type = self.expr_checker.visit_member_expr(s.lvalue, True) else: lvalue_type = self.expr_checker.accept(s.lvalue) inplace, method = infer_operator_assignment_method(lvalue_type, s.op) if inplace: # There is __ifoo__, treat as x = x.__ifoo__(y) rvalue_type, method_type = self.expr_checker.check_op( method, lvalue_type, s.rvalue, s) if not is_subtype(rvalue_type, lvalue_type): self.msg.incompatible_operator_assignment(s.op, s) else: # There is no __ifoo__, treat as x = x y expr = OpExpr(s.op, s.lvalue, s.rvalue) expr.set_line(s) self.check_assignment(lvalue=s.lvalue, rvalue=expr, infer_lvalue_type=True, new_syntax=False) self.check_final(s) def visit_assert_stmt(self, s: AssertStmt) -> None: self.expr_checker.accept(s.expr) if isinstance(s.expr, TupleExpr) and len(s.expr.items) > 0: self.fail(message_registry.MALFORMED_ASSERT, s) # If this is asserting some isinstance check, bind that type in the following code true_map, else_map = self.find_isinstance_check(s.expr) if s.msg is not None: self.expr_checker.analyze_cond_branch(else_map, s.msg, None) self.push_type_map(true_map) def visit_raise_stmt(self, s: RaiseStmt) -> None: """Type check a raise statement.""" if s.expr: self.type_check_raise(s.expr, s) if s.from_expr: self.type_check_raise(s.from_expr, s, True) self.binder.unreachable() def type_check_raise(self, e: Expression, s: RaiseStmt, optional: bool = False) -> None: typ = get_proper_type(self.expr_checker.accept(e)) exc_type = self.named_type('builtins.BaseException') expected_type = UnionType([exc_type, TypeType(exc_type)]) if optional: expected_type.items.append(NoneType()) if self.options.python_version[0] == 2: # allow `raise type, value, traceback` # https://docs.python.org/2/reference/simple_stmts.html#the-raise-statement # TODO: Also check tuple item types. 
any_type = AnyType(TypeOfAny.implementation_artifact) tuple_type = self.named_type('builtins.tuple') expected_type.items.append(TupleType([any_type, any_type], tuple_type)) expected_type.items.append(TupleType([any_type, any_type, any_type], tuple_type)) self.check_subtype(typ, expected_type, s, message_registry.INVALID_EXCEPTION) def visit_try_stmt(self, s: TryStmt) -> None: """Type check a try statement.""" # Our enclosing frame will get the result if the try/except falls through. # This one gets all possible states after the try block exited abnormally # (by exception, return, break, etc.) with self.binder.frame_context(can_skip=False, fall_through=0): # Not only might the body of the try statement exit # abnormally, but so might an exception handler or else # clause. The finally clause runs in *all* cases, so we # need an outer try frame to catch all intermediate states # in case an exception is raised during an except or else # clause. As an optimization, only create the outer try # frame when there actually is a finally clause. self.visit_try_without_finally(s, try_frame=bool(s.finally_body)) if s.finally_body: # First we check finally_body is type safe on all abnormal exit paths self.accept(s.finally_body) if s.finally_body: # Then we try again for the more restricted set of options # that can fall through. (Why do we need to check the # finally clause twice? Depending on whether the finally # clause was reached by the try clause falling off the end # or exiting abnormally, after completing the finally clause # either flow will continue to after the entire try statement # or the exception/return/etc. will be processed and control # flow will escape. We need to check that the finally clause # type checks in both contexts, but only the resulting types # from the latter context affect the type state in the code # that follows the try statement.) 
if not self.binder.is_unreachable(): self.accept(s.finally_body) def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: """Type check a try statement, ignoring the finally block. On entry, the top frame should receive all flow that exits the try block abnormally (i.e., such that the else block does not execute), and its parent should receive all flow that exits the try block normally. """ # This frame will run the else block if the try fell through. # In that case, control flow continues to the parent of what # was the top frame on entry. with self.binder.frame_context(can_skip=False, fall_through=2, try_frame=try_frame): # This frame receives exit via exception, and runs exception handlers with self.binder.frame_context(can_skip=False, fall_through=2): # Finally, the body of the try statement with self.binder.frame_context(can_skip=False, fall_through=2, try_frame=True): self.accept(s.body) for i in range(len(s.handlers)): with self.binder.frame_context(can_skip=True, fall_through=4): typ = s.types[i] if typ: t = self.check_except_handler_test(typ) var = s.vars[i] if var: # To support local variables, we make this a definition line, # causing assignment to set the variable's type. var.is_inferred_def = True # We also temporarily set current_node_deferred to False to # make sure the inference happens. # TODO: Use a better solution, e.g. a # separate Var for each except block. am_deferring = self.current_node_deferred self.current_node_deferred = False self.check_assignment(var, self.temp_node(t, var)) self.current_node_deferred = am_deferring self.accept(s.handlers[i]) var = s.vars[i] if var: # Exception variables are deleted in python 3 but not python 2. # But, since it's bad form in python 2 and the type checking # wouldn't work very well, we delete it anyway. # Unfortunately, this doesn't let us detect usage before the # try/except block. 
if self.options.python_version[0] >= 3: source = var.name else: source = ('(exception variable "{}", which we do not ' 'accept outside except: blocks even in ' 'python 2)'.format(var.name)) cast(Var, var.node).type = DeletedType(source=source) self.binder.cleanse(var) if s.else_body: self.accept(s.else_body) def check_except_handler_test(self, n: Expression) -> Type: """Type check an exception handler test clause.""" typ = self.expr_checker.accept(n) all_types = [] # type: List[Type] test_types = self.get_types_from_except_handler(typ, n) for ttype in get_proper_types(test_types): if isinstance(ttype, AnyType): all_types.append(ttype) continue if isinstance(ttype, FunctionLike): item = ttype.items()[0] if not item.is_type_obj(): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) return AnyType(TypeOfAny.from_error) exc_type = item.ret_type elif isinstance(ttype, TypeType): exc_type = ttype.item else: self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) return AnyType(TypeOfAny.from_error) if not is_subtype(exc_type, self.named_type('builtins.BaseException')): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) return AnyType(TypeOfAny.from_error) all_types.append(exc_type) return make_simplified_union(all_types) def get_types_from_except_handler(self, typ: Type, n: Expression) -> List[Type]: """Helper for check_except_handler_test to retrieve handler types.""" typ = get_proper_type(typ) if isinstance(typ, TupleType): return typ.items elif isinstance(typ, UnionType): return [ union_typ for item in typ.relevant_items() for union_typ in self.get_types_from_except_handler(item, n) ] elif isinstance(typ, Instance) and is_named_instance(typ, 'builtins.tuple'): # variadic tuple return [typ.args[0]] else: return [typ] def visit_for_stmt(self, s: ForStmt) -> None: """Type check a for statement.""" if s.is_async: iterator_type, item_type = self.analyze_async_iterable_item_type(s.expr) else: iterator_type, item_type = self.analyze_iterable_item_type(s.expr) 
s.inferred_item_type = item_type s.inferred_iterator_type = iterator_type self.analyze_index_variables(s.index, item_type, s.index_type is None, s) self.accept_loop(s.body, s.else_body) def analyze_async_iterable_item_type(self, expr: Expression) -> Tuple[Type, Type]: """Analyse async iterable expression and return iterator and iterator item types.""" echk = self.expr_checker iterable = echk.accept(expr) iterator = echk.check_method_call_by_name('__aiter__', iterable, [], [], expr)[0] awaitable = echk.check_method_call_by_name('__anext__', iterator, [], [], expr)[0] item_type = echk.check_awaitable_expr(awaitable, expr, message_registry.INCOMPATIBLE_TYPES_IN_ASYNC_FOR) return iterator, item_type def analyze_iterable_item_type(self, expr: Expression) -> Tuple[Type, Type]: """Analyse iterable expression and return iterator and iterator item types.""" echk = self.expr_checker iterable = get_proper_type(echk.accept(expr)) iterator = echk.check_method_call_by_name('__iter__', iterable, [], [], expr)[0] if isinstance(iterable, TupleType): joined = UninhabitedType() # type: Type for item in iterable.items: joined = join_types(joined, item) return iterator, joined else: # Non-tuple iterable. if self.options.python_version[0] >= 3: nextmethod = '__next__' else: nextmethod = 'next' return iterator, echk.check_method_call_by_name(nextmethod, iterator, [], [], expr)[0] def analyze_container_item_type(self, typ: Type) -> Optional[Type]: """Check if a type is a nominal container of a union of such. Return the corresponding container item type. 
""" typ = get_proper_type(typ) if isinstance(typ, UnionType): types = [] # type: List[Type] for item in typ.items: c_type = self.analyze_container_item_type(item) if c_type: types.append(c_type) return UnionType.make_union(types) if isinstance(typ, Instance) and typ.type.has_base('typing.Container'): supertype = self.named_type('typing.Container').type super_instance = map_instance_to_supertype(typ, supertype) assert len(super_instance.args) == 1 return super_instance.args[0] return None def analyze_index_variables(self, index: Expression, item_type: Type, infer_lvalue_type: bool, context: Context) -> None: """Type check or infer for loop or list comprehension index vars.""" self.check_assignment(index, self.temp_node(item_type, context), infer_lvalue_type) def visit_del_stmt(self, s: DelStmt) -> None: if isinstance(s.expr, IndexExpr): e = s.expr m = MemberExpr(e.base, '__delitem__') m.line = s.line m.column = s.column c = CallExpr(m, [e.index], [nodes.ARG_POS], [None]) c.line = s.line c.column = s.column self.expr_checker.accept(c, allow_none_return=True) else: s.expr.accept(self.expr_checker) for elt in flatten(s.expr): if isinstance(elt, NameExpr): self.binder.assign_type(elt, DeletedType(source=elt.name), get_declaration(elt), False) def visit_decorator(self, e: Decorator) -> None: for d in e.decorators: if isinstance(d, RefExpr): if d.fullname == 'typing.no_type_check': e.var.type = AnyType(TypeOfAny.special_form) e.var.is_ready = True return if self.recurse_into_functions: with self.tscope.function_scope(e.func): self.check_func_item(e.func, name=e.func.name) # Process decorators from the inside out to determine decorated signature, which # may be different from the declared signature. 
        # (continuation of visit_decorator: apply each decorator from the
        # inside out to compute the decorated signature)
        sig = self.function_type(e.func)  # type: Type
        for d in reversed(e.decorators):
            if refers_to_fullname(d, 'typing.overload'):
                self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, e)
                continue
            dec = self.expr_checker.accept(d)
            temp = self.temp_node(sig, context=e)
            fullname = None
            if isinstance(d, RefExpr):
                fullname = d.fullname
            self.check_for_untyped_decorator(e.func, dec, d)
            # Each decorator application is checked as a call of the decorator
            # with the (so far) decorated signature as the single argument.
            sig, t2 = self.expr_checker.check_call(dec, [temp],
                                                   [nodes.ARG_POS], e,
                                                   callable_name=fullname)
        self.check_untyped_after_decorator(sig, e.func)
        sig = set_callable_name(sig, e.func)
        e.var.type = sig
        e.var.is_ready = True
        if e.func.is_property:
            self.check_incompatible_property_override(e)
        if e.func.info and not e.func.is_dynamic():
            self.check_method_override(e)

        if e.func.info and e.func.name in ('__init__', '__new__'):
            if e.type and not isinstance(get_proper_type(e.type), (FunctionLike, AnyType)):
                self.fail(message_registry.BAD_CONSTRUCTOR_TYPE, e)

    def check_for_untyped_decorator(self,
                                    func: FuncDef,
                                    dec_type: Type,
                                    dec_expr: Expression) -> None:
        """Report an error if a typed function is wrapped by an untyped decorator.

        Only active under --disallow-untyped-decorators.
        """
        if (self.options.disallow_untyped_decorators and
                is_typed_callable(func.type) and
                is_untyped_decorator(dec_type)):
            self.msg.typed_function_untyped_decorator(func.name, dec_expr)

    def check_incompatible_property_override(self, e: Decorator) -> None:
        """Report an error if a read-only property overrides a read-write one in a base."""
        if not e.var.is_settable_property and e.func.info:
            name = e.func.name
            for base in e.func.info.mro[1:]:
                base_attr = base.names.get(name)
                if not base_attr:
                    continue
                if (isinstance(base_attr.node, OverloadedFuncDef) and
                        base_attr.node.is_property and
                        cast(Decorator,
                             base_attr.node.items[0]).var.is_settable_property):
                    self.fail(message_registry.READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE, e)

    def visit_with_stmt(self, s: WithStmt) -> None:
        """Type check a 'with' (or 'async with') statement."""
        exceptions_maybe_suppressed = False
        for expr, target in zip(s.expr, s.target):
            if s.is_async:
                exit_ret_type = self.check_async_with_item(expr, target,
                                                           s.unanalyzed_type is None)
            else:
                exit_ret_type = self.check_with_item(expr, target,
                                                     s.unanalyzed_type is None)

            # Based on the return type, determine if this context manager 'swallows'
            # exceptions or not. We determine this using a heuristic based on the
            # return type of the __exit__ method -- see the discussion in
            # https://github.com/python/mypy/issues/7214 and the section about context managers
            # in https://github.com/python/typeshed/blob/master/CONTRIBUTING.md#conventions
            # for more details.

            exit_ret_type = get_proper_type(exit_ret_type)
            if is_literal_type(exit_ret_type, "builtins.bool", False):
                # Literal[False]: definitely does not suppress exceptions.
                continue

            if (is_literal_type(exit_ret_type, "builtins.bool", True)
                    or (isinstance(exit_ret_type, Instance)
                        and exit_ret_type.type.fullname == 'builtins.bool'
                        and state.strict_optional)):
                # Note: if strict-optional is disabled, this bool instance
                # could actually be an Optional[bool].
                exceptions_maybe_suppressed = True

        if exceptions_maybe_suppressed:
            # Treat this 'with' block in the same way we'd treat a 'try: BODY; except: pass'
            # block. This means control flow can continue after the 'with' even if the 'with'
            # block immediately returns.
            with self.binder.frame_context(can_skip=True, try_frame=True):
                self.accept(s.body)
        else:
            self.accept(s.body)

    def check_untyped_after_decorator(self, typ: Type, func: FuncDef) -> None:
        """Report if the decorated signature contains Any (--disallow-any-decorated)."""
        if not self.options.disallow_any_decorated or self.is_stub:
            return

        if mypy.checkexpr.has_any_type(typ):
            self.msg.untyped_decorated_function(typ, func)

    def check_async_with_item(self, expr: Expression, target: Optional[Expression],
                              infer_lvalue_type: bool) -> Type:
        """Type check a single item of an 'async with' statement.

        Return the return type of __aexit__ (used by the caller to decide
        whether exceptions may be suppressed).
        """
        echk = self.expr_checker
        ctx = echk.accept(expr)
        obj = echk.check_method_call_by_name('__aenter__', ctx, [], [], expr)[0]
        obj = echk.check_awaitable_expr(
            obj, expr, message_registry.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER)
        if target:
            self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type)
        # __aexit__ takes three positional arguments (exc type, value, traceback);
        # we pass Any for each since we don't track the actual exception here.
        arg = self.temp_node(AnyType(TypeOfAny.special_form), expr)
        res, _ = echk.check_method_call_by_name(
            '__aexit__', ctx, [arg] * 3, [nodes.ARG_POS] * 3, expr)
        return echk.check_awaitable_expr(
            res, expr, message_registry.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT)

    def check_with_item(self, expr: Expression, target: Optional[Expression],
                        infer_lvalue_type: bool) -> Type:
        """Type check a single item of a 'with' statement.

        Return the return type of __exit__.
        """
        echk = self.expr_checker
        ctx = echk.accept(expr)
        obj = echk.check_method_call_by_name('__enter__', ctx, [], [], expr)[0]
        if target:
            self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type)
        # __exit__ takes three positional arguments; pass Any for each.
        arg = self.temp_node(AnyType(TypeOfAny.special_form), expr)
        res, _ = echk.check_method_call_by_name(
            '__exit__', ctx, [arg] * 3, [nodes.ARG_POS] * 3, expr)
        return res

    def visit_print_stmt(self, s: PrintStmt) -> None:
        """Type check a Python 2 'print' statement."""
        for arg in s.args:
            self.expr_checker.accept(arg)
        if s.target:
            target_type = get_proper_type(self.expr_checker.accept(s.target))
            if not isinstance(target_type, NoneType):
                # TODO: Also verify the type of 'write'.
                # (continuation of visit_print_stmt: 'print >> target' must
                # expose a 'write' attribute)
                self.expr_checker.analyze_external_member_access('write', target_type, s.target)

    def visit_break_stmt(self, s: BreakStmt) -> None:
        """Type check a break statement (delegates flow handling to the binder)."""
        self.binder.handle_break()

    def visit_continue_stmt(self, s: ContinueStmt) -> None:
        """Type check a continue statement (delegates flow handling to the binder)."""
        self.binder.handle_continue()
        return None

    def intersect_instance_callable(self, typ: Instance,
                                    callable_type: CallableType) -> Instance:
        """Creates a fake type that represents the intersection of an Instance and a CallableType.

        It operates by creating a bare-minimum dummy TypeInfo that
        subclasses type and adds a __call__ method matching callable_type.
        """
        # In order for this to work in incremental mode, the type we generate needs to
        # have a valid fullname and a corresponding entry in a symbol table. We generate
        # a unique name inside the symbol table of the current module.
        cur_module = cast(MypyFile, self.scope.stack[0])
        # NOTE(review): "".format(typ.type.name) always produces '' and discards
        # the type name, so the generated names differ only by uniquifying suffix.
        # This looks like text lost in extraction (a template naming the type?) --
        # verify against upstream before relying on the generated names.
        gen_name = gen_unique_name("".format(typ.type.name), cur_module.names)

        # Build the fake ClassDef and TypeInfo together.
        # The ClassDef is full of lies and doesn't actually contain a body.
        # Use format_bare to generate a nice name for error messages.
        # We skip fully filling out a handful of TypeInfo fields because they
        # should be irrelevant for a generated type like this:
        # is_protocol, protocol_members, is_abstract
        short_name = format_type_bare(typ)
        cdef = ClassDef(short_name, Block([]))
        cdef.fullname = cur_module.fullname + '.' + gen_name
        info = TypeInfo(SymbolTable(), cdef, cur_module.fullname)
        cdef.info = info
        info.bases = [typ]
        calculate_mro(info)
        info.calculate_metaclass_type()

        # Build up a fake FuncDef so we can populate the symbol table.
        # (continuation of intersect_instance_callable: give the dummy TypeInfo
        # a __call__ method matching callable_type and register it)
        func_def = FuncDef('__call__', [], Block([]), callable_type)
        func_def._fullname = cdef.fullname + '.__call__'
        func_def.info = info
        info.names['__call__'] = SymbolTableNode(MDEF, func_def)

        cur_module.names[gen_name] = SymbolTableNode(GDEF, info)

        return Instance(info, [])

    def make_fake_callable(self, typ: Instance) -> Instance:
        """Produce a new type that makes type Callable with a generic callable type."""
        fallback = self.named_type('builtins.function')
        # Fully generic callable: (*args: Any, **kwargs: Any) -> Any.
        callable_type = CallableType([AnyType(TypeOfAny.explicit),
                                      AnyType(TypeOfAny.explicit)],
                                     [nodes.ARG_STAR, nodes.ARG_STAR2],
                                     [None, None],
                                     ret_type=AnyType(TypeOfAny.explicit),
                                     fallback=fallback,
                                     is_ellipsis_args=True)

        return self.intersect_instance_callable(typ, callable_type)

    def partition_by_callable(self, typ: Type,
                              unsound_partition: bool) -> Tuple[List[Type], List[Type]]:
        """Partitions a type into callable subtypes and uncallable subtypes.

        Thus, given:
        `callables, uncallables = partition_by_callable(type)`

        If we assert `callable(type)` then `type` has type Union[*callables], and
        If we assert `not callable(type)` then `type` has type Union[*uncallables]

        If unsound_partition is set, assume that anything that is not
        clearly callable is in fact not callable. Otherwise we generate a
        new subtype that *is* callable.

        Guaranteed to not return [], [].
        """
        typ = get_proper_type(typ)

        if isinstance(typ, FunctionLike) or isinstance(typ, TypeType):
            return [typ], []

        if isinstance(typ, AnyType):
            # Any may or may not be callable.
            return [typ], [typ]

        if isinstance(typ, UnionType):
            callables = []
            uncallables = []
            for subtype in typ.relevant_items():
                # Use unsound_partition when handling unions in order to
                # allow the expected type discrimination.
                subcallables, subuncallables = self.partition_by_callable(
                    subtype, unsound_partition=True)
                callables.extend(subcallables)
                uncallables.extend(subuncallables)
            return callables, uncallables

        if isinstance(typ, TypeVarType):
            # We could do better probably?
            # Refine the type variable's bound as our type in the case that
            # callable() is true. This unfortunately loses the information that
            # the type is a type variable in that branch.
            # This matches what is done for isinstance, but it may be possible to
            # do better.
            # If it is possible for the false branch to execute, return the original
            # type to avoid losing type information.
            callables, uncallables = self.partition_by_callable(erase_to_union_or_bound(typ),
                                                                unsound_partition)
            uncallables = [typ] if len(uncallables) else []
            return callables, uncallables

        # A TupleType is callable if its fallback is, but needs special handling
        # when we dummy up a new type.
        ityp = typ
        if isinstance(typ, TupleType):
            ityp = tuple_fallback(typ)

        if isinstance(ityp, Instance):
            method = ityp.type.get_method('__call__')
            if method and method.type:
                callables, uncallables = self.partition_by_callable(
                    method.type, unsound_partition=False)
                if len(callables) and not len(uncallables):
                    # Only consider the type callable if its __call__ method is
                    # definitely callable.
                    return [typ], []

            if not unsound_partition:
                fake = self.make_fake_callable(ityp)
                if isinstance(typ, TupleType):
                    fake.type.tuple_type = TupleType(typ.items, fake)
                    return [fake.type.tuple_type], [typ]
                return [fake], [typ]

        if unsound_partition:
            return [], [typ]
        else:
            # We don't know how properly make the type callable.
            return [typ], [typ]

    def conditional_callable_type_map(self, expr: Expression,
                                      current_type: Optional[Type],
                                      ) -> Tuple[TypeMap, TypeMap]:
        """Takes in an expression and the current type of the expression.

        Returns a 2-tuple: The first element is a map from the expression to
        the restricted type if it were callable. The second element is a
        map from the expression to the type it would hold if it weren't
        callable.
""" if not current_type: return {}, {} if isinstance(get_proper_type(current_type), AnyType): return {}, {} callables, uncallables = self.partition_by_callable(current_type, unsound_partition=False) if len(callables) and len(uncallables): callable_map = {expr: UnionType.make_union(callables)} if len(callables) else None uncallable_map = { expr: UnionType.make_union(uncallables)} if len(uncallables) else None return callable_map, uncallable_map elif len(callables): return {}, None return None, {} def find_isinstance_check(self, node: Expression ) -> Tuple[TypeMap, TypeMap]: """Find any isinstance checks (within a chain of ands). Includes implicit and explicit checks for None and calls to callable. Return value is a map of variables to their types if the condition is true and a map of variables to their types if the condition is false. If either of the values in the tuple is None, then that particular branch can never occur. Guaranteed to not return None, None. (But may return {}, {}) """ if_map, else_map = self.find_isinstance_check_helper(node) new_if_map = self.propagate_up_typemap_info(self.type_map, if_map) new_else_map = self.propagate_up_typemap_info(self.type_map, else_map) return new_if_map, new_else_map def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeMap]: type_map = self.type_map if is_true_literal(node): return {}, None elif is_false_literal(node): return None, {} elif isinstance(node, CallExpr): if refers_to_fullname(node.callee, 'builtins.isinstance'): if len(node.args) != 2: # the error will be reported elsewhere return {}, {} expr = node.args[0] if literal(expr) == LITERAL_TYPE: vartype = type_map[expr] type = get_isinstance_type(node.args[1], type_map) return conditional_type_map(expr, vartype, type) elif refers_to_fullname(node.callee, 'builtins.issubclass'): if len(node.args) != 2: # the error will be reported elsewhere return {}, {} expr = node.args[0] if literal(expr) == LITERAL_TYPE: return 
self.infer_issubclass_maps(node, expr, type_map) elif refers_to_fullname(node.callee, 'builtins.callable'): if len(node.args) != 1: # the error will be reported elsewhere return {}, {} expr = node.args[0] if literal(expr) == LITERAL_TYPE: vartype = type_map[expr] return self.conditional_callable_type_map(expr, vartype) elif isinstance(node, ComparisonExpr): operand_types = [coerce_to_literal(type_map[expr]) for expr in node.operands if expr in type_map] is_not = node.operators == ['is not'] if (is_not or node.operators == ['is']) and len(operand_types) == len(node.operands): if_vars = {} # type: TypeMap else_vars = {} # type: TypeMap for i, expr in enumerate(node.operands): var_type = operand_types[i] other_type = operand_types[1 - i] if literal(expr) == LITERAL_TYPE and is_singleton_type(other_type): # This should only be true at most once: there should be # exactly two elements in node.operands and if the 'other type' is # a singleton type, it by definition does not need to be narrowed: # it already has the most precise type possible so does not need to # be narrowed/included in the output map. # # TODO: Generalize this to handle the case where 'other_type' is # a union of singleton types. if isinstance(other_type, LiteralType) and other_type.is_enum_literal(): fallback_name = other_type.fallback.type.fullname var_type = try_expanding_enum_to_union(var_type, fallback_name) target_type = [TypeRange(other_type, is_upper_bound=False)] if_vars, else_vars = conditional_type_map(expr, var_type, target_type) break if is_not: if_vars, else_vars = else_vars, if_vars return if_vars, else_vars # Check for `x == y` where x is of type Optional[T] and y is of type T # or a type that overlaps with T (or vice versa). 
elif node.operators == ['==']: first_type = type_map[node.operands[0]] second_type = type_map[node.operands[1]] if is_optional(first_type) != is_optional(second_type): if is_optional(first_type): optional_type, comp_type = first_type, second_type optional_expr = node.operands[0] else: optional_type, comp_type = second_type, first_type optional_expr = node.operands[1] if is_overlapping_erased_types(optional_type, comp_type): return {optional_expr: remove_optional(optional_type)}, {} elif node.operators in [['in'], ['not in']]: expr = node.operands[0] left_type = type_map[expr] right_type = get_proper_type(builtin_item_type(type_map[node.operands[1]])) right_ok = right_type and (not is_optional(right_type) and (not isinstance(right_type, Instance) or right_type.type.fullname != 'builtins.object')) if (right_type and right_ok and is_optional(left_type) and literal(expr) == LITERAL_TYPE and not is_literal_none(expr) and is_overlapping_erased_types(left_type, right_type)): if node.operators == ['in']: return {expr: remove_optional(left_type)}, {} if node.operators == ['not in']: return {}, {expr: remove_optional(left_type)} elif isinstance(node, RefExpr): # Restrict the type of the variable to True-ish/False-ish in the if and else branches # respectively vartype = type_map[node] if_type = true_only(vartype) # type: Type else_type = false_only(vartype) # type: Type ref = node # type: Expression if_map = ({ref: if_type} if not isinstance(get_proper_type(if_type), UninhabitedType) else None) else_map = ({ref: else_type} if not isinstance(get_proper_type(else_type), UninhabitedType) else None) return if_map, else_map elif isinstance(node, OpExpr) and node.op == 'and': left_if_vars, left_else_vars = self.find_isinstance_check_helper(node.left) right_if_vars, right_else_vars = self.find_isinstance_check_helper(node.right) # (e1 and e2) is true if both e1 and e2 are true, # and false if at least one of e1 and e2 is false. 
return (and_conditional_maps(left_if_vars, right_if_vars), or_conditional_maps(left_else_vars, right_else_vars)) elif isinstance(node, OpExpr) and node.op == 'or': left_if_vars, left_else_vars = self.find_isinstance_check_helper(node.left) right_if_vars, right_else_vars = self.find_isinstance_check_helper(node.right) # (e1 or e2) is true if at least one of e1 or e2 is true, # and false if both e1 and e2 are false. return (or_conditional_maps(left_if_vars, right_if_vars), and_conditional_maps(left_else_vars, right_else_vars)) elif isinstance(node, UnaryExpr) and node.op == 'not': left, right = self.find_isinstance_check_helper(node.expr) return right, left # Not a supported isinstance check return {}, {} def propagate_up_typemap_info(self, existing_types: Mapping[Expression, Type], new_types: TypeMap) -> TypeMap: """Attempts refining parent expressions of any MemberExpr or IndexExprs in new_types. Specifically, this function accepts two mappings of expression to original types: the original mapping (existing_types), and a new mapping (new_types) intended to update the original. This function iterates through new_types and attempts to use the information to try refining any parent types that happen to be unions. For example, suppose there are two types "A = Tuple[int, int]" and "B = Tuple[str, str]". Next, suppose that 'new_types' specifies the expression 'foo[0]' has a refined type of 'int' and that 'foo' was previously deduced to be of type Union[A, B]. Then, this function will observe that since A[0] is an int and B[0] is not, the type of 'foo' can be further refined from Union[A, B] into just B. We perform this kind of "parent narrowing" for member lookup expressions and indexing expressions into tuples, namedtuples, and typeddicts. We repeat this narrowing recursively if the parent is also a "lookup expression". 
So for example, if we have the expression "foo['bar'].baz[0]", we'd potentially end up refining types for the expressions "foo", "foo['bar']", and "foo['bar'].baz". We return the newly refined map. This map is guaranteed to be a superset of 'new_types'. """ if new_types is None: return None output_map = {} for expr, expr_type in new_types.items(): # The original inferred type should always be present in the output map, of course output_map[expr] = expr_type # Next, try using this information to refine the parent types, if applicable. new_mapping = self.refine_parent_types(existing_types, expr, expr_type) for parent_expr, proposed_parent_type in new_mapping.items(): # We don't try inferring anything if we've already inferred something for # the parent expression. # TODO: Consider picking the narrower type instead of always discarding this? if parent_expr in new_types: continue output_map[parent_expr] = proposed_parent_type return output_map def refine_parent_types(self, existing_types: Mapping[Expression, Type], expr: Expression, expr_type: Type) -> Mapping[Expression, Type]: """Checks if the given expr is a 'lookup operation' into a union and iteratively refines the parent types based on the 'expr_type'. For example, if 'expr' is an expression like 'a.b.c.d', we'll potentially return refined types for expressions 'a', 'a.b', and 'a.b.c'. For more details about what a 'lookup operation' is and how we use the expr_type to refine the parent types of lookup_expr, see the docstring in 'propagate_up_typemap_info'. """ output = {} # type: Dict[Expression, Type] # Note: parent_expr and parent_type are progressively refined as we crawl up the # parent lookup chain. while True: # First, check if this expression is one that's attempting to # "lookup" some key in the parent type. If so, save the parent type # and create function that will try replaying the same lookup # operation against arbitrary types. 
if isinstance(expr, MemberExpr): parent_expr = expr.expr parent_type = existing_types.get(parent_expr) member_name = expr.name def replay_lookup(new_parent_type: ProperType) -> Optional[Type]: msg_copy = self.msg.clean_copy() msg_copy.disable_count = 0 member_type = analyze_member_access( name=member_name, typ=new_parent_type, context=parent_expr, is_lvalue=False, is_super=False, is_operator=False, msg=msg_copy, original_type=new_parent_type, chk=self, in_literal_context=False, ) if msg_copy.is_errors(): return None else: return member_type elif isinstance(expr, IndexExpr): parent_expr = expr.base parent_type = existing_types.get(parent_expr) index_type = existing_types.get(expr.index) if index_type is None: return output str_literals = try_getting_str_literals_from_type(index_type) if str_literals is not None: # Refactoring these two indexing replay functions is surprisingly # tricky -- see https://github.com/python/mypy/pull/7917, which # was blocked by https://github.com/mypyc/mypyc/issues/586 def replay_lookup(new_parent_type: ProperType) -> Optional[Type]: if not isinstance(new_parent_type, TypedDictType): return None try: assert str_literals is not None member_types = [new_parent_type.items[key] for key in str_literals] except KeyError: return None return make_simplified_union(member_types) else: int_literals = try_getting_int_literals_from_type(index_type) if int_literals is not None: def replay_lookup(new_parent_type: ProperType) -> Optional[Type]: if not isinstance(new_parent_type, TupleType): return None try: assert int_literals is not None member_types = [new_parent_type.items[key] for key in int_literals] except IndexError: return None return make_simplified_union(member_types) else: return output else: return output # If we somehow didn't previously derive the parent type, abort completely # with what we have so far: something went wrong at an earlier stage. 
if parent_type is None: return output # We currently only try refining the parent type if it's a Union. # If not, there's no point in trying to refine any further parents # since we have no further information we can use to refine the lookup # chain, so we end early as an optimization. parent_type = get_proper_type(parent_type) if not isinstance(parent_type, UnionType): return output # Take each element in the parent union and replay the original lookup procedure # to figure out which parents are compatible. new_parent_types = [] for item in parent_type.items: item = get_proper_type(item) member_type = replay_lookup(item) if member_type is None: # We were unable to obtain the member type. So, we give up on refining this # parent type entirely and abort. return output if is_overlapping_types(member_type, expr_type): new_parent_types.append(item) # If none of the parent types overlap (if we derived an empty union), something # went wrong. We should never hit this case, but deriving the uninhabited type or # reporting an error both seem unhelpful. So we abort. 
if not new_parent_types: return output expr = parent_expr expr_type = output[parent_expr] = make_simplified_union(new_parent_types) return output # # Helpers # def check_subtype(self, subtype: Type, supertype: Type, context: Context, msg: str = message_registry.INCOMPATIBLE_TYPES, subtype_label: Optional[str] = None, supertype_label: Optional[str] = None, *, code: Optional[ErrorCode] = None, outer_context: Optional[Context] = None) -> bool: """Generate an error if the subtype is not compatible with supertype.""" if is_subtype(subtype, supertype): return True subtype = get_proper_type(subtype) supertype = get_proper_type(supertype) if self.msg.try_report_long_tuple_assignment_error(subtype, supertype, context, msg, subtype_label, supertype_label, code=code): return False if self.should_suppress_optional_error([subtype]): return False extra_info = [] # type: List[str] note_msg = '' notes = [] # type: List[str] if subtype_label is not None or supertype_label is not None: subtype_str, supertype_str = format_type_distinctly(subtype, supertype) if subtype_label is not None: extra_info.append(subtype_label + ' ' + subtype_str) if supertype_label is not None: extra_info.append(supertype_label + ' ' + supertype_str) note_msg = make_inferred_type_note(outer_context or context, subtype, supertype, supertype_str) if isinstance(subtype, Instance) and isinstance(supertype, Instance): notes = append_invariance_notes([], subtype, supertype) if extra_info: msg += ' (' + ', '.join(extra_info) + ')' self.fail(msg, context, code=code) for note in notes: self.msg.note(note, context, code=code) if note_msg: self.note(note_msg, context, code=code) if (isinstance(supertype, Instance) and supertype.type.is_protocol and isinstance(subtype, (Instance, TupleType, TypedDictType))): self.msg.report_protocol_problems(subtype, supertype, context, code=code) if isinstance(supertype, CallableType) and isinstance(subtype, Instance): call = find_member('__call__', subtype, subtype, is_operator=True) 
            # (continuation of check_subtype: suggest __call__-based notes)
            if call:
                self.msg.note_call(subtype, call, context, code=code)
        if isinstance(subtype, (CallableType, Overloaded)) and isinstance(supertype, Instance):
            if supertype.type.is_protocol and supertype.type.protocol_members == ['__call__']:
                call = find_member('__call__', supertype, subtype, is_operator=True)
                assert call is not None
                self.msg.note_call(supertype, call, context, code=code)
        return False

    def contains_none(self, t: Type) -> bool:
        """Return True if the type contains None, searching unions, tuples
        and instance type arguments recursively."""
        t = get_proper_type(t)
        return (
            isinstance(t, NoneType) or
            (isinstance(t, UnionType) and any(self.contains_none(ut) for ut in t.items)) or
            (isinstance(t, TupleType) and any(self.contains_none(tt) for tt in t.items)) or
            (isinstance(t, Instance) and bool(t.args)
             and any(self.contains_none(it) for it in t.args))
        )

    def should_suppress_optional_error(self, related_types: List[Type]) -> bool:
        """Return True if Optional-related errors should be suppressed for these types."""
        return self.suppress_none_errors and any(self.contains_none(t) for t in related_types)

    def named_type(self, name: str) -> Instance:
        """Return an instance type with given name and implicit Any type args.

        For example, named_type('builtins.object') produces the 'object' type.
        """
        # Assume that the name refers to a type.
        sym = self.lookup_qualified(name)
        node = sym.node
        if isinstance(node, TypeAlias):
            assert isinstance(node.target, Instance)  # type: ignore
            node = node.target.type
        assert isinstance(node, TypeInfo)
        any_type = AnyType(TypeOfAny.from_omitted_generics)
        return Instance(node, [any_type] * len(node.defn.type_vars))

    def named_generic_type(self, name: str, args: List[Type]) -> Instance:
        """Return an instance with the given name and type arguments.

        Assume that the number of arguments is correct.  Assume that
        the name refers to a compatible generic type.
        """
        info = self.lookup_typeinfo(name)
        args = [remove_instance_last_known_values(arg) for arg in args]
        # TODO: assert len(args) == len(info.defn.type_vars)
        return Instance(info, args)

    def lookup_typeinfo(self, fullname: str) -> TypeInfo:
        """Look up the TypeInfo for a fully qualified class name."""
        # Assume that the name refers to a class.
        sym = self.lookup_qualified(fullname)
        node = sym.node
        assert isinstance(node, TypeInfo)
        return node

    def type_type(self) -> Instance:
        """Return instance type 'type'."""
        return self.named_type('builtins.type')

    def str_type(self) -> Instance:
        """Return instance type 'str'."""
        return self.named_type('builtins.str')

    def store_type(self, node: Expression, typ: Type) -> None:
        """Store the type of a node in the type map."""
        self.type_map[node] = typ

    def in_checked_function(self) -> bool:
        """Should we type-check the current function?

        - Yes if --check-untyped-defs is set.
        - Yes outside functions.
        - Yes in annotated functions.
        - No otherwise.
        """
        return (self.options.check_untyped_defs
                or not self.dynamic_funcs
                or not self.dynamic_funcs[-1])

    def lookup(self, name: str, kind: int) -> SymbolTableNode:
        """Look up a definition from the symbol table with the given name.

        TODO remove kind argument
        """
        if name in self.globals:
            return self.globals[name]
        else:
            # Fall back to the builtins namespace.
            b = self.globals.get('__builtins__', None)
            if b:
                table = cast(MypyFile, b.node).names
                if name in table:
                    return table[name]
            raise KeyError('Failed lookup: {}'.format(name))

    def lookup_qualified(self, name: str) -> SymbolTableNode:
        """Look up a possibly dotted name, traversing module namespaces.

        Raises KeyError if the name cannot be resolved.
        """
        if '.' not in name:
            return self.lookup(name, GDEF)  # FIX kind
        else:
            parts = name.split('.')
            n = self.modules[parts[0]]
            for i in range(1, len(parts) - 1):
                sym = n.names.get(parts[i])
                assert sym is not None, "Internal error: attempted lookup of unknown name"
                n = cast(MypyFile, sym.node)
            last = parts[-1]
            if last in n.names:
                return n.names[last]
            elif len(parts) == 2 and parts[0] == 'builtins':
                raise KeyError("Could not find builtin symbol '{}'. (Are you running a "
                               "test case? If so, make sure to include a fixture that "
                               "defines this symbol.)".format(last))
            else:
                msg = "Failed qualified lookup: '{}' (fullname = '{}')."
                raise KeyError(msg.format(last, name))

    @contextmanager
    def enter_partial_types(self, *, is_function: bool = False,
                            is_class: bool = False) -> Iterator[None]:
        """Enter a new scope for collecting partial types.

        Also report errors for (some) variables which still have partial
        types, i.e. we couldn't infer a complete type.
        """
        is_local = (self.partial_types and self.partial_types[-1].is_local) or is_function
        self.partial_types.append(PartialTypeScope({}, is_function, is_local))
        yield

        # Don't complain about not being able to infer partials if it is
        # at the toplevel (with allow_untyped_globals) or if it is in an
        # untyped function being checked with check_untyped_defs.
        permissive = (self.options.allow_untyped_globals and not is_local) or (
            self.options.check_untyped_defs
            and self.dynamic_funcs
            and self.dynamic_funcs[-1]
        )

        partial_types, _, _ = self.partial_types.pop()
        if not self.current_node_deferred:
            for var, context in partial_types.items():
                # If we require local partial types, there are a few exceptions where
                # we fall back to inferring just "None" as the type from a None initializer:
                #
                # 1. If all happens within a single function this is acceptable, since only
                #    the topmost function is a separate target in fine-grained incremental mode.
                #    We primarily want to avoid "splitting" partial types across targets.
                #
                # 2. A None initializer in the class body if the attribute is defined in a base
                #    class is fine, since the attribute is already defined and it's currently
                #    okay to vary the type of an attribute covariantly. The None type will
                #    still be checked for compatibility with base classes elsewhere. Without
                #    this exception mypy could require an annotation for an attribute that
                #    already has been declared in a base class, which would be bad.
                allow_none = (not self.options.local_partial_types
                              or is_function
                              or (is_class and self.is_defined_in_base_class(var)))
                if (allow_none
                        and isinstance(var.type, PartialType)
                        and var.type.type is None
                        and not permissive):
                    var.type = NoneType()
                else:
                    # Report at most once per variable, unless permissive.
                    if var not in self.partial_reported and not permissive:
                        self.msg.need_annotation_for_var(var, context,
                                                         self.options.python_version)
                        self.partial_reported.add(var)
                    if var.type:
                        var.type = self.fixup_partial_type(var.type)

    def handle_partial_var_type(
            self, typ: PartialType, is_lvalue: bool, node: Var, context: Context) -> Type:
        """Handle a reference to a partial type through a var.

        (Used by checkexpr and checkmember.)
        """
        in_scope, is_local, partial_types = self.find_partial_types_in_all_scopes(node)
        if typ.type is None and in_scope:
            # 'None' partial type. It has a well-defined type. In an lvalue context
            # we want to preserve the knowledge of it being a partial type.
            if not is_lvalue:
                return NoneType()
            else:
                return typ
        else:
            if partial_types is not None and not self.current_node_deferred:
                if in_scope:
                    context = partial_types[node]
                    if is_local or not self.options.allow_untyped_globals:
                        self.msg.need_annotation_for_var(node, context,
                                                         self.options.python_version)
                else:
                    # Defer the node -- we might get a better type in the outer scope
                    self.handle_cannot_determine_type(node.name, context)
            return self.fixup_partial_type(typ)

    def fixup_partial_type(self, typ: Type) -> Type:
        """Convert a partial type that we couldn't resolve into something concrete.

        This means, for None we make it Optional[Any], and for anything else we
        fill in all of the type arguments with Any.
""" if not isinstance(typ, PartialType): return typ if typ.type is None: return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) else: return Instance( typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) def is_defined_in_base_class(self, var: Var) -> bool: if var.info: for base in var.info.mro[1:]: if base.get(var.name) is not None: return True if var.info.fallback_to_any: return True return False def find_partial_types(self, var: Var) -> Optional[Dict[Var, Context]]: """Look for an active partial type scope containing variable. A scope is active if assignments in the current context can refine a partial type originally defined in the scope. This is affected by the local_partial_types configuration option. """ in_scope, _, partial_types = self.find_partial_types_in_all_scopes(var) if in_scope: return partial_types return None def find_partial_types_in_all_scopes( self, var: Var) -> Tuple[bool, bool, Optional[Dict[Var, Context]]]: """Look for partial type scope containing variable. Return tuple (is the scope active, is the scope a local scope, scope). """ for scope in reversed(self.partial_types): if var in scope.map: # All scopes within the outermost function are active. Scopes out of # the outermost function are inactive to allow local reasoning (important # for fine-grained incremental mode). disallow_other_scopes = self.options.local_partial_types if isinstance(var.type, PartialType) and var.type.type is not None and var.info: # This is an ugly hack to make partial generic self attributes behave # as if --local-partial-types is always on (because it used to be like this). 
                    disallow_other_scopes = True
                scope_active = (not disallow_other_scopes
                                or scope.is_local == self.partial_types[-1].is_local)
                return scope_active, scope.is_local, scope.map
        # Variable is not in any partial type scope.
        return False, False, None

    def temp_node(self, t: Type, context: Optional[Context] = None) -> TempNode:
        """Create a temporary node with the given, fixed type."""
        return TempNode(t, context=context)

    def fail(self, msg: str, context: Context, *,
             code: Optional[ErrorCode] = None) -> None:
        """Produce an error message."""
        self.msg.fail(msg, context, code=code)

    def note(self,
             msg: str,
             context: Context,
             offset: int = 0,
             *,
             code: Optional[ErrorCode] = None) -> None:
        """Produce a note."""
        self.msg.note(msg, context, offset=offset, code=code)

    def iterable_item_type(self, instance: Instance) -> Type:
        """Return the item type produced by iterating the given instance."""
        iterable = map_instance_to_supertype(
            instance,
            self.lookup_typeinfo('typing.Iterable'))
        item_type = iterable.args[0]
        if not isinstance(get_proper_type(item_type), AnyType):
            # This relies on 'map_instance_to_supertype' returning 'Iterable[Any]'
            # in case there is no explicit base class.
            return item_type
        # Try also structural typing.
        # Fall back to the declared __iter__ (protocol member) when the nominal
        # Iterable base only gave us Any.
        iter_type = get_proper_type(find_member('__iter__', instance, instance,
                                                is_operator=True))
        if iter_type and isinstance(iter_type, CallableType):
            ret_type = get_proper_type(iter_type.ret_type)
            if isinstance(ret_type, Instance):
                iterator = map_instance_to_supertype(ret_type,
                                                     self.lookup_typeinfo('typing.Iterator'))
                item_type = iterator.args[0]
        return item_type

    def function_type(self, func: FuncBase) -> FunctionLike:
        """Return the type of a function, using 'builtins.function' as fallback."""
        return function_type(func, self.named_type('builtins.function'))

    def push_type_map(self, type_map: 'TypeMap') -> None:
        """Record narrowed types in the binder; None marks the branch unreachable."""
        if type_map is None:
            self.binder.unreachable()
        else:
            for expr, type in type_map.items():
                self.binder.put(expr, type)

    def infer_issubclass_maps(self, node: CallExpr,
                              expr: Expression,
                              type_map: Dict[Expression, Type]
                              ) -> Tuple[TypeMap, TypeMap]:
        """Infer type restrictions for an expression in issubclass call."""
        vartype = type_map[expr]
        type = get_isinstance_type(node.args[1], type_map)
        if isinstance(vartype, TypeVarType):
            vartype = vartype.upper_bound
        vartype = get_proper_type(vartype)
        if isinstance(vartype, UnionType):
            # Narrow each Type[...] item of the union to its item type.
            union_list = []
            for t in get_proper_types(vartype.items):
                if isinstance(t, TypeType):
                    union_list.append(t.item)
                else:
                    # This is an error that should be reported earlier
                    # if we reach here, we refuse to do any type inference.
                    return {}, {}
            vartype = UnionType(union_list)
        elif isinstance(vartype, TypeType):
            vartype = vartype.item
        elif (isinstance(vartype, Instance) and
                vartype.type.fullname == 'builtins.type'):
            # Bare 'type' means any class object.
            vartype = self.named_type('builtins.object')
        else:
            # Any other object whose type we don't know precisely
            # for example, Any or a custom metaclass.
            return {}, {}  # unknown type
        yes_map, no_map = conditional_type_map(expr, vartype, type)
        # Wrap the narrowed item types back into Type[...] for both branches.
        yes_map, no_map = map(convert_to_typetype, (yes_map, no_map))
        return yes_map, no_map


def conditional_type_map(expr: Expression,
                         current_type: Optional[Type],
                         proposed_type_ranges: Optional[List[TypeRange]],
                         ) -> Tuple[TypeMap, TypeMap]:
    """Takes in an expression, the current type of the expression, and a
    proposed type of that expression.

    Returns a 2-tuple: The first element is a map from the expression to
    the proposed type, if the expression can be the proposed type.  The
    second element is a map from the expression to the type it would hold
    if it was not the proposed type, if any. None means bot, {} means top"""
    if proposed_type_ranges:
        proposed_items = [type_range.item for type_range in proposed_type_ranges]
        proposed_type = make_simplified_union(proposed_items)
        if current_type:
            if isinstance(proposed_type, AnyType):
                # We don't really know much about the proposed type, so we shouldn't
                # attempt to narrow anything.
                # Instead, we broaden the expr to Any to
                # avoid false positives
                return {expr: proposed_type}, {}
            elif (not any(type_range.is_upper_bound for type_range in proposed_type_ranges)
                  and is_proper_subtype(current_type, proposed_type)):
                # Expression is always of one of the types in proposed_type_ranges
                return {}, None
            elif not is_overlapping_types(current_type, proposed_type,
                                          prohibit_none_typevar_overlap=True):
                # Expression is never of any type in proposed_type_ranges
                return None, {}
            else:
                # we can only restrict when the type is precise, not bounded
                proposed_precise_type = UnionType([type_range.item
                                                   for type_range in proposed_type_ranges
                                                   if not type_range.is_upper_bound])
                remaining_type = restrict_subtype_away(current_type, proposed_precise_type)
                return {expr: proposed_type}, {expr: remaining_type}
        else:
            # No current type known: only the positive branch learns anything.
            return {expr: proposed_type}, {}
    else:
        # An isinstance check, but we don't understand the type
        return {}, {}


def gen_unique_name(base: str, table: SymbolTable) -> str:
    """Generate a name that does not appear in table by appending numbers to base."""
    if base not in table:
        return base
    i = 1
    while base + str(i) in table:
        i += 1
    return base + str(i)


def is_true_literal(n: Expression) -> bool:
    """Return True if the expression is the literal 'True' (or int 1)."""
    return (refers_to_fullname(n, 'builtins.True')
            or isinstance(n, IntExpr) and n.value == 1)


def is_false_literal(n: Expression) -> bool:
    """Return True if the expression is the literal 'False' (or int 0)."""
    return (refers_to_fullname(n, 'builtins.False')
            or isinstance(n, IntExpr) and n.value == 0)


def is_literal_none(n: Expression) -> bool:
    """Return True if the expression is the literal 'None'."""
    return isinstance(n, NameExpr) and n.fullname == 'builtins.None'


def is_literal_not_implemented(n: Expression) -> bool:
    """Return True if the expression is the literal 'NotImplemented'."""
    return isinstance(n, NameExpr) and n.fullname == 'builtins.NotImplemented'


def builtin_item_type(tp: Type) -> Optional[Type]:
    """Get the item type of a builtin container.

    If 'tp' is not one of the built containers (these includes NamedTuple and TypedDict)
    or if the container is not parameterized (like List or List[Any])
    return None.
    This function is used to narrow optional types in situations like this:

        x: Optional[int]
        if x in (1, 2, 3):
            x + 42  # OK

    Note: this is only OK for built-in containers, where we know the behavior
    of __contains__.
    """
    tp = get_proper_type(tp)

    if isinstance(tp, Instance):
        if tp.type.fullname in [
                'builtins.list', 'builtins.tuple', 'builtins.dict',
                'builtins.set', 'builtins.frozenset',
        ]:
            if not tp.args:
                # TODO: fix tuple in lib-stub/builtins.pyi (it should be generic).
                return None
            if not isinstance(get_proper_type(tp.args[0]), AnyType):
                return tp.args[0]
    elif isinstance(tp, TupleType) and all(not isinstance(it, AnyType)
                                           for it in get_proper_types(tp.items)):
        return make_simplified_union(tp.items)  # this type is not externally visible
    elif isinstance(tp, TypedDictType):
        # TypedDict always has non-optional string keys. Find the key type from the Mapping
        # base class.
        for base in tp.fallback.type.mro:
            if base.fullname == 'typing.Mapping':
                return map_instance_to_supertype(tp.fallback, base).args[0]
        assert False, 'No Mapping base class found for TypedDict fallback'
    return None


def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
    """Calculate what information we can learn from the truth of (e1 and e2)
    in terms of the information that we can learn from the truth of e1 and
    the truth of e2.
    """

    if m1 is None or m2 is None:
        # One of the conditions can never be true.
        return None
    # Both conditions can be true; combine the information. Anything
    # we learn from either conditions's truth is valid. If the same
    # expression's type is refined by both conditions, we somewhat
    # arbitrarily give precedence to m2. (In the future, we could use
    # an intersection type.)
result = m2.copy() m2_keys = set(literal_hash(n2) for n2 in m2) for n1 in m1: if literal_hash(n1) not in m2_keys: result[n1] = m1[n1] return result def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: """Calculate what information we can learn from the truth of (e1 or e2) in terms of the information that we can learn from the truth of e1 and the truth of e2. """ if m1 is None: return m2 if m2 is None: return m1 # Both conditions can be true. Combine information about # expressions whose type is refined by both conditions. (We do not # learn anything about expressions whose type is refined by only # one condition.) result = {} # type: Dict[Expression, Type] for n1 in m1: for n2 in m2: if literal_hash(n1) == literal_hash(n2): result[n1] = make_simplified_union([m1[n1], m2[n2]]) return result def convert_to_typetype(type_map: TypeMap) -> TypeMap: converted_type_map = {} # type: Dict[Expression, Type] if type_map is None: return None for expr, typ in type_map.items(): t = typ if isinstance(t, TypeVarType): t = t.upper_bound # TODO: should we only allow unions of instances as per PEP 484? 
        if not isinstance(get_proper_type(t), (UnionType, Instance)):
            # unknown type; error was likely reported earlier
            return {}
        converted_type_map[expr] = TypeType.make_normalized(typ)
    return converted_type_map


def flatten(t: Expression) -> List[Expression]:
    """Flatten a nested sequence of tuples/lists into one list of nodes."""
    if isinstance(t, TupleExpr) or isinstance(t, ListExpr):
        return [b for a in t.items for b in flatten(a)]
    elif isinstance(t, StarExpr):
        return flatten(t.expr)
    else:
        return [t]


def flatten_types(t: Type) -> List[Type]:
    """Flatten a nested sequence of tuples into one list of nodes."""
    t = get_proper_type(t)
    if isinstance(t, TupleType):
        return [b for a in t.items for b in flatten_types(a)]
    else:
        return [t]


def get_isinstance_type(expr: Expression,
                        type_map: Dict[Expression, Type]) -> Optional[List[TypeRange]]:
    """Turn the second argument of isinstance/issubclass into a list of TypeRanges.

    Returns None if any member is not something we can narrow by.
    """
    all_types = get_proper_types(flatten_types(type_map[expr]))
    types = []  # type: List[TypeRange]
    for typ in all_types:
        if isinstance(typ, FunctionLike) and typ.is_type_obj():
            # Type variables may be present -- erase them, which is the best
            # we can do (outside disallowing them here).
            erased_type = erase_typevars(typ.items()[0].ret_type)
            types.append(TypeRange(erased_type, is_upper_bound=False))
        elif isinstance(typ, TypeType):
            # Type[A] means "any type that is a subtype of A" rather than "precisely type A"
            # we indicate this by setting is_upper_bound flag
            types.append(TypeRange(typ.item, is_upper_bound=True))
        elif isinstance(typ, Instance) and typ.type.fullname == 'builtins.type':
            # Bare 'type' matches any class: use 'object' as an upper bound.
            object_type = Instance(typ.type.mro[-1], [])
            types.append(TypeRange(object_type, is_upper_bound=True))
        elif isinstance(typ, AnyType):
            types.append(TypeRange(typ, is_upper_bound=False))
        else:
            # we didn't see an actual type, but rather a variable whose value is unknown to us
            return None
    if not types:
        # this can happen if someone has empty tuple as 2nd argument to isinstance
        # strictly speaking, we should return UninhabitedType but for simplicity we will simply
        # refuse to do any type inference for now
        return None
    return types


def expand_func(defn: FuncItem, map: Dict[TypeVarId, Type]) -> FuncItem:
    """Substitute type variables throughout a function definition."""
    visitor = TypeTransformVisitor(map)
    ret = defn.accept(visitor)
    assert isinstance(ret, FuncItem)
    return ret


class TypeTransformVisitor(TransformVisitor):
    """Transform visitor that expands type variables using a given map."""

    def __init__(self, map: Dict[TypeVarId, Type]) -> None:
        super().__init__()
        self.map = map

    def type(self, type: Type) -> Type:
        return expand_type(type, self.map)


def are_argument_counts_overlapping(t: CallableType, s: CallableType) -> bool:
    """Can a single call match both t and s, based just on positional argument counts?
    """
    min_args = max(t.min_args, s.min_args)
    max_args = min(t.max_possible_positional_args(), s.max_possible_positional_args())
    return min_args <= max_args


def is_unsafe_overlapping_overload_signatures(signature: CallableType,
                                              other: CallableType) -> bool:
    """Check if two overloaded signatures are unsafely overlapping or partially overlapping.

    We consider two functions 's' and 't' to be unsafely overlapping if both
    of the following are true:

    1.
       s's parameters are all more precise or partially overlapping with t's
    2.  s's return type is NOT a subtype of t's.

    Assumes that 'signature' appears earlier in the list of overload
    alternatives then 'other' and that their argument counts are overlapping.
    """
    # Try detaching callables from the containing class so that all TypeVars
    # are treated as being free.
    #
    # This lets us identify cases where the two signatures use completely
    # incompatible types -- e.g. see the testOverloadingInferUnionReturnWithMixedTypevars
    # test case.
    signature = detach_callable(signature)
    other = detach_callable(other)

    # Note: We repeat this check twice in both directions due to a slight
    # asymmetry in 'is_callable_compatible'. When checking for partial overlaps,
    # we attempt to unify 'signature' and 'other' both against each other.
    #
    # If 'signature' cannot be unified with 'other', we end early. However,
    # if 'other' cannot be modified with 'signature', the function continues
    # using the older version of 'other'.
    #
    # This discrepancy is unfortunately difficult to get rid of, so we repeat the
    # checks twice in both directions for now.
    return (is_callable_compatible(signature, other,
                                   is_compat=is_overlapping_types_no_promote,
                                   is_compat_return=lambda l, r: not is_subtype_no_promote(l, r),
                                   ignore_return=False,
                                   check_args_covariantly=True,
                                   allow_partial_overlap=True) or
            is_callable_compatible(other, signature,
                                   is_compat=is_overlapping_types_no_promote,
                                   is_compat_return=lambda l, r: not is_subtype_no_promote(r, l),
                                   ignore_return=False,
                                   check_args_covariantly=False,
                                   allow_partial_overlap=True))


def detach_callable(typ: CallableType) -> CallableType:
    """Ensures that the callable's type variables are 'detached' and independent of the context.

    A callable normally keeps track of the type variables it uses within its 'variables' field.
    However, if the callable is from a method and that method is using a class type variable,
    the callable will not keep track of that type variable since it belongs to the class.

    This function will traverse the callable and find all used type vars and add them to the
    variables field if it isn't already present.

    The caller can then unify on all type variables whether or not the callable is originally
    from a class or not."""
    type_list = typ.arg_types + [typ.ret_type]

    # Map each type variable fullname to the indexes in type_list where it appears.
    appear_map = {}  # type: Dict[str, List[int]]
    for i, inner_type in enumerate(type_list):
        typevars_available = inner_type.accept(TypeVarExtractor())
        for var in typevars_available:
            if var.fullname not in appear_map:
                appear_map[var.fullname] = []
            appear_map[var.fullname].append(i)

    used_type_var_names = set()
    for var_name, appearances in appear_map.items():
        used_type_var_names.add(var_name)

    all_type_vars = typ.accept(TypeVarExtractor())
    new_variables = []
    for var in set(all_type_vars):
        if var.fullname not in used_type_var_names:
            continue
        new_variables.append(TypeVarDef(
            name=var.name,
            fullname=var.fullname,
            id=var.id,
            values=var.values,
            upper_bound=var.upper_bound,
            variance=var.variance,
        ))
    out = typ.copy_modified(
        variables=new_variables,
        arg_types=type_list[:-1],
        ret_type=type_list[-1],
    )
    return out


def overload_can_never_match(signature: CallableType, other: CallableType) -> bool:
    """Check if the 'other' method can never be matched due to 'signature'.

    This can happen if signature's parameters are all strictly broader then
    other's parameters.

    Assumes that both signatures have overlapping argument counts.
    """
    # The extra erasure is needed to prevent spurious errors
    # in situations where an `Any` overload is used as a fallback
    # for an overload with type variables. The spurious error appears
    # because the type variables turn into `Any` during unification in
    # the below subtype check and (surprisingly?) `is_proper_subtype(Any, Any)`
    # returns `True`.
    # TODO: find a cleaner solution instead of this ad-hoc erasure.
    exp_signature = expand_type(signature, {tvar.id: erase_def_to_union_or_bound(tvar)
                                            for tvar in signature.variables})
    assert isinstance(exp_signature, ProperType)
    assert isinstance(exp_signature, CallableType)
    return is_callable_compatible(exp_signature, other,
                                  is_compat=is_more_precise,
                                  ignore_return=True)


def is_more_general_arg_prefix(t: FunctionLike, s: FunctionLike) -> bool:
    """Does t have wider arguments than s?"""
    # TODO should an overload with additional items be allowed to be more
    # general than one with fewer items (or just one item)?
    if isinstance(t, CallableType):
        if isinstance(s, CallableType):
            return is_callable_compatible(t, s,
                                          is_compat=is_proper_subtype,
                                          ignore_return=True)
    elif isinstance(t, FunctionLike):
        if isinstance(s, FunctionLike):
            if len(t.items()) == len(s.items()):
                return all(is_same_arg_prefix(items, itemt)
                           for items, itemt in zip(t.items(), s.items()))
    return False


def is_same_arg_prefix(t: CallableType, s: CallableType) -> bool:
    """Do t and s accept the same arguments (ignoring return types and names)?"""
    return is_callable_compatible(t, s,
                                  is_compat=is_same_type,
                                  ignore_return=True,
                                  check_args_covariantly=True,
                                  ignore_pos_arg_names=True)


def infer_operator_assignment_method(typ: Type,
                                     operator: str) -> Tuple[bool, str]:
    """Determine if operator assignment on given value type is in-place, and the method name.

    For example, if operator is '+', return (True, '__iadd__') or (False, '__add__')
    depending on which method is supported by the type.
    """
    typ = get_proper_type(typ)
    method = nodes.op_methods[operator]
    if isinstance(typ, Instance):
        if operator in nodes.ops_with_inplace_method:
            # '__add__' -> '__iadd__' etc.
            inplace_method = '__i' + method[2:]
            if typ.type.has_readable_member(inplace_method):
                return True, inplace_method
    return False, method


def is_valid_inferred_type(typ: Type) -> bool:
    """Is an inferred type valid?

    Examples of invalid types include the None type or List[<uninhabited>].

    When not doing strict Optional checking, all types containing None are
    invalid.
    When doing strict Optional checking, only None and types that are
    incompletely defined (i.e. contain UninhabitedType) are invalid.
    """
    if isinstance(get_proper_type(typ), (NoneType, UninhabitedType)):
        # With strict Optional checking, we *may* eventually infer NoneType when
        # the initializer is None, but we only do that if we can't infer a
        # specific Optional type. This resolution happens in
        # leave_partial_types when we pop a partial types scope.
        return False
    return not typ.accept(NothingSeeker())


class NothingSeeker(TypeQuery[bool]):
    """Find any <nothing> types resulting from failed (ambiguous) type inference."""

    def __init__(self) -> None:
        # 'any' aggregates: True if any component type matches.
        super().__init__(any)

    def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
        return t.ambiguous


class SetNothingToAny(TypeTranslator):
    """Replace all ambiguous <nothing> types with Any (to avoid spurious extra errors)."""

    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
        if t.ambiguous:
            return AnyType(TypeOfAny.from_error)
        return t

    def visit_type_alias_type(self, t: TypeAliasType) -> Type:
        # Target of the alias cannot be an ambiguous <nothing>, so we just
        # replace the arguments.
return t.copy_modified(args=[a.accept(self) for a in t.args]) def is_node_static(node: Optional[Node]) -> Optional[bool]: """Find out if a node describes a static function method.""" if isinstance(node, FuncDef): return node.is_static if isinstance(node, Var): return node.is_staticmethod return None class CheckerScope: # We keep two stacks combined, to maintain the relative order stack = None # type: List[Union[TypeInfo, FuncItem, MypyFile]] def __init__(self, module: MypyFile) -> None: self.stack = [module] def top_function(self) -> Optional[FuncItem]: for e in reversed(self.stack): if isinstance(e, FuncItem): return e return None def top_non_lambda_function(self) -> Optional[FuncItem]: for e in reversed(self.stack): if isinstance(e, FuncItem) and not isinstance(e, LambdaExpr): return e return None def active_class(self) -> Optional[TypeInfo]: if isinstance(self.stack[-1], TypeInfo): return self.stack[-1] return None def enclosing_class(self) -> Optional[TypeInfo]: """Is there a class *directly* enclosing this function?""" top = self.top_function() assert top, "This method must be called from inside a function" index = self.stack.index(top) assert index, "CheckerScope stack must always start with a module" enclosing = self.stack[index - 1] if isinstance(enclosing, TypeInfo): return enclosing return None def active_self_type(self) -> Optional[Union[Instance, TupleType]]: """An instance or tuple type representing the current class. This returns None unless we are in class body or in a method. In particular, inside a function nested in method this returns None. 
""" info = self.active_class() if not info and self.top_function(): info = self.enclosing_class() if info: return fill_typevars(info) return None @contextmanager def push_function(self, item: FuncItem) -> Iterator[None]: self.stack.append(item) yield self.stack.pop() @contextmanager def push_class(self, info: TypeInfo) -> Iterator[None]: self.stack.append(info) yield self.stack.pop() @contextmanager def nothing() -> Iterator[None]: yield def is_typed_callable(c: Optional[Type]) -> bool: c = get_proper_type(c) if not c or not isinstance(c, CallableType): return False return not all(isinstance(t, AnyType) and t.type_of_any == TypeOfAny.unannotated for t in get_proper_types(c.arg_types + [c.ret_type])) def is_untyped_decorator(typ: Optional[Type]) -> bool: typ = get_proper_type(typ) if not typ: return True elif isinstance(typ, CallableType): return not is_typed_callable(typ) elif isinstance(typ, Instance): method = typ.type.get_method('__call__') if method: return not is_typed_callable(method.type) else: return False elif isinstance(typ, Overloaded): return any(is_untyped_decorator(item) for item in typ.items()) return True def is_static(func: Union[FuncBase, Decorator]) -> bool: if isinstance(func, Decorator): return is_static(func.func) elif isinstance(func, FuncBase): return func.is_static assert False, "Unexpected func type: {}".format(type(func)) def is_subtype_no_promote(left: Type, right: Type) -> bool: return is_subtype(left, right, ignore_promotions=True) def is_overlapping_types_no_promote(left: Type, right: Type) -> bool: return is_overlapping_types(left, right, ignore_promotions=True) def is_private(node_name: str) -> bool: """Check if node is private to class definition.""" return node_name.startswith('__') and not node_name.endswith('__') def has_bool_item(typ: ProperType) -> bool: """Return True if type is 'bool' or a union with a 'bool' item.""" if is_named_instance(typ, 'builtins.bool'): return True if isinstance(typ, UnionType): return 
any(is_named_instance(item, 'builtins.bool') for item in typ.items) return False mypy-0.761/mypy/checkexpr.py0000644€tŠÔÚ€2›s®0000063247313576752246022263 0ustar jukkaDROPBOX\Domain Users00000000000000"""Expression type checker. This file is conceptually part of TypeChecker.""" from collections import OrderedDict from contextlib import contextmanager import itertools from typing import ( cast, Dict, Set, List, Tuple, Callable, Union, Optional, Sequence, Iterator ) from typing_extensions import ClassVar, Final, overload from mypy.errors import report_internal_error from mypy.typeanal import ( has_any_from_unimported_type, check_for_explicit_any, set_any_tvars, expand_type_alias, make_optional_type, ) from mypy.types import ( Type, AnyType, CallableType, Overloaded, NoneType, TypeVarDef, TupleType, TypedDictType, Instance, TypeVarType, ErasedType, UnionType, PartialType, DeletedType, UninhabitedType, TypeType, TypeOfAny, LiteralType, LiteralValue, is_named_instance, FunctionLike, StarType, is_optional, remove_optional, is_generic_instance, get_proper_type, ProperType, get_proper_types ) from mypy.nodes import ( NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr, MemberExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr, OpExpr, UnaryExpr, IndexExpr, CastExpr, RevealExpr, TypeApplication, ListExpr, TupleExpr, DictExpr, LambdaExpr, SuperExpr, SliceExpr, Context, Expression, ListComprehension, GeneratorExpr, SetExpr, MypyFile, Decorator, ConditionalExpr, ComparisonExpr, TempNode, SetComprehension, AssignmentExpr, DictionaryComprehension, ComplexExpr, EllipsisExpr, StarExpr, AwaitExpr, YieldExpr, YieldFromExpr, TypedDictExpr, PromoteExpr, NewTypeExpr, NamedTupleExpr, TypeVarExpr, TypeAliasExpr, BackquoteExpr, EnumCallExpr, TypeAlias, SymbolNode, PlaceholderNode, ARG_POS, ARG_OPT, ARG_NAMED, ARG_STAR, ARG_STAR2, LITERAL_TYPE, REVEAL_TYPE, SYMBOL_FUNCBASE_TYPES ) from mypy.literals import literal from mypy import nodes import mypy.checker from 
mypy import types from mypy.sametypes import is_same_type from mypy.erasetype import replace_meta_vars, erase_type, remove_instance_last_known_values from mypy.maptype import map_instance_to_supertype from mypy.messages import MessageBuilder from mypy import message_registry from mypy.infer import infer_type_arguments, infer_function_type_arguments from mypy import join from mypy.meet import narrow_declared_type, is_overlapping_types from mypy.subtypes import is_subtype, is_proper_subtype, is_equivalent, non_method_protocol_members from mypy import applytype from mypy import erasetype from mypy.checkmember import analyze_member_access, type_object_type from mypy.argmap import ArgTypeExpander, map_actuals_to_formals, map_formals_to_actuals from mypy.checkstrformat import StringFormatterChecker, custom_special_method from mypy.expandtype import expand_type, expand_type_by_instance, freshen_function_type_vars from mypy.util import split_module_names from mypy.typevars import fill_typevars from mypy.visitor import ExpressionVisitor from mypy.plugin import Plugin, MethodContext, MethodSigContext, FunctionContext from mypy.typeops import ( tuple_fallback, make_simplified_union, true_only, false_only, erase_to_union_or_bound, function_type, callable_type, try_getting_str_literals ) import mypy.errorcodes as codes # Type of callback user for checking individual function arguments. See # check_args() below for details. ArgChecker = Callable[[Type, Type, int, Type, int, int, CallableType, Context, Context, MessageBuilder], None] # Maximum nesting level for math union in overloads, setting this to large values # may cause performance issues. The reason is that although union math algorithm we use # nicely captures most corner cases, its worst case complexity is exponential, # see https://github.com/python/mypy/pull/5255#discussion_r196896335 for discussion. 
MAX_UNIONS = 5 # type: Final # Types considered safe for comparisons with --strict-equality due to known behaviour of __eq__. # NOTE: All these types are subtypes of AbstractSet. OVERLAPPING_TYPES_WHITELIST = ['builtins.set', 'builtins.frozenset', 'typing.KeysView', 'typing.ItemsView'] # type: Final class TooManyUnions(Exception): """Indicates that we need to stop splitting unions in an attempt to match an overload in order to save performance. """ def extract_refexpr_names(expr: RefExpr) -> Set[str]: """Recursively extracts all module references from a reference expression. Note that currently, the only two subclasses of RefExpr are NameExpr and MemberExpr.""" output = set() # type: Set[str] while isinstance(expr.node, MypyFile) or expr.fullname is not None: if isinstance(expr.node, MypyFile) and expr.fullname is not None: # If it's None, something's wrong (perhaps due to an # import cycle or a suppressed error). For now we just # skip it. output.add(expr.fullname) if isinstance(expr, NameExpr): is_suppressed_import = isinstance(expr.node, Var) and expr.node.is_suppressed_import if isinstance(expr.node, TypeInfo): # Reference to a class or a nested class output.update(split_module_names(expr.node.module_name)) elif expr.fullname is not None and '.' in expr.fullname and not is_suppressed_import: # Everything else (that is not a silenced import within a class) output.add(expr.fullname.rsplit('.', 1)[0]) break elif isinstance(expr, MemberExpr): if isinstance(expr.expr, RefExpr): expr = expr.expr else: break else: raise AssertionError("Unknown RefExpr subclass: {}".format(type(expr))) return output class Finished(Exception): """Raised if we can terminate overload argument check early (no match).""" class ExpressionChecker(ExpressionVisitor[Type]): """Expression type checker. This class works closely together with checker.TypeChecker. """ # Some services are provided by a TypeChecker instance. 
chk = None # type: mypy.checker.TypeChecker # This is shared with TypeChecker, but stored also here for convenience. msg = None # type: MessageBuilder # Type context for type inference type_context = None # type: List[Optional[Type]] strfrm_checker = None # type: StringFormatterChecker plugin = None # type: Plugin def __init__(self, chk: 'mypy.checker.TypeChecker', msg: MessageBuilder, plugin: Plugin) -> None: """Construct an expression type checker.""" self.chk = chk self.msg = msg self.plugin = plugin self.type_context = [None] # Temporary overrides for expression types. This is currently # used by the union math in overloads. # TODO: refactor this to use a pattern similar to one in # multiassign_from_union, or maybe even combine the two? self.type_overrides = {} # type: Dict[Expression, Type] self.strfrm_checker = StringFormatterChecker(self, self.chk, self.msg) def visit_name_expr(self, e: NameExpr) -> Type: """Type check a name expression. It can be of any kind: local, member or global. """ self.chk.module_refs.update(extract_refexpr_names(e)) result = self.analyze_ref_expr(e) return self.narrow_type_from_binder(e, result) def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: result = None # type: Optional[Type] node = e.node if isinstance(e, NameExpr) and e.is_special_form: # A special form definition, nothing to check here. return AnyType(TypeOfAny.special_form) if isinstance(node, Var): # Variable reference. result = self.analyze_var_ref(node, e) if isinstance(result, PartialType): result = self.chk.handle_partial_var_type(result, lvalue, node, e) elif isinstance(node, FuncDef): # Reference to a global function. 
            result = function_type(node, self.named_type('builtins.function'))
        elif isinstance(node, OverloadedFuncDef) and node.type is not None:
            # node.type is None when there are multiple definitions of a function
            # and it's decorated by something that is not typing.overload
            # TODO: use a dummy Overloaded instead of AnyType in this case
            # like we do in mypy.types.function_type()?
            result = node.type
        elif isinstance(node, TypeInfo):
            # Reference to a type object.
            result = type_object_type(node, self.named_type)
            if (isinstance(result, CallableType) and
                    isinstance(result.ret_type, Instance)):  # type: ignore
                # We need to set correct line and column
                # TODO: always do this in type_object_type by passing the original context
                result.ret_type.line = e.line
                result.ret_type.column = e.column
            if isinstance(get_proper_type(self.type_context[-1]), TypeType):
                # This is the type in a Type[] expression, so substitute type
                # variables with Any.
                result = erasetype.erase_typevars(result)
        elif isinstance(node, MypyFile):
            # Reference to a module object.
            try:
                result = self.named_type('types.ModuleType')
            except KeyError:
                # In test cases might 'types' may not be available.
                # Fall back to a dummy 'object' type instead to
                # avoid a crash.
                result = self.named_type('builtins.object')
        elif isinstance(node, Decorator):
            result = self.analyze_var_ref(node.var, e)
        elif isinstance(node, TypeAlias):
            # Something that refers to a type alias appears in runtime context.
            # Note that we suppress bogus errors for alias redefinitions,
            # they are already reported in semanal.py.
            result = self.alias_type_in_runtime_context(node, node.no_args, e,
                                                        alias_definition=e.is_alias_rvalue
                                                        or lvalue)
        else:
            if isinstance(node, PlaceholderNode):
                assert False, 'PlaceholderNode %r leaked to checker' % node.fullname
            # Unknown reference; use any type implicitly to avoid
            # generating extra type errors.
            result = AnyType(TypeOfAny.from_error)
        assert result is not None
        return result

    def analyze_var_ref(self, var: Var, context: Context) -> Type:
        """Infer the type of a reference to a variable."""
        if var.type:
            var_type = get_proper_type(var.type)
            if isinstance(var_type, Instance):
                if self.is_literal_context() and var_type.last_known_value is not None:
                    return var_type.last_known_value
                if var.name in {'True', 'False'}:
                    return self.infer_literal_expr_type(var.name == 'True', 'builtins.bool')
            return var.type
        else:
            if not var.is_ready and self.chk.in_checked_function():
                self.chk.handle_cannot_determine_type(var.name, context)
            # Implicit 'Any' type.
            return AnyType(TypeOfAny.special_form)

    def visit_call_expr(self, e: CallExpr, allow_none_return: bool = False) -> Type:
        """Type check a call expression."""
        if e.analyzed:
            if isinstance(e.analyzed, NamedTupleExpr) and not e.analyzed.is_typed:
                # Type check the arguments, but ignore the results. This relies
                # on the typeshed stubs to type check the arguments.
                self.visit_call_expr_inner(e)
            # It's really a special form that only looks like a call.
            return self.accept(e.analyzed, self.type_context[-1])
        return self.visit_call_expr_inner(e, allow_none_return=allow_none_return)

    def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> Type:
        """Type check a call expression that is not a special form."""
        if isinstance(e.callee, NameExpr) and isinstance(e.callee.node, TypeInfo) and \
                e.callee.node.typeddict_type is not None:
            # Use named fallback for better error messages.
            typeddict_type = e.callee.node.typeddict_type.copy_modified(
                fallback=Instance(e.callee.node, []))
            return self.check_typeddict_call(typeddict_type, e.arg_kinds, e.arg_names, e.args, e)
        if (isinstance(e.callee, NameExpr) and e.callee.name in ('isinstance', 'issubclass')
                and len(e.args) == 2):
            # Validate the second argument of isinstance/issubclass.
            for typ in mypy.checker.flatten(e.args[1]):
                node = None
                if isinstance(typ, NameExpr):
                    try:
                        node = self.chk.lookup_qualified(typ.name)
                    except KeyError:
                        # Undefined names should already be reported in semantic analysis.
pass if is_expr_literal_type(typ): self.msg.cannot_use_function_with_type(e.callee.name, "Literal", e) continue if (node and isinstance(node.node, TypeAlias) and isinstance(get_proper_type(node.node.target), AnyType)): self.msg.cannot_use_function_with_type(e.callee.name, "Any", e) continue if ((isinstance(typ, IndexExpr) and isinstance(typ.analyzed, (TypeApplication, TypeAliasExpr))) or (isinstance(typ, NameExpr) and node and isinstance(node.node, TypeAlias) and not node.node.no_args)): self.msg.type_arguments_not_allowed(e) if isinstance(typ, RefExpr) and isinstance(typ.node, TypeInfo): if typ.node.typeddict_type: self.msg.cannot_use_function_with_type(e.callee.name, "TypedDict", e) elif typ.node.is_newtype: self.msg.cannot_use_function_with_type(e.callee.name, "NewType", e) self.try_infer_partial_type(e) type_context = None if isinstance(e.callee, LambdaExpr): formal_to_actual = map_actuals_to_formals( e.arg_kinds, e.arg_names, e.callee.arg_kinds, e.callee.arg_names, lambda i: self.accept(e.args[i])) arg_types = [join.join_type_list([self.accept(e.args[j]) for j in formal_to_actual[i]]) for i in range(len(e.callee.arg_kinds))] type_context = CallableType(arg_types, e.callee.arg_kinds, e.callee.arg_names, ret_type=self.object_type(), fallback=self.named_type('builtins.function')) callee_type = get_proper_type(self.accept(e.callee, type_context, always_allow_any=True)) if (self.chk.options.disallow_untyped_calls and self.chk.in_checked_function() and isinstance(callee_type, CallableType) and callee_type.implicit): return self.msg.untyped_function_call(callee_type, e) # Figure out the full name of the callee for plugin lookup. object_type = None member = None fullname = None if isinstance(e.callee, RefExpr): # There are two special cases where plugins might act: # * A "static" reference/alias to a class or function; # get_function_hook() will be invoked for these. 
fullname = e.callee.fullname if isinstance(e.callee.node, TypeAlias): target = get_proper_type(e.callee.node.target) if isinstance(target, Instance): fullname = target.type.fullname # * Call to a method on object that has a full name (see # method_fullname() for details on supported objects); # get_method_hook() and get_method_signature_hook() will # be invoked for these. if (fullname is None and isinstance(e.callee, MemberExpr) and e.callee.expr in self.chk.type_map): member = e.callee.name object_type = self.chk.type_map[e.callee.expr] ret_type = self.check_call_expr_with_callee_type(callee_type, e, fullname, object_type, member) if isinstance(e.callee, RefExpr) and len(e.args) == 2: if e.callee.fullname in ('builtins.isinstance', 'builtins.issubclass'): self.check_runtime_protocol_test(e) if e.callee.fullname == 'builtins.issubclass': self.check_protocol_issubclass(e) if isinstance(e.callee, MemberExpr) and e.callee.name == 'format': self.check_str_format_call(e) ret_type = get_proper_type(ret_type) if isinstance(ret_type, UninhabitedType) and not ret_type.ambiguous: self.chk.binder.unreachable() # Warn on calls to functions that always return None. The check # of ret_type is both a common-case optimization and prevents reporting # the error in dynamic functions (where it will be Any). 
if (not allow_none_return and isinstance(ret_type, NoneType) and self.always_returns_none(e.callee)): self.chk.msg.does_not_return_value(callee_type, e) return AnyType(TypeOfAny.from_error) return ret_type def check_str_format_call(self, e: CallExpr) -> None: """More precise type checking for str.format() calls on literals.""" assert isinstance(e.callee, MemberExpr) format_value = None if isinstance(e.callee.expr, (StrExpr, UnicodeExpr)): format_value = e.callee.expr.value elif e.callee.expr in self.chk.type_map: base_typ = try_getting_literal(self.chk.type_map[e.callee.expr]) if isinstance(base_typ, LiteralType) and isinstance(base_typ.value, str): format_value = base_typ.value if format_value is not None: self.strfrm_checker.check_str_format_call(e, format_value) def method_fullname(self, object_type: Type, method_name: str) -> Optional[str]: """Convert a method name to a fully qualified name, based on the type of the object that it is invoked on. Return `None` if the name of `object_type` cannot be determined. """ object_type = get_proper_type(object_type) if isinstance(object_type, CallableType) and object_type.is_type_obj(): # For class method calls, object_type is a callable representing the class object. # We "unwrap" it to a regular type, as the class/instance method difference doesn't # affect the fully qualified name. 
object_type = get_proper_type(object_type.ret_type) elif isinstance(object_type, TypeType): object_type = object_type.item type_name = None if isinstance(object_type, Instance): type_name = object_type.type.fullname elif isinstance(object_type, (TypedDictType, LiteralType)): info = object_type.fallback.type.get_containing_type_info(method_name) type_name = info.fullname if info is not None else None elif isinstance(object_type, TupleType): type_name = tuple_fallback(object_type).type.fullname if type_name is not None: return '{}.{}'.format(type_name, method_name) else: return None def always_returns_none(self, node: Expression) -> bool: """Check if `node` refers to something explicitly annotated as only returning None.""" if isinstance(node, RefExpr): if self.defn_returns_none(node.node): return True if isinstance(node, MemberExpr) and node.node is None: # instance or class attribute typ = get_proper_type(self.chk.type_map.get(node.expr)) if isinstance(typ, Instance): info = typ.type elif isinstance(typ, CallableType) and typ.is_type_obj(): ret_type = get_proper_type(typ.ret_type) if isinstance(ret_type, Instance): info = ret_type.type else: return False else: return False sym = info.get(node.name) if sym and self.defn_returns_none(sym.node): return True return False def defn_returns_none(self, defn: Optional[SymbolNode]) -> bool: """Check if `defn` can _only_ return None.""" if isinstance(defn, FuncDef): return (isinstance(defn.type, CallableType) and isinstance(get_proper_type(defn.type.ret_type), NoneType)) if isinstance(defn, OverloadedFuncDef): return all(self.defn_returns_none(item) for item in defn.items) if isinstance(defn, Var): typ = get_proper_type(defn.type) if (not defn.is_inferred and isinstance(typ, CallableType) and isinstance(get_proper_type(typ.ret_type), NoneType)): return True if isinstance(typ, Instance): sym = typ.type.get('__call__') if sym and self.defn_returns_none(sym.node): return True return False def check_runtime_protocol_test(self, e: 
CallExpr) -> None: for expr in mypy.checker.flatten(e.args[1]): tp = get_proper_type(self.chk.type_map[expr]) if (isinstance(tp, CallableType) and tp.is_type_obj() and tp.type_object().is_protocol and not tp.type_object().runtime_protocol): self.chk.fail(message_registry.RUNTIME_PROTOCOL_EXPECTED, e) def check_protocol_issubclass(self, e: CallExpr) -> None: for expr in mypy.checker.flatten(e.args[1]): tp = get_proper_type(self.chk.type_map[expr]) if (isinstance(tp, CallableType) and tp.is_type_obj() and tp.type_object().is_protocol): attr_members = non_method_protocol_members(tp.type_object()) if attr_members: self.chk.msg.report_non_method_protocol(tp.type_object(), attr_members, e) def check_typeddict_call(self, callee: TypedDictType, arg_kinds: List[int], arg_names: Sequence[Optional[str]], args: List[Expression], context: Context) -> Type: if len(args) >= 1 and all([ak == ARG_NAMED for ak in arg_kinds]): # ex: Point(x=42, y=1337) assert all(arg_name is not None for arg_name in arg_names) item_names = cast(List[str], arg_names) item_args = args return self.check_typeddict_call_with_kwargs( callee, OrderedDict(zip(item_names, item_args)), context) if len(args) == 1 and arg_kinds[0] == ARG_POS: unique_arg = args[0] if isinstance(unique_arg, DictExpr): # ex: Point({'x': 42, 'y': 1337}) return self.check_typeddict_call_with_dict(callee, unique_arg, context) if isinstance(unique_arg, CallExpr) and isinstance(unique_arg.analyzed, DictExpr): # ex: Point(dict(x=42, y=1337)) return self.check_typeddict_call_with_dict(callee, unique_arg.analyzed, context) if len(args) == 0: # ex: EmptyDict() return self.check_typeddict_call_with_kwargs( callee, OrderedDict(), context) self.chk.fail(message_registry.INVALID_TYPEDDICT_ARGS, context) return AnyType(TypeOfAny.from_error) def check_typeddict_call_with_dict(self, callee: TypedDictType, kwargs: DictExpr, context: Context) -> Type: item_args = [item[1] for item in kwargs.items] item_names = [] # List[str] for item_name_expr, 
item_arg in kwargs.items: literal_value = None if item_name_expr: key_type = self.accept(item_name_expr) values = try_getting_str_literals(item_name_expr, key_type) if values and len(values) == 1: literal_value = values[0] if literal_value is None: key_context = item_name_expr or item_arg self.chk.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, key_context) return AnyType(TypeOfAny.from_error) else: item_names.append(literal_value) return self.check_typeddict_call_with_kwargs( callee, OrderedDict(zip(item_names, item_args)), context) def check_typeddict_call_with_kwargs(self, callee: TypedDictType, kwargs: 'OrderedDict[str, Expression]', context: Context) -> Type: if not (callee.required_keys <= set(kwargs.keys()) <= set(callee.items.keys())): expected_keys = [key for key in callee.items.keys() if key in callee.required_keys or key in kwargs.keys()] actual_keys = kwargs.keys() self.msg.unexpected_typeddict_keys( callee, expected_keys=expected_keys, actual_keys=list(actual_keys), context=context) return AnyType(TypeOfAny.from_error) for (item_name, item_expected_type) in callee.items.items(): if item_name in kwargs: item_value = kwargs[item_name] self.chk.check_simple_assignment( lvalue_type=item_expected_type, rvalue=item_value, context=item_value, msg=message_registry.INCOMPATIBLE_TYPES, lvalue_name='TypedDict item "{}"'.format(item_name), rvalue_name='expression', code=codes.TYPEDDICT_ITEM) return callee def get_partial_self_var(self, expr: MemberExpr) -> Optional[Var]: """Get variable node for a partial self attribute. If the expression is not a self attribute, or attribute is not variable, or variable is not partial, return None. """ if not (isinstance(expr.expr, NameExpr) and isinstance(expr.expr.node, Var) and expr.expr.node.is_self): # Not a self.attr expression. return None info = self.chk.scope.enclosing_class() if not info or expr.name not in info.names: # Don't mess with partial types in superclasses. 
return None sym = info.names[expr.name] if isinstance(sym.node, Var) and isinstance(sym.node.type, PartialType): return sym.node return None # Types and methods that can be used to infer partial types. item_args = {'builtins.list': ['append'], 'builtins.set': ['add', 'discard'], } # type: ClassVar[Dict[str, List[str]]] container_args = {'builtins.list': {'extend': ['builtins.list']}, 'builtins.dict': {'update': ['builtins.dict']}, 'collections.OrderedDict': {'update': ['builtins.dict']}, 'builtins.set': {'update': ['builtins.set', 'builtins.list']}, } # type: ClassVar[Dict[str, Dict[str, List[str]]]] def try_infer_partial_type(self, e: CallExpr) -> None: if isinstance(e.callee, MemberExpr) and isinstance(e.callee.expr, RefExpr): var = e.callee.expr.node if var is None and isinstance(e.callee.expr, MemberExpr): var = self.get_partial_self_var(e.callee.expr) if not isinstance(var, Var): return partial_types = self.chk.find_partial_types(var) if partial_types is not None and not self.chk.current_node_deferred: partial_type = var.type if (partial_type is None or not isinstance(partial_type, PartialType) or partial_type.type is None): # A partial None type -> can't infer anything. return typename = partial_type.type.fullname methodname = e.callee.name # Sometimes we can infer a full type for a partial List, Dict or Set type. # TODO: Don't infer argument expression twice. 
if (typename in self.item_args and methodname in self.item_args[typename] and e.arg_kinds == [ARG_POS]): item_type = self.accept(e.args[0]) if mypy.checker.is_valid_inferred_type(item_type): var.type = self.chk.named_generic_type(typename, [item_type]) del partial_types[var] elif (typename in self.container_args and methodname in self.container_args[typename] and e.arg_kinds == [ARG_POS]): arg_type = get_proper_type(self.accept(e.args[0])) if isinstance(arg_type, Instance): arg_typename = arg_type.type.fullname if arg_typename in self.container_args[typename][methodname]: if all(mypy.checker.is_valid_inferred_type(item_type) for item_type in arg_type.args): var.type = self.chk.named_generic_type(typename, list(arg_type.args)) del partial_types[var] def apply_function_plugin(self, callee: CallableType, arg_kinds: List[int], arg_types: List[Type], arg_names: Optional[Sequence[Optional[str]]], formal_to_actual: List[List[int]], args: List[Expression], fullname: str, object_type: Optional[Type], context: Context) -> Type: """Use special case logic to infer the return type of a specific named function/method. Caller must ensure that a plugin hook exists. There are two different cases: - If object_type is None, the caller must ensure that a function hook exists for fullname. - If object_type is not None, the caller must ensure that a method hook exists for fullname. Return the inferred return type. 
""" num_formals = len(callee.arg_types) formal_arg_types = [[] for _ in range(num_formals)] # type: List[List[Type]] formal_arg_exprs = [[] for _ in range(num_formals)] # type: List[List[Expression]] formal_arg_names = [[] for _ in range(num_formals)] # type: List[List[Optional[str]]] formal_arg_kinds = [[] for _ in range(num_formals)] # type: List[List[int]] for formal, actuals in enumerate(formal_to_actual): for actual in actuals: formal_arg_types[formal].append(arg_types[actual]) formal_arg_exprs[formal].append(args[actual]) if arg_names: formal_arg_names[formal].append(arg_names[actual]) formal_arg_kinds[formal].append(arg_kinds[actual]) if object_type is None: # Apply function plugin callback = self.plugin.get_function_hook(fullname) assert callback is not None # Assume that caller ensures this return callback( FunctionContext(formal_arg_types, formal_arg_kinds, callee.arg_names, formal_arg_names, callee.ret_type, formal_arg_exprs, context, self.chk)) else: # Apply method plugin method_callback = self.plugin.get_method_hook(fullname) assert method_callback is not None # Assume that caller ensures this object_type = get_proper_type(object_type) return method_callback( MethodContext(object_type, formal_arg_types, formal_arg_kinds, callee.arg_names, formal_arg_names, callee.ret_type, formal_arg_exprs, context, self.chk)) def apply_method_signature_hook( self, callee: FunctionLike, args: List[Expression], arg_kinds: List[int], context: Context, arg_names: Optional[Sequence[Optional[str]]], object_type: Type, signature_hook: Callable[[MethodSigContext], CallableType]) -> FunctionLike: """Apply a plugin hook that may infer a more precise signature for a method.""" if isinstance(callee, CallableType): num_formals = len(callee.arg_kinds) formal_to_actual = map_actuals_to_formals( arg_kinds, arg_names, callee.arg_kinds, callee.arg_names, lambda i: self.accept(args[i])) formal_arg_exprs = [[] for _ in range(num_formals)] # type: List[List[Expression]] for formal, 
actuals in enumerate(formal_to_actual): for actual in actuals: formal_arg_exprs[formal].append(args[actual]) object_type = get_proper_type(object_type) return signature_hook( MethodSigContext(object_type, formal_arg_exprs, callee, context, self.chk)) else: assert isinstance(callee, Overloaded) items = [] for item in callee.items(): adjusted = self.apply_method_signature_hook( item, args, arg_kinds, context, arg_names, object_type, signature_hook) assert isinstance(adjusted, CallableType) items.append(adjusted) return Overloaded(items) def transform_callee_type( self, callable_name: Optional[str], callee: Type, args: List[Expression], arg_kinds: List[int], context: Context, arg_names: Optional[Sequence[Optional[str]]] = None, object_type: Optional[Type] = None) -> Type: """Attempt to determine a more accurate signature for a method call. This is done by looking up and applying a method signature hook (if one exists for the given method name). If no matching method signature hook is found, callee is returned unmodified. The same happens if the arguments refer to a non-method callable (this is allowed so that the code calling transform_callee_type needs to perform fewer boilerplate checks). Note: this method is *not* called automatically as part of check_call, because in some cases check_call is called multiple times while checking a single call (for example when dealing with overloads). Instead, this method needs to be called explicitly (if appropriate) before the signature is passed to check_call. 
""" callee = get_proper_type(callee) if (callable_name is not None and object_type is not None and isinstance(callee, FunctionLike)): signature_hook = self.plugin.get_method_signature_hook(callable_name) if signature_hook: return self.apply_method_signature_hook( callee, args, arg_kinds, context, arg_names, object_type, signature_hook) return callee def check_call_expr_with_callee_type(self, callee_type: Type, e: CallExpr, callable_name: Optional[str], object_type: Optional[Type], member: Optional[str] = None) -> Type: """Type check call expression. The callee_type should be used as the type of callee expression. In particular, in case of a union type this can be a particular item of the union, so that we can apply plugin hooks to each item. The 'member', 'callable_name' and 'object_type' are only used to call plugin hooks. If 'callable_name' is None but 'member' is not None (member call), try constructing 'callable_name' using 'object_type' (the base type on which the method is called), for example 'typing.Mapping.get'. """ if callable_name is None and member is not None: assert object_type is not None callable_name = self.method_fullname(object_type, member) object_type = get_proper_type(object_type) if callable_name: # Try to refine the call signature using plugin hooks before checking the call. callee_type = self.transform_callee_type( callable_name, callee_type, e.args, e.arg_kinds, e, e.arg_names, object_type) # Unions are special-cased to allow plugins to act on each item in the union. 
elif member is not None and isinstance(object_type, UnionType): return self.check_union_call_expr(e, object_type, member) return self.check_call(callee_type, e.args, e.arg_kinds, e, e.arg_names, callable_node=e.callee, callable_name=callable_name, object_type=object_type)[0] def check_union_call_expr(self, e: CallExpr, object_type: UnionType, member: str) -> Type: """"Type check calling a member expression where the base type is a union.""" res = [] # type: List[Type] for typ in object_type.relevant_items(): # Member access errors are already reported when visiting the member expression. self.msg.disable_errors() item = analyze_member_access(member, typ, e, False, False, False, self.msg, original_type=object_type, chk=self.chk, in_literal_context=self.is_literal_context(), self_type=typ) self.msg.enable_errors() narrowed = self.narrow_type_from_binder(e.callee, item, skip_non_overlapping=True) if narrowed is None: continue callable_name = self.method_fullname(typ, member) item_object_type = typ if callable_name else None res.append(self.check_call_expr_with_callee_type(narrowed, e, callable_name, item_object_type)) return make_simplified_union(res) def check_call(self, callee: Type, args: List[Expression], arg_kinds: List[int], context: Context, arg_names: Optional[Sequence[Optional[str]]] = None, callable_node: Optional[Expression] = None, arg_messages: Optional[MessageBuilder] = None, callable_name: Optional[str] = None, object_type: Optional[Type] = None) -> Tuple[Type, Type]: """Type check a call. Also infer type arguments if the callee is a generic function. Return (result type, inferred callee type). Arguments: callee: type of the called value args: actual argument expressions arg_kinds: contains nodes.ARG_* constant for each argument in args describing whether the argument is positional, *arg, etc. 
arg_names: names of arguments (optional) callable_node: associate the inferred callable type to this node, if specified arg_messages: TODO callable_name: Fully-qualified name of the function/method to call, or None if unavailable (examples: 'builtins.open', 'typing.Mapping.get') object_type: If callable_name refers to a method, the type of the object on which the method is being called """ arg_messages = arg_messages or self.msg callee = get_proper_type(callee) if isinstance(callee, CallableType): return self.check_callable_call(callee, args, arg_kinds, context, arg_names, callable_node, arg_messages, callable_name, object_type) elif isinstance(callee, Overloaded): return self.check_overload_call(callee, args, arg_kinds, arg_names, callable_name, object_type, context, arg_messages) elif isinstance(callee, AnyType) or not self.chk.in_checked_function(): return self.check_any_type_call(args, callee) elif isinstance(callee, UnionType): return self.check_union_call(callee, args, arg_kinds, arg_names, context, arg_messages) elif isinstance(callee, Instance): call_function = analyze_member_access('__call__', callee, context, is_lvalue=False, is_super=False, is_operator=True, msg=self.msg, original_type=callee, chk=self.chk, in_literal_context=self.is_literal_context()) callable_name = callee.type.fullname + ".__call__" # Apply method signature hook, if one exists call_function = self.transform_callee_type( callable_name, call_function, args, arg_kinds, context, arg_names, callee) return self.check_call(call_function, args, arg_kinds, context, arg_names, callable_node, arg_messages, callable_name, callee) elif isinstance(callee, TypeVarType): return self.check_call(callee.upper_bound, args, arg_kinds, context, arg_names, callable_node, arg_messages) elif isinstance(callee, TypeType): # Pass the original Type[] as context since that's where errors should go. 
item = self.analyze_type_type_callee(callee.item, callee) return self.check_call(item, args, arg_kinds, context, arg_names, callable_node, arg_messages) elif isinstance(callee, TupleType): return self.check_call(tuple_fallback(callee), args, arg_kinds, context, arg_names, callable_node, arg_messages, callable_name, object_type) else: return self.msg.not_callable(callee, context), AnyType(TypeOfAny.from_error) def check_callable_call(self, callee: CallableType, args: List[Expression], arg_kinds: List[int], context: Context, arg_names: Optional[Sequence[Optional[str]]], callable_node: Optional[Expression], arg_messages: MessageBuilder, callable_name: Optional[str], object_type: Optional[Type]) -> Tuple[Type, Type]: """Type check a call that targets a callable value. See the docstring of check_call for more information. """ if callable_name is None and callee.name: callable_name = callee.name ret_type = get_proper_type(callee.ret_type) if callee.is_type_obj() and isinstance(ret_type, Instance): callable_name = ret_type.type.fullname if (isinstance(callable_node, RefExpr) and callable_node.fullname in ('enum.Enum', 'enum.IntEnum', 'enum.Flag', 'enum.IntFlag')): # An Enum() call that failed SemanticAnalyzerPass2.check_enum_call(). return callee.ret_type, callee if (callee.is_type_obj() and callee.type_object().is_abstract # Exception for Type[...] and not callee.from_type_type and not callee.type_object().fallback_to_any): type = callee.type_object() self.msg.cannot_instantiate_abstract_class( callee.type_object().name, type.abstract_attributes, context) elif (callee.is_type_obj() and callee.type_object().is_protocol # Exception for Type[...] 
and not callee.from_type_type): self.chk.fail(message_registry.CANNOT_INSTANTIATE_PROTOCOL .format(callee.type_object().name), context) formal_to_actual = map_actuals_to_formals( arg_kinds, arg_names, callee.arg_kinds, callee.arg_names, lambda i: self.accept(args[i])) if callee.is_generic(): callee = freshen_function_type_vars(callee) callee = self.infer_function_type_arguments_using_context( callee, context) callee = self.infer_function_type_arguments( callee, args, arg_kinds, formal_to_actual, context) arg_types = self.infer_arg_types_in_context( callee, args, arg_kinds, formal_to_actual) self.check_argument_count(callee, arg_types, arg_kinds, arg_names, formal_to_actual, context, self.msg) self.check_argument_types(arg_types, arg_kinds, args, callee, formal_to_actual, context, messages=arg_messages) if (callee.is_type_obj() and (len(arg_types) == 1) and is_equivalent(callee.ret_type, self.named_type('builtins.type'))): callee = callee.copy_modified(ret_type=TypeType.make_normalized(arg_types[0])) if callable_node: # Store the inferred callable type. self.chk.store_type(callable_node, callee) if (callable_name and ((object_type is None and self.plugin.get_function_hook(callable_name)) or (object_type is not None and self.plugin.get_method_hook(callable_name)))): new_ret_type = self.apply_function_plugin( callee, arg_kinds, arg_types, arg_names, formal_to_actual, args, callable_name, object_type, context) callee = callee.copy_modified(ret_type=new_ret_type) return callee.ret_type, callee def analyze_type_type_callee(self, item: ProperType, context: Context) -> Type: """Analyze the callee X in X(...) where X is Type[item]. Return a Y that we can pass to check_call(Y, ...). 
""" if isinstance(item, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=item) if isinstance(item, Instance): res = type_object_type(item.type, self.named_type) if isinstance(res, CallableType): res = res.copy_modified(from_type_type=True) expanded = get_proper_type(expand_type_by_instance(res, item)) if isinstance(expanded, CallableType): # Callee of the form Type[...] should never be generic, only # proper class objects can be. expanded = expanded.copy_modified(variables=[]) return expanded if isinstance(item, UnionType): return UnionType([self.analyze_type_type_callee(get_proper_type(tp), context) for tp in item.relevant_items()], item.line) if isinstance(item, TypeVarType): # Pretend we're calling the typevar's upper bound, # i.e. its constructor (a poor approximation for reality, # but better than AnyType...), but replace the return type # with typevar. callee = self.analyze_type_type_callee(get_proper_type(item.upper_bound), context) callee = get_proper_type(callee) if isinstance(callee, CallableType): callee = callee.copy_modified(ret_type=item) elif isinstance(callee, Overloaded): callee = Overloaded([c.copy_modified(ret_type=item) for c in callee.items()]) return callee # We support Type of namedtuples but not of tuples in general if (isinstance(item, TupleType) and tuple_fallback(item).type.fullname != 'builtins.tuple'): return self.analyze_type_type_callee(tuple_fallback(item), context) self.msg.unsupported_type_type(item, context) return AnyType(TypeOfAny.from_error) def infer_arg_types_in_empty_context(self, args: List[Expression]) -> List[Type]: """Infer argument expression types in an empty context. In short, we basically recurse on each argument without considering in what context the argument was called. 
""" res = [] # type: List[Type] for arg in args: arg_type = self.accept(arg) if has_erased_component(arg_type): res.append(NoneType()) else: res.append(arg_type) return res def infer_arg_types_in_context( self, callee: CallableType, args: List[Expression], arg_kinds: List[int], formal_to_actual: List[List[int]]) -> List[Type]: """Infer argument expression types using a callable type as context. For example, if callee argument 2 has type List[int], infer the argument expression with List[int] type context. Returns the inferred types of *actual arguments*. """ res = [None] * len(args) # type: List[Optional[Type]] for i, actuals in enumerate(formal_to_actual): for ai in actuals: if arg_kinds[ai] not in (nodes.ARG_STAR, nodes.ARG_STAR2): res[ai] = self.accept(args[ai], callee.arg_types[i]) # Fill in the rest of the argument types. for i, t in enumerate(res): if not t: res[i] = self.accept(args[i]) assert all(tp is not None for tp in res) return cast(List[Type], res) def infer_function_type_arguments_using_context( self, callable: CallableType, error_context: Context) -> CallableType: """Unify callable return type to type context to infer type vars. For example, if the return type is set[t] where 't' is a type variable of callable, and if the context is set[int], return callable modified by substituting 't' with 'int'. """ ctx = self.type_context[-1] if not ctx: return callable # The return type may have references to type metavariables that # we are inferring right now. We must consider them as indeterminate # and they are not potential results; thus we replace them with the # special ErasedType type. On the other hand, class type variables are # valid results. erased_ctx = replace_meta_vars(ctx, ErasedType()) ret_type = callable.ret_type if is_optional(ret_type) and is_optional(ctx): # If both the context and the return type are optional, unwrap the optional, # since in 99% cases this is what a user expects. 
            # In other words, we replace
            #     Optional[T] <: Optional[int]
            # with
            #     T <: int
            # while the former would infer T <: Optional[int].
            ret_type = remove_optional(ret_type)
            erased_ctx = remove_optional(erased_ctx)
            #
            # TODO: Instead of this hack and the one below, we need to use outer and
            # inner contexts at the same time. This is however not easy because of two
            # reasons:
            # * We need to support constraints like [1 <: 2, 2 <: X], i.e. with variables
            #   on both sides. (This is not too hard.)
            # * We need to update all the inference "infrastructure", so that all
            #   variables in an expression are inferred at the same time.
            #   (And this is hard, also we need to be careful with lambdas that require
            #   two passes.)
        if isinstance(ret_type, TypeVarType):
            # Another special case: the return type is a type variable. If it's unrestricted,
            # we could infer a too general type for the type variable if we use context,
            # and this could result in confusing and spurious type errors elsewhere.
            #
            # So we give up and just use function arguments for type inference, with just two
            # exceptions:
            #
            # 1. If the context is a generic instance type, actually use it as context, as
            #    this *seems* to usually be the reasonable thing to do.
            #
            #    See also github issues #462 and #360.
            #
            # 2. If the context is some literal type, we want to "propagate" that information
            #    down so that we infer a more precise type for literal expressions. For example,
            #    the expression `3` normally has an inferred type of `builtins.int`: but if it's
            #    in a literal context like below, we want it to infer `Literal[3]` instead.
            #
            #        def expects_literal(x: Literal[3]) -> None: pass
            #        def identity(x: T) -> T: return x
            #
            #        expects_literal(identity(3))  # Should type-check
            if not is_generic_instance(ctx) and not is_literal_type_like(ctx):
                return callable.copy_modified()
        args = infer_type_arguments(callable.type_var_ids(), ret_type, erased_ctx)
        # Only substitute non-Uninhabited and non-erased types: a None entry means
        # "leave this type variable for argument-based inference later".
        new_args = []  # type: List[Optional[Type]]
        for arg in args:
            if has_uninhabited_component(arg) or has_erased_component(arg):
                new_args.append(None)
            else:
                new_args.append(arg)
        # Don't show errors after we have only used the outer context for inference.
        # We will use argument context to infer more variables.
        return self.apply_generic_arguments(callable, new_args, error_context,
                                            skip_unsatisfied=True)

    def infer_function_type_arguments(self, callee_type: CallableType,
                                      args: List[Expression],
                                      arg_kinds: List[int],
                                      formal_to_actual: List[List[int]],
                                      context: Context) -> CallableType:
        """Infer the type arguments for a generic callee type.

        Infer based on the types of arguments.

        Return a derived callable type that has the arguments applied.
        """
        if self.chk.in_checked_function():
            # Disable type errors during type inference. There may be errors
            # due to partial available context information at this time, but
            # these errors can be safely ignored as the arguments will be
            # inferred again later.
            self.msg.disable_errors()

            arg_types = self.infer_arg_types_in_context(
                callee_type, args, arg_kinds, formal_to_actual)

            self.msg.enable_errors()

            # Decide which actuals are deferred to the second inference pass
            # (those feeding formals that mention lambdas / nested type vars).
            arg_pass_nums = self.get_arg_infer_passes(
                callee_type.arg_types, formal_to_actual, len(args))

            # First pass uses only the pass-1 actuals; pass-2 actuals are masked
            # out with None so they don't constrain inference yet.
            pass1_args = []  # type: List[Optional[Type]]
            for i, arg in enumerate(arg_types):
                if arg_pass_nums[i] > 1:
                    pass1_args.append(None)
                else:
                    pass1_args.append(arg)

            inferred_args = infer_function_type_arguments(
                callee_type, pass1_args, arg_kinds, formal_to_actual,
                strict=self.chk.in_checked_function())

            if 2 in arg_pass_nums:
                # Second pass of type inference.
                (callee_type,
                 inferred_args) = self.infer_function_type_arguments_pass2(
                    callee_type, args, arg_kinds, formal_to_actual,
                    inferred_args, context)

            if callee_type.special_sig == 'dict' and len(inferred_args) == 2 and (
                    ARG_NAMED in arg_kinds or ARG_STAR2 in arg_kinds):
                # HACK: Infer str key type for dict(...) with keyword args. The type system
                # can't represent this so we special case it, as this is a pretty common
                # thing. This doesn't quite work with all possible subclasses of dict
                # if they shuffle type variables around, as we assume that there is a 1-1
                # correspondence with dict type variables. This is a marginal issue and
                # a little tricky to fix so it's left unfixed for now.
                first_arg = get_proper_type(inferred_args[0])
                if isinstance(first_arg, (NoneType, UninhabitedType)):
                    inferred_args[0] = self.named_type('builtins.str')
                elif not first_arg or not is_subtype(self.named_type('builtins.str'),
                                                     first_arg):
                    self.msg.fail(message_registry.KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE,
                                  context)
        else:
            # In dynamically typed functions use implicit 'Any' types for
            # type variables.
            inferred_args = [AnyType(TypeOfAny.unannotated)] * len(callee_type.variables)
        return self.apply_inferred_arguments(callee_type, inferred_args,
                                             context)

    def infer_function_type_arguments_pass2(
            self, callee_type: CallableType,
            args: List[Expression],
            arg_kinds: List[int],
            formal_to_actual: List[List[int]],
            old_inferred_args: Sequence[Optional[Type]],
            context: Context) -> Tuple[CallableType, List[Optional[Type]]]:
        """Perform second pass of generic function type argument inference.

        The second pass is needed for arguments with types such as
        Callable[[T], S], where both T and S are type variables, when the
        actual argument is a lambda with inferred types. The idea is to
        infer the type variable T in the first pass (based on the types
        of other arguments). This lets us infer the argument and return
        type of the lambda expression and thus also the type variable S
        in this second pass.

        Return (the callee with type vars applied, inferred actual arg types).
        """
        # None or erased types in inferred types mean that there was not enough
        # information to infer the argument. Replace them with None values so
        # that they are not applied yet below.
        inferred_args = list(old_inferred_args)
        for i, arg in enumerate(get_proper_types(inferred_args)):
            if isinstance(arg, (NoneType, UninhabitedType)) or has_erased_component(arg):
                inferred_args[i] = None
        callee_type = self.apply_generic_arguments(callee_type, inferred_args, context)

        # Re-infer argument types now that pass-1 variables are substituted
        # into the callee; lambdas can now be given parameter types.
        arg_types = self.infer_arg_types_in_context(
            callee_type, args, arg_kinds, formal_to_actual)

        inferred_args = infer_function_type_arguments(
            callee_type, arg_types, arg_kinds, formal_to_actual)

        return callee_type, inferred_args

    def get_arg_infer_passes(self, arg_types: List[Type],
                             formal_to_actual: List[List[int]],
                             num_actuals: int) -> List[int]:
        """Return pass numbers for args for two-pass argument type inference.

        For each actual, the pass number is either 1 (first pass) or 2 (second
        pass).

        Two-pass argument type inference primarily lets us infer types of
        lambdas more effectively.
        """
        res = [1] * num_actuals
        for i, arg in enumerate(arg_types):
            # ArgInferSecondPassQuery marks formals whose actuals should wait
            # for the second pass; propagate that to every mapped actual.
            if arg.accept(ArgInferSecondPassQuery()):
                for j in formal_to_actual[i]:
                    res[j] = 2
        return res

    def apply_inferred_arguments(self, callee_type: CallableType,
                                 inferred_args: Sequence[Optional[Type]],
                                 context: Context) -> CallableType:
        """Apply inferred values of type arguments to a generic function.

        Inferred_args contains the values of function type arguments.
        """
        # Report error if some of the variables could not be solved. In that
        # case assume that all variables have type Any to avoid extra
        # bogus error messages.
        for i, inferred_type in enumerate(inferred_args):
            if not inferred_type or has_erased_component(inferred_type):
                # Could not infer a non-trivial type for a type variable.
                self.msg.could_not_infer_type_arguments(
                    callee_type, i + 1, context)
                inferred_args = [AnyType(TypeOfAny.from_error)] * len(inferred_args)
        # Apply the inferred types to the function type. In this case the
        # return type must be CallableType, since we give the right number of type
        # arguments.
        return self.apply_generic_arguments(callee_type, inferred_args, context)

    def check_argument_count(self,
                             callee: CallableType,
                             actual_types: List[Type],
                             actual_kinds: List[int],
                             actual_names: Optional[Sequence[Optional[str]]],
                             formal_to_actual: List[List[int]],
                             context: Optional[Context],
                             messages: Optional[MessageBuilder]) -> bool:
        """Check that there is a value for all required arguments to a function.

        Also check that there are no duplicate values for arguments. Report found errors
        using 'messages' if it's not None. If 'messages' is given, 'context' must also be given.

        Return False if there were any errors. Otherwise return True.
        """
        if messages:
            assert context, "Internal error: messages given without context"
        elif context is None:
            # Avoid "is None" checks
            context = TempNode(AnyType(TypeOfAny.special_form))

        # TODO(jukka): We could return as soon as we find an error if messages is None.

        # Collect list of all actual arguments matched to formal arguments.
        all_actuals = []  # type: List[int]
        for actuals in formal_to_actual:
            all_actuals.extend(actuals)

        ok, is_unexpected_arg_error = self.check_for_extra_actual_arguments(
            callee, actual_types, actual_kinds, actual_names, all_actuals, context, messages)

        # Check for too many or few values for formals.
        for i, kind in enumerate(callee.arg_kinds):
            if kind == nodes.ARG_POS and (not formal_to_actual[i] and
                                          not is_unexpected_arg_error):
                # No actual for a mandatory positional formal.
                if messages:
                    messages.too_few_arguments(callee, context, actual_names)
                ok = False
            elif kind == nodes.ARG_NAMED and (not formal_to_actual[i] and
                                              not is_unexpected_arg_error):
                # No actual for a mandatory named formal.
                if messages:
                    argname = callee.arg_names[i] or "?"
                    messages.missing_named_argument(callee, context, argname)
                ok = False
            elif kind in [nodes.ARG_POS, nodes.ARG_OPT,
                          nodes.ARG_NAMED, nodes.ARG_NAMED_OPT] and is_duplicate_mapping(
                    formal_to_actual[i], actual_kinds):
                # Multiple actuals map to one formal (e.g. value given both
                # positionally and by keyword). Only report in checked code,
                # except that tuple unpacking duplicates are always reported.
                if (self.chk.in_checked_function() or
                        isinstance(get_proper_type(actual_types[formal_to_actual[i][0]]),
                                   TupleType)):
                    if messages:
                        messages.duplicate_argument_value(callee, i, context)
                    ok = False
            elif (kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT) and formal_to_actual[i] and
                  actual_kinds[formal_to_actual[i][0]] not in [nodes.ARG_NAMED, nodes.ARG_STAR2]):
                # Positional argument when expecting a keyword argument.
                if messages:
                    messages.too_many_positional_arguments(callee, context)
                ok = False
        return ok

    def check_for_extra_actual_arguments(self,
                                         callee: CallableType,
                                         actual_types: List[Type],
                                         actual_kinds: List[int],
                                         actual_names: Optional[Sequence[Optional[str]]],
                                         all_actuals: List[int],
                                         context: Context,
                                         messages: Optional[MessageBuilder]) -> Tuple[bool, bool]:
        """Check for extra actual arguments.

        Return tuple (was everything ok,
                      was there an extra keyword argument error [used to avoid duplicate
                      errors]).
        """

        is_unexpected_arg_error = False  # Keep track of errors to avoid duplicate errors
        ok = True  # False if we've found any error

        for i, kind in enumerate(actual_kinds):
            if i not in all_actuals and (
                    kind != nodes.ARG_STAR or
                    # We accept the other iterables than tuple (including Any)
                    # as star arguments because they could be empty, resulting in no arguments.
                    is_non_empty_tuple(actual_types[i])):
                # Extra actual: not matched by a formal argument.
                ok = False
                if kind != nodes.ARG_NAMED:
                    if messages:
                        messages.too_many_arguments(callee, context)
                else:
                    if messages:
                        assert actual_names, "Internal error: named kinds without names given"
                        act_name = actual_names[i]
                        assert act_name is not None
                        act_type = actual_types[i]
                        messages.unexpected_keyword_argument(callee, act_name, act_type, context)
                    is_unexpected_arg_error = True
            elif ((kind == nodes.ARG_STAR and nodes.ARG_STAR not in callee.arg_kinds)
                  or kind == nodes.ARG_STAR2):
                actual_type = get_proper_type(actual_types[i])
                if isinstance(actual_type, (TupleType, TypedDictType)):
                    if all_actuals.count(i) < len(actual_type.items):
                        # Too many tuple/dict items as some did not match.
                        if messages:
                            if (kind != nodes.ARG_STAR2
                                    or not isinstance(actual_type, TypedDictType)):
                                messages.too_many_arguments(callee, context)
                            else:
                                messages.too_many_arguments_from_typed_dict(callee, actual_type,
                                                                            context)
                                is_unexpected_arg_error = True
                        ok = False
                # *args/**kwargs can be applied even if the function takes a fixed
                # number of positional arguments. This may succeed at runtime.

        return ok, is_unexpected_arg_error

    def check_argument_types(self,
                             arg_types: List[Type],
                             arg_kinds: List[int],
                             args: List[Expression],
                             callee: CallableType,
                             formal_to_actual: List[List[int]],
                             context: Context,
                             messages: Optional[MessageBuilder] = None,
                             check_arg: Optional[ArgChecker] = None) -> None:
        """Check argument types against a callable type.

        Report errors if the argument types are not compatible.
        """
        messages = messages or self.msg
        check_arg = check_arg or self.check_arg
        # Keep track of consumed tuple *arg items.
        mapper = ArgTypeExpander()
        for i, actuals in enumerate(formal_to_actual):
            for actual in actuals:
                actual_type = arg_types[actual]
                if actual_type is None:
                    continue  # Some kind of error was already reported.
                actual_kind = arg_kinds[actual]
                # Check that a *arg is valid as varargs.
                if (actual_kind == nodes.ARG_STAR and
                        not self.is_valid_var_arg(actual_type)):
                    messages.invalid_var_arg(actual_type, context)
                if (actual_kind == nodes.ARG_STAR2 and
                        not self.is_valid_keyword_var_arg(actual_type)):
                    is_mapping = is_subtype(actual_type, self.chk.named_type('typing.Mapping'))
                    messages.invalid_keyword_var_arg(actual_type, is_mapping, context)
                # Expand *args/**kwargs actuals into the item type seen by this formal.
                expanded_actual = mapper.expand_actual_type(
                    actual_type, actual_kind,
                    callee.arg_names[i], callee.arg_kinds[i])
                check_arg(expanded_actual, actual_type, arg_kinds[actual],
                          callee.arg_types[i],
                          actual + 1, i + 1, callee, args[actual], context, messages)

    def check_arg(self,
                  caller_type: Type,
                  original_caller_type: Type,
                  caller_kind: int,
                  callee_type: Type,
                  n: int,
                  m: int,
                  callee: CallableType,
                  context: Context,
                  outer_context: Context,
                  messages: MessageBuilder) -> None:
        """Check the type of a single argument in a call."""
        caller_type = get_proper_type(caller_type)
        original_caller_type = get_proper_type(original_caller_type)
        callee_type = get_proper_type(callee_type)

        if isinstance(caller_type, DeletedType):
            messages.deleted_as_rvalue(caller_type, context)
        # Only non-abstract non-protocol class can be given where Type[...] is expected...
        elif (isinstance(caller_type, CallableType) and isinstance(callee_type, TypeType) and
              caller_type.is_type_obj() and
              (caller_type.type_object().is_abstract or
               caller_type.type_object().is_protocol) and
              isinstance(callee_type.item, Instance) and
              (callee_type.item.type.is_abstract or callee_type.item.type.is_protocol)):
            self.msg.concrete_only_call(callee_type, context)
        elif not is_subtype(caller_type, callee_type):
            if self.chk.should_suppress_optional_error([caller_type, callee_type]):
                return
            code = messages.incompatible_argument(n,
                                                  m,
                                                  callee,
                                                  original_caller_type,
                                                  caller_kind,
                                                  context=context,
                                                  outer_context=outer_context)
            messages.incompatible_argument_note(original_caller_type, callee_type, context,
                                                code=code)

    def check_overload_call(self,
                            callee: Overloaded,
                            args: List[Expression],
                            arg_kinds: List[int],
                            arg_names: Optional[Sequence[Optional[str]]],
                            callable_name: Optional[str],
                            object_type: Optional[Type],
                            context: Context,
                            arg_messages: MessageBuilder) -> Tuple[Type, Type]:
        """Checks a call to an overloaded function."""
        arg_types = self.infer_arg_types_in_empty_context(args)
        # Step 1: Filter call targets to remove ones where the argument counts don't match
        plausible_targets = self.plausible_overload_call_targets(arg_types, arg_kinds,
                                                                 arg_names, callee)

        # Step 2: If the arguments contain a union, we try performing union math first,
        #         instead of picking the first matching overload.
        #         This is because picking the first overload often ends up being too greedy:
        #         for example, when we have a fallback alternative that accepts an unrestricted
        #         typevar. See https://github.com/python/mypy/issues/4063 for related discussion.
        erased_targets = None  # type: Optional[List[CallableType]]
        unioned_result = None  # type: Optional[Tuple[Type, Type]]
        union_interrupted = False  # did we try all union combinations?
        if any(self.real_union(arg) for arg in arg_types):
            unioned_errors = arg_messages.clean_copy()
            try:
                unioned_return = self.union_overload_result(plausible_targets, args,
                                                            arg_types, arg_kinds, arg_names,
                                                            callable_name, object_type,
                                                            context,
                                                            arg_messages=unioned_errors)
            except TooManyUnions:
                union_interrupted = True
            else:
                # Record if we succeeded. Next we need to see if maybe normal procedure
                # gives a narrower type.
                if unioned_return:
                    returns, inferred_types = zip(*unioned_return)
                    # Note that we use `combine_function_signatures` instead of just returning
                    # a union of inferred callables because for example a call
                    # Union[int -> int, str -> str](Union[int, str]) is invalid and
                    # we don't want to introduce internal inconsistencies.
                    unioned_result = (make_simplified_union(list(returns),
                                                            context.line,
                                                            context.column),
                                      self.combine_function_signatures(inferred_types))

        # Step 3: We try checking each branch one-by-one.
        inferred_result = self.infer_overload_return_type(plausible_targets, args, arg_types,
                                                          arg_kinds, arg_names, callable_name,
                                                          object_type, context, arg_messages)
        # If any of checks succeed, stop early.
        if inferred_result is not None and unioned_result is not None:
            # Both unioned and direct checks succeeded, choose the more precise type.
            if (is_subtype(inferred_result[0], unioned_result[0]) and
                    not isinstance(get_proper_type(inferred_result[0]), AnyType)):
                return inferred_result
            return unioned_result
        elif unioned_result is not None:
            return unioned_result
        elif inferred_result is not None:
            return inferred_result

        # Step 4: Failure. At this point, we know there is no match. We fall back to trying
        #         to find a somewhat plausible overload target using the erased types
        #         so we can produce a nice error message.
        #
        #         For example, suppose the user passes a value of type 'List[str]' into an
        #         overload with signatures f(x: int) -> int and f(x: List[int]) -> List[int].
        #
        #         Neither alternative matches, but we can guess the user probably wants the
        #         second one.
        erased_targets = self.overload_erased_call_targets(plausible_targets, arg_types,
                                                           arg_kinds, arg_names, args, context)

        # Step 5: We try and infer a second-best alternative if possible. If not, fall back
        #         to using 'Any'.
        if len(erased_targets) > 0:
            # Pick the first plausible erased target as the fallback
            # TODO: Adjust the error message here to make it clear there was no match.
            #       In order to do this, we need to find a clean way of associating
            #       a note with whatever error message 'self.check_call' will generate.
            #       In particular, the note's line and column numbers need to be the same
            #       as the error's.
            target = erased_targets[0]  # type: Type
        else:
            # There was no plausible match: give up
            target = AnyType(TypeOfAny.from_error)

            if not self.chk.should_suppress_optional_error(arg_types):
                if not is_operator_method(callable_name):
                    code = None
                else:
                    code = codes.OPERATOR
                arg_messages.no_variant_matches_arguments(
                    plausible_targets, callee, arg_types, context, code=code)

        result = self.check_call(target, args, arg_kinds, context, arg_names,
                                 arg_messages=arg_messages,
                                 callable_name=callable_name,
                                 object_type=object_type)
        if union_interrupted:
            self.chk.fail("Not all union combinations were tried"
                          " because there are too many unions", context)
        return result

    def plausible_overload_call_targets(self,
                                        arg_types: List[Type],
                                        arg_kinds: List[int],
                                        arg_names: Optional[Sequence[Optional[str]]],
                                        overload: Overloaded) -> List[CallableType]:
        """Returns all overload call targets that have matching argument counts.

        If the given args contains a star-arg (*arg or **kwarg argument), this method
        will ensure all star-arg overloads appear at the start of the list, instead
        of their usual location.

        The only exception is if the starred argument is something like a Tuple or a
        NamedTuple, which has a definitive "shape". If so, we don't move the corresponding
        alternative to the front since we can infer a more precise match using the original
        order."""

        def has_shape(typ: Type) -> bool:
            # Tuples, TypedDicts and named tuples have a fixed "shape".
            typ = get_proper_type(typ)
            return (isinstance(typ, TupleType) or isinstance(typ, TypedDictType)
                    or (isinstance(typ, Instance) and typ.type.is_named_tuple))

        matches = []  # type: List[CallableType]
        star_matches = []  # type: List[CallableType]

        args_have_var_arg = False
        args_have_kw_arg = False
        for kind, typ in zip(arg_kinds, arg_types):
            if kind == ARG_STAR and not has_shape(typ):
                args_have_var_arg = True
            if kind == ARG_STAR2 and not has_shape(typ):
                args_have_kw_arg = True

        for typ in overload.items():
            formal_to_actual = map_actuals_to_formals(arg_kinds, arg_names,
                                                      typ.arg_kinds, typ.arg_names,
                                                      lambda i: arg_types[i])

            if self.check_argument_count(typ, arg_types, arg_kinds, arg_names,
                                         formal_to_actual, None, None):
                if args_have_var_arg and typ.is_var_arg:
                    star_matches.append(typ)
                elif args_have_kw_arg and typ.is_kw_arg:
                    star_matches.append(typ)
                else:
                    matches.append(typ)

        return star_matches + matches

    def infer_overload_return_type(self,
                                   plausible_targets: List[CallableType],
                                   args: List[Expression],
                                   arg_types: List[Type],
                                   arg_kinds: List[int],
                                   arg_names: Optional[Sequence[Optional[str]]],
                                   callable_name: Optional[str],
                                   object_type: Optional[Type],
                                   context: Context,
                                   arg_messages: Optional[MessageBuilder] = None,
                                   ) -> Optional[Tuple[Type, Type]]:
        """Attempts to find the first matching callable from the given list.

        If a match is found, returns a tuple containing the result type
        and the inferred callee type. (This tuple is meant to be eventually
        returned by check_call.)
        If multiple targets match due to ambiguous Any parameters,
        returns (AnyType, AnyType).
        If no targets match, returns None.

        Assumes all of the given targets have argument counts compatible
        with the caller.
        """
        arg_messages = self.msg if arg_messages is None else arg_messages
        matches = []  # type: List[CallableType]
        return_types = []  # type: List[Type]
        inferred_types = []  # type: List[Type]
        args_contain_any = any(map(has_any_type, arg_types))

        for typ in plausible_targets:
            # Temporarily redirect all messages into a throwaway builder so a
            # failed candidate leaves no trace; restored in the finally below.
            overload_messages = self.msg.clean_copy()
            prev_messages = self.msg
            assert self.msg is self.chk.msg
            self.msg = overload_messages
            self.chk.msg = overload_messages
            try:
                # Passing `overload_messages` as the `arg_messages` parameter doesn't
                # seem to reliably catch all possible errors.
                # TODO: Figure out why
                ret_type, infer_type = self.check_call(
                    callee=typ,
                    args=args,
                    arg_kinds=arg_kinds,
                    arg_names=arg_names,
                    context=context,
                    arg_messages=overload_messages,
                    callable_name=callable_name,
                    object_type=object_type)
            finally:
                self.chk.msg = prev_messages
                self.msg = prev_messages

            is_match = not overload_messages.is_errors()
            if is_match:
                # Return early if possible; otherwise record info so we can
                # check for ambiguity due to 'Any' below.
                if not args_contain_any:
                    return ret_type, infer_type
                matches.append(typ)
                return_types.append(ret_type)
                inferred_types.append(infer_type)

        if len(matches) == 0:
            # No match was found
            return None
        elif any_causes_overload_ambiguity(matches, return_types, arg_types, arg_kinds,
                                           arg_names):
            # An argument of type or containing the type 'Any' caused ambiguity.
            # We try returning a precise type if we can. If not, we give up and just return
            # 'Any'.
            if all_same_types(return_types):
                return return_types[0], inferred_types[0]
            elif all_same_types([erase_type(typ) for typ in return_types]):
                return erase_type(return_types[0]), erase_type(inferred_types[0])
            else:
                return self.check_call(callee=AnyType(TypeOfAny.special_form),
                                       args=args,
                                       arg_kinds=arg_kinds,
                                       arg_names=arg_names,
                                       context=context,
                                       arg_messages=arg_messages,
                                       callable_name=callable_name,
                                       object_type=object_type)
        else:
            # Success! No ambiguity; return the first match.
            return return_types[0], inferred_types[0]

    def overload_erased_call_targets(self,
                                     plausible_targets: List[CallableType],
                                     arg_types: List[Type],
                                     arg_kinds: List[int],
                                     arg_names: Optional[Sequence[Optional[str]]],
                                     args: List[Expression],
                                     context: Context) -> List[CallableType]:
        """Returns a list of all targets that match the caller after erasing types.

        Assumes all of the given targets have argument counts compatible with the caller.
        """
        matches = []  # type: List[CallableType]
        for typ in plausible_targets:
            if self.erased_signature_similarity(arg_types, arg_kinds, arg_names, args, typ,
                                                context):
                matches.append(typ)
        return matches

    def union_overload_result(self,
                              plausible_targets: List[CallableType],
                              args: List[Expression],
                              arg_types: List[Type],
                              arg_kinds: List[int],
                              arg_names: Optional[Sequence[Optional[str]]],
                              callable_name: Optional[str],
                              object_type: Optional[Type],
                              context: Context,
                              arg_messages: Optional[MessageBuilder] = None,
                              level: int = 0
                              ) -> Optional[List[Tuple[Type, Type]]]:
        """Accepts a list of overload signatures and attempts to match calls by destructuring
        the first union.

        Return a list of (<return type>, <inferred variant type>) if call succeeds for every
        item of the destructured union. Returns None if there is no match.
        """
        # Step 1: If we are already too deep, then stop immediately. Otherwise mypy might
        #         hang for long time because of a weird overload call. The caller will get
        #         the exception and generate an appropriate note message, if needed.
        if level >= MAX_UNIONS:
            raise TooManyUnions

        # Step 2: Find position of the first union in arguments. Return the normal inferred
        #         type if no more unions left.
        for idx, typ in enumerate(arg_types):
            if self.real_union(typ):
                break
        else:
            # No unions in args, just fall back to normal inference
            with self.type_overrides_set(args, arg_types):
                res = self.infer_overload_return_type(plausible_targets, args, arg_types,
                                                      arg_kinds, arg_names, callable_name,
                                                      object_type, context, arg_messages)
            if res is not None:
                return [res]
            return None

        # Step 3: Try a direct match before splitting to avoid unnecessary union splits
        #         and save performance.
        with self.type_overrides_set(args, arg_types):
            direct = self.infer_overload_return_type(plausible_targets, args, arg_types,
                                                     arg_kinds, arg_names, callable_name,
                                                     object_type, context, arg_messages)
        if direct is not None and not isinstance(get_proper_type(direct[0]),
                                                 (UnionType, AnyType)):
            # We only return non-unions soon, to avoid greedy match.
            return [direct]

        # Step 4: Split the first remaining union type in arguments into items and
        #         try to match each item individually (recursive).
        first_union = get_proper_type(arg_types[idx])
        assert isinstance(first_union, UnionType)
        res_items = []
        for item in first_union.relevant_items():
            new_arg_types = arg_types.copy()
            new_arg_types[idx] = item
            sub_result = self.union_overload_result(plausible_targets, args, new_arg_types,
                                                    arg_kinds, arg_names, callable_name,
                                                    object_type, context, arg_messages,
                                                    level + 1)
            if sub_result is not None:
                res_items.extend(sub_result)
            else:
                # Some item doesn't match, return soon.
                return None

        # Step 5: If splitting succeeded, then filter out duplicate items before returning.
seen = set() # type: Set[Tuple[Type, Type]] result = [] for pair in res_items: if pair not in seen: seen.add(pair) result.append(pair) return result def real_union(self, typ: Type) -> bool: typ = get_proper_type(typ) return isinstance(typ, UnionType) and len(typ.relevant_items()) > 1 @contextmanager def type_overrides_set(self, exprs: Sequence[Expression], overrides: Sequence[Type]) -> Iterator[None]: """Set _temporary_ type overrides for given expressions.""" assert len(exprs) == len(overrides) for expr, typ in zip(exprs, overrides): self.type_overrides[expr] = typ try: yield finally: for expr in exprs: del self.type_overrides[expr] def combine_function_signatures(self, types: Sequence[Type]) -> Union[AnyType, CallableType]: """Accepts a list of function signatures and attempts to combine them together into a new CallableType consisting of the union of all of the given arguments and return types. If there is at least one non-callable type, return Any (this can happen if there is an ambiguity because of Any in arguments). """ assert types, "Trying to merge no callables" types = get_proper_types(types) if not all(isinstance(c, CallableType) for c in types): return AnyType(TypeOfAny.special_form) callables = cast(Sequence[CallableType], types) if len(callables) == 1: return callables[0] # Note: we are assuming here that if a user uses some TypeVar 'T' in # two different functions, they meant for that TypeVar to mean the # same thing. # # This function will make sure that all instances of that TypeVar 'T' # refer to the same underlying TypeVarType and TypeVarDef objects to # simplify the union-ing logic below. # # (If the user did *not* mean for 'T' to be consistently bound to the # same type in their overloads, well, their code is probably too # confusing and ought to be re-written anyways.) 
callables, variables = merge_typevars_in_callables_by_name(callables) new_args = [[] for _ in range(len(callables[0].arg_types))] # type: List[List[Type]] new_kinds = list(callables[0].arg_kinds) new_returns = [] # type: List[Type] too_complex = False for target in callables: # We fall back to Callable[..., Union[]] if the functions do not have # the exact same signature. The only exception is if one arg is optional and # the other is positional: in that case, we continue unioning (and expect a # positional arg). # TODO: Enhance the merging logic to handle a wider variety of signatures. if len(new_kinds) != len(target.arg_kinds): too_complex = True break for i, (new_kind, target_kind) in enumerate(zip(new_kinds, target.arg_kinds)): if new_kind == target_kind: continue elif new_kind in (ARG_POS, ARG_OPT) and target_kind in (ARG_POS, ARG_OPT): new_kinds[i] = ARG_POS else: too_complex = True break if too_complex: break # outer loop for i, arg in enumerate(target.arg_types): new_args[i].append(arg) new_returns.append(target.ret_type) union_return = make_simplified_union(new_returns) if too_complex: any = AnyType(TypeOfAny.special_form) return callables[0].copy_modified( arg_types=[any, any], arg_kinds=[ARG_STAR, ARG_STAR2], arg_names=[None, None], ret_type=union_return, variables=variables, implicit=True) final_args = [] for args_list in new_args: new_type = make_simplified_union(args_list) final_args.append(new_type) return callables[0].copy_modified( arg_types=final_args, arg_kinds=new_kinds, ret_type=union_return, variables=variables, implicit=True) def erased_signature_similarity(self, arg_types: List[Type], arg_kinds: List[int], arg_names: Optional[Sequence[Optional[str]]], args: List[Expression], callee: CallableType, context: Context) -> bool: """Determine whether arguments could match the signature at runtime, after erasing types.""" formal_to_actual = map_actuals_to_formals(arg_kinds, arg_names, callee.arg_kinds, callee.arg_names, lambda i: arg_types[i]) if 
not self.check_argument_count(callee, arg_types, arg_kinds, arg_names, formal_to_actual, None, None): # Too few or many arguments -> no match. return False def check_arg(caller_type: Type, original_ccaller_type: Type, caller_kind: int, callee_type: Type, n: int, m: int, callee: CallableType, context: Context, outer_context: Context, messages: MessageBuilder) -> None: if not arg_approximate_similarity(caller_type, callee_type): # No match -- exit early since none of the remaining work can change # the result. raise Finished try: self.check_argument_types(arg_types, arg_kinds, args, callee, formal_to_actual, context=context, check_arg=check_arg) return True except Finished: return False def apply_generic_arguments(self, callable: CallableType, types: Sequence[Optional[Type]], context: Context, skip_unsatisfied: bool = False) -> CallableType: """Simple wrapper around mypy.applytype.apply_generic_arguments.""" return applytype.apply_generic_arguments(callable, types, self.msg.incompatible_typevar_value, context, skip_unsatisfied=skip_unsatisfied) def check_any_type_call(self, args: List[Expression], callee: Type) -> Tuple[Type, Type]: self.infer_arg_types_in_empty_context(args) callee = get_proper_type(callee) if isinstance(callee, AnyType): return (AnyType(TypeOfAny.from_another_any, source_any=callee), AnyType(TypeOfAny.from_another_any, source_any=callee)) else: return AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form) def check_union_call(self, callee: UnionType, args: List[Expression], arg_kinds: List[int], arg_names: Optional[Sequence[Optional[str]]], context: Context, arg_messages: MessageBuilder) -> Tuple[Type, Type]: self.msg.disable_type_names += 1 results = [self.check_call(subtype, args, arg_kinds, context, arg_names, arg_messages=arg_messages) for subtype in callee.relevant_items()] self.msg.disable_type_names -= 1 return (make_simplified_union([res[0] for res in results]), callee) def visit_member_expr(self, e: MemberExpr, is_lvalue: bool = 
False) -> Type: """Visit member expression (of form e.id).""" self.chk.module_refs.update(extract_refexpr_names(e)) result = self.analyze_ordinary_member_access(e, is_lvalue) return self.narrow_type_from_binder(e, result) def analyze_ordinary_member_access(self, e: MemberExpr, is_lvalue: bool) -> Type: """Analyse member expression or member lvalue.""" if e.kind is not None: # This is a reference to a module attribute. return self.analyze_ref_expr(e) else: # This is a reference to a non-module attribute. original_type = self.accept(e.expr) base = e.expr module_symbol_table = None if isinstance(base, RefExpr) and isinstance(base.node, MypyFile): module_symbol_table = base.node.names member_type = analyze_member_access( e.name, original_type, e, is_lvalue, False, False, self.msg, original_type=original_type, chk=self.chk, in_literal_context=self.is_literal_context(), module_symbol_table=module_symbol_table) return member_type def analyze_external_member_access(self, member: str, base_type: Type, context: Context) -> Type: """Analyse member access that is external, i.e. it cannot refer to private definitions. Return the result type. """ # TODO remove; no private definitions in mypy return analyze_member_access(member, base_type, context, False, False, False, self.msg, original_type=base_type, chk=self.chk, in_literal_context=self.is_literal_context()) def is_literal_context(self) -> bool: return is_literal_type_like(self.type_context[-1]) def infer_literal_expr_type(self, value: LiteralValue, fallback_name: str) -> Type: """Analyzes the given literal expression and determines if we should be inferring an Instance type, a Literal[...] type, or an Instance that remembers the original literal. We... 1. ...Infer a normal Instance in most circumstances. 2. ...Infer a Literal[...] if we're in a literal context. 
For example, if we were analyzing the "3" in "foo(3)" where "foo" has a signature of "def foo(Literal[3]) -> None", we'd want to infer that the "3" has a type of Literal[3] instead of Instance. 3. ...Infer an Instance that remembers the original Literal if we're declaring a Final variable with an inferred type -- for example, "bar" in "bar: Final = 3" would be assigned an Instance that remembers it originated from a '3'. See the comments in Instance's constructor for more details. """ typ = self.named_type(fallback_name) if self.is_literal_context(): return LiteralType(value=value, fallback=typ) else: return typ.copy_modified(last_known_value=LiteralType( value=value, fallback=typ, line=typ.line, column=typ.column, )) def concat_tuples(self, left: TupleType, right: TupleType) -> TupleType: """Concatenate two fixed length tuples.""" return TupleType(items=left.items + right.items, fallback=self.named_type('builtins.tuple')) def visit_int_expr(self, e: IntExpr) -> Type: """Type check an integer literal (trivial).""" return self.infer_literal_expr_type(e.value, 'builtins.int') def visit_str_expr(self, e: StrExpr) -> Type: """Type check a string literal (trivial).""" return self.infer_literal_expr_type(e.value, 'builtins.str') def visit_bytes_expr(self, e: BytesExpr) -> Type: """Type check a bytes literal (trivial).""" return self.infer_literal_expr_type(e.value, 'builtins.bytes') def visit_unicode_expr(self, e: UnicodeExpr) -> Type: """Type check a unicode literal (trivial).""" return self.infer_literal_expr_type(e.value, 'builtins.unicode') def visit_float_expr(self, e: FloatExpr) -> Type: """Type check a float literal (trivial).""" return self.named_type('builtins.float') def visit_complex_expr(self, e: ComplexExpr) -> Type: """Type check a complex literal.""" return self.named_type('builtins.complex') def visit_ellipsis(self, e: EllipsisExpr) -> Type: """Type check '...'.""" if self.chk.options.python_version[0] >= 3: return self.named_type('builtins.ellipsis') 
        else:
            # '...' is not valid in normal Python 2 code, but it can
            # be used in stubs.  The parser makes sure that we only
            # get this far if we are in a stub, and we can safely
            # return 'object' as ellipsis is special cased elsewhere.
            # The builtins.ellipsis type does not exist in Python 2.
            return self.named_type('builtins.object')

    def visit_op_expr(self, e: OpExpr) -> Type:
        """Type check a binary operator expression."""
        if e.op == 'in':
            self.accept(e.right)
            self.accept(e.left)
            return self.bool_type()
        if e.op == 'and' or e.op == 'or':
            return self.check_boolean_op(e, e)
        if e.op == '*' and isinstance(e.left, ListExpr):
            # Expressions of form [...] * e get special type inference.
            return self.check_list_multiply(e)
        if e.op == '%':
            # String interpolation via '%' gets dedicated checking of the format string.
            pyversion = self.chk.options.python_version
            if pyversion[0] == 3:
                if isinstance(e.left, BytesExpr) and pyversion[1] >= 5:
                    return self.strfrm_checker.check_str_interpolation(e.left, e.right)
                if isinstance(e.left, StrExpr):
                    return self.strfrm_checker.check_str_interpolation(e.left, e.right)
            elif pyversion[0] <= 2:
                if isinstance(e.left, (StrExpr, BytesExpr, UnicodeExpr)):
                    return self.strfrm_checker.check_str_interpolation(e.left, e.right)
        left_type = self.accept(e.left)

        proper_left_type = get_proper_type(left_type)
        if isinstance(proper_left_type, TupleType) and e.op == '+':
            # Fixed-length tuple concatenation yields a precise tuple type, but only
            # when neither side overrides tuple's own __add__/__radd__.
            left_add_method = proper_left_type.partial_fallback.type.get('__add__')
            if left_add_method and left_add_method.fullname == 'builtins.tuple.__add__':
                proper_right_type = get_proper_type(self.accept(e.right))
                if isinstance(proper_right_type, TupleType):
                    right_radd_method = proper_right_type.partial_fallback.type.get('__radd__')
                    if right_radd_method is None:
                        return self.concat_tuples(proper_left_type, proper_right_type)

        if e.op in nodes.op_methods:
            method = self.get_operator_method(e.op)
            result, method_type = self.check_op(method, left_type, e.right, e,
                                                allow_reverse=True)
            e.method_type = method_type
            return result
        else:
            raise RuntimeError('Unknown operator {}'.format(e.op))

    def visit_comparison_expr(self, e: ComparisonExpr) -> Type:
        """Type check a comparison expression.

        Comparison expressions are type checked consecutive-pair-wise.
        That is, 'a < b > c == d' is checked as 'a < b and b > c and c == d'.
        """
        result = None  # type: Optional[Type]
        sub_result = None  # type: Optional[Type]

        # Check each consecutive operand pair and their operator
        for left, right, operator in zip(e.operands, e.operands[1:], e.operators):
            left_type = self.accept(left)

            method_type = None  # type: Optional[mypy.types.Type]

            if operator == 'in' or operator == 'not in':
                right_type = self.accept(right)  # always validate the right operand

                # Keep track of whether we get type check errors (these won't be reported, they
                # are just to verify whether something is valid typing wise).
                local_errors = self.msg.copy()
                local_errors.disable_count = 0
                _, method_type = self.check_method_call_by_name(
                    '__contains__', right_type, [left], [ARG_POS], e, local_errors)
                sub_result = self.bool_type()
                # Container item type for strict type overlap checks. Note: we need to only
                # check for nominal type, because a usual "Unsupported operands for in"
                # will be reported for types incompatible with __contains__().
                # See testCustomContainsCheckStrictEquality for an example.
                cont_type = self.chk.analyze_container_item_type(right_type)
                if isinstance(right_type, PartialType):
                    # We don't really know if this is an error or not, so just shut up.
                    pass
                elif (local_errors.is_errors() and
                        # is_valid_var_arg is True for any Iterable
                        self.is_valid_var_arg(right_type)):
                    # __contains__ failed, but the right operand is iterable;
                    # fall back to checking against the iterable's item type.
                    _, itertype = self.chk.analyze_iterable_item_type(right)
                    method_type = CallableType(
                        [left_type],
                        [nodes.ARG_POS],
                        [None],
                        self.bool_type(),
                        self.named_type('builtins.function'))
                    if not is_subtype(left_type, itertype):
                        self.msg.unsupported_operand_types('in', left_type, right_type, e)
                # Only show dangerous overlap if there are no other errors.
                elif (not local_errors.is_errors() and cont_type and
                        self.dangerous_comparison(left_type, cont_type,
                                                  original_container=right_type)):
                    self.msg.dangerous_comparison(left_type, cont_type, 'container', e)
                else:
                    self.msg.add_errors(local_errors)
            elif operator in nodes.op_methods:
                method = self.get_operator_method(operator)
                err_count = self.msg.errors.total_errors()
                sub_result, method_type = self.check_op(method, left_type, right, e,
                                                        allow_reverse=True)
                # Only show dangerous overlap if there are no other errors. See
                # testCustomEqCheckStrictEquality for an example.
                if self.msg.errors.total_errors() == err_count and operator in ('==', '!='):
                    right_type = self.accept(right)
                    # We suppress the error if there is a custom __eq__() method on either
                    # side. User defined (or even standard library) classes can define this
                    # to return True for comparisons between non-overlapping types.
                    if (not custom_special_method(left_type, '__eq__') and
                            not custom_special_method(right_type, '__eq__')):
                        # Also flag non-overlapping literals in situations like:
                        #     x: Literal['a', 'b']
                        #     if x == 'c':
                        #         ...
                        left_type = try_getting_literal(left_type)
                        right_type = try_getting_literal(right_type)
                        if self.dangerous_comparison(left_type, right_type):
                            self.msg.dangerous_comparison(left_type, right_type, 'equality', e)
            elif operator == 'is' or operator == 'is not':
                right_type = self.accept(right)  # validate the right operand
                sub_result = self.bool_type()
                left_type = try_getting_literal(left_type)
                right_type = try_getting_literal(right_type)
                if self.dangerous_comparison(left_type, right_type):
                    self.msg.dangerous_comparison(left_type, right_type, 'identity', e)
                method_type = None
            else:
                raise RuntimeError('Unknown comparison operator {}'.format(operator))

            e.method_types.append(method_type)

            # Determine type of boolean-and of result and sub_result
            if result is None:
                result = sub_result
            else:
                result = join.join_types(result, sub_result)

        assert result is not None
        return result

    def dangerous_comparison(self, left: Type, right: Type,
                             original_container: Optional[Type] = None) -> bool:
        """Check for dangerous non-overlapping comparisons like 42 == 'no'.

        The original_container is the original container type for 'in' checks
        (and None for equality checks).

        Rules:
            * X and None are overlapping even in strict-optional mode. This is to allow
              'assert x is not None' for x defined as 'x = None  # type: str' in class body
              (otherwise mypy itself would have couple dozen errors because of this).
            * Optional[X] and Optional[Y] are non-overlapping if X and Y are
              non-overlapping, although technically None is overlap, it is most
              likely an error.
            * Any overlaps with everything, i.e. always safe.
            * Special case: b'abc' in b'cde' is safe.
        """
        if not self.chk.options.strict_equality:
            return False

        left, right = get_proper_types((left, right))

        if self.chk.binder.is_unreachable_warning_suppressed():
            # We are inside a function that contains type variables with value restrictions in
            # its signature.
            # In this case we just suppress all strict-equality checks to avoid
            # false positives for code like:
            #
            #     T = TypeVar('T', str, int)
            #     def f(x: T) -> T:
            #         if x == 0:
            #             ...
            #         return x
            #
            # TODO: find a way of disabling the check only for types resulted from the expansion.
            return False
        if isinstance(left, NoneType) or isinstance(right, NoneType):
            return False
        if isinstance(left, UnionType) and isinstance(right, UnionType):
            # Compare the non-None parts; Optional[X] vs Optional[Y] is flagged
            # when X and Y don't overlap (None-vs-None overlap is almost surely a bug).
            left = remove_optional(left)
            right = remove_optional(right)
            left, right = get_proper_types((left, right))
        py2 = self.chk.options.python_version < (3, 0)
        if (original_container and has_bytes_component(original_container, py2) and
                has_bytes_component(left, py2)):
            # We need to special case bytes and bytearray, because 97 in b'abc', b'a' in b'abc',
            # b'a' in bytearray(b'abc') etc. all return True (and we want to show the error only
            # if the check can _never_ be True).
            return False
        if isinstance(left, Instance) and isinstance(right, Instance):
            # Special case some builtin implementations of AbstractSet.
            if (left.type.fullname in OVERLAPPING_TYPES_WHITELIST and
                    right.type.fullname in OVERLAPPING_TYPES_WHITELIST):
                abstract_set = self.chk.lookup_typeinfo('typing.AbstractSet')
                left = map_instance_to_supertype(left, abstract_set)
                right = map_instance_to_supertype(right, abstract_set)
                # Compare element types instead of the container types themselves.
                return not is_overlapping_types(left.args[0], right.args[0])
        return not is_overlapping_types(left, right, ignore_promotions=False)

    def get_operator_method(self, op: str) -> str:
        """Return the name of the special method implementing operator 'op'."""
        if op == '/' and self.chk.options.python_version[0] == 2:
            # TODO also check for "from __future__ import division"
            return '__div__'
        else:
            return nodes.op_methods[op]

    def check_method_call_by_name(self,
                                  method: str,
                                  base_type: Type,
                                  args: List[Expression],
                                  arg_kinds: List[int],
                                  context: Context,
                                  local_errors: Optional[MessageBuilder] = None,
                                  original_type: Optional[Type] = None
                                  ) -> Tuple[Type, Type]:
        """Type check a call to a named method on an object.

        Return tuple (result type, inferred method type).
        The 'original_type' is used for error messages.
        """
        local_errors = local_errors or self.msg
        original_type = original_type or base_type
        # Unions are special-cased to allow plugins to act on each element of the union.
        base_type = get_proper_type(base_type)
        if isinstance(base_type, UnionType):
            return self.check_union_method_call_by_name(method, base_type,
                                                        args, arg_kinds,
                                                        context, local_errors, original_type)

        method_type = analyze_member_access(method, base_type, context, False, False, True,
                                            local_errors, original_type=original_type,
                                            chk=self.chk,
                                            in_literal_context=self.is_literal_context())
        return self.check_method_call(
            method, base_type, method_type, args, arg_kinds, context, local_errors)

    def check_union_method_call_by_name(self,
                                        method: str,
                                        base_type: UnionType,
                                        args: List[Expression],
                                        arg_kinds: List[int],
                                        context: Context,
                                        local_errors: MessageBuilder,
                                        original_type: Optional[Type] = None
                                        ) -> Tuple[Type, Type]:
        """Type check a call to a named method on an object with union type.

        This essentially checks the call using check_method_call_by_name() for each
        union item and unions the result. We do this to allow plugins to act on
        individual union items.
        """
        res = []  # type: List[Type]
        meth_res = []  # type: List[Type]
        for typ in base_type.relevant_items():
            # Format error messages consistently with
            # mypy.checkmember.analyze_union_member_access().
            local_errors.disable_type_names += 1
            item, meth_item = self.check_method_call_by_name(method, typ, args, arg_kinds,
                                                             context, local_errors,
                                                             original_type)
            local_errors.disable_type_names -= 1
            res.append(item)
            meth_res.append(meth_item)
        return make_simplified_union(res), make_simplified_union(meth_res)

    def check_method_call(self,
                          method_name: str,
                          base_type: Type,
                          method_type: Type,
                          args: List[Expression],
                          arg_kinds: List[int],
                          context: Context,
                          local_errors: Optional[MessageBuilder] = None) -> Tuple[Type, Type]:
        """Type check a call to a method with the given name and type on an object.
        Return tuple (result type, inferred method type).
        """
        callable_name = self.method_fullname(base_type, method_name)
        object_type = base_type if callable_name is not None else None

        # Try to refine the method signature using plugin hooks before checking the call.
        method_type = self.transform_callee_type(
            callable_name, method_type, args, arg_kinds, context, object_type=object_type)

        return self.check_call(method_type, args, arg_kinds, context,
                               arg_messages=local_errors,
                               callable_name=callable_name, object_type=object_type)

    def check_op_reversible(self,
                            op_name: str,
                            left_type: Type,
                            left_expr: Expression,
                            right_type: Type,
                            right_expr: Expression,
                            context: Context,
                            msg: MessageBuilder) -> Tuple[Type, Type]:
        """Type check a binary operation, mimicking Python's runtime dispatch
        between the forward (__op__) and reverse (__rop__) special methods.

        Return tuple (result type, inferred operator method type).
        """
        def make_local_errors() -> MessageBuilder:
            """Creates a new MessageBuilder object."""
            local_errors = msg.clean_copy()
            local_errors.disable_count = 0
            return local_errors

        def lookup_operator(op_name: str, base_type: Type) -> Optional[Type]:
            """Looks up the given operator and returns the corresponding type,
            if it exists."""
            local_errors = make_local_errors()

            # TODO: Remove this call and rely just on analyze_member_access
            # Currently, it seems we still need this to correctly deal with
            # things like metaclasses?
            #
            # E.g. see the pythoneval.testMetaclassOpAccessAny test case.
            if not self.has_member(base_type, op_name):
                return None

            member = analyze_member_access(
                name=op_name,
                typ=base_type,
                is_lvalue=False,
                is_super=False,
                is_operator=True,
                original_type=base_type,
                context=context,
                msg=local_errors,
                chk=self.chk,
                in_literal_context=self.is_literal_context()
            )

            if local_errors.is_errors():
                return None
            else:
                return member

        def lookup_definer(typ: Instance, attr_name: str) -> Optional[str]:
            """Returns the name of the class that contains the actual definition of attr_name.

            So if class A defines foo and class B subclasses A, running
            'get_class_defined_in(B, "foo")` would return the full name of A.

            However, if B were to override and redefine foo, that method call would
            return the full name of B instead.

            If the attr name is not present in the given class or its MRO, returns None.
            """
            for cls in typ.type.mro:
                if cls.names.get(attr_name):
                    return cls.fullname
            return None

        left_type = get_proper_type(left_type)
        right_type = get_proper_type(right_type)

        # If either the LHS or the RHS are Any, we can't really conclude anything
        # about the operation since the Any type may or may not define an
        # __op__ or __rop__ method. So, we punt and return Any instead.

        if isinstance(left_type, AnyType):
            any_type = AnyType(TypeOfAny.from_another_any, source_any=left_type)
            return any_type, any_type
        if isinstance(right_type, AnyType):
            any_type = AnyType(TypeOfAny.from_another_any, source_any=right_type)
            return any_type, any_type

        # STEP 1:
        # We start by getting the __op__ and __rop__ methods, if they exist.

        rev_op_name = self.get_reverse_op_method(op_name)

        left_op = lookup_operator(op_name, left_type)
        right_op = lookup_operator(rev_op_name, right_type)

        # STEP 2a:
        # We figure out in which order Python will call the operator methods. As it
        # turns out, it's not as simple as just trying to call __op__ first and
        # __rop__ second.
        #
        # We store the determined order inside the 'variants_raw' variable,
        # which records tuples containing the method, base type, and the argument.

        bias_right = is_proper_subtype(right_type, left_type)
        if op_name in nodes.op_methods_that_shortcut and is_same_type(left_type, right_type):
            # When we do "A() + A()", for example, Python will only call the __add__ method,
            # never the __radd__ method.
            #
            # This is the case even if the __add__ method is completely missing and the __radd__
            # method is defined.

            variants_raw = [
                (left_op, left_type, right_expr)
            ]
        elif (is_subtype(right_type, left_type)
                and isinstance(left_type, Instance)
                and isinstance(right_type, Instance)
                and lookup_definer(left_type, op_name) != lookup_definer(right_type,
                                                                         rev_op_name)):
            # When we do "A() + B()" where B is a subclass of A, we'll actually try calling
            # B's __radd__ method first, but ONLY if B explicitly defines or overrides the
            # __radd__ method.
            #
            # This mechanism lets subclasses "refine" the expected outcome of the operation, even
            # if they're located on the RHS.

            variants_raw = [
                (right_op, right_type, left_expr),
                (left_op, left_type, right_expr),
            ]
        else:
            # In all other cases, we do the usual thing and call __add__ first and
            # __radd__ second when doing "A() + B()".

            variants_raw = [
                (left_op, left_type, right_expr),
                (right_op, right_type, left_expr),
            ]

        # STEP 2b:
        # When running Python 2, we might also try calling the __cmp__ method.

        is_python_2 = self.chk.options.python_version[0] == 2
        if is_python_2 and op_name in nodes.ops_falling_back_to_cmp:
            cmp_method = nodes.comparison_fallback_method
            left_cmp_op = lookup_operator(cmp_method, left_type)
            right_cmp_op = lookup_operator(cmp_method, right_type)

            if bias_right:
                variants_raw.append((right_cmp_op, right_type, left_expr))
                variants_raw.append((left_cmp_op, left_type, right_expr))
            else:
                variants_raw.append((left_cmp_op, left_type, right_expr))
                variants_raw.append((right_cmp_op, right_type, left_expr))

        # STEP 3:
        # We now filter out all non-existent operators. The 'variants' list contains
        # all operator methods that are actually present, in the order that Python
        # attempts to invoke them.

        variants = [(op, obj, arg) for (op, obj, arg) in variants_raw if op is not None]

        # STEP 4:
        # We now try invoking each one. If an operation succeeds, end early and return
        # the corresponding result. Otherwise, return the result and errors associated
        # with the first entry.

        errors = []
        results = []
        for method, obj, arg in variants:
            local_errors = make_local_errors()
            result = self.check_method_call(
                op_name, obj, method, [arg], [ARG_POS], context, local_errors)
            if local_errors.is_errors():
                errors.append(local_errors)
                results.append(result)
            else:
                return result

        # STEP 4b:
        # Sometimes, the variants list is empty. In that case, we fall-back to attempting to
        # call the __op__ method (even though it's missing).

        if not variants:
            local_errors = make_local_errors()
            result = self.check_method_call_by_name(
                op_name, left_type, [right_expr], [ARG_POS], context, local_errors)

            if local_errors.is_errors():
                errors.append(local_errors)
                results.append(result)
            else:
                # In theory, we should never enter this case, but it seems
                # we sometimes do, when dealing with Type[...]? E.g. see
                # check-classes.testTypeTypeComparisonWorks.
                #
                # This is probably related to the TODO in lookup_operator(...)
                # up above.
                #
                # TODO: Remove this extra case
                return result

        msg.add_errors(errors[0])

        if len(results) == 1:
            return results[0]
        else:
            error_any = AnyType(TypeOfAny.from_error)
            result = error_any, error_any
            return result

    def check_op(self, method: str, base_type: Type,
                 arg: Expression, context: Context,
                 allow_reverse: bool = False) -> Tuple[Type, Type]:
        """Type check a binary operation which maps to a method call.

        Return tuple (result type, inferred operator method type).
        """

        if allow_reverse:
            left_variants = [base_type]
            base_type = get_proper_type(base_type)
            if isinstance(base_type, UnionType):
                left_variants = [item for item in base_type.relevant_items()]
            right_type = self.accept(arg)

            # Step 1: We first try leaving the right arguments alone and destructure
            # just the left ones. (Mypy can sometimes perform some more precise inference
            # if we leave the right operands a union -- see testOperatorWithEmptyListAndSum.)
            msg = self.msg.clean_copy()
            msg.disable_count = 0
            all_results = []
            all_inferred = []

            for left_possible_type in left_variants:
                result, inferred = self.check_op_reversible(
                    op_name=method,
                    left_type=left_possible_type,
                    left_expr=TempNode(left_possible_type, context=context),
                    right_type=right_type,
                    right_expr=arg,
                    context=context,
                    msg=msg)
                all_results.append(result)
                all_inferred.append(inferred)

            if not msg.is_errors():
                results_final = make_simplified_union(all_results)
                inferred_final = make_simplified_union(all_inferred)
                return results_final, inferred_final

            # Step 2: If that fails, we try again but also destructure the right argument.
            # This is also necessary to make certain edge cases work -- see
            # testOperatorDoubleUnionInterwovenUnionAdd, for example.

            # Note: We want to pass in the original 'arg' for 'left_expr' and 'right_expr'
            # whenever possible so that plugins and similar things can introspect on the original
            # node if possible.
            #
            # We don't do the same for the base expression because it could lead to weird
            # type inference errors -- e.g. see 'testOperatorDoubleUnionSum'.
            # TODO: Can we use `type_overrides_set()` here?
            right_variants = [(right_type, arg)]
            right_type = get_proper_type(right_type)
            if isinstance(right_type, UnionType):
                right_variants = [(item, TempNode(item, context=context))
                                  for item in right_type.relevant_items()]

            msg = self.msg.clean_copy()
            msg.disable_count = 0
            all_results = []
            all_inferred = []

            for left_possible_type in left_variants:
                for right_possible_type, right_expr in right_variants:
                    result, inferred = self.check_op_reversible(
                        op_name=method,
                        left_type=left_possible_type,
                        left_expr=TempNode(left_possible_type, context=context),
                        right_type=right_possible_type,
                        right_expr=right_expr,
                        context=context,
                        msg=msg)
                    all_results.append(result)
                    all_inferred.append(inferred)

            if msg.is_errors():
                self.msg.add_errors(msg)
                # Point the user at which operand(s) came from a union, since
                # the destructuring above can make the raw errors confusing.
                if len(left_variants) >= 2 and len(right_variants) >= 2:
                    self.msg.warn_both_operands_are_from_unions(context)
                elif len(left_variants) >= 2:
                    self.msg.warn_operand_was_from_union(
                        "Left", base_type, context=right_expr)
                elif len(right_variants) >= 2:
                    self.msg.warn_operand_was_from_union(
                        "Right", right_type, context=right_expr)

            # See the comment in 'check_overload_call' for more details on why
            # we call 'combine_function_signature' instead of just unioning the inferred
            # callable types.
            results_final = make_simplified_union(all_results)
            inferred_final = self.combine_function_signatures(all_inferred)
            return results_final, inferred_final
        else:
            return self.check_method_call_by_name(
                method=method,
                base_type=base_type,
                args=[arg],
                arg_kinds=[ARG_POS],
                context=context,
                local_errors=self.msg,
            )

    def get_reverse_op_method(self, method: str) -> str:
        """Return the reverse-operator method name (e.g. '__add__' -> '__radd__')."""
        if method == '__div__' and self.chk.options.python_version[0] == 2:
            return '__rdiv__'
        else:
            return nodes.reverse_op_methods[method]

    def check_boolean_op(self, e: OpExpr, context: Context) -> Type:
        """Type check a boolean operation ('and' or 'or')."""

        # A boolean operation can evaluate to either of the operands.

        # We use the current type context to guide the type inference of
        # the left operand.
        # We also use the left operand type to guide the type
        # inference of the right operand so that expressions such as
        # '[1] or []' are inferred correctly.
        ctx = self.type_context[-1]
        left_type = self.accept(e.left, ctx)

        assert e.op in ('and', 'or')  # Checked by visit_op_expr

        if e.op == 'and':
            right_map, left_map = self.chk.find_isinstance_check(e.left)
            restricted_left_type = false_only(left_type)
            result_is_left = not left_type.can_be_true
        elif e.op == 'or':
            left_map, right_map = self.chk.find_isinstance_check(e.left)
            restricted_left_type = true_only(left_type)
            result_is_left = not left_type.can_be_false

        # If right_map is None then we know mypy considers the right branch
        # to be unreachable and therefore any errors found in the right branch
        # should be suppressed.
        #
        # Note that we perform these checks *before* we take into account
        # the analysis from the semanal phase below. We assume that nodes
        # marked as unreachable during semantic analysis were done so intentionally.
        # So, we shouldn't report an error.
        if self.chk.options.warn_unreachable:
            if left_map is None:
                self.msg.redundant_left_operand(e.op, e.left)
            if right_map is None:
                self.msg.redundant_right_operand(e.op, e.right)

        if e.right_unreachable:
            right_map = None
        elif e.right_always:
            left_map = None

        if right_map is None:
            self.msg.disable_errors()
        try:
            right_type = self.analyze_cond_branch(right_map, e.right, left_type)
        finally:
            if right_map is None:
                self.msg.enable_errors()

        if right_map is None:
            # The boolean expression is statically known to be the left value
            assert left_map is not None  # find_isinstance_check guarantees this
            return left_type
        if left_map is None:
            # The boolean expression is statically known to be the right value
            assert right_map is not None  # find_isinstance_check guarantees this
            return right_type

        if isinstance(restricted_left_type, UninhabitedType):
            # The left operand can never be the result
            return right_type
        elif result_is_left:
            # The left operand is always the result
            return left_type
        else:
            return make_simplified_union([restricted_left_type, right_type])

    def check_list_multiply(self, e: OpExpr) -> Type:
        """Type check an expression of form '[...] * e'.

        Type inference is special-cased for this common construct.
        """
        right_type = self.accept(e.right)
        if is_subtype(right_type, self.named_type('builtins.int')):
            # Special case: [...] * <int value>. Use the type context of the
            # OpExpr, since the multiplication does not affect the type.
            left_type = self.accept(e.left, type_context=self.type_context[-1])
        else:
            left_type = self.accept(e.left)
        result, method_type = self.check_op('__mul__', left_type, e.right, e)
        e.method_type = method_type
        return result

    def visit_assignment_expr(self, e: AssignmentExpr) -> Type:
        """Type check an assignment (walrus) expression; its type is the value's type."""
        value = self.accept(e.value)
        self.chk.check_assignment(e.target, e.value)
        self.chk.check_final(e)
        return value

    def visit_unary_expr(self, e: UnaryExpr) -> Type:
        """Type check an unary operation ('not', '-', '+' or '~')."""
        operand_type = self.accept(e.expr)
        op = e.op
        if op == 'not':
            result = self.bool_type()  # type: Type
        else:
            method = nodes.unary_op_methods[op]
            result, method_type = self.check_method_call_by_name(method, operand_type, [], [], e)
            e.method_type = method_type
        return result

    def visit_index_expr(self, e: IndexExpr) -> Type:
        """Type check an index expression (base[index]).

        It may also represent type application.
        """
        result = self.visit_index_expr_helper(e)
        result = get_proper_type(self.narrow_type_from_binder(e, result))
        if (self.is_literal_context() and isinstance(result, Instance)
                and result.last_known_value is not None):
            result = result.last_known_value
        return result

    def visit_index_expr_helper(self, e: IndexExpr) -> Type:
        if e.analyzed:
            # It's actually a type application.
            return self.accept(e.analyzed)
        left_type = self.accept(e.base)
        return self.visit_index_with_type(left_type, e)

    def visit_index_with_type(self, left_type: Type, e: IndexExpr,
                              original_type: Optional[ProperType] = None) -> Type:
        """Analyze type of an index expression for a given type of base expression.

        The 'original_type' is used for error messages (currently used for union types).
""" index = e.index left_type = get_proper_type(left_type) # Visit the index, just to make sure we have a type for it available self.accept(index) if isinstance(left_type, UnionType): original_type = original_type or left_type return make_simplified_union([self.visit_index_with_type(typ, e, original_type) for typ in left_type.relevant_items()]) elif isinstance(left_type, TupleType) and self.chk.in_checked_function(): # Special case for tuples. They return a more specific type when # indexed by an integer literal. if isinstance(index, SliceExpr): return self.visit_tuple_slice_helper(left_type, index) ns = self.try_getting_int_literals(index) if ns is not None: out = [] for n in ns: if n < 0: n += len(left_type.items) if 0 <= n < len(left_type.items): out.append(left_type.items[n]) else: self.chk.fail(message_registry.TUPLE_INDEX_OUT_OF_RANGE, e) return AnyType(TypeOfAny.from_error) return make_simplified_union(out) else: return self.nonliteral_tuple_index_helper(left_type, index) elif isinstance(left_type, TypedDictType): return self.visit_typeddict_index_expr(left_type, e.index) elif (isinstance(left_type, CallableType) and left_type.is_type_obj() and left_type.type_object().is_enum): return self.visit_enum_index_expr(left_type.type_object(), e.index, e) else: result, method_type = self.check_method_call_by_name( '__getitem__', left_type, [e.index], [ARG_POS], e, original_type=original_type) e.method_type = method_type return result def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type: begin = [None] # type: Sequence[Optional[int]] end = [None] # type: Sequence[Optional[int]] stride = [None] # type: Sequence[Optional[int]] if slic.begin_index: begin_raw = self.try_getting_int_literals(slic.begin_index) if begin_raw is None: return self.nonliteral_tuple_index_helper(left_type, slic) begin = begin_raw if slic.end_index: end_raw = self.try_getting_int_literals(slic.end_index) if end_raw is None: return 
self.nonliteral_tuple_index_helper(left_type, slic) end = end_raw if slic.stride: stride_raw = self.try_getting_int_literals(slic.stride) if stride_raw is None: return self.nonliteral_tuple_index_helper(left_type, slic) stride = stride_raw items = [] # type: List[Type] for b, e, s in itertools.product(begin, end, stride): items.append(left_type.slice(b, e, s)) return make_simplified_union(items) def try_getting_int_literals(self, index: Expression) -> Optional[List[int]]: """If the given expression or type corresponds to an int literal or a union of int literals, returns a list of the underlying ints. Otherwise, returns None. Specifically, this function is guaranteed to return a list with one or more ints if one one the following is true: 1. 'expr' is a IntExpr or a UnaryExpr backed by an IntExpr 2. 'typ' is a LiteralType containing an int 3. 'typ' is a UnionType containing only LiteralType of ints """ if isinstance(index, IntExpr): return [index.value] elif isinstance(index, UnaryExpr): if index.op == '-': operand = index.expr if isinstance(operand, IntExpr): return [-1 * operand.value] typ = get_proper_type(self.accept(index)) if isinstance(typ, Instance) and typ.last_known_value is not None: typ = typ.last_known_value if isinstance(typ, LiteralType) and isinstance(typ.value, int): return [typ.value] if isinstance(typ, UnionType): out = [] for item in get_proper_types(typ.items): if isinstance(item, LiteralType) and isinstance(item.value, int): out.append(item.value) else: return None return out return None def nonliteral_tuple_index_helper(self, left_type: TupleType, index: Expression) -> Type: index_type = self.accept(index) expected_type = UnionType.make_union([self.named_type('builtins.int'), self.named_type('builtins.slice')]) if not self.chk.check_subtype(index_type, expected_type, index, message_registry.INVALID_TUPLE_INDEX_TYPE, 'actual type', 'expected type'): return AnyType(TypeOfAny.from_error) else: union = make_simplified_union(left_type.items) if 
isinstance(index, SliceExpr): return self.chk.named_generic_type('builtins.tuple', [union]) else: return union def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) -> Type: if isinstance(index, (StrExpr, UnicodeExpr)): key_names = [index.value] else: typ = get_proper_type(self.accept(index)) if isinstance(typ, UnionType): key_types = list(typ.items) # type: List[Type] else: key_types = [typ] key_names = [] for key_type in get_proper_types(key_types): if isinstance(key_type, Instance) and key_type.last_known_value is not None: key_type = key_type.last_known_value if isinstance(key_type, LiteralType) and isinstance(key_type.value, str): key_names.append(key_type.value) else: self.msg.typeddict_key_must_be_string_literal(td_type, index) return AnyType(TypeOfAny.from_error) value_types = [] for key_name in key_names: value_type = td_type.items.get(key_name) if value_type is None: self.msg.typeddict_key_not_found(td_type, key_name, index) return AnyType(TypeOfAny.from_error) else: value_types.append(value_type) return make_simplified_union(value_types) def visit_enum_index_expr(self, enum_type: TypeInfo, index: Expression, context: Context) -> Type: string_type = self.named_type('builtins.str') # type: Type if self.chk.options.python_version[0] < 3: string_type = UnionType.make_union([string_type, self.named_type('builtins.unicode')]) self.chk.check_subtype(self.accept(index), string_type, context, "Enum index should be a string", "actual index type") return Instance(enum_type, []) def visit_cast_expr(self, expr: CastExpr) -> Type: """Type check a cast expression.""" source_type = self.accept(expr.expr, type_context=AnyType(TypeOfAny.special_form), allow_none_return=True, always_allow_any=True) target_type = expr.type options = self.chk.options if options.warn_redundant_casts and is_same_type(source_type, target_type): self.msg.redundant_cast(target_type, expr) if options.disallow_any_unimported and has_any_from_unimported_type(target_type): 
self.msg.unimported_type_becomes_any("Target type of cast", target_type, expr) check_for_explicit_any(target_type, self.chk.options, self.chk.is_typeshed_stub, self.msg, context=expr) return target_type def visit_reveal_expr(self, expr: RevealExpr) -> Type: """Type check a reveal_type expression.""" if expr.kind == REVEAL_TYPE: assert expr.expr is not None revealed_type = self.accept(expr.expr, type_context=self.type_context[-1]) if not self.chk.current_node_deferred: self.msg.reveal_type(revealed_type, expr.expr) if not self.chk.in_checked_function(): self.msg.note("'reveal_type' always outputs 'Any' in unchecked functions", expr.expr) return revealed_type else: # REVEAL_LOCALS if not self.chk.current_node_deferred: # the RevealExpr contains a local_nodes attribute, # calculated at semantic analysis time. Use it to pull out the # corresponding subset of variables in self.chk.type_map names_to_types = { var_node.name: var_node.type for var_node in expr.local_nodes } if expr.local_nodes is not None else {} self.msg.reveal_locals(names_to_types, expr) return NoneType() def visit_type_application(self, tapp: TypeApplication) -> Type: """Type check a type application (expr[type, ...]). There are two different options here, depending on whether expr refers to a type alias or directly to a generic class. In the first case we need to use a dedicated function typeanal.expand_type_aliases. This is due to the fact that currently type aliases machinery uses unbound type variables, while normal generics use bound ones; see TypeAlias docstring for more details. """ if isinstance(tapp.expr, RefExpr) and isinstance(tapp.expr.node, TypeAlias): # Subscription of a (generic) alias in runtime context, expand the alias. 
    def alias_type_in_runtime_context(self, alias: TypeAlias,
                                      no_args: bool, ctx: Context,
                                      *,
                                      alias_definition: bool = False) -> Type:
        """Get type of a type alias (could be generic) in a runtime expression.

        Note that this function can be called only if the alias appears _not_
        as a target of type application, which is treated separately in the
        visit_type_application method. Some examples where this method is
        called are casts and instantiation:

            class LongName(Generic[T]): ...
            A = LongName[int]

            x = A()
            y = cast(A, ...)
        """
        if isinstance(alias.target, Instance) and alias.target.invalid:  # type: ignore
            # An invalid alias, error already has been reported
            return AnyType(TypeOfAny.from_error)
        # If this is a generic alias, we set all variables to `Any`.
        # For example:
        #     A = List[Tuple[T, T]]
        #     x = A() <- same as List[Tuple[Any, Any]], see PEP 484.
        item = get_proper_type(set_any_tvars(alias, ctx.line, ctx.column))
        if isinstance(item, Instance):
            # Normally we get a callable type (or overloaded) with .is_type_obj() true
            # representing the class's constructor
            tp = type_object_type(item.type, self.named_type)
            if no_args:
                # Alias like `A = List` -- the constructor is usable as-is.
                return tp
            return self.apply_type_arguments_to_callable(tp, item.args, ctx)
        elif (isinstance(item, TupleType) and
              # Tuple[str, int]() fails at runtime, only named tuples and subclasses work.
              tuple_fallback(item).type.fullname != 'builtins.tuple'):
            return type_object_type(tuple_fallback(item).type, self.named_type)
        elif isinstance(item, AnyType):
            return AnyType(TypeOfAny.from_another_any, source_any=item)
        else:
            if alias_definition:
                # The r.h.s. of an alias definition itself; not an error.
                return AnyType(TypeOfAny.special_form)
            # This type is invalid in most runtime contexts.
            self.msg.alias_invalid_in_runtime_context(item, ctx)
            return AnyType(TypeOfAny.from_error)
""" tp = get_proper_type(tp) if isinstance(tp, CallableType): if len(tp.variables) != len(args): self.msg.incompatible_type_application(len(tp.variables), len(args), ctx) return AnyType(TypeOfAny.from_error) return self.apply_generic_arguments(tp, args, ctx) if isinstance(tp, Overloaded): for it in tp.items(): if len(it.variables) != len(args): self.msg.incompatible_type_application(len(it.variables), len(args), ctx) return AnyType(TypeOfAny.from_error) return Overloaded([self.apply_generic_arguments(it, args, ctx) for it in tp.items()]) return AnyType(TypeOfAny.special_form) def visit_list_expr(self, e: ListExpr) -> Type: """Type check a list expression [...].""" return self.check_lst_expr(e.items, 'builtins.list', '', e) def visit_set_expr(self, e: SetExpr) -> Type: return self.check_lst_expr(e.items, 'builtins.set', '', e) def check_lst_expr(self, items: List[Expression], fullname: str, tag: str, context: Context) -> Type: # Translate into type checking a generic function call. # Used for list and set expressions, as well as for tuples # containing star expressions that don't refer to a # Tuple. (Note: "lst" stands for list-set-tuple. :-) tvdef = TypeVarDef('T', 'T', -1, [], self.object_type()) tv = TypeVarType(tvdef) constructor = CallableType( [tv], [nodes.ARG_STAR], [None], self.chk.named_generic_type(fullname, [tv]), self.named_type('builtins.function'), name=tag, variables=[tvdef]) out = self.check_call(constructor, [(i.expr if isinstance(i, StarExpr) else i) for i in items], [(nodes.ARG_STAR if isinstance(i, StarExpr) else nodes.ARG_POS) for i in items], context)[0] return remove_instance_last_known_values(out) def visit_tuple_expr(self, e: TupleExpr) -> Type: """Type check a tuple expression.""" # Try to determine type context for type inference. 
type_context = get_proper_type(self.type_context[-1]) type_context_items = None if isinstance(type_context, UnionType): tuples_in_context = [t for t in get_proper_types(type_context.items) if (isinstance(t, TupleType) and len(t.items) == len(e.items)) or is_named_instance(t, 'builtins.tuple')] if len(tuples_in_context) == 1: type_context = tuples_in_context[0] else: # There are either no relevant tuples in the Union, or there is # more than one. Either way, we can't decide on a context. pass if isinstance(type_context, TupleType): type_context_items = type_context.items elif type_context and is_named_instance(type_context, 'builtins.tuple'): assert isinstance(type_context, Instance) if type_context.args: type_context_items = [type_context.args[0]] * len(e.items) # NOTE: it's possible for the context to have a different # number of items than e. In that case we use those context # items that match a position in e, and we'll worry about type # mismatches later. # Infer item types. Give up if there's a star expression # that's not a Tuple. items = [] # type: List[Type] j = 0 # Index into type_context_items; irrelevant if type_context_items is none for i in range(len(e.items)): item = e.items[i] if isinstance(item, StarExpr): # Special handling for star expressions. # TODO: If there's a context, and item.expr is a # TupleExpr, flatten it, so we can benefit from the # context? Counterargument: Why would anyone write # (1, *(2, 3)) instead of (1, 2, 3) except in a test? tt = self.accept(item.expr) tt = get_proper_type(tt) if isinstance(tt, TupleType): items.extend(tt.items) j += len(tt.items) else: # A star expression that's not a Tuple. # Treat the whole thing as a variable-length tuple. return self.check_lst_expr(e.items, 'builtins.tuple', '', e) else: if not type_context_items or j >= len(type_context_items): tt = self.accept(item) else: tt = self.accept(item, type_context_items[j]) j += 1 items.append(tt) # This is a partial fallback item type. 
A precise type will be calculated on demand. fallback_item = AnyType(TypeOfAny.special_form) return TupleType(items, self.chk.named_generic_type('builtins.tuple', [fallback_item])) def visit_dict_expr(self, e: DictExpr) -> Type: """Type check a dict expression. Translate it into a call to dict(), with provisions for **expr. """ # if the dict literal doesn't match TypedDict, check_typeddict_call_with_dict reports # an error, but returns the TypedDict type that matches the literal it found # that would cause a second error when that TypedDict type is returned upstream # to avoid the second error, we always return TypedDict type that was requested typeddict_context = self.find_typeddict_context(self.type_context[-1]) if typeddict_context: self.check_typeddict_call_with_dict( callee=typeddict_context, kwargs=e, context=e ) return typeddict_context.copy_modified() # Collect function arguments, watching out for **expr. args = [] # type: List[Expression] # Regular "key: value" stargs = [] # type: List[Expression] # For "**expr" for key, value in e.items: if key is None: stargs.append(value) else: tup = TupleExpr([key, value]) if key.line >= 0: tup.line = key.line tup.column = key.column else: tup.line = value.line tup.column = value.column args.append(tup) # Define type variables (used in constructors below). ktdef = TypeVarDef('KT', 'KT', -1, [], self.object_type()) vtdef = TypeVarDef('VT', 'VT', -2, [], self.object_type()) kt = TypeVarType(ktdef) vt = TypeVarType(vtdef) rv = None # Call dict(*args), unless it's empty and stargs is not. if args or not stargs: # The callable type represents a function like this: # # def (*v: Tuple[kt, vt]) -> Dict[kt, vt]: ... 
constructor = CallableType( [TupleType([kt, vt], self.named_type('builtins.tuple'))], [nodes.ARG_STAR], [None], self.chk.named_generic_type('builtins.dict', [kt, vt]), self.named_type('builtins.function'), name='', variables=[ktdef, vtdef]) rv = self.check_call(constructor, args, [nodes.ARG_POS] * len(args), e)[0] else: # dict(...) will be called below. pass # Call rv.update(arg) for each arg in **stargs, # except if rv isn't set yet, then set rv = dict(arg). if stargs: for arg in stargs: if rv is None: constructor = CallableType( [self.chk.named_generic_type('typing.Mapping', [kt, vt])], [nodes.ARG_POS], [None], self.chk.named_generic_type('builtins.dict', [kt, vt]), self.named_type('builtins.function'), name='', variables=[ktdef, vtdef]) rv = self.check_call(constructor, [arg], [nodes.ARG_POS], arg)[0] else: self.check_method_call_by_name('update', rv, [arg], [nodes.ARG_POS], arg) assert rv is not None return rv def find_typeddict_context(self, context: Optional[Type]) -> Optional[TypedDictType]: context = get_proper_type(context) if isinstance(context, TypedDictType): return context elif isinstance(context, UnionType): items = [] for item in context.items: item_context = self.find_typeddict_context(item) if item_context: items.append(item_context) if len(items) == 1: # Only one union item is TypedDict, so use the context as it's unambiguous. return items[0] # No TypedDict type in context. return None def visit_lambda_expr(self, e: LambdaExpr) -> Type: """Type check lambda expression.""" self.chk.check_default_args(e, body_is_trivial=False) inferred_type, type_override = self.infer_lambda_type_using_context(e) if not inferred_type: self.chk.return_types.append(AnyType(TypeOfAny.special_form)) # Type check everything in the body except for the final return # statement (it can contain tuple unpacking before return). 
    def infer_lambda_type_using_context(self, e: LambdaExpr) -> Tuple[Optional[CallableType],
                                                                      Optional[CallableType]]:
        """Try to infer lambda expression type using context.

        Return None if could not infer type.
        The second item in the return type is the type_override parameter for check_func_item.
        """
        # TODO also accept 'Any' context
        ctx = get_proper_type(self.type_context[-1])

        if isinstance(ctx, UnionType):
            # A union context is usable only if exactly one member is callable.
            callables = [t for t in get_proper_types(ctx.relevant_items())
                         if isinstance(t, CallableType)]
            if len(callables) == 1:
                ctx = callables[0]

        if not ctx or not isinstance(ctx, CallableType):
            return None, None

        # The context may have function type variables in it. We replace them
        # since these are the type variables we are ultimately trying to infer;
        # they must be considered as indeterminate. We use ErasedType since it
        # does not affect type inference results (it is for purposes like this
        # only).
        callable_ctx = get_proper_type(replace_meta_vars(ctx, ErasedType()))
        assert isinstance(callable_ctx, CallableType)

        arg_kinds = [arg.kind for arg in e.arguments]

        if callable_ctx.is_ellipsis_args:
            # Fill in Any arguments to match the arguments of the lambda.
            callable_ctx = callable_ctx.copy_modified(
                is_ellipsis_args=False,
                arg_types=[AnyType(TypeOfAny.special_form)] * len(arg_kinds),
                arg_kinds=arg_kinds,
                arg_names=[None] * len(arg_kinds)
            )

        if ARG_STAR in arg_kinds or ARG_STAR2 in arg_kinds:
            # TODO treat this case appropriately
            return callable_ctx, None
        if callable_ctx.arg_kinds != arg_kinds:
            # Incompatible context; cannot use it to infer types.
            self.chk.fail(message_registry.CANNOT_INFER_LAMBDA_TYPE, e)
            return None, None

        return callable_ctx, callable_ctx
    def _super_arg_types(self, e: SuperExpr) -> Union[Type, Tuple[Type, Type]]:
        """
        Computes the types of the type and instance expressions in super(T, instance), or the
        implicit ones for zero-argument super() expressions.  Returns a single type for the
        whole super expression when possible (for errors, anys), otherwise the pair of
        computed types.
        """
        if not self.chk.in_checked_function():
            return AnyType(TypeOfAny.unannotated)
        elif len(e.call.args) == 0:
            if self.chk.options.python_version[0] == 2:
                # Python 2 has no zero-argument super().
                self.chk.fail(message_registry.TOO_FEW_ARGS_FOR_SUPER, e, code=codes.CALL_ARG)
                return AnyType(TypeOfAny.from_error)
            elif not e.info:
                # This has already been reported by the semantic analyzer.
                return AnyType(TypeOfAny.from_error)
            elif self.chk.scope.active_class():
                # super() directly in a class body, not inside a method.
                self.chk.fail(message_registry.SUPER_OUTSIDE_OF_METHOD_NOT_SUPPORTED, e)
                return AnyType(TypeOfAny.from_error)

            # Zero-argument super() is like super(<current class>, <self>)
            current_type = fill_typevars(e.info)
            type_type = TypeType(current_type)  # type: ProperType

            # Use the type of the self argument, in case it was annotated
            method = self.chk.scope.top_function()
            assert method is not None
            if method.arguments:
                instance_type = method.arguments[0].variable.type or current_type  # type: Type
            else:
                self.chk.fail(message_registry.SUPER_ENCLOSING_POSITIONAL_ARGS_REQUIRED, e)
                return AnyType(TypeOfAny.from_error)
        elif ARG_STAR in e.call.arg_kinds:
            self.chk.fail(message_registry.SUPER_VARARGS_NOT_SUPPORTED, e)
            return AnyType(TypeOfAny.from_error)
        elif set(e.call.arg_kinds) != {ARG_POS}:
            self.chk.fail(message_registry.SUPER_POSITIONAL_ARGS_REQUIRED, e)
            return AnyType(TypeOfAny.from_error)
        elif len(e.call.args) == 1:
            self.chk.fail(message_registry.SUPER_WITH_SINGLE_ARG_NOT_SUPPORTED, e)
            return AnyType(TypeOfAny.from_error)
        elif len(e.call.args) == 2:
            # Explicit super(T, instance): type both arguments.
            type_type = get_proper_type(self.accept(e.call.args[0]))
            instance_type = self.accept(e.call.args[1])
        else:
            self.chk.fail(message_registry.TOO_MANY_ARGS_FOR_SUPER, e)
            return AnyType(TypeOfAny.from_error)

        # Imprecisely assume that the type is the current class
        if isinstance(type_type, AnyType):
            if e.info:
                type_type = TypeType(fill_typevars(e.info))
            else:
                return AnyType(TypeOfAny.from_another_any, source_any=type_type)
        elif isinstance(type_type, TypeType):
            type_item = type_type.item
            if isinstance(type_item, AnyType):
                if e.info:
                    type_type = TypeType(fill_typevars(e.info))
                else:
                    return AnyType(TypeOfAny.from_another_any, source_any=type_item)

        if (not isinstance(type_type, TypeType)
                and not (isinstance(type_type, FunctionLike) and type_type.is_type_obj())):
            self.msg.first_argument_for_super_must_be_type(type_type, e)
            return AnyType(TypeOfAny.from_error)

        # Imprecisely assume that the instance is of the current class
        instance_type = get_proper_type(instance_type)
        if isinstance(instance_type, AnyType):
            if e.info:
                instance_type = fill_typevars(e.info)
            else:
                return AnyType(TypeOfAny.from_another_any, source_any=instance_type)
        elif isinstance(instance_type, TypeType):
            instance_item = instance_type.item
            if isinstance(instance_item, AnyType):
                if e.info:
                    instance_type = TypeType(fill_typevars(e.info))
                else:
                    return AnyType(TypeOfAny.from_another_any, source_any=instance_item)

        return type_type, instance_type
self.check_for_comp(gen) # Infer the type of the list comprehension by using a synthetic generic # callable type. tvdef = TypeVarDef('T', 'T', -1, [], self.object_type()) tv_list = [TypeVarType(tvdef)] # type: List[Type] constructor = CallableType( tv_list, [nodes.ARG_POS], [None], self.chk.named_generic_type(type_name, tv_list + additional_args), self.chk.named_type('builtins.function'), name=id_for_messages, variables=[tvdef]) return self.check_call(constructor, [gen.left_expr], [nodes.ARG_POS], gen)[0] def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type: """Type check a dictionary comprehension.""" with self.chk.binder.frame_context(can_skip=True, fall_through=0): self.check_for_comp(e) # Infer the type of the list comprehension by using a synthetic generic # callable type. ktdef = TypeVarDef('KT', 'KT', -1, [], self.object_type()) vtdef = TypeVarDef('VT', 'VT', -2, [], self.object_type()) kt = TypeVarType(ktdef) vt = TypeVarType(vtdef) constructor = CallableType( [kt, vt], [nodes.ARG_POS, nodes.ARG_POS], [None, None], self.chk.named_generic_type('builtins.dict', [kt, vt]), self.chk.named_type('builtins.function'), name='', variables=[ktdef, vtdef]) return self.check_call(constructor, [e.key, e.value], [nodes.ARG_POS, nodes.ARG_POS], e)[0] def check_for_comp(self, e: Union[GeneratorExpr, DictionaryComprehension]) -> None: """Check the for_comp part of comprehensions. That is the part from 'for': ... for x in y if z Note: This adds the type information derived from the condlists to the current binder. 
""" for index, sequence, conditions, is_async in zip(e.indices, e.sequences, e.condlists, e.is_async): if is_async: _, sequence_type = self.chk.analyze_async_iterable_item_type(sequence) else: _, sequence_type = self.chk.analyze_iterable_item_type(sequence) self.chk.analyze_index_variables(index, sequence_type, True, e) for condition in conditions: self.accept(condition) # values are only part of the comprehension when all conditions are true true_map, false_map = self.chk.find_isinstance_check(condition) if true_map: for var, type in true_map.items(): self.chk.binder.put(var, type) if self.chk.options.warn_unreachable: if true_map is None: self.msg.redundant_condition_in_comprehension(False, condition) elif false_map is None: self.msg.redundant_condition_in_comprehension(True, condition) def visit_conditional_expr(self, e: ConditionalExpr) -> Type: self.accept(e.cond) ctx = self.type_context[-1] # Gain type information from isinstance if it is there # but only for the current expression if_map, else_map = self.chk.find_isinstance_check(e.cond) if self.chk.options.warn_unreachable: if if_map is None: self.msg.redundant_condition_in_if(False, e.cond) elif else_map is None: self.msg.redundant_condition_in_if(True, e.cond) if_type = self.analyze_cond_branch(if_map, e.if_expr, context=ctx) # Analyze the right branch using full type context and store the type full_context_else_type = self.analyze_cond_branch(else_map, e.else_expr, context=ctx) if not mypy.checker.is_valid_inferred_type(if_type): # Analyze the right branch disregarding the left branch. else_type = full_context_else_type # If it would make a difference, re-analyze the left # branch using the right branch's type as context. if ctx is None or not is_equivalent(else_type, ctx): # TODO: If it's possible that the previous analysis of # the left branch produced errors that are avoided # using this context, suppress those errors. 
if_type = self.analyze_cond_branch(if_map, e.if_expr, context=else_type) else: # Analyze the right branch in the context of the left # branch's type. else_type = self.analyze_cond_branch(else_map, e.else_expr, context=if_type) # Only create a union type if the type context is a union, to be mostly # compatible with older mypy versions where we always did a join. # # TODO: Always create a union or at least in more cases? if isinstance(get_proper_type(self.type_context[-1]), UnionType): res = make_simplified_union([if_type, full_context_else_type]) else: res = join.join_types(if_type, else_type) return res def analyze_cond_branch(self, map: Optional[Dict[Expression, Type]], node: Expression, context: Optional[Type]) -> Type: with self.chk.binder.frame_context(can_skip=True, fall_through=0): if map is None: # We still need to type check node, in case we want to # process it for isinstance checks later self.accept(node, type_context=context) return UninhabitedType() self.chk.push_type_map(map) return self.accept(node, type_context=context) def visit_backquote_expr(self, e: BackquoteExpr) -> Type: self.accept(e.expr) return self.named_type('builtins.str') # # Helpers # def accept(self, node: Expression, type_context: Optional[Type] = None, allow_none_return: bool = False, always_allow_any: bool = False, ) -> Type: """Type check a node in the given type context. If allow_none_return is True and this expression is a call, allow it to return None. This applies only to this expression and not any subexpressions. 
""" if node in self.type_overrides: return self.type_overrides[node] self.type_context.append(type_context) try: if allow_none_return and isinstance(node, CallExpr): typ = self.visit_call_expr(node, allow_none_return=True) elif allow_none_return and isinstance(node, YieldFromExpr): typ = self.visit_yield_from_expr(node, allow_none_return=True) else: typ = node.accept(self) except Exception as err: report_internal_error(err, self.chk.errors.file, node.line, self.chk.errors, self.chk.options) self.type_context.pop() assert typ is not None self.chk.store_type(node, typ) if (self.chk.options.disallow_any_expr and not always_allow_any and not self.chk.is_stub and self.chk.in_checked_function() and has_any_type(typ) and not self.chk.current_node_deferred): self.msg.disallowed_any_type(typ, node) if not self.chk.in_checked_function() or self.chk.current_node_deferred: return AnyType(TypeOfAny.unannotated) else: return typ def named_type(self, name: str) -> Instance: """Return an instance type with type given by the name and no type arguments. Alias for TypeChecker.named_type. 
""" return self.chk.named_type(name) def is_valid_var_arg(self, typ: Type) -> bool: """Is a type valid as a *args argument?""" typ = get_proper_type(typ) return (isinstance(typ, TupleType) or is_subtype(typ, self.chk.named_generic_type('typing.Iterable', [AnyType(TypeOfAny.special_form)])) or isinstance(typ, AnyType)) def is_valid_keyword_var_arg(self, typ: Type) -> bool: """Is a type valid as a **kwargs argument?""" if self.chk.options.python_version[0] >= 3: return is_subtype(typ, self.chk.named_generic_type( 'typing.Mapping', [self.named_type('builtins.str'), AnyType(TypeOfAny.special_form)])) else: return ( is_subtype(typ, self.chk.named_generic_type( 'typing.Mapping', [self.named_type('builtins.str'), AnyType(TypeOfAny.special_form)])) or is_subtype(typ, self.chk.named_generic_type( 'typing.Mapping', [self.named_type('builtins.unicode'), AnyType(TypeOfAny.special_form)]))) def has_member(self, typ: Type, member: str) -> bool: """Does type have member with the given name?""" # TODO: refactor this to use checkmember.analyze_member_access, otherwise # these two should be carefully kept in sync. typ = get_proper_type(typ) if isinstance(typ, TypeVarType): typ = get_proper_type(typ.upper_bound) if isinstance(typ, TupleType): typ = tuple_fallback(typ) if isinstance(typ, LiteralType): typ = typ.fallback if isinstance(typ, Instance): return typ.type.has_readable_member(member) if isinstance(typ, CallableType) and typ.is_type_obj(): return typ.fallback.type.has_readable_member(member) elif isinstance(typ, AnyType): return True elif isinstance(typ, UnionType): result = all(self.has_member(x, member) for x in typ.relevant_items()) return result elif isinstance(typ, TypeType): # Type[Union[X, ...]] is always normalized to Union[Type[X], ...], # so we don't need to care about unions here. 
item = typ.item if isinstance(item, TypeVarType): item = get_proper_type(item.upper_bound) if isinstance(item, TupleType): item = tuple_fallback(item) if isinstance(item, Instance) and item.type.metaclass_type is not None: return self.has_member(item.type.metaclass_type, member) if isinstance(item, AnyType): return True return False else: return False def not_ready_callback(self, name: str, context: Context) -> None: """Called when we can't infer the type of a variable because it's not ready yet. Either defer type checking of the enclosing function to the next pass or report an error. """ self.chk.handle_cannot_determine_type(name, context) def visit_yield_expr(self, e: YieldExpr) -> Type: return_type = self.chk.return_types[-1] expected_item_type = self.chk.get_generator_yield_type(return_type, False) if e.expr is None: if (not isinstance(get_proper_type(expected_item_type), (NoneType, AnyType)) and self.chk.in_checked_function()): self.chk.fail(message_registry.YIELD_VALUE_EXPECTED, e) else: actual_item_type = self.accept(e.expr, expected_item_type) self.chk.check_subtype(actual_item_type, expected_item_type, e, message_registry.INCOMPATIBLE_TYPES_IN_YIELD, 'actual type', 'expected type') return self.chk.get_generator_receive_type(return_type, False) def visit_await_expr(self, e: AwaitExpr) -> Type: expected_type = self.type_context[-1] if expected_type is not None: expected_type = self.chk.named_generic_type('typing.Awaitable', [expected_type]) actual_type = get_proper_type(self.accept(e.expr, expected_type)) if isinstance(actual_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=actual_type) return self.check_awaitable_expr(actual_type, e, message_registry.INCOMPATIBLE_TYPES_IN_AWAIT) def check_awaitable_expr(self, t: Type, ctx: Context, msg: str) -> Type: """Check the argument to `await` and extract the type of value. Also used by `async for` and `async with`. 
""" if not self.chk.check_subtype(t, self.named_type('typing.Awaitable'), ctx, msg, 'actual type', 'expected type'): return AnyType(TypeOfAny.special_form) else: generator = self.check_method_call_by_name('__await__', t, [], [], ctx)[0] return self.chk.get_generator_return_type(generator, False) def visit_yield_from_expr(self, e: YieldFromExpr, allow_none_return: bool = False) -> Type: # NOTE: Whether `yield from` accepts an `async def` decorated # with `@types.coroutine` (or `@asyncio.coroutine`) depends on # whether the generator containing the `yield from` is itself # thus decorated. But it accepts a generator regardless of # how it's decorated. return_type = self.chk.return_types[-1] # TODO: What should the context for the sub-expression be? # If the containing function has type Generator[X, Y, ...], # the context should be Generator[X, Y, T], where T is the # context of the 'yield from' itself (but it isn't known). subexpr_type = get_proper_type(self.accept(e.expr)) # Check that the expr is an instance of Iterable and get the type of the iterator produced # by __iter__. 
if isinstance(subexpr_type, AnyType): iter_type = AnyType(TypeOfAny.from_another_any, source_any=subexpr_type) # type: Type elif self.chk.type_is_iterable(subexpr_type): if is_async_def(subexpr_type) and not has_coroutine_decorator(return_type): self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e) any_type = AnyType(TypeOfAny.special_form) generic_generator_type = self.chk.named_generic_type('typing.Generator', [any_type, any_type, any_type]) iter_type, _ = self.check_method_call_by_name( '__iter__', subexpr_type, [], [], context=generic_generator_type) else: if not (is_async_def(subexpr_type) and has_coroutine_decorator(return_type)): self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e) iter_type = AnyType(TypeOfAny.from_error) else: iter_type = self.check_awaitable_expr( subexpr_type, e, message_registry.INCOMPATIBLE_TYPES_IN_YIELD_FROM) # Check that the iterator's item type matches the type yielded by the Generator function # containing this `yield from` expression. expected_item_type = self.chk.get_generator_yield_type(return_type, False) actual_item_type = self.chk.get_generator_yield_type(iter_type, False) self.chk.check_subtype(actual_item_type, expected_item_type, e, message_registry.INCOMPATIBLE_TYPES_IN_YIELD_FROM, 'actual type', 'expected type') # Determine the type of the entire yield from expression. iter_type = get_proper_type(iter_type) if (isinstance(iter_type, Instance) and iter_type.type.fullname == 'typing.Generator'): expr_type = self.chk.get_generator_return_type(iter_type, False) else: # Non-Generators don't return anything from `yield from` expressions. # However special-case Any (which might be produced by an error). actual_item_type = get_proper_type(actual_item_type) if isinstance(actual_item_type, AnyType): expr_type = AnyType(TypeOfAny.from_another_any, source_any=actual_item_type) else: # Treat `Iterator[X]` as a shorthand for `Generator[X, None, Any]`. 
expr_type = NoneType() if not allow_none_return and isinstance(get_proper_type(expr_type), NoneType): self.chk.msg.does_not_return_value(None, e) return expr_type def visit_temp_node(self, e: TempNode) -> Type: return e.type def visit_type_var_expr(self, e: TypeVarExpr) -> Type: return AnyType(TypeOfAny.special_form) def visit_newtype_expr(self, e: NewTypeExpr) -> Type: return AnyType(TypeOfAny.special_form) def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type: tuple_type = e.info.tuple_type if tuple_type: if (self.chk.options.disallow_any_unimported and has_any_from_unimported_type(tuple_type)): self.msg.unimported_type_becomes_any("NamedTuple type", tuple_type, e) check_for_explicit_any(tuple_type, self.chk.options, self.chk.is_typeshed_stub, self.msg, context=e) return AnyType(TypeOfAny.special_form) def visit_enum_call_expr(self, e: EnumCallExpr) -> Type: for name, value in zip(e.items, e.values): if value is not None: typ = self.accept(value) if not isinstance(get_proper_type(typ), AnyType): var = e.info.names[name].node if isinstance(var, Var): # Inline TypeChecker.set_inferred_type(), # without the lvalue. (This doesn't really do # much, since the value attribute is defined # to have type Any in the typeshed stub.) var.type = typ var.is_inferred = True return AnyType(TypeOfAny.special_form) def visit_typeddict_expr(self, e: TypedDictExpr) -> Type: return AnyType(TypeOfAny.special_form) def visit__promote_expr(self, e: PromoteExpr) -> Type: return e.type def visit_star_expr(self, e: StarExpr) -> StarType: return StarType(self.accept(e.expr)) def object_type(self) -> Instance: """Return instance type 'object'.""" return self.named_type('builtins.object') def bool_type(self) -> Instance: """Return instance type 'bool'.""" return self.named_type('builtins.bool') @overload def narrow_type_from_binder(self, expr: Expression, known_type: Type) -> Type: ... 
@overload def narrow_type_from_binder(self, expr: Expression, known_type: Type, skip_non_overlapping: bool) -> Optional[Type]: ... def narrow_type_from_binder(self, expr: Expression, known_type: Type, skip_non_overlapping: bool = False) -> Optional[Type]: """Narrow down a known type of expression using information in conditional type binder. If 'skip_non_overlapping' is True, return None if the type and restriction are non-overlapping. """ if literal(expr) >= LITERAL_TYPE: restriction = self.chk.binder.get(expr) # If the current node is deferred, some variables may get Any types that they # otherwise wouldn't have. We don't want to narrow down these since it may # produce invalid inferred Optional[Any] types, at least. if restriction and not (isinstance(get_proper_type(known_type), AnyType) and self.chk.current_node_deferred): # Note: this call should match the one in narrow_declared_type(). if (skip_non_overlapping and not is_overlapping_types(known_type, restriction, prohibit_none_typevar_overlap=True)): return None return narrow_declared_type(known_type, restriction) return known_type def has_any_type(t: Type) -> bool: """Whether t contains an Any type""" return t.accept(HasAnyType()) class HasAnyType(types.TypeQuery[bool]): def __init__(self) -> None: super().__init__(any) def visit_any(self, t: AnyType) -> bool: return t.type_of_any != TypeOfAny.special_form # special forms are not real Any types def has_coroutine_decorator(t: Type) -> bool: """Whether t came from a function decorated with `@coroutine`.""" t = get_proper_type(t) return isinstance(t, Instance) and t.type.fullname == 'typing.AwaitableGenerator' def is_async_def(t: Type) -> bool: """Whether t came from a function defined using `async def`.""" # In check_func_def(), when we see a function decorated with # `@typing.coroutine` or `@async.coroutine`, we change the # return type to typing.AwaitableGenerator[...], so that its # type is compatible with either Generator or Awaitable. 
# But for the check here we need to know whether the original # function (before decoration) was an `async def`. The # AwaitableGenerator type conveniently preserves the original # type as its 4th parameter (3rd when using 0-origin indexing # :-), so that we can recover that information here. # (We really need to see whether the original, undecorated # function was an `async def`, which is orthogonal to its # decorations.) t = get_proper_type(t) if (isinstance(t, Instance) and t.type.fullname == 'typing.AwaitableGenerator' and len(t.args) >= 4): t = get_proper_type(t.args[3]) return isinstance(t, Instance) and t.type.fullname == 'typing.Coroutine' def is_non_empty_tuple(t: Type) -> bool: t = get_proper_type(t) return isinstance(t, TupleType) and bool(t.items) def is_duplicate_mapping(mapping: List[int], actual_kinds: List[int]) -> bool: # Multiple actuals can map to the same formal only if they both come from # varargs (*args and **kwargs); in this case at runtime it is possible that # there are no duplicates. We need to allow this, as the convention # f(..., *args, **kwargs) is common enough. return len(mapping) > 1 and not ( len(mapping) == 2 and actual_kinds[mapping[0]] == nodes.ARG_STAR and actual_kinds[mapping[1]] == nodes.ARG_STAR2) def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> CallableType: """Return a copy of a callable type with a different return type.""" return c.copy_modified(ret_type=new_ret_type) class ArgInferSecondPassQuery(types.TypeQuery[bool]): """Query whether an argument type should be inferred in the second pass. The result is True if the type has a type variable in a callable return type anywhere. For example, the result for Callable[[], T] is True if t is a type variable. 
""" def __init__(self) -> None: super().__init__(any) def visit_callable_type(self, t: CallableType) -> bool: return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery()) class HasTypeVarQuery(types.TypeQuery[bool]): """Visitor for querying whether a type has a type variable component.""" def __init__(self) -> None: super().__init__(any) def visit_type_var(self, t: TypeVarType) -> bool: return True def has_erased_component(t: Optional[Type]) -> bool: return t is not None and t.accept(HasErasedComponentsQuery()) class HasErasedComponentsQuery(types.TypeQuery[bool]): """Visitor for querying whether a type has an erased component.""" def __init__(self) -> None: super().__init__(any) def visit_erased_type(self, t: ErasedType) -> bool: return True def has_uninhabited_component(t: Optional[Type]) -> bool: return t is not None and t.accept(HasUninhabitedComponentsQuery()) class HasUninhabitedComponentsQuery(types.TypeQuery[bool]): """Visitor for querying whether a type has an UninhabitedType component.""" def __init__(self) -> None: super().__init__(any) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return True def arg_approximate_similarity(actual: Type, formal: Type) -> bool: """Return if caller argument (actual) is roughly compatible with signature arg (formal). This function is deliberately loose and will report two types are similar as long as their "shapes" are plausibly the same. This is useful when we're doing error reporting: for example, if we're trying to select an overload alternative and there's no exact match, we can use this function to help us identify which alternative the user might have *meant* to match. """ actual = get_proper_type(actual) formal = get_proper_type(formal) # Erase typevars: we'll consider them all to have the same "shape". 
if isinstance(actual, TypeVarType): actual = erase_to_union_or_bound(actual) if isinstance(formal, TypeVarType): formal = erase_to_union_or_bound(formal) # Callable or Type[...]-ish types def is_typetype_like(typ: ProperType) -> bool: return (isinstance(typ, TypeType) or (isinstance(typ, FunctionLike) and typ.is_type_obj()) or (isinstance(typ, Instance) and typ.type.fullname == "builtins.type")) if isinstance(formal, CallableType): if isinstance(actual, (CallableType, Overloaded, TypeType)): return True if is_typetype_like(actual) and is_typetype_like(formal): return True # Unions if isinstance(actual, UnionType): return any(arg_approximate_similarity(item, formal) for item in actual.relevant_items()) if isinstance(formal, UnionType): return any(arg_approximate_similarity(actual, item) for item in formal.relevant_items()) # TypedDicts if isinstance(actual, TypedDictType): if isinstance(formal, TypedDictType): return True return arg_approximate_similarity(actual.fallback, formal) # Instances # For instances, we mostly defer to the existing is_subtype check. if isinstance(formal, Instance): if isinstance(actual, CallableType): actual = actual.fallback if isinstance(actual, Overloaded): actual = actual.items()[0].fallback if isinstance(actual, TupleType): actual = tuple_fallback(actual) if isinstance(actual, Instance) and formal.type in actual.type.mro: # Try performing a quick check as an optimization return True # Fall back to a standard subtype check for the remaining kinds of type. return is_subtype(erasetype.erase_type(actual), erasetype.erase_type(formal)) def any_causes_overload_ambiguity(items: List[CallableType], return_types: List[Type], arg_types: List[Type], arg_kinds: List[int], arg_names: Optional[Sequence[Optional[str]]]) -> bool: """May an argument containing 'Any' cause ambiguous result type on call to overloaded function? 
Note that this sometimes returns True even if there is no ambiguity, since a correct implementation would be complex (and the call would be imprecisely typed due to Any types anyway). Args: items: Overload items matching the actual arguments arg_types: Actual argument types arg_kinds: Actual argument kinds arg_names: Actual argument names """ if all_same_types(return_types): return False actual_to_formal = [ map_formals_to_actuals( arg_kinds, arg_names, item.arg_kinds, item.arg_names, lambda i: arg_types[i]) for item in items ] for arg_idx, arg_type in enumerate(arg_types): if has_any_type(arg_type): matching_formals_unfiltered = [(item_idx, lookup[arg_idx]) for item_idx, lookup in enumerate(actual_to_formal) if lookup[arg_idx]] matching_returns = [] matching_formals = [] for item_idx, formals in matching_formals_unfiltered: matched_callable = items[item_idx] matching_returns.append(matched_callable.ret_type) # Note: if an actual maps to multiple formals of differing types within # a single callable, then we know at least one of those formals must be # a different type then the formal(s) in some other callable. # So it's safe to just append everything to the same list. for formal in formals: matching_formals.append(matched_callable.arg_types[formal]) if not all_same_types(matching_formals) and not all_same_types(matching_returns): # Any maps to multiple different types, and the return types of these items differ. return True return False def all_same_types(types: List[Type]) -> bool: if len(types) == 0: return True return all(is_same_type(t, types[0]) for t in types[1:]) def merge_typevars_in_callables_by_name( callables: Sequence[CallableType]) -> Tuple[List[CallableType], List[TypeVarDef]]: """Takes all the typevars present in the callables and 'combines' the ones with the same name. For example, suppose we have two callables with signatures "f(x: T, y: S) -> T" and "f(x: List[Tuple[T, S]]) -> Tuple[T, S]". 
Both callables use typevars named "T" and "S", but we treat them as distinct, unrelated typevars. (E.g. they could both have distinct ids.) If we pass in both callables into this function, it returns a a list containing two new callables that are identical in signature, but use the same underlying TypeVarDef and TypeVarType objects for T and S. This is useful if we want to take the output lists and "merge" them into one callable in some way -- for example, when unioning together overloads. Returns both the new list of callables and a list of all distinct TypeVarDef objects used. """ output = [] # type: List[CallableType] unique_typevars = {} # type: Dict[str, TypeVarType] variables = [] # type: List[TypeVarDef] for target in callables: if target.is_generic(): target = freshen_function_type_vars(target) rename = {} # Dict[TypeVarId, TypeVar] for tvdef in target.variables: name = tvdef.fullname if name not in unique_typevars: unique_typevars[name] = TypeVarType(tvdef) variables.append(tvdef) rename[tvdef.id] = unique_typevars[name] target = cast(CallableType, expand_type(target, rename)) output.append(target) return output, variables def is_literal_type_like(t: Optional[Type]) -> bool: """Returns 'true' if the given type context is potentially either a LiteralType, a Union of LiteralType, or something similar. 
""" t = get_proper_type(t) if t is None: return False elif isinstance(t, LiteralType): return True elif isinstance(t, UnionType): return any(is_literal_type_like(item) for item in t.items) elif isinstance(t, TypeVarType): return (is_literal_type_like(t.upper_bound) or any(is_literal_type_like(item) for item in t.values)) else: return False def try_getting_literal(typ: Type) -> ProperType: """If possible, get a more precise literal type for a given type.""" typ = get_proper_type(typ) if isinstance(typ, Instance) and typ.last_known_value is not None: return typ.last_known_value return typ def is_expr_literal_type(node: Expression) -> bool: """Returns 'true' if the given node is a Literal""" valid = ('typing.Literal', 'typing_extensions.Literal') if isinstance(node, IndexExpr): base = node.base return isinstance(base, RefExpr) and base.fullname in valid if isinstance(node, NameExpr): underlying = node.node return isinstance(underlying, TypeAlias) and isinstance(get_proper_type(underlying.target), LiteralType) return False def custom_equality_method(typ: Type) -> bool: """Does this type have a custom __eq__() method?""" typ = get_proper_type(typ) if isinstance(typ, Instance): method = typ.type.get('__eq__') if method and isinstance(method.node, (SYMBOL_FUNCBASE_TYPES, Decorator, Var)): if method.node.info: return not method.node.info.fullname.startswith('builtins.') return False if isinstance(typ, UnionType): return any(custom_equality_method(t) for t in typ.items) if isinstance(typ, TupleType): return custom_equality_method(tuple_fallback(typ)) if isinstance(typ, CallableType) and typ.is_type_obj(): # Look up __eq__ on the metaclass for class objects. return custom_equality_method(typ.fallback) if isinstance(typ, AnyType): # Avoid false positives in uncertain cases. return True # TODO: support other types (see ExpressionChecker.has_member())? 
return False def has_bytes_component(typ: Type, py2: bool = False) -> bool: """Is this one of builtin byte types, or a union that contains it?""" typ = get_proper_type(typ) if py2: byte_types = {'builtins.str', 'builtins.bytearray'} else: byte_types = {'builtins.bytes', 'builtins.bytearray'} if isinstance(typ, UnionType): return any(has_bytes_component(t) for t in typ.items) if isinstance(typ, Instance) and typ.type.fullname in byte_types: return True return False def type_info_from_type(typ: Type) -> Optional[TypeInfo]: """Gets the TypeInfo for a type, indirecting through things like type variables and tuples.""" typ = get_proper_type(typ) if isinstance(typ, FunctionLike) and typ.is_type_obj(): return typ.type_object() if isinstance(typ, TypeType): typ = typ.item if isinstance(typ, TypeVarType): typ = get_proper_type(typ.upper_bound) if isinstance(typ, TupleType): typ = tuple_fallback(typ) if isinstance(typ, Instance): return typ.type # A complicated type. Too tricky, give up. # TODO: Do something more clever here. 
return None def is_operator_method(fullname: Optional[str]) -> bool: if fullname is None: return False short_name = fullname.split('.')[-1] return ( short_name in nodes.op_methods.values() or short_name in nodes.reverse_op_methods.values() or short_name in nodes.unary_op_methods.values()) mypy-0.761/mypy/checkmember.py0000644€tŠÔÚ€2›s®0000012464113576752246022545 0ustar jukkaDROPBOX\Domain Users00000000000000"""Type checking of attribute access""" from typing import cast, Callable, Optional, Union, List from typing_extensions import TYPE_CHECKING from mypy.types import ( Type, Instance, AnyType, TupleType, TypedDictType, CallableType, FunctionLike, TypeVarDef, Overloaded, TypeVarType, UnionType, PartialType, TypeOfAny, LiteralType, DeletedType, NoneType, TypeType, has_type_vars, get_proper_type, ProperType ) from mypy.nodes import ( TypeInfo, FuncBase, Var, FuncDef, SymbolNode, SymbolTable, Context, MypyFile, TypeVarExpr, ARG_POS, ARG_STAR, ARG_STAR2, Decorator, OverloadedFuncDef, TypeAlias, TempNode, is_final_node, SYMBOL_FUNCBASE_TYPES, ) from mypy.messages import MessageBuilder from mypy.maptype import map_instance_to_supertype from mypy.expandtype import expand_type_by_instance, freshen_function_type_vars from mypy.erasetype import erase_typevars from mypy.plugin import AttributeContext from mypy.typeanal import set_any_tvars from mypy import message_registry from mypy import subtypes from mypy import meet from mypy.typeops import ( tuple_fallback, bind_self, erase_to_bound, class_callable, type_object_type_from_function, make_simplified_union, function_type, ) if TYPE_CHECKING: # import for forward declaration only import mypy.checker from mypy import state class MemberContext: """Information and objects needed to type check attribute access. Look at the docstring of analyze_member_access for more information. 
""" def __init__(self, is_lvalue: bool, is_super: bool, is_operator: bool, original_type: Type, context: Context, msg: MessageBuilder, chk: 'mypy.checker.TypeChecker', self_type: Optional[Type], module_symbol_table: Optional[SymbolTable] = None) -> None: self.is_lvalue = is_lvalue self.is_super = is_super self.is_operator = is_operator self.original_type = original_type self.self_type = self_type or original_type self.context = context # Error context self.msg = msg self.chk = chk self.module_symbol_table = module_symbol_table def builtin_type(self, name: str) -> Instance: return self.chk.named_type(name) def not_ready_callback(self, name: str, context: Context) -> None: self.chk.handle_cannot_determine_type(name, context) def copy_modified(self, *, messages: Optional[MessageBuilder] = None, self_type: Optional[Type] = None) -> 'MemberContext': mx = MemberContext(self.is_lvalue, self.is_super, self.is_operator, self.original_type, self.context, self.msg, self.chk, self.self_type, self.module_symbol_table) if messages is not None: mx.msg = messages if self_type is not None: mx.self_type = self_type return mx def analyze_member_access(name: str, typ: Type, context: Context, is_lvalue: bool, is_super: bool, is_operator: bool, msg: MessageBuilder, *, original_type: Type, chk: 'mypy.checker.TypeChecker', override_info: Optional[TypeInfo] = None, in_literal_context: bool = False, self_type: Optional[Type] = None, module_symbol_table: Optional[SymbolTable] = None) -> Type: """Return the type of attribute 'name' of 'typ'. The actual implementation is in '_analyze_member_access' and this docstring also applies to it. This is a general operation that supports various different variations: 1. lvalue or non-lvalue access (setter or getter access) 2. supertype access when using super() (is_super == True and 'override_info' should refer to the supertype) 'original_type' is the most precise inferred or declared type of the base object that we have available. 
When looking for an attribute of 'typ', we may perform recursive calls targeting the fallback type, and 'typ' may become some supertype of 'original_type'. 'original_type' is always preserved as the 'typ' type used in the initial, non-recursive call. The 'self_type' is a component of 'original_type' to which generic self should be bound (a narrower type that has a fallback to instance). Currently this is used only for union types. 'module_symbol_table' is passed to this function if 'typ' is actually a module and we want to keep track of the available attributes of the module (since they are not available via the type object directly) """ mx = MemberContext(is_lvalue, is_super, is_operator, original_type, context, msg, chk=chk, self_type=self_type, module_symbol_table=module_symbol_table) result = _analyze_member_access(name, typ, mx, override_info) possible_literal = get_proper_type(result) if (in_literal_context and isinstance(possible_literal, Instance) and possible_literal.last_known_value is not None): return possible_literal.last_known_value else: return result def _analyze_member_access(name: str, typ: Type, mx: MemberContext, override_info: Optional[TypeInfo] = None) -> Type: # TODO: This and following functions share some logic with subtypes.find_member; # consider refactoring. typ = get_proper_type(typ) if isinstance(typ, Instance): return analyze_instance_member_access(name, typ, mx, override_info) elif isinstance(typ, AnyType): # The base object has dynamic type. return AnyType(TypeOfAny.from_another_any, source_any=typ) elif isinstance(typ, UnionType): return analyze_union_member_access(name, typ, mx) elif isinstance(typ, FunctionLike) and typ.is_type_obj(): return analyze_type_callable_member_access(name, typ, mx) elif isinstance(typ, TypeType): return analyze_type_type_member_access(name, typ, mx, override_info) elif isinstance(typ, TupleType): # Actually look up from the fallback instance type. 
return _analyze_member_access(name, tuple_fallback(typ), mx, override_info) elif isinstance(typ, (TypedDictType, LiteralType, FunctionLike)): # Actually look up from the fallback instance type. return _analyze_member_access(name, typ.fallback, mx, override_info) elif isinstance(typ, NoneType): return analyze_none_member_access(name, typ, mx) elif isinstance(typ, TypeVarType): return _analyze_member_access(name, typ.upper_bound, mx, override_info) elif isinstance(typ, DeletedType): mx.msg.deleted_as_rvalue(typ, mx.context) return AnyType(TypeOfAny.from_error) if mx.chk.should_suppress_optional_error([typ]): return AnyType(TypeOfAny.from_error) return mx.msg.has_no_attr(mx.original_type, typ, name, mx.context, mx.module_symbol_table) # The several functions that follow implement analyze_member_access for various # types and aren't documented individually. def analyze_instance_member_access(name: str, typ: Instance, mx: MemberContext, override_info: Optional[TypeInfo]) -> Type: if name == '__init__' and not mx.is_super: # Accessing __init__ in statically typed code would compromise # type safety unless used via super(). mx.msg.fail(message_registry.CANNOT_ACCESS_INIT, mx.context) return AnyType(TypeOfAny.from_error) # The base object has an instance type. info = typ.type if override_info: info = override_info if (state.find_occurrences and info.name == state.find_occurrences[0] and name == state.find_occurrences[1]): mx.msg.note("Occurrence of '{}.{}'".format(*state.find_occurrences), mx.context) # Look up the member. First look up the method dictionary. 
method = info.get_method(name) if method: if method.is_property: assert isinstance(method, OverloadedFuncDef) first_item = cast(Decorator, method.items[0]) return analyze_var(name, first_item.var, typ, info, mx) if mx.is_lvalue: mx.msg.cant_assign_to_method(mx.context) signature = function_type(method, mx.builtin_type('builtins.function')) signature = freshen_function_type_vars(signature) if name == '__new__': # __new__ is special and behaves like a static method -- don't strip # the first argument. pass else: if isinstance(signature, FunctionLike) and name != '__call__': # TODO: use proper treatment of special methods on unions instead # of this hack here and below (i.e. mx.self_type). dispatched_type = meet.meet_types(mx.original_type, typ) signature = check_self_arg(signature, dispatched_type, method.is_class, mx.context, name, mx.msg) signature = bind_self(signature, mx.self_type, is_classmethod=method.is_class) typ = map_instance_to_supertype(typ, method.info) member_type = expand_type_by_instance(signature, typ) freeze_type_vars(member_type) return member_type else: # Not a method. return analyze_member_var_access(name, typ, info, mx) def analyze_type_callable_member_access(name: str, typ: FunctionLike, mx: MemberContext) -> Type: # Class attribute. # TODO super? ret_type = typ.items()[0].ret_type assert isinstance(ret_type, ProperType) if isinstance(ret_type, TupleType): ret_type = tuple_fallback(ret_type) if isinstance(ret_type, Instance): if not mx.is_operator: # When Python sees an operator (eg `3 == 4`), it automatically translates that # into something like `int.__eq__(3, 4)` instead of `(3).__eq__(4)` as an # optimization. # # While it normally it doesn't matter which of the two versions are used, it # does cause inconsistencies when working with classes. For example, translating # `int == int` to `int.__eq__(int)` would not work since `int.__eq__` is meant to # compare two int _instances_. 
What we really want is `type(int).__eq__`, which # is meant to compare two types or classes. # # This check makes sure that when we encounter an operator, we skip looking up # the corresponding method in the current instance to avoid this edge case. # See https://github.com/python/mypy/pull/1787 for more info. # TODO: do not rely on same type variables being present in all constructor overloads. result = analyze_class_attribute_access(ret_type, name, mx, original_vars=typ.items()[0].variables) if result: return result # Look up from the 'type' type. return _analyze_member_access(name, typ.fallback, mx) else: assert False, 'Unexpected type {}'.format(repr(ret_type)) def analyze_type_type_member_access(name: str, typ: TypeType, mx: MemberContext, override_info: Optional[TypeInfo]) -> Type: # Similar to analyze_type_callable_attribute_access. item = None fallback = mx.builtin_type('builtins.type') ignore_messages = mx.msg.copy() ignore_messages.disable_errors() if isinstance(typ.item, Instance): item = typ.item elif isinstance(typ.item, AnyType): mx = mx.copy_modified(messages=ignore_messages) return _analyze_member_access(name, fallback, mx, override_info) elif isinstance(typ.item, TypeVarType): upper_bound = get_proper_type(typ.item.upper_bound) if isinstance(upper_bound, Instance): item = upper_bound elif isinstance(upper_bound, TupleType): item = tuple_fallback(upper_bound) elif isinstance(typ.item, TupleType): item = tuple_fallback(typ.item) elif isinstance(typ.item, FunctionLike) and typ.item.is_type_obj(): item = typ.item.fallback elif isinstance(typ.item, TypeType): # Access member on metaclass object via Type[Type[C]] if isinstance(typ.item.item, Instance): item = typ.item.item.type.metaclass_type if item and not mx.is_operator: # See comment above for why operators are skipped result = analyze_class_attribute_access(item, name, mx, override_info) if result: if not (isinstance(get_proper_type(result), AnyType) and item.type.fallback_to_any): return result 
else: # We don't want errors on metaclass lookup for classes with Any fallback mx = mx.copy_modified(messages=ignore_messages) if item is not None: fallback = item.type.metaclass_type or fallback return _analyze_member_access(name, fallback, mx, override_info) def analyze_union_member_access(name: str, typ: UnionType, mx: MemberContext) -> Type: mx.msg.disable_type_names += 1 results = [] for subtype in typ.relevant_items(): # Self types should be bound to every individual item of a union. item_mx = mx.copy_modified(self_type=subtype) results.append(_analyze_member_access(name, subtype, item_mx)) mx.msg.disable_type_names -= 1 return make_simplified_union(results) def analyze_none_member_access(name: str, typ: NoneType, mx: MemberContext) -> Type: if mx.chk.should_suppress_optional_error([typ]): return AnyType(TypeOfAny.from_error) is_python_3 = mx.chk.options.python_version[0] >= 3 # In Python 2 "None" has exactly the same attributes as "object". Python 3 adds a single # extra attribute, "__bool__". if is_python_3 and name == '__bool__': return CallableType(arg_types=[], arg_kinds=[], arg_names=[], ret_type=mx.builtin_type('builtins.bool'), fallback=mx.builtin_type('builtins.function')) else: return _analyze_member_access(name, mx.builtin_type('builtins.object'), mx) def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo, mx: MemberContext) -> Type: """Analyse attribute access that does not target a method. This is logically part of analyze_member_access and the arguments are similar. original_type is the type of E in the expression E.var """ # It was not a method. Try looking up a variable. v = lookup_member_var_or_accessor(info, name, mx.is_lvalue) vv = v if isinstance(vv, Decorator): # The associated Var node of a decorator contains the type. v = vv.var if isinstance(vv, TypeInfo): # If the associated variable is a TypeInfo synthesize a Var node for # the purposes of type checking. 
        # This enables us to type check things
        # like accessing class attributes on an inner class.
        v = Var(name, type=type_object_type(vv, mx.builtin_type))
        v.info = info

    if isinstance(vv, TypeAlias) and isinstance(get_proper_type(vv.target), Instance):
        # Similar to the above TypeInfo case, we allow using
        # qualified type aliases in runtime context if it refers to an
        # instance type. For example:
        #     class C:
        #         A = List[int]
        #     x = C.A() <- this is OK
        typ = instance_alias_type(vv, mx.builtin_type)
        v = Var(name, type=typ)
        v.info = info

    if isinstance(v, Var):
        implicit = info[name].implicit

        # An assignment to final attribute is always an error,
        # independently of types.
        if mx.is_lvalue and not mx.chk.get_final_context():
            check_final_member(name, info, mx.msg, mx.context)

        return analyze_var(name, v, itype, info, mx, implicit=implicit)
    elif isinstance(v, FuncDef):
        assert False, "Did not expect a function"
    elif (not v and name not in ['__getattr__', '__setattr__', '__getattribute__'] and
          not mx.is_operator):
        # No member found directly: try custom __getattr__/__setattr__ hooks.
        if not mx.is_lvalue:
            for method_name in ('__getattribute__', '__getattr__'):
                method = info.get_method(method_name)
                # __getattribute__ is defined on builtins.object and returns Any, so without
                # the guard this search will always find object.__getattribute__ and conclude
                # that the attribute exists
                if method and method.info.fullname != 'builtins.object':
                    function = function_type(method, mx.builtin_type('builtins.function'))
                    bound_method = bind_self(function, mx.self_type)
                    typ = map_instance_to_supertype(itype, method.info)
                    getattr_type = get_proper_type(expand_type_by_instance(bound_method, typ))
                    if isinstance(getattr_type, CallableType):
                        result = getattr_type.ret_type

                        # Call the attribute hook before returning.
                        fullname = '{}.{}'.format(method.info.fullname, name)
                        hook = mx.chk.plugin.get_attribute_hook(fullname)
                        if hook:
                            result = hook(AttributeContext(get_proper_type(mx.original_type),
                                                           result, mx.context, mx.chk))
                        return result
        else:
            setattr_meth = info.get_method('__setattr__')
            if setattr_meth and setattr_meth.info.fullname != 'builtins.object':
                setattr_func = function_type(setattr_meth, mx.builtin_type('builtins.function'))
                bound_type = bind_self(setattr_func, mx.self_type)
                typ = map_instance_to_supertype(itype, setattr_meth.info)
                setattr_type = get_proper_type(expand_type_by_instance(bound_type, typ))
                if isinstance(setattr_type, CallableType) and len(setattr_type.arg_types) > 0:
                    # The assigned value must match the last __setattr__ parameter.
                    return setattr_type.arg_types[-1]

    if itype.type.fallback_to_any:
        return AnyType(TypeOfAny.special_form)

    # Could not find the member.
    if mx.is_super:
        mx.msg.undefined_in_superclass(name, mx.context)
        return AnyType(TypeOfAny.from_error)
    else:
        if mx.chk and mx.chk.should_suppress_optional_error([itype]):
            return AnyType(TypeOfAny.from_error)
        return mx.msg.has_no_attr(
            mx.original_type, itype, name, mx.context, mx.module_symbol_table
        )


def check_final_member(name: str, info: TypeInfo, msg: MessageBuilder, ctx: Context) -> None:
    """Give an error if the name being assigned was declared as final."""
    for base in info.mro:
        sym = base.names.get(name)
        if sym and is_final_node(sym.node):
            msg.cant_assign_to_final(name, attr_assign=True, ctx=ctx)


def analyze_descriptor_access(instance_type: Type,
                              descriptor_type: Type,
                              builtin_type: Callable[[str], Instance],
                              msg: MessageBuilder,
                              context: Context, *,
                              chk: 'mypy.checker.TypeChecker') -> Type:
    """Type check descriptor access.

    Arguments:
        instance_type: The type of the instance on which the descriptor
            attribute is being accessed (the type of ``a`` in ``a.f`` when
            ``f`` is a descriptor).
        descriptor_type: The type of the descriptor attribute being accessed
            (the type of ``f`` in ``a.f`` when ``f`` is a descriptor).
        context: The node defining the context of this inference.
    Return:
        The return type of the appropriate ``__get__`` overload for the descriptor.
    """
    instance_type = get_proper_type(instance_type)
    descriptor_type = get_proper_type(descriptor_type)

    if isinstance(descriptor_type, UnionType):
        # Map the access over union types
        return make_simplified_union([
            analyze_descriptor_access(instance_type, typ, builtin_type,
                                      msg, context, chk=chk)
            for typ in descriptor_type.items
        ])
    elif not isinstance(descriptor_type, Instance):
        return descriptor_type

    # Not a descriptor at all if there is no readable __get__.
    if not descriptor_type.type.has_readable_member('__get__'):
        return descriptor_type

    dunder_get = descriptor_type.type.get_method('__get__')

    if dunder_get is None:
        msg.fail(message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format(descriptor_type), context)
        return AnyType(TypeOfAny.from_error)

    function = function_type(dunder_get, builtin_type('builtins.function'))
    bound_method = bind_self(function, descriptor_type)
    typ = map_instance_to_supertype(descriptor_type, dunder_get.info)
    dunder_get_type = expand_type_by_instance(bound_method, typ)

    # Determine the (instance, owner) arguments for the __get__ call, per the
    # descriptor protocol: instance is None when accessed via the class.
    if isinstance(instance_type, FunctionLike) and instance_type.is_type_obj():
        owner_type = instance_type.items()[0].ret_type
        instance_type = NoneType()
    elif isinstance(instance_type, TypeType):
        owner_type = instance_type.item
        instance_type = NoneType()
    else:
        owner_type = instance_type

    _, inferred_dunder_get_type = chk.expr_checker.check_call(
        dunder_get_type,
        [TempNode(instance_type, context=context),
         TempNode(TypeType.make_normalized(owner_type), context=context)],
        [ARG_POS, ARG_POS], context)

    inferred_dunder_get_type = get_proper_type(inferred_dunder_get_type)
    if isinstance(inferred_dunder_get_type, AnyType):
        # check_call failed, and will have reported an error
        return inferred_dunder_get_type

    if not isinstance(inferred_dunder_get_type, CallableType):
        msg.fail(message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format(descriptor_type), context)
        return AnyType(TypeOfAny.from_error)

    return inferred_dunder_get_type.ret_type


def instance_alias_type(alias: TypeAlias,
                        builtin_type: Callable[[str], Instance]) -> Type:
    """Type of a type alias node targeting an instance, when appears in runtime context.

    As usual, we first erase any unbound type variables to Any.
    """
    target = get_proper_type(alias.target)  # type: Type
    assert isinstance(get_proper_type(target),
                      Instance), "Must be called only with aliases to classes"
    target = get_proper_type(set_any_tvars(alias, alias.line, alias.column))
    assert isinstance(target, Instance)
    tp = type_object_type(target.type, builtin_type)
    return expand_type_by_instance(tp, target)


def analyze_var(name: str,
                var: Var,
                itype: Instance,
                info: TypeInfo,
                mx: MemberContext, *,
                implicit: bool = False) -> Type:
    """Analyze access to an attribute via a Var node.

    This is conceptually part of analyze_member_access and the arguments are similar.

    itype is the class object in which var is defined
    original_type is the type of E in the expression E.var
    if implicit is True, the original Var was created as an assignment to self
    """
    # Found a member variable.
    itype = map_instance_to_supertype(itype, var.info)
    typ = var.type
    if typ:
        if isinstance(typ, PartialType):
            return mx.chk.handle_partial_var_type(typ, mx.is_lvalue, var, mx.context)
        if mx.is_lvalue and var.is_property and not var.is_settable_property:
            # TODO allow setting attributes in subclass (although it is probably an error)
            mx.msg.read_only_property(name, itype.type, mx.context)
        if mx.is_lvalue and var.is_classvar:
            mx.msg.cant_assign_to_classvar(name, mx.context)
        t = get_proper_type(expand_type_by_instance(typ, itype))
        result = t  # type: Type
        typ = get_proper_type(typ)
        if var.is_initialized_in_class and isinstance(typ, FunctionLike) and not typ.is_type_obj():
            if mx.is_lvalue:
                if var.is_property:
                    if not var.is_settable_property:
                        mx.msg.read_only_property(name, itype.type, mx.context)
                else:
                    mx.msg.cant_assign_to_method(mx.context)

            if not var.is_staticmethod:
                # Class-level function objects and classmethods become bound methods:
                # the former to the instance, the latter to the class.
                functype = typ
                # Use meet to narrow original_type to the dispatched type.
                # For example, assume
                # * A.f: Callable[[A1], None] where A1 <: A (maybe A1 == A)
                # * B.f: Callable[[B1], None] where B1 <: B (maybe B1 == B)
                # * x: Union[A1, B1]
                # In `x.f`, when checking `x` against A1 we assume x is compatible with A
                # and similarly for B1 when checking against B
                dispatched_type = meet.meet_types(mx.original_type, itype)
                signature = freshen_function_type_vars(functype)
                signature = check_self_arg(signature, dispatched_type, var.is_classmethod,
                                           mx.context, name, mx.msg)
                signature = bind_self(signature, mx.self_type, var.is_classmethod)
                expanded_signature = get_proper_type(expand_type_by_instance(signature, itype))
                freeze_type_vars(expanded_signature)
                if var.is_property:
                    # A property cannot have an overloaded type => the cast is fine.
                    assert isinstance(expanded_signature, CallableType)
                    result = expanded_signature.ret_type
                else:
                    result = expanded_signature
    else:
        if not var.is_ready:
            mx.not_ready_callback(var.name, mx.context)
        # Implicit 'Any' type.
        result = AnyType(TypeOfAny.special_form)
    fullname = '{}.{}'.format(var.info.fullname, name)
    hook = mx.chk.plugin.get_attribute_hook(fullname)
    # Descriptor access only applies to rvalue reads of explicitly typed attributes.
    if result and not mx.is_lvalue and not implicit:
        result = analyze_descriptor_access(mx.original_type, result, mx.builtin_type,
                                           mx.msg, mx.context, chk=mx.chk)
    if hook:
        result = hook(AttributeContext(get_proper_type(mx.original_type),
                                       result, mx.context, mx.chk))
    return result


def freeze_type_vars(member_type: Type) -> None:
    """Reset the meta level of all type variables bound in member_type to 0.

    Applies to the variables of a CallableType and of each item of an Overloaded type.
    """
    if not isinstance(member_type, ProperType):
        return
    if isinstance(member_type, CallableType):
        for v in member_type.variables:
            v.id.meta_level = 0
    if isinstance(member_type, Overloaded):
        for it in member_type.items():
            for v in it.variables:
                v.id.meta_level = 0


def lookup_member_var_or_accessor(info: TypeInfo, name: str,
                                  is_lvalue: bool) -> Optional[SymbolNode]:
    """Find the attribute/accessor node that refers to a member of a type."""
    # TODO handle lvalues
    node = info.get(name)
    if node:
        return node.node
    else:
        return None


def check_self_arg(functype: FunctionLike,
                   dispatched_arg_type: Type,
                   is_classmethod: bool,
                   context: Context, name: str,
                   msg: MessageBuilder) -> FunctionLike:
    """Check that an instance has a valid type for a method with annotated 'self'.

    For example if the method is defined as:
        class A:
            def f(self: S) -> T: ...
    then for 'x.f' we check that meet(type(x), A) <: S. If the method is overloaded, we
    select only overloads items that satisfy this requirement. If there are no matching
    overloads, an error is generated.

    Note: dispatched_arg_type uses a meet to select a relevant item in case if the
    original type of 'x' is a union. This is done because several special methods
    treat union types in ad-hoc manner, so we can't use MemberContext.self_type yet.
""" items = functype.items() if not items: return functype new_items = [] if is_classmethod: dispatched_arg_type = TypeType.make_normalized(dispatched_arg_type) for item in items: if not item.arg_types or item.arg_kinds[0] not in (ARG_POS, ARG_STAR): # No positional first (self) argument (*args is okay). msg.no_formal_self(name, item, context) # This is pretty bad, so just return the original signature if # there is at least one such error. return functype else: selfarg = item.arg_types[0] if subtypes.is_subtype(dispatched_arg_type, erase_typevars(erase_to_bound(selfarg))): new_items.append(item) if not new_items: # Choose first item for the message (it may be not very helpful for overloads). msg.incompatible_self_argument(name, dispatched_arg_type, items[0], is_classmethod, context) return functype if len(new_items) == 1: return new_items[0] return Overloaded(new_items) def analyze_class_attribute_access(itype: Instance, name: str, mx: MemberContext, override_info: Optional[TypeInfo] = None, original_vars: Optional[List[TypeVarDef]] = None ) -> Optional[Type]: """Analyze access to an attribute on a class object. itype is the return type of the class object callable, original_type is the type of E in the expression E.var, original_vars are type variables of the class callable (for generic classes). """ info = itype.type if override_info: info = override_info node = info.get(name) if not node: if info.fallback_to_any: return AnyType(TypeOfAny.special_form) return None is_decorated = isinstance(node.node, Decorator) is_method = is_decorated or isinstance(node.node, FuncBase) if mx.is_lvalue: if is_method: mx.msg.cant_assign_to_method(mx.context) if isinstance(node.node, TypeInfo): mx.msg.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, mx.context) # If a final attribute was declared on `self` in `__init__`, then it # can't be accessed on the class object. 
    if node.implicit and isinstance(node.node, Var) and node.node.is_final:
        mx.msg.fail(message_registry.CANNOT_ACCESS_FINAL_INSTANCE_ATTR
                    .format(node.node.name), mx.context)

    # An assignment to final attribute on class object is also always an error,
    # independently of types.
    if mx.is_lvalue and not mx.chk.get_final_context():
        check_final_member(name, info, mx.msg, mx.context)

    if info.is_enum and not (mx.is_lvalue or is_decorated or is_method):
        enum_literal = LiteralType(name, fallback=itype)
        # When we analyze enums, the corresponding Instance is always considered to be erased
        # due to how the signature of Enum.__new__ is `(cls: Type[_T], value: object) -> _T`
        # in typeshed. However, this is really more of an implementation detail of how Enums
        # are typed, and we really don't want to treat every single Enum value as if it were
        # from type variable substitution. So we reset the 'erased' field here.
        return itype.copy_modified(erased=False, last_known_value=enum_literal)

    t = node.type
    if t:
        if isinstance(t, PartialType):
            symnode = node.node
            assert isinstance(symnode, Var)
            return mx.chk.handle_partial_var_type(t, mx.is_lvalue, symnode, mx.context)

        # Find the class where method/variable was defined.
        if isinstance(node.node, Decorator):
            super_info = node.node.var.info  # type: Optional[TypeInfo]
        elif isinstance(node.node, (Var, SYMBOL_FUNCBASE_TYPES)):
            super_info = node.node.info
        else:
            super_info = None

        # Map the type to how it would look as a defining class. For example:
        #     class C(Generic[T]): ...
        #     class D(C[Tuple[T, S]]): ...
        #     D[int, str].method()
        # Here itype is D[int, str], isuper is C[Tuple[int, str]].
        if not super_info:
            isuper = None
        else:
            isuper = map_instance_to_supertype(itype, super_info)

        if isinstance(node.node, Var):
            assert isuper is not None
            # Check if original variable type has type variables. For example:
            #     class C(Generic[T]):
            #         x: T
            #     C.x  # Error, ambiguous access
            #     C[int].x  # Also an error, since C[int] is same as C at runtime
            if isinstance(t, TypeVarType) or has_type_vars(t):
                # Exception: access on Type[...], including first argument of class methods is OK.
                if not isinstance(get_proper_type(mx.original_type), TypeType) or node.implicit:
                    if node.node.is_classvar:
                        message = message_registry.GENERIC_CLASS_VAR_ACCESS
                    else:
                        message = message_registry.GENERIC_INSTANCE_VAR_CLASS_ACCESS
                    mx.msg.fail(message, mx.context)

                # Erase non-mapped variables, but keep mapped ones, even if there is an error.
                # In the above example this means that we infer following types:
                #     C.x -> Any
                #     C[int].x -> int
                t = erase_typevars(expand_type_by_instance(t, isuper))

        is_classmethod = ((is_decorated and cast(Decorator, node.node).func.is_class)
                          or (isinstance(node.node, FuncBase) and node.node.is_class))
        t = get_proper_type(t)
        if isinstance(t, FunctionLike) and is_classmethod:
            t = check_self_arg(t, mx.self_type, False, mx.context, name, mx.msg)
        result = add_class_tvars(t, isuper, is_classmethod,
                                 mx.self_type, original_vars=original_vars)
        # Descriptors only fire on rvalue reads.
        if not mx.is_lvalue:
            result = analyze_descriptor_access(mx.original_type, result, mx.builtin_type,
                                               mx.msg, mx.context, chk=mx.chk)
        return result
    elif isinstance(node.node, Var):
        mx.not_ready_callback(name, mx.context)
        return AnyType(TypeOfAny.special_form)

    if isinstance(node.node, TypeVarExpr):
        mx.msg.fail(message_registry.CANNOT_USE_TYPEVAR_AS_EXPRESSION.format(
            info.name, name), mx.context)
        return AnyType(TypeOfAny.from_error)

    if isinstance(node.node, TypeInfo):
        return type_object_type(node.node, mx.builtin_type)

    if isinstance(node.node, MypyFile):
        # Reference to a module object.
        return mx.builtin_type('types.ModuleType')

    if (isinstance(node.node, TypeAlias) and
            isinstance(get_proper_type(node.node.target), Instance)):
        return instance_alias_type(node.node, mx.builtin_type)

    if is_decorated:
        assert isinstance(node.node, Decorator)
        if node.node.type:
            return node.node.type
        else:
            mx.not_ready_callback(name, mx.context)
            return AnyType(TypeOfAny.from_error)
    else:
        assert isinstance(node.node, FuncBase)
        typ = function_type(node.node, mx.builtin_type('builtins.function'))
        # Note: if we are accessing class method on class object, the cls argument is bound.
        # Annotated and/or explicit class methods go through other code paths above, for
        # unannotated implicit class methods we do this here.
        if node.node.is_class:
            typ = bind_self(typ, is_classmethod=True)
        return typ


def add_class_tvars(t: ProperType, isuper: Optional[Instance],
                    is_classmethod: bool,
                    original_type: Type,
                    original_vars: Optional[List[TypeVarDef]] = None) -> Type:
    """Instantiate type variables during analyze_class_attribute_access,
    e.g T and Q in the following:

    class A(Generic[T]):
        @classmethod
        def foo(cls: Type[Q]) -> Tuple[T, Q]: ...

    class B(A[str]): pass
    B.foo()

    Args:
        t: Declared type of the method (or property)
        isuper: Current instance mapped to the superclass where method was defined, this
            is usually done by map_instance_to_supertype()
        is_classmethod: True if this method is decorated with @classmethod
        original_type: The value of the type B in the expression B.foo() or the corresponding
            component in case of a union (this is used to bind the self-types)
        original_vars: Type variables of the class callable on which the method was accessed
    Returns:
        Expanded method type with added type variables (when needed).
    """
    # TODO: verify consistency between Q and T

    # We add class type variables if the class method is accessed on class object
    # without applied type arguments, this matches the behavior of __init__().
    # For example (continuing the example in docstring):
    #     A       # The type of callable is def [T] () -> A[T], _not_ def () -> A[Any]
    #     A[int]  # The type of callable is def () -> A[int]
    # and
    #     A.foo       # The type is generic def [T] () -> Tuple[T, A[T]]
    #     A[int].foo  # The type is non-generic def () -> Tuple[int, A[int]]
    #
    # This behaviour is useful for defining alternative constructors for generic classes.
    # To achieve such behaviour, we add the class type variables that are still free
    # (i.e. appear in the return type of the class object on which the method was accessed).
    if isinstance(t, CallableType):
        tvars = original_vars if original_vars is not None else []
        if is_classmethod:
            t = freshen_function_type_vars(t)
            t = bind_self(t, original_type, is_classmethod=True)
            assert isuper is not None
            t = cast(CallableType, expand_type_by_instance(t, isuper))
            freeze_type_vars(t)
        return t.copy_modified(variables=tvars + t.variables)
    elif isinstance(t, Overloaded):
        # Process each overload item independently and reassemble.
        return Overloaded([cast(CallableType, add_class_tvars(item, isuper,
                                                              is_classmethod, original_type,
                                                              original_vars=original_vars))
                           for item in t.items()])
    if isuper is not None:
        t = cast(ProperType, expand_type_by_instance(t, isuper))
    return t


def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) -> ProperType:
    """Return the type of a type object.

    For a generic type G with type variables T
    and S the type is generally of form

      Callable[..., G[T, S]]

    where ... are argument types for the __init__/__new__ method (without the self
    argument). Also, the fallback type will be 'type' instead of 'function'.
    """

    # We take the type from whichever of __init__ and __new__ is first
    # in the MRO, preferring __init__ if there is a tie.
    init_method = info.get('__init__')
    new_method = info.get('__new__')
    if not init_method or not is_valid_constructor(init_method.node):
        # Must be an invalid class definition.
        return AnyType(TypeOfAny.from_error)
    # There *should* always be a __new__ method except the test stubs
    # lack it, so just copy init_method in that situation
    new_method = new_method or init_method
    if not is_valid_constructor(new_method.node):
        # Must be an invalid class definition.
        return AnyType(TypeOfAny.from_error)

    # The two is_valid_constructor() checks ensure this.
    assert isinstance(new_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator))
    assert isinstance(init_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator))

    init_index = info.mro.index(init_method.node.info)
    new_index = info.mro.index(new_method.node.info)

    fallback = info.metaclass_type or builtin_type('builtins.type')
    if init_index < new_index:
        method = init_method.node  # type: Union[FuncBase, Decorator]
        is_new = False
    elif init_index > new_index:
        method = new_method.node
        is_new = True
    else:
        if init_method.node.info.fullname == 'builtins.object':
            # Both are defined by object.  But if we've got a bogus
            # base class, we can't know for sure, so check for that.
            if info.fallback_to_any:
                # Construct a universal callable as the prototype.
                any_type = AnyType(TypeOfAny.special_form)
                sig = CallableType(arg_types=[any_type, any_type],
                                   arg_kinds=[ARG_STAR, ARG_STAR2],
                                   arg_names=["_args", "_kwds"],
                                   ret_type=any_type,
                                   fallback=builtin_type('builtins.function'))
                return class_callable(sig, info, fallback, None, is_new=False)

        # Otherwise prefer __init__ in a tie. It isn't clear that this
        # is the right thing, but __new__ caused problems with
        # typeshed (#5647).
        method = init_method.node
        is_new = False

    # Construct callable type based on signature of __init__. Adjust
    # return type and insert type arguments.
    if isinstance(method, FuncBase):
        t = function_type(method, fallback)
    else:
        assert isinstance(method.type, ProperType)
        assert isinstance(method.type, FunctionLike)  # is_valid_constructor() ensures this
        t = method.type
    return type_object_type_from_function(t, info, method.info, fallback, is_new)


def is_valid_constructor(n: Optional[SymbolNode]) -> bool:
    """Does this node represent a valid constructor method?

    This includes normal functions, overloaded functions, and decorators
    that return a callable type.
    """
    if isinstance(n, FuncBase):
        return True
    if isinstance(n, Decorator):
        return isinstance(get_proper_type(n.type), FunctionLike)
    return False
mypy-0.761/mypy/checkstrformat.py0000644€tŠÔÚ€2›s®0000013635413576752246023313 0ustar jukkaDROPBOX\Domain Users00000000000000
"""
Format expression type checker.

This file is conceptually part of ExpressionChecker and TypeChecker. Main functionality
is located in StringFormatterChecker.check_str_format_call() for '{}'.format(), and in
StringFormatterChecker.check_str_interpolation() for printf-style % interpolation.

Note that although at runtime format strings are parsed using custom parsers,
here we use a regexp-based approach. This way we 99% match runtime behaviour while keeping
implementation simple.
""" import re from typing import ( cast, List, Tuple, Dict, Callable, Union, Optional, Pattern, Match, Set, Any ) from typing_extensions import Final, TYPE_CHECKING from mypy.types import ( Type, AnyType, TupleType, Instance, UnionType, TypeOfAny, get_proper_type, TypeVarType, CallableType, LiteralType, get_proper_types ) from mypy.nodes import ( StrExpr, BytesExpr, UnicodeExpr, TupleExpr, DictExpr, Context, Expression, StarExpr, CallExpr, IndexExpr, MemberExpr, TempNode, ARG_POS, ARG_STAR, ARG_NAMED, ARG_STAR2, SYMBOL_FUNCBASE_TYPES, Decorator, Var, Node, MypyFile, ExpressionStmt, NameExpr, IntExpr ) import mypy.errorcodes as codes if TYPE_CHECKING: # break import cycle only needed for mypy import mypy.checker import mypy.checkexpr from mypy import message_registry from mypy.messages import MessageBuilder from mypy.maptype import map_instance_to_supertype from mypy.typeops import tuple_fallback from mypy.subtypes import is_subtype from mypy.parse import parse FormatStringExpr = Union[StrExpr, BytesExpr, UnicodeExpr] Checkers = Tuple[Callable[[Expression], None], Callable[[Type], None]] MatchMap = Dict[Tuple[int, int], Match[str]] # span -> match def compile_format_re() -> Pattern[str]: """Construct regexp to match format conversion specifiers in % interpolation. See https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting The regexp is intentionally a bit wider to report better errors. """ key_re = r'(\(([^()]*)\))?' # (optional) parenthesised sequence of characters. flags_re = r'([#0\-+ ]*)' # (optional) sequence of flags. width_re = r'(\*|[1-9][0-9]*)?' # (optional) minimum field width (* or numbers). precision_re = r'(?:\.(\*|[0-9]+)?)?' # (optional) . followed by * of numbers. length_mod_re = r'[hlL]?' # (optional) length modifier (unused). type_re = r'(.)?' # conversion type. 
format_re = '%' + key_re + flags_re + width_re + precision_re + length_mod_re + type_re return re.compile(format_re) def compile_new_format_re(custom_spec: bool) -> Pattern[str]: """Construct regexps to match format conversion specifiers in str.format() calls. See After https://docs.python.org/3/library/string.html#formatspec for specifications. The regexps are intentionally wider, to report better errors, instead of just not matching. """ # Field (optional) is an integer/identifier possibly followed by several .attr and [index]. field = r'(?P(?P[^.[!:]*)([^:!]+)?)' # Conversion (optional) is ! followed by one of letters for forced repr(), str(), or ascii(). conversion = r'(?P![^:])?' # Format specification (optional) follows its own mini-language: if not custom_spec: # Fill and align is valid for all builtin types. fill_align = r'(?P.?[<>=^])?' # Number formatting options are only valid for int, float, complex, and Decimal, # except if only width is given (it is valid for all types). # This contains sign, flags (sign, # and/or 0), width, grouping (_ or ,) and precision. num_spec = r'(?P[+\- ]?#?0?)(?P\d+)?[_,]?(?P\.\d+)?' # The last element is type. type = r'(?P.)?' # only some are supported, but we want to give a better error format_spec = r'(?P:' + fill_align + num_spec + type + r')?' else: # Custom types can define their own form_spec using __format__(). format_spec = r'(?P:.*)?' return re.compile(field + conversion + format_spec) FORMAT_RE = compile_format_re() # type: Final FORMAT_RE_NEW = compile_new_format_re(False) # type: Final FORMAT_RE_NEW_CUSTOM = compile_new_format_re(True) # type: Final DUMMY_FIELD_NAME = '__dummy_name__' # type: Final # Format types supported by str.format() for builtin classes. SUPPORTED_TYPES_NEW = {'b', 'c', 'd', 'e', 'E', 'f', 'F', 'g', 'G', 'n', 'o', 's', 'x', 'X', '%'} # type: Final # Types that require either int or float. 
NUMERIC_TYPES_OLD = {'d', 'i', 'o', 'u', 'x', 'X',
                     'e', 'E', 'f', 'F', 'g', 'G'}  # type: Final
NUMERIC_TYPES_NEW = {'b', 'd', 'o', 'e', 'E', 'f', 'F',
                     'g', 'G', 'n', 'x', 'X', '%'}  # type: Final

# These types accept _only_ int.
REQUIRE_INT_OLD = {'o', 'x', 'X'}  # type: Final
REQUIRE_INT_NEW = {'b', 'd', 'o', 'x', 'X'}  # type: Final

# These types fall back to SupportsFloat with % (other fall back to SupportsInt)
FLOAT_TYPES = {'e', 'E', 'f', 'F', 'g', 'G'}  # type: Final


class ConversionSpecifier:
    """Parsed representation of one conversion specifier.

    Used for both printf-style % specifiers and str.format() replacement fields.
    """

    def __init__(self, key: Optional[str],
                 flags: str, width: str, precision: str, type: str,
                 format_spec: Optional[str] = None,
                 conversion: Optional[str] = None,
                 field: Optional[str] = None) -> None:
        self.key = key
        self.flags = flags
        self.width = width
        self.precision = precision
        self.type = type
        # Used only for str.format() calls (it may be custom for types with __format__()).
        self.format_spec = format_spec
        self.non_standard_format_spec = False
        # Used only for str.format() calls.
        self.conversion = conversion
        # Full formatted expression (i.e. key plus following attributes and/or indexes).
        # Used only for str.format() calls.
        self.field = field

    @classmethod
    def from_match(cls, match_obj: Match[str],
                   non_standard_spec: bool = False) -> 'ConversionSpecifier':
        """Construct specifier from match object resulted from parsing str.format() call."""
        match = cast(Any, match_obj)  # TODO: remove this once typeshed is fixed.
        if non_standard_spec:
            # A custom __format__() spec: keep it opaque, only key/conversion/field apply.
            spec = cls(match.group('key'),
                       flags='', width='', precision='', type='',
                       format_spec=match.group('format_spec'),
                       conversion=match.group('conversion'),
                       field=match.group('field'))
            spec.non_standard_format_spec = True
            return spec
        # Replace unmatched optional groups with empty matches (for convenience).
        return cls(match.group('key'),
                   flags=match.group('flags') or '', width=match.group('width') or '',
                   precision=match.group('precision') or '', type=match.group('type') or '',
                   format_spec=match.group('format_spec'),
                   conversion=match.group('conversion'),
                   field=match.group('field'))

    def has_key(self) -> bool:
        # True for explicitly keyed specifiers such as '%(name)s' or '{0}'.
        return self.key is not None

    def has_star(self) -> bool:
        # '*' means width/precision comes from the argument list at runtime.
        return self.width == '*' or self.precision == '*'


class StringFormatterChecker:
    """String interpolation/formatter type checker.

    This class works closely together with checker.ExpressionChecker.
    """

    # Some services are provided by a TypeChecker instance.
    chk = None  # type: mypy.checker.TypeChecker
    # This is shared with TypeChecker, but stored also here for convenience.
    msg = None  # type: MessageBuilder
    # Some services are provided by a ExpressionChecker instance.
    exprchk = None  # type: mypy.checkexpr.ExpressionChecker

    def __init__(self,
                 exprchk: 'mypy.checkexpr.ExpressionChecker',
                 chk: 'mypy.checker.TypeChecker',
                 msg: MessageBuilder) -> None:
        """Construct an expression type checker."""
        self.chk = chk
        self.exprchk = exprchk
        self.msg = msg
        # This flag is used to track Python 2 corner case where for example
        # '%s, %d' % (u'abc', 42) returns u'abc, 42' (i.e. unicode, not a string).
        self.unicode_upcast = False

    def check_str_format_call(self, call: CallExpr, format_value: str) -> None:
        """Perform more precise checks for str.format() calls when possible.

        Currently the checks are performed for:
        * Actual string literals
        * Literal types with string values
        * Final names with string values

        The checks that we currently perform:
        * Check generic validity (e.g. unmatched { or }, and {} in invalid positions)
        * Check consistency of specifiers' auto-numbering
        * Verify that replacements can be found for all conversion specifiers, and all
          arguments were used
        * Non-standard format specs are only allowed for types with custom __format__
        * Type check replacements with accessors applied (if any).
        * Verify that specifier type is known and matches replacement type
        * Perform special checks for some specifier types:
          - 'c' requires a single character string
          - 's' must not accept bytes
          - non-empty flags are only allowed for numeric types
        """
        conv_specs = self.parse_format_value(format_value, call)
        if conv_specs is None:
            return
        if not self.auto_generate_keys(conv_specs, call):
            return
        self.check_specs_in_format_call(call, conv_specs, format_value)

    def parse_format_value(self, format_value: str, ctx: Context,
                           nested: bool = False) -> Optional[List[ConversionSpecifier]]:
        """Parse format string into list of conversion specifiers.

        The specifiers may be nested (two levels maximum), in this case they are ordered as
        '{0:{1}}, {2:{3}{4}}'. Return None in case of an error.
        """
        top_targets = self.find_non_escaped_targets(format_value, ctx)
        if top_targets is None:
            return None

        result = []  # type: List[ConversionSpecifier]
        for target in top_targets:
            # Try the standard replacement-field grammar first, then the
            # permissive one for custom __format__() specs.
            match = FORMAT_RE_NEW.fullmatch(target)
            if match:
                conv_spec = ConversionSpecifier.from_match(match)
            else:
                custom_match = FORMAT_RE_NEW_CUSTOM.fullmatch(target)
                if custom_match:
                    conv_spec = ConversionSpecifier.from_match(custom_match,
                                                               non_standard_spec=True)
                else:
                    self.msg.fail('Invalid conversion specifier in format string',
                                  ctx, code=codes.STRING_FORMATTING)
                    return None

            if conv_spec.key and ('{' in conv_spec.key or '}' in conv_spec.key):
                self.msg.fail('Conversion value must not contain { or }',
                              ctx, code=codes.STRING_FORMATTING)
                return None
            result.append(conv_spec)

            # Parse nested conversions that are allowed in format specifier.
            if (conv_spec.format_spec and conv_spec.non_standard_format_spec
                    and ('{' in conv_spec.format_spec or '}' in conv_spec.format_spec)):
                if nested:
                    self.msg.fail('Formatting nesting must be at most two levels deep',
                                  ctx, code=codes.STRING_FORMATTING)
                    return None
                sub_conv_specs = self.parse_format_value(conv_spec.format_spec,
                                                         ctx=ctx, nested=True)
                if sub_conv_specs is None:
                    return None
                result.extend(sub_conv_specs)
        return result

    def find_non_escaped_targets(self, format_value: str, ctx: Context) -> Optional[List[str]]:
        """Return list of raw (un-parsed) format specifiers in format string.

        Format specifiers don't include enclosing braces. We don't use regexp for
        this because they don't work well with nested/repeated patterns
        (both greedy and non-greedy), and these are heavily used internally for
        representation of f-strings.

        Return None in case of an error.
        """
        result = []
        next_spec = ''
        pos = 0
        nesting = 0
        # Single left-to-right scan; 'nesting' tracks the brace depth we are in.
        while pos < len(format_value):
            c = format_value[pos]
            if not nesting:
                # Skip any paired '{{' and '}}', enter nesting on '{', report error on '}'.
                if c == '{':
                    if pos < len(format_value) - 1 and format_value[pos + 1] == '{':
                        pos += 1
                    else:
                        nesting = 1
                if c == '}':
                    if pos < len(format_value) - 1 and format_value[pos + 1] == '}':
                        pos += 1
                    else:
                        self.msg.fail('Invalid conversion specifier in format string:'
                                      ' unexpected }', ctx, code=codes.STRING_FORMATTING)
                        return None
            else:
                # Adjust nesting level, then either continue adding chars or move on.
                if c == '{':
                    nesting += 1
                if c == '}':
                    nesting -= 1
                if nesting:
                    next_spec += c
                else:
                    result.append(next_spec)
                    next_spec = ''
            pos += 1
        if nesting:
            self.msg.fail('Invalid conversion specifier in format string:'
                          ' unmatched {', ctx, code=codes.STRING_FORMATTING)
            return None
        return result

    def check_specs_in_format_call(self, call: CallExpr,
                                   specs: List[ConversionSpecifier],
                                   format_value: str) -> None:
        """Perform pairwise checks for conversion specifiers vs their replacements.
        The core logic for format checking is implemented in this method.
        """
        assert all(s.key for s in specs), "Keys must be auto-generated first!"
        replacements = self.find_replacements_in_call(call,
                                                      [cast(str, s.key) for s in specs])
        assert len(replacements) == len(specs)
        for spec, repl in zip(specs, replacements):
            repl = self.apply_field_accessors(spec, repl, ctx=call)
            actual_type = repl.type if isinstance(repl, TempNode) \
                else self.chk.type_map.get(repl)
            assert actual_type is not None

            # Special case custom formatting.
            if (spec.format_spec and spec.non_standard_format_spec and
                    # Exclude "dynamic" specifiers (i.e. containing nested formatting).
                    not ('{' in spec.format_spec or '}' in spec.format_spec)):
                if (not custom_special_method(actual_type, '__format__', check_all=True) or
                        spec.conversion):
                    # TODO: add support for some custom specs like datetime?
                    self.msg.fail('Unrecognized format'
                                  ' specification "{}"'.format(spec.format_spec[1:]),
                                  call, code=codes.STRING_FORMATTING)
                    continue
            # Adjust expected and actual types.
            if not spec.type:
                expected_type = AnyType(TypeOfAny.special_form)  # type: Optional[Type]
            else:
                assert isinstance(call.callee, MemberExpr)
                if isinstance(call.callee.expr, (StrExpr, UnicodeExpr)):
                    format_str = call.callee.expr
                else:
                    format_str = StrExpr(format_value)
                expected_type = self.conversion_type(spec.type, call, format_str,
                                                     format_call=True)
            if spec.conversion is not None:
                # If the explicit conversion is given, then explicit conversion is called _first_.
                if spec.conversion[1] not in 'rsa':
                    self.msg.fail('Invalid conversion type "{}",'
                                  ' must be one of "r", "s" or "a"'.format(spec.conversion[1]),
                                  call, code=codes.STRING_FORMATTING)
                actual_type = self.named_type('builtins.str')

            # Perform the checks for given types.
            if expected_type is None:
                continue

            # Check each union item separately so custom __format__ members are honored.
            a_type = get_proper_type(actual_type)
            actual_items = (get_proper_types(a_type.items)
                            if isinstance(a_type, UnionType) else [a_type])
            for a_type in actual_items:
                if custom_special_method(a_type, '__format__'):
                    continue
                self.check_placeholder_type(a_type, expected_type, call)
                self.perform_special_format_checks(spec, call, repl, a_type, expected_type)

    def perform_special_format_checks(self, spec: ConversionSpecifier, call: CallExpr,
                                      repl: Expression, actual_type: Type,
                                      expected_type: Type) -> None:
        """Additional per-specifier checks: 'c' needs a single char, bytes with
        's' on Python 3, and numeric-only flags."""
        # TODO: try refactoring to combine this logic with % formatting.
        if spec.type == 'c':
            if isinstance(repl, (StrExpr, BytesExpr)) and len(repl.value) != 1:
                self.msg.requires_int_or_char(call, format_call=True)
            c_typ = get_proper_type(self.chk.type_map[repl])
            if isinstance(c_typ, Instance) and c_typ.last_known_value:
                c_typ = c_typ.last_known_value
            if isinstance(c_typ, LiteralType) and isinstance(c_typ.value, str):
                if len(c_typ.value) != 1:
                    self.msg.requires_int_or_char(call, format_call=True)
        if (not spec.type or spec.type == 's') and not spec.conversion:
            if self.chk.options.python_version >= (3, 0):
                if has_type_component(actual_type, 'builtins.bytes'):
                    self.msg.fail("On Python 3 '{}'.format(b'abc') produces \"b'abc'\";"
                                  " use !r if this is a desired behavior", call,
                                  code=codes.STR_BYTES_PY3)
        if spec.flags:
            numeric_types = UnionType([self.named_type('builtins.int'),
                                       self.named_type('builtins.float')])
            if (spec.type and spec.type not in NUMERIC_TYPES_NEW or
                    not spec.type and not is_subtype(actual_type, numeric_types) and
                    not custom_special_method(actual_type, '__format__')):
                self.msg.fail('Numeric flags are only allowed for numeric types', call,
                              code=codes.STRING_FORMATTING)

    def find_replacements_in_call(self, call: CallExpr,
                                  keys: List[str]) -> List[Expression]:
        """Find replacement expression for every specifier in str.format() call.

        In case of an error use TempNode(AnyType).
""" result = [] # type: List[Expression] used = set() # type: Set[Expression] for key in keys: if key.isdecimal(): expr = self.get_expr_by_position(int(key), call) if not expr: self.msg.fail('Cannot find replacement for positional' ' format specifier {}'.format(key), call, code=codes.STRING_FORMATTING) expr = TempNode(AnyType(TypeOfAny.from_error)) else: expr = self.get_expr_by_name(key, call) if not expr: self.msg.fail('Cannot find replacement for named' ' format specifier "{}"'.format(key), call, code=codes.STRING_FORMATTING) expr = TempNode(AnyType(TypeOfAny.from_error)) result.append(expr) if not isinstance(expr, TempNode): used.add(expr) # Strictly speaking not using all replacements is not a type error, but most likely # a typo in user code, so we show an error like we do for % formatting. total_explicit = len([kind for kind in call.arg_kinds if kind in (ARG_POS, ARG_NAMED)]) if len(used) < total_explicit: self.msg.too_many_string_formatting_arguments(call) return result def get_expr_by_position(self, pos: int, call: CallExpr) -> Optional[Expression]: """Get positional replacement expression from '{0}, {1}'.format(x, y, ...) call. If the type is from *args, return TempNode(). Return None in case of an error. """ pos_args = [arg for arg, kind in zip(call.args, call.arg_kinds) if kind == ARG_POS] if pos < len(pos_args): return pos_args[pos] star_args = [arg for arg, kind in zip(call.args, call.arg_kinds) if kind == ARG_STAR] if not star_args: return None # Fall back to *args when present in call. star_arg = star_args[0] varargs_type = get_proper_type(self.chk.type_map[star_arg]) if (not isinstance(varargs_type, Instance) or not varargs_type.type.has_base('typing.Sequence')): # Error should be already reported. 
    def get_expr_by_name(self, key: str, call: CallExpr) -> Optional[Expression]:
        """Get named replacement expression from '{name}'.format(name=...) call.

        If the type is from **kwargs, return TempNode() holding the inferred value
        type. Return None in case of an error.
        """
        named_args = [arg for arg, kind, name in zip(call.args, call.arg_kinds, call.arg_names)
                      if kind == ARG_NAMED and name == key]
        if named_args:
            return named_args[0]
        star_args_2 = [arg for arg, kind in zip(call.args, call.arg_kinds)
                       if kind == ARG_STAR2]
        if not star_args_2:
            return None

        # Fall back to **kwargs when present in call.
        star_arg_2 = star_args_2[0]
        kwargs_type = get_proper_type(self.chk.type_map[star_arg_2])
        if (not isinstance(kwargs_type, Instance) or not
                kwargs_type.type.has_base('typing.Mapping')):
            # Error should be already reported.
            return TempNode(AnyType(TypeOfAny.special_form))
        # Map the **kwargs type onto Mapping to extract its value type.
        any_type = AnyType(TypeOfAny.special_form)
        mapping_info = self.chk.named_generic_type('typing.Mapping',
                                                   [any_type, any_type]).type
        return TempNode(map_instance_to_supertype(kwargs_type, mapping_info).args[1])

    def auto_generate_keys(self, all_specs: List[ConversionSpecifier],
                           ctx: Context) -> bool:
        """Translate '{} {name} {}' to '{0} {name} {1}'.

        Mutates the specifiers in place. Return True if generation was successful,
        otherwise report an error and return false.
        """
        some_defined = any(s.key and s.key.isdecimal() for s in all_specs)
        all_defined = all(bool(s.key) for s in all_specs)
        if some_defined and not all_defined:
            # Mixing '{}' with '{0}' is an error at runtime too.
            self.msg.fail('Cannot combine automatic field numbering and'
                          ' manual field specification', ctx,
                          code=codes.STRING_FORMATTING)
            return False
        if all_defined:
            return True
        next_index = 0
        for spec in all_specs:
            if not spec.key:
                str_index = str(next_index)
                spec.key = str_index
                # Update also the full field (i.e. turn {.x} into {0.x}).
                if not spec.field:
                    spec.field = str_index
                else:
                    spec.field = str_index + spec.field
                next_index += 1
        return True

    def apply_field_accessors(self, spec: ConversionSpecifier, repl: Expression,
                              ctx: Context) -> Expression:
        """Transform and validate expr in '{.attr[item]}'.format(expr) into expr.attr['item'].

        If validation fails, return TempNode(AnyType).
        """
        assert spec.key, "Keys must be auto-generated first!"
        if spec.field == spec.key:
            # No accessor suffix -- nothing to transform.
            return repl
        assert spec.field

        # This is a bit of a dirty trick, but it looks like this is the simplest way:
        # parse the accessor suffix as real Python by prepending a dummy name.
        temp_errors = self.msg.clean_copy().errors
        dummy = DUMMY_FIELD_NAME + spec.field[len(spec.key):]
        temp_ast = parse(dummy, fnam='', module=None,
                         options=self.chk.options, errors=temp_errors)  # type: Node
        if temp_errors.is_errors():
            self.msg.fail('Syntax error in format specifier "{}"'.format(spec.field),
                          ctx, code=codes.STRING_FORMATTING)
            return TempNode(AnyType(TypeOfAny.from_error))

        # These asserts are guaranteed by the original regexp.
        assert isinstance(temp_ast, MypyFile)
        temp_ast = temp_ast.defs[0]
        assert isinstance(temp_ast, ExpressionStmt)
        temp_ast = temp_ast.expr
        if not self.validate_and_transform_accessors(temp_ast, repl, spec, ctx=ctx):
            return TempNode(AnyType(TypeOfAny.from_error))

        # Check if there are any other errors (like missing members).
        # TODO: fix column to point to actual start of the format specifier _within_ string.
        temp_ast.line = ctx.line
        temp_ast.column = ctx.column
        self.exprchk.accept(temp_ast)
        return temp_ast
    def validate_and_transform_accessors(self, temp_ast: Expression,
                                         original_repl: Expression,
                                         spec: ConversionSpecifier,
                                         ctx: Context) -> bool:
        """Validate and transform (in-place) format field accessors.

        On error, report it and return False. The transformations include replacing
        the dummy variable with actual replacement expression and translating any name
        expressions in an index into strings, so that this will work:

            class User(TypedDict):
                name: str
                id: int
            u: User
            '{[id]:d} -> {[name]}'.format(u)
        """
        if not isinstance(temp_ast, (MemberExpr, IndexExpr)):
            self.msg.fail('Only index and member expressions are allowed in'
                          ' format field accessors; got "{}"'.format(spec.field),
                          ctx, code=codes.STRING_FORMATTING)
            return False
        if isinstance(temp_ast, MemberExpr):
            node = temp_ast.expr
        else:
            node = temp_ast.base
            if not isinstance(temp_ast.index, (NameExpr, IntExpr)):
                assert spec.key, "Call this method only after auto-generating keys!"
                assert spec.field
                self.msg.fail('Invalid index expression in format field'
                              ' accessor "{}"'.format(spec.field[len(spec.key):]), ctx,
                              code=codes.STRING_FORMATTING)
                return False
            if isinstance(temp_ast.index, NameExpr):
                # '[name]' in a format string means the string key 'name'.
                temp_ast.index = StrExpr(temp_ast.index.name)
        if isinstance(node, NameExpr) and node.name == DUMMY_FIELD_NAME:
            # Replace it with the actual replacement expression.
            assert isinstance(temp_ast, (IndexExpr, MemberExpr))  # XXX: this is redundant
            if isinstance(temp_ast, IndexExpr):
                temp_ast.base = original_repl
            else:
                temp_ast.expr = original_repl
            return True
        # Recurse towards the dummy name at the root of the accessor chain.
        node.line = ctx.line
        node.column = ctx.column
        return self.validate_and_transform_accessors(node, original_repl=original_repl,
                                                     spec=spec, ctx=ctx)

    # TODO: In Python 3, the bytes formatting has a more restricted set of options
    #       compared to string formatting.
    def check_str_interpolation(self,
                                expr: FormatStringExpr,
                                replacements: Expression) -> Type:
        """Check the types of the 'replacements' in a string interpolation
        expression: str % replacements.
        """
        self.exprchk.accept(expr)
        specifiers = self.parse_conversion_specifiers(expr.value)
        has_mapping_keys = self.analyze_conversion_specifiers(specifiers, expr)
        if isinstance(expr, BytesExpr) and (3, 0) <= self.chk.options.python_version < (3, 5):
            self.msg.fail('Bytes formatting is only supported in Python 3.5 and later',
                          replacements, code=codes.STRING_FORMATTING)
            return AnyType(TypeOfAny.from_error)

        # Set by check_s_special_cases() when a unicode value flows into a
        # str pattern on Python 2 (result is then unicode, not str).
        self.unicode_upcast = False
        if has_mapping_keys is None:
            pass  # Error was reported
        elif has_mapping_keys:
            self.check_mapping_str_interpolation(specifiers, replacements, expr)
        else:
            self.check_simple_str_interpolation(specifiers, replacements, expr)

        if isinstance(expr, BytesExpr):
            return self.named_type('builtins.bytes')
        elif isinstance(expr, UnicodeExpr):
            return self.named_type('builtins.unicode')
        elif isinstance(expr, StrExpr):
            if self.unicode_upcast:
                return self.named_type('builtins.unicode')
            return self.named_type('builtins.str')
        else:
            assert False

    def parse_conversion_specifiers(self, format: str) -> List[ConversionSpecifier]:
        """Parse all %-style conversion specifiers found in a format string."""
        specifiers = []  # type: List[ConversionSpecifier]
        for parens_key, key, flags, width, precision, type in FORMAT_RE.findall(format):
            if parens_key == '':
                key = None
            specifiers.append(ConversionSpecifier(key, flags, width, precision, type))
        return specifiers

    def analyze_conversion_specifiers(self, specifiers: List[ConversionSpecifier],
                                      context: Context) -> Optional[bool]:
        """Check the mix of specifier styles; return whether mapping keys are used.

        Returns True for '%(name)s' style, False for positional style, and None
        if an error was reported (invalid mix of styles).
        """
        has_star = any(specifier.has_star() for specifier in specifiers)
        has_key = any(specifier.has_key() for specifier in specifiers)
        all_have_keys = all(
            specifier.has_key() or specifier.type == '%' for specifier in specifiers
        )

        if has_key and has_star:
            self.msg.string_interpolation_with_star_and_key(context)
            return None
        if has_key and not all_have_keys:
            self.msg.string_interpolation_mixing_key_and_non_keys(context)
            return None
        return has_key
"""Check % string interpolation with positional specifiers '%s, %d' % ('yes, 42').""" checkers = self.build_replacement_checkers(specifiers, replacements, expr) if checkers is None: return rhs_type = get_proper_type(self.accept(replacements)) rep_types = [] # type: List[Type] if isinstance(rhs_type, TupleType): rep_types = rhs_type.items elif isinstance(rhs_type, AnyType): return elif isinstance(rhs_type, Instance) and rhs_type.type.fullname == 'builtins.tuple': # Assume that an arbitrary-length tuple has the right number of items. rep_types = [rhs_type.args[0]] * len(checkers) elif isinstance(rhs_type, UnionType): for typ in rhs_type.relevant_items(): temp_node = TempNode(typ) temp_node.line = replacements.line self.check_simple_str_interpolation(specifiers, temp_node, expr) return else: rep_types = [rhs_type] if len(checkers) > len(rep_types): self.msg.too_few_string_formatting_arguments(replacements) elif len(checkers) < len(rep_types): self.msg.too_many_string_formatting_arguments(replacements) else: if len(checkers) == 1: check_node, check_type = checkers[0] if isinstance(rhs_type, TupleType) and len(rhs_type.items) == 1: check_type(rhs_type.items[0]) else: check_node(replacements) elif (isinstance(replacements, TupleExpr) and not any(isinstance(item, StarExpr) for item in replacements.items)): for checks, rep_node in zip(checkers, replacements.items): check_node, check_type = checks check_node(rep_node) else: for checks, rep_type in zip(checkers, rep_types): check_node, check_type = checks check_type(rep_type) def check_mapping_str_interpolation(self, specifiers: List[ConversionSpecifier], replacements: Expression, expr: FormatStringExpr) -> None: """Check % string interpolation with names specifiers '%(name)s' % {'name': 'John'}.""" if (isinstance(replacements, DictExpr) and all(isinstance(k, (StrExpr, BytesExpr, UnicodeExpr)) for k, v in replacements.items)): mapping = {} # type: Dict[str, Type] for k, v in replacements.items: if 
self.chk.options.python_version >= (3, 0) and isinstance(expr, BytesExpr): # Special case: for bytes formatting keys must be bytes. if not isinstance(k, BytesExpr): self.msg.fail('Dictionary keys in bytes formatting must be bytes,' ' not strings', expr, code=codes.STRING_FORMATTING) key_str = cast(FormatStringExpr, k).value mapping[key_str] = self.accept(v) for specifier in specifiers: if specifier.type == '%': # %% is allowed in mappings, no checking is required continue assert specifier.key is not None if specifier.key not in mapping: self.msg.key_not_in_mapping(specifier.key, replacements) return rep_type = mapping[specifier.key] expected_type = self.conversion_type(specifier.type, replacements, expr) if expected_type is None: return self.chk.check_subtype(rep_type, expected_type, replacements, message_registry.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION, 'expression has type', 'placeholder with key \'%s\' has type' % specifier.key, code=codes.STRING_FORMATTING) if specifier.type == 's': self.check_s_special_cases(expr, rep_type, expr) else: rep_type = self.accept(replacements) dict_type = self.build_dict_type(expr) self.chk.check_subtype(rep_type, dict_type, replacements, message_registry.FORMAT_REQUIRES_MAPPING, 'expression has type', 'expected type for mapping is', code=codes.STRING_FORMATTING) def build_dict_type(self, expr: FormatStringExpr) -> Type: """Build expected mapping type for right operand in % formatting.""" any_type = AnyType(TypeOfAny.special_form) if self.chk.options.python_version >= (3, 0): if isinstance(expr, BytesExpr): bytes_type = self.chk.named_generic_type('builtins.bytes', []) return self.chk.named_generic_type('typing.Mapping', [bytes_type, any_type]) elif isinstance(expr, StrExpr): str_type = self.chk.named_generic_type('builtins.str', []) return self.chk.named_generic_type('typing.Mapping', [str_type, any_type]) else: assert False, "There should not be UnicodeExpr on Python 3" else: str_type = self.chk.named_generic_type('builtins.str', 
    def build_replacement_checkers(self, specifiers: List[ConversionSpecifier],
                                   context: Context, expr: FormatStringExpr
                                   ) -> Optional[List[Checkers]]:
        """Build a checker pair for every specifier; None if any specifier is invalid."""
        checkers = []  # type: List[Checkers]
        for specifier in specifiers:
            checker = self.replacement_checkers(specifier, context, expr)
            if checker is None:
                return None
            checkers.extend(checker)
        return checkers

    def replacement_checkers(self, specifier: ConversionSpecifier, context: Context,
                             expr: FormatStringExpr) -> Optional[List[Checkers]]:
        """Returns a list of tuples of two functions that check whether a replacement is
        of the right type for the specifier. The first functions take a node and checks
        its type in the right type context. The second function just checks a type.
        """
        checkers = []  # type: List[Checkers]

        # '*' width/precision each consume an extra int replacement.
        if specifier.width == '*':
            checkers.append(self.checkers_for_star(context))
        if specifier.precision == '*':
            checkers.append(self.checkers_for_star(context))
        if specifier.type == 'c':
            c = self.checkers_for_c_type(specifier.type, context, expr)
            if c is None:
                return None
            checkers.append(c)
        elif specifier.type != '%':
            c = self.checkers_for_regular_type(specifier.type, context, expr)
            if c is None:
                return None
            checkers.append(c)
        return checkers

    def checkers_for_star(self, context: Context) -> Checkers:
        """Returns a tuple of check functions that check whether, respectively,
        a node or a type is compatible with a star in a conversion specifier.
        """
        expected = self.named_type('builtins.int')

        def check_type(type: Type) -> None:
            # NOTE(review): this shadows the enclosing 'expected' with an
            # identical value; the outer one is still used by check_expr.
            expected = self.named_type('builtins.int')
            self.chk.check_subtype(type, expected, context, '* wants int',
                                   code=codes.STRING_FORMATTING)

        def check_expr(expr: Expression) -> None:
            type = self.accept(expr, expected)
            check_type(type)

        return check_expr, check_type

    def check_placeholder_type(self, typ: Type, expected_type: Type,
                               context: Context) -> None:
        """Report an error if 'typ' is not compatible with the placeholder type."""
        self.chk.check_subtype(typ, expected_type, context,
                               message_registry.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
                               'expression has type', 'placeholder has type',
                               code=codes.STRING_FORMATTING)

    def checkers_for_regular_type(self, type: str,
                                  context: Context,
                                  expr: FormatStringExpr) -> Optional[Checkers]:
        """Returns a tuple of check functions that check whether, respectively,
        a node or a type is compatible with 'type'. Return None in case of an error.
        """
        expected_type = self.conversion_type(type, context, expr)
        if expected_type is None:
            return None

        def check_type(typ: Type) -> None:
            assert expected_type is not None
            self.check_placeholder_type(typ, expected_type, context)
            if type == 's':
                # %s has extra bytes/unicode pitfalls on both Python versions.
                self.check_s_special_cases(expr, typ, context)

        def check_expr(expr: Expression) -> None:
            type = self.accept(expr, expected_type)
            check_type(type)

        return check_expr, check_type
    def check_s_special_cases(self, expr: FormatStringExpr, typ: Type,
                              context: Context) -> None:
        """Additional special cases for %s in bytes vs string context."""
        if isinstance(expr, StrExpr):
            # Couple special cases for string formatting.
            if self.chk.options.python_version >= (3, 0):
                # '%s' % b'abc' stringifies the bytes repr on Python 3.
                if has_type_component(typ, 'builtins.bytes'):
                    self.msg.fail("On Python 3 '%s' % b'abc' produces \"b'abc'\";"
                                  " use %r if this is a desired behavior", context,
                                  code=codes.STR_BYTES_PY3)
            if self.chk.options.python_version < (3, 0):
                # On Python 2 a unicode replacement upcasts the whole result.
                if has_type_component(typ, 'builtins.unicode'):
                    self.unicode_upcast = True
        if isinstance(expr, BytesExpr):
            # A special case for bytes formatting: b'%s' actually requires bytes on Python 3.
            if self.chk.options.python_version >= (3, 0):
                if has_type_component(typ, 'builtins.str'):
                    self.msg.fail("On Python 3 b'%s' requires bytes, not string", context,
                                  code=codes.STRING_FORMATTING)

    def checkers_for_c_type(self, type: str,
                            context: Context,
                            expr: FormatStringExpr) -> Optional[Checkers]:
        """Returns a tuple of check functions that check whether, respectively,
        a node or a type is compatible with 'type' that is a character type.
        """
        expected_type = self.conversion_type(type, context, expr)
        if expected_type is None:
            return None

        def check_type(type: Type) -> None:
            assert expected_type is not None
            self.check_placeholder_type(type, expected_type, context)

        def check_expr(expr: Expression) -> None:
            """int, or str with length 1"""
            type = self.accept(expr, expected_type)
            # NOTE(review): the cast is redundant after the isinstance check,
            # but kept as-is.
            if isinstance(expr, (StrExpr, BytesExpr)) and len(cast(StrExpr, expr).value) != 1:
                self.msg.requires_int_or_char(context)
            check_type(type)

        return check_expr, check_type

    def conversion_type(self, p: str, context: Context, expr: FormatStringExpr,
                        format_call: bool = False) -> Optional[Type]:
        """Return the type that is accepted for a string interpolation conversion specifier type.

        Note that both Python's float (e.g. %f) and integer (e.g. %d)
        specifier types accept both float and integers.

        The 'format_call' argument indicates whether this type came from % interpolation
        or from a str.format() call, the meaning of few formatting types are different.
        """
        NUMERIC_TYPES = NUMERIC_TYPES_NEW if format_call else NUMERIC_TYPES_OLD
        INT_TYPES = REQUIRE_INT_NEW if format_call else REQUIRE_INT_OLD
        if p == 'b' and not format_call:
            if self.chk.options.python_version < (3, 5):
                self.msg.fail("Format character 'b' is only supported in Python 3.5 and later",
                              context, code=codes.STRING_FORMATTING)
                return None
            if not isinstance(expr, BytesExpr):
                self.msg.fail("Format character 'b' is only supported on bytes patterns",
                              context, code=codes.STRING_FORMATTING)
                return None
            return self.named_type('builtins.bytes')
        elif p == 'a':
            if self.chk.options.python_version < (3, 0):
                self.msg.fail("Format character 'a' is only supported in Python 3", context,
                              code=codes.STRING_FORMATTING)
                return None
            # TODO: return type object?
            return AnyType(TypeOfAny.special_form)
        elif p in ['s', 'r']:
            # %s and %r accept anything (everything has str()/repr()).
            return AnyType(TypeOfAny.special_form)
        elif p in NUMERIC_TYPES:
            if p in INT_TYPES:
                numeric_types = [self.named_type('builtins.int')]
            else:
                numeric_types = [self.named_type('builtins.int'),
                                 self.named_type('builtins.float')]
            if not format_call:
                # % interpolation also accepts SupportsFloat/SupportsInt duck types.
                if p in FLOAT_TYPES:
                    numeric_types.append(self.named_type('typing.SupportsFloat'))
                else:
                    numeric_types.append(self.named_type('typing.SupportsInt'))
            return UnionType.make_union(numeric_types)
        elif p in ['c']:
            return UnionType([self.named_type('builtins.int'),
                              self.named_type('builtins.float'),
                              self.named_type('builtins.str')])
        else:
            self.msg.unsupported_placeholder(p, context)
            return None

    #
    # Helpers
    #

    def named_type(self, name: str) -> Instance:
        """Return an instance type with type given by the name and no type arguments.

        Alias for TypeChecker.named_type.
        """
        return self.chk.named_type(name)

    def accept(self, expr: Expression, context: Optional[Type] = None) -> Type:
        """Type check a node. Alias for TypeChecker.accept."""
        return self.chk.expr_checker.accept(expr, context)
def has_type_component(typ: Type, fullname: str) -> bool:
    """Is this a specific instance type, or a union that contains it?

    We use this ad-hoc function instead of a proper visitor or subtype check
    because some str vs bytes errors are strictly speaking not runtime errors,
    but rather highly counter-intuitive behavior. This is similar to what is used for
    --strict-equality.
    """
    typ = get_proper_type(typ)
    if isinstance(typ, Instance):
        return typ.type.has_base(fullname)
    elif isinstance(typ, TypeVarType):
        # A type variable matches if its bound or any of its value restrictions do.
        return (has_type_component(typ.upper_bound, fullname) or
                any(has_type_component(v, fullname) for v in typ.values))
    elif isinstance(typ, UnionType):
        return any(has_type_component(t, fullname) for t in typ.relevant_items())
    return False


def custom_special_method(typ: Type, name: str,
                          check_all: bool = False) -> bool:
    """Does this type have a custom special method such as __format__() or __eq__()?

    If check_all is True ensure all items of a union have a custom method, not just some.
    """
    typ = get_proper_type(typ)
    if isinstance(typ, Instance):
        method = typ.type.get(name)
        if method and isinstance(method.node, (SYMBOL_FUNCBASE_TYPES, Decorator, Var)):
            if method.node.info:
                # "Custom" means defined outside builtins.
                return not method.node.info.fullname.startswith('builtins.')
        return False
    if isinstance(typ, UnionType):
        if check_all:
            return all(custom_special_method(t, name, check_all) for t in typ.items)
        return any(custom_special_method(t, name) for t in typ.items)
    if isinstance(typ, TupleType):
        return custom_special_method(tuple_fallback(typ), name)
    if isinstance(typ, CallableType) and typ.is_type_obj():
        # Look up __method__ on the metaclass for class objects.
        return custom_special_method(typ.fallback, name)
    if isinstance(typ, AnyType):
        # Avoid false positives in uncertain cases.
        return True
    # TODO: support other types (see ExpressionChecker.has_member())?
    return False
def parse_version(v: str) -> Tuple[int, int]:
    """Parse an 'x.y' Python version string into a (major, minor) tuple.

    Raise argparse.ArgumentTypeError for malformed strings and for versions
    that mypy does not support.
    """
    match = re.match(r'\A(\d)\.(\d+)\Z', v)
    if not match:
        raise argparse.ArgumentTypeError(
            "Invalid python version '{}' (expected format: 'x.y')".format(v))
    major, minor = int(match.group(1)), int(match.group(2))
    # Guard clauses: reject unsupported minor versions per major version.
    if major == 2 and minor != 7:
        raise argparse.ArgumentTypeError(
            "Python 2.{} is not supported (must be 2.7)".format(minor))
    if major == 3 and minor < defaults.PYTHON3_VERSION_MIN[1]:
        raise argparse.ArgumentTypeError(
            "Python 3.{0} is not supported (must be {1}.{2} or higher)".format(
                minor, *defaults.PYTHON3_VERSION_MIN))
    if major not in (2, 3):
        raise argparse.ArgumentTypeError(
            "Python major version '{}' out of range (must be 2 or 3)".format(major))
    return major, minor


def expand_path(path: str) -> str:
    """Expand the user home directory and any environment variables contained within
    the provided path.
    """
    without_home = os.path.expanduser(path)
    return os.path.expandvars(without_home)


def split_and_match_files(paths: str) -> List[str]:
    """Take a string representing a list of files/directories (with support for globbing
    through the glob library).

    Where a path/glob matches no file, we still include the raw path in the resulting list.

    Returns a list of file paths
    """
    expanded_paths = []  # type: List[str]
    for raw_entry in paths.split(','):
        pattern = expand_path(raw_entry.strip())
        matches = fileglob.glob(pattern, recursive=True)
        # Keep the raw pattern when nothing matched, so the caller can report it.
        expanded_paths.extend(matches if matches else [pattern])
    return expanded_paths
# For most options, the type of the default value set in options.py is
# sufficient, and we don't have to do anything here. This table
# exists to specify types for values initialized to None or container
# types.
config_types = {
    'python_version': parse_version,
    'strict_optional_whitelist': lambda s: s.split(),
    'custom_typing_module': str,
    'custom_typeshed_dir': expand_path,
    'mypy_path': lambda s: [expand_path(p.strip()) for p in re.split('[,:]', s)],
    'files': split_and_match_files,
    'quickstart_file': str,
    'junit_xml': str,
    # These two are for backwards compatibility
    'silent_imports': bool,
    'almost_silent': bool,
    'plugins': lambda s: [p.strip() for p in s.split(',')],
    'always_true': lambda s: [p.strip() for p in s.split(',')],
    'always_false': lambda s: [p.strip() for p in s.split(',')],
    'package_root': lambda s: [p.strip() for p in s.split(',')],
    'cache_dir': expand_path,
    'python_executable': expand_path,
}  # type: Final


def parse_config_file(options: Options, filename: Optional[str],
                      stdout: Optional[TextIO] = None,
                      stderr: Optional[TextIO] = None) -> None:
    """Parse a config file into an Options object.

    Errors are written to stderr but are not fatal.

    If filename is None, fall back to default config files.
    """
    stdout = stdout or sys.stdout
    stderr = stderr or sys.stderr

    if filename is not None:
        config_files = (filename,)  # type: Tuple[str, ...]
    else:
        config_files = tuple(map(os.path.expanduser, defaults.CONFIG_FILES))

    parser = configparser.RawConfigParser()

    # Use the first config file that exists and parses cleanly.
    for config_file in config_files:
        if not os.path.exists(config_file):
            continue
        try:
            parser.read(config_file)
        except configparser.Error as err:
            print("%s: %s" % (config_file, err), file=stderr)
        else:
            file_read = config_file
            options.config_file = file_read
            break
    else:
        # No config file was read at all; nothing to do.
        return

    if 'mypy' not in parser:
        # Only complain when the user pointed us at the file explicitly, or it
        # is a mypy-specific (non-shared) config file.
        if filename or file_read not in defaults.SHARED_CONFIG_FILES:
            print("%s: No [mypy] section in config file" % file_read, file=stderr)
    else:
        section = parser['mypy']
        prefix = '%s: [%s]: ' % (file_read, 'mypy')
        updates, report_dirs = parse_section(prefix, options, section, stderr)
        for k, v in updates.items():
            setattr(options, k, v)
        options.report_dirs.update(report_dirs)

    # Handle per-module [mypy-pattern] sections.
    for name, section in parser.items():
        if name.startswith('mypy-'):
            prefix = '%s: [%s]: ' % (file_read, name)
            updates, report_dirs = parse_section(prefix, options, section, stderr)
            if report_dirs:
                print("%sPer-module sections should not specify reports (%s)" %
                      (prefix, ', '.join(s + '_report' for s in sorted(report_dirs))),
                      file=stderr)
            if set(updates) - PER_MODULE_OPTIONS:
                print("%sPer-module sections should only specify per-module flags (%s)" %
                      (prefix, ', '.join(sorted(set(updates) - PER_MODULE_OPTIONS))),
                      file=stderr)
                updates = {k: v for k, v in updates.items() if k in PER_MODULE_OPTIONS}
            globs = name[5:]
            for glob in globs.split(','):
                # For backwards compatibility, replace (back)slashes with dots.
                glob = glob.replace(os.sep, '.')
                if os.altsep:
                    glob = glob.replace(os.altsep, '.')

                if (any(c in glob for c in '?[]!') or
                        any('*' in x and x != '*' for x in glob.split('.'))):
                    print("%sPatterns must be fully-qualified module names, optionally "
                          "with '*' in some components (e.g spam.*.eggs.*)"
                          % prefix, file=stderr)
                else:
                    options.per_module_options[glob] = updates
def parse_section(prefix: str, template: Options,
                  section: Mapping[str, str],
                  stderr: TextIO = sys.stderr
                  ) -> Tuple[Dict[str, object], Dict[str, str]]:
    """Parse one section of a config file.

    Returns a dict of option values encountered, and a dict of report directories.
    """
    results = {}  # type: Dict[str, object]
    report_dirs = {}  # type: Dict[str, str]
    for key in section:
        invert = False
        options_key = key
        if key in config_types:
            ct = config_types[key]
        else:
            dv = None
            # We have to keep new_semantic_analyzer in Options
            # for plugin compatibility but it is not a valid option anymore.
            assert hasattr(template, 'new_semantic_analyzer')
            if key != 'new_semantic_analyzer':
                dv = getattr(template, key, None)
            if dv is None:
                if key.endswith('_report'):
                    report_type = key[:-7].replace('_', '-')
                    if report_type in defaults.REPORTER_NAMES:
                        report_dirs[report_type] = section[key]
                    else:
                        print("%sUnrecognized report type: %s" % (prefix, key),
                              file=stderr)
                    continue
                # Try to recognize inverted/aliased flag names (e.g. no_x -> x).
                if key.startswith('x_'):
                    pass  # Don't complain about `x_blah` flags
                elif key.startswith('no_') and hasattr(template, key[3:]):
                    options_key = key[3:]
                    invert = True
                elif key.startswith('allow') and hasattr(template, 'dis' + key):
                    options_key = 'dis' + key
                    invert = True
                elif key.startswith('disallow') and hasattr(template, key[3:]):
                    options_key = key[3:]
                    invert = True
                elif key == 'strict':
                    print("%sStrict mode is not supported in configuration files: specify "
                          "individual flags instead (see 'mypy -h' for the list of flags enabled "
                          "in strict mode)" % prefix, file=stderr)
                else:
                    print("%sUnrecognized option: %s = %s" % (prefix, key, section[key]),
                          file=stderr)
                if invert:
                    dv = getattr(template, options_key, None)
                else:
                    continue
            # Infer the option's type from its default value in the template.
            ct = type(dv)
        v = None  # type: Any
        try:
            if ct is bool:
                v = section.getboolean(key)  # type: ignore[attr-defined]  # Until better stub
                if invert:
                    v = not v
            elif callable(ct):
                if invert:
                    print("%sCan not invert non-boolean key %s" % (prefix, options_key),
                          file=stderr)
                    continue
                try:
                    v = ct(section.get(key))
                except argparse.ArgumentTypeError as err:
                    print("%s%s: %s" % (prefix, key, err), file=stderr)
                    continue
            else:
                print("%sDon't know what type %s should have" % (prefix, key), file=stderr)
                continue
        except ValueError as err:
            print("%s%s: %s" % (prefix, key, err), file=stderr)
            continue
        # Translate the two deprecated options into their modern equivalents.
        if key == 'silent_imports':
            print("%ssilent_imports has been replaced by "
                  "ignore_missing_imports=True; follow_imports=skip" % prefix, file=stderr)
            if v:
                if 'ignore_missing_imports' not in results:
                    results['ignore_missing_imports'] = True
                if 'follow_imports' not in results:
                    results['follow_imports'] = 'skip'
        if key == 'almost_silent':
            print("%salmost_silent has been replaced by "
                  "follow_imports=error" % prefix, file=stderr)
            if v:
                if 'follow_imports' not in results:
                    results['follow_imports'] = 'error'
        results[options_key] = v
    return results, report_dirs


def split_directive(s: str) -> Tuple[List[str], List[str]]:
    """Split s on commas, except during quoted sections.

    Returns the parts and a list of error messages."""
    parts = []
    cur = []  # type: List[str]
    errors = []
    i = 0
    while i < len(s):
        if s[i] == ',':
            parts.append(''.join(cur).strip())
            cur = []
        elif s[i] == '"':
            # Consume everything up to the closing quote verbatim.
            i += 1
            while i < len(s) and s[i] != '"':
                cur.append(s[i])
                i += 1
            if i == len(s):
                errors.append("Unterminated quote in configuration comment")
                cur.clear()
        else:
            cur.append(s[i])
        i += 1
    if cur:
        parts.append(''.join(cur).strip())

    return parts, errors


def mypy_comments_to_config_map(line: str,
                                template: Options) -> Tuple[Dict[str, str], List[str]]:
    """Rewrite the mypy comment syntax into ini file syntax.

    Returns a dict mapping option names to raw string values, and a list of
    error messages produced while splitting the directive.
    """
    options = {}
    entries, errors = split_directive(line)
    for entry in entries:
        if '=' not in entry:
            name = entry
            value = None
        else:
            name, value = [x.strip() for x in entry.split('=', 1)]

        name = name.replace('-', '_')
        if value is None:
            # A bare flag name means "enable it".
            value = 'True'
        options[name] = value

    return options, errors


def parse_mypy_comments(
        args: List[Tuple[int, str]],
        template: Options) -> Tuple[Dict[str, object], List[Tuple[int, str]]]:
    """Parse a collection of inline mypy: configuration comments.

    Returns a dictionary of options to be applied and a list of error messages
    generated.
    """

    errors = []  # type: List[Tuple[int, str]]
    sections = {}

    for lineno, line in args:
        # In order to easily match the behavior for bools, we abuse configparser.
        # Oddly, the only way to get the SectionProxy object with the getboolean
        # method is to create a config parser.
        parser = configparser.RawConfigParser()
        options, parse_errors = mypy_comments_to_config_map(line, template)
        parser['dummy'] = options
        errors.extend((lineno, x) for x in parse_errors)

        stderr = StringIO()
        new_sections, reports = parse_section('', template, parser['dummy'], stderr=stderr)
        errors.extend((lineno, x) for x in stderr.getvalue().strip().split('\n') if x)
        if reports:
            errors.append((lineno, "Reports not supported in inline configuration"))
        sections.update(new_sections)

    return sections, errors
# Constraint operator kinds: T <: target vs. T :> target.
SUBTYPE_OF = 0  # type: Final[int]
SUPERTYPE_OF = 1  # type: Final[int]


class Constraint:
    """A representation of a type constraint.

    It can be either T <: type or T :> type (T is a type variable).
    """

    # The constrained type variable's id.
    type_var = None  # type: TypeVarId
    op = 0  # SUBTYPE_OF or SUPERTYPE_OF
    # The type on the other side of the constraint.
    target = None  # type: Type

    def __init__(self, type_var: TypeVarId, op: int, target: Type) -> None:
        self.type_var = type_var
        self.op = op
        self.target = target

    def __repr__(self) -> str:
        op_str = '<:'
        if self.op == SUPERTYPE_OF:
            op_str = ':>'
        return '{} {} {}'.format(self.type_var, op_str, self.target)


def infer_constraints_for_callable(
        callee: CallableType, arg_types: Sequence[Optional[Type]], arg_kinds: List[int],
        formal_to_actual: List[List[int]]) -> List[Constraint]:
    """Infer type variable constraints for a callable and actual arguments.

    Return a list of constraints.
    """
    constraints = []  # type: List[Constraint]
    mapper = ArgTypeExpander()

    for i, actuals in enumerate(formal_to_actual):
        for actual in actuals:
            actual_arg_type = arg_types[actual]
            if actual_arg_type is None:
                continue
            # Expand *args/**kwargs actuals into the type seen by the formal.
            actual_type = mapper.expand_actual_type(actual_arg_type, arg_kinds[actual],
                                                    callee.arg_names[i],
                                                    callee.arg_kinds[i])
            # Actual argument must be a subtype of the formal, i.e. the
            # formal (template) is a supertype of the actual.
            c = infer_constraints(callee.arg_types[i], actual_type,
                                  SUPERTYPE_OF)
            constraints.extend(c)

    return constraints


def infer_constraints(template: Type, actual: Type,
                      direction: int) -> List[Constraint]:
    """Infer type constraints.

    Match a template type, which may contain type variable references,
    recursively against a type which does not contain (the same) type
    variable references. The result is a list of type constrains of
    form 'T is a supertype/subtype of x', where T is a type variable
    present in the template and x is a type without reference to type
    variables present in the template.

    Assume T and S are type variables. Now the following results can be
    calculated (read as '(template, actual) --> result'):

      (T, X)            -->  T :> X
      (X[T], X[Y])      -->  T <: Y and T :> Y
      ((T, T), (X, Y))  -->  T :> X and T :> Y
      ((T, S), (X, Y))  -->  T :> X and S :> Y
      (X[T], Any)       -->  T <: Any and T :> Any

    The constraints are represented as Constraint objects.
    """
    # Stop if we are already inferring constraints for this exact template;
    # this breaks cycles for recursive type aliases.
    if any(get_proper_type(template) == get_proper_type(t) for t in TypeState._inferring):
        return []
    if isinstance(template, TypeAliasType) and template.is_recursive:
        # This case requires special care because it may cause infinite recursion.
        TypeState._inferring.append(template)
        res = _infer_constraints(template, actual, direction)
        TypeState._inferring.pop()
        return res
    return _infer_constraints(template, actual, direction)
"T <: Union[U1, U2]" is not equivalent to "T <: U1 or T <: U2", # because T can itself be a union (notably, Union[U1, U2] itself). # 2. "T :> Union[U1, U2]" is logically equivalent to "T :> U1 and # T :> U2", but they are not equivalent to the constraint solver, # which never introduces new Union types (it uses join() instead). if isinstance(template, TypeVarType): return [Constraint(template.id, direction, actual)] # Now handle the case of either template or actual being a Union. # For a Union to be a subtype of another type, every item of the Union # must be a subtype of that type, so concatenate the constraints. if direction == SUBTYPE_OF and isinstance(template, UnionType): res = [] for t_item in template.items: res.extend(infer_constraints(t_item, actual, direction)) return res if direction == SUPERTYPE_OF and isinstance(actual, UnionType): res = [] for a_item in actual.items: res.extend(infer_constraints(orig_template, a_item, direction)) return res # Now the potential subtype is known not to be a Union or a type # variable that we are solving for. In that case, for a Union to # be a supertype of the potential subtype, some item of the Union # must be a supertype of it. if direction == SUBTYPE_OF and isinstance(actual, UnionType): # If some of items is not a complete type, disregard that. items = simplify_away_incomplete_types(actual.items) # We infer constraints eagerly -- try to find constraints for a type # variable if possible. This seems to help with some real-world # use cases. return any_constraints( [infer_constraints_if_possible(template, a_item, direction) for a_item in items], eager=True) if direction == SUPERTYPE_OF and isinstance(template, UnionType): # When the template is a union, we are okay with leaving some # type variables indeterminate. This helps with some special # cases, though this isn't very principled. 
def infer_constraints_if_possible(template: Type, actual: Type,
                                  direction: int) -> Optional[List[Constraint]]:
    """Like infer_constraints, but return None if the input relation is
    known to be unsatisfiable, for example if template=List[T] and actual=int.

    (In this case infer_constraints would return [], just like it would for
    an automatically satisfied relation like template=List[T] and actual=object.)
    """
    if direction == SUBTYPE_OF:
        if not mypy.subtypes.is_subtype(erase_typevars(template), actual):
            return None
    if direction == SUPERTYPE_OF:
        if not mypy.subtypes.is_subtype(actual, erase_typevars(template)):
            return None
        # The plain subtype check above erases type variables to Any, so a
        # type-variable template needs an extra check against its upper bound.
        if (isinstance(template, TypeVarType) and
                not mypy.subtypes.is_subtype(
                    actual, erase_typevars(template.upper_bound))):
            return None
    return infer_constraints(template, actual, direction)
def is_same_constraints(x: List[Constraint], y: List[Constraint]) -> bool:
    """Return True if the two constraint lists are equivalent as sets.

    Every constraint in each list must have a matching constraint in the other.
    """
    def covered(items: List[Constraint], pool: List[Constraint]) -> bool:
        # Each item must match at least one constraint from the pool.
        return all(any(is_same_constraint(a, b) for b in pool) for a in items)

    return covered(x, y) and covered(y, x)
self.actual = actual self.direction = direction # Trivial leaf types def visit_unbound_type(self, template: UnboundType) -> List[Constraint]: return [] def visit_any(self, template: AnyType) -> List[Constraint]: return [] def visit_none_type(self, template: NoneType) -> List[Constraint]: return [] def visit_uninhabited_type(self, template: UninhabitedType) -> List[Constraint]: return [] def visit_erased_type(self, template: ErasedType) -> List[Constraint]: return [] def visit_deleted_type(self, template: DeletedType) -> List[Constraint]: return [] def visit_literal_type(self, template: LiteralType) -> List[Constraint]: return [] # Errors def visit_partial_type(self, template: PartialType) -> List[Constraint]: # We can't do anything useful with a partial type here. assert False, "Internal error" # Non-trivial leaf type def visit_type_var(self, template: TypeVarType) -> List[Constraint]: assert False, ("Unexpected TypeVarType in ConstraintBuilderVisitor" " (should have been handled in infer_constraints)") # Non-leaf types def visit_instance(self, template: Instance) -> List[Constraint]: original_actual = actual = self.actual res = [] # type: List[Constraint] if isinstance(actual, (CallableType, Overloaded)) and template.type.is_protocol: if template.type.protocol_members == ['__call__']: # Special case: a generic callback protocol if not any(mypy.sametypes.is_same_type(template, t) for t in template.type.inferring): template.type.inferring.append(template) call = mypy.subtypes.find_member('__call__', template, actual, is_operator=True) assert call is not None if mypy.subtypes.is_subtype(actual, erase_typevars(call)): subres = infer_constraints(call, actual, self.direction) res.extend(subres) template.type.inferring.pop() return res if isinstance(actual, CallableType) and actual.fallback is not None: actual = actual.fallback if isinstance(actual, Overloaded) and actual.fallback is not None: actual = actual.fallback if isinstance(actual, TypedDictType): actual = 
actual.as_anonymous().fallback if isinstance(actual, LiteralType): actual = actual.fallback if isinstance(actual, Instance): instance = actual erased = erase_typevars(template) assert isinstance(erased, Instance) # type: ignore # We always try nominal inference if possible, # it is much faster than the structural one. if (self.direction == SUBTYPE_OF and template.type.has_base(instance.type.fullname)): mapped = map_instance_to_supertype(template, instance.type) tvars = mapped.type.defn.type_vars for i in range(len(instance.args)): # The constraints for generic type parameters depend on variance. # Include constraints from both directions if invariant. if tvars[i].variance != CONTRAVARIANT: res.extend(infer_constraints( mapped.args[i], instance.args[i], self.direction)) if tvars[i].variance != COVARIANT: res.extend(infer_constraints( mapped.args[i], instance.args[i], neg_op(self.direction))) return res elif (self.direction == SUPERTYPE_OF and instance.type.has_base(template.type.fullname)): mapped = map_instance_to_supertype(instance, template.type) tvars = template.type.defn.type_vars for j in range(len(template.args)): # The constraints for generic type parameters depend on variance. # Include constraints from both directions if invariant. if tvars[j].variance != CONTRAVARIANT: res.extend(infer_constraints( template.args[j], mapped.args[j], self.direction)) if tvars[j].variance != COVARIANT: res.extend(infer_constraints( template.args[j], mapped.args[j], neg_op(self.direction))) return res if (template.type.is_protocol and self.direction == SUPERTYPE_OF and # We avoid infinite recursion for structural subtypes by checking # whether this type already appeared in the inference chain. # This is a conservative way break the inference cycles. # It never produces any "false" constraints but gives up soon # on purely structural inference cycles, see #3829. 
# Note that we use is_protocol_implementation instead of is_subtype # because some type may be considered a subtype of a protocol # due to _promote, but still not implement the protocol. not any(mypy.sametypes.is_same_type(template, t) for t in template.type.inferring) and mypy.subtypes.is_protocol_implementation(instance, erased)): template.type.inferring.append(template) self.infer_constraints_from_protocol_members(res, instance, template, original_actual, template) template.type.inferring.pop() return res elif (instance.type.is_protocol and self.direction == SUBTYPE_OF and # We avoid infinite recursion for structural subtypes also here. not any(mypy.sametypes.is_same_type(instance, i) for i in instance.type.inferring) and mypy.subtypes.is_protocol_implementation(erased, instance)): instance.type.inferring.append(instance) self.infer_constraints_from_protocol_members(res, instance, template, template, instance) instance.type.inferring.pop() return res if isinstance(actual, AnyType): # IDEA: Include both ways, i.e. add negation as well? return self.infer_against_any(template.args, actual) if (isinstance(actual, TupleType) and (is_named_instance(template, 'typing.Iterable') or is_named_instance(template, 'typing.Container') or is_named_instance(template, 'typing.Sequence') or is_named_instance(template, 'typing.Reversible')) and self.direction == SUPERTYPE_OF): for item in actual.items: cb = infer_constraints(template.args[0], item, SUPERTYPE_OF) res.extend(cb) return res elif isinstance(actual, TupleType) and self.direction == SUPERTYPE_OF: return infer_constraints(template, mypy.typeops.tuple_fallback(actual), self.direction) else: return [] def infer_constraints_from_protocol_members(self, res: List[Constraint], instance: Instance, template: Instance, subtype: Type, protocol: Instance) -> None: """Infer constraints for situations where either 'template' or 'instance' is a protocol. 
The 'protocol' is the one of two that is an instance of protocol type, 'subtype' is the type used to bind self during inference. Currently, we just infer constrains for every protocol member type (both ways for settable members). """ for member in protocol.type.protocol_members: inst = mypy.subtypes.find_member(member, instance, subtype) temp = mypy.subtypes.find_member(member, template, subtype) assert inst is not None and temp is not None # The above is safe since at this point we know that 'instance' is a subtype # of (erased) 'template', therefore it defines all protocol members res.extend(infer_constraints(temp, inst, self.direction)) if (mypy.subtypes.IS_SETTABLE in mypy.subtypes.get_member_flags(member, protocol.type)): # Settable members are invariant, add opposite constraints res.extend(infer_constraints(temp, inst, neg_op(self.direction))) def visit_callable_type(self, template: CallableType) -> List[Constraint]: if isinstance(self.actual, CallableType): cactual = self.actual # FIX verify argument counts # FIX what if one of the functions is generic res = [] # type: List[Constraint] # We can't infer constraints from arguments if the template is Callable[..., T] (with # literal '...'). if not template.is_ellipsis_args: # The lengths should match, but don't crash (it will error elsewhere). for t, a in zip(template.arg_types, cactual.arg_types): # Negate direction due to function argument type contravariance. 
res.extend(infer_constraints(t, a, neg_op(self.direction))) res.extend(infer_constraints(template.ret_type, cactual.ret_type, self.direction)) return res elif isinstance(self.actual, AnyType): # FIX what if generic res = self.infer_against_any(template.arg_types, self.actual) any_type = AnyType(TypeOfAny.from_another_any, source_any=self.actual) res.extend(infer_constraints(template.ret_type, any_type, self.direction)) return res elif isinstance(self.actual, Overloaded): return self.infer_against_overloaded(self.actual, template) elif isinstance(self.actual, TypeType): return infer_constraints(template.ret_type, self.actual.item, self.direction) elif isinstance(self.actual, Instance): # Instances with __call__ method defined are considered structural # subtypes of Callable with a compatible signature. call = mypy.subtypes.find_member('__call__', self.actual, self.actual, is_operator=True) if call: return infer_constraints(template, call, self.direction) else: return [] else: return [] def infer_against_overloaded(self, overloaded: Overloaded, template: CallableType) -> List[Constraint]: # Create constraints by matching an overloaded type against a template. # This is tricky to do in general. We cheat by only matching against # the first overload item, and by only matching the return type. This # seems to work somewhat well, but we should really use a more # reliable technique. 
item = find_matching_overload_item(overloaded, template) return infer_constraints(template.ret_type, item.ret_type, self.direction) def visit_tuple_type(self, template: TupleType) -> List[Constraint]: actual = self.actual if isinstance(actual, TupleType) and len(actual.items) == len(template.items): res = [] # type: List[Constraint] for i in range(len(template.items)): res.extend(infer_constraints(template.items[i], actual.items[i], self.direction)) return res elif isinstance(actual, AnyType): return self.infer_against_any(template.items, actual) else: return [] def visit_typeddict_type(self, template: TypedDictType) -> List[Constraint]: actual = self.actual if isinstance(actual, TypedDictType): res = [] # type: List[Constraint] # NOTE: Non-matching keys are ignored. Compatibility is checked # elsewhere so this shouldn't be unsafe. for (item_name, template_item_type, actual_item_type) in template.zip(actual): res.extend(infer_constraints(template_item_type, actual_item_type, self.direction)) return res elif isinstance(actual, AnyType): return self.infer_against_any(template.items.values(), actual) else: return [] def visit_union_type(self, template: UnionType) -> List[Constraint]: assert False, ("Unexpected UnionType in ConstraintBuilderVisitor" " (should have been handled in infer_constraints)") def visit_type_alias_type(self, template: TypeAliasType) -> List[Constraint]: assert False, "This should be never called, got {}".format(template) def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> List[Constraint]: res = [] # type: List[Constraint] for t in types: res.extend(infer_constraints(t, any_type, self.direction)) return res def visit_overloaded(self, template: Overloaded) -> List[Constraint]: res = [] # type: List[Constraint] for t in template.items(): res.extend(infer_constraints(t, self.actual, self.direction)) return res def visit_type_type(self, template: TypeType) -> List[Constraint]: if isinstance(self.actual, CallableType): return 
def neg_op(op: int) -> int:
    """Map SubtypeOf to SupertypeOf and vice versa."""
    # The two directions are each other's inverses.
    flipped = {SUBTYPE_OF: SUPERTYPE_OF, SUPERTYPE_OF: SUBTYPE_OF}
    if op in flipped:
        return flipped[op]
    raise ValueError('Invalid operator {}'.format(op))
This is defined here # to make reporter names available without importing mypy.report -- this speeds # up startup. REPORTER_NAMES = ['linecount', 'any-exprs', 'linecoverage', 'memory-xml', 'cobertura-xml', 'xml', 'xslt-html', 'xslt-txt', 'html', 'txt', 'lineprecision'] # type: Final mypy-0.761/mypy/dmypy/0000755€tŠÔÚ€2›s®0000000000013576752266021062 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/dmypy/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246023157 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/dmypy/__main__.py0000644€tŠÔÚ€2›s®0000000013413576752246023150 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.dmypy.client import console_entry if __name__ == '__main__': console_entry() mypy-0.761/mypy/dmypy/client.py0000644€tŠÔÚ€2›s®0000005115113576752246022713 0ustar jukkaDROPBOX\Domain Users00000000000000"""Client for mypy daemon mode. This manages a daemon process which keeps useful state in memory rather than having to read it back from disk on each run. """ import argparse import base64 import json import os import pickle import sys import time import traceback from typing import Any, Callable, Dict, Mapping, Optional, Tuple, List from mypy.dmypy_util import DEFAULT_STATUS_FILE, receive from mypy.ipc import IPCClient, IPCException from mypy.dmypy_os import alive, kill from mypy.util import check_python_version, get_terminal_width from mypy.version import __version__ # Argument parser. Subparsers are tied to action functions by the # @action(subparse) decorator. 
class AugmentedHelpFormatter(argparse.RawDescriptionHelpFormatter): def __init__(self, prog: str) -> None: super().__init__(prog=prog, max_help_position=30) parser = argparse.ArgumentParser(prog='dmypy', description="Client for mypy daemon mode", fromfile_prefix_chars='@') parser.set_defaults(action=None) parser.add_argument('--status-file', default=DEFAULT_STATUS_FILE, help='status file to retrieve daemon details') parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__, help="Show program's version number and exit") subparsers = parser.add_subparsers() start_parser = p = subparsers.add_parser('start', help="Start daemon") p.add_argument('--log-file', metavar='FILE', type=str, help="Direct daemon stdout/stderr to FILE") p.add_argument('--timeout', metavar='TIMEOUT', type=int, help="Server shutdown timeout (in seconds)") p.add_argument('flags', metavar='FLAG', nargs='*', type=str, help="Regular mypy flags (precede with --)") restart_parser = p = subparsers.add_parser('restart', help="Restart daemon (stop or kill followed by start)") p.add_argument('--log-file', metavar='FILE', type=str, help="Direct daemon stdout/stderr to FILE") p.add_argument('--timeout', metavar='TIMEOUT', type=int, help="Server shutdown timeout (in seconds)") p.add_argument('flags', metavar='FLAG', nargs='*', type=str, help="Regular mypy flags (precede with --)") status_parser = p = subparsers.add_parser('status', help="Show daemon status") p.add_argument('-v', '--verbose', action='store_true', help="Print detailed status") p.add_argument('--fswatcher-dump-file', help="Collect information about the current file state") stop_parser = p = subparsers.add_parser('stop', help="Stop daemon (asks it politely to go away)") kill_parser = p = subparsers.add_parser('kill', help="Kill daemon (kills the process)") check_parser = p = subparsers.add_parser('check', formatter_class=AugmentedHelpFormatter, help="Check some files (requires daemon)") p.add_argument('-v', 
'--verbose', action='store_true', help="Print detailed status") p.add_argument('-q', '--quiet', action='store_true', help=argparse.SUPPRESS) # Deprecated p.add_argument('--junit-xml', help="Write junit.xml to the given file") p.add_argument('--perf-stats-file', help='write performance information to the given file') p.add_argument('files', metavar='FILE', nargs='+', help="File (or directory) to check") run_parser = p = subparsers.add_parser('run', formatter_class=AugmentedHelpFormatter, help="Check some files, [re]starting daemon if necessary") p.add_argument('-v', '--verbose', action='store_true', help="Print detailed status") p.add_argument('--junit-xml', help="Write junit.xml to the given file") p.add_argument('--perf-stats-file', help='write performance information to the given file') p.add_argument('--timeout', metavar='TIMEOUT', type=int, help="Server shutdown timeout (in seconds)") p.add_argument('--log-file', metavar='FILE', type=str, help="Direct daemon stdout/stderr to FILE") p.add_argument('flags', metavar='ARG', nargs='*', type=str, help="Regular mypy flags and files (precede with --)") recheck_parser = p = subparsers.add_parser('recheck', formatter_class=AugmentedHelpFormatter, help="Re-check the previous list of files, with optional modifications (requires daemon)") p.add_argument('-v', '--verbose', action='store_true', help="Print detailed status") p.add_argument('-q', '--quiet', action='store_true', help=argparse.SUPPRESS) # Deprecated p.add_argument('--junit-xml', help="Write junit.xml to the given file") p.add_argument('--perf-stats-file', help='write performance information to the given file') p.add_argument('--update', metavar='FILE', nargs='*', help="Files in the run to add or check again (default: all from previous run)") p.add_argument('--remove', metavar='FILE', nargs='*', help="Files to remove from the run") suggest_parser = p = subparsers.add_parser('suggest', help="Suggest a signature or show call sites for a specific function") 
p.add_argument('function', metavar='FUNCTION', type=str, help="Function specified as '[package.]module.[class.]function'") p.add_argument('--json', action='store_true', help="Produce json that pyannotate can use to apply a suggestion") p.add_argument('--no-errors', action='store_true', help="Only produce suggestions that cause no errors") p.add_argument('--no-any', action='store_true', help="Only produce suggestions that don't contain Any") p.add_argument('--flex-any', type=float, help="Allow anys in types if they go above a certain score (scores are from 0-1)") p.add_argument('--try-text', action='store_true', help="Try using unicode wherever str is inferred") p.add_argument('--callsites', action='store_true', help="Find callsites instead of suggesting a type") p.add_argument('--use-fixme', metavar='NAME', type=str, help="A dummy name to use instead of Any for types that can't be inferred") p.add_argument('--max-guesses', type=int, help="Set the maximum number of types to try for a function (default 64)") hang_parser = p = subparsers.add_parser('hang', help="Hang for 100 seconds") daemon_parser = p = subparsers.add_parser('daemon', help="Run daemon in foreground") p.add_argument('--timeout', metavar='TIMEOUT', type=int, help="Server shutdown timeout (in seconds)") p.add_argument('flags', metavar='FLAG', nargs='*', type=str, help="Regular mypy flags (precede with --)") p.add_argument('--options-data', help=argparse.SUPPRESS) help_parser = p = subparsers.add_parser('help') del p class BadStatus(Exception): """Exception raised when there is something wrong with the status file. 
def fail(msg: str) -> None:
    """Write an error message to stderr and exit with status 2."""
    sys.stderr.write(msg + '\n')
    sys.exit(2)
pass start_server(args, allow_sources) def start_server(args: argparse.Namespace, allow_sources: bool = False) -> None: """Start the server from command arguments and wait for it.""" # Lazy import so this import doesn't slow down other commands. from mypy.dmypy_server import daemonize, process_start_options start_options = process_start_options(args.flags, allow_sources) if daemonize(start_options, args.status_file, timeout=args.timeout, log_file=args.log_file): sys.exit(2) wait_for_server(args.status_file) def wait_for_server(status_file: str, timeout: float = 5.0) -> None: """Wait until the server is up. Exit if it doesn't happen within the timeout. """ endtime = time.time() + timeout while time.time() < endtime: try: data = read_status(status_file) except BadStatus: # If the file isn't there yet, retry later. time.sleep(0.1) continue # If the file's content is bogus or the process is dead, fail. check_status(data) print("Daemon started") return fail("Timed out waiting for daemon to start") @action(run_parser) def do_run(args: argparse.Namespace) -> None: """Do a check, starting (or restarting) the daemon as necessary Restarts the daemon if the running daemon reports that it is required (due to a configuration change, for example). Setting flags is a bit awkward; you have to use e.g.: dmypy run -- --strict a.py b.py ... since we don't want to duplicate mypy's huge list of flags. (The -- is only necessary if flags are specified.) """ if not is_running(args.status_file): # Bad or missing status file or dead process; good to start. 
start_server(args, allow_sources=True) t0 = time.time() response = request(args.status_file, 'run', version=__version__, args=args.flags) # If the daemon signals that a restart is necessary, do it if 'restart' in response: print('Restarting: {}'.format(response['restart'])) restart_server(args, allow_sources=True) response = request(args.status_file, 'run', version=__version__, args=args.flags) t1 = time.time() response['roundtrip_time'] = t1 - t0 check_output(response, args.verbose, args.junit_xml, args.perf_stats_file) @action(status_parser) def do_status(args: argparse.Namespace) -> None: """Print daemon status. This verifies that it is responsive to requests. """ status = read_status(args.status_file) if args.verbose: show_stats(status) # Both check_status() and request() may raise BadStatus, # which will be handled by main(). check_status(status) response = request(args.status_file, 'status', fswatcher_dump_file=args.fswatcher_dump_file, timeout=5) if args.verbose or 'error' in response: show_stats(response) if 'error' in response: fail("Daemon is stuck; consider %s kill" % sys.argv[0]) print("Daemon is up and running") @action(stop_parser) def do_stop(args: argparse.Namespace) -> None: """Stop daemon via a 'stop' request.""" # May raise BadStatus, which will be handled by main(). 
    response = request(args.status_file, 'stop', timeout=5)
    if 'error' in response:
        show_stats(response)
        fail("Daemon is stuck; consider %s kill" % sys.argv[0])
    else:
        print("Daemon stopped")


@action(kill_parser)
def do_kill(args: argparse.Namespace) -> None:
    """Kill daemon process with SIGKILL."""
    pid, _ = get_status(args.status_file)
    try:
        kill(pid)
    except OSError as err:
        fail(str(err))
    else:
        print("Daemon killed")


@action(check_parser)
def do_check(args: argparse.Namespace) -> None:
    """Ask the daemon to check a list of files."""
    # Measure the round trip so it can be reported via --verbose/--junit-xml.
    t0 = time.time()
    response = request(args.status_file, 'check', files=args.files)
    t1 = time.time()
    response['roundtrip_time'] = t1 - t0
    check_output(response, args.verbose, args.junit_xml, args.perf_stats_file)


@action(recheck_parser)
def do_recheck(args: argparse.Namespace) -> None:
    """Ask the daemon to recheck the previous list of files, with optional modifications.

    If at least one of --remove or --update is given, the server will
    update the list of files to check accordingly and assume that any other files
    are unchanged.  If none of these flags are given, the server will call stat()
    on each file last checked to determine its status.

    Files given in --update ought to exist.  Files given in --remove need not exist;
    if they don't they will be ignored.
    The lists may be empty but oughtn't contain duplicates or overlap.

    NOTE: The list of files is lost when the daemon is restarted.
    """
    t0 = time.time()
    # Only send remove/update when at least one was given; the server treats
    # a request without them as "stat() everything checked last time".
    if args.remove is not None or args.update is not None:
        response = request(args.status_file, 'recheck',
                           remove=args.remove, update=args.update)
    else:
        response = request(args.status_file, 'recheck')
    t1 = time.time()
    response['roundtrip_time'] = t1 - t0
    check_output(response, args.verbose, args.junit_xml, args.perf_stats_file)


@action(suggest_parser)
def do_suggest(args: argparse.Namespace) -> None:
    """Ask the daemon for a suggested signature.

    This just prints whatever the daemon reports as output.
    For now it may be closer to a list of call sites.
    """
    response = request(args.status_file, 'suggest', function=args.function,
                       json=args.json, callsites=args.callsites,
                       no_errors=args.no_errors, no_any=args.no_any,
                       flex_any=args.flex_any, try_text=args.try_text,
                       use_fixme=args.use_fixme,
                       max_guesses=args.max_guesses)
    check_output(response, verbose=False, junit_xml=None, perf_stats_file=None)


def check_output(response: Dict[str, Any], verbose: bool,
                 junit_xml: Optional[str],
                 perf_stats_file: Optional[str]) -> None:
    """Print the output from a check or recheck command.

    Call sys.exit() unless the status code is zero.
    """
    if 'error' in response:
        fail(response['error'])
    try:
        out, err, status_code = response['out'], response['err'], response['status']
    except KeyError:
        # NOTE(review): fail() presumably exits the process (defined earlier in
        # this file), so out/err/status_code are always bound below -- confirm.
        fail("Response: %s" % str(response))
    sys.stdout.write(out)
    sys.stderr.write(err)
    if verbose:
        show_stats(response)
    if junit_xml:
        # Lazy import so this import doesn't slow things down when not writing junit
        from mypy.util import write_junit_xml
        messages = (out + err).splitlines()
        write_junit_xml(response['roundtrip_time'], bool(err), messages, junit_xml,
                        response['python_version'], response['platform'])
    if perf_stats_file:
        telemetry = response.get('stats', {})
        with open(perf_stats_file, 'w') as f:
            json.dump(telemetry, f)
    if status_code:
        sys.exit(status_code)


def show_stats(response: Mapping[str, object]) -> None:
    """Pretty-print the daemon's response as aligned key/value lines.

    The (potentially long) 'out'/'err' entries are truncated and have
    newlines escaped so each stat fits on a single display line.
    """
    for key, value in sorted(response.items()):
        if key not in ('out', 'err'):
            print("%-24s: %10s" % (key,
                                   "%.3f" % value if isinstance(value, float) else value))
        else:
            value = str(value).replace('\n', '\\n')
            if len(value) > 50:
                value = value[:40] + ' ...'
            print("%-24s: %s" % (key, value))


@action(hang_parser)
def do_hang(args: argparse.Namespace) -> None:
    """Hang for 100 seconds, as a debug hack."""
    print(request(args.status_file, 'hang', timeout=1))


@action(daemon_parser)
def do_daemon(args: argparse.Namespace) -> None:
    """Serve requests in the foreground."""
    # Lazy import so this import doesn't slow down other commands.
    from mypy.dmypy_server import Server, process_start_options
    if args.options_data:
        from mypy.options import Options
        # Options were pickled and base64-encoded by the parent (see the
        # Windows daemonize() in dmypy_server.py).  pickle.loads on local,
        # self-produced data only -- not untrusted input.
        options_dict, timeout, log_file = pickle.loads(base64.b64decode(args.options_data))
        options_obj = Options()
        options = options_obj.apply_changes(options_dict)
        if log_file:
            # Redirect both Python-level and fd-level stdout/stderr to the log.
            sys.stdout = sys.stderr = open(log_file, 'a', buffering=1)
            fd = sys.stdout.fileno()
            os.dup2(fd, 2)
            os.dup2(fd, 1)
    else:
        options = process_start_options(args.flags, allow_sources=False)
        timeout = args.timeout
    Server(options, args.status_file, timeout=timeout).serve()


@action(help_parser)
def do_help(args: argparse.Namespace) -> None:
    """Print full help (same as dmypy --help)."""
    parser.print_help()


# Client-side infrastructure.


def request(status_file: str, command: str, *, timeout: Optional[int] = None,
            **kwds: object) -> Dict[str, Any]:
    """Send a request to the daemon.

    Return the JSON dict with the response.

    Raise BadStatus if there is something wrong with the status file
    or if the process whose pid is in the status file has died.

    Return {'error': <message>} if an IPC operation or receive()
    raised OSError.  This covers cases such as connection refused or
    closed prematurely as well as invalid JSON received.
    """
    response = {}  # type: Dict[str, str]
    args = dict(kwds)
    args['command'] = command
    # Tell the server whether this request was initiated from a human-facing terminal,
    # so that it can format the type checking output accordingly.
    args['is_tty'] = sys.stdout.isatty() or int(os.getenv('MYPY_FORCE_COLOR', '0')) > 0
    args['terminal_width'] = (int(os.getenv('MYPY_FORCE_TERMINAL_WIDTH', '0'))
                              or get_terminal_width())
    bdata = json.dumps(args).encode('utf8')
    # The status file holds the IPC connection name to reach the daemon.
    _, name = get_status(status_file)
    try:
        with IPCClient(name, timeout) as client:
            client.write(bdata)
            response = receive(client)
    except (OSError, IPCException) as err:
        return {'error': str(err)}
    # TODO: Other errors, e.g. ValueError, UnicodeError
    else:
        return response


def get_status(status_file: str) -> Tuple[int, str]:
    """Read status file and check if the process is alive.

    Return (pid, connection_name) on success.

    Raise BadStatus if something's wrong.
    """
    data = read_status(status_file)
    return check_status(data)


def check_status(data: Dict[str, Any]) -> Tuple[int, str]:
    """Check if the process is alive.

    Return (pid, connection_name) on success.

    Raise BadStatus if something's wrong.
    """
    if 'pid' not in data:
        raise BadStatus("Invalid status file (no pid field)")
    pid = data['pid']
    if not isinstance(pid, int):
        raise BadStatus("pid field is not an int")
    if not alive(pid):
        raise BadStatus("Daemon has died")
    if 'connection_name' not in data:
        raise BadStatus("Invalid status file (no connection_name field)")
    connection_name = data['connection_name']
    if not isinstance(connection_name, str):
        raise BadStatus("connection_name field is not a string")
    return pid, connection_name


def read_status(status_file: str) -> Dict[str, object]:
    """Read status file.

    Raise BadStatus if the status file doesn't exist or contains
    invalid JSON or the JSON is not a dict.
    """
    if not os.path.isfile(status_file):
        raise BadStatus("No status file found")
    with open(status_file) as f:
        try:
            data = json.load(f)
        except Exception:
            raise BadStatus("Malformed status file (not JSON)")
    if not isinstance(data, dict):
        raise BadStatus("Invalid status file (not a dict)")
    return data


def is_running(status_file: str) -> bool:
    """Check if the server is running cleanly"""
    try:
        get_status(status_file)
    except BadStatus:
        return False
    return True


# Run main().
def console_entry() -> None:
    main(sys.argv[1:])
mypy-0.761/mypy/dmypy_os.py0000644€tŠÔÚ€2›s®0000000227013576752246022134 0ustar jukkaDROPBOX\Domain Users00000000000000import sys

from typing import Any, Callable

if sys.platform == 'win32':
    import ctypes
    from ctypes.wintypes import DWORD, HANDLE
    import subprocess

    PROCESS_QUERY_LIMITED_INFORMATION = ctypes.c_ulong(0x1000)

    kernel32 = ctypes.windll.kernel32
    OpenProcess = kernel32.OpenProcess  # type: Callable[[DWORD, int, int], HANDLE]
    GetExitCodeProcess = kernel32.GetExitCodeProcess  # type: Callable[[HANDLE, Any], int]
else:
    import os
    import signal


def alive(pid: int) -> bool:
    """Is the process alive?"""
    if sys.platform == 'win32':
        # why can't anything be easy...
        status = DWORD()
        handle = OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION,
                             0,
                             pid)
        GetExitCodeProcess(handle, ctypes.byref(status))
        return status.value == 259  # STILL_ACTIVE
    else:
        # On POSIX, signal 0 performs error checking without sending a signal;
        # OSError means there is no such process (or we lack permission).
        try:
            os.kill(pid, 0)
        except OSError:
            return False
        return True


def kill(pid: int) -> None:
    """Kill the process."""
    if sys.platform == 'win32':
        # /f forces termination, /t kills the whole process tree.
        subprocess.check_output("taskkill /pid {pid} /f /t".format(pid=pid))
    else:
        os.kill(pid, signal.SIGKILL)
mypy-0.761/mypy/dmypy_server.py0000644€tŠÔÚ€2›s®0000006434713576752246023026 0ustar jukkaDROPBOX\Domain Users00000000000000"""Server for mypy daemon mode.

This implements a daemon process which keeps useful state in memory
to enable fine-grained incremental reprocessing of changes.
"""

import argparse
import base64
import io
import json
import os
import pickle
import subprocess
import sys
import time
import traceback
from contextlib import redirect_stderr, redirect_stdout

from typing import AbstractSet, Any, Callable, Dict, List, Optional, Sequence, Tuple
from typing_extensions import Final

import mypy.build
import mypy.errors
import mypy.main
from mypy.find_sources import create_source_list, InvalidSourceList
from mypy.server.update import FineGrainedBuildManager
from mypy.dmypy_util import receive
from mypy.ipc import IPCServer
from mypy.fscache import FileSystemCache
from mypy.fswatcher import FileSystemWatcher, FileData
from mypy.modulefinder import BuildSource, compute_search_paths
from mypy.options import Options
from mypy.suggestions import SuggestionFailure, SuggestionEngine
from mypy.typestate import reset_global_state
from mypy.version import __version__
from mypy.util import FancyFormatter, count_stats

MEM_PROFILE = False  # type: Final  # If True, dump memory profile after initialization

if sys.platform == 'win32':
    from subprocess import STARTUPINFO

    def daemonize(options: Options,
                  status_file: str,
                  timeout: Optional[int] = None,
                  log_file: Optional[str] = None) -> int:
        """Create the daemon process via "dmypy daemon" and pass options via command line

        When creating the daemon grandchild, we create it in a new console, which is
        started hidden. We cannot use DETACHED_PROCESS since it will cause console windows
        to pop up when starting. See
        https://github.com/python/cpython/pull/4150#issuecomment-340215696
        for more on why we can't have nice things.

        It also pickles the options to be unpickled by mypy.
        """
        command = [sys.executable, '-m', 'mypy.dmypy', '--status-file', status_file, 'daemon']
        pickeled_options = pickle.dumps((options.snapshot(), timeout, log_file))
        command.append('--options-data="{}"'.format(base64.b64encode(pickeled_options).decode()))
        info = STARTUPINFO()
        info.dwFlags = 0x1  # STARTF_USESHOWWINDOW aka use wShowWindow's value
        info.wShowWindow = 0  # SW_HIDE aka make the window invisible
        try:
            subprocess.Popen(command,
                             creationflags=0x10,  # CREATE_NEW_CONSOLE
                             startupinfo=info)
            return 0
        except subprocess.CalledProcessError as e:
            # NOTE(review): Popen itself never raises CalledProcessError (only
            # the check_* helpers do), so this handler looks unreachable; an
            # OSError (e.g. missing executable) would propagate -- confirm.
            return e.returncode

else:
    def _daemonize_cb(func: Callable[[], None], log_file: Optional[str] = None) -> int:
        """Arrange to call func() in a grandchild of the current process.

        Return 0 for success, exit status for failure, negative
        if subprocess killed by signal.
        """
        # See https://stackoverflow.com/questions/473620/how-do-you-create-a-daemon-in-python
        sys.stdout.flush()
        sys.stderr.flush()
        pid = os.fork()
        if pid:
            # Parent process: wait for child in case things go bad there.
            npid, sts = os.waitpid(pid, 0)
            sig = sts & 0xff
            if sig:
                print("Child killed by signal", sig)
                return -sig
            sts = sts >> 8
            if sts:
                print("Child exit status", sts)
            return sts
        # Child process: do a bunch of UNIX stuff and then fork a grandchild.
        try:
            os.setsid()  # Detach controlling terminal
            os.umask(0o27)
            devnull = os.open('/dev/null', os.O_RDWR)
            os.dup2(devnull, 0)
            os.dup2(devnull, 1)
            os.dup2(devnull, 2)
            os.close(devnull)
            pid = os.fork()
            if pid:
                # Child is done, exit to parent.
                os._exit(0)
            # Grandchild: run the server.
            if log_file:
                sys.stdout = sys.stderr = open(log_file, 'a', buffering=1)
                fd = sys.stdout.fileno()
                os.dup2(fd, 2)
                os.dup2(fd, 1)
            func()
        finally:
            # Make sure we never get back into the caller.
            os._exit(1)

    def daemonize(options: Options,
                  status_file: str,
                  timeout: Optional[int] = None,
                  log_file: Optional[str] = None) -> int:
        """Run the mypy daemon in a grandchild of the current process

        Return 0 for success, exit status for failure, negative
        if subprocess killed by signal.
        """
        return _daemonize_cb(Server(options, status_file, timeout).serve, log_file)

# Server code.

CONNECTION_NAME = 'dmypy'  # type: Final


def process_start_options(flags: List[str], allow_sources: bool) -> Options:
    """Parse and validate the daemon's start/restart flags into an Options object.

    Exits (via sys.exit) with a message for any option that is
    incompatible with daemon mode.
    """
    sources, options = mypy.main.process_options(['-i'] + flags,
                                                 require_targets=False,
                                                 server_options=True)
    if sources and not allow_sources:
        sys.exit("dmypy: start/restart does not accept sources")
    if options.report_dirs:
        sys.exit("dmypy: start/restart cannot generate reports")
    if options.junit_xml:
        sys.exit("dmypy: start/restart does not support --junit-xml; "
                 "pass it to check/recheck instead")
    if not options.incremental:
        sys.exit("dmypy: start/restart should not disable incremental mode")
    # Our file change tracking can't yet handle changes to files that aren't
    # specified in the sources list.
    if options.follow_imports not in ('skip', 'error'):
        sys.exit("dmypy: follow-imports must be 'skip' or 'error'")
    return options


# (module, path) pair and collections thereof, as returned by change tracking.
ModulePathPair = Tuple[str, str]
ModulePathPairs = List[ModulePathPair]
ChangesAndRemovals = Tuple[ModulePathPairs, ModulePathPairs]


class Server:

    # NOTE: the instance is constructed in the parent process but
    # serve() is called in the grandchild (by daemonize()).
def __init__(self, options: Options, status_file: str, timeout: Optional[int] = None) -> None: """Initialize the server with the desired mypy flags.""" self.options = options # Snapshot the options info before we muck with it, to detect changes self.options_snapshot = options.snapshot() self.timeout = timeout self.fine_grained_manager = None # type: Optional[FineGrainedBuildManager] if os.path.isfile(status_file): os.unlink(status_file) self.fscache = FileSystemCache() options.raise_exceptions = True options.incremental = True options.fine_grained_incremental = True options.show_traceback = True if options.use_fine_grained_cache: # Using fine_grained_cache implies generating and caring # about the fine grained cache options.cache_fine_grained = True else: options.cache_dir = os.devnull # Fine-grained incremental doesn't support general partial types # (details in https://github.com/python/mypy/issues/4492) options.local_partial_types = True self.status_file = status_file # Since the object is created in the parent process we can check # the output terminal options here. 
self.formatter = FancyFormatter(sys.stdout, sys.stderr, options.show_error_codes) def _response_metadata(self) -> Dict[str, str]: py_version = '{}_{}'.format(self.options.python_version[0], self.options.python_version[1]) return { 'platform': self.options.platform, 'python_version': py_version, } def serve(self) -> None: """Serve requests, synchronously (no thread or fork).""" command = None try: server = IPCServer(CONNECTION_NAME, self.timeout) with open(self.status_file, 'w') as f: json.dump({'pid': os.getpid(), 'connection_name': server.connection_name}, f) f.write('\n') # I like my JSON with a trailing newline while True: with server: data = receive(server) resp = {} # type: Dict[str, Any] if 'command' not in data: resp = {'error': "No command found in request"} else: command = data['command'] if not isinstance(command, str): resp = {'error': "Command is not a string"} else: command = data.pop('command') try: resp = self.run_command(command, data) except Exception: # If we are crashing, report the crash to the client tb = traceback.format_exception(*sys.exc_info()) resp = {'error': "Daemon crashed!\n" + "".join(tb)} resp.update(self._response_metadata()) server.write(json.dumps(resp).encode('utf8')) raise try: resp.update(self._response_metadata()) server.write(json.dumps(resp).encode('utf8')) except OSError: pass # Maybe the client hung up if command == 'stop': reset_global_state() sys.exit(0) finally: # If the final command is something other than a clean # stop, remove the status file. (We can't just # simplify the logic and always remove the file, since # that could cause us to remove a future server's # status file.) 
if command != 'stop': os.unlink(self.status_file) try: server.cleanup() # try to remove the socket dir on Linux except OSError: pass exc_info = sys.exc_info() if exc_info[0] and exc_info[0] is not SystemExit: traceback.print_exception(*exc_info) def run_command(self, command: str, data: Dict[str, object]) -> Dict[str, object]: """Run a specific command from the registry.""" key = 'cmd_' + command method = getattr(self.__class__, key, None) if method is None: return {'error': "Unrecognized command '%s'" % command} else: if command not in {'check', 'recheck', 'run'}: # Only the above commands use some error formatting. del data['is_tty'] del data['terminal_width'] return method(self, **data) # Command functions (run in the server via RPC). def cmd_status(self, fswatcher_dump_file: Optional[str] = None) -> Dict[str, object]: """Return daemon status.""" res = {} # type: Dict[str, object] res.update(get_meminfo()) if fswatcher_dump_file: data = self.fswatcher.dump_file_data() if hasattr(self, 'fswatcher') else {} # Using .dumps and then writing was noticably faster than using dump s = json.dumps(data) with open(fswatcher_dump_file, 'w') as f: f.write(s) return res def cmd_stop(self) -> Dict[str, object]: """Stop daemon.""" # We need to remove the status file *before* we complete the # RPC. Otherwise a race condition exists where a subsequent # command can see a status file from a dying server and think # it is a live one. 
os.unlink(self.status_file) return {} def cmd_run(self, version: str, args: Sequence[str], is_tty: bool, terminal_width: int) -> Dict[str, object]: """Check a list of files, triggering a restart if needed.""" try: # Process options can exit on improper arguments, so we need to catch that and # capture stderr so the client can report it stderr = io.StringIO() stdout = io.StringIO() with redirect_stderr(stderr): with redirect_stdout(stdout): sources, options = mypy.main.process_options( ['-i'] + list(args), require_targets=True, server_options=True, fscache=self.fscache, program='mypy-daemon', header=argparse.SUPPRESS) # Signal that we need to restart if the options have changed if self.options_snapshot != options.snapshot(): return {'restart': 'configuration changed'} if __version__ != version: return {'restart': 'mypy version changed'} if self.fine_grained_manager: manager = self.fine_grained_manager.manager start_plugins_snapshot = manager.plugins_snapshot _, current_plugins_snapshot = mypy.build.load_plugins( options, manager.errors, sys.stdout, extra_plugins=() ) if current_plugins_snapshot != start_plugins_snapshot: return {'restart': 'plugins changed'} except InvalidSourceList as err: return {'out': '', 'err': str(err), 'status': 2} except SystemExit as e: return {'out': stdout.getvalue(), 'err': stderr.getvalue(), 'status': e.code} return self.check(sources, is_tty, terminal_width) def cmd_check(self, files: Sequence[str], is_tty: bool, terminal_width: int) -> Dict[str, object]: """Check a list of files.""" try: sources = create_source_list(files, self.options, self.fscache) except InvalidSourceList as err: return {'out': '', 'err': str(err), 'status': 2} return self.check(sources, is_tty, terminal_width) def cmd_recheck(self, is_tty: bool, terminal_width: int, remove: Optional[List[str]] = None, update: Optional[List[str]] = None) -> Dict[str, object]: """Check the same list of files we checked most recently. 
If remove/update is given, they modify the previous list; if all are None, stat() is called for each file in the previous list. """ t0 = time.time() if not self.fine_grained_manager: return {'error': "Command 'recheck' is only valid after a 'check' command"} sources = self.previous_sources if remove: removals = set(remove) sources = [s for s in sources if s.path and s.path not in removals] if update: known = {s.path for s in sources if s.path} added = [p for p in update if p not in known] try: added_sources = create_source_list(added, self.options, self.fscache) except InvalidSourceList as err: return {'out': '', 'err': str(err), 'status': 2} sources = sources + added_sources # Make a copy! t1 = time.time() manager = self.fine_grained_manager.manager manager.log("fine-grained increment: cmd_recheck: {:.3f}s".format(t1 - t0)) res = self.fine_grained_increment(sources, is_tty, terminal_width, remove, update) self.fscache.flush() self.update_stats(res) return res def check(self, sources: List[BuildSource], is_tty: bool, terminal_width: int) -> Dict[str, Any]: """Check using fine-grained incremental mode. If is_tty is True format the output nicely with colors and summary line (unless disabled in self.options). Also pass the terminal_width to formatter. 
""" if not self.fine_grained_manager: res = self.initialize_fine_grained(sources, is_tty, terminal_width) else: res = self.fine_grained_increment(sources, is_tty, terminal_width) self.fscache.flush() self.update_stats(res) return res def update_stats(self, res: Dict[str, Any]) -> None: if self.fine_grained_manager: manager = self.fine_grained_manager.manager manager.dump_stats() res['stats'] = manager.stats manager.stats = {} def initialize_fine_grained(self, sources: List[BuildSource], is_tty: bool, terminal_width: int) -> Dict[str, Any]: self.fswatcher = FileSystemWatcher(self.fscache) t0 = time.time() self.update_sources(sources) t1 = time.time() try: result = mypy.build.build(sources=sources, options=self.options, fscache=self.fscache) except mypy.errors.CompileError as e: output = ''.join(s + '\n' for s in e.messages) if e.use_stdout: out, err = output, '' else: out, err = '', output return {'out': out, 'err': err, 'status': 2} messages = result.errors self.fine_grained_manager = FineGrainedBuildManager(result) self.previous_sources = sources # If we are using the fine-grained cache, build hasn't actually done # the typechecking on the updated files yet. # Run a fine-grained update starting from the cached data if result.used_cache: t2 = time.time() # Pull times and hashes out of the saved_cache and stick them into # the fswatcher, so we pick up the changes. 
for state in self.fine_grained_manager.graph.values(): meta = state.meta if meta is None: continue assert state.path is not None self.fswatcher.set_file_data( state.path, FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash)) changed, removed = self.find_changed(sources) # Find anything that has had its dependency list change for state in self.fine_grained_manager.graph.values(): if not state.is_fresh(): assert state.path is not None changed.append((state.id, state.path)) t3 = time.time() # Run an update messages = self.fine_grained_manager.update(changed, removed) t4 = time.time() self.fine_grained_manager.manager.add_stats( update_sources_time=t1 - t0, build_time=t2 - t1, find_changes_time=t3 - t2, fg_update_time=t4 - t3, files_changed=len(removed) + len(changed)) else: # Stores the initial state of sources as a side effect. self.fswatcher.find_changed() if MEM_PROFILE: from mypy.memprofile import print_memory_profile print_memory_profile(run_gc=False) status = 1 if messages else 0 messages = self.pretty_messages(messages, len(sources), is_tty, terminal_width) return {'out': ''.join(s + '\n' for s in messages), 'err': '', 'status': status} def fine_grained_increment(self, sources: List[BuildSource], is_tty: bool, terminal_width: int, remove: Optional[List[str]] = None, update: Optional[List[str]] = None, ) -> Dict[str, Any]: assert self.fine_grained_manager is not None manager = self.fine_grained_manager.manager t0 = time.time() if remove is None and update is None: # Use the fswatcher to determine which files were changed # (updated or added) or removed. self.update_sources(sources) changed, removed = self.find_changed(sources) else: # Use the remove/update lists to update fswatcher. # This avoids calling stat() for unchanged files. 
changed, removed = self.update_changed(sources, remove or [], update or []) manager.search_paths = compute_search_paths(sources, manager.options, manager.data_dir) t1 = time.time() manager.log("fine-grained increment: find_changed: {:.3f}s".format(t1 - t0)) messages = self.fine_grained_manager.update(changed, removed) t2 = time.time() manager.log("fine-grained increment: update: {:.3f}s".format(t2 - t1)) manager.add_stats( find_changes_time=t1 - t0, fg_update_time=t2 - t1, files_changed=len(removed) + len(changed)) status = 1 if messages else 0 self.previous_sources = sources messages = self.pretty_messages(messages, len(sources), is_tty, terminal_width) return {'out': ''.join(s + '\n' for s in messages), 'err': '', 'status': status} def pretty_messages(self, messages: List[str], n_sources: int, is_tty: bool = False, terminal_width: Optional[int] = None) -> List[str]: use_color = self.options.color_output and is_tty fit_width = self.options.pretty and is_tty if fit_width: messages = self.formatter.fit_in_terminal(messages, fixed_terminal_width=terminal_width) if self.options.error_summary: summary = None # type: Optional[str] if messages: n_errors, n_files = count_stats(messages) if n_errors: summary = self.formatter.format_error(n_errors, n_files, n_sources, use_color) else: summary = self.formatter.format_success(n_sources, use_color) if summary: # Create new list to avoid appending multiple summaries on successive runs. 
messages = messages + [summary] if use_color: messages = [self.formatter.colorize(m) for m in messages] return messages def update_sources(self, sources: List[BuildSource]) -> None: paths = [source.path for source in sources if source.path is not None] self.fswatcher.add_watched_paths(paths) def update_changed(self, sources: List[BuildSource], remove: List[str], update: List[str], ) -> ChangesAndRemovals: changed_paths = self.fswatcher.update_changed(remove, update) return self._find_changed(sources, changed_paths) def find_changed(self, sources: List[BuildSource]) -> ChangesAndRemovals: changed_paths = self.fswatcher.find_changed() return self._find_changed(sources, changed_paths) def _find_changed(self, sources: List[BuildSource], changed_paths: AbstractSet[str]) -> ChangesAndRemovals: # Find anything that has been added or modified changed = [(source.module, source.path) for source in sources if source.path and source.path in changed_paths] # Now find anything that has been removed from the build modules = {source.module for source in sources} omitted = [source for source in self.previous_sources if source.module not in modules] removed = [] for source in omitted: path = source.path assert path removed.append((source.module, path)) # Find anything that has had its module path change because of added or removed __init__s last = {s.path: s.module for s in self.previous_sources} for s in sources: assert s.path if s.path in last and last[s.path] != s.module: # Mark it as removed from its old name and changed at its new name removed.append((last[s.path], s.path)) changed.append((s.module, s.path)) return changed, removed def cmd_suggest(self, function: str, callsites: bool, **kwargs: Any) -> Dict[str, object]: """Suggest a signature for a function.""" if not self.fine_grained_manager: return { 'error': "Command 'suggest' is only valid after a 'check' command" " (that produces no parse errors)"} engine = SuggestionEngine(self.fine_grained_manager, **kwargs) try: if 
callsites: out = engine.suggest_callsites(function) else: out = engine.suggest(function) except SuggestionFailure as err: return {'error': str(err)} else: if not out: out = "No suggestions\n" elif not out.endswith("\n"): out += "\n" return {'out': out, 'err': "", 'status': 0} finally: self.fscache.flush() def cmd_hang(self) -> Dict[str, object]: """Hang for 100 seconds, as a debug hack.""" time.sleep(100) return {} # Misc utilities. MiB = 2**20 # type: Final def get_meminfo() -> Dict[str, Any]: res = {} # type: Dict[str, Any] try: import psutil # type: ignore # It's not in typeshed yet except ImportError: res['memory_psutil_missing'] = ( 'psutil not found, run pip install mypy[dmypy] ' 'to install the needed components for dmypy' ) else: process = psutil.Process() meminfo = process.memory_info() res['memory_rss_mib'] = meminfo.rss / MiB res['memory_vms_mib'] = meminfo.vms / MiB if sys.platform == 'win32': res['memory_maxrss_mib'] = meminfo.peak_wset / MiB else: # See https://stackoverflow.com/questions/938733/total-memory-used-by-python-process import resource # Since it doesn't exist on Windows. rusage = resource.getrusage(resource.RUSAGE_SELF) if sys.platform == 'darwin': factor = 1 else: factor = 1024 # Linux res['memory_maxrss_mib'] = rusage.ru_maxrss * factor / MiB return res mypy-0.761/mypy/dmypy_util.py0000644€tŠÔÚ€2›s®0000000151313576752246022467 0ustar jukkaDROPBOX\Domain Users00000000000000"""Shared code between dmypy.py and dmypy_server.py. This should be pretty lightweight and not depend on other mypy code (other than ipc). """ import json from typing import Any from typing_extensions import Final from mypy.ipc import IPCBase DEFAULT_STATUS_FILE = '.dmypy.json' # type: Final def receive(connection: IPCBase) -> Any: """Receive JSON data from a connection until EOF. Raise OSError if the data received is not valid JSON or if it is not a dict. 
""" bdata = connection.read() if not bdata: raise OSError("No data received") try: data = json.loads(bdata.decode('utf8')) except Exception: raise OSError("Data received is not valid JSON") if not isinstance(data, dict): raise OSError("Data received is not a dict (%s)" % str(type(data))) return data mypy-0.761/mypy/erasetype.py0000644€tŠÔÚ€2›s®0000001271013576752246022272 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Container, Callable from mypy.types import ( Type, TypeVisitor, UnboundType, AnyType, NoneType, TypeVarId, Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded, ErasedType, PartialType, DeletedType, TypeTranslator, UninhabitedType, TypeType, TypeOfAny, LiteralType, ProperType, get_proper_type, TypeAliasType ) from mypy.nodes import ARG_STAR, ARG_STAR2 def erase_type(typ: Type) -> ProperType: """Erase any type variables from a type. Also replace tuple types with the corresponding concrete types. Examples: A -> A B[X] -> B[Any] Tuple[A, B] -> tuple Callable[[A1, A2, ...], R] -> Callable[..., Any] Type[X] -> Type[Any] """ typ = get_proper_type(typ) return typ.accept(EraseTypeVisitor()) class EraseTypeVisitor(TypeVisitor[ProperType]): def visit_unbound_type(self, t: UnboundType) -> ProperType: # TODO: replace with an assert after UnboundType can't leak from semantic analysis. return AnyType(TypeOfAny.from_error) def visit_any(self, t: AnyType) -> ProperType: return t def visit_none_type(self, t: NoneType) -> ProperType: return t def visit_uninhabited_type(self, t: UninhabitedType) -> ProperType: return t def visit_erased_type(self, t: ErasedType) -> ProperType: # Should not get here. raise RuntimeError() def visit_partial_type(self, t: PartialType) -> ProperType: # Should not get here. 
raise RuntimeError() def visit_deleted_type(self, t: DeletedType) -> ProperType: return t def visit_instance(self, t: Instance) -> ProperType: return Instance(t.type, [AnyType(TypeOfAny.special_form)] * len(t.args), t.line) def visit_type_var(self, t: TypeVarType) -> ProperType: return AnyType(TypeOfAny.special_form) def visit_callable_type(self, t: CallableType) -> ProperType: # We must preserve the fallback type for overload resolution to work. any_type = AnyType(TypeOfAny.special_form) return CallableType( arg_types=[any_type, any_type], arg_kinds=[ARG_STAR, ARG_STAR2], arg_names=[None, None], ret_type=any_type, fallback=t.fallback, is_ellipsis_args=True, implicit=True, ) def visit_overloaded(self, t: Overloaded) -> ProperType: return t.fallback.accept(self) def visit_tuple_type(self, t: TupleType) -> ProperType: return t.partial_fallback.accept(self) def visit_typeddict_type(self, t: TypedDictType) -> ProperType: return t.fallback.accept(self) def visit_literal_type(self, t: LiteralType) -> ProperType: # The fallback for literal types should always be either # something like int or str, or an enum class -- types that # don't contain any TypeVars. So there's no need to visit it. return t def visit_union_type(self, t: UnionType) -> ProperType: erased_items = [erase_type(item) for item in t.items] from mypy.typeops import make_simplified_union # asdf return make_simplified_union(erased_items) def visit_type_type(self, t: TypeType) -> ProperType: return TypeType.make_normalized(t.item.accept(self), line=t.line) def visit_type_alias_type(self, t: TypeAliasType) -> ProperType: raise RuntimeError("Type aliases should be expanded before accepting this visitor") def erase_typevars(t: Type, ids_to_erase: Optional[Container[TypeVarId]] = None) -> Type: """Replace all type variables in a type with any, or just the ones in the provided collection. 
""" def erase_id(id: TypeVarId) -> bool: if ids_to_erase is None: return True return id in ids_to_erase return t.accept(TypeVarEraser(erase_id, AnyType(TypeOfAny.special_form))) def replace_meta_vars(t: Type, target_type: Type) -> Type: """Replace unification variables in a type with the target type.""" return t.accept(TypeVarEraser(lambda id: id.is_meta_var(), target_type)) class TypeVarEraser(TypeTranslator): """Implementation of type erasure""" def __init__(self, erase_id: Callable[[TypeVarId], bool], replacement: Type) -> None: self.erase_id = erase_id self.replacement = replacement def visit_type_var(self, t: TypeVarType) -> Type: if self.erase_id(t.id): return self.replacement return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: # Type alias target can't contain bound type variables, so # it is safe to just erase the arguments. return t.copy_modified(args=[a.accept(self) for a in t.args]) def remove_instance_last_known_values(t: Type) -> Type: return t.accept(LastKnownValueEraser()) class LastKnownValueEraser(TypeTranslator): """Removes the Literal[...] type that may be associated with any Instance types.""" def visit_instance(self, t: Instance) -> Type: if not t.last_known_value and not t.args: return t new_t = t.copy_modified( args=[a.accept(self) for a in t.args], last_known_value=None, ) new_t.can_be_true = t.can_be_true new_t.can_be_false = t.can_be_false return new_t def visit_type_alias_type(self, t: TypeAliasType) -> Type: # Type aliases can't contain literal values, because they are # always constructed as explicit types. return t mypy-0.761/mypy/errorcodes.py0000644€tŠÔÚ€2›s®0000001165713576752246022451 0ustar jukkaDROPBOX\Domain Users00000000000000"""Classification of possible errors mypy can detect. These can be used for filtering specific errors. """ from typing import List from typing_extensions import Final # All created error codes are implicitly stored in this list. 
all_error_codes = []  # type: List[ErrorCode]


class ErrorCode:
    """A single mypy error code (e.g. 'attr-defined') with its documentation."""

    def __init__(self, code: str, description: str, category: str) -> None:
        self.code = code  # Short identifier used in '# type: ignore[code]' etc.
        self.description = description  # One-line human-readable summary
        self.category = category  # Grouping used in documentation

    def __str__(self) -> str:
        # BUG FIX: the format template had been garbled to '' (the
        # '<ErrorCode {}>' text was stripped as markup), making str() of
        # every error code the empty string.  Restored.
        return '<ErrorCode {}>'.format(self.code)


ATTR_DEFINED = ErrorCode(
    'attr-defined', "Check that attribute exists", 'General')  # type: Final
NAME_DEFINED = ErrorCode(
    'name-defined', "Check that name is defined", 'General')  # type: Final
CALL_ARG = ErrorCode(
    'call-arg', "Check number, names and kinds of arguments in calls", 'General')  # type: Final
ARG_TYPE = ErrorCode(
    'arg-type', "Check argument types in calls", 'General')  # type: Final
CALL_OVERLOAD = ErrorCode(
    'call-overload', "Check that an overload variant matches arguments", 'General')  # type: Final
VALID_TYPE = ErrorCode(
    'valid-type', "Check that type (annotation) is valid", 'General')  # type: Final
VAR_ANNOTATED = ErrorCode(
    'var-annotated', "Require variable annotation if type can't be inferred",
    'General')  # type: Final
OVERRIDE = ErrorCode(
    'override', "Check that method override is compatible with base class",
    'General')  # type: Final
RETURN = ErrorCode(
    'return', "Check that function always returns a value", 'General')  # type: Final
RETURN_VALUE = ErrorCode(
    'return-value', "Check that return value is compatible with signature",
    'General')  # type: Final
ASSIGNMENT = ErrorCode(
    'assignment', "Check that assigned value is compatible with target", 'General')  # type: Final
TYPE_ARG = ErrorCode(
    'type-arg', "Check that generic type arguments are present", 'General')  # type: Final
TYPE_VAR = ErrorCode(
    'type-var', "Check that type variable values are valid", 'General')  # type: Final
UNION_ATTR = ErrorCode(
    'union-attr', "Check that attribute exists in each item of a union", 'General')  # type: Final
INDEX = ErrorCode(
    'index', "Check indexing operations", 'General')  # type: Final
OPERATOR = ErrorCode(
    'operator', "Check that operator is valid for operands", 'General')  # type: Final
LIST_ITEM = ErrorCode(
    'list-item', "Check list items in a list expression [item, ...]", 'General')  # type: Final
DICT_ITEM = ErrorCode(
    'dict-item', "Check dict items in a dict expression {key: value, ...}",
    'General')  # type: Final
TYPEDDICT_ITEM = ErrorCode(
    'typeddict-item', "Check items when constructing TypedDict", 'General')  # type: Final
HAS_TYPE = ErrorCode(
    'has-type', "Check that type of reference can be determined", 'General')  # type: Final
IMPORT = ErrorCode(
    'import', "Require that imported module can be found or has stubs", 'General')  # type: Final
NO_REDEF = ErrorCode(
    'no-redef', "Check that each name is defined once", 'General')  # type: Final
FUNC_RETURNS_VALUE = ErrorCode(
    'func-returns-value', "Check that called function returns a value in value context",
    'General')  # type: Final
ABSTRACT = ErrorCode(
    'abstract', "Prevent instantiation of classes with abstract attributes",
    'General')  # type: Final
VALID_NEWTYPE = ErrorCode(
    'valid-newtype', "Check that argument 2 to NewType is valid", 'General')  # type: Final
STRING_FORMATTING = ErrorCode(
    'str-format', "Check that string formatting/interpolation is type-safe",
    'General')  # type: Final
STR_BYTES_PY3 = ErrorCode(
    'str-bytes-safe', "Warn about dangerous coercions related to bytes and string types",
    'General')  # type: Final
EXIT_RETURN = ErrorCode(
    'exit-return', "Warn about too general return type for '__exit__'",
    'General')  # type: Final

# These error codes aren't enabled by default.
NO_UNTYPED_DEF = ErrorCode( 'no-untyped-def', "Check that every function has an annotation", 'General') # type: Final NO_UNTYPED_CALL = ErrorCode( 'no-untyped-call', "Disallow calling functions without type annotations from annotated functions", 'General') # type: Final REDUNDANT_CAST = ErrorCode( 'redundant-cast', "Check that cast changes type of expression", 'General') # type: Final COMPARISON_OVERLAP = ErrorCode( 'comparison-overlap', "Check that types in comparisons and 'in' expressions overlap", 'General') # type: Final NO_ANY_UNIMPORTED = ErrorCode( 'no-any-unimported', 'Reject "Any" types from unfollowed imports', 'General') # type: Final NO_ANY_RETURN = ErrorCode( 'no-any-return', 'Reject returning value with "Any" type if return type is not "Any"', 'General') # type: Final # Syntax errors are often blocking. SYNTAX = ErrorCode( 'syntax', "Report syntax errors", 'General') # type: Final # This is a catch-all for remaining uncategorized errors. MISC = ErrorCode( 'misc', "Miscenallenous other checks", 'General') # type: Final mypy-0.761/mypy/errors.py0000644€tŠÔÚ€2›s®0000007105713576752246021616 0ustar jukkaDROPBOX\Domain Users00000000000000import os.path import sys import traceback from collections import OrderedDict, defaultdict from typing import Tuple, List, TypeVar, Set, Dict, Optional, TextIO, Callable from typing_extensions import Final from mypy.scope import Scope from mypy.options import Options from mypy.version import __version__ as mypy_version from mypy.errorcodes import ErrorCode from mypy import errorcodes as codes from mypy.util import DEFAULT_SOURCE_OFFSET T = TypeVar('T') allowed_duplicates = ['@overload', 'Got:', 'Expected:'] # type: Final class ErrorInfo: """Representation of a single error message.""" # Description of a sequence of imports that refer to the source file # related to this error. Each item is a (path, line number) tuple. 
import_ctx = None # type: List[Tuple[str, int]] # The path to source file that was the source of this error. file = '' # The fully-qualified id of the source module for this error. module = None # type: Optional[str] # The name of the type in which this error is located at. type = '' # type: Optional[str] # Unqualified, may be None # The name of the function or member in which this error is located at. function_or_member = '' # type: Optional[str] # Unqualified, may be None # The line number related to this error within file. line = 0 # -1 if unknown # The column number related to this error with file. column = 0 # -1 if unknown # Either 'error' or 'note' severity = '' # The error message. message = '' # The error code. code = None # type: Optional[ErrorCode] # If True, we should halt build after the file that generated this error. blocker = False # Only report this particular messages once per program. only_once = False # Actual origin of the error message as tuple (path, line number, end line number) # If end line number is unknown, use line number. 
origin = None # type: Tuple[str, int, int] # Fine-grained incremental target where this was reported target = None # type: Optional[str] def __init__(self, import_ctx: List[Tuple[str, int]], file: str, module: Optional[str], typ: Optional[str], function_or_member: Optional[str], line: int, column: int, severity: str, message: str, code: Optional[ErrorCode], blocker: bool, only_once: bool, origin: Optional[Tuple[str, int, int]] = None, target: Optional[str] = None) -> None: self.import_ctx = import_ctx self.file = file self.module = module self.type = typ self.function_or_member = function_or_member self.line = line self.column = column self.severity = severity self.message = message self.code = code self.blocker = blocker self.only_once = only_once self.origin = origin or (file, line, line) self.target = target # Type used internally to represent errors: # (path, line, column, severity, message, code) ErrorTuple = Tuple[Optional[str], int, int, str, str, Optional[ErrorCode]] class Errors: """Container for compile errors. This class generates and keeps tracks of compile errors and the current error context (nested imports). """ # Map from files to generated error messages. Is an OrderedDict so # that it can be used to order messages based on the order the # files were processed. error_info_map = None # type: Dict[str, List[ErrorInfo]] # Files that we have reported the errors for flushed_files = None # type: Set[str] # Current error context: nested import context/stack, as a list of (path, line) pairs. import_ctx = None # type: List[Tuple[str, int]] # Path name prefix that is removed from all paths, if set. ignore_prefix = None # type: Optional[str] # Path to current file. file = '' # type: str # Ignore some errors on these lines of each file # (path -> line -> error-codes) ignored_lines = None # type: Dict[str, Dict[int, List[str]]] # Lines on which an error was actually ignored. 
    used_ignored_lines = None  # type: Dict[str, Set[int]]

    # Files where all errors should be ignored.
    ignored_files = None  # type: Set[str]

    # Collection of reported only_once messages.
    only_once_messages = None  # type: Set[str]

    # Set to True to show "In function "foo":" messages.
    show_error_context = False  # type: bool

    # Set to True to show column numbers in error messages.
    show_column_numbers = False  # type: bool

    # Set to True to show absolute file paths in error messages.
    show_absolute_path = False  # type: bool

    # State for keeping track of the current fine-grained incremental mode target.
    # (See mypy.server.update for more about targets.)
    # Current module id.
    target_module = None  # type: Optional[str]
    scope = None  # type: Optional[Scope]

    def __init__(self,
                 show_error_context: bool = False,
                 show_column_numbers: bool = False,
                 show_error_codes: bool = False,
                 pretty: bool = False,
                 read_source: Optional[Callable[[str], Optional[List[str]]]] = None,
                 show_absolute_path: bool = False) -> None:
        self.show_error_context = show_error_context
        self.show_column_numbers = show_column_numbers
        self.show_error_codes = show_error_codes
        self.show_absolute_path = show_absolute_path
        self.pretty = pretty
        # We use fscache to read source code when showing snippets.
        self.read_source = read_source
        self.initialize()

    def initialize(self) -> None:
        """(Re)set all error bookkeeping to an empty state."""
        self.error_info_map = OrderedDict()
        self.flushed_files = set()
        self.import_ctx = []
        self.function_or_member = [None]
        self.ignored_lines = OrderedDict()
        self.used_ignored_lines = defaultdict(set)
        self.ignored_files = set()
        self.only_once_messages = set()
        self.scope = None
        self.target_module = None

    def reset(self) -> None:
        """Discard all accumulated errors and context."""
        self.initialize()

    def copy(self) -> 'Errors':
        """Return a new Errors object sharing settings and context, but no errors."""
        new = Errors(self.show_error_context,
                     self.show_column_numbers,
                     self.show_error_codes,
                     self.pretty,
                     self.read_source,
                     self.show_absolute_path)
        new.file = self.file
        new.import_ctx = self.import_ctx[:]
        new.function_or_member = self.function_or_member[:]
        new.target_module = self.target_module
        new.scope = self.scope
        return new

    def total_errors(self) -> int:
        """Return the total number of collected error infos across all files."""
        return sum(len(errs) for errs in self.error_info_map.values())

    def set_ignore_prefix(self, prefix: str) -> None:
        """Set path prefix that will be removed from all paths."""
        prefix = os.path.normpath(prefix)
        # Add separator to the end, if not given.
        if os.path.basename(prefix) != '':
            prefix += os.sep
        self.ignore_prefix = prefix

    def simplify_path(self, file: str) -> str:
        """Return an absolute or prefix-stripped normalized path for display."""
        if self.show_absolute_path:
            return os.path.abspath(file)
        else:
            file = os.path.normpath(file)
            return remove_path_prefix(file, self.ignore_prefix)

    def set_file(self, file: str,
                 module: Optional[str],
                 scope: Optional[Scope] = None) -> None:
        """Set the path and module id of the current file."""
        # The path will be simplified later, in render_messages. That way
        #  * 'file' is always a key that uniquely identifies a source file
        #    that mypy read (simplified paths might not be unique); and
        #  * we only have to simplify in one place, while still supporting
        #    reporting errors for files other than the one currently being
        #    processed.
        self.file = file
        self.target_module = module
        self.scope = scope

    def set_file_ignored_lines(self, file: str,
                               ignored_lines: Dict[int, List[str]],
                               ignore_all: bool = False) -> None:
        """Record which lines of a file carry "type: ignore" comments."""
        self.ignored_lines[file] = ignored_lines
        if ignore_all:
            self.ignored_files.add(file)

    def current_target(self) -> Optional[str]:
        """Retrieves the current target from the associated scope.

        If there is no associated scope, use the target module."""
        if self.scope is not None:
            return self.scope.current_target()
        return self.target_module

    def current_module(self) -> Optional[str]:
        return self.target_module

    def import_context(self) -> List[Tuple[str, int]]:
        """Return a copy of the import context."""
        return self.import_ctx[:]

    def set_import_context(self, ctx: List[Tuple[str, int]]) -> None:
        """Replace the entire import context with a new value."""
        self.import_ctx = ctx[:]

    def report(self,
               line: int,
               column: Optional[int],
               message: str,
               code: Optional[ErrorCode] = None,
               *,
               blocker: bool = False,
               severity: str = 'error',
               file: Optional[str] = None,
               only_once: bool = False,
               origin_line: Optional[int] = None,
               offset: int = 0,
               end_line: Optional[int] = None) -> None:
        """Report message at the given line using the current error context.

        Args:
            line: line number of error
            column: column number of error
            message: message to report
            code: error code (defaults to 'misc'; not shown for notes)
            blocker: if True, don't continue analysis after this error
            severity: 'error' or 'note'
            file: if non-None, override current file as context
            only_once: if True, only report this exact message once per build
            origin_line: if non-None, override current context as origin
            end_line: if non-None, override current context as end
        """
        if self.scope:
            type = self.scope.current_type_name()
            if self.scope.ignored > 0:
                type = None  # Omit type context if nested function
            function = self.scope.current_function_name()
        else:
            type = None
            function = None

        if column is None:
            column = -1
        if file is None:
            file = self.file
        if offset:
            message = " " * offset + message
        if origin_line is None:
            origin_line = line
        if end_line is None:
            end_line = origin_line
        code = code or codes.MISC

        info = ErrorInfo(self.import_context(), file, self.current_module(), type,
                         function, line, column, severity, message, code,
                         blocker, only_once,
                         origin=(self.file, origin_line, end_line),
                         target=self.current_target())
        self.add_error_info(info)

    def _add_error_info(self, file: str, info: ErrorInfo) -> None:
        # Errors must not be added to a file whose messages were already flushed.
        assert file not in self.flushed_files
        if file not in self.error_info_map:
            self.error_info_map[file] = []
        self.error_info_map[file].append(info)

    def add_error_info(self, info: ErrorInfo) -> None:
        """Add an error, honoring "type: ignore" comments and only_once de-duplication."""
        file, line, end_line = info.origin
        if not info.blocker:  # Blockers cannot be ignored
            if file in self.ignored_lines:
                # It's okay if end_line is *before* line.
                # Function definitions do this, for example, because the correct
                # error reporting line is at the *end* of the ignorable range
                # (for compatibility reasons). If so, just flip 'em!
                if end_line < line:
                    line, end_line = end_line, line
                # Check each line in this context for "type: ignore" comments.
                # line == end_line for most nodes, so we only loop once.
                for scope_line in range(line, end_line + 1):
                    if self.is_ignored_error(scope_line, info, self.ignored_lines[file]):
                        # Annotation requests us to ignore all errors on this line.
                        self.used_ignored_lines[file].add(scope_line)
                        return
            if file in self.ignored_files:
                return
        if info.only_once:
            if info.message in self.only_once_messages:
                return
            self.only_once_messages.add(info.message)
        self._add_error_info(file, info)

    def is_ignored_error(self, line: int, info: ErrorInfo, ignores: Dict[int, List[str]]) -> bool:
        """Return True if the error is suppressed by an ignore entry on this line."""
        if line not in ignores:
            return False
        elif not ignores[line]:
            # Empty list means that we ignore all errors
            return True
        elif info.code:
            # A non-empty list ignores only the listed error codes.
            return info.code.code in ignores[line]
        return False

    def clear_errors_in_targets(self, path: str, targets: Set[str]) -> None:
        """Remove errors in specific fine-grained targets within a file."""
        if path in self.error_info_map:
            new_errors = []
            for info in self.error_info_map[path]:
                if info.target not in targets:
                    new_errors.append(info)
                elif info.only_once:
                    # Allow the message to be reported again later.
                    self.only_once_messages.remove(info.message)
            self.error_info_map[path] = new_errors

    def generate_unused_ignore_errors(self, file: str) -> None:
        """Report an error for each "type: ignore" comment that suppressed nothing."""
        ignored_lines = self.ignored_lines[file]
        if not self.is_typeshed_file(file) and file not in self.ignored_files:
            for line in set(ignored_lines) - self.used_ignored_lines[file]:
                # Don't use report since add_error_info will ignore the error!
                info = ErrorInfo(self.import_context(), file, self.current_module(), None,
                                 None, line, -1, 'error', "unused 'type: ignore' comment",
                                 None, False, False)
                self._add_error_info(file, info)

    def is_typeshed_file(self, file: str) -> bool:
        # gross, but no other clear way to tell
        return 'typeshed' in os.path.normpath(file).split(os.sep)

    def num_messages(self) -> int:
        """Return the number of generated messages."""
        return sum(len(x) for x in self.error_info_map.values())

    def is_errors(self) -> bool:
        """Are there any generated errors?"""
        return bool(self.error_info_map)

    def is_blockers(self) -> bool:
        """Are the any errors that are blockers?"""
        return any(err for errs in self.error_info_map.values() for err in errs if err.blocker)

    def blocker_module(self) -> Optional[str]:
        """Return the module with a blocking error, or None if not possible."""
        for errs in self.error_info_map.values():
            for err in errs:
                if err.blocker:
                    return err.module
        return None

    def is_errors_for_file(self, file: str) -> bool:
        """Are there any errors for the given file?"""
        return file in self.error_info_map

    def raise_error(self, use_stdout: bool = True) -> None:
        """Raise a CompileError with the generated messages.

        Render the messages suitable for displaying.
        """
        # self.new_messages() will format all messages that haven't already
        # been returned from a file_messages() call.
        raise CompileError(self.new_messages(),
                           use_stdout=use_stdout,
                           module_with_blocker=self.blocker_module())

    def format_messages(self, error_info: List[ErrorInfo],
                        source_lines: Optional[List[str]]) -> List[str]:
        """Return a string list that represents the error messages.

        Use a form suitable for displaying to the user. If self.pretty
        is True also append a relevant trimmed source code line (only for
        severity 'error').
        """
        a = []  # type: List[str]
        errors = self.render_messages(self.sort_messages(error_info))
        errors = self.remove_duplicates(errors)
        for file, line, column, severity, message, code in errors:
            s = ''
            if file is not None:
                if self.show_column_numbers and line >= 0 and column >= 0:
                    # Columns are stored 0-based but displayed 1-based.
                    srcloc = '{}:{}:{}'.format(file, line, 1 + column)
                elif line >= 0:
                    srcloc = '{}:{}'.format(file, line)
                else:
                    srcloc = file
                s = '{}: {}: {}'.format(srcloc, severity, message)
            else:
                s = message
            if self.show_error_codes and code and severity != 'note':
                # If note has an error code, it is related to a previous error. Avoid
                # displaying duplicate error codes.
                s = '{} [{}]'.format(s, code.code)
            a.append(s)
            if self.pretty:
                # Add source code fragment and a location marker.
                if severity == 'error' and source_lines and line > 0:
                    source_line = source_lines[line - 1]
                    if column < 0:
                        # Something went wrong, take first non-empty column.
                        column = len(source_line) - len(source_line.lstrip())
                    # Note, currently coloring uses the offset to detect source snippets,
                    # so these offsets should not be arbitrary.
                    a.append(' ' * DEFAULT_SOURCE_OFFSET + source_line)
                    a.append(' ' * (DEFAULT_SOURCE_OFFSET + column) + '^')
        return a

    def file_messages(self, path: str) -> List[str]:
        """Return a string list of new error messages from a given file.

        Use a form suitable for displaying to the user.
        """
        if path not in self.error_info_map:
            return []
        self.flushed_files.add(path)
        source_lines = None
        if self.pretty:
            assert self.read_source
            source_lines = self.read_source(path)
        return self.format_messages(self.error_info_map[path], source_lines)

    def new_messages(self) -> List[str]:
        """Return a string list of new error messages.

        Use a form suitable for displaying to the user.
        Errors from different files are ordered based on the order in which
        they first generated an error.
        """
        msgs = []
        for path in self.error_info_map.keys():
            if path not in self.flushed_files:
                msgs.extend(self.file_messages(path))
        return msgs

    def targets(self) -> Set[str]:
        """Return a set of all targets that contain errors."""
        # TODO: Make sure that either target is always defined or that not being defined
        #       is okay for fine-grained incremental checking.
        return set(info.target
                   for errs in self.error_info_map.values()
                   for info in errs
                   if info.target)

    def render_messages(self, errors: List[ErrorInfo]) -> List[ErrorTuple]:
        """Translate the messages into a sequence of tuples.

        Each tuple is of form (path, line, col, severity, message, code).
        The rendered sequence includes information about error contexts.
        The path item may be None. If the line item is negative, the
        line number is not defined for the tuple.
        """
        result = []  # type: List[ErrorTuple]
        prev_import_context = []  # type: List[Tuple[str, int]]
        prev_function_or_member = None  # type: Optional[str]
        prev_type = None  # type: Optional[str]

        for e in errors:
            # Report module import context, if different from previous message.
            if not self.show_error_context:
                pass
            elif e.import_ctx != prev_import_context:
                last = len(e.import_ctx) - 1
                i = last
                while i >= 0:
                    path, line = e.import_ctx[i]
                    fmt = '{}:{}: note: In module imported here'
                    if i < last:
                        fmt = '{}:{}: note: ... from here'
                    if i > 0:
                        fmt += ','
                    else:
                        fmt += ':'
                    # Remove prefix to ignore from path (if present) to
                    # simplify path.
                    path = remove_path_prefix(path, self.ignore_prefix)
                    result.append((None, -1, -1, 'note', fmt.format(path, line), None))
                    i -= 1

            file = self.simplify_path(e.file)

            # Report context within a source file.
            if not self.show_error_context:
                pass
            elif (e.function_or_member != prev_function_or_member or
                    e.type != prev_type):
                if e.function_or_member is None:
                    if e.type is None:
                        result.append((file, -1, -1, 'note', 'At top level:', None))
                    else:
                        result.append((file, -1, -1, 'note', 'In class "{}":'.format(
                            e.type), None))
                else:
                    if e.type is None:
                        result.append((file, -1, -1, 'note',
                                       'In function "{}":'.format(
                                           e.function_or_member), None))
                    else:
                        result.append((file, -1, -1, 'note',
                                       'In member "{}" of class "{}":'.format(
                                           e.function_or_member, e.type), None))
            elif e.type != prev_type:
                if e.type is None:
                    result.append((file, -1, -1, 'note', 'At top level:', None))
                else:
                    result.append((file, -1, -1, 'note',
                                   'In class "{}":'.format(e.type), None))

            result.append((file, e.line, e.column, e.severity, e.message, e.code))

            prev_import_context = e.import_ctx
            prev_function_or_member = e.function_or_member
            prev_type = e.type

        return result

    def sort_messages(self, errors: List[ErrorInfo]) -> List[ErrorInfo]:
        """Sort an array of error messages locally by line number.

        I.e., sort a run of consecutive messages with the same
        context by line number, but otherwise retain the general
        ordering of the messages.
        """
        result = []  # type: List[ErrorInfo]
        i = 0
        while i < len(errors):
            i0 = i
            # Find neighbouring errors with the same context and file.
            while (i + 1 < len(errors) and
                    errors[i + 1].import_ctx == errors[i].import_ctx and
                    errors[i + 1].file == errors[i].file):
                i += 1
            i += 1

            # Sort the errors specific to a file according to line number and column.
            a = sorted(errors[i0:i], key=lambda x: (x.line, x.column))
            result.extend(a)
        return result

    def remove_duplicates(self, errors: List[ErrorTuple]) -> List[ErrorTuple]:
        """Remove duplicates from a sorted error list."""
        res = []  # type: List[ErrorTuple]
        i = 0
        while i < len(errors):
            dup = False
            # Use slightly special formatting for member conflicts reporting.
            conflicts_notes = False
            j = i - 1
            # Scan back within the same file for a 'Got:' note that marks
            # member-conflict output.
            while j >= 0 and errors[j][0] == errors[i][0]:
                if errors[j][4].strip() == 'Got:':
                    conflicts_notes = True
                j -= 1
            j = i - 1
            while (j >= 0 and errors[j][0] == errors[i][0] and
                    errors[j][1] == errors[i][1]):
                if (errors[j][3] == errors[i][3] and
                        # Allow duplicate notes in overload conflicts reporting.
                        not ((errors[i][3] == 'note' and
                              errors[i][4].strip() in allowed_duplicates)
                             or (errors[i][4].strip().startswith('def ') and
                                 conflicts_notes)) and
                        errors[j][4] == errors[i][4]):  # ignore column
                    dup = True
                    break
                j -= 1
            if not dup:
                res.append(errors[i])
            i += 1
        return res


class CompileError(Exception):
    """Exception raised when there is a compile error.

    It can be a parse, semantic analysis, type check or other
    compilation-related error.

    CompileErrors raised from an errors object carry all of the
    messages that have not been reported out by error streaming.
    This is patched up by build.build to contain either all error
    messages (if errors were streamed) or none (if they were not).
    """

    messages = None  # type: List[str]
    use_stdout = False
    # Can be set in case there was a module with a blocking error
    module_with_blocker = None  # type: Optional[str]

    def __init__(self,
                 messages: List[str],
                 use_stdout: bool = False,
                 module_with_blocker: Optional[str] = None) -> None:
        super().__init__('\n'.join(messages))
        self.messages = messages
        self.use_stdout = use_stdout
        self.module_with_blocker = module_with_blocker


def remove_path_prefix(path: str, prefix: Optional[str]) -> str:
    """If path starts with prefix, return copy of path with the prefix removed.
    Otherwise, return path. If path is None, return None.
    """
    if prefix is not None and path.startswith(prefix):
        return path[len(prefix):]
    else:
        return path


def report_internal_error(err: Exception,
                          file: Optional[str],
                          line: int,
                          errors: Errors,
                          options: Options,
                          stdout: Optional[TextIO] = None,
                          stderr: Optional[TextIO] = None,
                          ) -> None:
    """Report internal error and exit.
    This optionally starts pdb or shows a traceback.
    """
    stdout = (stdout or sys.stdout)
    stderr = (stderr or sys.stderr)
    # Dump out errors so far, they often provide a clue.
    # But catch unexpected errors rendering them.
    try:
        for msg in errors.new_messages():
            print(msg)
    except Exception as e:
        print("Failed to dump errors:", repr(e), file=stderr)

    # Compute file:line prefix for official-looking error messages.
    if file:
        if line:
            prefix = '{}:{}: '.format(file, line)
        else:
            prefix = '{}: '.format(file)
    else:
        prefix = ''

    # Print "INTERNAL ERROR" message.
    print('{}error: INTERNAL ERROR --'.format(prefix),
          'Please try using mypy master on Github:\n'
          'https://mypy.rtfd.io/en/latest/common_issues.html#using-a-development-mypy-build',
          file=stderr)
    if options.show_traceback:
        print('Please report a bug at https://github.com/python/mypy/issues',
              file=stderr)
    else:
        print('If this issue continues with mypy master, '
              'please report a bug at https://github.com/python/mypy/issues',
              file=stderr)
    print('version: {}'.format(mypy_version),
          file=stderr)

    # If requested, drop into pdb. This overrides show_tb.
    if options.pdb:
        print('Dropping into pdb', file=stderr)
        import pdb
        pdb.post_mortem(sys.exc_info()[2])

    # If requested, print traceback, else print note explaining how to get one.
    if options.raise_exceptions:
        raise err
    if not options.show_traceback:
        if not options.pdb:
            print('{}: note: please use --show-traceback to print a traceback '
                  'when reporting a bug'.format(prefix),
                  file=stderr)
    else:
        tb = traceback.extract_stack()[:-2]
        tb2 = traceback.extract_tb(sys.exc_info()[2])
        print('Traceback (most recent call last):')
        for s in traceback.format_list(tb + tb2):
            print(s.rstrip('\n'))
        print('{}: {}'.format(type(err).__name__, err), file=stdout)
        print('{}: note: use --pdb to drop into pdb'.format(prefix), file=stderr)

    # Exit. The caller has nothing more to say.
    # We use exit code 2 to signal that this is no ordinary error.
    raise SystemExit(2)
mypy-0.761/mypy/expandtype.py0000644€tŠÔÚ€2›s®0000001265113576752246022456 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Iterable, List, TypeVar, Mapping, cast

from mypy.types import (
    Type, Instance, CallableType, TypeVisitor, UnboundType, AnyType,
    NoneType, TypeVarType, Overloaded, TupleType, TypedDictType, UnionType,
    ErasedType, PartialType, DeletedType, UninhabitedType, TypeType, TypeVarId,
    FunctionLike, TypeVarDef, LiteralType, get_proper_type, ProperType,
    TypeAliasType)


def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type:
    """Substitute any type variable references in a type given by a type
    environment.
    """
    # TODO: use an overloaded signature? (ProperType stays proper after expansion.)
    return typ.accept(ExpandTypeVisitor(env))


def expand_type_by_instance(typ: Type, instance: Instance) -> Type:
    """Substitute type variables in type using values from an Instance.
    Type variables are considered to be bound by the class declaration."""
    # TODO: use an overloaded signature? (ProperType stays proper after expansion.)
    if instance.args == []:
        # A non-generic instance: nothing to substitute.
        return typ
    else:
        variables = {}  # type: Dict[TypeVarId, Type]
        for binder, arg in zip(instance.type.defn.type_vars, instance.args):
            variables[binder.id] = arg
        return expand_type(typ, variables)


F = TypeVar('F', bound=FunctionLike)


def freshen_function_type_vars(callee: F) -> F:
    """Substitute fresh type variables for generic function type variables."""
    if isinstance(callee, CallableType):
        if not callee.is_generic():
            return cast(F, callee)
        tvdefs = []
        tvmap = {}  # type: Dict[TypeVarId, Type]
        for v in callee.variables:
            tvdef = TypeVarDef.new_unification_variable(v)
            tvdefs.append(tvdef)
            tvmap[v.id] = TypeVarType(tvdef)
        fresh = cast(CallableType, expand_type(callee, tvmap)).copy_modified(variables=tvdefs)
        return cast(F, fresh)
    else:
        assert isinstance(callee, Overloaded)
        fresh_overload = Overloaded([freshen_function_type_vars(item)
                                     for item in callee.items()])
        return cast(F, fresh_overload)


class ExpandTypeVisitor(TypeVisitor[Type]):
    """Visitor that substitutes type variables with values."""

    variables = None  # type: Mapping[TypeVarId, Type]  # TypeVar id -> TypeVar value

    def __init__(self, variables: Mapping[TypeVarId, Type]) -> None:
        self.variables = variables

    def visit_unbound_type(self, t: UnboundType) -> Type:
        return t

    def visit_any(self, t: AnyType) -> Type:
        return t

    def visit_none_type(self, t: NoneType) -> Type:
        return t

    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
        return t

    def visit_deleted_type(self, t: DeletedType) -> Type:
        return t

    def visit_erased_type(self, t: ErasedType) -> Type:
        # Should not get here.
        raise RuntimeError()

    def visit_instance(self, t: Instance) -> Type:
        args = self.expand_types(t.args)
        return Instance(t.type, args, t.line, t.column)

    def visit_type_var(self, t: TypeVarType) -> Type:
        repl = get_proper_type(self.variables.get(t.id, t))
        if isinstance(repl, Instance):
            inst = repl
            # Return copy of instance with type erasure flag on.
            return Instance(inst.type, inst.args, line=inst.line,
                            column=inst.column, erased=True)
        else:
            return repl

    def visit_callable_type(self, t: CallableType) -> Type:
        return t.copy_modified(arg_types=self.expand_types(t.arg_types),
                               ret_type=t.ret_type.accept(self))

    def visit_overloaded(self, t: Overloaded) -> Type:
        items = []  # type: List[CallableType]
        for item in t.items():
            new_item = item.accept(self)
            assert isinstance(new_item, ProperType)
            assert isinstance(new_item, CallableType)
            items.append(new_item)
        return Overloaded(items)

    def visit_tuple_type(self, t: TupleType) -> Type:
        return t.copy_modified(items=self.expand_types(t.items))

    def visit_typeddict_type(self, t: TypedDictType) -> Type:
        return t.copy_modified(item_types=self.expand_types(t.items.values()))

    def visit_literal_type(self, t: LiteralType) -> Type:
        # TODO: Verify this implementation is correct
        return t

    def visit_union_type(self, t: UnionType) -> Type:
        # After substituting for type variables in t.items,
        # some of the resulting types might be subtypes of others.
        # Local import to avoid an import cycle with mypy.typeops.
        from mypy.typeops import make_simplified_union
        return make_simplified_union(self.expand_types(t.items), t.line, t.column)

    def visit_partial_type(self, t: PartialType) -> Type:
        return t

    def visit_type_type(self, t: TypeType) -> Type:
        # TODO: Verify that the new item type is valid (instance or
        # union of instances or Any). Sadly we can't report errors
        # here yet.
        item = t.item.accept(self)
        return TypeType.make_normalized(item)

    def visit_type_alias_type(self, t: TypeAliasType) -> Type:
        # Target of the type alias cannot contain type variables,
        # so we just expand the arguments.
        return t.copy_modified(args=self.expand_types(t.args))

    def expand_types(self, types: Iterable[Type]) -> List[Type]:
        """Expand each type in an iterable, preserving order."""
        a = []  # type: List[Type]
        for t in types:
            a.append(t.accept(self))
        return a
mypy-0.761/mypy/exprtotype.py0000644€tŠÔÚ€2›s®0000001405313576752246022516 0ustar jukkaDROPBOX\Domain Users00000000000000"""Translate an Expression to a Type value."""

from typing import Optional

from mypy.nodes import (
    Expression, NameExpr, MemberExpr, IndexExpr, TupleExpr, IntExpr, FloatExpr, UnaryExpr,
    ComplexExpr, ListExpr, StrExpr, BytesExpr, UnicodeExpr, EllipsisExpr, CallExpr,
    get_member_expr_fullname
)
from mypy.fastparse import parse_type_string
from mypy.types import (
    Type, UnboundType, TypeList, EllipsisType, AnyType, CallableArgument, TypeOfAny,
    RawExpressionType, ProperType
)


class TypeTranslationError(Exception):
    """Exception raised when an expression is not valid as a type."""


def _extract_argument_name(expr: Expression) -> Optional[str]:
    # Accept a 'None' name expression or a (unicode) string literal as an
    # argument name; anything else is not a valid type expression.
    if isinstance(expr, NameExpr) and expr.name == 'None':
        return None
    elif isinstance(expr, StrExpr):
        return expr.value
    elif isinstance(expr, UnicodeExpr):
        return expr.value
    else:
        raise TypeTranslationError()


def expr_to_unanalyzed_type(expr: Expression, _parent: Optional[Expression] = None) -> ProperType:
    """Translate an expression to the corresponding type.

    The result is not semantically analyzed. It can be UnboundType or TypeList.
    Raise TypeTranslationError if the expression cannot represent a type.
    """
    # The `parent` parameter is used in recursive calls to provide context for
    # understanding whether an CallableArgument is ok.
    name = None  # type: Optional[str]
    if isinstance(expr, NameExpr):
        name = expr.name
        if name == 'True':
            return RawExpressionType(True, 'builtins.bool', line=expr.line, column=expr.column)
        elif name == 'False':
            return RawExpressionType(False, 'builtins.bool', line=expr.line, column=expr.column)
        else:
            return UnboundType(name, line=expr.line, column=expr.column)
    elif isinstance(expr, MemberExpr):
        fullname = get_member_expr_fullname(expr)
        if fullname:
            return UnboundType(fullname, line=expr.line, column=expr.column)
        else:
            raise TypeTranslationError()
    elif isinstance(expr, IndexExpr):
        base = expr_to_unanalyzed_type(expr.base, expr)
        if isinstance(base, UnboundType):
            if base.args:
                raise TypeTranslationError()
            if isinstance(expr.index, TupleExpr):
                args = expr.index.items
            else:
                args = [expr.index]
            base.args = [expr_to_unanalyzed_type(arg, expr) for arg in args]
            if not base.args:
                base.empty_tuple_index = True
            return base
        else:
            raise TypeTranslationError()
    elif isinstance(expr, CallExpr) and isinstance(_parent, ListExpr):
        c = expr.callee
        names = []
        # Go through the dotted member expr chain to get the full arg
        # constructor name to look up
        while True:
            if isinstance(c, NameExpr):
                names.append(c.name)
                break
            elif isinstance(c, MemberExpr):
                names.append(c.name)
                c = c.expr
            else:
                raise TypeTranslationError()
        arg_const = '.'.join(reversed(names))

        # Go through the constructor args to get its name and type.
        name = None
        default_type = AnyType(TypeOfAny.unannotated)
        typ = default_type  # type: Type
        for i, arg in enumerate(expr.args):
            if expr.arg_names[i] is not None:
                if expr.arg_names[i] == "name":
                    if name is not None:
                        # Two names
                        raise TypeTranslationError()
                    name = _extract_argument_name(arg)
                    continue
                elif expr.arg_names[i] == "type":
                    if typ is not default_type:
                        # Two types
                        raise TypeTranslationError()
                    typ = expr_to_unanalyzed_type(arg, expr)
                    continue
                else:
                    raise TypeTranslationError()
            elif i == 0:
                typ = expr_to_unanalyzed_type(arg, expr)
            elif i == 1:
                name = _extract_argument_name(arg)
            else:
                raise TypeTranslationError()
        return CallableArgument(typ, name, arg_const, expr.line, expr.column)
    elif isinstance(expr, ListExpr):
        return TypeList([expr_to_unanalyzed_type(t, expr) for t in expr.items],
                        line=expr.line, column=expr.column)
    elif isinstance(expr, StrExpr):
        return parse_type_string(expr.value, 'builtins.str', expr.line, expr.column,
                                 assume_str_is_unicode=expr.from_python_3)
    elif isinstance(expr, BytesExpr):
        return parse_type_string(expr.value, 'builtins.bytes', expr.line, expr.column,
                                 assume_str_is_unicode=False)
    elif isinstance(expr, UnicodeExpr):
        return parse_type_string(expr.value, 'builtins.unicode', expr.line, expr.column,
                                 assume_str_is_unicode=True)
    elif isinstance(expr, UnaryExpr):
        typ = expr_to_unanalyzed_type(expr.expr)
        if isinstance(typ, RawExpressionType):
            # Negation of an int literal (e.g. Literal[-1]) is still a literal.
            if isinstance(typ.literal_value, int) and expr.op == '-':
                typ.literal_value *= -1
                return typ
        raise TypeTranslationError()
    elif isinstance(expr, IntExpr):
        return RawExpressionType(expr.value, 'builtins.int', line=expr.line, column=expr.column)
    elif isinstance(expr, FloatExpr):
        # Floats are not valid parameters for RawExpressionType, so we just
        # pass in 'None' for now. We'll report the appropriate error at a later stage.
        return RawExpressionType(None, 'builtins.float', line=expr.line, column=expr.column)
    elif isinstance(expr, ComplexExpr):
        # Same thing as above with complex numbers.
        return RawExpressionType(None, 'builtins.complex', line=expr.line, column=expr.column)
    elif isinstance(expr, EllipsisExpr):
        return EllipsisType(expr.line)
    else:
        raise TypeTranslationError()
mypy-0.761/mypy/fastparse.py0000644€tŠÔÚ€2›s®0000017775013576752246022275 0ustar jukkaDROPBOX\Domain Users00000000000000import re
import sys
import warnings

import typing  # for typing.Type, which conflicts with types.Type
from typing import (
    Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, Dict, cast, List, overload
)
from typing_extensions import Final, Literal, overload

from mypy.sharedparse import (
    special_function_elide_names, argument_elide_name,
)
from mypy.nodes import (
    MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
    OverloadPart,
    ClassDef, Decorator, Block, Var, OperatorAssignmentStmt,
    ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt,
    DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl,
    WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt,
    TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr,
    DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr,
    FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
    UnaryExpr, LambdaExpr, ComparisonExpr, AssignmentExpr,
    StarExpr, YieldFromExpr, NonlocalDecl, DictionaryComprehension,
    SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument,
    AwaitExpr, TempNode, Expression, Statement,
    ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR2,
    check_arg_names,
    FakeInfo,
)
from mypy.types import (
    Type, CallableType, AnyType, UnboundType, TupleType, TypeList, EllipsisType,
    CallableArgument, TypeOfAny, Instance, RawExpressionType, ProperType
)
from mypy import defaults
from mypy import message_registry, errorcodes as codes
from mypy.errors import Errors
from mypy.options import Options
from mypy.reachability import mark_block_unreachable

try:
    # pull this into a final variable to make mypyc be quiet about the
the default argument warning PY_MINOR_VERSION = sys.version_info[1] # type: Final # Check if we can use the stdlib ast module instead of typed_ast. if sys.version_info >= (3, 8): import ast as ast3 assert 'kind' in ast3.Constant._fields, \ "This 3.8.0 alpha (%s) is too old; 3.8.0a3 required" % sys.version.split()[0] from ast import ( AST, Call, FunctionType, Name, Attribute, Ellipsis as ast3_Ellipsis, Starred, NameConstant, Expression as ast3_Expression, Str, Bytes, Index, Num, UnaryOp, USub, ) def ast3_parse(source: Union[str, bytes], filename: str, mode: str, feature_version: int = PY_MINOR_VERSION) -> AST: return ast3.parse(source, filename, mode, type_comments=True, # This works the magic feature_version=feature_version) NamedExpr = ast3.NamedExpr Constant = ast3.Constant else: from typed_ast import ast3 from typed_ast.ast3 import ( AST, Call, FunctionType, Name, Attribute, Ellipsis as ast3_Ellipsis, Starred, NameConstant, Expression as ast3_Expression, Str, Bytes, Index, Num, UnaryOp, USub, ) def ast3_parse(source: Union[str, bytes], filename: str, mode: str, feature_version: int = PY_MINOR_VERSION) -> AST: return ast3.parse(source, filename, mode, feature_version=feature_version) # These don't exist before 3.8 NamedExpr = Any Constant = Any except ImportError: try: from typed_ast import ast35 # type: ignore[attr-defined] # noqa: F401 except ImportError: print('The typed_ast package is not installed.\n' 'You can install it with `python3 -m pip install typed-ast`.', file=sys.stderr) else: print('You need a more recent version of the typed_ast package.\n' 'You can update to the latest version with ' '`python3 -m pip install -U typed-ast`.', file=sys.stderr) sys.exit(1) N = TypeVar('N', bound=Node) # There is no way to create reasonable fallbacks at this stage, # they must be patched later. 
# Placeholder fallback Instance: real fallbacks cannot be created at parse
# time, so semantic analysis patches them in later.
MISSING_FALLBACK = FakeInfo("fallback can't be filled out until semanal")  # type: Final
_dummy_fallback = Instance(MISSING_FALLBACK, [], -1)  # type: Final

TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment'  # type: Final

INVALID_TYPE_IGNORE = 'Invalid "type: ignore" comment'  # type: Final

# Captures everything after "# type: ignore" (the optional "[code, ...]" tag).
TYPE_IGNORE_PATTERN = re.compile(r'[^#]*#\s*type:\s*ignore\s*(.*)')


def parse(source: Union[str, bytes],
          fnam: str,
          module: Optional[str],
          errors: Optional[Errors] = None,
          options: Optional[Options] = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree. If errors is not provided, raise ParseError
    on failure. Otherwise, use the errors object to report parse errors.
    """
    # Only raise at the end when the caller did not supply an Errors
    # collector of their own.
    raise_on_error = False
    if errors is None:
        errors = Errors()
        raise_on_error = True
    if options is None:
        options = Options()
    errors.set_file(fnam, module)
    is_stub_file = fnam.endswith('.pyi')
    try:
        if is_stub_file:
            # Stubs are always parsed with the default Python 3 grammar.
            feature_version = defaults.PYTHON3_VERSION[1]
        else:
            assert options.python_version[0] >= 3
            feature_version = options.python_version[1]
        # Disable deprecation warnings about \u
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=DeprecationWarning)
            ast = ast3_parse(source, fnam, 'exec', feature_version=feature_version)

        tree = ASTConverter(options=options,
                            is_stub=is_stub_file,
                            errors=errors,
                            ).visit(ast)
        tree.path = fnam
        tree.is_stub = is_stub_file
    except SyntaxError as e:
        # Report the syntax error through the normal error machinery and
        # return an empty file so downstream phases have something to chew on.
        errors.report(e.lineno, e.offset, e.msg, blocker=True, code=codes.SYNTAX)
        tree = MypyFile([], [], False, {})

    if raise_on_error and errors.is_errors():
        errors.raise_error()

    return tree


def parse_type_ignore_tag(tag: Optional[str]) -> Optional[List[str]]:
    """Parse optional "[code, ...]" tag after "# type: ignore".

    Return:
     * [] if no tag was found (ignore all errors)
     * list of ignored error codes if a tag was found
     * None if the tag was invalid.
    """
    if not tag or tag.strip() == '' or tag.strip().startswith('#'):
        # No tag -- ignore all errors.
        return []
    m = re.match(r'\s*\[([^]#]*)\]\s*(#.*)?$', tag)
    if m is None:
        # Invalid "# type: ignore" comment.
        return None
    return [code.strip() for code in m.group(1).split(',')]


def parse_type_comment(type_comment: str,
                       line: int,
                       column: int,
                       errors: Optional[Errors],
                       assume_str_is_unicode: bool = True,
                       ) -> Tuple[Optional[List[str]], Optional[ProperType]]:
    """Parse type portion of a type comment (+ optional type ignore).

    Return (ignore info, parsed type).

    When `errors` is None, syntax problems propagate as exceptions instead
    of being reported.
    """
    try:
        typ = ast3_parse(type_comment, '', 'eval')
    except SyntaxError:
        if errors is not None:
            # Strip a trailing "# ..." part before showing the bad type text.
            stripped_type = type_comment.split("#", 2)[0].strip()
            err_msg = "{} '{}'".format(TYPE_COMMENT_SYNTAX_ERROR, stripped_type)
            errors.report(line, column, err_msg, blocker=True, code=codes.SYNTAX)
            return None, None
        else:
            raise
    else:
        extra_ignore = TYPE_IGNORE_PATTERN.match(type_comment)
        if extra_ignore:
            # Typeshed has a non-optional return type for group!
            tag = cast(Any, extra_ignore).group(1)  # type: Optional[str]
            ignored = parse_type_ignore_tag(tag)  # type: Optional[List[str]]
            if ignored is None:
                if errors is not None:
                    errors.report(line, column, INVALID_TYPE_IGNORE, code=codes.SYNTAX)
                else:
                    raise SyntaxError
        else:
            ignored = None
        assert isinstance(typ, ast3_Expression)
        converted = TypeConverter(errors,
                                  line=line,
                                  override_column=column,
                                  assume_str_is_unicode=assume_str_is_unicode).visit(typ.body)
        return ignored, converted
def is_no_type_check_decorator(expr: ast3.expr) -> bool:
    """Return True if the decorator expression is the `no_type_check` marker.

    Recognizes both the bare name `no_type_check` and the dotted form
    `typing.no_type_check`; anything else is not the marker.
    """
    if isinstance(expr, Name):
        return expr.id == 'no_type_check'
    if isinstance(expr, Attribute) and isinstance(expr.value, Name):
        return expr.value.id == 'typing' and expr.attr == 'no_type_check'
    return False
    def set_line(self, node: N, n: Union[ast3.expr, ast3.stmt]) -> N:
        """Copy line/column information from an AST node onto a mypy node."""
        node.line = n.lineno
        node.column = n.col_offset
        # end_lineno only exists on expression nodes of newer ASTs; fall back
        # to None when it is unavailable.
        node.end_line = getattr(n, "end_lineno", None) if isinstance(n, ast3.expr) else None
        return node

    def translate_opt_expr_list(self, l: Sequence[Optional[AST]]) -> List[Optional[Expression]]:
        """Translate a list of AST expressions, preserving None entries."""
        res = []  # type: List[Optional[Expression]]
        for e in l:
            exp = self.visit(e)
            res.append(exp)
        return res

    def translate_expr_list(self, l: Sequence[AST]) -> List[Expression]:
        """Translate a list of AST expressions known to contain no None entries."""
        return cast(List[Expression], self.translate_opt_expr_list(l))

    def get_lineno(self, node: Union[ast3.expr, ast3.stmt]) -> int:
        """Return the effective first line of a node (its first decorator, if any)."""
        if (isinstance(node, (ast3.AsyncFunctionDef, ast3.ClassDef, ast3.FunctionDef))
                and node.decorator_list):
            return node.decorator_list[0].lineno
        return node.lineno

    def translate_stmt_list(self,
                            stmts: Sequence[ast3.stmt],
                            ismodule: bool = False) -> List[Statement]:
        """Translate a list of statements into mypy Statement nodes.

        When translating a whole module body, honor a module-level
        "# type: ignore" by marking the entire body unreachable.
        """
        # A "# type: ignore" comment before the first statement of a module
        # ignores the whole module:
        if (ismodule and stmts and self.type_ignores
                and min(self.type_ignores) < self.get_lineno(stmts[0])):
            self.errors.used_ignored_lines[self.errors.file].add(min(self.type_ignores))
            # Recurse without ismodule so we don't take this branch again.
            block = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
            mark_block_unreachable(block)
            return [block]

        res = []  # type: List[Statement]
        for stmt in stmts:
            node = self.visit(stmt)
            res.append(node)
        return res

    def translate_type_comment(self, n: Union[ast3.stmt, ast3.arg],
                               type_comment: Optional[str]) -> Optional[ProperType]:
        """Parse a "# type:" comment attached to a statement/argument, if any.

        Also records any piggybacked "# type: ignore" found in the comment.
        """
        if type_comment is None:
            return None
        else:
            lineno = n.lineno
            extra_ignore, typ = parse_type_comment(type_comment,
                                                   lineno,
                                                   n.col_offset,
                                                   self.errors)
            if extra_ignore is not None:
                self.type_ignores[lineno] = extra_ignore
            return typ

    # Maps binary-operator AST node classes to their surface syntax.
    op_map = {
        ast3.Add: '+',
        ast3.Sub: '-',
        ast3.Mult: '*',
        ast3.MatMult: '@',
        ast3.Div: '/',
        ast3.Mod: '%',
        ast3.Pow: '**',
        ast3.LShift: '<<',
        ast3.RShift: '>>',
        ast3.BitOr: '|',
        ast3.BitXor: '^',
        ast3.BitAnd: '&',
        ast3.FloorDiv: '//'
    }  # type: Final[Dict[typing.Type[AST], str]]

    def from_operator(self, op: ast3.operator) -> str:
        """Return the operator token (e.g. '+') for a binary operator AST node."""
        op_name = ASTConverter.op_map.get(type(op))
        if op_name is None:
            raise RuntimeError('Unknown operator ' + str(type(op)))
        else:
            return op_name

    # Maps comparison-operator AST node classes to their surface syntax.
    comp_op_map = {
        ast3.Gt: '>',
        ast3.Lt: '<',
        ast3.Eq: '==',
        ast3.GtE: '>=',
        ast3.LtE: '<=',
        ast3.NotEq: '!=',
        ast3.Is: 'is',
        ast3.IsNot: 'is not',
        ast3.In: 'in',
        ast3.NotIn: 'not in'
    }  # type: Final[Dict[typing.Type[AST], str]]

    def from_comp_operator(self, op: ast3.cmpop) -> str:
        """Return the operator token (e.g. '>') for a comparison operator AST node."""
        op_name = ASTConverter.comp_op_map.get(type(op))
        if op_name is None:
            raise RuntimeError('Unknown comparison operator ' + str(type(op)))
        else:
            return op_name

    def as_block(self, stmts: List[ast3.stmt], lineno: int) -> Optional[Block]:
        """Translate statements into a Block, or None when there are no statements."""
        b = None
        if stmts:
            b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
            b.set_line(lineno)
        return b

    def as_required_block(self, stmts: List[ast3.stmt], lineno: int) -> Block:
        """Translate a non-empty statement list into a Block."""
        assert stmts  # must be non-empty
        b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
        b.set_line(lineno)
        return b
ret.append(current_overload[0]) elif len(current_overload) > 1: ret.append(OverloadedFuncDef(current_overload)) return ret def in_method_scope(self) -> bool: return self.class_and_function_stack[-2:] == ['C', 'F'] def translate_module_id(self, id: str) -> str: """Return the actual, internal module id for a source text id. For example, translate '__builtin__' in Python 2 to 'builtins'. """ if id == self.options.custom_typing_module: return 'typing' elif id == '__builtin__' and self.options.python_version[0] == 2: # HACK: __builtin__ in Python 2 is aliases to builtins. However, the implementation # is named __builtin__.py (there is another layer of translation elsewhere). return 'builtins' return id def visit_Module(self, mod: ast3.Module) -> MypyFile: self.type_ignores = {} for ti in mod.type_ignores: parsed = parse_type_ignore_tag(ti.tag) # type: ignore[attr-defined] if parsed is not None: self.type_ignores[ti.lineno] = parsed else: self.fail(INVALID_TYPE_IGNORE, ti.lineno, -1) body = self.fix_function_overloads(self.translate_stmt_list(mod.body, ismodule=True)) return MypyFile(body, self.imports, False, self.type_ignores, ) # --- stmt --- # FunctionDef(identifier name, arguments args, # stmt* body, expr* decorator_list, expr? returns, string? type_comment) # arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults, # arg? kwarg, expr* defaults) def visit_FunctionDef(self, n: ast3.FunctionDef) -> Union[FuncDef, Decorator]: return self.do_func_def(n) # AsyncFunctionDef(identifier name, arguments args, # stmt* body, expr* decorator_list, expr? returns, string? 
    # AsyncFunctionDef(identifier name, arguments args,
    #                  stmt* body, expr* decorator_list, expr? returns, string? type_comment)
    def visit_AsyncFunctionDef(self, n: ast3.AsyncFunctionDef) -> Union[FuncDef, Decorator]:
        return self.do_func_def(n, is_coroutine=True)

    def do_func_def(self, n: Union[ast3.FunctionDef, ast3.AsyncFunctionDef],
                    is_coroutine: bool = False) -> Union[FuncDef, Decorator]:
        """Helper shared between visit_FunctionDef and visit_AsyncFunctionDef.

        Builds a FuncDef from the AST node, attaching a CallableType derived
        from annotations or a "# type:" signature comment.  Returns a
        Decorator wrapper instead when the function is decorated.
        """
        self.class_and_function_stack.append('F')
        # @no_type_check suppresses all type processing for this function.
        no_type_check = bool(n.decorator_list and
                             any(is_no_type_check_decorator(d) for d in n.decorator_list))

        lineno = n.lineno
        args = self.transform_args(n.args, lineno, no_type_check=no_type_check)

        posonlyargs = [arg.arg for arg in getattr(n.args, "posonlyargs", [])]
        arg_kinds = [arg.kind for arg in args]
        arg_names = [arg.variable.name for arg in args]  # type: List[Optional[str]]
        # Positional-only args (and elided names like '_') get no name.
        arg_names = [None if argument_elide_name(name) or name in posonlyargs else name
                     for name in arg_names]
        if special_function_elide_names(n.name):
            arg_names = [None] * len(arg_names)
        arg_types = []  # type: List[Optional[Type]]
        if no_type_check:
            arg_types = [None] * len(args)
            return_type = None
        elif n.type_comment is not None:
            # The signature was given as a "# type: (...) -> ..." comment.
            try:
                func_type_ast = ast3_parse(n.type_comment, '', 'func_type')
                assert isinstance(func_type_ast, FunctionType)
                # for ellipsis arg
                if (len(func_type_ast.argtypes) == 1 and
                        isinstance(func_type_ast.argtypes[0], ast3_Ellipsis)):
                    if n.returns:
                        # PEP 484 disallows both type annotations and type comments
                        self.fail(message_registry.DUPLICATE_TYPE_SIGNATURES,
                                  lineno, n.col_offset)
                    arg_types = [a.type_annotation
                                 if a.type_annotation is not None
                                 else AnyType(TypeOfAny.unannotated)
                                 for a in args]
                else:
                    # PEP 484 disallows both type annotations and type comments
                    if n.returns or any(a.type_annotation is not None for a in args):
                        self.fail(message_registry.DUPLICATE_TYPE_SIGNATURES,
                                  lineno, n.col_offset)
                    translated_args = (TypeConverter(self.errors,
                                                     line=lineno,
                                                     override_column=n.col_offset)
                                       .translate_expr_list(func_type_ast.argtypes))
                    arg_types = [a if a is not None else AnyType(TypeOfAny.unannotated)
                                 for a in translated_args]
                return_type = TypeConverter(self.errors,
                                            line=lineno).visit(func_type_ast.returns)

                # add implicit self type
                if self.in_method_scope() and len(arg_types) < len(args):
                    arg_types.insert(0, AnyType(TypeOfAny.special_form))
            except SyntaxError:
                stripped_type = n.type_comment.split("#", 2)[0].strip()
                err_msg = "{} '{}'".format(TYPE_COMMENT_SYNTAX_ERROR, stripped_type)
                self.fail(err_msg, lineno, n.col_offset)
                if n.type_comment and n.type_comment[0] not in ["(", "#"]:
                    self.note('Suggestion: wrap argument types in parentheses',
                              lineno, n.col_offset)
                # Fall back to Any so analysis can continue past the error.
                arg_types = [AnyType(TypeOfAny.from_error)] * len(args)
                return_type = AnyType(TypeOfAny.from_error)
        else:
            # Ordinary annotations (or no annotations at all).
            arg_types = [a.type_annotation for a in args]
            return_type = TypeConverter(self.errors,
                                        line=n.returns.lineno
                                        if n.returns else lineno).visit(n.returns)

        for arg, arg_type in zip(args, arg_types):
            self.set_type_optional(arg_type, arg.initializer)

        func_type = None
        if any(arg_types) or return_type:
            # Validate the signature's arity before constructing a callable.
            if len(arg_types) != 1 and any(isinstance(t, EllipsisType)
                                           for t in arg_types):
                self.fail("Ellipses cannot accompany other argument types "
                          "in function type signature", lineno, n.col_offset)
            elif len(arg_types) > len(arg_kinds):
                self.fail('Type signature has too many arguments', lineno, n.col_offset,
                          blocker=False)
            elif len(arg_types) < len(arg_kinds):
                self.fail('Type signature has too few arguments', lineno, n.col_offset,
                          blocker=False)
            else:
                func_type = CallableType([a if a is not None else
                                          AnyType(TypeOfAny.unannotated) for a in arg_types],
                                         arg_kinds,
                                         arg_names,
                                         return_type if return_type is not None else
                                         AnyType(TypeOfAny.unannotated),
                                         _dummy_fallback)

        func_def = FuncDef(n.name,
                           args,
                           self.as_required_block(n.body, lineno),
                           func_type)
        if isinstance(func_def.type, CallableType):
            # semanal.py does some in-place modifications we want to avoid
            func_def.unanalyzed_type = func_def.type.copy_modified()
        if is_coroutine:
            func_def.is_coroutine = True
        if func_type is not None:
            func_type.definition = func_def
            func_type.line = lineno

        if n.decorator_list:
            if sys.version_info < (3, 8):
                # Before 3.8, [typed_]ast the line number points to the first decorator.
                # In 3.8, it points to the 'def' line, where we want it.
                lineno += len(n.decorator_list)
                end_lineno = None  # type: Optional[int]
            else:
                # Set end_lineno to the old pre-3.8 lineno, in order to keep
                # existing "# type: ignore" comments working:
                end_lineno = n.decorator_list[0].lineno + len(n.decorator_list)

            var = Var(func_def.name)
            var.is_ready = False
            var.set_line(lineno)

            func_def.is_decorated = True
            func_def.set_line(lineno, n.col_offset, end_lineno)
            func_def.body.set_line(lineno)  # TODO: Why?

            deco = Decorator(func_def, self.translate_expr_list(n.decorator_list), var)
            first = n.decorator_list[0]
            deco.set_line(first.lineno, first.col_offset)
            retval = deco  # type: Union[FuncDef, Decorator]
        else:
            # FuncDef overrides set_line -- can't use self.set_line
            func_def.set_line(lineno, n.col_offset)
            retval = func_def
        self.class_and_function_stack.pop()
        return retval
    def transform_args(self,
                       args: ast3.arguments,
                       line: int,
                       no_type_check: bool = False,
                       ) -> List[Argument]:
        """Convert an AST argument list into mypy Argument nodes.

        Handles positional (including positional-only on 3.8+), defaulted,
        *args, keyword-only and **kwargs parameters, and checks for
        duplicate argument names.
        """
        new_args = []
        names = []  # type: List[ast3.arg]
        # posonlyargs only exists on Python 3.8+ AST nodes.
        args_args = getattr(args, "posonlyargs", []) + args.args
        args_defaults = args.defaults
        # Defaults align with the *last* len(defaults) positional arguments.
        num_no_defaults = len(args_args) - len(args_defaults)
        # positional arguments without defaults
        for a in args_args[:num_no_defaults]:
            new_args.append(self.make_argument(a, None, ARG_POS, no_type_check))
            names.append(a)

        # positional arguments with defaults
        for a, d in zip(args_args[num_no_defaults:], args_defaults):
            new_args.append(self.make_argument(a, d, ARG_OPT, no_type_check))
            names.append(a)

        # *arg
        if args.vararg is not None:
            new_args.append(self.make_argument(args.vararg, None, ARG_STAR, no_type_check))
            names.append(args.vararg)

        # keyword-only arguments with defaults
        for a, d in zip(args.kwonlyargs, args.kw_defaults):
            # A None default marks a *required* keyword-only argument.
            new_args.append(self.make_argument(
                a, d, ARG_NAMED if d is None else ARG_NAMED_OPT, no_type_check))
            names.append(a)

        # **kwarg
        if args.kwarg is not None:
            new_args.append(self.make_argument(args.kwarg, None, ARG_STAR2, no_type_check))
            names.append(args.kwarg)

        check_arg_names([arg.variable.name for arg in new_args], names, self.fail_arg)

        return new_args

    def make_argument(self, arg: ast3.arg, default: Optional[ast3.expr], kind: int,
                      no_type_check: bool) -> Argument:
        """Build a single Argument, resolving its type from an annotation
        or a per-argument "# type:" comment (annotation wins; having both
        is an error)."""
        if no_type_check:
            arg_type = None
        else:
            annotation = arg.annotation
            type_comment = arg.type_comment
            if annotation is not None and type_comment is not None:
                self.fail(message_registry.DUPLICATE_TYPE_SIGNATURES,
                          arg.lineno, arg.col_offset)
            arg_type = None
            if annotation is not None:
                arg_type = TypeConverter(self.errors, line=arg.lineno).visit(annotation)
            else:
                arg_type = self.translate_type_comment(arg, type_comment)
        return Argument(Var(arg.arg), arg_type, self.visit(default), kind)
    # ClassDef(identifier name,
    #          expr* bases,
    #          keyword* keywords,
    #          stmt* body,
    #          expr* decorator_list)
    def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef:
        """Convert a class definition, including bases, keywords and decorators."""
        self.class_and_function_stack.append('C')
        # Filter out the `**kwargs`-style keyword (kw.arg is None for it).
        keywords = [(kw.arg, self.visit(kw.value))
                    for kw in n.keywords if kw.arg]

        cdef = ClassDef(n.name,
                        self.as_required_block(n.body, n.lineno),
                        None,
                        self.translate_expr_list(n.bases),
                        metaclass=dict(keywords).get('metaclass'),
                        keywords=keywords)
        cdef.decorators = self.translate_expr_list(n.decorator_list)
        # Set end_lineno to the old mypy 0.700 lineno, in order to keep
        # existing "# type: ignore" comments working:
        if sys.version_info < (3, 8):
            cdef.line = n.lineno + len(n.decorator_list)
            cdef.end_line = n.lineno
        else:
            cdef.line = n.lineno
            cdef.end_line = n.decorator_list[0].lineno if n.decorator_list else None
        cdef.column = n.col_offset
        self.class_and_function_stack.pop()
        return cdef

    # Return(expr? value)
    def visit_Return(self, n: ast3.Return) -> ReturnStmt:
        node = ReturnStmt(self.visit(n.value))
        return self.set_line(node, n)

    # Delete(expr* targets)
    def visit_Delete(self, n: ast3.Delete) -> DelStmt:
        # `del a, b` has several targets; wrap them in a single tuple node.
        if len(n.targets) > 1:
            tup = TupleExpr(self.translate_expr_list(n.targets))
            tup.set_line(n.lineno)
            node = DelStmt(tup)
        else:
            node = DelStmt(self.visit(n.targets[0]))
        return self.set_line(node, n)

    # Assign(expr* targets, expr? value, string? type_comment, expr? annotation)
    def visit_Assign(self, n: ast3.Assign) -> AssignmentStmt:
        lvalues = self.translate_expr_list(n.targets)
        rvalue = self.visit(n.value)
        typ = self.translate_type_comment(n, n.type_comment)
        s = AssignmentStmt(lvalues, rvalue, type=typ, new_syntax=False)
        return self.set_line(s, n)

    # AnnAssign(expr target, expr annotation, expr? value, int simple)
    def visit_AnnAssign(self, n: ast3.AnnAssign) -> AssignmentStmt:
        """Convert a PEP 526 variable annotation (with or without a value)."""
        line = n.lineno
        if n.value is None:  # always allow 'x: int'
            # Synthesize a placeholder right-hand side for a bare annotation.
            rvalue = TempNode(AnyType(TypeOfAny.special_form),
                              no_rhs=True)  # type: Expression
            rvalue.line = line
            rvalue.column = n.col_offset
        else:
            rvalue = self.visit(n.value)
        typ = TypeConverter(self.errors, line=line).visit(n.annotation)
        assert typ is not None
        typ.column = n.annotation.col_offset
        s = AssignmentStmt([self.visit(n.target)], rvalue, type=typ, new_syntax=True)
        return self.set_line(s, n)

    # AugAssign(expr target, operator op, expr value)
    def visit_AugAssign(self, n: ast3.AugAssign) -> OperatorAssignmentStmt:
        s = OperatorAssignmentStmt(self.from_operator(n.op),
                                   self.visit(n.target),
                                   self.visit(n.value))
        return self.set_line(s, n)

    # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
    def visit_For(self, n: ast3.For) -> ForStmt:
        target_type = self.translate_type_comment(n, n.type_comment)
        node = ForStmt(self.visit(n.target),
                       self.visit(n.iter),
                       self.as_required_block(n.body, n.lineno),
                       self.as_block(n.orelse, n.lineno),
                       target_type)
        return self.set_line(node, n)

    # AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
    def visit_AsyncFor(self, n: ast3.AsyncFor) -> ForStmt:
        # Same shape as a regular for loop, plus the is_async flag.
        target_type = self.translate_type_comment(n, n.type_comment)
        node = ForStmt(self.visit(n.target),
                       self.visit(n.iter),
                       self.as_required_block(n.body, n.lineno),
                       self.as_block(n.orelse, n.lineno),
                       target_type)
        node.is_async = True
        return self.set_line(node, n)

    # While(expr test, stmt* body, stmt* orelse)
    def visit_While(self, n: ast3.While) -> WhileStmt:
        node = WhileStmt(self.visit(n.test),
                         self.as_required_block(n.body, n.lineno),
                         self.as_block(n.orelse, n.lineno))
        return self.set_line(node, n)

    # If(expr test, stmt* body, stmt* orelse)
    def visit_If(self, n: ast3.If) -> IfStmt:
        lineno = n.lineno
        node = IfStmt([self.visit(n.test)],
                      [self.as_required_block(n.body, lineno)],
                      self.as_block(n.orelse, lineno))
        return self.set_line(node, n)

    # With(withitem* items, stmt* body, string? type_comment)
    def visit_With(self, n: ast3.With) -> WithStmt:
        target_type = self.translate_type_comment(n, n.type_comment)
        node = WithStmt([self.visit(i.context_expr) for i in n.items],
                        [self.visit(i.optional_vars) for i in n.items],
                        self.as_required_block(n.body, n.lineno),
                        target_type)
        return self.set_line(node, n)

    # AsyncWith(withitem* items, stmt* body, string? type_comment)
    def visit_AsyncWith(self, n: ast3.AsyncWith) -> WithStmt:
        # Same shape as a regular with statement, plus the is_async flag.
        target_type = self.translate_type_comment(n, n.type_comment)
        s = WithStmt([self.visit(i.context_expr) for i in n.items],
                     [self.visit(i.optional_vars) for i in n.items],
                     self.as_required_block(n.body, n.lineno),
                     target_type)
        s.is_async = True
        return self.set_line(s, n)
    def visit_Raise(self, n: ast3.Raise) -> RaiseStmt:
        node = RaiseStmt(self.visit(n.exc), self.visit(n.cause))
        return self.set_line(node, n)

    # Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
    def visit_Try(self, n: ast3.Try) -> TryStmt:
        # Each handler contributes a parallel entry in (name, type, body).
        vs = [NameExpr(h.name) if h.name is not None else None for h in n.handlers]
        types = [self.visit(h.type) for h in n.handlers]
        handlers = [self.as_required_block(h.body, h.lineno) for h in n.handlers]

        node = TryStmt(self.as_required_block(n.body, n.lineno),
                       vs,
                       types,
                       handlers,
                       self.as_block(n.orelse, n.lineno),
                       self.as_block(n.finalbody, n.lineno))
        return self.set_line(node, n)

    # Assert(expr test, expr? msg)
    def visit_Assert(self, n: ast3.Assert) -> AssertStmt:
        node = AssertStmt(self.visit(n.test), self.visit(n.msg))
        return self.set_line(node, n)

    # Import(alias* names)
    def visit_Import(self, n: ast3.Import) -> Import:
        names = []  # type: List[Tuple[str, Optional[str]]]
        for alias in n.names:
            name = self.translate_module_id(alias.name)
            asname = alias.asname
            if asname is None and name != alias.name:
                # if the module name has been translated (and it's not already
                # an explicit import-as), make it an implicit import-as the
                # original name
                asname = alias.name
            names.append((name, asname))
        i = Import(names)
        self.imports.append(i)
        return self.set_line(i, n)

    # ImportFrom(identifier? module, alias* names, int? level)
    def visit_ImportFrom(self, n: ast3.ImportFrom) -> ImportBase:
        """Convert `from m import ...`; `from m import *` becomes ImportAll."""
        assert n.level is not None
        if len(n.names) == 1 and n.names[0].name == '*':
            # n.module is None for a purely relative `from . import *`.
            mod = n.module if n.module is not None else ''
            i = ImportAll(mod, n.level)  # type: ImportBase
        else:
            i = ImportFrom(self.translate_module_id(n.module) if n.module is not None else '',
                           n.level,
                           [(a.name, a.asname) for a in n.names])
        self.imports.append(i)
        return self.set_line(i, n)

    # Global(identifier* names)
    def visit_Global(self, n: ast3.Global) -> GlobalDecl:
        g = GlobalDecl(n.names)
        return self.set_line(g, n)

    # Nonlocal(identifier* names)
    def visit_Nonlocal(self, n: ast3.Nonlocal) -> NonlocalDecl:
        d = NonlocalDecl(n.names)
        return self.set_line(d, n)

    # Expr(expr value)
    def visit_Expr(self, n: ast3.Expr) -> ExpressionStmt:
        value = self.visit(n.value)
        node = ExpressionStmt(value)
        return self.set_line(node, n)

    # Pass
    def visit_Pass(self, n: ast3.Pass) -> PassStmt:
        s = PassStmt()
        return self.set_line(s, n)

    # Break
    def visit_Break(self, n: ast3.Break) -> BreakStmt:
        s = BreakStmt()
        return self.set_line(s, n)

    # Continue
    def visit_Continue(self, n: ast3.Continue) -> ContinueStmt:
        s = ContinueStmt()
        return self.set_line(s, n)

    # --- expr ---

    def visit_NamedExpr(self, n: NamedExpr) -> AssignmentExpr:
        # Walrus operator (Python 3.8+): `target := value`.
        s = AssignmentExpr(self.visit(n.target), self.visit(n.value))
        return self.set_line(s, n)
return self.group(op, self.translate_expr_list(n.values), n) def group(self, op: str, vals: List[Expression], n: ast3.expr) -> OpExpr: if len(vals) == 2: e = OpExpr(op, vals[0], vals[1]) else: e = OpExpr(op, vals[0], self.group(op, vals[1:], n)) return self.set_line(e, n) # BinOp(expr left, operator op, expr right) def visit_BinOp(self, n: ast3.BinOp) -> OpExpr: op = self.from_operator(n.op) if op is None: raise RuntimeError('cannot translate BinOp ' + str(type(n.op))) e = OpExpr(op, self.visit(n.left), self.visit(n.right)) return self.set_line(e, n) # UnaryOp(unaryop op, expr operand) def visit_UnaryOp(self, n: ast3.UnaryOp) -> UnaryExpr: op = None if isinstance(n.op, ast3.Invert): op = '~' elif isinstance(n.op, ast3.Not): op = 'not' elif isinstance(n.op, ast3.UAdd): op = '+' elif isinstance(n.op, ast3.USub): op = '-' if op is None: raise RuntimeError('cannot translate UnaryOp ' + str(type(n.op))) e = UnaryExpr(op, self.visit(n.operand)) return self.set_line(e, n) # Lambda(arguments args, expr body) def visit_Lambda(self, n: ast3.Lambda) -> LambdaExpr: body = ast3.Return(n.body) body.lineno = n.body.lineno body.col_offset = n.body.col_offset e = LambdaExpr(self.transform_args(n.args, n.lineno), self.as_required_block([body], n.lineno)) e.set_line(n.lineno, n.col_offset) # Overrides set_line -- can't use self.set_line return e # IfExp(expr test, expr body, expr orelse) def visit_IfExp(self, n: ast3.IfExp) -> ConditionalExpr: e = ConditionalExpr(self.visit(n.test), self.visit(n.body), self.visit(n.orelse)) return self.set_line(e, n) # Dict(expr* keys, expr* values) def visit_Dict(self, n: ast3.Dict) -> DictExpr: e = DictExpr(list(zip(self.translate_opt_expr_list(n.keys), self.translate_expr_list(n.values)))) return self.set_line(e, n) # Set(expr* elts) def visit_Set(self, n: ast3.Set) -> SetExpr: e = SetExpr(self.translate_expr_list(n.elts)) return self.set_line(e, n) # ListComp(expr elt, comprehension* generators) def visit_ListComp(self, n: ast3.ListComp) -> 
ListComprehension: e = ListComprehension(self.visit_GeneratorExp(cast(ast3.GeneratorExp, n))) return self.set_line(e, n) # SetComp(expr elt, comprehension* generators) def visit_SetComp(self, n: ast3.SetComp) -> SetComprehension: e = SetComprehension(self.visit_GeneratorExp(cast(ast3.GeneratorExp, n))) return self.set_line(e, n) # DictComp(expr key, expr value, comprehension* generators) def visit_DictComp(self, n: ast3.DictComp) -> DictionaryComprehension: targets = [self.visit(c.target) for c in n.generators] iters = [self.visit(c.iter) for c in n.generators] ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] is_async = [bool(c.is_async) for c in n.generators] e = DictionaryComprehension(self.visit(n.key), self.visit(n.value), targets, iters, ifs_list, is_async) return self.set_line(e, n) # GeneratorExp(expr elt, comprehension* generators) def visit_GeneratorExp(self, n: ast3.GeneratorExp) -> GeneratorExpr: targets = [self.visit(c.target) for c in n.generators] iters = [self.visit(c.iter) for c in n.generators] ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] is_async = [bool(c.is_async) for c in n.generators] e = GeneratorExpr(self.visit(n.elt), targets, iters, ifs_list, is_async) return self.set_line(e, n) # Await(expr value) def visit_Await(self, n: ast3.Await) -> AwaitExpr: v = self.visit(n.value) e = AwaitExpr(v) return self.set_line(e, n) # Yield(expr? 
value) def visit_Yield(self, n: ast3.Yield) -> YieldExpr: e = YieldExpr(self.visit(n.value)) return self.set_line(e, n) # YieldFrom(expr value) def visit_YieldFrom(self, n: ast3.YieldFrom) -> YieldFromExpr: e = YieldFromExpr(self.visit(n.value)) return self.set_line(e, n) # Compare(expr left, cmpop* ops, expr* comparators) def visit_Compare(self, n: ast3.Compare) -> ComparisonExpr: operators = [self.from_comp_operator(o) for o in n.ops] operands = self.translate_expr_list([n.left] + n.comparators) e = ComparisonExpr(operators, operands) return self.set_line(e, n) # Call(expr func, expr* args, keyword* keywords) # keyword = (identifier? arg, expr value) def visit_Call(self, n: Call) -> CallExpr: args = n.args keywords = n.keywords keyword_names = [k.arg for k in keywords] arg_types = self.translate_expr_list( [a.value if isinstance(a, Starred) else a for a in args] + [k.value for k in keywords]) arg_kinds = ([ARG_STAR if type(a) is Starred else ARG_POS for a in args] + [ARG_STAR2 if arg is None else ARG_NAMED for arg in keyword_names]) e = CallExpr(self.visit(n.func), arg_types, arg_kinds, cast('List[Optional[str]]', [None] * len(args)) + keyword_names) return self.set_line(e, n) # Constant(object value) -- a constant, in Python 3.8. def visit_Constant(self, n: Constant) -> Any: val = n.value e = None # type: Any if val is None: e = NameExpr('None') elif isinstance(val, str): e = StrExpr(n.s) elif isinstance(val, bytes): e = BytesExpr(bytes_to_human_readable_repr(n.s)) elif isinstance(val, bool): # Must check before int! e = NameExpr(str(val)) elif isinstance(val, int): e = IntExpr(val) elif isinstance(val, float): e = FloatExpr(val) elif isinstance(val, complex): e = ComplexExpr(val) elif val is Ellipsis: e = EllipsisExpr() else: raise RuntimeError('Constant not implemented for ' + str(type(val))) return self.set_line(e, n) # Num(object n) -- a number as a PyObject. 
    def visit_Num(self, n: ast3.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]:
        # The n field has the type complex, but complex isn't *really*
        # a parent of int and float, and this causes isinstance below
        # to think that the complex branch is always picked.  Avoid
        # this by throwing away the type.
        val = n.n  # type: object
        if isinstance(val, int):
            e = IntExpr(val)  # type: Union[IntExpr, FloatExpr, ComplexExpr]
        elif isinstance(val, float):
            e = FloatExpr(val)
        elif isinstance(val, complex):
            e = ComplexExpr(val)
        else:
            raise RuntimeError('num not implemented for ' + str(type(val)))
        return self.set_line(e, n)

    # Str(string s)
    def visit_Str(self, n: Str) -> Union[UnicodeExpr, StrExpr]:
        # Hack: assume all string literals in Python 2 stubs are normal
        # strs (i.e. not unicode).  All stubs are parsed with the Python 3
        # parser, which causes unprefixed string literals to be interpreted
        # as unicode instead of bytes.  This hack is generally okay,
        # because mypy considers str literals to be compatible with
        # unicode.
        e = StrExpr(n.s)
        return self.set_line(e, n)

    # JoinedStr(expr* values)
    def visit_JoinedStr(self, n: ast3.JoinedStr) -> Expression:
        # Each of n.values is a str or FormattedValue; we just concatenate
        # them all using ''.join.

        empty_string = StrExpr('')
        empty_string.set_line(n.lineno, n.col_offset)
        strs_to_join = ListExpr(self.translate_expr_list(n.values))
        strs_to_join.set_line(empty_string)
        # Don't make unecessary join call if there is only one str to join
        if len(strs_to_join.items) == 1:
            return self.set_line(strs_to_join.items[0], n)
        join_method = MemberExpr(empty_string, 'join')
        join_method.set_line(empty_string)
        result_expression = CallExpr(join_method,
                                     [strs_to_join],
                                     [ARG_POS],
                                     [None])
        return self.set_line(result_expression, n)

    # FormattedValue(expr value)
    def visit_FormattedValue(self, n: ast3.FormattedValue) -> Expression:
        # A FormattedValue is a component of a JoinedStr, or it can exist
        # on its own. We translate them to individual '{}'.format(value)
        # calls. Format specifier and conversion information is passed along
        # to allow mypyc to support f-strings with format specifiers and conversions.
        val_exp = self.visit(n.value)
        val_exp.set_line(n.lineno, n.col_offset)
        # n.conversion is the ord of 's'/'r'/'a' (or -1/None for no conversion).
        conv_str = '' if n.conversion is None or n.conversion < 0 else '!' + chr(n.conversion)
        format_string = StrExpr('{' + conv_str + ':{}}')
        format_spec_exp = self.visit(n.format_spec) if n.format_spec is not None else StrExpr('')
        format_string.set_line(n.lineno, n.col_offset)
        format_method = MemberExpr(format_string, 'format')
        format_method.set_line(format_string)
        result_expression = CallExpr(format_method,
                                     [val_exp, format_spec_exp],
                                     [ARG_POS, ARG_POS],
                                     [None, None])
        return self.set_line(result_expression, n)

    # Bytes(bytes s)
    def visit_Bytes(self, n: ast3.Bytes) -> Union[BytesExpr, StrExpr]:
        e = BytesExpr(bytes_to_human_readable_repr(n.s))
        return self.set_line(e, n)

    # NameConstant(singleton value)
    def visit_NameConstant(self, n: NameConstant) -> NameExpr:
        e = NameExpr(str(n.value))
        return self.set_line(e, n)

    # Ellipsis
    def visit_Ellipsis(self, n: ast3_Ellipsis) -> EllipsisExpr:
        e = EllipsisExpr()
        return self.set_line(e, n)

    # Attribute(expr value, identifier attr, expr_context ctx)
    def visit_Attribute(self, n: Attribute) -> Union[MemberExpr, SuperExpr]:
        value = n.value
        member_expr = MemberExpr(self.visit(value), n.attr)
        obj = member_expr.expr
        # super().attr gets its own dedicated node type.
        if (isinstance(obj, CallExpr) and
                isinstance(obj.callee, NameExpr) and
                obj.callee.name == 'super'):
            e = SuperExpr(member_expr.name, obj)  # type: Union[MemberExpr, SuperExpr]
        else:
            e = member_expr
        return self.set_line(e, n)

    # Subscript(expr value, slice slice, expr_context ctx)
    def visit_Subscript(self, n: ast3.Subscript) -> IndexExpr:
        e = IndexExpr(self.visit(n.value), self.visit(n.slice))
        self.set_line(e, n)
        if isinstance(e.index, SliceExpr):
            # Slice has no line/column in the raw ast.
            e.index.line = e.line
            e.index.column = e.column
        return e

    # Starred(expr value, expr_context ctx)
    def visit_Starred(self, n: Starred) -> StarExpr:
        e = StarExpr(self.visit(n.value))
        return self.set_line(e, n)

    # Name(identifier id, expr_context ctx)
    def visit_Name(self, n: Name) -> NameExpr:
        e = NameExpr(n.id)
        return self.set_line(e, n)

    # List(expr* elts, expr_context ctx)
    def visit_List(self, n: ast3.List) -> Union[ListExpr, TupleExpr]:
        expr_list = [self.visit(e) for e in n.elts]  # type: List[Expression]
        if isinstance(n.ctx, ast3.Store):
            # [x, y] = z and (x, y) = z means exactly the same thing
            e = TupleExpr(expr_list)  # type: Union[ListExpr, TupleExpr]
        else:
            e = ListExpr(expr_list)
        return self.set_line(e, n)

    # Tuple(expr* elts, expr_context ctx)
    def visit_Tuple(self, n: ast3.Tuple) -> TupleExpr:
        e = TupleExpr([self.visit(e) for e in n.elts])
        return self.set_line(e, n)

    # --- slice ---

    # Slice(expr? lower, expr? upper, expr? step)
    def visit_Slice(self, n: ast3.Slice) -> SliceExpr:
        return SliceExpr(self.visit(n.lower),
                         self.visit(n.upper),
                         self.visit(n.step))

    # ExtSlice(slice* dims)
    def visit_ExtSlice(self, n: ast3.ExtSlice) -> TupleExpr:
        return TupleExpr(self.translate_expr_list(n.dims))

    # Index(expr value)
    def visit_Index(self, n: Index) -> Node:
        # Index is a transparent wrapper around a plain expression; unwrap it.
        return self.visit(n.value)


class TypeConverter:
    """Translate ast3 expressions (from annotations/type comments) into mypy types."""

    def __init__(self,
                 errors: Optional[Errors],
                 line: int = -1,
                 override_column: int = -1,
                 assume_str_is_unicode: bool = True,
                 ) -> None:
        self.errors = errors
        self.line = line
        self.override_column = override_column
        # Stack of AST nodes currently being visited; lets visitors inspect parent().
        self.node_stack = []  # type: List[AST]
        self.assume_str_is_unicode = assume_str_is_unicode

    def convert_column(self, column: int) -> int:
        """Apply column override if defined; otherwise return column.

        Column numbers are sometimes incorrect in the AST and the column
        override can be used to work around that.
        """
        if self.override_column < 0:
            return column
        else:
            return self.override_column

    def invalid_type(self, node: AST, note: Optional[str] = None) -> RawExpressionType:
        """Constructs a type representing some expression that normally forms an invalid type.
        For example, if we see a type hint that says "3 + 4", we would transform that
        expression into a RawExpressionType.

        The semantic analysis layer will report an "Invalid type" error when it
        encounters this type, along with the given note if one is provided.

        See RawExpressionType's docstring for more details on how it's used.
        """
        return RawExpressionType(
            None,
            'typing.Any',
            line=self.line,
            column=getattr(node, 'col_offset', -1),
            note=note,
        )

    @overload
    def visit(self, node: ast3.expr) -> ProperType: ...

    @overload
    def visit(self, node: Optional[AST]) -> Optional[ProperType]: ...

    def visit(self, node: Optional[AST]) -> Optional[ProperType]:
        """Modified visit -- keep track of the stack of nodes"""
        if node is None:
            return None
        self.node_stack.append(node)
        try:
            method = 'visit_' + node.__class__.__name__
            visitor = getattr(self, method, None)
            if visitor is not None:
                return visitor(node)
            else:
                # No visitor means this expression form never denotes a valid type.
                return self.invalid_type(node)
        finally:
            self.node_stack.pop()

    def parent(self) -> Optional[AST]:
        """Return the AST node above the one we are processing"""
        if len(self.node_stack) < 2:
            return None
        return self.node_stack[-2]

    def fail(self, msg: str, line: int, column: int) -> None:
        if self.errors:
            self.errors.report(line, column, msg, blocker=True, code=codes.SYNTAX)

    def note(self, msg: str, line: int, column: int) -> None:
        if self.errors:
            self.errors.report(line, column, msg, severity='note', code=codes.SYNTAX)

    def translate_expr_list(self, l: Sequence[ast3.expr]) -> List[Type]:
        return [self.visit(e) for e in l]

    def visit_raw_str(self, s: str) -> Type:
        # An escape hatch that allows the AST walker in fastparse2 to
        # directly hook into the Python 3.5 type converter in some cases
        # without needing to create an intermediary `Str` object.
        _, typ = parse_type_comment(s.strip(),
                                    self.line,
                                    -1,
                                    self.errors,
                                    self.assume_str_is_unicode)
        return typ or AnyType(TypeOfAny.from_error)

    def visit_Call(self, e: Call) -> Type:
        # Parse the arg constructor
        f = e.func
        constructor = stringify_name(f)

        # Argument constructors like Arg(...) are only legal directly inside a
        # bracketed argument list, i.e. inside an ast3.List parent.
        if not isinstance(self.parent(), ast3.List):
            note = None
            if constructor:
                note = "Suggestion: use {0}[...] instead of {0}(...)".format(constructor)
            return self.invalid_type(e, note=note)
        if not constructor:
            self.fail("Expected arg constructor name", e.lineno, e.col_offset)

        name = None  # type: Optional[str]
        default_type = AnyType(TypeOfAny.special_form)
        typ = default_type  # type: Type
        for i, arg in enumerate(e.args):
            if i == 0:
                converted = self.visit(arg)
                assert converted is not None
                typ = converted
            elif i == 1:
                name = self._extract_argument_name(arg)
            else:
                self.fail("Too many arguments for argument constructor",
                          f.lineno, f.col_offset)
        for k in e.keywords:
            value = k.value
            if k.arg == "name":
                if name is not None:
                    self.fail('"{}" gets multiple values for keyword argument "name"'.format(
                        constructor), f.lineno, f.col_offset)
                name = self._extract_argument_name(value)
            elif k.arg == "type":
                if typ is not default_type:
                    self.fail('"{}" gets multiple values for keyword argument "type"'.format(
                        constructor), f.lineno, f.col_offset)
                converted = self.visit(value)
                assert converted is not None
                typ = converted
            else:
                self.fail(
                    'Unexpected argument "{}" for argument constructor'.format(k.arg),
                    value.lineno, value.col_offset)
        return CallableArgument(typ, name, constructor, e.lineno, e.col_offset)

    def translate_argument_list(self, l: Sequence[ast3.expr]) -> TypeList:
        return TypeList([self.visit(e) for e in l], line=self.line)

    def _extract_argument_name(self, n: ast3.expr) -> Optional[str]:
        if isinstance(n, Str):
            return n.s.strip()
        elif isinstance(n, NameConstant) and str(n.value) == 'None':
            return None
        self.fail('Expected string literal for argument name, got {}'.format(
            type(n).__name__), self.line, 0)
        return None

    def visit_Name(self, n: Name) -> Type:
        return UnboundType(n.id, line=self.line, column=self.convert_column(n.col_offset))

    def visit_NameConstant(self, n: NameConstant) -> Type:
        if isinstance(n.value, bool):
            return RawExpressionType(n.value, 'builtins.bool', line=self.line)
        else:
            return UnboundType(str(n.value), line=self.line, column=n.col_offset)

    # Only for 3.8 and newer
    def visit_Constant(self, n: Constant) -> Type:
        val = n.value
        if val is None:
            # None is a type.
            return UnboundType('None', line=self.line)
        if isinstance(val, str):
            # Parse forward reference.
            if (n.kind and 'u' in n.kind) or self.assume_str_is_unicode:
                return parse_type_string(n.s, 'builtins.unicode', self.line,
                                         n.col_offset,
                                         assume_str_is_unicode=self.assume_str_is_unicode)
            else:
                return parse_type_string(n.s, 'builtins.str', self.line, n.col_offset,
                                         assume_str_is_unicode=self.assume_str_is_unicode)
        if val is Ellipsis:
            # '...' is valid in some types.
            return EllipsisType(line=self.line)
        if isinstance(val, bool):
            # Special case for True/False.
            return RawExpressionType(val, 'builtins.bool', line=self.line)
        if isinstance(val, (int, float, complex)):
            return self.numeric_type(val, n)
        if isinstance(val, bytes):
            contents = bytes_to_human_readable_repr(val)
            return RawExpressionType(contents, 'builtins.bytes', self.line,
                                     column=n.col_offset)
        # Everything else is invalid.
        return self.invalid_type(n)

    # UnaryOp(op, operand)
    def visit_UnaryOp(self, n: UnaryOp) -> Type:
        # We support specifically Literal[-4] and nothing else.
        # For example, Literal[+4] or Literal[~6] is not supported.
        typ = self.visit(n.operand)
        if isinstance(typ, RawExpressionType) and isinstance(n.op, USub):
            if isinstance(typ.literal_value, int):
                # Negate the literal in place rather than building a new type.
                typ.literal_value *= -1
                return typ
        return self.invalid_type(n)

    def numeric_type(self, value: object, n: AST) -> Type:
        # The node's field has the type complex, but complex isn't *really*
        # a parent of int and float, and this causes isinstance below
        # to think that the complex branch is always picked.  Avoid
        # this by throwing away the type.
        if isinstance(value, int):
            numeric_value = value  # type: Optional[int]
            type_name = 'builtins.int'
        else:
            # Other kinds of numbers (floats, complex) are not valid parameters for
            # RawExpressionType so we just pass in 'None' for now. We'll report the
            # appropriate error at a later stage.
            numeric_value = None
            type_name = 'builtins.{}'.format(type(value).__name__)
        return RawExpressionType(
            numeric_value,
            type_name,
            line=self.line,
            column=getattr(n, 'col_offset', -1),
        )

    # These next three methods are only used if we are on python <
    # 3.8, using typed_ast.  They are defined unconditionally because
    # mypyc can't handle conditional method definitions.

    # Num(number n)
    def visit_Num(self, n: Num) -> Type:
        return self.numeric_type(n.n, n)

    # Str(string s)
    def visit_Str(self, n: Str) -> Type:
        # Note: we transform these fallback types into the correct types in
        # 'typeanal.py' -- specifically in the named_type_with_normalized_str method.
        # If we're analyzing Python 3, that function will translate 'builtins.unicode'
        # into 'builtins.str'. In contrast, if we're analyzing Python 2 code, we'll
        # translate 'builtins.bytes' in the method below into 'builtins.str'.

        # Do a getattr because the field doesn't exist in 3.8 (where
        # this method doesn't actually ever run.)  We can't just do
        # an attribute access with a `# type: ignore` because it would be
        # unused on < 3.8.
        kind = getattr(n, 'kind')  # type: str  # noqa
        if 'u' in kind or self.assume_str_is_unicode:
            return parse_type_string(n.s, 'builtins.unicode', self.line, n.col_offset,
                                     assume_str_is_unicode=self.assume_str_is_unicode)
        else:
            return parse_type_string(n.s, 'builtins.str', self.line, n.col_offset,
                                     assume_str_is_unicode=self.assume_str_is_unicode)

    # Bytes(bytes s)
    def visit_Bytes(self, n: Bytes) -> Type:
        contents = bytes_to_human_readable_repr(n.s)
        return RawExpressionType(contents, 'builtins.bytes', self.line,
                                 column=n.col_offset)

    # Subscript(expr value, slice slice, expr_context ctx)
    def visit_Subscript(self, n: ast3.Subscript) -> Type:
        # Slices (e.g. X[1:2]) are never valid in a type position.
        if not isinstance(n.slice, Index):
            self.fail(TYPE_COMMENT_SYNTAX_ERROR, self.line,
                      getattr(n, 'col_offset', -1))
            return AnyType(TypeOfAny.from_error)

        empty_tuple_index = False
        if isinstance(n.slice.value, ast3.Tuple):
            params = self.translate_expr_list(n.slice.value.elts)
            if len(n.slice.value.elts) == 0:
                empty_tuple_index = True
        else:
            params = [self.visit(n.slice.value)]

        value = self.visit(n.value)
        if isinstance(value, UnboundType) and not value.args:
            return UnboundType(value.name, params, line=self.line, column=value.column,
                               empty_tuple_index=empty_tuple_index)
        else:
            return self.invalid_type(n)

    def visit_Tuple(self, n: ast3.Tuple) -> Type:
        return TupleType(self.translate_expr_list(n.elts), _dummy_fallback,
                         implicit=True, line=self.line,
                         column=self.convert_column(n.col_offset))

    # Attribute(expr value, identifier attr, expr_context ctx)
    def visit_Attribute(self, n: Attribute) -> Type:
        before_dot = self.visit(n.value)

        if isinstance(before_dot, UnboundType) and not before_dot.args:
            return UnboundType("{}.{}".format(before_dot.name, n.attr), line=self.line)
        else:
            return self.invalid_type(n)

    # Ellipsis
    def visit_Ellipsis(self, n: ast3_Ellipsis) -> Type:
        return EllipsisType(line=self.line)

    # List(expr* elts, expr_context ctx)
    def visit_List(self, n: ast3.List) -> Type:
        assert isinstance(n.ctx, ast3.Load)
        return self.translate_argument_list(n.elts)


def stringify_name(n: AST) -> Optional[str]:
    """Return the dotted-name string for a Name/Attribute chain, or None."""
    if isinstance(n, Name):
        return n.id
    elif isinstance(n, Attribute):
        sv = stringify_name(n.value)
        if sv is not None:
            return "{}.{}".format(sv, n.attr)
    return None  # Can't do it.


def bytes_to_human_readable_repr(b: bytes) -> str:
    """Converts bytes into some human-readable representation. Unprintable
    bytes such as the nul byte are escaped. For example:

        >>> b = bytes([102, 111, 111, 10, 0])
        >>> s = bytes_to_human_readable_repr(b)
        >>> print(s)
        foo\n\x00
        >>> print(repr(s))
        'foo\\n\\x00'
    """
    return repr(b)[2:-1]
mypy-0.761/mypy/fastparse2.py0000644€tŠÔÚ€2›s®0000012621113576752246022345 0ustar jukkaDROPBOX\Domain Users00000000000000"""
This file is nearly identical to `fastparse.py`, except that it works with a Python 2
AST instead of a Python 3 AST.

Previously, how we handled Python 2 code was by first obtaining the Python 2 AST via
typed_ast, converting it into a Python 3 AST by using typed_ast.conversion, then
running it through mypy.fastparse.

While this worked, it did add some overhead, especially in larger Python 2 codebases.
This module allows us to skip the conversion step, saving us some time.

The reason why this file is not easily merged with mypy.fastparse despite the large
amount of redundancy is because the Python 2 AST and the Python 3 AST nodes belong to
two completely different class hierarchies, which made it difficult to write a shared
visitor between the two in a typesafe way.
""" import sys import warnings import typing # for typing.Type, which conflicts with types.Type from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, Dict, cast, List from typing_extensions import Final, Literal from mypy.sharedparse import ( special_function_elide_names, argument_elide_name, ) from mypy.nodes import ( MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef, ClassDef, Decorator, Block, Var, OperatorAssignmentStmt, ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt, DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl, WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt, TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr, DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, UnicodeExpr, FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr, UnaryExpr, LambdaExpr, ComparisonExpr, DictionaryComprehension, SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument, Expression, Statement, BackquoteExpr, PrintStmt, ExecStmt, ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2, OverloadPart, check_arg_names, FakeInfo, ) from mypy.types import ( Type, CallableType, AnyType, UnboundType, EllipsisType, TypeOfAny, Instance, ProperType ) from mypy import message_registry, errorcodes as codes from mypy.errors import Errors from mypy.fastparse import ( TypeConverter, parse_type_comment, bytes_to_human_readable_repr, parse_type_ignore_tag, TYPE_IGNORE_PATTERN, INVALID_TYPE_IGNORE ) from mypy.options import Options from mypy.reachability import mark_block_unreachable try: from typed_ast import ast27 from typed_ast.ast27 import ( AST, Call, Name, Attribute, Tuple as ast27_Tuple, ) # Import ast3 from fastparse, which has special case for Python 3.8 from mypy.fastparse import ast3, ast3_parse except ImportError: try: from typed_ast import ast35 # type: ignore[attr-defined] # noqa: F401 except ImportError: print('The typed_ast package is not installed.\n' 'You can 
install it with `python3 -m pip install typed-ast`.', file=sys.stderr) else: print('You need a more recent version of the typed_ast package.\n' 'You can update to the latest version with ' '`python3 -m pip install -U typed-ast`.', file=sys.stderr) sys.exit(1) N = TypeVar('N', bound=Node) # There is no way to create reasonable fallbacks at this stage, # they must be patched later. MISSING_FALLBACK = FakeInfo("fallback can't be filled out until semanal") # type: Final _dummy_fallback = Instance(MISSING_FALLBACK, [], -1) # type: Final TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment' # type: Final TYPE_COMMENT_AST_ERROR = 'invalid type comment' # type: Final def parse(source: Union[str, bytes], fnam: str, module: Optional[str], errors: Optional[Errors] = None, options: Optional[Options] = None) -> MypyFile: """Parse a source file, without doing any semantic analysis. Return the parse tree. If errors is not provided, raise ParseError on failure. Otherwise, use the errors object to report parse errors. """ raise_on_error = False if errors is None: errors = Errors() raise_on_error = True if options is None: options = Options() errors.set_file(fnam, module) is_stub_file = fnam.endswith('.pyi') try: assert options.python_version[0] < 3 and not is_stub_file # Disable deprecation warnings about <>. 
with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=DeprecationWarning) ast = ast27.parse(source, fnam, 'exec') tree = ASTConverter(options=options, errors=errors, ).visit(ast) assert isinstance(tree, MypyFile) tree.path = fnam tree.is_stub = is_stub_file except SyntaxError as e: errors.report(e.lineno, e.offset, e.msg, blocker=True, code=codes.SYNTAX) tree = MypyFile([], [], False, {}) if raise_on_error and errors.is_errors(): errors.raise_error() return tree def is_no_type_check_decorator(expr: ast27.expr) -> bool: if isinstance(expr, Name): return expr.id == 'no_type_check' elif isinstance(expr, Attribute): if isinstance(expr.value, Name): return expr.value.id == 'typing' and expr.attr == 'no_type_check' return False class ASTConverter: def __init__(self, options: Options, errors: Errors) -> None: # 'C' for class, 'F' for function self.class_and_function_stack = [] # type: List[Literal['C', 'F']] self.imports = [] # type: List[ImportBase] self.options = options self.errors = errors # Indicates whether this file is being parsed with unicode_literals enabled. # Note: typed_ast already naturally takes unicode_literals into account when # parsing so we don't have to worry when analyzing strings within this class. # # The only place where we use this field is when we call fastparse's TypeConverter # and any related methods. That class accepts a Python 3 AST instead of a Python 2 # AST: as a result, it don't special-case the `unicode_literals` import and won't know # exactly whether to parse some string as bytes or unicode. # # This distinction is relevant mostly when handling Literal types -- Literal[u"foo"] # is not the same type as Literal[b"foo"], and Literal["foo"] could mean either the # former or the latter based on context. # # This field is set in the 'visit_ImportFrom' method: it's ok to delay computing it # because any `from __future__ import blah` import must be located at the top of the # file, with the exception of the docstring. 
This means we're guaranteed to correctly # set this field before we encounter any type hints. self.unicode_literals = False # Cache of visit_X methods keyed by type of visited object self.visitor_cache = {} # type: Dict[type, Callable[[Optional[AST]], Any]] self.type_ignores = {} # type: Dict[int, List[str]] def fail(self, msg: str, line: int, column: int, blocker: bool = True) -> None: if blocker or not self.options.ignore_errors: self.errors.report(line, column, msg, blocker=blocker, code=codes.SYNTAX) def visit(self, node: Optional[AST]) -> Any: # same as in typed_ast stub if node is None: return None typeobj = type(node) visitor = self.visitor_cache.get(typeobj) if visitor is None: method = 'visit_' + node.__class__.__name__ visitor = getattr(self, method) self.visitor_cache[typeobj] = visitor return visitor(node) def set_line(self, node: N, n: Union[ast27.expr, ast27.stmt]) -> N: node.line = n.lineno node.column = n.col_offset return node def translate_expr_list(self, l: Sequence[AST]) -> List[Expression]: res = [] # type: List[Expression] for e in l: exp = self.visit(e) assert isinstance(exp, Expression) res.append(exp) return res def get_lineno(self, node: Union[ast27.expr, ast27.stmt]) -> int: if isinstance(node, (ast27.ClassDef, ast27.FunctionDef)) and node.decorator_list: return node.decorator_list[0].lineno return node.lineno def translate_stmt_list(self, stmts: Sequence[ast27.stmt], module: bool = False) -> List[Statement]: # A "# type: ignore" comment before the first statement of a module # ignores the whole module: if (module and stmts and self.type_ignores and min(self.type_ignores) < self.get_lineno(stmts[0])): self.errors.used_ignored_lines[self.errors.file].add(min(self.type_ignores)) block = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) mark_block_unreachable(block) return [block] res = [] # type: List[Statement] for stmt in stmts: node = self.visit(stmt) assert isinstance(node, Statement) res.append(node) return res def 
translate_type_comment(self, n: ast27.stmt, type_comment: Optional[str]) -> Optional[ProperType]: if type_comment is None: return None else: lineno = n.lineno extra_ignore, typ = parse_type_comment(type_comment, lineno, n.col_offset, self.errors, assume_str_is_unicode=self.unicode_literals) if extra_ignore is not None: self.type_ignores[lineno] = extra_ignore return typ op_map = { ast27.Add: '+', ast27.Sub: '-', ast27.Mult: '*', ast27.Div: '/', ast27.Mod: '%', ast27.Pow: '**', ast27.LShift: '<<', ast27.RShift: '>>', ast27.BitOr: '|', ast27.BitXor: '^', ast27.BitAnd: '&', ast27.FloorDiv: '//' } # type: Final[Dict[typing.Type[AST], str]] def from_operator(self, op: ast27.operator) -> str: op_name = ASTConverter.op_map.get(type(op)) if op_name is None: raise RuntimeError('Unknown operator ' + str(type(op))) elif op_name == '@': raise RuntimeError('mypy does not support the MatMult operator') else: return op_name comp_op_map = { ast27.Gt: '>', ast27.Lt: '<', ast27.Eq: '==', ast27.GtE: '>=', ast27.LtE: '<=', ast27.NotEq: '!=', ast27.Is: 'is', ast27.IsNot: 'is not', ast27.In: 'in', ast27.NotIn: 'not in' } # type: Final[Dict[typing.Type[AST], str]] def from_comp_operator(self, op: ast27.cmpop) -> str: op_name = ASTConverter.comp_op_map.get(type(op)) if op_name is None: raise RuntimeError('Unknown comparison operator ' + str(type(op))) else: return op_name def as_block(self, stmts: List[ast27.stmt], lineno: int) -> Optional[Block]: b = None if stmts: b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) b.set_line(lineno) return b def as_required_block(self, stmts: List[ast27.stmt], lineno: int) -> Block: assert stmts # must be non-empty b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) b.set_line(lineno) return b def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]: ret = [] # type: List[Statement] current_overload = [] # type: List[OverloadPart] current_overload_name = None # type: Optional[str] for stmt in 
stmts: if (current_overload_name is not None and isinstance(stmt, (Decorator, FuncDef)) and stmt.name == current_overload_name): current_overload.append(stmt) else: if len(current_overload) == 1: ret.append(current_overload[0]) elif len(current_overload) > 1: ret.append(OverloadedFuncDef(current_overload)) if isinstance(stmt, Decorator): current_overload = [stmt] current_overload_name = stmt.name else: current_overload = [] current_overload_name = None ret.append(stmt) if len(current_overload) == 1: ret.append(current_overload[0]) elif len(current_overload) > 1: ret.append(OverloadedFuncDef(current_overload)) return ret def in_method_scope(self) -> bool: return self.class_and_function_stack[-2:] == ['C', 'F'] def translate_module_id(self, id: str) -> str: """Return the actual, internal module id for a source text id. For example, translate '__builtin__' in Python 2 to 'builtins'. """ if id == self.options.custom_typing_module: return 'typing' elif id == '__builtin__': # HACK: __builtin__ in Python 2 is aliases to builtins. However, the implementation # is named __builtin__.py (there is another layer of translation elsewhere). return 'builtins' return id def visit_Module(self, mod: ast27.Module) -> MypyFile: self.type_ignores = {} for ti in mod.type_ignores: parsed = parse_type_ignore_tag(ti.tag) # type: ignore[attr-defined] if parsed is not None: self.type_ignores[ti.lineno] = parsed else: self.fail(INVALID_TYPE_IGNORE, ti.lineno, -1) body = self.fix_function_overloads(self.translate_stmt_list(mod.body, module=True)) return MypyFile(body, self.imports, False, self.type_ignores, ) # --- stmt --- # FunctionDef(identifier name, arguments args, # stmt* body, expr* decorator_list, expr? returns, string? type_comment) # arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults, # arg? 
    def visit_FunctionDef(self, n: ast27.FunctionDef) -> Statement:
        """Translate a function definition, merging in any '# type:' signature comment."""
        self.class_and_function_stack.append('F')
        lineno = n.lineno
        converter = TypeConverter(self.errors, line=lineno, override_column=n.col_offset,
                                  assume_str_is_unicode=self.unicode_literals)
        args, decompose_stmts = self.transform_args(n.args, lineno)

        arg_kinds = [arg.kind for arg in args]
        arg_names = [arg.variable.name for arg in args]  # type: List[Optional[str]]
        # Elide names that are conventionally positional-only (e.g. '__x').
        arg_names = [None if argument_elide_name(name) else name for name in arg_names]
        if special_function_elide_names(n.name):
            arg_names = [None] * len(arg_names)

        arg_types = []  # type: List[Optional[Type]]
        type_comment = n.type_comment
        if (n.decorator_list and any(is_no_type_check_decorator(d) for d in n.decorator_list)):
            # @no_type_check: treat everything as unannotated.
            arg_types = [None] * len(args)
            return_type = None
        elif type_comment is not None and len(type_comment) > 0:
            try:
                # Signature type comments are parsed with the Python 3 parser.
                func_type_ast = ast3_parse(type_comment, '', 'func_type')
                assert isinstance(func_type_ast, ast3.FunctionType)
                # for ellipsis arg
                if (len(func_type_ast.argtypes) == 1 and
                        isinstance(func_type_ast.argtypes[0], ast3.Ellipsis)):
                    # '(...) -> R': keep per-argument annotations, defaulting to Any.
                    arg_types = [a.type_annotation
                                 if a.type_annotation is not None
                                 else AnyType(TypeOfAny.unannotated)
                                 for a in args]
                else:
                    # PEP 484 disallows both type annotations and type comments
                    if any(a.type_annotation is not None for a in args):
                        self.fail(message_registry.DUPLICATE_TYPE_SIGNATURES,
                                  lineno, n.col_offset)
                    arg_types = [a if a is not None else AnyType(TypeOfAny.unannotated) for
                                 a in converter.translate_expr_list(func_type_ast.argtypes)]
                return_type = converter.visit(func_type_ast.returns)

                # add implicit self type
                if self.in_method_scope() and len(arg_types) < len(args):
                    arg_types.insert(0, AnyType(TypeOfAny.special_form))
            except SyntaxError:
                # Malformed type comment: report it and fall back to Any everywhere.
                stripped_type = type_comment.split("#", 2)[0].strip()
                err_msg = "{} '{}'".format(TYPE_COMMENT_SYNTAX_ERROR, stripped_type)
                self.fail(err_msg, lineno, n.col_offset)
                arg_types = [AnyType(TypeOfAny.from_error)] * len(args)
                return_type = AnyType(TypeOfAny.from_error)
        else:
            # No type comment: use per-argument annotations (usually None in Py2).
            arg_types = [a.type_annotation for a in args]
            return_type = converter.visit(None)

        for arg, arg_type in zip(args, arg_types):
            self.set_type_optional(arg_type, arg.initializer)

        func_type = None
        if any(arg_types) or return_type:
            # Validate the explicit signature against the actual argument list.
            if len(arg_types) != 1 and any(isinstance(t, EllipsisType)
                                           for t in arg_types):
                self.fail("Ellipses cannot accompany other argument types "
                          "in function type signature", lineno, n.col_offset)
            elif len(arg_types) > len(arg_kinds):
                self.fail('Type signature has too many arguments', lineno, n.col_offset,
                          blocker=False)
            elif len(arg_types) < len(arg_kinds):
                self.fail('Type signature has too few arguments', lineno, n.col_offset,
                          blocker=False)
            else:
                any_type = AnyType(TypeOfAny.unannotated)
                func_type = CallableType([a if a is not None else any_type
                                          for a in arg_types],
                                         arg_kinds,
                                         arg_names,
                                         return_type if return_type is not None
                                         else any_type,
                                         _dummy_fallback)

        body = self.as_required_block(n.body, lineno)
        if decompose_stmts:
            # Prepend synthetic assignments created for tuple arguments.
            body.body = decompose_stmts + body.body
        func_def = FuncDef(n.name, args, body, func_type)
        if isinstance(func_def.type, CallableType):
            # semanal.py does some in-place modifications we want to avoid
            func_def.unanalyzed_type = func_def.type.copy_modified()
        if func_type is not None:
            func_type.definition = func_def
            func_type.line = lineno

        if n.decorator_list:
            var = Var(func_def.name)
            var.is_ready = False
            var.set_line(n.decorator_list[0].lineno)

            func_def.is_decorated = True
            func_def.set_line(lineno + len(n.decorator_list))
            func_def.body.set_line(func_def.get_line())
            dec = Decorator(func_def, self.translate_expr_list(n.decorator_list), var)
            dec.set_line(lineno, n.col_offset)
            retval = dec  # type: Statement
        else:
            # Overrides set_line -- can't use self.set_line
            func_def.set_line(lineno, n.col_offset)
            retval = func_def
        self.class_and_function_stack.pop()
        return retval

    def set_type_optional(self, type: Optional[Type],
                          initializer: Optional[Expression]) -> None:
        # Mark an argument type as implicitly Optional when its default is None.
        if self.options.no_implicit_optional:
            return
        # Indicate that type should be wrapped in an Optional if arg is initialized to None.
        optional = isinstance(initializer, NameExpr) and initializer.name == 'None'
        if isinstance(type, UnboundType):
            type.optional = optional

    def transform_args(self,
                       n: ast27.arguments,
                       line: int,
                       ) -> Tuple[List[Argument], List[Statement]]:
        """Convert an AST argument list into mypy Arguments.

        Also returns synthetic statements that unpack Python 2 tuple
        arguments (e.g. 'def f((a, b)):') at the top of the body.
        """
        type_comments = n.type_comments  # type: Sequence[Optional[str]]
        converter = TypeConverter(self.errors, line=line,
                                  assume_str_is_unicode=self.unicode_literals)
        decompose_stmts = []  # type: List[Statement]

        n_args = n.args
        args = [(self.convert_arg(i, arg, line, decompose_stmts),
                 self.get_type(i, type_comments, converter))
                for i, arg in enumerate(n_args)]
        defaults = self.translate_expr_list(n.defaults)
        names = [name for arg in n_args
                 for name in self.extract_names(arg)]  # type: List[str]

        new_args = []  # type: List[Argument]
        num_no_defaults = len(args) - len(defaults)
        # positional arguments without defaults
        for a, annotation in args[:num_no_defaults]:
            new_args.append(Argument(a, annotation, None, ARG_POS))

        # positional arguments with defaults
        for (a, annotation), d in zip(args[num_no_defaults:], defaults):
            new_args.append(Argument(a, annotation, d, ARG_OPT))

        # *arg
        if n.vararg is not None:
            new_args.append(Argument(Var(n.vararg),
                                     self.get_type(len(args), type_comments, converter),
                                     None,
                                     ARG_STAR))
            names.append(n.vararg)

        # **kwarg
        if n.kwarg is not None:
            typ = self.get_type(len(args) + (0 if n.vararg is None else 1),
                                type_comments,
                                converter)
            new_args.append(Argument(Var(n.kwarg), typ, None, ARG_STAR2))
            names.append(n.kwarg)

        # We don't have any context object to give, but we have closed around the line num
        def fail_arg(msg: str, arg: None) -> None:
            self.fail(msg, line, 0)
        check_arg_names(names, [None] * len(names), fail_arg)

        return new_args, decompose_stmts

    def extract_names(self, arg: ast27.expr) -> List[str]:
        # Collect all plain names bound by an argument (recursing into tuple args).
        if isinstance(arg, Name):
            return [arg.id]
        elif isinstance(arg, ast27_Tuple):
            return [name for elt in arg.elts for name in self.extract_names(elt)]
        else:
            return []

    def convert_arg(self, index: int, arg: ast27.expr, line: int,
                    decompose_stmts: List[Statement]) -> Var:
        # Convert a single argument expression to a Var, synthesizing a name and
        # an unpacking assignment for Python 2 tuple arguments.
        if isinstance(arg, Name):
            v = arg.id
        elif isinstance(arg, ast27_Tuple):
            v = '__tuple_arg_{}'.format(index + 1)
            rvalue = NameExpr(v)
            rvalue.set_line(line)
            assignment = AssignmentStmt([self.visit(arg)], rvalue)
            assignment.set_line(line)
            decompose_stmts.append(assignment)
        else:
            raise RuntimeError("'{}' is not a valid argument.".format(ast27.dump(arg)))
        return Var(v)

    def get_type(self, i: int,
                 type_comments: Sequence[Optional[str]],
                 converter: TypeConverter) -> Optional[Type]:
        # Return the type from the i-th per-argument type comment, if present.
        if i < len(type_comments):
            comment = type_comments[i]
            if comment is not None:
                typ = converter.visit_raw_str(comment)
                extra_ignore = TYPE_IGNORE_PATTERN.match(comment)
                if extra_ignore:
                    # The comment carried a trailing '# type: ignore[...]' -- record it.
                    tag = cast(Any, extra_ignore).group(1)  # type: Optional[str]
                    ignored = parse_type_ignore_tag(tag)
                    if ignored is None:
                        self.fail(INVALID_TYPE_IGNORE, converter.line, -1)
                    else:
                        self.type_ignores[converter.line] = ignored
                return typ
        return None

    def stringify_name(self, n: AST) -> str:
        # Render a dotted name expression (Name / Attribute chain) as text.
        if isinstance(n, Name):
            return n.id
        elif isinstance(n, Attribute):
            return "{}.{}".format(self.stringify_name(n.value), n.attr)
        else:
            assert False, "can't stringify " + str(type(n))

    # ClassDef(identifier name,
    #  expr* bases,
    #  keyword* keywords,
    #  stmt* body,
    #  expr* decorator_list)
    def visit_ClassDef(self, n: ast27.ClassDef) -> ClassDef:
        self.class_and_function_stack.append('C')

        cdef = ClassDef(n.name,
                        self.as_required_block(n.body, n.lineno),
                        None,
                        self.translate_expr_list(n.bases),
                        metaclass=None)
        cdef.decorators = self.translate_expr_list(n.decorator_list)
        # The class starts after its decorators in the source.
        cdef.line = n.lineno + len(n.decorator_list)
        cdef.column = n.col_offset
        cdef.end_line = n.lineno
        self.class_and_function_stack.pop()
        return cdef

    # Return(expr? value)
    def visit_Return(self, n: ast27.Return) -> ReturnStmt:
        stmt = ReturnStmt(self.visit(n.value))
        return self.set_line(stmt, n)

    # Delete(expr* targets)
    def visit_Delete(self, n: ast27.Delete) -> DelStmt:
        # Multiple targets become a single tuple target.
        if len(n.targets) > 1:
            tup = TupleExpr(self.translate_expr_list(n.targets))
            tup.set_line(n.lineno)
            stmt = DelStmt(tup)
        else:
            stmt = DelStmt(self.visit(n.targets[0]))
        return self.set_line(stmt, n)

    # Assign(expr* targets, expr value, string? type_comment)
    def visit_Assign(self, n: ast27.Assign) -> AssignmentStmt:
        typ = self.translate_type_comment(n, n.type_comment)
        stmt = AssignmentStmt(self.translate_expr_list(n.targets),
                              self.visit(n.value),
                              type=typ)
        return self.set_line(stmt, n)

    # AugAssign(expr target, operator op, expr value)
    def visit_AugAssign(self, n: ast27.AugAssign) -> OperatorAssignmentStmt:
        stmt = OperatorAssignmentStmt(self.from_operator(n.op),
                                      self.visit(n.target),
                                      self.visit(n.value))
        return self.set_line(stmt, n)

    # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
    def visit_For(self, n: ast27.For) -> ForStmt:
        typ = self.translate_type_comment(n, n.type_comment)
        stmt = ForStmt(self.visit(n.target),
                       self.visit(n.iter),
                       self.as_required_block(n.body, n.lineno),
                       self.as_block(n.orelse, n.lineno),
                       typ)
        return self.set_line(stmt, n)

    # While(expr test, stmt* body, stmt* orelse)
    def visit_While(self, n: ast27.While) -> WhileStmt:
        stmt = WhileStmt(self.visit(n.test),
                         self.as_required_block(n.body, n.lineno),
                         self.as_block(n.orelse, n.lineno))
        return self.set_line(stmt, n)

    # If(expr test, stmt* body, stmt* orelse)
    def visit_If(self, n: ast27.If) -> IfStmt:
        stmt = IfStmt([self.visit(n.test)],
                      [self.as_required_block(n.body, n.lineno)],
                      self.as_block(n.orelse, n.lineno))
        return self.set_line(stmt, n)

    # With(withitem* items, stmt* body, string? type_comment)
    def visit_With(self, n: ast27.With) -> WithStmt:
        typ = self.translate_type_comment(n, n.type_comment)
        stmt = WithStmt([self.visit(n.context_expr)],
                        [self.visit(n.optional_vars)],
                        self.as_required_block(n.body, n.lineno),
                        typ)
        return self.set_line(stmt, n)

    def visit_Raise(self, n: ast27.Raise) -> RaiseStmt:
        # Python 2 'raise type, inst, tback' is modelled as a tuple expression.
        if n.type is None:
            e = None
        else:
            if n.inst is None:
                e = self.visit(n.type)
            else:
                if n.tback is None:
                    e = TupleExpr([self.visit(n.type), self.visit(n.inst)])
                else:
                    e = TupleExpr([self.visit(n.type),
                                   self.visit(n.inst),
                                   self.visit(n.tback)])

        stmt = RaiseStmt(e, None)
        return self.set_line(stmt, n)

    # TryExcept(stmt* body, excepthandler* handlers, stmt* orelse)
    def visit_TryExcept(self, n: ast27.TryExcept) -> TryStmt:
        stmt = self.try_handler(n.body, n.handlers, n.orelse, [], n.lineno)
        return self.set_line(stmt, n)

    def visit_TryFinally(self, n: ast27.TryFinally) -> TryStmt:
        # Merge 'try/except' directly wrapped by 'try/finally' into one TryStmt.
        if len(n.body) == 1 and isinstance(n.body[0], ast27.TryExcept):
            stmt = self.try_handler([n.body[0]], [], [], n.finalbody, n.lineno)
        else:
            stmt = self.try_handler(n.body, [], [], n.finalbody, n.lineno)
        return self.set_line(stmt, n)

    def try_handler(self,
                    body: List[ast27.stmt],
                    handlers: List[ast27.ExceptHandler],
                    orelse: List[ast27.stmt],
                    finalbody: List[ast27.stmt],
                    lineno: int) -> TryStmt:
        # Shared construction for TryExcept/TryFinally translation.
        vs = []  # type: List[Optional[NameExpr]]
        for item in handlers:
            if item.name is None:
                vs.append(None)
            elif isinstance(item.name, Name):
                vs.append(NameExpr(item.name.id))
            else:
                # Binding the exception to a non-name target is unsupported.
                self.fail("Sorry, `except , ` is not supported",
                          item.lineno, item.col_offset)
                vs.append(None)
        types = [self.visit(h.type) for h in handlers]
        handlers_ = [self.as_required_block(h.body, h.lineno) for h in handlers]

        return TryStmt(self.as_required_block(body, lineno),
                       vs,
                       types,
                       handlers_,
                       self.as_block(orelse, lineno),
                       self.as_block(finalbody, lineno))

    def visit_Print(self, n: ast27.Print) -> PrintStmt:
        stmt = PrintStmt(self.translate_expr_list(n.values), n.nl, self.visit(n.dest))
        return self.set_line(stmt, n)
    def visit_Exec(self, n: ast27.Exec) -> ExecStmt:
        stmt = ExecStmt(self.visit(n.body),
                        self.visit(n.globals),
                        self.visit(n.locals))
        return self.set_line(stmt, n)

    def visit_Repr(self, n: ast27.Repr) -> BackquoteExpr:
        # Python 2 backquote expression `x`.
        stmt = BackquoteExpr(self.visit(n.value))
        return self.set_line(stmt, n)

    # Assert(expr test, expr? msg)
    def visit_Assert(self, n: ast27.Assert) -> AssertStmt:
        stmt = AssertStmt(self.visit(n.test), self.visit(n.msg))
        return self.set_line(stmt, n)

    # Import(alias* names)
    def visit_Import(self, n: ast27.Import) -> Import:
        names = []  # type: List[Tuple[str, Optional[str]]]
        for alias in n.names:
            name = self.translate_module_id(alias.name)
            asname = alias.asname
            if asname is None and name != alias.name:
                # if the module name has been translated (and it's not already
                # an explicit import-as), make it an implicit import-as the
                # original name
                asname = alias.name
            names.append((name, asname))
        i = Import(names)
        self.imports.append(i)
        return self.set_line(i, n)

    # ImportFrom(identifier? module, alias* names, int? level)
    def visit_ImportFrom(self, n: ast27.ImportFrom) -> ImportBase:
        assert n.level is not None
        if len(n.names) == 1 and n.names[0].name == '*':
            mod = n.module if n.module is not None else ''
            i = ImportAll(mod, n.level)  # type: ImportBase
        else:
            module_id = self.translate_module_id(n.module) if n.module is not None else ''
            i = ImportFrom(module_id, n.level, [(a.name, a.asname) for a in n.names])

            # See comments in the constructor for more information about this field.
            if module_id == '__future__' and any(a.name == 'unicode_literals'
                                                 for a in n.names):
                self.unicode_literals = True
        self.imports.append(i)
        return self.set_line(i, n)

    # Global(identifier* names)
    def visit_Global(self, n: ast27.Global) -> GlobalDecl:
        stmt = GlobalDecl(n.names)
        return self.set_line(stmt, n)

    # Expr(expr value)
    def visit_Expr(self, n: ast27.Expr) -> ExpressionStmt:
        value = self.visit(n.value)
        stmt = ExpressionStmt(value)
        return self.set_line(stmt, n)

    # Pass
    def visit_Pass(self, n: ast27.Pass) -> PassStmt:
        stmt = PassStmt()
        return self.set_line(stmt, n)

    # Break
    def visit_Break(self, n: ast27.Break) -> BreakStmt:
        stmt = BreakStmt()
        return self.set_line(stmt, n)

    # Continue
    def visit_Continue(self, n: ast27.Continue) -> ContinueStmt:
        stmt = ContinueStmt()
        return self.set_line(stmt, n)

    # --- expr ---

    # BoolOp(boolop op, expr* values)
    def visit_BoolOp(self, n: ast27.BoolOp) -> OpExpr:
        # mypy translates (1 and 2 and 3) as (1 and (2 and 3))
        assert len(n.values) >= 2
        if isinstance(n.op, ast27.And):
            op = 'and'
        elif isinstance(n.op, ast27.Or):
            op = 'or'
        else:
            raise RuntimeError('unknown BoolOp ' + str(type(n)))

        # potentially inefficient!
        e = self.group(self.translate_expr_list(n.values), op)
        return self.set_line(e, n)

    def group(self, vals: List[Expression], op: str) -> OpExpr:
        # Right-fold a flat operand list into nested binary OpExprs.
        if len(vals) == 2:
            return OpExpr(op, vals[0], vals[1])
        else:
            return OpExpr(op, vals[0], self.group(vals[1:], op))

    # BinOp(expr left, operator op, expr right)
    def visit_BinOp(self, n: ast27.BinOp) -> OpExpr:
        op = self.from_operator(n.op)

        if op is None:
            raise RuntimeError('cannot translate BinOp ' + str(type(n.op)))

        e = OpExpr(op, self.visit(n.left), self.visit(n.right))
        return self.set_line(e, n)

    # UnaryOp(unaryop op, expr operand)
    def visit_UnaryOp(self, n: ast27.UnaryOp) -> UnaryExpr:
        op = None
        if isinstance(n.op, ast27.Invert):
            op = '~'
        elif isinstance(n.op, ast27.Not):
            op = 'not'
        elif isinstance(n.op, ast27.UAdd):
            op = '+'
        elif isinstance(n.op, ast27.USub):
            op = '-'

        if op is None:
            raise RuntimeError('cannot translate UnaryOp ' + str(type(n.op)))

        e = UnaryExpr(op, self.visit(n.operand))
        return self.set_line(e, n)

    # Lambda(arguments args, expr body)
    def visit_Lambda(self, n: ast27.Lambda) -> LambdaExpr:
        # The lambda body is wrapped in a synthetic Return statement.
        args, decompose_stmts = self.transform_args(n.args, n.lineno)
        n_body = ast27.Return(n.body)
        n_body.lineno = n.body.lineno
        n_body.col_offset = n.body.col_offset
        body = self.as_required_block([n_body], n.lineno)
        if decompose_stmts:
            body.body = decompose_stmts + body.body

        e = LambdaExpr(args, body)
        e.set_line(n.lineno, n.col_offset)  # Overrides set_line -- can't use self.set_line
        return e

    # IfExp(expr test, expr body, expr orelse)
    def visit_IfExp(self, n: ast27.IfExp) -> ConditionalExpr:
        e = ConditionalExpr(self.visit(n.test),
                            self.visit(n.body),
                            self.visit(n.orelse))
        return self.set_line(e, n)

    # Dict(expr* keys, expr* values)
    def visit_Dict(self, n: ast27.Dict) -> DictExpr:
        e = DictExpr(list(zip(self.translate_expr_list(n.keys),
                              self.translate_expr_list(n.values))))
        return self.set_line(e, n)

    # Set(expr* elts)
    def visit_Set(self, n: ast27.Set) -> SetExpr:
        e = SetExpr(self.translate_expr_list(n.elts))
        return self.set_line(e, n)
# ListComp(expr elt, comprehension* generators) def visit_ListComp(self, n: ast27.ListComp) -> ListComprehension: e = ListComprehension(self.visit_GeneratorExp(cast(ast27.GeneratorExp, n))) return self.set_line(e, n) # SetComp(expr elt, comprehension* generators) def visit_SetComp(self, n: ast27.SetComp) -> SetComprehension: e = SetComprehension(self.visit_GeneratorExp(cast(ast27.GeneratorExp, n))) return self.set_line(e, n) # DictComp(expr key, expr value, comprehension* generators) def visit_DictComp(self, n: ast27.DictComp) -> DictionaryComprehension: targets = [self.visit(c.target) for c in n.generators] iters = [self.visit(c.iter) for c in n.generators] ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] e = DictionaryComprehension(self.visit(n.key), self.visit(n.value), targets, iters, ifs_list, [False for _ in n.generators]) return self.set_line(e, n) # GeneratorExp(expr elt, comprehension* generators) def visit_GeneratorExp(self, n: ast27.GeneratorExp) -> GeneratorExpr: targets = [self.visit(c.target) for c in n.generators] iters = [self.visit(c.iter) for c in n.generators] ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] e = GeneratorExpr(self.visit(n.elt), targets, iters, ifs_list, [False for _ in n.generators]) return self.set_line(e, n) # Yield(expr? value) def visit_Yield(self, n: ast27.Yield) -> YieldExpr: e = YieldExpr(self.visit(n.value)) return self.set_line(e, n) # Compare(expr left, cmpop* ops, expr* comparators) def visit_Compare(self, n: ast27.Compare) -> ComparisonExpr: operators = [self.from_comp_operator(o) for o in n.ops] operands = self.translate_expr_list([n.left] + n.comparators) e = ComparisonExpr(operators, operands) return self.set_line(e, n) # Call(expr func, expr* args, keyword* keywords) # keyword = (identifier? 
    def visit_Call(self, n: Call) -> CallExpr:
        # Build the flattened argument representation: expressions, kinds, names.
        arg_types = []  # type: List[ast27.expr]
        arg_kinds = []  # type: List[int]
        signature = []  # type: List[Optional[str]]

        args = n.args
        arg_types.extend(args)
        arg_kinds.extend(ARG_POS for a in args)
        signature.extend(None for a in args)

        if n.starargs is not None:
            arg_types.append(n.starargs)
            arg_kinds.append(ARG_STAR)
            signature.append(None)

        keywords = n.keywords
        arg_types.extend(k.value for k in keywords)
        arg_kinds.extend(ARG_NAMED for k in keywords)
        signature.extend(k.arg for k in keywords)

        if n.kwargs is not None:
            arg_types.append(n.kwargs)
            arg_kinds.append(ARG_STAR2)
            signature.append(None)

        e = CallExpr(self.visit(n.func),
                     self.translate_expr_list(arg_types),
                     arg_kinds,
                     signature)
        return self.set_line(e, n)

    # Num(object n) -- a number as a PyObject.
    def visit_Num(self, n: ast27.Num) -> Expression:
        # The n field has the type complex, but complex isn't *really*
        # a parent of int and float, and this causes isinstance below
        # to think that the complex branch is always picked. Avoid
        # this by throwing away the type.
        value = n.n  # type: object
        is_inverse = False
        if str(n.n).startswith('-'):  # Hackish because of complex.
            # Represent a negative literal as unary minus on the positive value.
            value = -n.n
            is_inverse = True

        if isinstance(value, int):
            expr = IntExpr(value)  # type: Expression
        elif isinstance(value, float):
            expr = FloatExpr(value)
        elif isinstance(value, complex):
            expr = ComplexExpr(value)
        else:
            raise RuntimeError('num not implemented for ' + str(type(n.n)))

        if is_inverse:
            expr = UnaryExpr('-', expr)

        return self.set_line(expr, n)

    # Str(string s)
    def visit_Str(self, n: ast27.Str) -> Expression:
        # Note: typed_ast.ast27 will handled unicode_literals for us. If
        # n.s is of type 'bytes', we know unicode_literals was not enabled;
        # otherwise we know it was.
        #
        # Note that the following code is NOT run when parsing Python 2.7 stubs:
        # we always parse stub files (no matter what version) using the Python 3
        # parser. This is also why string literals in Python 2.7 stubs are assumed
        # to be unicode.
        if isinstance(n.s, bytes):
            contents = bytes_to_human_readable_repr(n.s)
            e = StrExpr(contents, from_python_3=False)  # type: Union[StrExpr, UnicodeExpr]
            return self.set_line(e, n)
        else:
            e = UnicodeExpr(n.s)
            return self.set_line(e, n)

    # Ellipsis
    def visit_Ellipsis(self, n: ast27.Ellipsis) -> EllipsisExpr:
        return EllipsisExpr()

    # Attribute(expr value, identifier attr, expr_context ctx)
    def visit_Attribute(self, n: Attribute) -> Expression:
        # First create MemberExpr and then potentially replace with a SuperExpr
        # to improve performance when compiled. The check for "super()" will be
        # faster with native AST nodes. Note also that super expressions are
        # less common than normal member expressions.
        member_expr = MemberExpr(self.visit(n.value), n.attr)
        obj = member_expr.expr
        if (isinstance(obj, CallExpr) and
                isinstance(obj.callee, NameExpr) and
                obj.callee.name == 'super'):
            e = SuperExpr(member_expr.name, obj)  # type: Expression
        else:
            e = member_expr
        return self.set_line(e, n)

    # Subscript(expr value, slice slice, expr_context ctx)
    def visit_Subscript(self, n: ast27.Subscript) -> IndexExpr:
        e = IndexExpr(self.visit(n.value), self.visit(n.slice))
        self.set_line(e, n)
        if isinstance(e.index, SliceExpr):
            # Slice has no line/column in the raw ast.
            e.index.line = e.line
            e.index.column = e.column
        return e

    # Name(identifier id, expr_context ctx)
    def visit_Name(self, n: Name) -> NameExpr:
        e = NameExpr(n.id)
        return self.set_line(e, n)

    # List(expr* elts, expr_context ctx)
    def visit_List(self, n: ast27.List) -> Union[ListExpr, TupleExpr]:
        expr_list = [self.visit(e) for e in n.elts]  # type: List[Expression]
        if isinstance(n.ctx, ast27.Store):
            # [x, y] = z and (x, y) = z means exactly the same thing
            e = TupleExpr(expr_list)  # type: Union[ListExpr, TupleExpr]
        else:
            e = ListExpr(expr_list)
        return self.set_line(e, n)

    # Tuple(expr* elts, expr_context ctx)
    def visit_Tuple(self, n: ast27_Tuple) -> TupleExpr:
        e = TupleExpr([self.visit(e) for e in n.elts])
        return self.set_line(e, n)

    # --- slice ---

    # Slice(expr? lower, expr? upper, expr? step)
    def visit_Slice(self, n: ast27.Slice) -> SliceExpr:
        return SliceExpr(self.visit(n.lower),
                         self.visit(n.upper),
                         self.visit(n.step))

    # ExtSlice(slice* dims)
    def visit_ExtSlice(self, n: ast27.ExtSlice) -> TupleExpr:
        return TupleExpr(self.translate_expr_list(n.dims))

    # Index(expr value)
    def visit_Index(self, n: ast27.Index) -> Expression:
        return self.visit(n.value)
mypy-0.761/mypy/find_sources.py0000644€tŠÔÚ€2›s®0000001413613576752246022760 0ustar jukkaDROPBOX\Domain Users00000000000000"""Routines for finding the sources that mypy will check"""

import os.path

from typing import List, Sequence, Set, Tuple, Optional, Dict
from typing_extensions import Final

from mypy.modulefinder import BuildSource, PYTHON_EXTENSIONS
from mypy.fscache import FileSystemCache
from mypy.options import Options

# Recognized Python source suffixes ('.pyi' before '.py').
PY_EXTENSIONS = tuple(PYTHON_EXTENSIONS)  # type: Final


class InvalidSourceList(Exception):
    """Exception indicating a problem in the list of sources given to mypy."""


def create_source_list(files: Sequence[str], options: Options,
                       fscache: Optional[FileSystemCache] = None,
                       allow_empty_dir: bool = False) -> List[BuildSource]:
    """From a list of source files/directories, makes a list of BuildSources.

    Raises InvalidSourceList on errors.
    """
    fscache = fscache or FileSystemCache()
    finder = SourceFinder(fscache)

    targets = []
    for f in files:
        if f.endswith(PY_EXTENSIONS):
            # Can raise InvalidSourceList if a directory doesn't have a valid module name.
            name, base_dir = finder.crawl_up(os.path.normpath(f))
            targets.append(BuildSource(f, name, None, base_dir))
        elif fscache.isdir(f):
            sub_targets = finder.expand_dir(os.path.normpath(f))
            if not sub_targets and not allow_empty_dir:
                raise InvalidSourceList("There are no .py[i] files in directory '{}'"
                                        .format(f))
            targets.extend(sub_targets)
        else:
            # Anything else is treated as an opaque script path.
            mod = os.path.basename(f) if options.scripts_are_modules else None
            targets.append(BuildSource(f, mod, None))
    return targets


def keyfunc(name: str) -> Tuple[int, str]:
    """Determines sort order for directory listing.

    The desirable property is foo < foo.pyi < foo.py.
    """
    base, suffix = os.path.splitext(name)
    for i, ext in enumerate(PY_EXTENSIONS):
        if suffix == ext:
            return (i, base)
    return (-1, name)


class SourceFinder:
    def __init__(self, fscache: FileSystemCache) -> None:
        self.fscache = fscache
        # A cache for package names, mapping from directory path to module id and base dir
        self.package_cache = {}  # type: Dict[str, Tuple[str, str]]

    def expand_dir(self, arg: str, mod_prefix: str = '') -> List[BuildSource]:
        """Convert a directory name to a list of sources to build."""
        f = self.get_init_file(arg)
        # Subpackages (mod_prefix non-empty) without __init__ are not packages.
        if mod_prefix and not f:
            return []
        seen = set()  # type: Set[str]
        sources = []
        top_mod, base_dir = self.crawl_up_dir(arg)
        if f and not mod_prefix:
            mod_prefix = top_mod + '.'
        if mod_prefix:
            sources.append(BuildSource(f, mod_prefix.rstrip('.'), None, base_dir))
        names = self.fscache.listdir(arg)
        # Sort so that .pyi shadows .py for the same module name.
        names.sort(key=keyfunc)
        for name in names:
            # Skip certain names altogether
            if (name == '__pycache__' or name == 'py.typed'
                    or name.startswith('.')
                    or name.endswith(('~', '.pyc', '.pyo'))):
                continue
            path = os.path.join(arg, name)
            if self.fscache.isdir(path):
                sub_sources = self.expand_dir(path, mod_prefix + name + '.')
                if sub_sources:
                    seen.add(name)
                    sources.extend(sub_sources)
            else:
                base, suffix = os.path.splitext(name)
                if base == '__init__':
                    continue
                if base not in seen and '.' not in base and suffix in PY_EXTENSIONS:
                    seen.add(base)
                    src = BuildSource(path, mod_prefix + base, None, base_dir)
                    sources.append(src)
        return sources

    def crawl_up(self, arg: str) -> Tuple[str, str]:
        """Given a .py[i] filename, return module and base directory

        We crawl up the path until we find a directory without
        __init__.py[i], or until we run out of path components.
        """
        dir, mod = os.path.split(arg)
        mod = strip_py(mod) or mod
        base, base_dir = self.crawl_up_dir(dir)
        if mod == '__init__' or not mod:
            mod = base
        else:
            mod = module_join(base, mod)

        return mod, base_dir

    def crawl_up_dir(self, dir: str) -> Tuple[str, str]:
        """Given a directory name, return the corresponding module name and base directory

        Use package_cache to cache results.
        """
        if dir in self.package_cache:
            return self.package_cache[dir]

        parent_dir, base = os.path.split(dir)
        if not dir or not self.get_init_file(dir) or not base:
            # Not inside a package: this directory is the base.
            res = ''
            base_dir = dir or '.'
        else:
            # Ensure that base is a valid python module name
            if not base.isidentifier():
                raise InvalidSourceList('{} is not a valid Python package name'.format(base))
            parent, base_dir = self.crawl_up_dir(parent_dir)
            res = module_join(parent, base)

        self.package_cache[dir] = res, base_dir
        return res, base_dir

    def get_init_file(self, dir: str) -> Optional[str]:
        """Check whether a directory contains a file named __init__.py[i].

        If so, return the file's name (with dir prefixed).  If not, return None.

        This prefers .pyi over .py (because of the ordering of PY_EXTENSIONS).
        """
        for ext in PY_EXTENSIONS:
            f = os.path.join(dir, '__init__' + ext)
            if self.fscache.isfile(f):
                return f
            if ext == '.py' and self.fscache.init_under_package_root(f):
                return f
        return None


def module_join(parent: str, child: str) -> str:
    """Join module ids, accounting for a possibly empty parent."""
    if parent:
        return parent + '.' + child
    else:
        return child


def strip_py(arg: str) -> Optional[str]:
    """Strip a trailing .py or .pyi suffix.

    Return None if no such suffix is found.
    """
    for ext in PY_EXTENSIONS:
        if arg.endswith(ext):
            return arg[:-len(ext)]
    return None
mypy-0.761/mypy/fixup.py0000644€tŠÔÚ€2›s®0000003032013576752246021421 0ustar jukkaDROPBOX\Domain Users00000000000000"""Fix up various things after deserialization."""

from typing import Any, Dict, Optional
from typing_extensions import Final

from mypy.nodes import (
    MypyFile, SymbolNode, SymbolTable, SymbolTableNode,
    TypeInfo, FuncDef, OverloadedFuncDef, Decorator, Var,
    TypeVarExpr, ClassDef, Block, TypeAlias,
)
from mypy.types import (
    CallableType, Instance, Overloaded, TupleType, TypedDictType,
    TypeVarType, UnboundType, UnionType, TypeVisitor, LiteralType,
    TypeType, NOT_READY, TypeAliasType, AnyType, TypeOfAny
)
from mypy.visitor import NodeVisitor
from mypy.lookup import lookup_fully_qualified


# N.B: we do a allow_missing fixup when fixing up a fine-grained
# incremental cache load (since there may be cross-refs into deleted
# modules)
def fixup_module(tree: MypyFile, modules: Dict[str, MypyFile],
                 allow_missing: bool) -> None:
    """Resolve cross-references in a deserialized module tree in place."""
    node_fixer = NodeFixer(modules, allow_missing)
    node_fixer.visit_symbol_table(tree.names)


# TODO: Fix up .info when deserializing, i.e. much earlier.
class NodeFixer(NodeVisitor[None]): current_info = None # type: Optional[TypeInfo] def __init__(self, modules: Dict[str, MypyFile], allow_missing: bool) -> None: self.modules = modules self.allow_missing = allow_missing self.type_fixer = TypeFixer(self.modules, allow_missing) # NOTE: This method isn't (yet) part of the NodeVisitor API. def visit_type_info(self, info: TypeInfo) -> None: save_info = self.current_info try: self.current_info = info if info.defn: info.defn.accept(self) if info.names: self.visit_symbol_table(info.names) if info.bases: for base in info.bases: base.accept(self.type_fixer) if info._promote: info._promote.accept(self.type_fixer) if info.tuple_type: info.tuple_type.accept(self.type_fixer) if info.typeddict_type: info.typeddict_type.accept(self.type_fixer) if info.declared_metaclass: info.declared_metaclass.accept(self.type_fixer) if info.metaclass_type: info.metaclass_type.accept(self.type_fixer) if info._mro_refs: info.mro = [lookup_qualified_typeinfo(self.modules, name, self.allow_missing) for name in info._mro_refs] info._mro_refs = None finally: self.current_info = save_info # NOTE: This method *definitely* isn't part of the NodeVisitor API. def visit_symbol_table(self, symtab: SymbolTable) -> None: # Copy the items because we may mutate symtab. for key, value in list(symtab.items()): cross_ref = value.cross_ref if cross_ref is not None: # Fix up cross-reference. value.cross_ref = None if cross_ref in self.modules: value.node = self.modules[cross_ref] else: stnode = lookup_qualified_stnode(self.modules, cross_ref, self.allow_missing) if stnode is not None: assert stnode.node is not None value.node = stnode.node elif not self.allow_missing: assert False, "Could not find cross-ref %s" % (cross_ref,) else: # We have a missing crossref in allow missing mode, need to put something value.node = missing_info(self.modules) else: if isinstance(value.node, TypeInfo): # TypeInfo has no accept(). TODO: Add it? 
self.visit_type_info(value.node) elif value.node is not None: value.node.accept(self) else: assert False, 'Unexpected empty node %r: %s' % (key, value) def visit_func_def(self, func: FuncDef) -> None: if self.current_info is not None: func.info = self.current_info if func.type is not None: func.type.accept(self.type_fixer) def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: if self.current_info is not None: o.info = self.current_info if o.type: o.type.accept(self.type_fixer) for item in o.items: item.accept(self) if o.impl: o.impl.accept(self) def visit_decorator(self, d: Decorator) -> None: if self.current_info is not None: d.var.info = self.current_info if d.func: d.func.accept(self) if d.var: d.var.accept(self) for node in d.decorators: node.accept(self) def visit_class_def(self, c: ClassDef) -> None: for v in c.type_vars: for value in v.values: value.accept(self.type_fixer) v.upper_bound.accept(self.type_fixer) def visit_type_var_expr(self, tv: TypeVarExpr) -> None: for value in tv.values: value.accept(self.type_fixer) tv.upper_bound.accept(self.type_fixer) def visit_var(self, v: Var) -> None: if self.current_info is not None: v.info = self.current_info if v.type is not None: v.type.accept(self.type_fixer) def visit_type_alias(self, a: TypeAlias) -> None: a.target.accept(self.type_fixer) class TypeFixer(TypeVisitor[None]): def __init__(self, modules: Dict[str, MypyFile], allow_missing: bool) -> None: self.modules = modules self.allow_missing = allow_missing def visit_instance(self, inst: Instance) -> None: # TODO: Combine Instances that are exactly the same? type_ref = inst.type_ref if type_ref is None: return # We've already been here. inst.type_ref = None inst.type = lookup_qualified_typeinfo(self.modules, type_ref, self.allow_missing) # TODO: Is this needed or redundant? # Also fix up the bases, just in case. 
for base in inst.type.bases: if base.type is NOT_READY: base.accept(self) for a in inst.args: a.accept(self) if inst.last_known_value is not None: inst.last_known_value.accept(self) def visit_type_alias_type(self, t: TypeAliasType) -> None: type_ref = t.type_ref if type_ref is None: return # We've already been here. t.type_ref = None t.alias = lookup_qualified_alias(self.modules, type_ref, self.allow_missing) for a in t.args: a.accept(self) def visit_any(self, o: Any) -> None: pass # Nothing to descend into. def visit_callable_type(self, ct: CallableType) -> None: if ct.fallback: ct.fallback.accept(self) for argt in ct.arg_types: # argt may be None, e.g. for __self in NamedTuple constructors. if argt is not None: argt.accept(self) if ct.ret_type is not None: ct.ret_type.accept(self) for v in ct.variables: if v.values: for val in v.values: val.accept(self) v.upper_bound.accept(self) for arg in ct.bound_args: if arg: arg.accept(self) def visit_overloaded(self, t: Overloaded) -> None: for ct in t.items(): ct.accept(self) def visit_erased_type(self, o: Any) -> None: # This type should exist only temporarily during type inference raise RuntimeError("Shouldn't get here", o) def visit_deleted_type(self, o: Any) -> None: pass # Nothing to descend into. def visit_none_type(self, o: Any) -> None: pass # Nothing to descend into. def visit_uninhabited_type(self, o: Any) -> None: pass # Nothing to descend into. 
    def visit_typeddict_type(self, tdt: TypedDictType) -> None:
        """Fix up the item types and the fallback instance of a TypedDict type."""
        if tdt.items:
            for it in tdt.items.values():
                it.accept(self)
        if tdt.fallback is not None:
            # A non-None type_ref means the fallback Instance has not been
            # fixed up yet (visit_instance clears it once resolved).
            if tdt.fallback.type_ref is not None:
                if lookup_qualified(self.modules, tdt.fallback.type_ref,
                                    self.allow_missing) is None:
                    # We reject fake TypeInfos for TypedDict fallbacks because
                    # the latter are used in type checking and must be valid.
                    tdt.fallback.type_ref = 'typing._TypedDict'
            tdt.fallback.accept(self)
def lookup_qualified(modules: Dict[str, MypyFile], name: str,
                     allow_missing: bool) -> Optional[SymbolNode]:
    """Resolve a fully qualified name to its symbol node, or None if absent."""
    stnode = lookup_qualified_stnode(modules, name, allow_missing)
    return stnode.node if stnode is not None else None
def free_tree(tree: MypyFile) -> None:
    """Recursively free the AST bodies of a module.

    Symbol tables hold references to the definitions themselves, so the
    definitions can't simply be dropped -- but clearing their bodies
    releases the bulk of the memory.
    """
    freer = TreeFreer()
    tree.accept(freer)
    tree.defs.clear()
# It is set by set_package_root() below. self.package_root = [] # type: List[str] self.flush() def set_package_root(self, package_root: List[str]) -> None: self.package_root = package_root def flush(self) -> None: """Start another transaction and empty all caches.""" self.stat_cache = {} # type: Dict[str, os.stat_result] self.stat_error_cache = {} # type: Dict[str, OSError] self.listdir_cache = {} # type: Dict[str, List[str]] self.listdir_error_cache = {} # type: Dict[str, OSError] self.isfile_case_cache = {} # type: Dict[str, bool] self.read_cache = {} # type: Dict[str, bytes] self.read_error_cache = {} # type: Dict[str, Exception] self.hash_cache = {} # type: Dict[str, str] self.fake_package_cache = set() # type: Set[str] def stat(self, path: str) -> os.stat_result: if path in self.stat_cache: return self.stat_cache[path] if path in self.stat_error_cache: raise copy_os_error(self.stat_error_cache[path]) try: st = os.stat(path) except OSError as err: if self.init_under_package_root(path): try: return self._fake_init(path) except OSError: pass # Take a copy to get rid of associated traceback and frame objects. # Just assigning to __traceback__ doesn't free them. self.stat_error_cache[path] = copy_os_error(err) raise err self.stat_cache[path] = st return st def init_under_package_root(self, path: str) -> bool: """Is this path an __init__.py under a package root? This is used to detect packages that don't contain __init__.py files, which is needed to support Bazel. The function should only be called for non-existing files. It will return True if it refers to a __init__.py file that Bazel would create, so that at runtime Python would think the directory containing it is a package. For this to work you must pass one or more package roots using the --package-root flag. As an exceptional case, any directory that is a package root itself will not be considered to contain a __init__.py file. 
    def _fake_init(self, path: str) -> os.stat_result:
        """Prime the cache with a fake __init__.py file.

        This makes code that looks for path believe an empty file by
        that name exists.  Should only be called after
        init_under_package_root() returns True.
        """
        dirname, basename = os.path.split(path)
        assert basename == '__init__.py', path
        assert not os.path.exists(path), path  # Not cached!
        dirname = os.path.normpath(dirname)
        st = self.stat(dirname)  # May raise OSError
        # Get stat result as a sequence so we can modify it.
        # (Alas, typeshed's os.stat_result is not a sequence yet.)
        tpl = tuple(st)  # type: ignore[arg-type, var-annotated]
        seq = list(tpl)  # type: List[float]
        # Masquerade the directory's stat as a read-only regular empty file.
        seq[stat.ST_MODE] = stat.S_IFREG | 0o444
        seq[stat.ST_INO] = 1
        seq[stat.ST_NLINK] = 1
        seq[stat.ST_SIZE] = 0
        tpl = tuple(seq)
        st = os.stat_result(tpl)
        self.stat_cache[path] = st
        # Make listdir() and read() also pretend this file exists.
        self.fake_package_cache.add(dirname)
        return st
if path in self.fake_package_cache and '__init__.py' not in res: res.append('__init__.py') # Updates the result as well as the cache return res if path in self.listdir_error_cache: raise copy_os_error(self.listdir_error_cache[path]) try: results = os.listdir(path) except OSError as err: # Like above, take a copy to reduce memory use. self.listdir_error_cache[path] = copy_os_error(err) raise err self.listdir_cache[path] = results # Check the fake cache. if path in self.fake_package_cache and '__init__.py' not in results: results.append('__init__.py') return results def isfile(self, path: str) -> bool: try: st = self.stat(path) except OSError: return False return stat.S_ISREG(st.st_mode) def isfile_case(self, path: str, prefix: str) -> bool: """Return whether path exists and is a file. On case-insensitive filesystems (like Mac or Windows) this returns False if the case of path's last component does not exactly match the case found in the filesystem. We check also the case of other path components up to prefix. For example, if path is 'user-stubs/pack/mod.pyi' and prefix is 'user-stubs', we check that the case of 'pack' and 'mod.py' matches exactly, 'user-stubs' will be case insensitive on case insensitive filesystems. The caller must ensure that prefix is a valid file system prefix of path. """ if path in self.isfile_case_cache: return self.isfile_case_cache[path] head, tail = os.path.split(path) if not tail: res = False else: try: names = self.listdir(head) # This allows one to check file name case sensitively in # case-insensitive filesystems. res = tail in names and self.isfile(path) except OSError: res = False # Also check the other path components in case sensitive way. 
def copy_os_error(e: OSError) -> OSError:
    """Duplicate an OSError without its traceback/frame references.

    Errors kept in the caches would otherwise pin the whole stack that
    raised them in memory.
    """
    clone = OSError(*e.args)
    clone.errno = e.errno
    clone.strerror = e.strerror
    clone.filename = e.filename
    if e.filename2:
        clone.filename2 = e.filename2
    return clone
FileSystemWatcher: """Watcher for file system changes among specific paths. All file system access is performed using FileSystemCache. We detect changed files by stat()ing them all and comparing md5 hashes of potentially changed files. If a file has both size and mtime unmodified, the file is assumed to be unchanged. An important goal of this class is to make it easier to eventually use file system events to detect file changes. Note: This class doesn't flush the file system cache. If you don't manually flush it, changes won't be seen. """ # TODO: Watching directories? # TODO: Handle non-files def __init__(self, fs: FileSystemCache) -> None: self.fs = fs self._paths = set() # type: Set[str] self._file_data = {} # type: Dict[str, Optional[FileData]] def dump_file_data(self) -> Dict[str, Tuple[float, int, str]]: return {k: v for k, v in self._file_data.items() if v is not None} def set_file_data(self, path: str, data: FileData) -> None: self._file_data[path] = data def add_watched_paths(self, paths: Iterable[str]) -> None: for path in paths: if path not in self._paths: # By storing None this path will get reported as changed by # find_changed if it exists. self._file_data[path] = None self._paths |= set(paths) def remove_watched_paths(self, paths: Iterable[str]) -> None: for path in paths: if path in self._file_data: del self._file_data[path] self._paths -= set(paths) def _update(self, path: str) -> None: st = self.fs.stat(path) md5 = self.fs.md5(path) self._file_data[path] = FileData(st.st_mtime, st.st_size, md5) def _find_changed(self, paths: Iterable[str]) -> AbstractSet[str]: changed = set() for path in paths: old = self._file_data[path] try: st = self.fs.stat(path) except FileNotFoundError: if old is not None: # File was deleted. changed.add(path) self._file_data[path] = None else: if old is None: # File is new. 
changed.add(path) self._update(path) # Round mtimes down, to match the mtimes we write to meta files elif st.st_size != old.st_size or int(st.st_mtime) != int(old.st_mtime): # Only look for changes if size or mtime has changed as an # optimization, since calculating md5 is expensive. new_md5 = self.fs.md5(path) self._update(path) if st.st_size != old.st_size or new_md5 != old.md5: # Changed file. changed.add(path) return changed def find_changed(self) -> AbstractSet[str]: """Return paths that have changes since the last call, in the watched set.""" return self._find_changed(self._paths) def update_changed(self, remove: List[str], update: List[str], ) -> AbstractSet[str]: """Alternative to find_changed() given explicit changes. This only calls self.fs.stat() on added or updated files, not on all files. It believes all other files are unchanged! Implies add_watched_paths() for add and update, and remove_watched_paths() for remove. """ self.remove_watched_paths(remove) self.add_watched_paths(update) return self._find_changed(update) mypy-0.761/mypy/gclogger.py0000644€tŠÔÚ€2›s®0000000314013576752246022057 0ustar jukkaDROPBOX\Domain Users00000000000000import gc import time from typing import Mapping, Optional class GcLogger: """Context manager to log GC stats and overall time.""" def __enter__(self) -> 'GcLogger': self.gc_start_time = None # type: Optional[float] self.gc_time = 0.0 self.gc_calls = 0 self.gc_collected = 0 self.gc_uncollectable = 0 gc.callbacks.append(self.gc_callback) self.start_time = time.time() return self def gc_callback(self, phase: str, info: Mapping[str, int]) -> None: if phase == 'start': assert self.gc_start_time is None, "Start phase out of sequence" self.gc_start_time = time.time() elif phase == 'stop': assert self.gc_start_time is not None, "Stop phase out of sequence" self.gc_calls += 1 self.gc_time += time.time() - self.gc_start_time self.gc_start_time = None self.gc_collected += info['collected'] self.gc_uncollectable += 
def is_git_repo(dir: str) -> bool:
    """Is the given directory version-controlled with git?"""
    git_marker = os.path.join(dir, ".git")
    return os.path.exists(git_marker)
def is_dirty(dir: str) -> bool:
    """Check whether a git repository has uncommitted changes."""
    command = ["git", "status", "-uno", "--porcelain"]
    output = subprocess.check_output(command, cwd=dir)
    return output.strip() != b""
def chdir_prefix(dir: str) -> str:
    """Return the command to change to the target directory, plus '&&'.

    Returns the empty string when dir is already the current directory.
    """
    if os.path.relpath(dir) == ".":
        return ""
    # The pipes module was deprecated (PEP 594) and removed in Python
    # 3.13; pipes.quote was an alias of shlex.quote all along, so this
    # produces byte-identical output.  Imported locally to avoid
    # touching the module's import block.
    from shlex import quote
    return "cd " + quote(dir) + " && "
def extract_module_names(type_name: Optional[str]) -> List[str]:
    """Returns the module names of a fully qualified type name."""
    if type_name is None:
        return []
    # Discard the first entry, which is just the qualified name of the type.
    return split_module_names(type_name)[1:]
if typ in self.seen_aliases: continue self.seen_aliases.add(typ) if typ in self.cache: modules = self.cache[typ] else: modules = typ.accept(self) self.cache[typ] = set(modules) output.update(modules) return output def visit_unbound_type(self, t: types.UnboundType) -> Set[str]: return self._visit(t.args) def visit_any(self, t: types.AnyType) -> Set[str]: return set() def visit_none_type(self, t: types.NoneType) -> Set[str]: return set() def visit_uninhabited_type(self, t: types.UninhabitedType) -> Set[str]: return set() def visit_erased_type(self, t: types.ErasedType) -> Set[str]: return set() def visit_deleted_type(self, t: types.DeletedType) -> Set[str]: return set() def visit_type_var(self, t: types.TypeVarType) -> Set[str]: return self._visit(t.values) | self._visit(t.upper_bound) def visit_instance(self, t: types.Instance) -> Set[str]: out = self._visit(t.args) if t.type: # Uses of a class depend on everything in the MRO, # as changes to classes in the MRO can add types to methods, # change property types, change the MRO itself, etc. 
for s in t.type.mro: out.update(split_module_names(s.module_name)) if t.type.metaclass_type is not None: out.update(split_module_names(t.type.metaclass_type.type.module_name)) return out def visit_callable_type(self, t: types.CallableType) -> Set[str]: out = self._visit(t.arg_types) | self._visit(t.ret_type) if t.definition is not None: out.update(extract_module_names(t.definition.fullname)) return out def visit_overloaded(self, t: types.Overloaded) -> Set[str]: return self._visit(t.items()) | self._visit(t.fallback) def visit_tuple_type(self, t: types.TupleType) -> Set[str]: return self._visit(t.items) | self._visit(t.partial_fallback) def visit_typeddict_type(self, t: types.TypedDictType) -> Set[str]: return self._visit(t.items.values()) | self._visit(t.fallback) def visit_literal_type(self, t: types.LiteralType) -> Set[str]: return self._visit(t.fallback) def visit_union_type(self, t: types.UnionType) -> Set[str]: return self._visit(t.items) def visit_partial_type(self, t: types.PartialType) -> Set[str]: return set() def visit_type_type(self, t: types.TypeType) -> Set[str]: return self._visit(t.item) def visit_type_alias_type(self, t: types.TypeAliasType) -> Set[str]: return self._visit(types.get_proper_type(t)) mypy-0.761/mypy/infer.py0000644€tŠÔÚ€2›s®0000000363713576752246021404 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities for type argument inference.""" from typing import List, Optional, Sequence from mypy.constraints import ( infer_constraints, infer_constraints_for_callable, SUBTYPE_OF, SUPERTYPE_OF ) from mypy.types import Type, TypeVarId, CallableType from mypy.solve import solve_constraints def infer_function_type_arguments(callee_type: CallableType, arg_types: Sequence[Optional[Type]], arg_kinds: List[int], formal_to_actual: List[List[int]], strict: bool = True) -> List[Optional[Type]]: """Infer the type arguments of a generic function. Return an array of lower bound types for the type variables -1 (at index 0), -2 (at index 1), etc. 
def infer_type_arguments(type_var_ids: List[TypeVarId],
                         template: Type, actual: Type,
                         is_supertype: bool = False) -> List[Optional[Type]]:
    """Match a single type against a generic template and solve for its
    type variables.

    Like infer_function_type_arguments, but for one type rather than a
    whole call.  The direction of the match is controlled by
    is_supertype.
    """
    direction = SUPERTYPE_OF if is_supertype else SUBTYPE_OF
    constraints = infer_constraints(template, actual, direction)
    return solve_constraints(type_var_ids, constraints)
This contains logic shared between the client and server, such as reading and writing. """ connection = None # type: _IPCHandle def __init__(self, name: str, timeout: Optional[float]) -> None: self.name = name self.timeout = timeout def read(self, size: int = 100000) -> bytes: """Read bytes from an IPC connection until its empty.""" bdata = bytearray() if sys.platform == 'win32': while True: ov, err = _winapi.ReadFile(self.connection, size, overlapped=True) # TODO: remove once typeshed supports Literal types assert isinstance(ov, _winapi.Overlapped) assert isinstance(err, int) try: if err == _winapi.ERROR_IO_PENDING: timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE res = _winapi.WaitForSingleObject(ov.event, timeout) if res != _winapi.WAIT_OBJECT_0: raise IPCException("Bad result from I/O wait: {}".format(res)) except BaseException: ov.cancel() raise _, err = ov.GetOverlappedResult(True) more = ov.getbuffer() if more: bdata.extend(more) if err == 0: # we are done! 
    def write(self, data: bytes) -> None:
        """Write bytes to an IPC connection.

        On Windows this issues an overlapped (asynchronous) WriteFile and
        waits for it to complete; on Unix it sends on the socket and then
        shuts down the write side to signal end-of-stream to the reader.

        Raises IPCException on wait failure, write failure, or timeout.
        """
        if sys.platform == 'win32':
            try:
                ov, err = _winapi.WriteFile(self.connection, data, overlapped=True)
                # TODO: remove once typeshed supports Literal types
                assert isinstance(ov, _winapi.Overlapped)
                assert isinstance(err, int)
                try:
                    if err == _winapi.ERROR_IO_PENDING:
                        # The write did not complete synchronously; wait for
                        # the overlapped event (timeout converted to ms).
                        timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE
                        res = _winapi.WaitForSingleObject(ov.event, timeout)
                        if res != _winapi.WAIT_OBJECT_0:
                            raise IPCException("Bad result from I/O wait: {}".format(res))
                    elif err != 0:
                        raise IPCException("Failed writing to pipe with error: {}".format(err))
                except BaseException:
                    # Cancel the pending overlapped I/O before propagating.
                    ov.cancel()
                    raise
                bytes_written, err = ov.GetOverlappedResult(True)
                assert err == 0, err
                assert bytes_written == len(data)
            except WindowsError as e:
                raise IPCException("Failed to write with error: {}".format(e.winerror))
        else:
            self.connection.sendall(data)
            # Half-close: tells the peer no more data is coming.
            self.connection.shutdown(socket.SHUT_WR)
    def __init__(self, name: str, timeout: Optional[float] = None) -> None:
        """Create the server endpoint (named pipe on Windows, Unix socket
        elsewhere) but do not yet accept a connection -- that happens in
        __enter__().
        """
        if sys.platform == 'win32':
            # Randomize the pipe name so concurrent servers don't collide.
            name = r'\\.\pipe\{}-{}.pipe'.format(
                name, base64.urlsafe_b64encode(os.urandom(6)).decode())
        else:
            name = '{}.sock'.format(name)
        super().__init__(name, timeout)
        if sys.platform == 'win32':
            self.connection = _winapi.CreateNamedPipe(
                self.name,
                _winapi.PIPE_ACCESS_DUPLEX
                | _winapi.FILE_FLAG_FIRST_PIPE_INSTANCE
                | _winapi.FILE_FLAG_OVERLAPPED,
                _winapi.PIPE_READMODE_MESSAGE
                | _winapi.PIPE_TYPE_MESSAGE
                | _winapi.PIPE_WAIT
                | 0x8,  # PIPE_REJECT_REMOTE_CLIENTS
                1,  # one instance
                self.BUFFER_SIZE,
                self.BUFFER_SIZE,
                _winapi.NMPWAIT_WAIT_FOREVER,
                0,  # Use default security descriptor
            )
            if self.connection == -1:  # INVALID_HANDLE_VALUE
                err = _winapi.GetLastError()
                raise IPCException('Invalid handle to pipe: {}'.format(err))
        else:
            # Put the socket file in a fresh private directory; cleanup()
            # removes the whole directory.
            self.sock_directory = tempfile.mkdtemp()
            sockfile = os.path.join(self.sock_directory, self.name)
            self.sock = socket.socket(socket.AF_UNIX)
            self.sock.bind(sockfile)
            self.sock.listen(1)
            if timeout is not None:
                self.sock.settimeout(timeout)
connects, though this can be "solved" by killing the server try: ov = _winapi.ConnectNamedPipe(self.connection, overlapped=True) # TODO: remove once typeshed supports Literal types assert isinstance(ov, _winapi.Overlapped) except WindowsError as e: # Don't raise if the client already exists, or the client already connected if e.winerror not in (_winapi.ERROR_PIPE_CONNECTED, _winapi.ERROR_NO_DATA): raise else: try: timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE res = _winapi.WaitForSingleObject(ov.event, timeout) assert res == _winapi.WAIT_OBJECT_0 except BaseException: ov.cancel() _winapi.CloseHandle(self.connection) raise _, err = ov.GetOverlappedResult(True) assert err == 0 else: try: self.connection, _ = self.sock.accept() except socket.timeout: raise IPCException('The socket timed out') return self def __exit__(self, exc_ty: 'Optional[Type[BaseException]]' = None, exc_val: Optional[BaseException] = None, exc_tb: Optional[TracebackType] = None, ) -> None: if sys.platform == 'win32': try: # Wait for the client to finish reading the last write before disconnecting if not FlushFileBuffers(self.connection): raise IPCException("Failed to flush NamedPipe buffer," "maybe the client hung up?") finally: DisconnectNamedPipe(self.connection) else: self.close() def cleanup(self) -> None: if sys.platform == 'win32': self.close() else: shutil.rmtree(self.sock_directory) @property def connection_name(self) -> str: if sys.platform == 'win32': return self.name else: return self.sock.getsockname() mypy-0.761/mypy/join.py0000644€tŠÔÚ€2›s®0000005055313576752246021237 0ustar jukkaDROPBOX\Domain Users00000000000000"""Calculation of the least upper bound types (joins).""" from collections import OrderedDict from typing import List, Optional from mypy.types import ( Type, AnyType, NoneType, TypeVisitor, Instance, UnboundType, TypeVarType, CallableType, TupleType, TypedDictType, ErasedType, UnionType, FunctionLike, Overloaded, LiteralType, PartialType, 
    DeletedType, UninhabitedType, TypeType, TypeOfAny, get_proper_type, ProperType,
    get_proper_types, TypeAliasType
)
from mypy.maptype import map_instance_to_supertype
from mypy.subtypes import (
    is_subtype, is_equivalent, is_subtype_ignoring_tvars, is_proper_subtype,
    is_protocol_implementation, find_member
)
from mypy.nodes import ARG_NAMED, ARG_NAMED_OPT
import mypy.typeops
from mypy import state


def join_simple(declaration: Optional[Type], s: Type, t: Type) -> ProperType:
    """Return a simple least upper bound given the declared type.

    Unlike join_types() below, the computed join is checked against
    'declaration': when the join is not a subtype of the declared type,
    the declaration itself is returned instead.
    """
    # TODO: check infinite recursion for aliases here.
    declaration = get_proper_type(declaration)
    s = get_proper_type(s)
    t = get_proper_type(t)

    if (s.can_be_true, s.can_be_false) != (t.can_be_true, t.can_be_false):
        # if types are restricted in different ways, use the more general versions
        s = mypy.typeops.true_or_false(s)
        t = mypy.typeops.true_or_false(t)

    # 'Any' dominates the join.
    if isinstance(s, AnyType):
        return s

    # An erased operand contributes nothing.
    if isinstance(s, ErasedType):
        return t

    # If one operand already subsumes the other, it is the join.
    if is_proper_subtype(s, t):
        return t

    if is_proper_subtype(t, s):
        return s

    if isinstance(declaration, UnionType):
        return mypy.typeops.make_simplified_union([s, t])

    # Normalize operand order so the visitor only has to handle
    # None/uninhabited on the right-hand side.
    if isinstance(s, NoneType) and not isinstance(t, NoneType):
        s, t = t, s

    if isinstance(s, UninhabitedType) and not isinstance(t, UninhabitedType):
        s, t = t, s

    value = t.accept(TypeJoinVisitor(s))
    if declaration is None or is_subtype(value, declaration):
        return value

    # The computed join escaped the declared type; fall back to the declaration.
    return declaration


def trivial_join(s: Type, t: Type) -> ProperType:
    """Return one of types (expanded) if it is a supertype of other, otherwise top type."""
    if is_subtype(s, t):
        return get_proper_type(t)
    elif is_subtype(t, s):
        return get_proper_type(s)
    else:
        # Neither subsumes the other: use the most precise trivial supertype.
        return object_or_any_from_type(get_proper_type(t))


def join_types(s: Type, t: Type) -> ProperType:
    """Return the least upper bound of s and t.

    For example, the join of 'int' and 'object' is 'object'.
""" if mypy.typeops.is_recursive_pair(s, t): # This case can trigger an infinite recursion, general support for this will be # tricky so we use a trivial join (like for protocols). return trivial_join(s, t) s = get_proper_type(s) t = get_proper_type(t) if (s.can_be_true, s.can_be_false) != (t.can_be_true, t.can_be_false): # if types are restricted in different ways, use the more general versions s = mypy.typeops.true_or_false(s) t = mypy.typeops.true_or_false(t) if isinstance(s, AnyType): return s if isinstance(s, ErasedType): return t if isinstance(s, UnionType) and not isinstance(t, UnionType): s, t = t, s if isinstance(s, NoneType) and not isinstance(t, NoneType): s, t = t, s if isinstance(s, UninhabitedType) and not isinstance(t, UninhabitedType): s, t = t, s # Use a visitor to handle non-trivial cases. return t.accept(TypeJoinVisitor(s)) class TypeJoinVisitor(TypeVisitor[ProperType]): """Implementation of the least upper bound algorithm. Attributes: s: The other (left) type operand. 
""" def __init__(self, s: ProperType) -> None: self.s = s def visit_unbound_type(self, t: UnboundType) -> ProperType: return AnyType(TypeOfAny.special_form) def visit_union_type(self, t: UnionType) -> ProperType: if is_subtype(self.s, t): return t else: return mypy.typeops.make_simplified_union([self.s, t]) def visit_any(self, t: AnyType) -> ProperType: return t def visit_none_type(self, t: NoneType) -> ProperType: if state.strict_optional: if isinstance(self.s, (NoneType, UninhabitedType)): return t elif isinstance(self.s, UnboundType): return AnyType(TypeOfAny.special_form) else: return mypy.typeops.make_simplified_union([self.s, t]) else: return self.s def visit_uninhabited_type(self, t: UninhabitedType) -> ProperType: return self.s def visit_deleted_type(self, t: DeletedType) -> ProperType: return self.s def visit_erased_type(self, t: ErasedType) -> ProperType: return self.s def visit_type_var(self, t: TypeVarType) -> ProperType: if isinstance(self.s, TypeVarType) and self.s.id == t.id: return self.s else: return self.default(self.s) def visit_instance(self, t: Instance) -> ProperType: if isinstance(self.s, Instance): nominal = join_instances(t, self.s) structural = None # type: Optional[Instance] if t.type.is_protocol and is_protocol_implementation(self.s, t): structural = t elif self.s.type.is_protocol and is_protocol_implementation(t, self.s): structural = self.s # Structural join is preferred in the case where we have found both # structural and nominal and they have same MRO length (see two comments # in join_instances_via_supertype). Otherwise, just return the nominal join. 
            if not structural or is_better(nominal, structural):
                return nominal
            return structural
        elif isinstance(self.s, FunctionLike):
            if t.type.is_protocol:
                # A callback protocol may join with a function via its __call__.
                call = unpack_callback_protocol(t)
                if call:
                    return join_types(call, self.s)
            return join_types(t, self.s.fallback)
        elif isinstance(self.s, TypeType):
            return join_types(t, self.s)
        elif isinstance(self.s, TypedDictType):
            return join_types(t, self.s)
        elif isinstance(self.s, LiteralType):
            return join_types(t, self.s)
        else:
            return self.default(self.s)

    def visit_callable_type(self, t: CallableType) -> ProperType:
        if isinstance(self.s, CallableType) and is_similar_callables(t, self.s):
            if is_equivalent(t, self.s):
                return combine_similar_callables(t, self.s)
            result = join_similar_callables(t, self.s)
            # We set the from_type_type flag to suppress error when a collection of
            # concrete class objects gets inferred as their common abstract superclass.
            if not ((t.is_type_obj() and t.type_object().is_abstract) or
                    (self.s.is_type_obj() and self.s.type_object().is_abstract)):
                result.from_type_type = True
            if any(isinstance(tp, (NoneType, UninhabitedType))
                   for tp in get_proper_types(result.arg_types)):
                # We don't want to return unusable Callable, attempt fallback instead.
                return join_types(t.fallback, self.s)
            return result
        elif isinstance(self.s, Overloaded):
            # Switch the order of arguments so that we'll get to visit_overloaded.
            return join_types(t, self.s)
        elif isinstance(self.s, Instance) and self.s.type.is_protocol:
            call = unpack_callback_protocol(self.s)
            if call:
                return join_types(t, call)
        return join_types(t.fallback, self.s)

    def visit_overloaded(self, t: Overloaded) -> ProperType:
        # This is more complex than most other cases. Here are some
        # examples that illustrate how this works.
        #
        # First let's define a concise notation:
        #  - Cn are callable types (for n in 1, 2, ...)
        #  - Ov(C1, C2, ...) is an overloaded type with items C1, C2, ...
        #  - Callable[[T, ...], S] is written as [T, ...] -> S.
# # We want some basic properties to hold (assume Cn are all # unrelated via Any-similarity): # # join(Ov(C1, C2), C1) == C1 # join(Ov(C1, C2), Ov(C1, C2)) == Ov(C1, C2) # join(Ov(C1, C2), Ov(C1, C3)) == C1 # join(Ov(C2, C2), C3) == join of fallback types # # The presence of Any types makes things more interesting. The join is the # most general type we can get with respect to Any: # # join(Ov([int] -> int, [str] -> str), [Any] -> str) == Any -> str # # We could use a simplification step that removes redundancies, but that's not # implemented right now. Consider this example, where we get a redundancy: # # join(Ov([int, Any] -> Any, [str, Any] -> Any), [Any, int] -> Any) == # Ov([Any, int] -> Any, [Any, int] -> Any) # # TODO: Consider more cases of callable subtyping. result = [] # type: List[CallableType] s = self.s if isinstance(s, FunctionLike): # The interesting case where both types are function types. for t_item in t.items(): for s_item in s.items(): if is_similar_callables(t_item, s_item): if is_equivalent(t_item, s_item): result.append(combine_similar_callables(t_item, s_item)) elif is_subtype(t_item, s_item): result.append(s_item) if result: # TODO: Simplify redundancies from the result. 
if len(result) == 1: return result[0] else: return Overloaded(result) return join_types(t.fallback, s.fallback) elif isinstance(s, Instance) and s.type.is_protocol: call = unpack_callback_protocol(s) if call: return join_types(t, call) return join_types(t.fallback, s) def visit_tuple_type(self, t: TupleType) -> ProperType: if isinstance(self.s, TupleType) and self.s.length() == t.length(): items = [] # type: List[Type] for i in range(t.length()): items.append(self.join(t.items[i], self.s.items[i])) fallback = join_instances(mypy.typeops.tuple_fallback(self.s), mypy.typeops.tuple_fallback(t)) assert isinstance(fallback, Instance) return TupleType(items, fallback) else: return self.default(self.s) def visit_typeddict_type(self, t: TypedDictType) -> ProperType: if isinstance(self.s, TypedDictType): items = OrderedDict([ (item_name, s_item_type) for (item_name, s_item_type, t_item_type) in self.s.zip(t) if (is_equivalent(s_item_type, t_item_type) and (item_name in t.required_keys) == (item_name in self.s.required_keys)) ]) mapping_value_type = join_type_list(list(items.values())) fallback = self.s.create_anonymous_fallback(value_type=mapping_value_type) # We need to filter by items.keys() since some required keys present in both t and # self.s might be missing from the join if the types are incompatible. required_keys = set(items.keys()) & t.required_keys & self.s.required_keys return TypedDictType(items, required_keys, fallback) elif isinstance(self.s, Instance): return join_types(self.s, t.fallback) else: return self.default(self.s) def visit_literal_type(self, t: LiteralType) -> ProperType: if isinstance(self.s, LiteralType): if t == self.s: return t else: return join_types(self.s.fallback, t.fallback) else: return join_types(self.s, t.fallback) def visit_partial_type(self, t: PartialType) -> ProperType: # We only have partial information so we can't decide the join result. We should # never get here. 
assert False, "Internal error" def visit_type_type(self, t: TypeType) -> ProperType: if isinstance(self.s, TypeType): return TypeType.make_normalized(self.join(t.item, self.s.item), line=t.line) elif isinstance(self.s, Instance) and self.s.type.fullname == 'builtins.type': return self.s else: return self.default(self.s) def visit_type_alias_type(self, t: TypeAliasType) -> ProperType: assert False, "This should be never called, got {}".format(t) def join(self, s: Type, t: Type) -> ProperType: return join_types(s, t) def default(self, typ: Type) -> ProperType: typ = get_proper_type(typ) if isinstance(typ, Instance): return object_from_instance(typ) elif isinstance(typ, UnboundType): return AnyType(TypeOfAny.special_form) elif isinstance(typ, TupleType): return self.default(mypy.typeops.tuple_fallback(typ)) elif isinstance(typ, TypedDictType): return self.default(typ.fallback) elif isinstance(typ, FunctionLike): return self.default(typ.fallback) elif isinstance(typ, TypeVarType): return self.default(typ.upper_bound) else: return AnyType(TypeOfAny.special_form) def join_instances(t: Instance, s: Instance) -> ProperType: """Calculate the join of two instance types. """ if t.type == s.type: # Simplest case: join two types with the same base type (but # potentially different arguments). if is_subtype(t, s) or is_subtype(s, t): # Compatible; combine type arguments. args = [] # type: List[Type] for i in range(len(t.args)): args.append(join_types(t.args[i], s.args[i])) return Instance(t.type, args) else: # Incompatible; return trivial result object. return object_from_instance(t) elif t.type.bases and is_subtype_ignoring_tvars(t, s): return join_instances_via_supertype(t, s) else: # Now t is not a subtype of s, and t != s. Now s could be a subtype # of t; alternatively, we need to find a common supertype. This works # in of the both cases. 
        return join_instances_via_supertype(s, t)


def join_instances_via_supertype(t: Instance, s: Instance) -> ProperType:
    """Join t and s by walking up t's base classes."""
    # Give preference to joins via duck typing relationship, so that
    # join(int, float) == float, for example.
    if t.type._promote and is_subtype(t.type._promote, s):
        return join_types(t.type._promote, s)
    elif s.type._promote and is_subtype(s.type._promote, t):
        return join_types(t, s.type._promote)
    # Compute the "best" supertype of t when joined with s.
    # The definition of "best" may evolve; for now it is the one with
    # the longest MRO.  Ties are broken by using the earlier base.
    best = None  # type: Optional[ProperType]
    for base in t.type.bases:
        mapped = map_instance_to_supertype(t, base.type)
        res = join_instances(mapped, s)
        if best is None or is_better(res, best):
            best = res
    # t always has at least 'object' among its bases, so a candidate exists.
    assert best is not None
    return best


def is_better(t: Type, s: Type) -> bool:
    # Given two possible results from join_instances_via_supertype(),
    # indicate whether t is the better one.
    t = get_proper_type(t)
    s = get_proper_type(s)

    if isinstance(t, Instance):
        if not isinstance(s, Instance):
            # Any Instance beats a non-Instance candidate.
            return True
        # Use len(mro) as a proxy for the better choice.
        if len(t.type.mro) > len(s.type.mro):
            return True
    return False


def is_similar_callables(t: CallableType, s: CallableType) -> bool:
    """Return True if t and s have identical numbers of
    arguments, default arguments and varargs.
    """
    return (len(t.arg_types) == len(s.arg_types) and t.min_args == s.min_args and
            t.is_var_arg == s.is_var_arg)


def join_similar_callables(t: CallableType, s: CallableType) -> CallableType:
    """Join two callables: meet the argument types, join the return types."""
    # Imported here to avoid a module-level import cycle with mypy.meet.
    from mypy.meet import meet_types
    arg_types = []  # type: List[Type]
    for i in range(len(t.arg_types)):
        # Arguments are contravariant, hence the meet.
        arg_types.append(meet_types(t.arg_types[i], s.arg_types[i]))
    # TODO in combine_similar_callables also applies here (names and kinds)
    # The fallback type can be either 'function' or 'type'. The result should have 'type' as
    # fallback only if both operands have it as 'type'.
    if t.fallback.type.fullname != 'builtins.type':
        fallback = t.fallback
    else:
        fallback = s.fallback
    return t.copy_modified(arg_types=arg_types,
                           arg_names=combine_arg_names(t, s),
                           ret_type=join_types(t.ret_type, s.ret_type),
                           fallback=fallback,
                           name=None)


def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType:
    """Combine two equivalent callables by joining both argument and return types."""
    arg_types = []  # type: List[Type]
    for i in range(len(t.arg_types)):
        arg_types.append(join_types(t.arg_types[i], s.arg_types[i]))
    # TODO kinds and argument names
    # The fallback type can be either 'function' or 'type'. The result should have 'type' as
    # fallback only if both operands have it as 'type'.
    if t.fallback.type.fullname != 'builtins.type':
        fallback = t.fallback
    else:
        fallback = s.fallback
    return t.copy_modified(arg_types=arg_types,
                           arg_names=combine_arg_names(t, s),
                           ret_type=join_types(t.ret_type, s.ret_type),
                           fallback=fallback,
                           name=None)


def combine_arg_names(t: CallableType, s: CallableType) -> List[Optional[str]]:
    """Produces a list of argument names compatible with both callables.

    For example, suppose 't' and 's' have the following signatures:

    - t: (a: int, b: str, X: str) -> None
    - s: (a: int, b: str, Y: str) -> None

    This function would return ["a", "b", None]. This information
    is then used above to compute the join of t and s, which results
    in a signature of (a: int, b: str, str) -> None.

    Note that the third argument's name is omitted and 't' and 's'
    are both valid subtypes of this inferred signature.

    Precondition: is_similar_callables(t, s) is true.
""" num_args = len(t.arg_types) new_names = [] named = (ARG_NAMED, ARG_NAMED_OPT) for i in range(num_args): t_name = t.arg_names[i] s_name = s.arg_names[i] if t_name == s_name or t.arg_kinds[i] in named or s.arg_kinds[i] in named: new_names.append(t_name) else: new_names.append(None) return new_names def object_from_instance(instance: Instance) -> Instance: """Construct the type 'builtins.object' from an instance type.""" # Use the fact that 'object' is always the last class in the mro. res = Instance(instance.type.mro[-1], []) return res def object_or_any_from_type(typ: ProperType) -> ProperType: # Similar to object_from_instance() but tries hard for all types. # TODO: find a better way to get object, or make this more reliable. if isinstance(typ, Instance): return object_from_instance(typ) elif isinstance(typ, (CallableType, TypedDictType, LiteralType)): return object_from_instance(typ.fallback) elif isinstance(typ, TupleType): return object_from_instance(typ.partial_fallback) elif isinstance(typ, TypeType): return object_or_any_from_type(typ.item) elif isinstance(typ, TypeVarType) and isinstance(typ.upper_bound, ProperType): return object_or_any_from_type(typ.upper_bound) elif isinstance(typ, UnionType): joined = join_type_list([it for it in typ.items if isinstance(it, ProperType)]) return object_or_any_from_type(joined) return AnyType(TypeOfAny.implementation_artifact) def join_type_list(types: List[Type]) -> ProperType: if not types: # This is a little arbitrary but reasonable. Any empty tuple should be compatible # with all variable length tuples, and this makes it possible. 
return UninhabitedType() joined = get_proper_type(types[0]) for t in types[1:]: joined = join_types(joined, t) return joined def unpack_callback_protocol(t: Instance) -> Optional[Type]: assert t.type.is_protocol if t.type.protocol_members == ['__call__']: return find_member('__call__', t, t, is_operator=True) return None mypy-0.761/mypy/literals.py0000644€tŠÔÚ€2›s®0000001720413576752246022113 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Union, Any, Tuple, Iterable from typing_extensions import Final from mypy.nodes import ( Expression, ComparisonExpr, OpExpr, MemberExpr, UnaryExpr, StarExpr, IndexExpr, LITERAL_YES, LITERAL_NO, NameExpr, LITERAL_TYPE, IntExpr, FloatExpr, ComplexExpr, StrExpr, BytesExpr, UnicodeExpr, ListExpr, TupleExpr, SetExpr, DictExpr, CallExpr, SliceExpr, CastExpr, ConditionalExpr, EllipsisExpr, YieldFromExpr, YieldExpr, RevealExpr, SuperExpr, TypeApplication, LambdaExpr, ListComprehension, SetComprehension, DictionaryComprehension, GeneratorExpr, BackquoteExpr, TypeVarExpr, TypeAliasExpr, NamedTupleExpr, EnumCallExpr, TypedDictExpr, NewTypeExpr, PromoteExpr, AwaitExpr, TempNode, AssignmentExpr, ) from mypy.visitor import ExpressionVisitor # [Note Literals and literal_hash] # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # # Mypy uses the term "literal" to refer to any expression built out of # the following: # # * Plain literal expressions, like `1` (integer, float, string, etc.) # # * Compound literal expressions, like `(lit1, lit2)` (list, dict, # set, or tuple) # # * Operator expressions, like `lit1 + lit2` # # * Variable references, like `x` # # * Member references, like `lit.m` # # * Index expressions, like `lit[0]` # # A typical "literal" looks like `x[(i,j+1)].m`. # # An expression that is a literal has a `literal_hash`, with the # following properties. # # * `literal_hash` is a Key: a tuple containing basic data types and # possibly other Keys. 
#   So it can be used as a key in a dictionary
#   that will be compared by value (as opposed to the Node itself,
#   which is compared by identity).
#
# * Two expressions have equal `literal_hash`es if and only if they
#   are syntactically equal expressions. (NB: Actually, we also
#   identify as equal expressions like `3` and `3.0`; is this a good
#   idea?)
#
# * The elements of `literal_hash` that are tuples are exactly the
#   subexpressions of the original expression (e.g. the base and index
#   of an index expression, or the operands of an operator expression).


def literal(e: Expression) -> int:
    """Classify how "literal" an expression is.

    Returns one of LITERAL_YES, LITERAL_TYPE, or LITERAL_NO, propagating
    the classification recursively through compound expressions.
    """
    # Compound forms: the result is the weakest classification of any part.
    if isinstance(e, ComparisonExpr):
        return min(literal(operand) for operand in e.operands)
    if isinstance(e, OpExpr):
        return min(literal(e.left), literal(e.right))
    if isinstance(e, (MemberExpr, UnaryExpr, StarExpr)):
        return literal(e.expr)
    if isinstance(e, IndexExpr):
        # An index expression is only as literal as its index allows.
        return literal(e.base) if literal(e.index) == LITERAL_YES else LITERAL_NO
    if isinstance(e, NameExpr):
        return LITERAL_TYPE
    # Plain constants are always literal.
    if isinstance(e, (IntExpr, FloatExpr, ComplexExpr, StrExpr, BytesExpr, UnicodeExpr)):
        return LITERAL_YES
    # Anything the hasher can produce a key for also counts as literal.
    if literal_hash(e):
        return LITERAL_YES
    return LITERAL_NO


# A hashable key for a literal expression; tuples nested inside are keys
# of subexpressions.
Key = Tuple[Any, ...]
def subkeys(key: Key) -> Iterable[Key]:
    """Return the nested sub-keys (hashes of subexpressions) inside 'key'."""
    return [elt for elt in key if isinstance(elt, tuple)]


def literal_hash(e: Expression) -> Optional[Key]:
    """Return a value-comparable key for 'e', or None if it is not a literal."""
    return e.accept(_hasher)


class _Hasher(ExpressionVisitor[Optional[Key]]):
    """Visitor computing literal_hash keys; returns None for non-literal forms."""

    def visit_int_expr(self, e: IntExpr) -> Key:
        return ('Literal', e.value)

    def visit_str_expr(self, e: StrExpr) -> Key:
        # from_python_3 keeps Python 2 and Python 3 string literals distinct.
        return ('Literal', e.value, e.from_python_3)

    def visit_bytes_expr(self, e: BytesExpr) -> Key:
        return ('Literal', e.value)

    def visit_unicode_expr(self, e: UnicodeExpr) -> Key:
        return ('Literal', e.value)

    def visit_float_expr(self, e: FloatExpr) -> Key:
        return ('Literal', e.value)

    def visit_complex_expr(self, e: ComplexExpr) -> Key:
        return ('Literal', e.value)

    def visit_star_expr(self, e: StarExpr) -> Key:
        return ('Star', literal_hash(e.expr))

    def visit_name_expr(self, e: NameExpr) -> Key:
        # N.B: We use the node itself as the key, and not the name,
        # because using the name causes issues when there is shadowing
        # (for example, in list comprehensions).
        return ('Var', e.node)

    def visit_member_expr(self, e: MemberExpr) -> Key:
        return ('Member', literal_hash(e.expr), e.name)

    def visit_op_expr(self, e: OpExpr) -> Key:
        return ('Binary', e.op, literal_hash(e.left), literal_hash(e.right))

    def visit_comparison_expr(self, e: ComparisonExpr) -> Key:
        rest = tuple(e.operators)  # type: Any
        rest += tuple(literal_hash(o) for o in e.operands)
        return ('Comparison',) + rest

    def visit_unary_expr(self, e: UnaryExpr) -> Key:
        return ('Unary', e.op, literal_hash(e.expr))

    def seq_expr(self, e: Union[ListExpr, TupleExpr, SetExpr], name: str) -> Optional[Key]:
        # Shared helper: a sequence display is literal iff every item is.
        if all(literal(x) == LITERAL_YES for x in e.items):
            rest = tuple(literal_hash(x) for x in e.items)  # type: Any
            return (name,) + rest
        return None

    def visit_list_expr(self, e: ListExpr) -> Optional[Key]:
        return self.seq_expr(e, 'List')

    def visit_dict_expr(self, e: DictExpr) -> Optional[Key]:
        # 'a' may be None for ** unpacking entries, hence the truthiness check.
        if all(a and literal(a) == literal(b) == LITERAL_YES
               for a, b in e.items):
            rest = tuple((literal_hash(a) if a else None,
literal_hash(b)) for a, b in e.items) # type: Any return ('Dict',) + rest return None def visit_tuple_expr(self, e: TupleExpr) -> Optional[Key]: return self.seq_expr(e, 'Tuple') def visit_set_expr(self, e: SetExpr) -> Optional[Key]: return self.seq_expr(e, 'Set') def visit_index_expr(self, e: IndexExpr) -> Optional[Key]: if literal(e.index) == LITERAL_YES: return ('Index', literal_hash(e.base), literal_hash(e.index)) return None def visit_assignment_expr(self, e: AssignmentExpr) -> None: return None def visit_call_expr(self, e: CallExpr) -> None: return None def visit_slice_expr(self, e: SliceExpr) -> None: return None def visit_cast_expr(self, e: CastExpr) -> None: return None def visit_conditional_expr(self, e: ConditionalExpr) -> None: return None def visit_ellipsis(self, e: EllipsisExpr) -> None: return None def visit_yield_from_expr(self, e: YieldFromExpr) -> None: return None def visit_yield_expr(self, e: YieldExpr) -> None: return None def visit_reveal_expr(self, e: RevealExpr) -> None: return None def visit_super_expr(self, e: SuperExpr) -> None: return None def visit_type_application(self, e: TypeApplication) -> None: return None def visit_lambda_expr(self, e: LambdaExpr) -> None: return None def visit_list_comprehension(self, e: ListComprehension) -> None: return None def visit_set_comprehension(self, e: SetComprehension) -> None: return None def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None: return None def visit_generator_expr(self, e: GeneratorExpr) -> None: return None def visit_backquote_expr(self, e: BackquoteExpr) -> None: return None def visit_type_var_expr(self, e: TypeVarExpr) -> None: return None def visit_type_alias_expr(self, e: TypeAliasExpr) -> None: return None def visit_namedtuple_expr(self, e: NamedTupleExpr) -> None: return None def visit_enum_call_expr(self, e: EnumCallExpr) -> None: return None def visit_typeddict_expr(self, e: TypedDictExpr) -> None: return None def visit_newtype_expr(self, e: NewTypeExpr) 
-> None: return None def visit__promote_expr(self, e: PromoteExpr) -> None: return None def visit_await_expr(self, e: AwaitExpr) -> None: return None def visit_temp_node(self, e: TempNode) -> None: return None _hasher = _Hasher() # type: Final mypy-0.761/mypy/lookup.py0000644€tŠÔÚ€2›s®0000000405213576752246021602 0ustar jukkaDROPBOX\Domain Users00000000000000""" This is a module for various lookup functions: functions that will find a semantic node by its name. """ from mypy.nodes import MypyFile, SymbolTableNode, TypeInfo from typing import Dict, Optional # TODO: gradually move existing lookup functions to this module. def lookup_fully_qualified(name: str, modules: Dict[str, MypyFile], raise_on_missing: bool = False) -> Optional[SymbolTableNode]: """Find a symbol using it fully qualified name. The algorithm has two steps: first we try splitting the name on '.' to find the module, then iteratively look for each next chunk after a '.' (e.g. for nested classes). This function should *not* be used to find a module. Those should be looked in the modules dictionary. """ head = name rest = [] # 1. Find a module tree in modules dictionary. while True: if '.' not in head: if raise_on_missing: assert '.' in head, "Cannot find module for %s" % (name,) return None head, tail = head.rsplit('.', maxsplit=1) rest.append(tail) mod = modules.get(head) if mod is not None: break names = mod.names # 2. Find the symbol in the module tree. if not rest: # Looks like a module, don't use this to avoid confusions. if raise_on_missing: assert rest, "Cannot find %s, got a module symbol" % (name,) return None while True: key = rest.pop() if key not in names: if raise_on_missing: assert key in names, "Cannot find component %r for %r" % (key, name) return None stnode = names[key] if not rest: return stnode node = stnode.node # In fine-grained mode, could be a cross-reference to a deleted module # or a Var made up for a missing module. 
if not isinstance(node, TypeInfo): if raise_on_missing: assert node, "Cannot find %s" % (name,) return None names = node.names mypy-0.761/mypy/main.py0000644€tŠÔÚ€2›s®0000012777113576752246021233 0ustar jukkaDROPBOX\Domain Users00000000000000"""Mypy type checker command line tool.""" import argparse from gettext import gettext import os import subprocess import sys import time from typing import Any, Dict, IO, List, Optional, Sequence, Tuple, TextIO, Union from typing_extensions import Final, NoReturn from mypy import build from mypy import defaults from mypy import state from mypy import util from mypy.modulefinder import BuildSource, FindModuleCache, mypy_path, SearchPaths from mypy.find_sources import create_source_list, InvalidSourceList from mypy.fscache import FileSystemCache from mypy.errors import CompileError from mypy.options import Options, BuildType from mypy.config_parser import parse_version, parse_config_file from mypy.split_namespace import SplitNamespace from mypy.version import __version__ orig_stat = os.stat # type: Final MEM_PROFILE = False # type: Final # If True, dump memory profile def stat_proxy(path: str) -> os.stat_result: try: st = orig_stat(path) except os.error as err: print("stat(%r) -> %s" % (path, err)) raise else: print("stat(%r) -> (st_mode=%o, st_mtime=%d, st_size=%d)" % (path, st.st_mode, st.st_mtime, st.st_size)) return st def main(script_path: Optional[str], stdout: TextIO, stderr: TextIO, args: Optional[List[str]] = None, ) -> None: """Main entry point to the type checker. Args: script_path: Path to the 'mypy' script (used for finding data files). args: Custom command-line arguments. If not given, sys.argv[1:] will be used. 
""" util.check_python_version('mypy') t0 = time.time() # To log stat() calls: os.stat = stat_proxy sys.setrecursionlimit(2 ** 14) if args is None: args = sys.argv[1:] fscache = FileSystemCache() sources, options = process_options(args, stdout=stdout, stderr=stderr, fscache=fscache) messages = [] formatter = util.FancyFormatter(stdout, stderr, options.show_error_codes) def flush_errors(new_messages: List[str], serious: bool) -> None: if options.pretty: new_messages = formatter.fit_in_terminal(new_messages) messages.extend(new_messages) f = stderr if serious else stdout try: for msg in new_messages: if options.color_output: msg = formatter.colorize(msg) f.write(msg + '\n') f.flush() except BrokenPipeError: sys.exit(2) serious = False blockers = False res = None try: # Keep a dummy reference (res) for memory profiling below, as otherwise # the result could be freed. res = build.build(sources, options, None, flush_errors, fscache, stdout, stderr) except CompileError as e: blockers = True if not e.use_stdout: serious = True if options.warn_unused_configs and options.unused_configs and not options.incremental: print("Warning: unused section(s) in %s: %s" % (options.config_file, ", ".join("[mypy-%s]" % glob for glob in options.per_module_options.keys() if glob in options.unused_configs)), file=stderr) if options.junit_xml: t1 = time.time() py_version = '{}_{}'.format(options.python_version[0], options.python_version[1]) util.write_junit_xml(t1 - t0, serious, messages, options.junit_xml, py_version, options.platform) if MEM_PROFILE: from mypy.memprofile import print_memory_profile print_memory_profile() del res # Now it's safe to delete code = 0 if messages: code = 2 if blockers else 1 if options.error_summary: if messages: n_errors, n_files = util.count_stats(messages) if n_errors: stdout.write(formatter.format_error(n_errors, n_files, len(sources), options.color_output) + '\n') else: stdout.write(formatter.format_success(len(sources), options.color_output) + '\n') 
stdout.flush() if options.fast_exit: # Exit without freeing objects -- it's faster. # # NOTE: We don't flush all open files on exit (or run other destructors)! util.hard_exit(code) elif code: sys.exit(code) # Make the help output a little less jarring. class AugmentedHelpFormatter(argparse.RawDescriptionHelpFormatter): def __init__(self, prog: str) -> None: super().__init__(prog=prog, max_help_position=28) # FIXME: typeshed incorrectly has the type of indent as int when # it should be str. Make it Any to avoid rusing mypyc. def _fill_text(self, text: str, width: int, indent: Any) -> str: if '\n' in text: # Assume we want to manually format the text return super()._fill_text(text, width, indent) else: # Assume we want argparse to manage wrapping, indentating, and # formatting the text for us. return argparse.HelpFormatter._fill_text(self, text, width, indent) # Define pairs of flag prefixes with inverse meaning. flag_prefix_pairs = [ ('allow', 'disallow'), ('show', 'hide'), ] # type: Final flag_prefix_map = {} # type: Final[Dict[str, str]] for a, b in flag_prefix_pairs: flag_prefix_map[a] = b flag_prefix_map[b] = a def invert_flag_name(flag: str) -> str: split = flag[2:].split('-', 1) if len(split) == 2: prefix, rest = split if prefix in flag_prefix_map: return '--{}-{}'.format(flag_prefix_map[prefix], rest) elif prefix == 'no': return '--{}'.format(rest) return '--no-{}'.format(flag[2:]) class PythonExecutableInferenceError(Exception): """Represents a failure to infer the version or executable while searching.""" def python_executable_prefix(v: str) -> List[str]: if sys.platform == 'win32': # on Windows, all Python executables are named `python`. To handle this, there # is the `py` launcher, which can be passed a version e.g. `py -3.5`, and it will # execute an installed Python 3.5 interpreter. 
def _python_executable_from_version(python_version: Tuple[int, int]) -> str:
    """Locate the executable for the requested Python version.

    Returns the running interpreter when the version matches; otherwise
    asks the candidate interpreter itself for its sys.executable.

    Raises PythonExecutableInferenceError if no matching interpreter
    can be found or executed.
    """
    if sys.version_info[:2] == python_version:
        return sys.executable
    str_ver = '.'.join(map(str, python_version))
    cmd = python_executable_prefix(str_ver) + ['-c', 'import sys; print(sys.executable)']
    try:
        output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    except (subprocess.CalledProcessError, FileNotFoundError):
        raise PythonExecutableInferenceError(
            'failed to find a Python executable matching version {},'
            ' perhaps try --python-executable, or --no-site-packages?'.format(python_version))
    return output.decode().strip()


def infer_python_executable(options: Options,
                            special_opts: argparse.Namespace) -> None:
    """Infer the Python executable from the given version.

    This function mutates options based on special_opts to infer the correct
    Python executable to use.
    """
    # TODO: (ethanhs) Look at folding these checks and the site packages
    # subprocess calls into one subprocess call for speed.

    # Prefer the command-line executable, then the config-file one; if neither
    # is given, infer it from the version (unless no_executable is set).
    executable = special_opts.python_executable or options.python_executable
    if executable is None and not special_opts.no_executable:
        executable = _python_executable_from_version(options.python_version)
    options.python_executable = executable


HEADER = """%(prog)s [-h] [-v] [-V] [more options; see below]
            [-m MODULE] [-p PACKAGE] [-c PROGRAM_TEXT] [files ...]"""  # type: Final


DESCRIPTION = """
Mypy is a program that will type check your Python code.

Pass in any files or folders you want to type check. Mypy will
recursively traverse any provided folders to find .py files:

    $ mypy my_program.py my_src_folder

For more information on getting started, see:

- http://mypy.readthedocs.io/en/latest/getting_started.html

For more details on both running mypy and using the flags below, see:

- http://mypy.readthedocs.io/en/latest/running_mypy.html
- http://mypy.readthedocs.io/en/latest/command_line.html

You can also use a config file to configure mypy instead of using
command line flags. For more details, see:

- http://mypy.readthedocs.io/en/latest/config_file.html
"""  # type: Final
FOOTER = """Environment variables:
  Define MYPYPATH for additional module search path entries.
  Define MYPY_CACHE_DIR to override configuration cache_dir path."""  # type: Final


class CapturableArgumentParser(argparse.ArgumentParser):
    """Override ArgumentParser methods that use sys.stdout/sys.stderr directly.

    This is needed because hijacking sys.std* is not thread-safe,
    yet output must be captured to properly support mypy.api.run.
    """

    def __init__(self, *args: Any, **kwargs: Any):
        # The output streams are popped out of **kwargs so the base class
        # never sees them; they default to the process-wide streams.
        self.stdout = kwargs.pop('stdout', sys.stdout)
        self.stderr = kwargs.pop('stderr', sys.stderr)
        super().__init__(*args, **kwargs)

    # =====================
    # Help-printing methods
    # =====================
    def print_usage(self, file: Optional[IO[str]] = None) -> None:
        target = self.stdout if file is None else file
        self._print_message(self.format_usage(), target)

    def print_help(self, file: Optional[IO[str]] = None) -> None:
        target = self.stdout if file is None else file
        self._print_message(self.format_help(), target)

    def _print_message(self, message: str, file: Optional[IO[str]] = None) -> None:
        if not message:
            return
        target = self.stderr if file is None else file
        target.write(message)

    # ===============
    # Exiting methods
    # ===============
    def exit(self, status: int = 0, message: Optional[str] = None) -> NoReturn:
        if message:
            self._print_message(message, self.stderr)
        sys.exit(status)

    def error(self, message: str) -> NoReturn:
        """error(message: string)

        Prints a usage message incorporating the message to stderr and
        exits.

        If you override this in a subclass, it should not return -- it
        should either exit or raise an exception.
        """
        self.print_usage(self.stderr)
        self.exit(2, gettext('%(prog)s: error: %(message)s\n')
                  % {'prog': self.prog, 'message': message})
class CapturableVersionAction(argparse.Action):
    """Supplement CapturableArgumentParser to handle --version.

    This is nearly identical to argparse._VersionAction except,
    like CapturableArgumentParser, it allows output to be captured.

    Another notable difference is that version is mandatory.
    This allows removing a line in __call__ that falls back to parser.version
    (which does not appear to exist).
    """

    def __init__(self,
                 option_strings: Sequence[str],
                 version: str,
                 dest: str = argparse.SUPPRESS,
                 default: str = argparse.SUPPRESS,
                 help: str = "show program's version number and exit",
                 stdout: Optional[IO[str]] = None):
        super().__init__(
            option_strings=option_strings,
            dest=dest,
            default=default,
            nargs=0,
            help=help)
        self.version = version
        # Fall back to the process-wide stream only when no capture
        # stream was supplied.
        self.stdout = stdout or sys.stdout

    def __call__(self,
                 parser: argparse.ArgumentParser,
                 namespace: argparse.Namespace,
                 values: Union[str, Sequence[Any], None],
                 option_string: Optional[str] = None) -> NoReturn:
        # Format the version text the same way argparse would, but send it
        # to the captured stream and exit via the parser.
        fmt = parser._get_formatter()
        fmt.add_text(self.version)
        parser._print_message(fmt.format_help(), self.stdout)
        parser.exit()
def process_options(args: List[str],
                    stdout: Optional[TextIO] = None,
                    stderr: Optional[TextIO] = None,
                    require_targets: bool = True,
                    server_options: bool = False,
                    fscache: Optional[FileSystemCache] = None,
                    program: str = 'mypy',
                    header: str = HEADER,
                    ) -> Tuple[List[BuildSource], Options]:
    """Parse command line arguments.

    If a FileSystemCache is passed in, and package_root options
    are given, call fscache.set_package_root() to set the cache's package root.

    Returns the list of build targets and the fully populated Options.
    """
    stdout = stdout or sys.stdout
    stderr = stderr or sys.stderr

    parser = CapturableArgumentParser(prog=program,
                                      usage=header,
                                      description=DESCRIPTION,
                                      epilog=FOOTER,
                                      fromfile_prefix_chars='@',
                                      formatter_class=AugmentedHelpFormatter,
                                      add_help=False,
                                      stdout=stdout,
                                      stderr=stderr)

    # Collected by add_invertible_flag() so --strict can list and apply
    # all strict-mode flags at once.
    strict_flag_names = []  # type: List[str]
    strict_flag_assignments = []  # type: List[Tuple[str, bool]]

    def add_invertible_flag(flag: str,
                            *,
                            inverse: Optional[str] = None,
                            default: bool,
                            dest: Optional[str] = None,
                            help: str,
                            strict_flag: bool = False,
                            group: Optional[argparse._ActionsContainer] = None
                            ) -> None:
        # Registers `flag` plus its (hidden) inverse spelling, both writing
        # to the same destination; strict flags are also recorded for --strict.
        if inverse is None:
            inverse = invert_flag_name(flag)
        if group is None:
            group = parser

        if help is not argparse.SUPPRESS:
            help += " (inverse: {})".format(inverse)

        arg = group.add_argument(flag,
                                 action='store_false' if default else 'store_true',
                                 dest=dest,
                                 help=help)
        dest = arg.dest
        arg = group.add_argument(inverse,
                                 action='store_true' if default else 'store_false',
                                 dest=dest,
                                 help=argparse.SUPPRESS)
        if strict_flag:
            assert dest is not None
            strict_flag_names.append(flag)
            strict_flag_assignments.append((dest, not default))

    # Unless otherwise specified, arguments will be parsed directly onto an
    # Options object.  Options that require further processing should have
    # their `dest` prefixed with `special-opts:`, which will cause them to be
    # parsed into the separate special_opts namespace object.

    # Note: we have a style guide for formatting the mypy --help text. See
    # https://github.com/python/mypy/wiki/Documentation-Conventions

    general_group = parser.add_argument_group(
        title='Optional arguments')
    general_group.add_argument(
        '-h', '--help', action='help',
        help="Show this help message and exit")
    general_group.add_argument(
        '-v', '--verbose', action='count', dest='verbosity',
        help="More verbose messages")
    general_group.add_argument(
        '-V', '--version', action=CapturableVersionAction,
        version='%(prog)s ' + __version__,
        help="Show program's version number and exit",
        stdout=stdout)

    config_group = parser.add_argument_group(
        title='Config file',
        description="Use a config file instead of command line arguments. "
                    "This is useful if you are using many flags or want "
                    "to set different options per each module.")
    config_group.add_argument(
        '--config-file',
        help="Configuration file, must have a [mypy] section "
             "(defaults to {})".format(', '.join(defaults.CONFIG_FILES)))
    add_invertible_flag('--warn-unused-configs', default=False, strict_flag=True,
                        help="Warn about unused '[mypy-<pattern>]' config sections",
                        group=config_group)

    imports_group = parser.add_argument_group(
        title='Import discovery',
        description="Configure how imports are discovered and followed.")
    add_invertible_flag(
        '--namespace-packages', default=False,
        help="Support namespace packages (PEP 420, __init__.py-less)",
        group=imports_group)
    imports_group.add_argument(
        '--ignore-missing-imports', action='store_true',
        help="Silently ignore imports of missing modules")
    imports_group.add_argument(
        '--follow-imports', choices=['normal', 'silent', 'skip', 'error'],
        default='normal', help="How to treat imports (default normal)")
    imports_group.add_argument(
        '--python-executable', action='store', metavar='EXECUTABLE',
        help="Python executable used for finding PEP 561 compliant installed"
             " packages and stubs",
        dest='special-opts:python_executable')
    imports_group.add_argument(
        '--no-site-packages', action='store_true',
        dest='special-opts:no_executable',
        help="Do not search for installed PEP 561 compliant packages")
    imports_group.add_argument(
        '--no-silence-site-packages', action='store_true',
        help="Do not silence errors in PEP 561 compliant installed packages")

    platform_group = parser.add_argument_group(
        title='Platform configuration',
        description="Type check code assuming it will be run under certain "
                    "runtime conditions. By default, mypy assumes your code "
                    "will be run using the same operating system and Python "
                    "version you are using to run mypy itself.")
    platform_group.add_argument(
        '--python-version', type=parse_version, metavar='x.y',
        help='Type check code assuming it will be running on Python x.y',
        dest='special-opts:python_version')
    platform_group.add_argument(
        '-2', '--py2', dest='special-opts:python_version', action='store_const',
        const=defaults.PYTHON2_VERSION,
        help="Use Python 2 mode (same as --python-version 2.7)")
    platform_group.add_argument(
        '--platform', action='store', metavar='PLATFORM',
        help="Type check special-cased code for the given OS platform "
             "(defaults to sys.platform)")
    platform_group.add_argument(
        '--always-true', metavar='NAME', action='append', default=[],
        help="Additional variable to be considered True (may be repeated)")
    platform_group.add_argument(
        '--always-false', metavar='NAME', action='append', default=[],
        help="Additional variable to be considered False (may be repeated)")

    disallow_any_group = parser.add_argument_group(
        title='Disallow dynamic typing',
        description="Disallow the use of the dynamic 'Any' type under certain conditions.")
    disallow_any_group.add_argument(
        '--disallow-any-unimported', default=False, action='store_true',
        help="Disallow Any types resulting from unfollowed imports")
    disallow_any_group.add_argument(
        '--disallow-any-expr', default=False, action='store_true',
        help='Disallow all expressions that have type Any')
    disallow_any_group.add_argument(
        '--disallow-any-decorated', default=False, action='store_true',
        help='Disallow functions that have Any in their signature '
             'after decorator transformation')
    disallow_any_group.add_argument(
        '--disallow-any-explicit', default=False, action='store_true',
        help='Disallow explicit Any in type positions')
    add_invertible_flag('--disallow-any-generics', default=False, strict_flag=True,
                        help='Disallow usage of generic types that do not specify explicit type '
                             'parameters',
                        group=disallow_any_group)
    add_invertible_flag('--disallow-subclassing-any', default=False, strict_flag=True,
                        help="Disallow subclassing values of type 'Any' when defining classes",
                        group=disallow_any_group)

    untyped_group = parser.add_argument_group(
        title='Untyped definitions and calls',
        description="Configure how untyped definitions and calls are handled. "
                    "Note: by default, mypy ignores any untyped function definitions "
                    "and assumes any calls to such functions have a return "
                    "type of 'Any'.")
    add_invertible_flag('--disallow-untyped-calls', default=False, strict_flag=True,
                        help="Disallow calling functions without type annotations"
                             " from functions with type annotations",
                        group=untyped_group)
    add_invertible_flag('--disallow-untyped-defs', default=False, strict_flag=True,
                        help="Disallow defining functions without type annotations"
                             " or with incomplete type annotations",
                        group=untyped_group)
    add_invertible_flag('--disallow-incomplete-defs', default=False, strict_flag=True,
                        help="Disallow defining functions with incomplete type annotations",
                        group=untyped_group)
    add_invertible_flag('--check-untyped-defs', default=False, strict_flag=True,
                        help="Type check the interior of functions without type annotations",
                        group=untyped_group)
    add_invertible_flag('--disallow-untyped-decorators', default=False, strict_flag=True,
                        help="Disallow decorating typed functions with untyped decorators",
                        group=untyped_group)

    none_group = parser.add_argument_group(
        title='None and Optional handling',
        description="Adjust how values of type 'None' are handled. For more context on "
                    "how mypy handles values of type 'None', see: "
                    "http://mypy.readthedocs.io/en/latest/kinds_of_types.html#no-strict-optional")
    add_invertible_flag('--no-implicit-optional', default=False, strict_flag=True,
                        help="Don't assume arguments with default values of None are Optional",
                        group=none_group)
    none_group.add_argument(
        '--strict-optional', action='store_true',
        help=argparse.SUPPRESS)
    none_group.add_argument(
        '--no-strict-optional', action='store_false', dest='strict_optional',
        help="Disable strict Optional checks (inverse: --strict-optional)")
    none_group.add_argument(
        '--strict-optional-whitelist', metavar='GLOB', nargs='*',
        help=argparse.SUPPRESS)

    lint_group = parser.add_argument_group(
        title='Configuring warnings',
        description="Detect code that is sound but redundant or problematic.")
    add_invertible_flag('--warn-redundant-casts', default=False, strict_flag=True,
                        help="Warn about casting an expression to its inferred type",
                        group=lint_group)
    add_invertible_flag('--warn-unused-ignores', default=False, strict_flag=True,
                        help="Warn about unneeded '# type: ignore' comments",
                        group=lint_group)
    add_invertible_flag('--no-warn-no-return', dest='warn_no_return', default=True,
                        help="Do not warn about functions that end without returning",
                        group=lint_group)
    add_invertible_flag('--warn-return-any', default=False, strict_flag=True,
                        help="Warn about returning values of type Any"
                             " from non-Any typed functions",
                        group=lint_group)
    add_invertible_flag('--warn-unreachable', default=False, strict_flag=False,
                        help="Warn about statements or expressions inferred to be"
                             " unreachable or redundant",
                        group=lint_group)

    # Note: this group is intentionally added here even though we don't add
    # --strict to this group near the end.
    #
    # That way, this group will appear after the various strictness groups
    # but before the remaining flags.
    # We add `--strict` near the end so we don't accidentally miss any strictness
    # flags that are added after this group.
    strictness_group = parser.add_argument_group(
        title='Miscellaneous strictness flags')

    add_invertible_flag('--allow-untyped-globals', default=False, strict_flag=False,
                        help="Suppress toplevel errors caused by missing annotations",
                        group=strictness_group)
    add_invertible_flag('--allow-redefinition', default=False, strict_flag=False,
                        help="Allow unconditional variable redefinition with a new type",
                        group=strictness_group)
    add_invertible_flag('--no-implicit-reexport', default=True, strict_flag=True,
                        dest='implicit_reexport',
                        help="Treat imports as private unless aliased",
                        group=strictness_group)
    add_invertible_flag('--strict-equality', default=False, strict_flag=False,
                        help="Prohibit equality, identity, and container checks for"
                             " non-overlapping types",
                        group=strictness_group)

    strict_help = "Strict mode; enables the following flags: {}".format(
        ", ".join(strict_flag_names))
    strictness_group.add_argument(
        '--strict', action='store_true', dest='special-opts:strict',
        help=strict_help)

    error_group = parser.add_argument_group(
        title='Configuring error messages',
        description="Adjust the amount of detail shown in error messages.")
    add_invertible_flag('--show-error-context', default=False,
                        dest='show_error_context',
                        help='Precede errors with "note:" messages explaining context',
                        group=error_group)
    add_invertible_flag('--show-column-numbers', default=False,
                        help="Show column numbers in error messages",
                        group=error_group)
    add_invertible_flag('--show-error-codes', default=False,
                        help="Show error codes in error messages",
                        group=error_group)
    add_invertible_flag('--pretty', default=False,
                        help="Use visually nicer output in error messages:"
                             " Use soft word wrap, show source code snippets,"
                             " and show error location markers",
                        group=error_group)
    add_invertible_flag('--no-color-output', dest='color_output', default=True,
                        help="Do not colorize error messages",
                        group=error_group)
    add_invertible_flag('--no-error-summary', dest='error_summary', default=True,
                        help="Do not show error stats summary",
                        group=error_group)
    add_invertible_flag('--show-absolute-path', default=False,
                        help="Show absolute paths to files",
                        group=error_group)

    incremental_group = parser.add_argument_group(
        title='Incremental mode',
        description="Adjust how mypy incrementally type checks and caches modules. "
                    "Mypy caches type information about modules into a cache to "
                    "let you speed up future invocations of mypy. Also see "
                    "mypy's daemon mode: "
                    "mypy.readthedocs.io/en/latest/mypy_daemon.html#mypy-daemon")
    incremental_group.add_argument(
        '-i', '--incremental', action='store_true',
        help=argparse.SUPPRESS)
    incremental_group.add_argument(
        '--no-incremental', action='store_false', dest='incremental',
        help="Disable module cache (inverse: --incremental)")
    incremental_group.add_argument(
        '--cache-dir', action='store', metavar='DIR',
        help="Store module cache info in the given folder in incremental mode "
             "(defaults to '{}')".format(defaults.CACHE_DIR))
    add_invertible_flag('--sqlite-cache', default=False,
                        help="Use a sqlite database to store the cache",
                        group=incremental_group)
    incremental_group.add_argument(
        '--cache-fine-grained', action='store_true',
        help="Include fine-grained dependency information in the cache for the mypy daemon")
    incremental_group.add_argument(
        '--skip-version-check', action='store_true',
        help="Allow using cache written by older mypy version")
    incremental_group.add_argument(
        '--skip-cache-mtime-checks', action='store_true',
        help="Skip cache internal consistency checks based on mtime")

    internals_group = parser.add_argument_group(
        title='Advanced options',
        description="Debug and customize mypy internals.")
    internals_group.add_argument(
        '--pdb', action='store_true', help="Invoke pdb on fatal error")
    internals_group.add_argument(
        '--show-traceback', '--tb', action='store_true',
        help="Show traceback on fatal error")
    internals_group.add_argument(
        '--raise-exceptions', action='store_true', help="Raise exception on fatal error"
    )
    internals_group.add_argument(
        '--custom-typing-module', metavar='MODULE', dest='custom_typing_module',
        help="Use a custom typing module")
    internals_group.add_argument(
        '--custom-typeshed-dir', metavar='DIR',
        help="Use the custom typeshed in DIR")
    add_invertible_flag('--warn-incomplete-stub', default=False,
                        help="Warn if missing type annotation in typeshed, only relevant with"
                             " --disallow-untyped-defs or --disallow-incomplete-defs enabled",
                        group=internals_group)
    internals_group.add_argument(
        '--shadow-file', nargs=2, metavar=('SOURCE_FILE', 'SHADOW_FILE'),
        dest='shadow_file', action='append',
        help="When encountering SOURCE_FILE, read and type check "
             "the contents of SHADOW_FILE instead.")
    add_invertible_flag('--fast-exit', default=False, help=argparse.SUPPRESS,
                        group=internals_group)

    report_group = parser.add_argument_group(
        title='Report generation',
        description='Generate a report in the specified format.')
    for report_type in sorted(defaults.REPORTER_NAMES):
        if report_type not in {'memory-xml'}:
            report_group.add_argument('--%s-report' % report_type.replace('_', '-'),
                                      metavar='DIR',
                                      dest='special-opts:%s_report' % report_type)

    other_group = parser.add_argument_group(
        title='Miscellaneous')
    other_group.add_argument(
        '--quickstart-file', help=argparse.SUPPRESS)
    other_group.add_argument(
        '--junit-xml', help="Write junit.xml to the given file")
    other_group.add_argument(
        '--find-occurrences', metavar='CLASS.MEMBER',
        dest='special-opts:find_occurrences',
        help="Print out all usages of a class member (experimental)")
    other_group.add_argument(
        '--scripts-are-modules', action='store_true',
        help="Script x becomes module x instead of __main__")

    if server_options:
        # TODO: This flag is superfluous; remove after a short transition (2018-03-16)
        other_group.add_argument(
            '--experimental', action='store_true', dest='fine_grained_incremental',
            help="Enable fine-grained incremental mode")
        other_group.add_argument(
            '--use-fine-grained-cache', action='store_true',
            help="Use the cache in fine-grained incremental mode")

    # hidden options
    parser.add_argument(
        '--stats', action='store_true', dest='dump_type_stats',
        help=argparse.SUPPRESS)
    parser.add_argument(
        '--inferstats', action='store_true', dest='dump_inference_stats',
        help=argparse.SUPPRESS)
    parser.add_argument(
        '--dump-build-stats', action='store_true',
        help=argparse.SUPPRESS)
    # --debug-cache will disable any cache-related compressions/optimizations,
    # which will make the cache writing process output pretty-printed JSON (which
    # is easier to debug).
    parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS)
    # --dump-deps will dump all fine-grained dependencies to stdout
    parser.add_argument('--dump-deps', action='store_true', help=argparse.SUPPRESS)
    # --dump-graph will dump the contents of the graph of SCCs and exit.
    parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS)
    # --semantic-analysis-only does exactly that.
    parser.add_argument('--semantic-analysis-only', action='store_true', help=argparse.SUPPRESS)
    # --local-partial-types disallows partial types spanning module top level and a function
    # (implicitly defined in fine-grained incremental mode)
    parser.add_argument('--local-partial-types', action='store_true', help=argparse.SUPPRESS)
    # --logical-deps adds some more dependencies that are not semantically needed, but
    # may be helpful to determine relative importance of classes and functions for overall
    # type precision in a code base. It also _removes_ some deps, so this flag should be never
    # used except for generating code stats. This also automatically enables --cache-fine-grained.
    # NOTE: This is an experimental option that may be modified or removed at any time.
    parser.add_argument('--logical-deps', action='store_true', help=argparse.SUPPRESS)
    # --bazel changes some behaviors for use with Bazel (https://bazel.build).
    parser.add_argument('--bazel', action='store_true', help=argparse.SUPPRESS)
    # --package-root adds a directory below which directories are considered
    # packages even without __init__.py.  May be repeated.
    parser.add_argument('--package-root', metavar='ROOT', action='append', default=[],
                        help=argparse.SUPPRESS)
    # --cache-map FILE ... gives a mapping from source files to cache files.
    # Each triple of arguments is a source file, a cache meta file, and a cache data file.
    # Modules not mentioned in the file will go through cache_dir.
    # Must be followed by another flag or by '--' (and then only file args may follow).
    parser.add_argument('--cache-map', nargs='+', dest='special-opts:cache_map',
                        help=argparse.SUPPRESS)

    # options specifying code to check
    code_group = parser.add_argument_group(
        title="Running code",
        description="Specify the code you want to type check. For more details, see "
                    "mypy.readthedocs.io/en/latest/running_mypy.html#running-mypy")
    code_group.add_argument(
        '-m', '--module', action='append', metavar='MODULE',
        default=[],
        dest='special-opts:modules',
        help="Type-check module; can repeat for more modules")
    code_group.add_argument(
        '-p', '--package', action='append', metavar='PACKAGE',
        default=[],
        dest='special-opts:packages',
        help="Type-check package recursively; can be repeated")
    code_group.add_argument(
        '-c', '--command', action='append', metavar='PROGRAM_TEXT',
        dest='special-opts:command',
        help="Type-check program passed in as string")
    code_group.add_argument(
        metavar='files', nargs='*', dest='special-opts:files',
        help="Type-check given files or directories")

    # Parse arguments once into a dummy namespace so we can get the
    # filename for the config file and know if the user requested all strict options.
    dummy = argparse.Namespace()
    parser.parse_args(args, dummy)
    config_file = dummy.config_file
    # Don't explicitly test if "config_file is not None" for this check.
    # This lets `--config-file=` (an empty string) be used to disable all config files.
    if config_file and not os.path.exists(config_file):
        parser.error("Cannot find config file '%s'" % config_file)

    # Parse config file first, so command line can override.
    options = Options()
    parse_config_file(options, config_file, stdout, stderr)

    # Set strict flags before parsing (if strict mode enabled), so other command
    # line options can override.
    if getattr(dummy, 'special-opts:strict'):  # noqa
        for dest, value in strict_flag_assignments:
            setattr(options, dest, value)

    # Override cache_dir if provided in the environment
    environ_cache_dir = os.getenv('MYPY_CACHE_DIR', '')
    if environ_cache_dir.strip():
        options.cache_dir = environ_cache_dir

    # Parse command line for real, using a split namespace.
    special_opts = argparse.Namespace()
    parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:'))

    # The python_version is either the default, which can be overridden via a config file,
    # or stored in special_opts and is passed via the command line.
    options.python_version = special_opts.python_version or options.python_version
    try:
        infer_python_executable(options, special_opts)
    except PythonExecutableInferenceError as e:
        parser.error(str(e))

    if special_opts.no_executable:
        options.python_executable = None

    # Paths listed in the config file will be ignored if any paths are passed on
    # the command line.
    if options.files and not special_opts.files:
        special_opts.files = options.files

    # Check for invalid argument combinations.
    if require_targets:
        code_methods = sum(bool(c) for c in [special_opts.modules + special_opts.packages,
                                             special_opts.command,
                                             special_opts.files])
        if code_methods == 0:
            parser.error("Missing target module, package, files, or command.")
        elif code_methods > 1:
            parser.error("May only specify one of: module/package, files, or command.")

    # Check for overlapping `--always-true` and `--always-false` flags.
    overlap = set(options.always_true) & set(options.always_false)
    if overlap:
        parser.error("You can't make a variable always true and always false (%s)" %
                     ', '.join(sorted(overlap)))

    # Set build flags.
    if options.strict_optional_whitelist is not None:
        # TODO: Deprecate, then kill this flag
        options.strict_optional = True

    if special_opts.find_occurrences:
        state.find_occurrences = special_opts.find_occurrences.split('.')
        assert state.find_occurrences is not None
        if len(state.find_occurrences) < 2:
            parser.error("Can only find occurrences of class members.")
        if len(state.find_occurrences) != 2:
            parser.error("Can only find occurrences of non-nested class members.")

    # Set reports.
    for flag, val in vars(special_opts).items():
        if flag.endswith('_report') and val is not None:
            report_type = flag[:-7].replace('_', '-')
            report_dir = val
            options.report_dirs[report_type] = report_dir

    # Process --package-root.
    if options.package_root:
        process_package_roots(fscache, parser, options)

    # Process --cache-map.
    if special_opts.cache_map:
        if options.sqlite_cache:
            parser.error("--cache-map is incompatible with --sqlite-cache")

        process_cache_map(parser, special_opts, options)

    # Let logical_deps imply cache_fine_grained (otherwise the former is useless).
    if options.logical_deps:
        options.cache_fine_grained = True

    # Set target.
    if special_opts.modules + special_opts.packages:
        options.build_type = BuildType.MODULE
        search_paths = SearchPaths((os.getcwd(),), tuple(mypy_path() + options.mypy_path), (), ())
        targets = []
        # TODO: use the same cache that the BuildManager will
        cache = FindModuleCache(search_paths, fscache, options, special_opts.packages)
        for p in special_opts.packages:
            if os.sep in p or os.altsep and os.altsep in p:
                fail("Package name '{}' cannot have a slash in it.".format(p), stderr)
            p_targets = cache.find_modules_recursive(p)
            if not p_targets:
                fail("Can't find package '{}'".format(p), stderr)
            targets.extend(p_targets)
        for m in special_opts.modules:
            targets.append(BuildSource(None, m, None))
        return targets, options
    elif special_opts.command:
        options.build_type = BuildType.PROGRAM_TEXT
        targets = [BuildSource(None, None, '\n'.join(special_opts.command))]
        return targets, options
    else:
        try:
            targets = create_source_list(special_opts.files, options, fscache)
        # Variable named e2 instead of e to work around mypyc bug #620
        # which causes issues when using the same variable to catch
        # exceptions of different types.
        except InvalidSourceList as e2:
            fail(str(e2), stderr)
        return targets, options
def process_package_roots(fscache: Optional[FileSystemCache],
                          parser: argparse.ArgumentParser,
                          options: Options) -> None:
    """Validate and normalize package_root.

    Each entry is made relative, slash-terminated (or '' for the current
    directory), and checked to stay on the current drive and below the
    current directory.  The result is written back to options.package_root
    and into the filesystem cache.
    """
    if fscache is None:
        parser.error("--package-root does not work here (no fscache)")
    assert fscache is not None  # Since mypy doesn't know parser.error() raises.
    # Do some stuff with drive letters to make Windows happy (esp. tests).
    current_drive, _ = os.path.splitdrive(os.getcwd())
    dot = os.curdir
    dotslash = os.curdir + os.sep
    dotdotslash = os.pardir + os.sep
    trivial_paths = {dot, dotslash}
    package_root = []
    for root in options.package_root:
        if os.path.isabs(root):
            parser.error("Package root cannot be absolute: %r" % root)
        drive, root = os.path.splitdrive(root)
        if drive and drive != current_drive:
            parser.error("Package root must be on current drive: %r" % (drive + root))
        # Empty package root is always okay.
        if root:
            root = os.path.relpath(root)  # Normalize the heck out of it.
            if root.startswith(dotdotslash):
                parser.error("Package root cannot be above current directory: %r" % root)
            if root in trivial_paths:
                root = ''
            elif not root.endswith(os.sep):
                root = root + os.sep
        package_root.append(root)
    options.package_root = package_root
    # Pass the package root on to the filesystem cache.
    fscache.set_package_root(package_root)


def process_cache_map(parser: argparse.ArgumentParser,
                      special_opts: argparse.Namespace,
                      options: Options) -> None:
    """Validate cache_map and copy into options.cache_map.

    special_opts.cache_map holds a flat list of (source, meta, data)
    triples; each validated triple is stored as
    options.cache_map[source] = (meta_file, data_file).
    """
    n = len(special_opts.cache_map)
    if n % 3 != 0:
        parser.error("--cache-map requires one or more triples (see source)")
    for i in range(0, n, 3):
        source, meta_file, data_file = special_opts.cache_map[i:i + 3]
        if source in options.cache_map:
            # Fixed: the original message had a stray ')' after '%s'.
            parser.error("Duplicate --cache-map source %s" % source)
        if not source.endswith('.py') and not source.endswith('.pyi'):
            parser.error("Invalid --cache-map source %s (triple[0] must be *.py[i])"
                         % source)
        if not meta_file.endswith('.meta.json'):
            parser.error("Invalid --cache-map meta_file %s (triple[1] must be *.meta.json)"
                         % meta_file)
        if not data_file.endswith('.data.json'):
            parser.error("Invalid --cache-map data_file %s (triple[2] must be *.data.json)"
                         % data_file)
        options.cache_map[source] = (meta_file, data_file)


def fail(msg: str, stderr: TextIO) -> None:
    """Print an error message to stderr and exit with status 2."""
    stderr.write('%s\n' % msg)
    sys.exit(2)
from typing import Dict, List

from mypy.expandtype import expand_type
from mypy.nodes import TypeInfo
from mypy.types import Type, TypeVarId, Instance, AnyType, TypeOfAny, ProperType


def map_instance_to_supertype(instance: Instance,
                              superclass: TypeInfo) -> Instance:
    """Produce a supertype of `instance` that is an Instance
    of `superclass`, mapping type arguments up the chain of bases.

    If `superclass` is not a nominal superclass of `instance.type`,
    then all type arguments are mapped to 'Any'.
    """
    if instance.type == superclass:
        # Fast path: `instance` already belongs to `superclass`.
        return instance

    if not superclass.type_vars:
        # Fast path: `superclass` has no type variables to map to.
        return Instance(superclass, [])

    return map_instance_to_supertypes(instance, superclass)[0]


def map_instance_to_supertypes(instance: Instance,
                               supertype: TypeInfo) -> List[Instance]:
    # FIX: Currently we should only have one supertype per interface, so no
    #      need to return an array
    mapped = []  # type: List[Instance]
    for path in class_derivation_paths(instance.type, supertype):
        # Walk each derivation path, lifting every candidate one base
        # class at a time.
        frontier = [instance]
        for step in path:
            lifted = []  # type: List[Instance]
            for candidate in frontier:
                lifted.extend(map_instance_to_direct_supertypes(candidate, step))
            frontier = lifted
        mapped.extend(frontier)
    if not mapped:
        # Nothing. Presumably due to an error. Construct a dummy using Any.
        any_type = AnyType(TypeOfAny.from_error)
        return [Instance(supertype, [any_type] * len(supertype.type_vars))]
    return mapped
InterfaceImplementationPaths(A, B) == [[B]] if A inherits B InterfaceImplementationPaths(A, C) == [[B, C]] if A inherits B and B inherits C """ # FIX: Currently we might only ever have a single path, so this could be # simplified result = [] # type: List[List[TypeInfo]] for base in typ.bases: btype = base.type if btype == supertype: result.append([btype]) else: # Try constructing a longer path via the base class. for path in class_derivation_paths(btype, supertype): result.append([btype] + path) return result def map_instance_to_direct_supertypes(instance: Instance, supertype: TypeInfo) -> List[Instance]: # FIX: There should only be one supertypes, always. typ = instance.type result = [] # type: List[Instance] for b in typ.bases: if b.type == supertype: env = instance_to_type_environment(instance) t = expand_type(b, env) assert isinstance(t, ProperType) assert isinstance(t, Instance) result.append(t) if result: return result else: # Relationship with the supertype not specified explicitly. Use dynamic # type arguments implicitly. any_type = AnyType(TypeOfAny.unannotated) return [Instance(supertype, [any_type] * len(supertype.type_vars))] def instance_to_type_environment(instance: Instance) -> Dict[TypeVarId, Type]: """Given an Instance, produce the resulting type environment for type variables bound by the Instance's class definition. An Instance is a type application of a class (a TypeInfo) to its required number of type arguments. So this environment consists of the class's type variables mapped to the Instance's actual arguments. The type variables are mapped by their `id`. 
""" return {binder.id: arg for binder, arg in zip(instance.type.defn.type_vars, instance.args)} mypy-0.761/mypy/meet.py0000644€tŠÔÚ€2›s®0000010224113576752246021222 0ustar jukkaDROPBOX\Domain Users00000000000000from collections import OrderedDict from typing import List, Optional, Tuple, Callable from mypy.join import ( is_similar_callables, combine_similar_callables, join_type_list, unpack_callback_protocol ) from mypy.types import ( Type, AnyType, TypeVisitor, UnboundType, NoneType, TypeVarType, Instance, CallableType, TupleType, TypedDictType, ErasedType, UnionType, PartialType, DeletedType, UninhabitedType, TypeType, TypeOfAny, Overloaded, FunctionLike, LiteralType, ProperType, get_proper_type, get_proper_types, TypeAliasType ) from mypy.subtypes import is_equivalent, is_subtype, is_callable_compatible, is_proper_subtype from mypy.erasetype import erase_type from mypy.maptype import map_instance_to_supertype from mypy.typeops import tuple_fallback, make_simplified_union, is_recursive_pair from mypy import state # TODO Describe this module. def trivial_meet(s: Type, t: Type) -> ProperType: """Return one of types (expanded) if it is a subtype of other, otherwise bottom type.""" if is_subtype(s, t): return get_proper_type(s) elif is_subtype(t, s): return get_proper_type(t) else: if state.strict_optional: return UninhabitedType() else: return NoneType() def meet_types(s: Type, t: Type) -> ProperType: """Return the greatest lower bound of two types.""" if is_recursive_pair(s, t): # This case can trigger an infinite recursion, general support for this will be # tricky so we use a trivial meet (like for protocols). 
# (continuation of meet_types(), whose `def` line precedes this excerpt)
        return trivial_meet(s, t)
    s = get_proper_type(s)
    t = get_proper_type(t)
    # Asymmetric shortcuts: ErasedType wins, Any defers to the other side.
    if isinstance(s, ErasedType):
        return s
    if isinstance(s, AnyType):
        return t
    # Normalize so that a union (if any) is the visited type `t`, since
    # TypeMeetVisitor.visit_union_type handles the union/non-union case.
    if isinstance(s, UnionType) and not isinstance(t, UnionType):
        s, t = t, s
    return t.accept(TypeMeetVisitor(s))


def narrow_declared_type(declared: Type, narrowed: Type) -> Type:
    """Return the declared type narrowed down to another type.

    Used e.g. for isinstance() checks: the result is the portion of
    `declared` that is compatible with `narrowed`.
    """
    # TODO: check infinite recursion for aliases here.
    declared = get_proper_type(declared)
    narrowed = get_proper_type(narrowed)
    if declared == narrowed:
        return declared
    if isinstance(declared, UnionType):
        # Narrow each union item independently and re-simplify.
        return make_simplified_union([narrow_declared_type(x, narrowed)
                                      for x in declared.relevant_items()])
    elif not is_overlapping_types(declared, narrowed,
                                  prohibit_none_typevar_overlap=True):
        # No overlap at all: the narrowed type is uninhabited
        # (or None when strict-optional is off).
        if state.strict_optional:
            return UninhabitedType()
        else:
            return NoneType()
    elif isinstance(narrowed, UnionType):
        return make_simplified_union([narrow_declared_type(declared, x)
                                      for x in narrowed.relevant_items()])
    elif isinstance(narrowed, AnyType):
        return narrowed
    elif isinstance(declared, TypeType) and isinstance(narrowed, TypeType):
        return TypeType.make_normalized(narrow_declared_type(declared.item, narrowed.item))
    elif isinstance(declared, (Instance, TupleType, TypeType, LiteralType)):
        return meet_types(declared, narrowed)
    elif isinstance(declared, TypedDictType) and isinstance(narrowed, Instance):
        # Special case useful for selecting TypedDicts from unions using isinstance(x, dict).
        if (narrowed.type.fullname == 'builtins.dict' and
                all(isinstance(t, AnyType) for t in get_proper_types(narrowed.args))):
            return declared
        return meet_types(declared, narrowed)
    return narrowed


def get_possible_variants(typ: Type) -> List[Type]:
    """This function takes any "Union-like" type and returns a list of the available "options".
Specifically, there are currently exactly three different types that can have
    "variants" or are "union-like":
    - Unions
    - TypeVars with value restrictions
    - Overloads

    This function will return a list of each "option" present in those types.

    If this function receives any other type, we return a list containing just that
    original type. (E.g. pretend the type was contained within a singleton union).

    The only exception is regular TypeVars: we return a list containing that
    TypeVar's upper bound.

    This function is useful primarily when checking to see if two types are overlapping:
    the algorithm to check if two unions are overlapping is fundamentally the same as
    the algorithm for checking if two overloads are overlapping.

    Normalizing both kinds of types in the same way lets us reuse the same algorithm
    for both.
    """
    typ = get_proper_type(typ)

    if isinstance(typ, TypeVarType):
        if len(typ.values) > 0:
            return typ.values
        else:
            return [typ.upper_bound]
    elif isinstance(typ, UnionType):
        return list(typ.items)
    elif isinstance(typ, Overloaded):
        # Note: doing 'return typ.items()' makes mypy
        # infer a too-specific return type of List[CallableType]
        return list(typ.items())
    else:
        return [typ]


def is_overlapping_types(left: Type,
                         right: Type,
                         ignore_promotions: bool = False,
                         prohibit_none_typevar_overlap: bool = False) -> bool:
    """Can a value of type 'left' also be of type 'right' or vice-versa?

    If 'ignore_promotions' is True, we ignore promotions while checking for overlaps.
    If 'prohibit_none_typevar_overlap' is True, we disallow None from overlapping with
    TypeVars (in both strict-optional and non-strict-optional mode).
    """
    left, right = get_proper_types((left, right))

    def _is_overlapping_types(left: Type, right: Type) -> bool:
        '''Encode the kind of overlapping check to perform.

        This function mostly exists so we don't have to repeat keyword arguments everywhere.'''
        return is_overlapping_types(
            left, right,
            ignore_promotions=ignore_promotions,
            prohibit_none_typevar_overlap=prohibit_none_typevar_overlap)

    # We should never encounter this type.
    if isinstance(left, PartialType) or isinstance(right, PartialType):
        assert False, "Unexpectedly encountered partial type"

    # We should also never encounter these types, but it's possible a few
    # have snuck through due to unrelated bugs. For now, we handle these
    # in the same way we handle 'Any'.
    #
    # TODO: Replace these with an 'assert False' once we are more confident.
    illegal_types = (UnboundType, ErasedType, DeletedType)
    if isinstance(left, illegal_types) or isinstance(right, illegal_types):
        return True

    # 'Any' may or may not be overlapping with the other type
    if isinstance(left, AnyType) or isinstance(right, AnyType):
        return True

    # When running under non-strict optional mode, simplify away types of
    # the form 'Union[A, B, C, None]' into just 'Union[A, B, C]'.
    if not state.strict_optional:
        if isinstance(left, UnionType):
            left = UnionType.make_union(left.relevant_items())
        if isinstance(right, UnionType):
            right = UnionType.make_union(right.relevant_items())
        left, right = get_proper_types((left, right))

    # We check for complete overlaps next as a general-purpose failsafe.
    # If this check fails, we start checking to see if there exists a
    # *partial* overlap between types.
    #
    # These checks will also handle the NoneType and UninhabitedType cases for us.
    if (is_proper_subtype(left, right, ignore_promotions=ignore_promotions)
            or is_proper_subtype(right, left, ignore_promotions=ignore_promotions)):
        return True

    # See the docstring for 'get_possible_variants' for more info on what the
    # following lines are doing.
    left_possible = get_possible_variants(left)
    right_possible = get_possible_variants(right)

    # We start by checking multi-variant types like Unions first.  We also perform
    # the same logic if either type happens to be a TypeVar.
    #
    # Handling the TypeVars now lets us simulate having them bind to the corresponding
    # type -- if we deferred these checks, the "return-early" logic of the other
    # checks will prevent us from detecting certain overlaps.
    #
    # If both types are singleton variants (and are not TypeVars), we've hit the base case:
    # we skip these checks to avoid infinitely recursing.

    def is_none_typevar_overlap(t1: Type, t2: Type) -> bool:
        t1, t2 = get_proper_types((t1, t2))
        return isinstance(t1, NoneType) and isinstance(t2, TypeVarType)

    if prohibit_none_typevar_overlap:
        if is_none_typevar_overlap(left, right) or is_none_typevar_overlap(right, left):
            return False

    if (len(left_possible) > 1 or len(right_possible) > 1
            or isinstance(left, TypeVarType) or isinstance(right, TypeVarType)):
        for l in left_possible:
            for r in right_possible:
                if _is_overlapping_types(l, r):
                    return True
        return False

    # Now that we've finished handling TypeVars, we're free to end early
    # if one of the types is None and we're running in strict-optional mode.
    # (None only overlaps with None in strict-optional mode).
    #
    # We must perform this check after the TypeVar checks because
    # a TypeVar could be bound to None, for example.
    if state.strict_optional and isinstance(left, NoneType) != isinstance(right, NoneType):
        return False

    # Next, we handle single-variant types that may be inherently partially overlapping:
    #
    # - TypedDicts
    # - Tuples
    #
    # If we cannot identify a partial overlap and end early, we degrade these two types
    # into their 'Instance' fallbacks.

    if isinstance(left, TypedDictType) and isinstance(right, TypedDictType):
        return are_typed_dicts_overlapping(left, right, ignore_promotions=ignore_promotions)
    elif typed_dict_mapping_pair(left, right):
        # Overlaps between TypedDicts and Mappings require dedicated logic.
        return typed_dict_mapping_overlap(left, right,
                                          overlapping=_is_overlapping_types)
    elif isinstance(left, TypedDictType):
        left = left.fallback
    elif isinstance(right, TypedDictType):
        right = right.fallback

    if is_tuple(left) and is_tuple(right):
        return are_tuples_overlapping(left, right, ignore_promotions=ignore_promotions)
    elif isinstance(left, TupleType):
        left = tuple_fallback(left)
    elif isinstance(right, TupleType):
        right = tuple_fallback(right)

    # Next, we handle single-variant types that cannot be inherently partially overlapping,
    # but do require custom logic to inspect.
    #
    # As before, we degrade into 'Instance' whenever possible.

    if isinstance(left, TypeType) and isinstance(right, TypeType):
        return _is_overlapping_types(left.item, right.item)

    def _type_object_overlap(left: Type, right: Type) -> bool:
        """Special cases for type object types overlaps."""
        # TODO: these checks are a bit in gray area, adjust if they cause problems.
        left, right = get_proper_types((left, right))
        # 1. Type[C] vs Callable[..., C], where the latter is class object.
        if isinstance(left, TypeType) and isinstance(right, CallableType) and right.is_type_obj():
            return _is_overlapping_types(left.item, right.ret_type)
        # 2. Type[C] vs Meta, where Meta is a metaclass for C.
        if isinstance(left, TypeType) and isinstance(right, Instance):
            if isinstance(left.item, Instance):
                left_meta = left.item.type.metaclass_type
                if left_meta is not None:
                    return _is_overlapping_types(left_meta, right)
                # builtins.type (default metaclass) overlaps with all metaclasses
                return right.type.has_base('builtins.type')
            elif isinstance(left.item, AnyType):
                return right.type.has_base('builtins.type')
        # 3. Callable[..., C] vs Meta is considered below, when we switch to fallbacks.
        return False

    if isinstance(left, TypeType) or isinstance(right, TypeType):
        return _type_object_overlap(left, right) or _type_object_overlap(right, left)

    if isinstance(left, CallableType) and isinstance(right, CallableType):
        return is_callable_compatible(left, right,
                                      is_compat=_is_overlapping_types,
                                      ignore_pos_arg_names=True,
                                      allow_partial_overlap=True)
    elif isinstance(left, CallableType):
        left = left.fallback
    elif isinstance(right, CallableType):
        right = right.fallback

    if isinstance(left, LiteralType) and isinstance(right, LiteralType):
        if left.value == right.value:
            # If values are the same, we still need to check if fallbacks are overlapping,
            # this is done below.
            left = left.fallback
            right = right.fallback
        else:
            return False
    elif isinstance(left, LiteralType):
        left = left.fallback
    elif isinstance(right, LiteralType):
        right = right.fallback

    # Finally, we handle the case where left and right are instances.

    if isinstance(left, Instance) and isinstance(right, Instance):
        # First we need to handle promotions and structural compatibility for instances
        # that came as fallbacks, so simply call is_subtype() to avoid code duplication.
        if (is_subtype(left, right, ignore_promotions=ignore_promotions)
                or is_subtype(right, left, ignore_promotions=ignore_promotions)):
            return True

        # Two unrelated types cannot be partially overlapping: they're disjoint.
        if left.type.has_base(right.type.fullname):
            left = map_instance_to_supertype(left, right.type)
        elif right.type.has_base(left.type.fullname):
            right = map_instance_to_supertype(right, left.type)
        else:
            return False

        if len(left.args) == len(right.args):
            # Note: we don't really care about variance here, since the overlapping check
            # is symmetric and since we want to return 'True' even for partial overlaps.
            #
            # For example, suppose we have two types Wrapper[Parent] and Wrapper[Child].
            # It doesn't matter whether Wrapper is covariant or contravariant since
            # either way, one of the two types will overlap with the other.
            #
            # Similarly, if Wrapper was invariant, the two types could still be partially
            # overlapping -- what if Wrapper[Parent] happened to contain only instances of
            # specifically Child?
            #
            # Or, to use a more concrete example, List[Union[A, B]] and List[Union[B, C]]
            # would be considered partially overlapping since it's possible for both lists
            # to contain only instances of B at runtime.
            for left_arg, right_arg in zip(left.args, right.args):
                if _is_overlapping_types(left_arg, right_arg):
                    return True

        return False

    # We ought to have handled every case by now: we conclude the
    # two types are not overlapping, either completely or partially.
    #
    # Note: it's unclear however, whether returning False is the right thing
    # to do when inferring reachability -- see  https://github.com/python/mypy/issues/5529

    assert type(left) != type(right)
    return False


def is_overlapping_erased_types(left: Type, right: Type, *,
                                ignore_promotions: bool = False) -> bool:
    """The same as 'is_overlapping_types', except the types are erased first."""
    return is_overlapping_types(erase_type(left), erase_type(right),
                                ignore_promotions=ignore_promotions,
                                prohibit_none_typevar_overlap=True)


def are_typed_dicts_overlapping(left: TypedDictType, right: TypedDictType, *,
                                ignore_promotions: bool = False,
                                prohibit_none_typevar_overlap: bool = False) -> bool:
    """Returns 'true' if left and right are overlapping TypeDictTypes."""
    # All required keys in left are present and overlapping with something in right
    for key in left.required_keys:
        if key not in right.items:
            return False
        if not is_overlapping_types(left.items[key], right.items[key],
                                    ignore_promotions=ignore_promotions,
                                    prohibit_none_typevar_overlap=prohibit_none_typevar_overlap):
            return False

    # Repeat check in the other direction
    for key in right.required_keys:
        if key not in left.items:
            return False
        # NOTE(review): unlike the loop above, this call does not forward
        # prohibit_none_typevar_overlap -- possibly an oversight; confirm
        # before changing behavior.
        if not is_overlapping_types(left.items[key], right.items[key],
                                    ignore_promotions=ignore_promotions):
            return False

    # The presence of any additional optional
# keys does not affect whether the two
    # TypedDicts are partially overlapping: the dicts would be overlapping if the
    # keys happened to be missing.
    return True


def are_tuples_overlapping(left: Type, right: Type, *,
                           ignore_promotions: bool = False,
                           prohibit_none_typevar_overlap: bool = False) -> bool:
    """Returns true if left and right are overlapping tuples."""
    left, right = get_proper_types((left, right))
    # Promote a variadic tuple (Tuple[X, ...]) to a fixed-length tuple that
    # matches the other operand's length, so items can be zipped below.
    left = adjust_tuple(left, right) or left
    right = adjust_tuple(right, left) or right
    assert isinstance(left, TupleType), 'Type {} is not a tuple'.format(left)
    assert isinstance(right, TupleType), 'Type {} is not a tuple'.format(right)
    if len(left.items) != len(right.items):
        return False
    return all(is_overlapping_types(l, r,
                                    ignore_promotions=ignore_promotions,
                                    prohibit_none_typevar_overlap=prohibit_none_typevar_overlap)
               for l, r in zip(left.items, right.items))


def adjust_tuple(left: ProperType, r: ProperType) -> Optional[TupleType]:
    """Find out if `left` is a Tuple[A, ...], and adjust its length to `r`.

    Returns None when `left` is not a variadic builtins.tuple instance.
    """
    if isinstance(left, Instance) and left.type.fullname == 'builtins.tuple':
        n = r.length() if isinstance(r, TupleType) else 1
        return TupleType([left.args[0]] * n, left)
    return None


def is_tuple(typ: Type) -> bool:
    # True for both fixed-length TupleTypes and variadic builtins.tuple instances.
    typ = get_proper_type(typ)
    return (isinstance(typ, TupleType)
            or (isinstance(typ, Instance) and typ.type.fullname == 'builtins.tuple'))


class TypeMeetVisitor(TypeVisitor[ProperType]):
    """Visitor computing meet_types(s, t): `t` is the visited type, `s` the other operand."""

    def __init__(self, s: ProperType) -> None:
        self.s = s

    def visit_unbound_type(self, t: UnboundType) -> ProperType:
        if isinstance(self.s, NoneType):
            if state.strict_optional:
                return AnyType(TypeOfAny.special_form)
            else:
                return self.s
        elif isinstance(self.s, UninhabitedType):
            return self.s
        else:
            return AnyType(TypeOfAny.special_form)

    def visit_any(self, t: AnyType) -> ProperType:
        return self.s

    def visit_union_type(self, t: UnionType) -> ProperType:
        if isinstance(self.s, UnionType):
            # Meet every pair of items from the two unions.
            meets = []  # type: List[Type]
            for x in t.items:
                for y in self.s.items:
# (continuation of TypeMeetVisitor.visit_union_type, begun on the previous line)
                    meets.append(meet_types(x, y))
        else:
            meets = [meet_types(x, self.s)
                     for x in t.items]
        return make_simplified_union(meets)

    def visit_none_type(self, t: NoneType) -> ProperType:
        if state.strict_optional:
            if isinstance(self.s, NoneType) or (isinstance(self.s, Instance) and
                                                self.s.type.fullname == 'builtins.object'):
                return t
            else:
                return UninhabitedType()
        else:
            # Without strict-optional, None is compatible with everything.
            return t

    def visit_uninhabited_type(self, t: UninhabitedType) -> ProperType:
        return t

    def visit_deleted_type(self, t: DeletedType) -> ProperType:
        if isinstance(self.s, NoneType):
            if state.strict_optional:
                return t
            else:
                return self.s
        elif isinstance(self.s, UninhabitedType):
            return self.s
        else:
            return t

    def visit_erased_type(self, t: ErasedType) -> ProperType:
        return self.s

    def visit_type_var(self, t: TypeVarType) -> ProperType:
        if isinstance(self.s, TypeVarType) and self.s.id == t.id:
            return self.s
        else:
            return self.default(self.s)

    def visit_instance(self, t: Instance) -> ProperType:
        if isinstance(self.s, Instance):
            si = self.s
            if t.type == si.type:
                if is_subtype(t, self.s) or is_subtype(self.s, t):
                    # Combine type arguments. We could have used join below
                    # equivalently.
                    args = []  # type: List[Type]
                    for i in range(len(t.args)):
                        args.append(self.meet(t.args[i], si.args[i]))
                    return Instance(t.type, args)
                else:
                    if state.strict_optional:
                        return UninhabitedType()
                    else:
                        return NoneType()
            else:
                if is_subtype(t, self.s):
                    return t
                elif is_subtype(self.s, t):
                    # See also above comment.
                    return self.s
                else:
                    if state.strict_optional:
                        return UninhabitedType()
                    else:
                        return NoneType()
        elif isinstance(self.s, FunctionLike) and t.type.is_protocol:
            call = unpack_callback_protocol(t)
            if call:
                return meet_types(call, self.s)
        elif isinstance(self.s, FunctionLike) and self.s.is_type_obj() and t.type.is_metaclass():
            if is_subtype(self.s.fallback, t):
                return self.s
            return self.default(self.s)
        elif isinstance(self.s, TypeType):
            return meet_types(t, self.s)
        elif isinstance(self.s, TupleType):
            return meet_types(t, self.s)
        elif isinstance(self.s, LiteralType):
            return meet_types(t, self.s)
        elif isinstance(self.s, TypedDictType):
            return meet_types(t, self.s)
        return self.default(self.s)

    def visit_callable_type(self, t: CallableType) -> ProperType:
        if isinstance(self.s, CallableType) and is_similar_callables(t, self.s):
            if is_equivalent(t, self.s):
                return combine_similar_callables(t, self.s)
            result = meet_similar_callables(t, self.s)
            # We set the from_type_type flag to suppress error when a collection of
            # concrete class objects gets inferred as their common abstract superclass.
            if not ((t.is_type_obj() and t.type_object().is_abstract) or
                    (self.s.is_type_obj() and self.s.type_object().is_abstract)):
                result.from_type_type = True
            if isinstance(get_proper_type(result.ret_type), UninhabitedType):
                # Return a plain None or <uninhabited> instead of a weird function.
                return self.default(self.s)
            return result
        elif isinstance(self.s, TypeType) and t.is_type_obj() and not t.is_generic():
            # In this case we are able to potentially produce a better meet.
            res = meet_types(self.s.item, t.ret_type)
            if not isinstance(res, (NoneType, UninhabitedType)):
                return TypeType.make_normalized(res)
            return self.default(self.s)
        elif isinstance(self.s, Instance) and self.s.type.is_protocol:
            call = unpack_callback_protocol(self.s)
            if call:
                return meet_types(t, call)
        return self.default(self.s)

    def visit_overloaded(self, t: Overloaded) -> ProperType:
        # TODO: Implement a better algorithm that covers at least the same cases
        # as TypeJoinVisitor.visit_overloaded().
        s = self.s
        if isinstance(s, FunctionLike):
            if s.items() == t.items():
                return Overloaded(t.items())
            elif is_subtype(s, t):
                return s
            elif is_subtype(t, s):
                return t
            else:
                return meet_types(t.fallback, s.fallback)
        elif isinstance(self.s, Instance) and self.s.type.is_protocol:
            call = unpack_callback_protocol(self.s)
            if call:
                return meet_types(t, call)
        return meet_types(t.fallback, s)

    def visit_tuple_type(self, t: TupleType) -> ProperType:
        if isinstance(self.s, TupleType) and self.s.length() == t.length():
            items = []  # type: List[Type]
            for i in range(t.length()):
                items.append(self.meet(t.items[i], self.s.items[i]))
            # TODO: What if the fallbacks are different?
            return TupleType(items, tuple_fallback(t))
        elif isinstance(self.s, Instance):
            # meet(Tuple[t1, t2, <...>], Tuple[s, ...]) == Tuple[meet(t1, s), meet(t2, s), <...>].
            if self.s.type.fullname == 'builtins.tuple' and self.s.args:
                return t.copy_modified(items=[meet_types(it, self.s.args[0])
                                              for it in t.items])
            elif is_proper_subtype(t, self.s):
                # A named tuple that inherits from a normal class
                return t
        return self.default(self.s)

    def visit_typeddict_type(self, t: TypedDictType) -> ProperType:
        if isinstance(self.s, TypedDictType):
            # Shared keys must be equivalent and agree on required-ness.
            for (name, l, r) in self.s.zip(t):
                if (not is_equivalent(l, r) or
                        (name in t.required_keys) != (name in self.s.required_keys)):
                    return self.default(self.s)
            item_list = []  # type: List[Tuple[str, Type]]
            for (item_name, s_item_type, t_item_type) in self.s.zipall(t):
                if s_item_type is not None:
                    item_list.append((item_name, s_item_type))
                else:
                    # at least one of s_item_type and t_item_type is not None
                    assert t_item_type is not None
                    item_list.append((item_name, t_item_type))
            items = OrderedDict(item_list)
            mapping_value_type = join_type_list(list(items.values()))
            fallback = self.s.create_anonymous_fallback(value_type=mapping_value_type)
            required_keys = t.required_keys | self.s.required_keys
            return TypedDictType(items, required_keys, fallback)
        elif isinstance(self.s, Instance) and is_subtype(t, self.s):
            return t
        else:
            return self.default(self.s)

    def visit_literal_type(self, t: LiteralType) -> ProperType:
        if isinstance(self.s, LiteralType) and self.s == t:
            return t
        elif isinstance(self.s, Instance) and is_subtype(t.fallback, self.s):
            return t
        else:
            return self.default(self.s)

    def visit_partial_type(self, t: PartialType) -> ProperType:
        # We can't determine the meet of partial types. We should never get here.
        assert False, 'Internal error'

    def visit_type_type(self, t: TypeType) -> ProperType:
        if isinstance(self.s, TypeType):
            typ = self.meet(t.item, self.s.item)
            if not isinstance(typ, NoneType):
                typ = TypeType.make_normalized(typ, line=t.line)
            return typ
        elif isinstance(self.s, Instance) and self.s.type.fullname == 'builtins.type':
            return t
        elif isinstance(self.s, CallableType):
            return self.meet(t, self.s)
        else:
            return self.default(self.s)

    def visit_type_alias_type(self, t: TypeAliasType) -> ProperType:
        assert False, "This should be never called, got {}".format(t)

    def meet(self, s: Type, t: Type) -> ProperType:
        return meet_types(s, t)

    def default(self, typ: Type) -> ProperType:
        # Fallback meet: Any for unbound types, otherwise the bottom type
        # (uninhabited under strict-optional, None without it).
        if isinstance(typ, UnboundType):
            return AnyType(TypeOfAny.special_form)
        else:
            if state.strict_optional:
                return UninhabitedType()
            else:
                return NoneType()


def meet_similar_callables(t: CallableType, s: CallableType) -> CallableType:
    from mypy.join import join_types
    # Meet of callables is contravariant in argument types (hence join)
    # and covariant in the return type (hence meet).
    arg_types = []  # type: List[Type]
    for i in range(len(t.arg_types)):
        arg_types.append(join_types(t.arg_types[i], s.arg_types[i]))
    # TODO in combine_similar_callables also applies here (names and kinds)
    # The fallback type can be either 'function' or 'type'. The result should have 'function' as
    # fallback only if both operands have it as 'function'.
    if t.fallback.type.fullname != 'builtins.function':
        fallback = t.fallback
    else:
        fallback = s.fallback
    return t.copy_modified(arg_types=arg_types,
                           ret_type=meet_types(t.ret_type, s.ret_type),
                           fallback=fallback,
                           name=None)


def meet_type_list(types: List[Type]) -> Type:
    if not types:
        # This should probably be builtins.object but that is hard to get and
        # it doesn't matter for any current users.
        return AnyType(TypeOfAny.implementation_artifact)
    met = types[0]
    for t in types[1:]:
        met = meet_types(met, t)
    return met


def typed_dict_mapping_pair(left: Type, right: Type) -> bool:
    """Is this a pair where one type is a TypedDict and another one is an instance of Mapping?
This case requires a precise/principled consideration because there are two use cases
    that push the boundary the opposite ways: we need to avoid spurious overlaps to avoid
    false positives for overloads, but we also need to avoid spuriously non-overlapping types
    to avoid false positives with --strict-equality.
    """
    left, right = get_proper_types((left, right))
    assert not isinstance(left, TypedDictType) or not isinstance(right, TypedDictType)

    if isinstance(left, TypedDictType):
        _, other = left, right
    elif isinstance(right, TypedDictType):
        _, other = right, left
    else:
        return False
    return isinstance(other, Instance) and other.type.has_base('typing.Mapping')


def typed_dict_mapping_overlap(left: Type, right: Type,
                               overlapping: Callable[[Type, Type], bool]) -> bool:
    """Check if a TypedDict type is overlapping with a Mapping.

    The basic logic here consists of two rules:

    * A TypedDict with some required keys is overlapping with Mapping[str, <t>]
      if and only if every key type is overlapping with <t>. For example:

      - TypedDict(x=int, y=str) overlaps with Dict[str, Union[str, int]]
      - TypedDict(x=int, y=str) doesn't overlap with Dict[str, int]

      Note that any additional non-required keys can't change the above result.

    * A TypedDict with no required keys overlaps with Mapping[str, <t>] if and
      only if at least one of key types overlaps with <t>. For example:

      - TypedDict(x=str, y=str, total=False) overlaps with Dict[str, str]
      - TypedDict(x=str, y=str, total=False) doesn't overlap with Dict[str, int]
      - TypedDict(x=int, y=str, total=False) overlaps with Dict[str, str]

    As usual empty, dictionaries lie in a gray area. In general, List[str] and List[<nothing>]
    are considered non-overlapping despite empty list belongs to both. However, List[int]
    and List[<nothing>] are considered overlapping.

    So here we follow the same logic: a TypedDict with no required keys is considered
    non-overlapping with Mapping[str, <nothing>], but is considered overlapping with
    Mapping[<nothing>, <nothing>]. This way we avoid false positives for overloads, and
    also avoid false positives for comparisons like SomeTypedDict == {} under
    --strict-equality.

    NOTE(review): the '<t>'/'<nothing>' placeholders above were stripped by a
    text-extraction step in this copy and have been restored here.
    """
    left, right = get_proper_types((left, right))
    assert not isinstance(left, TypedDictType) or not isinstance(right, TypedDictType)

    if isinstance(left, TypedDictType):
        assert isinstance(right, Instance)
        typed, other = left, right
    else:
        assert isinstance(left, Instance)
        assert isinstance(right, TypedDictType)
        typed, other = right, left

    mapping = next(base for base in other.type.mro if base.fullname == 'typing.Mapping')
    other = map_instance_to_supertype(other, mapping)
    key_type, value_type = get_proper_types(other.args)

    # TODO: is there a cleaner way to get str_type here?
    fallback = typed.as_anonymous().fallback
    str_type = fallback.type.bases[0].args[0]  # typing._TypedDict inherits Mapping[str, object]

    # Special case: a TypedDict with no required keys overlaps with an empty dict.
    if isinstance(key_type, UninhabitedType) and isinstance(value_type, UninhabitedType):
        return not typed.required_keys

    if typed.required_keys:
        if not overlapping(key_type, str_type):
            return False
        return all(overlapping(typed.items[k], value_type) for k in typed.required_keys)
    else:
        if not overlapping(key_type, str_type):
            return False
        non_required = set(typed.items.keys()) - typed.required_keys
        return any(overlapping(typed.items[k], value_type) for k in non_required)
mypy-0.761/mypy/memprofile.py0000644€tŠÔÚ€2›s®0000000750213576752246022433 0ustar jukkaDROPBOX\Domain Users00000000000000
"""Utility for dumping memory usage stats.

This is tailored to mypy and knows (a little) about which list objects are
owned by particular AST nodes, etc.
""" from collections import defaultdict import gc import sys from typing import List, Dict, Iterable, Tuple, cast from mypy.nodes import FakeInfo, Node from mypy.types import Type from mypy.util import get_class_descriptors def collect_memory_stats() -> Tuple[Dict[str, int], Dict[str, int]]: """Return stats about memory use. Return a tuple with these items: - Dict from object kind to number of instances of that kind - Dict from object kind to total bytes used by all instances of that kind """ objs = gc.get_objects() find_recursive_objects(objs) inferred = {} for obj in objs: if type(obj) is FakeInfo: # Processing these would cause a crash. continue n = type(obj).__name__ if hasattr(obj, '__dict__'): # Keep track of which class a particular __dict__ is associated with. inferred[id(obj.__dict__)] = '%s (__dict__)' % n if isinstance(obj, (Node, Type)): # type: ignore if hasattr(obj, '__dict__'): for x in obj.__dict__.values(): if isinstance(x, list): # Keep track of which node a list is associated with. 
inferred[id(x)] = '%s (list)' % n for k in get_class_descriptors(type(obj)): x = getattr(obj, k, None) if isinstance(x, list): inferred[id(x)] = '%s (list)' % n freqs = {} # type: Dict[str, int] memuse = {} # type: Dict[str, int] for obj in objs: if id(obj) in inferred: name = inferred[id(obj)] else: name = type(obj).__name__ freqs[name] = freqs.get(name, 0) + 1 memuse[name] = memuse.get(name, 0) + sys.getsizeof(obj) return freqs, memuse def print_memory_profile(run_gc: bool = True) -> None: if not sys.platform.startswith('win'): import resource system_memuse = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss else: system_memuse = -1 # TODO: Support this on Windows if run_gc: gc.collect() freqs, memuse = collect_memory_stats() print('%7s %7s %7s %s' % ('Freq', 'Size(k)', 'AvgSize', 'Type')) print('-------------------------------------------') totalmem = 0 i = 0 for n, mem in sorted(memuse.items(), key=lambda x: -x[1]): f = freqs[n] if i < 50: print('%7d %7d %7.0f %s' % (f, mem // 1024, mem / f, n)) i += 1 totalmem += mem print() print('Mem usage RSS ', system_memuse // 1024) print('Total reachable ', totalmem // 1024) def find_recursive_objects(objs: List[object]) -> None: """Find additional objects referenced by objs and append them to objs. We use this since gc.get_objects() does not return objects without pointers in them such as strings. """ seen = set(id(o) for o in objs) def visit(o: object) -> None: if id(o) not in seen: objs.append(o) seen.add(id(o)) for obj in objs[:]: if type(obj) is FakeInfo: # Processing these would cause a crash. 
continue if type(obj) in (dict, defaultdict): for key, val in cast(Dict[object, object], obj).items(): visit(key) visit(val) if type(obj) in (list, tuple, set): for x in cast(Iterable[object], obj): visit(x) if hasattr(obj, '__slots__'): for base in type.mro(type(obj)): for slot in getattr(base, '__slots__', ()): if hasattr(obj, slot): visit(getattr(obj, slot)) mypy-0.761/mypy/message_registry.py0000644€tŠÔÚ€2›s®0000002026613576752246023652 0ustar jukkaDROPBOX\Domain Users00000000000000"""Message constants for generating error messages during type checking. Literal messages should be defined as constants in this module so they won't get out of sync if used in more than one place, and so that they can be easily introspected. These messages are ultimately consumed by messages.MessageBuilder.fail(). For more non-trivial message generation, add a method to MessageBuilder and call this instead. """ from typing_extensions import Final # Invalid types INVALID_TYPE_RAW_ENUM_VALUE = "Invalid type: try using Literal[{}.{}] instead?" 
# type: Final

# Type checker error message constants
NO_RETURN_VALUE_EXPECTED = 'No return value expected'  # type: Final
MISSING_RETURN_STATEMENT = 'Missing return statement'  # type: Final
INVALID_IMPLICIT_RETURN = 'Implicit return in function which does not return'  # type: Final
INCOMPATIBLE_RETURN_VALUE_TYPE = 'Incompatible return value type'  # type: Final
RETURN_VALUE_EXPECTED = 'Return value expected'  # type: Final
NO_RETURN_EXPECTED = 'Return statement in function which does not return'  # type: Final
INVALID_EXCEPTION = 'Exception must be derived from BaseException'  # type: Final
INVALID_EXCEPTION_TYPE = 'Exception type must be derived from BaseException'  # type: Final
RETURN_IN_ASYNC_GENERATOR = "'return' with value in async generator is not allowed"  # type: Final
INVALID_RETURN_TYPE_FOR_GENERATOR = \
    'The return type of a generator function should be "Generator"' \
    ' or one of its supertypes'  # type: Final
INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR = \
    'The return type of an async generator function should be "AsyncGenerator" or one of its ' \
    'supertypes'  # type: Final
INVALID_GENERATOR_RETURN_ITEM_TYPE = \
    'The return type of a generator function must be None in' \
    ' its third type parameter in Python 2'  # type: Final
YIELD_VALUE_EXPECTED = 'Yield value expected'  # type: Final
INCOMPATIBLE_TYPES = 'Incompatible types'  # type: Final
INCOMPATIBLE_TYPES_IN_ASSIGNMENT = 'Incompatible types in assignment'  # type: Final
INCOMPATIBLE_REDEFINITION = 'Incompatible redefinition'  # type: Final
INCOMPATIBLE_TYPES_IN_AWAIT = 'Incompatible types in "await"'  # type: Final
INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER = \
    'Incompatible types in "async with" for "__aenter__"'  # type: Final
INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT = \
    'Incompatible types in "async with" for "__aexit__"'  # type: Final
INCOMPATIBLE_TYPES_IN_ASYNC_FOR = 'Incompatible types in "async for"'  # type: Final

INCOMPATIBLE_TYPES_IN_YIELD = 'Incompatible types in "yield"'  # type: Final
INCOMPATIBLE_TYPES_IN_YIELD_FROM = 'Incompatible types in "yield from"'  # type: Final
INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION = \
    'Incompatible types in string interpolation'  # type: Final
MUST_HAVE_NONE_RETURN_TYPE = 'The return type of "{}" must be None'  # type: Final
INVALID_TUPLE_INDEX_TYPE = 'Invalid tuple index type'  # type: Final
TUPLE_INDEX_OUT_OF_RANGE = 'Tuple index out of range'  # type: Final
INVALID_SLICE_INDEX = 'Slice index must be an integer or None'  # type: Final
CANNOT_INFER_LAMBDA_TYPE = 'Cannot infer type of lambda'  # type: Final
CANNOT_ACCESS_INIT = 'Cannot access "__init__" directly'  # type: Final
NON_INSTANCE_NEW_TYPE = '"__new__" must return a class instance (got {})'  # type: Final
INVALID_NEW_TYPE = 'Incompatible return type for "__new__"'  # type: Final
BAD_CONSTRUCTOR_TYPE = 'Unsupported decorated constructor type'  # type: Final
CANNOT_ASSIGN_TO_METHOD = 'Cannot assign to a method'  # type: Final
CANNOT_ASSIGN_TO_TYPE = 'Cannot assign to a type'  # type: Final
INCONSISTENT_ABSTRACT_OVERLOAD = \
    'Overloaded method has both abstract and non-abstract variants'  # type: Final
MULTIPLE_OVERLOADS_REQUIRED = 'Single overload definition, multiple required'  # type: Final
READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE = \
    'Read-only property cannot override read-write property'  # type: Final
FORMAT_REQUIRES_MAPPING = 'Format requires a mapping'  # type: Final
RETURN_TYPE_CANNOT_BE_CONTRAVARIANT = \
    "Cannot use a contravariant type variable as return type"  # type: Final
FUNCTION_PARAMETER_CANNOT_BE_COVARIANT = \
    "Cannot use a covariant type variable as a parameter"  # type: Final
INCOMPATIBLE_IMPORT_OF = "Incompatible import of"  # type: Final
FUNCTION_TYPE_EXPECTED = "Function is missing a type annotation"  # type: Final
ONLY_CLASS_APPLICATION = "Type application is only supported for generic classes"  # type: Final
RETURN_TYPE_EXPECTED = "Function is missing a return type annotation"  # type: Final
ARGUMENT_TYPE_EXPECTED = \
    "Function is missing a type annotation for one or more arguments"  # type: Final
KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE = \
    'Keyword argument only valid with "str" key type in call to "dict"'  # type: Final
ALL_MUST_BE_SEQ_STR = 'Type of __all__ must be {}, not {}'  # type: Final
INVALID_TYPEDDICT_ARGS = \
    'Expected keyword arguments, {...}, or dict(...) in TypedDict constructor'  # type: Final
TYPEDDICT_KEY_MUST_BE_STRING_LITERAL = \
    'Expected TypedDict key to be string literal'  # type: Final
MALFORMED_ASSERT = 'Assertion is always true, perhaps remove parentheses?'  # type: Final
DUPLICATE_TYPE_SIGNATURES = 'Function has duplicate type signatures'  # type: Final
DESCRIPTOR_SET_NOT_CALLABLE = "{}.__set__ is not callable"  # type: Final
DESCRIPTOR_GET_NOT_CALLABLE = "{}.__get__ is not callable"  # type: Final
MODULE_LEVEL_GETATTRIBUTE = '__getattribute__ is not valid at the module level'  # type: Final

# Generic
GENERIC_INSTANCE_VAR_CLASS_ACCESS = \
    'Access to generic instance variables via class is ambiguous'  # type: Final
GENERIC_CLASS_VAR_ACCESS = \
    'Access to generic class variables is ambiguous'  # type: Final
BARE_GENERIC = 'Missing type parameters for generic type {}'  # type: Final
IMPLICIT_GENERIC_ANY_BUILTIN = \
    'Implicit generic "Any". Use "{}" and specify generic parameters'  # type: Final

# TypeVar
INCOMPATIBLE_TYPEVAR_VALUE = 'Value of type variable "{}" of {} cannot be {}'  # type: Final
CANNOT_USE_TYPEVAR_AS_EXPRESSION = \
    'Type variable "{}.{}" cannot be used as an expression'  # type: Final

# Super
TOO_MANY_ARGS_FOR_SUPER = 'Too many arguments for "super"'  # type: Final
TOO_FEW_ARGS_FOR_SUPER = 'Too few arguments for "super"'  # type: Final
SUPER_WITH_SINGLE_ARG_NOT_SUPPORTED = '"super" with a single argument not supported'  # type: Final
UNSUPPORTED_ARG_1_FOR_SUPER = 'Unsupported argument 1 for "super"'  # type: Final
UNSUPPORTED_ARG_2_FOR_SUPER = 'Unsupported argument 2 for "super"'  # type: Final
SUPER_VARARGS_NOT_SUPPORTED = 'Varargs not supported with "super"'  # type: Final
SUPER_POSITIONAL_ARGS_REQUIRED = '"super" only accepts positional arguments'  # type: Final
SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1 = \
    'Argument 2 for "super" not an instance of argument 1'  # type: Final
SUPER_OUTSIDE_OF_METHOD_NOT_SUPPORTED = \
    'super() outside of a method is not supported'  # type: Final
SUPER_ENCLOSING_POSITIONAL_ARGS_REQUIRED = \
    'super() requires one or more positional arguments in enclosing function'  # type: Final

# Self-type
MISSING_OR_INVALID_SELF_TYPE = \
    "Self argument missing for a non-static method (or an invalid type for self)"  # type: Final
ERASED_SELF_TYPE_NOT_SUPERTYPE = \
    'The erased type of self "{}" is not a supertype of its class "{}"'  # type: Final
INVALID_SELF_TYPE_OR_EXTRA_ARG = \
    "Invalid type for self, or extra argument type in function annotation"  # type: Final

# Final
CANNOT_INHERIT_FROM_FINAL = 'Cannot inherit from final class "{}"'  # type: Final
DEPENDENT_FINAL_IN_CLASS_BODY = \
    "Final name declared in class body cannot depend on type variables"  # type: Final
CANNOT_ACCESS_FINAL_INSTANCE_ATTR = \
    'Cannot access final instance attribute "{}" on class object'  # type: Final

# ClassVar
CANNOT_OVERRIDE_INSTANCE_VAR = \
    'Cannot override instance variable (previously declared on base class "{}") with class ' \
    'variable'  # type: Final
"{}") with class ' \ 'variable' # type: Final CANNOT_OVERRIDE_CLASS_VAR = \ 'Cannot override class variable (previously declared on base class "{}") with instance ' \ 'variable' # type: Final # Protocol RUNTIME_PROTOCOL_EXPECTED = \ 'Only @runtime_checkable protocols can be used with instance and class checks' # type: Final CANNOT_INSTANTIATE_PROTOCOL = 'Cannot instantiate protocol class "{}"' # type: Final mypy-0.761/mypy/messages.py0000644€tŠÔÚ€2›s®0000027101213576752246022102 0ustar jukkaDROPBOX\Domain Users00000000000000"""Facilities for generating error messages during type checking. Don't add any non-trivial message construction logic to the type checker, as it can compromise clarity and make messages less consistent. Add such logic to this module instead. Literal messages, including those with format args, should be defined as constants in mypy.message_registry. Historically we tried to avoid all message string literals in the type checker but we are moving away from this convention. 
""" from collections import OrderedDict import re import difflib from textwrap import dedent from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple, Set, Optional, Union from typing_extensions import Final from mypy.erasetype import erase_type from mypy.errors import Errors from mypy.types import ( Type, CallableType, Instance, TypeVarType, TupleType, TypedDictType, LiteralType, UnionType, NoneType, AnyType, Overloaded, FunctionLike, DeletedType, TypeType, UninhabitedType, TypeOfAny, UnboundType, PartialType, get_proper_type, ProperType, get_proper_types ) from mypy.typetraverser import TypeTraverserVisitor from mypy.nodes import ( TypeInfo, Context, MypyFile, op_methods, op_methods_to_symbols, FuncDef, reverse_builtin_aliases, ARG_POS, ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, ReturnStmt, NameExpr, Var, CONTRAVARIANT, COVARIANT, SymbolNode, CallExpr, SymbolTable ) from mypy.subtypes import ( is_subtype, find_member, get_member_flags, IS_SETTABLE, IS_CLASSVAR, IS_CLASS_OR_STATIC, ) from mypy.sametypes import is_same_type from mypy.util import unmangle from mypy.errorcodes import ErrorCode from mypy import message_registry, errorcodes as codes ARG_CONSTRUCTOR_NAMES = { ARG_POS: "Arg", ARG_OPT: "DefaultArg", ARG_NAMED: "NamedArg", ARG_NAMED_OPT: "DefaultNamedArg", ARG_STAR: "VarArg", ARG_STAR2: "KwArg", } # type: Final class MessageBuilder: """Helper class for reporting type checker error messages with parameters. The methods of this class need to be provided with the context within a file; the errors member manages the wider context. IDEA: Support a 'verbose mode' that includes full information about types in error messages and that may otherwise produce more detailed error messages. """ # Report errors using this instance. It knows about the current file and # import context. errors = None # type: Errors modules = None # type: Dict[str, MypyFile] # Number of times errors have been disabled. 
disable_count = 0 # Hack to deduplicate error messages from union types disable_type_names = 0 def __init__(self, errors: Errors, modules: Dict[str, MypyFile]) -> None: self.errors = errors self.modules = modules self.disable_count = 0 self.disable_type_names = 0 # # Helpers # def copy(self) -> 'MessageBuilder': new = MessageBuilder(self.errors.copy(), self.modules) new.disable_count = self.disable_count new.disable_type_names = self.disable_type_names return new def clean_copy(self) -> 'MessageBuilder': errors = self.errors.copy() errors.error_info_map = OrderedDict() return MessageBuilder(errors, self.modules) def add_errors(self, messages: 'MessageBuilder') -> None: """Add errors in messages to this builder.""" if self.disable_count <= 0: for errs in messages.errors.error_info_map.values(): for info in errs: self.errors.add_error_info(info) def disable_errors(self) -> None: self.disable_count += 1 def enable_errors(self) -> None: self.disable_count -= 1 def is_errors(self) -> bool: return self.errors.is_errors() def report(self, msg: str, context: Optional[Context], severity: str, *, code: Optional[ErrorCode] = None, file: Optional[str] = None, origin: Optional[Context] = None, offset: int = 0) -> None: """Report an error or note (unless disabled).""" if origin is not None: end_line = origin.end_line elif context is not None: end_line = context.end_line else: end_line = None if self.disable_count <= 0: self.errors.report(context.get_line() if context else -1, context.get_column() if context else -1, msg, severity=severity, file=file, offset=offset, origin_line=origin.get_line() if origin else None, end_line=end_line, code=code) def fail(self, msg: str, context: Optional[Context], *, code: Optional[ErrorCode] = None, file: Optional[str] = None, origin: Optional[Context] = None) -> None: """Report an error message (unless disabled).""" self.report(msg, context, 'error', code=code, file=file, origin=origin) def note(self, msg: str, context: Context, file: 
Optional[str] = None, origin: Optional[Context] = None, offset: int = 0, *, code: Optional[ErrorCode] = None) -> None: """Report a note (unless disabled).""" self.report(msg, context, 'note', file=file, origin=origin, offset=offset, code=code) def note_multiline(self, messages: str, context: Context, file: Optional[str] = None, origin: Optional[Context] = None, offset: int = 0, code: Optional[ErrorCode] = None) -> None: """Report as many notes as lines in the message (unless disabled).""" for msg in messages.splitlines(): self.report(msg, context, 'note', file=file, origin=origin, offset=offset, code=code) # # Specific operations # # The following operations are for generating specific error messages. They # get some information as arguments, and they build an error message based # on them. def has_no_attr(self, original_type: Type, typ: Type, member: str, context: Context, module_symbol_table: Optional[SymbolTable] = None) -> Type: """Report a missing or non-accessible member. original_type is the top-level type on which the error occurred. typ is the actual type that is missing the member. These can be different, e.g., in a union, original_type will be the union and typ will be the specific item in the union that does not have the member attribute. 'module_symbol_table' is passed to this function if the type for which we are trying to get a member was originally a module. The SymbolTable allows us to look up and suggests attributes of the module since they are not directly available on original_type If member corresponds to an operator, use the corresponding operator name in the messages. Return type Any. 
""" original_type = get_proper_type(original_type) typ = get_proper_type(typ) if (isinstance(original_type, Instance) and original_type.type.has_readable_member(member)): self.fail('Member "{}" is not assignable'.format(member), context) elif member == '__contains__': self.fail('Unsupported right operand type for in ({})'.format( format_type(original_type)), context, code=codes.OPERATOR) elif member in op_methods.values(): # Access to a binary operator member (e.g. _add). This case does # not handle indexing operations. for op, method in op_methods.items(): if method == member: self.unsupported_left_operand(op, original_type, context) break elif member == '__neg__': self.fail('Unsupported operand type for unary - ({})'.format( format_type(original_type)), context, code=codes.OPERATOR) elif member == '__pos__': self.fail('Unsupported operand type for unary + ({})'.format( format_type(original_type)), context, code=codes.OPERATOR) elif member == '__invert__': self.fail('Unsupported operand type for ~ ({})'.format( format_type(original_type)), context, code=codes.OPERATOR) elif member == '__getitem__': # Indexed get. # TODO: Fix this consistently in format_type if isinstance(original_type, CallableType) and original_type.is_type_obj(): self.fail('The type {} is not generic and not indexable'.format( format_type(original_type)), context) else: self.fail('Value of type {} is not indexable'.format( format_type(original_type)), context, code=codes.INDEX) elif member == '__setitem__': # Indexed set. self.fail('Unsupported target for indexed assignment', context, code=codes.INDEX) elif member == '__call__': if isinstance(original_type, Instance) and \ (original_type.type.fullname == 'builtins.function'): # "'function' not callable" is a confusing error message. # Explain that the problem is that the type of the function is not known. 
self.fail('Cannot call function of unknown type', context, code=codes.OPERATOR) else: self.fail('{} not callable'.format(format_type(original_type)), context, code=codes.OPERATOR) else: # The non-special case: a missing ordinary attribute. extra = '' if member == '__iter__': extra = ' (not iterable)' elif member == '__aiter__': extra = ' (not async iterable)' if not self.disable_type_names: failed = False if isinstance(original_type, Instance) and original_type.type.names: alternatives = set(original_type.type.names.keys()) if module_symbol_table is not None: alternatives |= {key for key in module_symbol_table.keys()} # in some situations, the member is in the alternatives set # but since we're in this function, we shouldn't suggest it if member in alternatives: alternatives.remove(member) matches = [m for m in COMMON_MISTAKES.get(member, []) if m in alternatives] matches.extend(best_matches(member, alternatives)[:3]) if member == '__aiter__' and matches == ['__iter__']: matches = [] # Avoid misleading suggestion if member == '__div__' and matches == ['__truediv__']: # TODO: Handle differences in division between Python 2 and 3 more cleanly matches = [] if matches: self.fail( '{} has no attribute "{}"; maybe {}?{}'.format( format_type(original_type), member, pretty_or(matches), extra), context, code=codes.ATTR_DEFINED) failed = True if not failed: self.fail( '{} has no attribute "{}"{}'.format( format_type(original_type), member, extra), context, code=codes.ATTR_DEFINED) elif isinstance(original_type, UnionType): # The checker passes "object" in lieu of "None" for attribute # checks, so we manually convert it back. 
typ_format, orig_type_format = format_type_distinctly(typ, original_type) if typ_format == '"object"' and \ any(type(item) == NoneType for item in original_type.items): typ_format = '"None"' self.fail('Item {} of {} has no attribute "{}"{}'.format( typ_format, orig_type_format, member, extra), context, code=codes.UNION_ATTR) return AnyType(TypeOfAny.from_error) def unsupported_operand_types(self, op: str, left_type: Any, right_type: Any, context: Context, *, code: ErrorCode = codes.OPERATOR) -> None: """Report unsupported operand types for a binary operation. Types can be Type objects or strings. """ left_str = '' if isinstance(left_type, str): left_str = left_type else: left_str = format_type(left_type) right_str = '' if isinstance(right_type, str): right_str = right_type else: right_str = format_type(right_type) if self.disable_type_names: msg = 'Unsupported operand types for {} (likely involving Union)'.format(op) else: msg = 'Unsupported operand types for {} ({} and {})'.format( op, left_str, right_str) self.fail(msg, context, code=code) def unsupported_left_operand(self, op: str, typ: Type, context: Context) -> None: if self.disable_type_names: msg = 'Unsupported left operand type for {} (some union)'.format(op) else: msg = 'Unsupported left operand type for {} ({})'.format( op, format_type(typ)) self.fail(msg, context, code=codes.OPERATOR) def not_callable(self, typ: Type, context: Context) -> Type: self.fail('{} not callable'.format(format_type(typ)), context) return AnyType(TypeOfAny.from_error) def untyped_function_call(self, callee: CallableType, context: Context) -> Type: name = callable_name(callee) or '(unknown)' self.fail('Call to untyped function {} in typed context'.format(name), context, code=codes.NO_UNTYPED_CALL) return AnyType(TypeOfAny.from_error) def incompatible_argument(self, n: int, m: int, callee: CallableType, arg_type: Type, arg_kind: int, context: Context, outer_context: Context) -> Optional[ErrorCode]: """Report an error about an 
incompatible argument type. The argument type is arg_type, argument number is n and the callee type is 'callee'. If the callee represents a method that corresponds to an operator, use the corresponding operator name in the messages. Return the error code that used for the argument (multiple error codes are possible). """ arg_type = get_proper_type(arg_type) target = '' callee_name = callable_name(callee) if callee_name is not None: name = callee_name if callee.bound_args and callee.bound_args[0] is not None: base = format_type(callee.bound_args[0]) else: base = extract_type(name) for method, op in op_methods_to_symbols.items(): for variant in method, '__r' + method[2:]: # FIX: do not rely on textual formatting if name.startswith('"{}" of'.format(variant)): if op == 'in' or variant != method: # Reversed order of base/argument. self.unsupported_operand_types(op, arg_type, base, context, code=codes.OPERATOR) else: self.unsupported_operand_types(op, base, arg_type, context, code=codes.OPERATOR) return codes.OPERATOR if name.startswith('"__cmp__" of'): self.unsupported_operand_types("comparison", arg_type, base, context, code=codes.OPERATOR) return codes.INDEX if name.startswith('"__getitem__" of'): self.invalid_index_type(arg_type, callee.arg_types[n - 1], base, context, code=codes.INDEX) return codes.INDEX if name.startswith('"__setitem__" of'): if n == 1: self.invalid_index_type(arg_type, callee.arg_types[n - 1], base, context, code=codes.INDEX) return codes.INDEX else: msg = '{} (expression has type {}, target has type {})' arg_type_str, callee_type_str = format_type_distinctly(arg_type, callee.arg_types[n - 1]) self.fail(msg.format(message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, arg_type_str, callee_type_str), context, code=codes.ASSIGNMENT) return codes.ASSIGNMENT target = 'to {} '.format(name) msg = '' code = codes.MISC notes = [] # type: List[str] if callee_name == '': name = callee_name[1:-1] n -= 1 actual_type_str, expected_type_str = 
format_type_distinctly(arg_type, callee.arg_types[0]) msg = '{} item {} has incompatible type {}; expected {}'.format( name.title(), n, actual_type_str, expected_type_str) code = codes.LIST_ITEM elif callee_name == '': name = callee_name[1:-1] n -= 1 key_type, value_type = cast(TupleType, arg_type).items expected_key_type, expected_value_type = cast(TupleType, callee.arg_types[0]).items # don't increase verbosity unless there is need to do so if is_subtype(key_type, expected_key_type): key_type_str = format_type(key_type) expected_key_type_str = format_type(expected_key_type) else: key_type_str, expected_key_type_str = format_type_distinctly( key_type, expected_key_type) if is_subtype(value_type, expected_value_type): value_type_str = format_type(value_type) expected_value_type_str = format_type(expected_value_type) else: value_type_str, expected_value_type_str = format_type_distinctly( value_type, expected_value_type) msg = '{} entry {} has incompatible type {}: {}; expected {}: {}'.format( name.title(), n, key_type_str, value_type_str, expected_key_type_str, expected_value_type_str) code = codes.DICT_ITEM elif callee_name == '': actual_type_str, expected_type_str = map(strip_quotes, format_type_distinctly(arg_type, callee.arg_types[0])) msg = 'List comprehension has incompatible type List[{}]; expected List[{}]'.format( actual_type_str, expected_type_str) elif callee_name == '': actual_type_str, expected_type_str = map(strip_quotes, format_type_distinctly(arg_type, callee.arg_types[0])) msg = 'Set comprehension has incompatible type Set[{}]; expected Set[{}]'.format( actual_type_str, expected_type_str) elif callee_name == '': actual_type_str, expected_type_str = format_type_distinctly(arg_type, callee.arg_types[n - 1]) msg = ('{} expression in dictionary comprehension has incompatible type {}; ' 'expected type {}').format( 'Key' if n == 1 else 'Value', actual_type_str, expected_type_str) elif callee_name == '': actual_type_str, expected_type_str = 
format_type_distinctly(arg_type, callee.arg_types[0]) msg = 'Generator has incompatible item type {}; expected {}'.format( actual_type_str, expected_type_str) else: try: expected_type = callee.arg_types[m - 1] except IndexError: # Varargs callees expected_type = callee.arg_types[-1] arg_type_str, expected_type_str = format_type_distinctly( arg_type, expected_type, bare=True) if arg_kind == ARG_STAR: arg_type_str = '*' + arg_type_str elif arg_kind == ARG_STAR2: arg_type_str = '**' + arg_type_str # For function calls with keyword arguments, display the argument name rather than the # number. arg_label = str(n) if isinstance(outer_context, CallExpr) and len(outer_context.arg_names) >= n: arg_name = outer_context.arg_names[n - 1] if arg_name is not None: arg_label = '"{}"'.format(arg_name) if (arg_kind == ARG_STAR2 and isinstance(arg_type, TypedDictType) and m <= len(callee.arg_names) and callee.arg_names[m - 1] is not None and callee.arg_kinds[m - 1] != ARG_STAR2): arg_name = callee.arg_names[m - 1] assert arg_name is not None arg_type_str, expected_type_str = format_type_distinctly( arg_type.items[arg_name], expected_type, bare=True) arg_label = '"{}"'.format(arg_name) msg = 'Argument {} {}has incompatible type {}; expected {}'.format( arg_label, target, quote_type_string(arg_type_str), quote_type_string(expected_type_str)) code = codes.ARG_TYPE expected_type = get_proper_type(expected_type) if isinstance(expected_type, UnionType): expected_types = list(expected_type.items) else: expected_types = [expected_type] for type in get_proper_types(expected_types): if isinstance(arg_type, Instance) and isinstance(type, Instance): notes = append_invariance_notes(notes, arg_type, type) self.fail(msg, context, code=code) if notes: for note_msg in notes: self.note(note_msg, context, code=code) return code def incompatible_argument_note(self, original_caller_type: ProperType, callee_type: ProperType, context: Context, code: Optional[ErrorCode]) -> None: if 
(isinstance(original_caller_type, (Instance, TupleType, TypedDictType)) and isinstance(callee_type, Instance) and callee_type.type.is_protocol): self.report_protocol_problems(original_caller_type, callee_type, context, code=code) if (isinstance(callee_type, CallableType) and isinstance(original_caller_type, Instance)): call = find_member('__call__', original_caller_type, original_caller_type, is_operator=True) if call: self.note_call(original_caller_type, call, context, code=code) def invalid_index_type(self, index_type: Type, expected_type: Type, base_str: str, context: Context, *, code: ErrorCode) -> None: index_str, expected_str = format_type_distinctly(index_type, expected_type) self.fail('Invalid index type {} for {}; expected type {}'.format( index_str, base_str, expected_str), context, code=code) def too_few_arguments(self, callee: CallableType, context: Context, argument_names: Optional[Sequence[Optional[str]]]) -> None: if (argument_names is not None and not all(k is None for k in argument_names) and len(argument_names) >= 1): num_positional_args = sum(k is None for k in argument_names) arguments_left = callee.arg_names[num_positional_args:callee.min_args] diff = [k for k in arguments_left if k not in argument_names] if len(diff) == 1: msg = 'Missing positional argument' else: msg = 'Missing positional arguments' callee_name = callable_name(callee) if callee_name is not None and diff and all(d is not None for d in diff): args = '", "'.join(cast(List[str], diff)) msg += ' "{}" in call to {}'.format(args, callee_name) else: msg = 'Too few arguments' + for_function(callee) self.fail(msg, context, code=codes.CALL_ARG) def missing_named_argument(self, callee: CallableType, context: Context, name: str) -> None: msg = 'Missing named argument "{}"'.format(name) + for_function(callee) self.fail(msg, context, code=codes.CALL_ARG) def too_many_arguments(self, callee: CallableType, context: Context) -> None: msg = 'Too many arguments' + for_function(callee) 
self.fail(msg, context, code=codes.CALL_ARG) def too_many_arguments_from_typed_dict(self, callee: CallableType, arg_type: TypedDictType, context: Context) -> None: # Try to determine the name of the extra argument. for key in arg_type.items: if key not in callee.arg_names: msg = 'Extra argument "{}" from **args'.format(key) + for_function(callee) break else: self.too_many_arguments(callee, context) return self.fail(msg, context) def too_many_positional_arguments(self, callee: CallableType, context: Context) -> None: msg = 'Too many positional arguments' + for_function(callee) self.fail(msg, context) def unexpected_keyword_argument(self, callee: CallableType, name: str, arg_type: Type, context: Context) -> None: msg = 'Unexpected keyword argument "{}"'.format(name) + for_function(callee) # Suggest intended keyword, look for type match else fallback on any match. matching_type_args = [] not_matching_type_args = [] for i, kwarg_type in enumerate(callee.arg_types): callee_arg_name = callee.arg_names[i] if callee_arg_name is not None and callee.arg_kinds[i] != ARG_STAR: if is_subtype(arg_type, kwarg_type): matching_type_args.append(callee_arg_name) else: not_matching_type_args.append(callee_arg_name) matches = best_matches(name, matching_type_args) if not matches: matches = best_matches(name, not_matching_type_args) if matches: msg += "; did you mean {}?".format(pretty_or(matches[:3])) self.fail(msg, context, code=codes.CALL_ARG) module = find_defining_module(self.modules, callee) if module: assert callee.definition is not None fname = callable_name(callee) if not fname: # an alias to function with a different name fname = 'Called function' self.note('{} defined here'.format(fname), callee.definition, file=module.path, origin=context, code=codes.CALL_ARG) def duplicate_argument_value(self, callee: CallableType, index: int, context: Context) -> None: self.fail('{} gets multiple values for keyword argument "{}"'. 
format(callable_name(callee) or 'Function', callee.arg_names[index]), context) def does_not_return_value(self, callee_type: Optional[Type], context: Context) -> None: """Report an error about use of an unusable type.""" name = None # type: Optional[str] callee_type = get_proper_type(callee_type) if isinstance(callee_type, FunctionLike): name = callable_name(callee_type) if name is not None: self.fail('{} does not return a value'.format(capitalize(name)), context, code=codes.FUNC_RETURNS_VALUE) else: self.fail('Function does not return a value', context, code=codes.FUNC_RETURNS_VALUE) def deleted_as_rvalue(self, typ: DeletedType, context: Context) -> None: """Report an error about using an deleted type as an rvalue.""" if typ.source is None: s = "" else: s = " '{}'".format(typ.source) self.fail('Trying to read deleted variable{}'.format(s), context) def deleted_as_lvalue(self, typ: DeletedType, context: Context) -> None: """Report an error about using an deleted type as an lvalue. Currently, this only occurs when trying to assign to an exception variable outside the local except: blocks. 
""" if typ.source is None: s = "" else: s = " '{}'".format(typ.source) self.fail('Assignment to variable{} outside except: block'.format(s), context) def no_variant_matches_arguments(self, plausible_targets: List[CallableType], overload: Overloaded, arg_types: List[Type], context: Context, *, code: Optional[ErrorCode] = None) -> None: code = code or codes.CALL_OVERLOAD name = callable_name(overload) if name: name_str = ' of {}'.format(name) else: name_str = '' arg_types_str = ', '.join(format_type(arg) for arg in arg_types) num_args = len(arg_types) if num_args == 0: self.fail('All overload variants{} require at least one argument'.format(name_str), context, code=code) elif num_args == 1: self.fail('No overload variant{} matches argument type {}' .format(name_str, arg_types_str), context, code=code) else: self.fail('No overload variant{} matches argument types {}' .format(name_str, arg_types_str), context, code=code) self.pretty_overload_matches(plausible_targets, overload, context, offset=2, max_items=2, code=code) def wrong_number_values_to_unpack(self, provided: int, expected: int, context: Context) -> None: if provided < expected: if provided == 1: self.fail('Need more than 1 value to unpack ({} expected)'.format(expected), context) else: self.fail('Need more than {} values to unpack ({} expected)'.format( provided, expected), context) elif provided > expected: self.fail('Too many values to unpack ({} expected, {} provided)'.format( expected, provided), context) def type_not_iterable(self, type: Type, context: Context) -> None: self.fail('\'{}\' object is not iterable'.format(type), context) def incompatible_operator_assignment(self, op: str, context: Context) -> None: self.fail('Result type of {} incompatible in assignment'.format(op), context) def overload_signature_incompatible_with_supertype( self, name: str, name_in_super: str, supertype: str, overload: Overloaded, context: Context) -> None: target = self.override_target(name, name_in_super, supertype) 
def signature_incompatible_with_supertype(
        self, name: str, name_in_super: str, supertype: str,
        context: Context) -> None:
    """Report an override whose whole signature conflicts with the supertype's."""
    target = self.override_target(name, name_in_super, supertype)
    msg = 'Signature of "{}" incompatible with {}'.format(name, target)
    self.fail(msg, context, code=codes.OVERRIDE)
def override_target(self, name: str, name_in_super: str,
                    supertype: str) -> str:
    """Return a phrase describing the overridden member for error messages.

    When the member is renamed in the subclass, the supertype's name for
    it is included in the phrase.
    """
    base = 'supertype "{}"'.format(supertype)
    if name == name_in_super:
        return base
    return '"{}" of {}'.format(name_in_super, base)
def unsupported_placeholder(self, placeholder: str, context: Context) -> None:
    """Report an unknown conversion character in a %-style format string."""
    message = 'Unsupported format character \'%s\'' % placeholder
    self.fail(message, context, code=codes.STRING_FORMATTING)
def incompatible_conditional_function_def(self, defn: FuncDef) -> None:
    """Report conditionally defined function variants with differing signatures."""
    msg = 'All conditional function variants must have identical signatures'
    self.fail(msg, defn)
def cant_assign_to_final(self, name: str, attr_assign: bool, ctx: Context) -> None:
    """Warn about a prohibited assignment to a final attribute.

    attr_assign is true when the assignment target is an attribute
    rather than a plain name.
    """
    if attr_assign:
        kind = "attribute"
    else:
        kind = "name"
    self.fail('Cannot assign to final {} "{}"'.format(kind, unmangle(name)), ctx)
def overloaded_signature_will_never_match(self, index1: int, index2: int,
                                          context: Context) -> None:
    """Report an overload item shadowed by an earlier, broader signature."""
    template = ('Overloaded function signature {index2} will never be matched: '
                'signature {index1}\'s parameter type(s) are the same or broader')
    self.fail(template.format(index1=index1, index2=index2), context)
def yield_from_invalid_operand_type(self, expr: Type, context: Context) -> Type:
    """Report an invalid operand for "yield from" and return a fallback type.

    Returns Any (from_error) so type checking can continue past the error.
    """
    # Format the type once (previously format_type was called twice).
    formatted = format_type(expr)
    # For plain "object" the formatted name adds nothing, so show the raw
    # type representation instead (preserves the original behavior).
    text = formatted if formatted != 'object' else expr
    self.fail('"yield from" can\'t be applied to {}'.format(text), context)
    return AnyType(TypeOfAny.from_error)
def explicit_any(self, ctx: Context) -> None:
    """Report a use of "Any" where explicit Any types are disallowed."""
    message = 'Explicit "Any" is not allowed'
    self.fail(message, ctx)
def typeddict_key_cannot_be_deleted(
        self, typ: TypedDictType, item_name: str, context: Context) -> None:
    """Report deletion of a TypedDict key that may not be deleted."""
    if not typ.is_anonymous():
        msg = "Key '{}' of TypedDict {} cannot be deleted".format(
            item_name, format_type(typ))
    else:
        msg = "TypedDict key '{}' cannot be deleted".format(item_name)
    self.fail(msg, context)
def typed_function_untyped_decorator(self, func_name: str, context: Context) -> None:
    """Report a typed function made untyped by an unannotated decorator."""
    message = 'Untyped decorator makes function "{}" untyped'.format(func_name)
    self.fail(message, context)
def redundant_left_operand(self, op_name: str, context: Context) -> None:
    """Report a left operand of a boolean expression that is redundant.

    The operand does not change the truth value of the whole condition.
    'op_name' must be either "and" or "or".
    """
    description = "Left operand of '{}'".format(op_name)
    self.redundant_expr(description, op_name == 'and', context)
def report_protocol_problems(self,
                             subtype: Union[Instance, TupleType, TypedDictType],
                             supertype: Instance,
                             context: Context,
                             *,
                             code: Optional[ErrorCode]) -> None:
    """Report possible protocol conflicts between 'subtype' and 'supertype'.

    This includes missing members, incompatible types, and incompatible
    attribute flags, such as settable vs read-only or class variable vs
    instance variable.
    """
    OFFSET = 4  # Four spaces, so that notes will look like this:
    # note:     'Cls' is missing following 'Proto' members:
    # note:         method, attr
    MAX_ITEMS = 2  # Maximum number of conflicts, missing members, and overloads shown
    # List of special situations where we don't want to report additional problems
    exclusions = {TypedDictType: ['typing.Mapping'],
                  TupleType: ['typing.Iterable', 'typing.Sequence'],
                  Instance: []}  # type: Dict[type, List[str]]
    if supertype.type.fullname in exclusions[type(subtype)]:
        return
    if any(isinstance(tp, UninhabitedType) for tp in get_proper_types(supertype.args)):
        # We don't want to add notes for failed inference (e.g. Iterable[<nothing>]).
        # This will be only confusing a user even more.
        return
    # Tuples and TypedDicts cannot themselves be protocol implementors;
    # compare their Instance fallbacks against the protocol instead.
    if isinstance(subtype, TupleType):
        if not isinstance(subtype.partial_fallback, Instance):
            return
        subtype = subtype.partial_fallback
    elif isinstance(subtype, TypedDictType):
        if not isinstance(subtype.fallback, Instance):
            return
        subtype = subtype.fallback
    # Report missing members
    missing = get_missing_protocol_members(subtype, supertype)
    if (missing and len(missing) < len(supertype.type.protocol_members) and
            len(missing) <= MAX_ITEMS):
        self.note("'{}' is missing following '{}' protocol member{}:"
                  .format(subtype.type.name, supertype.type.name, plural_s(missing)),
                  context, code=code)
        self.note(', '.join(missing), context, offset=OFFSET, code=code)
    elif len(missing) > MAX_ITEMS or len(missing) == len(supertype.type.protocol_members):
        # This is an obviously wrong type: too many missing members
        return
    # Report member type conflicts
    conflict_types = get_conflict_protocol_types(subtype, supertype)
    if conflict_types and (not is_subtype(subtype, erase_type(supertype)) or
                           not subtype.type.defn.type_vars or
                           not supertype.type.defn.type_vars):
        self.note('Following member(s) of {} have '
                  'conflicts:'.format(format_type(subtype)), context, code=code)
        for name, got, exp in conflict_types[:MAX_ITEMS]:
            exp = get_proper_type(exp)
            got = get_proper_type(got)
            if (not isinstance(exp, (CallableType, Overloaded)) or
                    not isinstance(got, (CallableType, Overloaded))):
                # Plain (non-callable) members: a one-line expected/got note.
                self.note('{}: expected {}, got {}'.format(
                    name, *format_type_distinctly(exp, got)),
                    context, offset=OFFSET, code=code)
            else:
                # Callable members: pretty-print both signatures.
                self.note('Expected:', context, offset=OFFSET, code=code)
                if isinstance(exp, CallableType):
                    self.note(pretty_callable(exp), context, offset=2 * OFFSET, code=code)
                else:
                    assert isinstance(exp, Overloaded)
                    self.pretty_overload(exp, context, OFFSET, MAX_ITEMS, code=code)
                self.note('Got:', context, offset=OFFSET, code=code)
                if isinstance(got, CallableType):
                    self.note(pretty_callable(got), context, offset=2 * OFFSET, code=code)
                else:
                    assert isinstance(got, Overloaded)
                    self.pretty_overload(got, context, OFFSET, MAX_ITEMS, code=code)
        self.print_more(conflict_types, context, OFFSET, MAX_ITEMS, code=code)
    # Report flag conflicts (i.e. settable vs read-only etc.)
    conflict_flags = get_bad_protocol_flags(subtype, supertype)
    for name, subflags, superflags in conflict_flags[:MAX_ITEMS]:
        if IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags:
            self.note('Protocol member {}.{} expected instance variable,'
                      ' got class variable'.format(supertype.type.name, name),
                      context, code=code)
        if IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags:
            self.note('Protocol member {}.{} expected class variable,'
                      ' got instance variable'.format(supertype.type.name, name),
                      context, code=code)
        if IS_SETTABLE in superflags and IS_SETTABLE not in subflags:
            self.note('Protocol member {}.{} expected settable variable,'
                      ' got read-only attribute'.format(supertype.type.name, name),
                      context, code=code)
        if IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags:
            self.note('Protocol member {}.{} expected class or static method'
                      .format(supertype.type.name, name),
                      context, code=code)
    self.print_more(conflict_flags, context, OFFSET, MAX_ITEMS, code=code)
def print_more(self, conflicts: Sequence[Any], context: Context,
               offset: int, max_items: int, *,
               code: Optional[ErrorCode] = None) -> None:
    """Emit a note saying how many additional conflicts were omitted, if any."""
    hidden = len(conflicts) - max_items
    if hidden > 0:
        self.note('<{} more conflict(s) not shown>'.format(hidden),
                  context, offset=offset, code=code)
def generate_incompatible_tuple_error(self, lhs_types: List[Type],
                                      rhs_types: List[Type],
                                      context: Context,
                                      msg: str = message_registry.INCOMPATIBLE_TYPES,
                                      code: Optional[ErrorCode] = None) -> None:
    """Generate error message for individual incompatible tuple pairs.

    Emits a single summary error plus a note for each of the first three
    item-wise incompatibilities; further ones are only counted.
    """
    error_cnt = 0
    notes = []  # type: List[str]
    for i, (lhs_t, rhs_t) in enumerate(zip(lhs_types, rhs_types)):
        if not is_subtype(lhs_t, rhs_t):
            if error_cnt < 3:
                notes.append('Expression tuple item {} has type "{}"; "{}" expected; '
                             .format(str(i), format_type_bare(rhs_t),
                                     format_type_bare(lhs_t)))
            error_cnt += 1
    error_msg = msg + ' ({} tuple items are incompatible'.format(str(error_cnt))
    if error_cnt - 3 > 0:
        error_msg += '; {} items are omitted)'.format(str(error_cnt - 3))
    else:
        error_msg += ')'
    self.fail(error_msg, context, code=code)
    for note in notes:
        self.note(note, context, code=code)
        # quoted.  We use a regex to match strings with variable contents.
        return type_string
    return '"{}"'.format(type_string)


def format_type_inner(typ: Type,
                      verbosity: int,
                      fullnames: Optional[Set[str]]) -> str:
    """
    Convert a type to a relatively short string suitable for error messages.

    Args:
      verbosity: a coarse grained control on the verbosity of the type
      fullnames: a set of names that should be printed in full
    """
    def format(typ: Type) -> str:
        # Recursive helper: format a nested type with the same settings.
        return format_type_inner(typ, verbosity, fullnames)

    # TODO: show type alias names in errors.
    typ = get_proper_type(typ)

    if isinstance(typ, Instance):
        itype = typ
        # Get the short name of the type.
        if itype.type.fullname in ('types.ModuleType',
                                   '_importlib_modulespec.ModuleType'):
            # Make some common error messages simpler and tidier.
            return 'Module'
        if verbosity >= 2 or (fullnames and itype.type.fullname in fullnames):
            base_str = itype.type.fullname
        else:
            base_str = itype.type.name
        if itype.args == []:
            # No type arguments, just return the type name
            return base_str
        elif itype.type.fullname == 'builtins.tuple':
            # Homogeneous variable-length tuple.
            item_type_str = format(itype.args[0])
            return 'Tuple[{}, ...]'.format(item_type_str)
        elif itype.type.fullname in reverse_builtin_aliases:
            # Prefer the typing-module alias (List, Dict, ...) for builtins.
            alias = reverse_builtin_aliases[itype.type.fullname]
            alias = alias.split('.')[-1]
            items = [format(arg) for arg in itype.args]
            return '{}[{}]'.format(alias, ', '.join(items))
        else:
            # There are type arguments. Convert the arguments to strings.
            a = []  # type: List[str]
            for arg in itype.args:
                a.append(format(arg))
            s = ', '.join(a)
            return '{}[{}]'.format(base_str, s)
    elif isinstance(typ, TypeVarType):
        # This is similar to non-generic instance types.
        return typ.name
    elif isinstance(typ, TupleType):
        # Prefer the name of the fallback class (if not tuple), as it's more informative.
        if typ.partial_fallback.type.fullname != 'builtins.tuple':
            return format(typ.partial_fallback)
        items = []
        for t in typ.items:
            items.append(format(t))
        s = 'Tuple[{}]'.format(', '.join(items))
        if len(s) < 400:
            return s
        else:
            # NOTE(review): this format string appears truncated in this copy
            # (upstream uses an abbreviation such as '<tuple: {} items>') --
            # verify against the original source.
            return ''.format(len(items))
    elif isinstance(typ, TypedDictType):
        # If the TypedDictType is named, return the name
        if not typ.is_anonymous():
            return format(typ.fallback)
        items = []
        for (item_name, item_type) in typ.items.items():
            # '?' marks keys that are not required.
            modifier = '' if item_name in typ.required_keys else '?'
            items.append('{!r}{}: {}'.format(item_name,
                                             modifier,
                                             format(item_type)))
        s = 'TypedDict({{{}}})'.format(', '.join(items))
        return s
    elif isinstance(typ, LiteralType):
        if typ.is_enum_literal():
            underlying_type = format(typ.fallback)
            return 'Literal[{}.{}]'.format(underlying_type, typ.value)
        else:
            return str(typ)
    elif isinstance(typ, UnionType):
        # Only print Unions as Optionals if the Optional wouldn't have to contain another Union
        print_as_optional = (len(typ.items) -
                             sum(isinstance(get_proper_type(t), NoneType)
                                 for t in typ.items) == 1)
        if print_as_optional:
            rest = [t for t in typ.items
                    if not isinstance(get_proper_type(t), NoneType)]
            return 'Optional[{}]'.format(format(rest[0]))
        else:
            items = []
            for t in typ.items:
                items.append(format(t))
            s = 'Union[{}]'.format(', '.join(items))
            if len(s) < 400:
                return s
            else:
                # NOTE(review): format string appears truncated in this copy
                # (likely '<union: {} items>' upstream) -- verify.
                return ''.format(len(items))
    elif isinstance(typ, NoneType):
        return 'None'
    elif isinstance(typ, AnyType):
        return 'Any'
    elif isinstance(typ, DeletedType):
        # NOTE(review): literal appears truncated in this copy -- verify.
        return ''
    elif isinstance(typ, UninhabitedType):
        if typ.is_noreturn:
            return 'NoReturn'
        else:
            # NOTE(review): literal appears truncated in this copy
            # (likely '<nothing>' upstream) -- verify.
            return ''
    elif isinstance(typ, TypeType):
        return 'Type[{}]'.format(format(typ.item))
    elif isinstance(typ, FunctionLike):
        func = typ
        if func.is_type_obj():
            # The type of a type object type can be derived from the
            # return type (this always works).
            return format(TypeType.make_normalized(erase_type(func.items()[0].ret_type)))
        elif isinstance(func, CallableType):
            return_type = format(func.ret_type)
            if func.is_ellipsis_args:
                return 'Callable[..., {}]'.format(return_type)
            arg_strings = []
            for arg_name, arg_type, arg_kind in zip(
                    func.arg_names, func.arg_types, func.arg_kinds):
                if (arg_kind == ARG_POS and arg_name is None
                        or verbosity == 0 and arg_kind in (ARG_POS, ARG_OPT)):
                    # Terse form: just the argument type.
                    arg_strings.append(format(arg_type))
                else:
                    # Verbose form: wrap in the Arg/DefaultArg/... constructor.
                    constructor = ARG_CONSTRUCTOR_NAMES[arg_kind]
                    if arg_kind in (ARG_STAR, ARG_STAR2) or arg_name is None:
                        arg_strings.append("{}({})".format(
                            constructor,
                            format(arg_type)))
                    else:
                        arg_strings.append("{}({}, {})".format(
                            constructor,
                            format(arg_type),
                            repr(arg_name)))

            return 'Callable[[{}], {}]'.format(", ".join(arg_strings), return_type)
        else:
            # Use a simple representation for function types; proper
            # function types may result in long and difficult-to-read
            # error messages.
            return 'overloaded function'
    elif isinstance(typ, UnboundType):
        return str(typ)
    elif typ is None:
        raise RuntimeError('Type is None')
    else:
        # Default case; we simply have to return something meaningful here.
        return 'object'


def collect_all_instances(t: Type) -> List[Instance]:
    """Return all instances that `t` contains (including `t`).

    This is similar to collect_all_inner_types from typeanal but only
    returns instances and will recurse into fallbacks.
    """
    visitor = CollectAllInstancesQuery()
    t.accept(visitor)
    return visitor.instances


class CollectAllInstancesQuery(TypeTraverserVisitor):
    """Type visitor that accumulates every Instance it encounters."""

    def __init__(self) -> None:
        self.instances = []  # type: List[Instance]

    def visit_instance(self, t: Instance) -> None:
        self.instances.append(t)
        # Keep traversing into type arguments / fallbacks.
        super().visit_instance(t)


def find_type_overlaps(*types: Type) -> Set[str]:
    """Return a set of fullnames that share a short name and appear in either type.

    This is used to ensure that distinct types with the same short name are printed
    with their fullname.
    """
    d = {}  # type: Dict[str, Set[str]]
    for type in types:
        for inst in collect_all_instances(type):
            # Group fullnames by short name; >1 entry means an ambiguity.
            d.setdefault(inst.type.name, set()).add(inst.type.fullname)
    overlaps = set()  # type: Set[str]
    for fullnames in d.values():
        if len(fullnames) > 1:
            overlaps.update(fullnames)
    return overlaps


def format_type(typ: Type, verbosity: int = 0) -> str:
    """
    Convert a type to a relatively short string suitable for error messages.

    `verbosity` is a coarse grained control on the verbosity of the type

    This function returns a string appropriate for unmodified use in error messages;
    this means that it will be quoted in most cases.  If modification of the formatted
    string is required, callers should use format_type_bare.
    """
    return quote_type_string(format_type_bare(typ, verbosity))


def format_type_bare(typ: Type,
                     verbosity: int = 0,
                     fullnames: Optional[Set[str]] = None) -> str:
    """
    Convert a type to a relatively short string suitable for error messages.

    `verbosity` is a coarse grained control on the verbosity of the type
    `fullnames` specifies a set of names that should be printed in full

    This function will return an unquoted string.  If a caller doesn't need to
    perform post-processing on the string output, format_type should be used
    instead.  (The caller may want to use quote_type_string after processing
    has happened, to maintain consistent quoting in messages.)
    """
    # NOTE(review): the `fullnames` parameter is accepted but not used here --
    # find_type_overlaps(typ) is always passed instead; verify intent upstream.
    return format_type_inner(typ, verbosity, find_type_overlaps(typ))


def format_type_distinctly(type1: Type, type2: Type, bare: bool = False) -> Tuple[str, str]:
    """Jointly format a pair of types to distinct strings.

    Increase the verbosity of the type strings until they become distinct
    while also requiring that distinct types with the same short name are
    formatted distinctly.

    By default, the returned strings are created using format_type() and will be
    quoted accordingly.
    If ``bare`` is True, the returned strings will not be quoted; callers who need to
    do post-processing of the strings before quoting them (such as prepending * or **)
    should use this.
    """
    overlapping = find_type_overlaps(type1, type2)
    # Try verbosity 0, then 1; stop as soon as the two renderings differ.
    for verbosity in range(2):
        str1 = format_type_inner(type1, verbosity=verbosity, fullnames=overlapping)
        str2 = format_type_inner(type2, verbosity=verbosity, fullnames=overlapping)
        if str1 != str2:
            break
    if bare:
        return (str1, str2)
    else:
        return (quote_type_string(str1), quote_type_string(str2))


def pretty_callable(tp: CallableType) -> str:
    """Return a nice easily-readable representation of a callable type.
    For example:
        def [T <: int] f(self, x: int, y: T) -> None
    """
    s = ''
    asterisk = False
    for i in range(len(tp.arg_types)):
        if s:
            s += ', '
        if tp.arg_kinds[i] in (ARG_NAMED, ARG_NAMED_OPT) and not asterisk:
            # First keyword-only argument: emit the bare '*' separator once.
            s += '*, '
            asterisk = True
        if tp.arg_kinds[i] == ARG_STAR:
            s += '*'
            asterisk = True
        if tp.arg_kinds[i] == ARG_STAR2:
            s += '**'
        name = tp.arg_names[i]
        if name:
            s += name + ': '
        s += format_type_bare(tp.arg_types[i])
        if tp.arg_kinds[i] in (ARG_OPT, ARG_NAMED_OPT):
            # Arguments with defaults are shown with '= ...'.
            s += ' = ...'
# If we got a "special arg" (i.e: self, cls, etc...), prepend it to the arg list if isinstance(tp.definition, FuncDef) and tp.definition.name is not None: definition_args = tp.definition.arg_names if definition_args and tp.arg_names != definition_args \ and len(definition_args) > 0: if s: s = ', ' + s s = definition_args[0] + s s = '{}({})'.format(tp.definition.name, s) elif tp.name: first_arg = tp.def_extras.get('first_arg') if first_arg: if s: s = ', ' + s s = first_arg + s s = '{}({})'.format(tp.name.split()[0], s) # skip "of Class" part else: s = '({})'.format(s) s += ' -> ' + format_type_bare(tp.ret_type) if tp.variables: tvars = [] for tvar in tp.variables: upper_bound = get_proper_type(tvar.upper_bound) if (isinstance(upper_bound, Instance) and upper_bound.type.fullname != 'builtins.object'): tvars.append('{} <: {}'.format(tvar.name, format_type_bare(upper_bound))) elif tvar.values: tvars.append('{} in ({})' .format(tvar.name, ', '.join([format_type_bare(tp) for tp in tvar.values]))) else: tvars.append(tvar.name) s = '[{}] {}'.format(', '.join(tvars), s) return 'def {}'.format(s) def variance_string(variance: int) -> str: if variance == COVARIANT: return 'covariant' elif variance == CONTRAVARIANT: return 'contravariant' else: return 'invariant' def get_missing_protocol_members(left: Instance, right: Instance) -> List[str]: """Find all protocol members of 'right' that are not implemented (i.e. completely missing) in 'left'. """ assert right.type.is_protocol missing = [] # type: List[str] for member in right.type.protocol_members: if not find_member(member, left, left): missing.append(member) return missing def get_conflict_protocol_types(left: Instance, right: Instance) -> List[Tuple[str, Type, Type]]: """Find members that are defined in 'left' but have incompatible types. Return them as a list of ('member', 'got', 'expected'). 
    """
    assert right.type.is_protocol
    conflicts = []  # type: List[Tuple[str, Type, Type]]
    for member in right.type.protocol_members:
        if member in ('__init__', '__new__'):
            # Constructors are not part of protocol compatibility.
            continue
        supertype = find_member(member, right, left)
        assert supertype is not None
        subtype = find_member(member, left, left)
        if not subtype:
            continue
        is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True)
        if IS_SETTABLE in get_member_flags(member, right.type):
            # Settable members must be compatible in both directions (invariant).
            is_compat = is_compat and is_subtype(supertype, subtype)
        if not is_compat:
            conflicts.append((member, subtype, supertype))
    return conflicts


def get_bad_protocol_flags(left: Instance, right: Instance
                           ) -> List[Tuple[str, Set[int], Set[int]]]:
    """Return all incompatible attribute flags for members that are present in both
    'left' and 'right'.
    """
    assert right.type.is_protocol
    all_flags = []  # type: List[Tuple[str, Set[int], Set[int]]]
    for member in right.type.protocol_members:
        if find_member(member, left, left):
            item = (member,
                    get_member_flags(member, left.type),
                    get_member_flags(member, right.type))
            all_flags.append(item)
    bad_flags = []
    for name, subflags, superflags in all_flags:
        # A flag combination is bad if class-var-ness disagrees, or if the
        # protocol requires settable / class-or-static and the subtype lacks it.
        if (IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags or
                IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags or
                IS_SETTABLE in superflags and IS_SETTABLE not in subflags or
                IS_CLASS_OR_STATIC in superflags and
                IS_CLASS_OR_STATIC not in subflags):
            bad_flags.append((name, subflags, superflags))
    return bad_flags


def capitalize(s: str) -> str:
    """Capitalize the first character of a string."""
    if s == '':
        return ''
    else:
        return s[0].upper() + s[1:]


def extract_type(name: str) -> str:
    """If the argument is the name of a method (of form C.m), return
    the type portion in quotes (e.g. "y"). Otherwise, return the string
    unmodified.
    """
    name = re.sub('^"[a-zA-Z0-9_]+" of ', '', name)
    return name


def strip_quotes(s: str) -> str:
    """Strip a double quote at the beginning and end of the string, if any."""
    s = re.sub('^"', '', s)
    s = re.sub('"$', '', s)
    return s


def plural_s(s: Union[int, Sequence[Any]]) -> str:
    """Return 's' if the count (or length of the sequence) is > 1, else ''."""
    count = s if isinstance(s, int) else len(s)
    if count > 1:
        return 's'
    else:
        return ''


def format_string_list(lst: List[str]) -> str:
    """Join strings for display: 'a', 'a and b', or an abbreviated list."""
    assert len(lst) > 0
    if len(lst) == 1:
        return lst[0]
    elif len(lst) <= 5:
        return '%s and %s' % (', '.join(lst[:-1]), lst[-1])
    else:
        # More than 5 entries: show the first two, the last, and a count.
        return '%s, ... and %s (%i methods suppressed)' % (
            ', '.join(lst[:2]), lst[-1], len(lst) - 3)


def format_item_name_list(s: Iterable[str]) -> str:
    """Render item names as a parenthesized, quoted list (max 5 shown)."""
    lst = list(s)
    if len(lst) <= 5:
        return '(' + ', '.join(["'%s'" % name for name in lst]) + ')'
    else:
        return '(' + ', '.join(["'%s'" % name for name in lst[:5]]) + ', ...)'


def callable_name(type: FunctionLike) -> Optional[str]:
    """Return the quoted display name of a callable, or None/raw name.

    Names starting with '<' (synthetic names) are returned unquoted.
    """
    name = type.get_name()
    if name is not None and name[0] != '<':
        return '"{}"'.format(name).replace(' of ', '" of "')
    return name


def for_function(callee: CallableType) -> str:
    """Return ' for "name"' suffix text for a callable, or '' if unnamed."""
    name = callable_name(callee)
    if name is not None:
        return ' for {}'.format(name)
    return ''


def find_defining_module(modules: Dict[str, MypyFile], typ: CallableType) -> Optional[MypyFile]:
    """Look up the module that defines a callable, by peeling its fullname.

    Tries progressively shorter dotted prefixes of the definition's fullname
    until one matches a known module.
    """
    if not typ.definition:
        return None
    fullname = typ.definition.fullname
    if fullname is not None and '.' in fullname:
        for i in range(fullname.count('.')):
            module_name = fullname.rsplit('.', i + 1)[0]
            try:
                return modules[module_name]
            except KeyError:
                pass
        assert False, "Couldn't determine module from CallableType"
    return None


def temp_message_builder() -> MessageBuilder:
    """Return a message builder usable for throwaway errors (which may not format properly)."""
    return MessageBuilder(Errors(), {})


# For hard-coding suggested missing member alternatives.
COMMON_MISTAKES = { 'add': ('append', 'extend'), } # type: Final[Dict[str, Sequence[str]]] def best_matches(current: str, options: Iterable[str]) -> List[str]: ratios = {v: difflib.SequenceMatcher(a=current, b=v).ratio() for v in options} return sorted((o for o in options if ratios[o] > 0.75), reverse=True, key=lambda v: (ratios[v], v)) def pretty_or(args: List[str]) -> str: quoted = ['"' + a + '"' for a in args] if len(quoted) == 1: return quoted[0] if len(quoted) == 2: return "{} or {}".format(quoted[0], quoted[1]) return ", ".join(quoted[:-1]) + ", or " + quoted[-1] def append_invariance_notes(notes: List[str], arg_type: Instance, expected_type: Instance) -> List[str]: """Explain that the type is invariant and give notes for how to solve the issue.""" invariant_type = '' covariant_suggestion = '' if (arg_type.type.fullname == 'builtins.list' and expected_type.type.fullname == 'builtins.list' and is_subtype(arg_type.args[0], expected_type.args[0])): invariant_type = 'List' covariant_suggestion = 'Consider using "Sequence" instead, which is covariant' elif (arg_type.type.fullname == 'builtins.dict' and expected_type.type.fullname == 'builtins.dict' and is_same_type(arg_type.args[0], expected_type.args[0]) and is_subtype(arg_type.args[1], expected_type.args[1])): invariant_type = 'Dict' covariant_suggestion = ('Consider using "Mapping" instead, ' 'which is covariant in the value type') if invariant_type and covariant_suggestion: notes.append( '"{}" is invariant -- see '.format(invariant_type) + 'http://mypy.readthedocs.io/en/latest/common_issues.html#variance') notes.append(covariant_suggestion) return notes def make_inferred_type_note(context: Context, subtype: Type, supertype: Type, supertype_str: str) -> str: """Explain that the user may have forgotten to type a variable. The user does not expect an error if the inferred container type is the same as the return type of a function and the argument type(s) are a subtype of the argument type(s) of the return type. 
    This note suggests that they add a type annotation with the return type instead
    of relying on the inferred type.
    """
    subtype = get_proper_type(subtype)
    supertype = get_proper_type(supertype)
    # Only applies to `return <inferred variable>` where both types are the
    # same generic class instantiated with different arguments.
    if (isinstance(subtype, Instance) and
            isinstance(supertype, Instance) and
            subtype.type.fullname == supertype.type.fullname and
            subtype.args and
            supertype.args and
            isinstance(context, ReturnStmt) and
            isinstance(context.expr, NameExpr) and
            isinstance(context.expr.node, Var) and
            context.expr.node.is_inferred):
        for subtype_arg, supertype_arg in zip(subtype.args, supertype.args):
            if not is_subtype(subtype_arg, supertype_arg):
                return ''
        var_name = context.expr.name
        return 'Perhaps you need a type annotation for "{}"? Suggestion: {}'.format(
            var_name, supertype_str)
    return ''


def format_key_list(keys: List[str], *, short: bool = False) -> str:
    """Format TypedDict key names for an error message (handles 0/1/many)."""
    reprs = [repr(key) for key in keys]
    td = '' if short else 'TypedDict '
    if len(keys) == 0:
        return 'no {}keys'.format(td)
    elif len(keys) == 1:
        return '{}key {}'.format(td, reprs[0])
    else:
        return '{}keys ({})'.format(td, ', '.join(reprs))
mypy-0.761/mypy/metastore.py0000644€tŠÔÚ€2›s®0000001501113576752246022271 0ustar jukkaDROPBOX\Domain Users00000000000000"""Interfaces for accessing metadata.

We provide two implementations.
 * The "classic" file system implementation, which uses a directory
   structure of files.
 * A hokey sqlite backed implementation, which basically simulates
   the file system in an effort to work around poor file system performance
   on OS X.
"""

import binascii
import os
import time

from abc import abstractmethod
from typing import List, Iterable, Any, Optional
from typing_extensions import TYPE_CHECKING
if TYPE_CHECKING:
    # We avoid importing sqlite3 unless we are using it so we can mostly work
    # on semi-broken pythons that are missing it.
    import sqlite3


class MetadataStore:
    """Generic interface for metadata storage."""

    @abstractmethod
    def getmtime(self, name: str) -> float:
        """Read the mtime of a metadata entry..
        Raises FileNotFound if the entry does not exist.
        """
        pass

    @abstractmethod
    def read(self, name: str) -> str:
        """Read the contents of a metadata entry.

        Raises FileNotFound if the entry does not exist.
        """
        pass

    @abstractmethod
    def write(self, name: str, data: str, mtime: Optional[float] = None) -> bool:
        """Write a metadata entry.

        If mtime is specified, set it as the mtime of the entry. Otherwise,
        the current time is used.

        Returns True if the entry is successfully written, False otherwise.
        """

    @abstractmethod
    def remove(self, name: str) -> None:
        """Delete a metadata entry"""
        pass

    @abstractmethod
    def commit(self) -> None:
        """If the backing store requires a commit, do it.

        But N.B. that this is not *guaranteed* to do anything, and
        there is no guarantee that changes are not made until it is
        called.
        """
        pass

    @abstractmethod
    def list_all(self) -> Iterable[str]: ...


def random_string() -> str:
    """Return a random 16-hex-character string (used for temp file names)."""
    return binascii.hexlify(os.urandom(8)).decode('ascii')


class FilesystemMetadataStore(MetadataStore):
    """Metadata store backed by plain files in a cache directory."""

    def __init__(self, cache_dir_prefix: str) -> None:
        # We check startswith instead of equality because the version
        # will have already been appended by the time the cache dir is
        # passed here.
        if cache_dir_prefix.startswith(os.devnull):
            # A devnull prefix disables the store entirely.
            self.cache_dir_prefix = None
        else:
            self.cache_dir_prefix = cache_dir_prefix

    def getmtime(self, name: str) -> float:
        if not self.cache_dir_prefix:
            raise FileNotFoundError()

        return int(os.path.getmtime(os.path.join(self.cache_dir_prefix, name)))

    def read(self, name: str) -> str:
        assert os.path.normpath(name) != os.path.abspath(name), "Don't use absolute paths!"

        if not self.cache_dir_prefix:
            raise FileNotFoundError()

        with open(os.path.join(self.cache_dir_prefix, name), 'r') as f:
            return f.read()

    def write(self, name: str, data: str, mtime: Optional[float] = None) -> bool:
        assert os.path.normpath(name) != os.path.abspath(name), "Don't use absolute paths!"
        if not self.cache_dir_prefix:
            return False

        path = os.path.join(self.cache_dir_prefix, name)
        # Write-then-rename for atomic replacement of the entry.
        tmp_filename = path + '.' + random_string()
        try:
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(tmp_filename, 'w') as f:
                f.write(data)
            os.replace(tmp_filename, path)
            if mtime is not None:
                os.utime(path, times=(mtime, mtime))

        except os.error:
            return False
        return True

    def remove(self, name: str) -> None:
        if not self.cache_dir_prefix:
            raise FileNotFoundError()

        os.remove(os.path.join(self.cache_dir_prefix, name))

    def commit(self) -> None:
        # Plain files need no commit step.
        pass

    def list_all(self) -> Iterable[str]:
        if not self.cache_dir_prefix:
            return

        for dir, _, files in os.walk(self.cache_dir_prefix):
            dir = os.path.relpath(dir, self.cache_dir_prefix)
            for file in files:
                yield os.path.join(dir, file)


SCHEMA = '''
CREATE TABLE IF NOT EXISTS files (
    path TEXT UNIQUE NOT NULL,
    mtime REAL,
    data TEXT
);
CREATE INDEX IF NOT EXISTS path_idx on files(path);
'''

# No migrations yet
MIGRATIONS = [
]  # type: List[str]


def connect_db(db_file: str) -> 'sqlite3.Connection':
    """Open (and initialize, if needed) the sqlite cache database."""
    import sqlite3.dbapi2

    db = sqlite3.dbapi2.connect(db_file)
    db.executescript(SCHEMA)
    for migr in MIGRATIONS:
        # Best effort: a migration that has already been applied may fail.
        try:
            db.executescript(migr)
        except sqlite3.OperationalError:
            pass
    return db


class SqliteMetadataStore(MetadataStore):
    """Metadata store backed by a single sqlite database file."""

    def __init__(self, cache_dir_prefix: str) -> None:
        # We check startswith instead of equality because the version
        # will have already been appended by the time the cache dir is
        # passed here.
        if cache_dir_prefix.startswith(os.devnull):
            # A devnull prefix disables the store entirely.
            self.db = None
            return
        os.makedirs(cache_dir_prefix, exist_ok=True)
        self.db = connect_db(os.path.join(cache_dir_prefix, 'cache.db'))

    def _query(self, name: str, field: str) -> Any:
        # Raises FileNotFound for consistency with the file system version
        if not self.db:
            raise FileNotFoundError()

        cur = self.db.execute('SELECT {} FROM files WHERE path = ?'.format(field),
                              (name,))
        results = cur.fetchall()
        if not results:
            raise FileNotFoundError()
        assert len(results) == 1
        return results[0][0]

    def getmtime(self, name: str) -> float:
        return self._query(name, 'mtime')

    def read(self, name: str) -> str:
        return self._query(name, 'data')

    def write(self, name: str, data: str, mtime: Optional[float] = None) -> bool:
        import sqlite3
        if not self.db:
            return False
        try:
            if mtime is None:
                mtime = time.time()
            self.db.execute('INSERT OR REPLACE INTO files(path, mtime, data) VALUES(?, ?, ?)',
                            (name, mtime, data))
        except sqlite3.OperationalError:
            return False
        return True

    def remove(self, name: str) -> None:
        if not self.db:
            raise FileNotFoundError()

        self.db.execute('DELETE FROM files WHERE path = ?', (name,))

    def commit(self) -> None:
        if self.db:
            self.db.commit()

    def list_all(self) -> Iterable[str]:
        if self.db:
            for row in self.db.execute('SELECT path FROM files'):
                yield row[0]
mypy-0.761/mypy/mixedtraverser.py0000644€tŠÔÚ€2›s®0000000543113576752246023337 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional

from mypy.nodes import (
    Var, FuncItem, ClassDef, AssignmentStmt, ForStmt, WithStmt,
    CastExpr, TypeApplication, TypeAliasExpr, TypeVarExpr, TypedDictExpr,
    NamedTupleExpr, PromoteExpr, NewTypeExpr
)
from mypy.types import Type
from mypy.traverser import TraverserVisitor
from mypy.typetraverser import TypeTraverserVisitor


class MixedTraverserVisitor(TraverserVisitor, TypeTraverserVisitor):
    """Recursive traversal of both Node and Type objects."""

    # Symbol nodes

    def visit_var(self, var: Var) -> None:
        self.visit_optional_type(var.type)
    def visit_func(self, o: FuncItem) -> None:
        super().visit_func(o)
        self.visit_optional_type(o.type)

    def visit_class_def(self, o: ClassDef) -> None:
        # TODO: Should we visit generated methods/variables as well, either here or in
        #       TraverserVisitor?
        super().visit_class_def(o)
        info = o.info
        if info:
            for base in info.bases:
                base.accept(self)

    def visit_type_alias_expr(self, o: TypeAliasExpr) -> None:
        super().visit_type_alias_expr(o)
        o.type.accept(self)

    def visit_type_var_expr(self, o: TypeVarExpr) -> None:
        super().visit_type_var_expr(o)
        o.upper_bound.accept(self)
        for value in o.values:
            value.accept(self)

    def visit_typeddict_expr(self, o: TypedDictExpr) -> None:
        super().visit_typeddict_expr(o)
        self.visit_optional_type(o.info.typeddict_type)

    def visit_namedtuple_expr(self, o: NamedTupleExpr) -> None:
        super().visit_namedtuple_expr(o)
        assert o.info.tuple_type
        o.info.tuple_type.accept(self)

    def visit__promote_expr(self, o: PromoteExpr) -> None:
        super().visit__promote_expr(o)
        o.type.accept(self)

    def visit_newtype_expr(self, o: NewTypeExpr) -> None:
        super().visit_newtype_expr(o)
        self.visit_optional_type(o.old_type)

    # Statements

    def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
        super().visit_assignment_stmt(o)
        self.visit_optional_type(o.type)

    def visit_for_stmt(self, o: ForStmt) -> None:
        super().visit_for_stmt(o)
        self.visit_optional_type(o.index_type)

    def visit_with_stmt(self, o: WithStmt) -> None:
        super().visit_with_stmt(o)
        for typ in o.analyzed_types:
            typ.accept(self)

    # Expressions

    def visit_cast_expr(self, o: CastExpr) -> None:
        super().visit_cast_expr(o)
        o.type.accept(self)

    def visit_type_application(self, o: TypeApplication) -> None:
        super().visit_type_application(o)
        for t in o.types:
            t.accept(self)

    # Helpers

    def visit_optional_type(self, t: Optional[Type]) -> None:
        # Shared guard: only descend into a type if one is present.
        if t:
            t.accept(self)
mypy-0.761/mypy/modulefinder.py0000644€tŠÔÚ€2›s®0000005720113576752246022752 0ustar jukkaDROPBOX\Domain Users00000000000000"""Low-level infrastructure to find modules.
This build on fscache.py; find_sources.py builds on top of this.
"""

import ast
import collections
import functools
import os
import subprocess
import sys

from typing import Dict, List, NamedTuple, Optional, Set, Tuple
from typing_extensions import Final

from mypy.defaults import PYTHON3_VERSION_MIN
from mypy.fscache import FileSystemCache
from mypy.options import Options
from mypy import sitepkgs

# Paths to be searched in find_module().
SearchPaths = NamedTuple(
    'SearchPaths',
    [('python_path', Tuple[str, ...]),  # where user code is found
     ('mypy_path', Tuple[str, ...]),  # from $MYPYPATH or config variable
     ('package_path', Tuple[str, ...]),  # from get_site_packages_dirs()
     ('typeshed_path', Tuple[str, ...]),  # paths in typeshed
     ])

# Package dirs are a two-tuple of path to search and whether to verify the module
OnePackageDir = Tuple[str, bool]
PackageDirs = List[OnePackageDir]

PYTHON_EXTENSIONS = ['.pyi', '.py']  # type: Final


class BuildSource:
    """A single source file."""

    def __init__(self, path: Optional[str], module: Optional[str],
                 text: Optional[str], base_dir: Optional[str] = None) -> None:
        self.path = path  # File where it's found (e.g. 'xxx/yyy/foo/bar.py')
        self.module = module or '__main__'  # Module name (e.g. 'foo.bar')
        self.text = text  # Source code, if initially supplied, else None
        self.base_dir = base_dir  # Directory where the package is rooted (e.g. 'xxx/yyy')

    def __repr__(self) -> str:
        # NOTE(review): the format string below appears truncated in this copy
        # (angle-bracketed content such as '<BuildSource ...>' was likely
        # stripped during extraction) -- verify against upstream.
        return '' % (self.path,
                     self.module,
                     self.text is not None)


class FindModuleCache:
    """Module finder with integrated cache.

    Module locations and some intermediate results are cached internally
    and can be cleared with the clear() method.

    All file system accesses are performed through a FileSystemCache,
    which is not ever cleared by this class. If necessary it must be
    cleared by client code.
    """

    def __init__(self,
                 search_paths: SearchPaths,
                 fscache: Optional[FileSystemCache] = None,
                 options: Optional[Options] = None,
                 ns_packages: Optional[List[str]] = None) -> None:
        self.search_paths = search_paths
        self.fscache = fscache or FileSystemCache()
        # Cache for get_toplevel_possibilities:
        # search_paths -> (toplevel_id -> list(package_dirs))
        self.initial_components = {}  # type: Dict[Tuple[str, ...], Dict[str, List[str]]]
        # Cache find_module: id -> result
        self.results = {}  # type: Dict[str, Optional[str]]
        self.ns_ancestors = {}  # type: Dict[str, str]
        self.options = options
        self.ns_packages = ns_packages or []  # type: List[str]

    def clear(self) -> None:
        """Drop all cached lookup results."""
        self.results.clear()
        self.initial_components.clear()
        self.ns_ancestors.clear()

    def find_lib_path_dirs(self, id: str, lib_path: Tuple[str, ...]) -> PackageDirs:
        """Find which elements of a lib_path have the directory a module needs to exist.

        This is run for the python_path, mypy_path, and typeshed_path search paths.
        """
        components = id.split('.')
        dir_chain = os.sep.join(components[:-1])  # e.g., 'foo/bar'

        dirs = []
        for pathitem in self.get_toplevel_possibilities(lib_path, components[0]):
            # e.g., '/usr/lib/python3.4/foo/bar'
            dir = os.path.normpath(os.path.join(pathitem, dir_chain))
            if self.fscache.isdir(dir):
                dirs.append((dir, True))
        return dirs

    def get_toplevel_possibilities(self, lib_path: Tuple[str, ...], id: str) -> List[str]:
        """Find which elements of lib_path could contain a particular top-level module.

        In practice, almost all modules can be routed to the correct entry in
        lib_path by looking at just the first component of the module name.

        We take advantage of this by enumerating the contents of all of the
        directories on the lib_path and building a map of which entries in
        the lib_path could contain each potential top-level module that appears.
        """
        if lib_path in self.initial_components:
            return self.initial_components[lib_path].get(id, [])

        # Enumerate all the files in the directories on lib_path and produce the map
        components = {}  # type: Dict[str, List[str]]
        for dir in lib_path:
            try:
                contents = self.fscache.listdir(dir)
            except OSError:
                contents = []
            # False positives are fine for correctness here, since we will check
            # precisely later, so we only look at the root of every filename without
            # any concern for the exact details.
            for name in contents:
                name = os.path.splitext(name)[0]
                components.setdefault(name, []).append(dir)

        self.initial_components[lib_path] = components
        return components.get(id, [])

    def find_module(self, id: str) -> Optional[str]:
        """Return the path of the module source file, or None if not found."""
        # Memoized wrapper around _find_module.
        if id not in self.results:
            self.results[id] = self._find_module(id)
        return self.results[id]

    def _find_module_non_stub_helper(self, components: List[str],
                                     pkg_dir: str) -> Optional[OnePackageDir]:
        # Walk down the package path, looking for the first directory that
        # carries a py.typed marker (PEP 561 inline-typed package).
        dir_path = pkg_dir
        for index, component in enumerate(components):
            dir_path = os.path.join(dir_path, component)
            if self.fscache.isfile(os.path.join(dir_path, 'py.typed')):
                return os.path.join(pkg_dir, *components[:-1]), index == 0
        return None

    def _update_ns_ancestors(self, components: List[str], match: Tuple[str, bool]) -> None:
        # Record ancestor package directories so namespace-package ancestors
        # can be resolved later (see the end of _find_module).
        path, verify = match
        for i in range(1, len(components)):
            pkg_id = '.'.join(components[:-i])
            if pkg_id not in self.ns_ancestors and self.fscache.isdir(path):
                self.ns_ancestors[pkg_id] = path
            path = os.path.dirname(path)

    def _find_module(self, id: str) -> Optional[str]:
        fscache = self.fscache

        # If we're looking for a module like 'foo.bar.baz', it's likely that most of the
        # many elements of lib_path don't even have a subdirectory 'foo/bar'. Discover
        # that only once and cache it for when we look for modules like 'foo.bar.blah'
        # that will require the same subdirectory.
        components = id.split('.')
        dir_chain = os.sep.join(components[:-1])  # e.g., 'foo/bar'
        # TODO (ethanhs): refactor each path search to its own method with lru_cache

        # We have two sets of folders so that we collect *all* stubs folders and
        # put them in the front of the search path
        third_party_inline_dirs = []  # type: PackageDirs
        third_party_stubs_dirs = []  # type: PackageDirs
        # Third-party stub/typed packages
        for pkg_dir in self.search_paths.package_path:
            stub_name = components[0] + '-stubs'
            stub_dir = os.path.join(pkg_dir, stub_name)
            if fscache.isdir(stub_dir):
                stub_typed_file = os.path.join(stub_dir, 'py.typed')
                stub_components = [stub_name] + components[1:]
                path = os.path.join(pkg_dir, *stub_components[:-1])
                if fscache.isdir(path):
                    if fscache.isfile(stub_typed_file):
                        # Stub packages can have a py.typed file, which must include
                        # 'partial\n' to make the package partial
                        # Partial here means that mypy should look at the runtime
                        # package if installed.
                        if fscache.read(stub_typed_file).decode().strip() == 'partial':
                            runtime_path = os.path.join(pkg_dir, dir_chain)
                            third_party_inline_dirs.append((runtime_path, True))
                            # if the package is partial, we don't verify the module, as
                            # the partial stub package may not have a __init__.pyi
                            third_party_stubs_dirs.append((path, False))
                        else:
                            # handle the edge case where people put a py.typed file
                            # in a stub package, but it isn't partial
                            third_party_stubs_dirs.append((path, True))
                    else:
                        third_party_stubs_dirs.append((path, True))
            non_stub_match = self._find_module_non_stub_helper(components, pkg_dir)
            if non_stub_match:
                third_party_inline_dirs.append(non_stub_match)
                self._update_ns_ancestors(components, non_stub_match)
        if self.options and self.options.use_builtins_fixtures:
            # Everything should be in fixtures.
            third_party_inline_dirs.clear()
            third_party_stubs_dirs.clear()
        python_mypy_path = self.search_paths.mypy_path + self.search_paths.python_path
        # Search order: user/mypy paths, then stubs, then inline-typed
        # packages, then typeshed.
        candidate_base_dirs = self.find_lib_path_dirs(id, python_mypy_path) + \
            third_party_stubs_dirs + third_party_inline_dirs + \
            self.find_lib_path_dirs(id, self.search_paths.typeshed_path)

        # If we're looking for a module like 'foo.bar.baz', then candidate_base_dirs now
        # contains just the subdirectories 'foo/bar' that actually exist under the
        # elements of lib_path.  This is probably much shorter than lib_path itself.
        # Now just look for 'baz.pyi', 'baz/__init__.py', etc., inside those directories.
        seplast = os.sep + components[-1]  # so e.g. '/baz'
        sepinit = os.sep + '__init__'
        near_misses = []  # Collect near misses for namespace mode (see below).
        for base_dir, verify in candidate_base_dirs:
            base_path = base_dir + seplast  # so e.g. '/usr/lib/python3.4/foo/bar/baz'
            has_init = False
            dir_prefix = base_dir
            for _ in range(len(components) - 1):
                dir_prefix = os.path.dirname(dir_prefix)
            # Prefer package over module, i.e. baz/__init__.py* over baz.py*.
            for extension in PYTHON_EXTENSIONS:
                path = base_path + sepinit + extension
                path_stubs = base_path + '-stubs' + sepinit + extension
                if fscache.isfile_case(path, dir_prefix):
                    has_init = True
                    if verify and not verify_module(fscache, id, path, dir_prefix):
                        near_misses.append((path, dir_prefix))
                        continue
                    return path
                elif fscache.isfile_case(path_stubs, dir_prefix):
                    if verify and not verify_module(fscache, id, path_stubs, dir_prefix):
                        near_misses.append((path_stubs, dir_prefix))
                        continue
                    return path_stubs
            # In namespace mode, register a potential namespace package
            if self.options and self.options.namespace_packages:
                if fscache.isdir(base_path) and not has_init:
                    near_misses.append((base_path, dir_prefix))

            # No package, look for module.
            for extension in PYTHON_EXTENSIONS:
                path = base_path + extension
                if fscache.isfile_case(path, dir_prefix):
                    if verify and not verify_module(fscache, id, path, dir_prefix):
                        near_misses.append((path, dir_prefix))
                        continue
                    return path

        # In namespace mode, re-check those entries that had 'verify'.
        # Assume search path entries xxx, yyy and zzz, and we're
        # looking for foo.bar.baz.  Suppose near_misses has:
        #
        # - xxx/foo/bar/baz.py
        # - yyy/foo/bar/baz/__init__.py
        # - zzz/foo/bar/baz.pyi
        #
        # If any of the foo directories has __init__.py[i], it wins.
        # Else, we look for foo/bar/__init__.py[i], etc.  If there are
        # none, the first hit wins.  Note that this does not take into
        # account whether the lowest-level module is a file (baz.py),
        # a package (baz/__init__.py), or a stub file (baz.pyi) -- for
        # these the first one encountered along the search path wins.
        #
        # The helper function highest_init_level() returns an int that
        # indicates the highest level at which a __init__.py[i] file
        # is found; if no __init__ was found it returns 0, if we find
        # only foo/bar/__init__.py it returns 1, and if we have
        # foo/__init__.py it returns 2 (regardless of what's in
        # foo/bar).  It doesn't look higher than that.
        if self.options and self.options.namespace_packages and near_misses:
            levels = [highest_init_level(fscache, id, path, dir_prefix)
                      for path, dir_prefix in near_misses]
            index = levels.index(max(levels))
            return near_misses[index][0]

        # Finally, we may be asked to produce an ancestor for an
        # installed package with a py.typed marker that is a
        # subpackage of a namespace package.  We only fess up to these
        # if we would otherwise return "not found".
    def find_modules_recursive(self, module: str) -> List[BuildSource]:
        """Return the module plus all submodules found under it on the search path.

        Returns an empty list if the module itself cannot be found.
        """
        module_path = self.find_module(module)
        if not module_path:
            return []
        result = [BuildSource(module_path, module, None)]
        if module_path.endswith(('__init__.py', '__init__.pyi')):
            # `module` resolved to a package __init__ file: recurse into the
            # package directory.
            # Subtle: this code prefers the .pyi over the .py if both
            # exist, and also prefers packages over modules if both x/
            # and x.py* exist. How? We sort the directory items, so x
            # comes before x.py and x.pyi. But the preference for .pyi
            # over .py is encoded in find_module(); even though we see
            # x.py before x.pyi, find_module() will find x.pyi first. We
            # use hits to avoid adding it a second time when we see x.pyi.
            # This also avoids both x.py and x.pyi when x/ was seen first.
            hits = set()  # type: Set[str]
            # NOTE(review): the directory listing goes through self.fscache,
            # but the isdir/isfile probes below use os.path directly --
            # presumably intentional, but worth confirming for cache
            # consistency.
            for item in sorted(self.fscache.listdir(os.path.dirname(module_path))):
                abs_path = os.path.join(os.path.dirname(module_path), item)
                if os.path.isdir(abs_path) and \
                        (os.path.isfile(os.path.join(abs_path, '__init__.py')) or
                         os.path.isfile(os.path.join(abs_path, '__init__.pyi'))):
                    hits.add(item)
                    result += self.find_modules_recursive(module + '.' + item)
                elif item != '__init__.py' and item != '__init__.pyi' and \
                        item.endswith(('.py', '.pyi')):
                    mod = item.split('.')[0]
                    if mod not in hits:
                        hits.add(mod)
                        result += self.find_modules_recursive(module + '.' + mod)
        elif os.path.isdir(module_path) and module in self.ns_packages:
            # Even subtler: handle recursive descent into PEP 420
            # namespace packages that are explicitly listed on the command
            # line with -p/--packages.
            for item in sorted(self.fscache.listdir(module_path)):
                if os.path.isdir(os.path.join(module_path, item)):
                    result += self.find_modules_recursive(module + '.' + item)
        return result


def verify_module(fscache: FileSystemCache, id: str, path: str, prefix: str) -> bool:
    """Check that all packages containing id have a __init__ file."""
    if path.endswith(('__init__.py', '__init__.pyi')):
        path = os.path.dirname(path)
    # Walk up one directory per package level of `id`, checking each level.
    for i in range(id.count('.')):
        path = os.path.dirname(path)
        if not any(fscache.isfile_case(os.path.join(path, '__init__{}'.format(extension)),
                                       prefix)
                   for extension in PYTHON_EXTENSIONS):
            return False
    return True


def highest_init_level(fscache: FileSystemCache, id: str, path: str, prefix: str) -> int:
    """Compute the highest level where an __init__ file is found.

    0 means no __init__ file was found at any ancestor level; larger values
    mean an __init__ file exists closer to the top of the package tree.
    """
    if path.endswith(('__init__.py', '__init__.pyi')):
        path = os.path.dirname(path)
    level = 0
    for i in range(id.count('.')):
        path = os.path.dirname(path)
        if any(fscache.isfile_case(os.path.join(path, '__init__{}'.format(extension)),
                                   prefix)
               for extension in PYTHON_EXTENSIONS):
            level = i + 1
    return level


def mypy_path() -> List[str]:
    """Return the directories listed in the MYPYPATH environment variable."""
    path_env = os.getenv('MYPYPATH')
    if not path_env:
        return []
    return path_env.split(os.pathsep)
def default_lib_path(data_dir: str,
                     pyversion: Tuple[int, int],
                     custom_typeshed_dir: Optional[str]) -> List[str]:
    """Return default standard library search paths."""
    # IDEA: Make this more portable.
    if custom_typeshed_dir:
        typeshed_dir = custom_typeshed_dir
    else:
        auto = os.path.join(data_dir, 'stubs-auto')
        if os.path.isdir(auto):
            data_dir = auto
        typeshed_dir = os.path.join(data_dir, "typeshed")

    if pyversion[0] == 3:
        # We allow a module for e.g. version 3.5 to be in 3.4/. The assumption
        # is that a module added with 3.4 will still be present in Python 3.5.
        minors = reversed(range(PYTHON3_VERSION_MIN[1], pyversion[1] + 1))
        versions = ["%d.%d" % (pyversion[0], minor) for minor in minors]
    else:
        # For Python 2, we only have stubs for 2.7.
        versions = ["2.7"]

    # E.g. for Python 3.6, try 3.6/, 3.5/, 3.4/, 3/, 2and3/.
    path = []  # type: List[str]
    for version_dir in versions + [str(pyversion[0]), '2and3']:
        for lib_type in ['stdlib', 'third_party']:
            stubdir = os.path.join(typeshed_dir, lib_type, version_dir)
            if os.path.isdir(stubdir):
                path.append(stubdir)

    # Add fallback path that can be used if we have a broken installation.
    if sys.platform != 'win32':
        path.append('/usr/local/lib/mypy')
    if not path:
        print("Could not resolve typeshed subdirectories. If you are using mypy\n"
              "from source, you need to run \"git submodule update --init\".\n"
              "Otherwise your mypy install is broken.\nPython executable is located at "
              "{0}.\nMypy located at {1}".format(sys.executable, data_dir),
              file=sys.stderr)
        sys.exit(1)
    return path
@functools.lru_cache(maxsize=None)
def get_site_packages_dirs(python_executable: Optional[str]) -> Tuple[List[str], List[str]]:
    """Find package directories for given python.

    This runs a subprocess call, which generates a list of the egg directories, and the site
    package directories. To avoid repeatedly calling a subprocess (which can be slow!) we
    lru_cache the results.
    """
    if python_executable is None:
        return [], []

    def make_abspath(path: str, root: str) -> str:
        """Take a path and make it absolute relative to root if not already absolute."""
        path = os.path.normpath(path)
        if os.path.isabs(path):
            return path
        return os.path.join(root, path)

    if python_executable == sys.executable:
        # Use running Python's package dirs.
        site_packages = sitepkgs.getsitepackages()
    else:
        # Use a subprocess to get the package directories of the given
        # Python executable.
        output = subprocess.check_output([python_executable, sitepkgs.__file__],
                                         stderr=subprocess.PIPE).decode()
        site_packages = ast.literal_eval(output)

    # Collect egg directories registered via setuptools' easy-install.pth
    # files inside each site-packages directory.
    egg_dirs = []  # type: List[str]
    for pkg_dir in site_packages:
        pth = os.path.join(pkg_dir, 'easy-install.pth')
        if not os.path.isfile(pth):
            continue
        with open(pth) as pth_file:
            egg_dirs.extend(make_abspath(line.rstrip(), pkg_dir) for line in pth_file)
    return egg_dirs, site_packages
def compute_search_paths(sources: List[BuildSource],
                         options: Options,
                         data_dir: str,
                         alt_lib_path: Optional[str] = None) -> SearchPaths:
    """Compute the search paths as specified in PEP 561.

    There are the following 4 members created:
    - User code (from `sources`)
    - MYPYPATH (set either via config or environment variable)
    - installed package directories (which will later be split into stub-only
      and inline)
    - typeshed
    """
    # Determine the default module search path.
    lib_path = collections.deque(
        default_lib_path(data_dir,
                         options.python_version,
                         custom_typeshed_dir=options.custom_typeshed_dir))

    if options.use_builtins_fixtures:
        # Use stub builtins (to speed up test cases and to make them easier to
        # debug).  This is a test-only feature, so assume our files are laid out
        # as in the source tree.
        # We also need to allow overriding where to look for it. Argh.
        root_dir = os.getenv('MYPY_TEST_PREFIX', None)
        if not root_dir:
            root_dir = os.path.dirname(os.path.dirname(__file__))
        lib_path.appendleft(os.path.join(root_dir, 'test-data', 'unit', 'lib-stub'))
    # alt_lib_path is used by some tests to bypass the normal lib_path mechanics.
    # If we don't have one, grab directories of source files.
    python_path = []  # type: List[str]
    if not alt_lib_path:
        for source in sources:
            # Include directory of the program file in the module search path.
            if source.base_dir:
                dir = source.base_dir
                if dir not in python_path:
                    python_path.append(dir)

        # Do this even if running as a file, for sanity (mainly because with
        # multiple builds, there could be a mix of files/modules, so it's easier
        # to just define the semantics that we always add the current directory
        # to the lib_path).
        # TODO: Don't do this in some cases; for motivation see
        # https://github.com/python/mypy/issues/4195#issuecomment-341915031
        if options.bazel:
            dir = '.'
        else:
            dir = os.getcwd()
        # NOTE(review): membership is checked against lib_path but the insert
        # goes into python_path -- looks intentional (avoid duplicating a
        # typeshed dir), but worth confirming.
        if dir not in lib_path:
            python_path.insert(0, dir)

    # Start with a MYPYPATH environment variable at the front of the mypy_path, if defined.
    mypypath = mypy_path()

    # Add a config-defined mypy path.
    mypypath.extend(options.mypy_path)

    # If provided, insert the caller-supplied extra module path to the
    # beginning (highest priority) of the search path.
    if alt_lib_path:
        mypypath.insert(0, alt_lib_path)

    egg_dirs, site_packages = get_site_packages_dirs(options.python_executable)
    for site_dir in site_packages:
        assert site_dir not in lib_path
        # Having a site-packages dir on MYPYPATH (or PYTHONPATH) would give
        # inline package code precedence over stubs; refuse to run.
        if (site_dir in mypypath or
                any(p.startswith(site_dir + os.path.sep) for p in mypypath) or
                os.path.altsep and any(p.startswith(site_dir + os.path.altsep)
                                       for p in mypypath)):
            print("{} is in the MYPYPATH. Please remove it.".format(site_dir), file=sys.stderr)
            print("See https://mypy.readthedocs.io/en/latest/running_mypy.html"
                  "#how-mypy-handles-imports for more info", file=sys.stderr)
            sys.exit(1)
        elif site_dir in python_path:
            print("{} is in the PYTHONPATH. Please change directory"
                  " so it is not.".format(site_dir), file=sys.stderr)
            sys.exit(1)

    return SearchPaths(tuple(reversed(python_path)),
                       tuple(mypypath),
                       tuple(egg_dirs + site_packages),
                       tuple(lib_path))
'ptyprocess', 'requests_oauthlib', 'multidict', 'markdown', 'pexpect', 'atomicwrites', 'uritemplate', 'nacl', 'pycodestyle', 'elasticsearch', 'absl', 'aiohttp', 'redis', 'sklearn', 'gevent', 'pymysql', 'wcwidth', 'tqdm', 'bs4', 'functools32', 'configparser', 'gunicorn', 'typing', 'ujson', 'pyflakes', 'packaging', 'lazy_object_proxy', 'ipython_genutils', 'toolz', 'async_timeout', 'traitlets', 'kiwisolver', 'pathlib2', 'greenlet', 'networkx', 'cv2', 'termcolor', 'babel', 'django', 'pymemcache', 'skimage', 'pickleshare', 'flake8', 'cycler', 'requests_toolbelt', 'bleach', 'scandir', 'selenium', 'dask', 'websockets', 'isort', 'h5py', 'tabulate', 'tensorflow', 'html5lib', 'pylint', 'tensorboard', 'compose', 'astroid', 'trueskill', 'webencodings', 'defusedxml', 'pykube', 'pymongo', 'retrying', 'cached_property', 'zope', 'singledispatch', 'tzlocal', 'datadog', 'zmq', 'discord', 'apache_beam', 'subprocess32', 'astor', 'entrypoints', 'gast', 'nose', 'smmap', 'gitdb', 'isodate', 'pywt', 'simplegeneric', 'sortedcontainers', 'psycopg2', 'pytest_cov', 'hiredis', 'elasticsearch_dsl', 'dill', 'keras', 'contextlib2', 'hdfs', 'jupyter_core', 'typed_ast', 'croniter', 'azure', 'nbformat', 'xmltodict', 'lockfile', 'arrow', 'parso', 'jsonpickle', # Skipped (name considered too generic): # - fixtures # - migrate (from sqlalchemy-migrate) # - git (GitPython) # Other 'formencode', 'pkg_resources', 'wx', 'gi.repository', 'pygtk', 'gtk', 'PyQt4', 'PyQt5', 'pylons', # for use in tests '__dummy_third_party1', } # type: Final # Modules and packages common to Python 2.7 and 3.x. 
common_std_lib_modules = { 'abc', 'aifc', 'antigravity', 'argparse', 'array', 'ast', 'asynchat', 'asyncore', 'audioop', 'base64', 'bdb', 'binascii', 'binhex', 'bisect', 'bz2', 'cProfile', 'calendar', 'cgi', 'cgitb', 'chunk', 'cmath', 'cmd', 'code', 'codecs', 'codeop', 'collections', 'colorsys', 'compileall', 'contextlib', 'copy', 'crypt', 'csv', 'ctypes', 'curses', 'datetime', 'decimal', 'difflib', 'dis', 'distutils', 'doctest', 'dummy_threading', 'email', 'encodings', 'fcntl', 'filecmp', 'fileinput', 'fnmatch', 'formatter', 'fractions', 'ftplib', 'functools', 'genericpath', 'getopt', 'getpass', 'gettext', 'glob', 'grp', 'gzip', 'hashlib', 'heapq', 'hmac', 'imaplib', 'imghdr', 'importlib', 'inspect', 'io', 'json', 'keyword', 'lib2to3', 'linecache', 'locale', 'logging', 'macpath', 'macurl2path', 'mailbox', 'mailcap', 'math', 'mimetypes', 'mmap', 'modulefinder', 'msilib', 'multiprocessing', 'netrc', 'nis', 'nntplib', 'ntpath', 'nturl2path', 'numbers', 'opcode', 'operator', 'optparse', 'os', 'ossaudiodev', 'parser', 'pdb', 'pickle', 'pickletools', 'pipes', 'pkgutil', 'platform', 'plistlib', 'poplib', 'posixpath', 'pprint', 'profile', 'pstats', 'pty', 'py_compile', 'pyclbr', 'pydoc', 'pydoc_data', 'pyexpat', 'quopri', 'random', 're', 'resource', 'rlcompleter', 'runpy', 'sched', 'select', 'shelve', 'shlex', 'shutil', 'site', 'smtpd', 'smtplib', 'sndhdr', 'socket', 'spwd', 'sqlite3', 'sqlite3.dbapi2', 'sqlite3.dump', 'sre_compile', 'sre_constants', 'sre_parse', 'ssl', 'stat', 'string', 'stringprep', 'struct', 'subprocess', 'sunau', 'symbol', 'symtable', 'sysconfig', 'syslog', 'tabnanny', 'tarfile', 'telnetlib', 'tempfile', 'termios', 'textwrap', 'this', 'threading', 'timeit', 'token', 'tokenize', 'trace', 'traceback', 'tty', 'types', 'unicodedata', 'unittest', 'urllib', 'uu', 'uuid', 'warnings', 'wave', 'weakref', 'webbrowser', 'wsgiref', 'xdrlib', 'xml.dom', 'xml.dom.NodeFilter', 'xml.dom.domreg', 'xml.dom.expatbuilder', 'xml.dom.minicompat', 'xml.dom.minidom', 
'xml.dom.pulldom', 'xml.dom.xmlbuilder', 'xml.etree', 'xml.etree.ElementInclude', 'xml.etree.ElementPath', 'xml.etree.ElementTree', 'xml.etree.cElementTree', 'xml.parsers', 'xml.parsers.expat', 'xml.sax', 'xml.sax._exceptions', 'xml.sax.expatreader', 'xml.sax.handler', 'xml.sax.saxutils', 'xml.sax.xmlreader', 'zipfile', 'zlib', # fake names to use in tests '__dummy_stdlib1', '__dummy_stdlib2', } # type: Final # Python 2 standard library modules. python2_std_lib_modules = common_std_lib_modules | { 'BaseHTTPServer', 'Bastion', 'CGIHTTPServer', 'ConfigParser', 'Cookie', 'DocXMLRPCServer', 'HTMLParser', 'MimeWriter', 'Queue', 'SimpleHTTPServer', 'SimpleXMLRPCServer', 'SocketServer', 'StringIO', 'UserDict', 'UserList', 'UserString', 'anydbm', 'atexit', 'audiodev', 'bsddb', 'cPickle', 'cStringIO', 'commands', 'cookielib', 'copy_reg', 'curses.wrapper', 'dbhash', 'dircache', 'dumbdbm', 'dummy_thread', 'fpformat', 'future_builtins', 'hotshot', 'htmlentitydefs', 'htmllib', 'httplib', 'ihooks', 'imputil', 'itertools', 'linuxaudiodev', 'markupbase', 'md5', 'mhlib', 'mimetools', 'mimify', 'multifile', 'multiprocessing.forking', 'mutex', 'new', 'os2emxpath', 'popen2', 'posixfile', 'repr', 'rexec', 'rfc822', 'robotparser', 'sets', 'sgmllib', 'sha', 'sre', 'statvfs', 'stringold', 'strop', 'sunaudio', 'time', 'toaiff', 'urllib2', 'urlparse', 'user', 'whichdb', 'xmllib', 'xmlrpclib', } # type: Final # Python 3 standard library modules (based on Python 3.5.0). 
def is_third_party_module(id: str) -> bool:
    """Return True if id (or an ancestor package of it) is a known third-party module."""
    return is_in_module_collection(third_party_modules, id)


def is_py2_std_lib_module(id: str) -> bool:
    """Return True if id names a Python 2 standard library module."""
    return is_in_module_collection(python2_std_lib_modules, id)


def is_py3_std_lib_module(id: str) -> bool:
    """Return True if id names a Python 3 standard library module."""
    return is_in_module_collection(python3_std_lib_modules, id)


def is_in_module_collection(collection: Set[str], id: str) -> bool:
    """Check whether id, or any dotted prefix of it, occurs in collection.

    E.g. 'a.b.c' matches if 'a', 'a.b' or 'a.b.c' is in the collection.
    """
    parts = id.split('.')
    return any('.'.join(parts[:prefix_length]) in collection
               for prefix_length in range(1, len(parts) + 1))
def is_c_module(module: ModuleType) -> bool:
    """Return True if the module must be introspected rather than parsed.

    True for C extension modules (.so/.pyd) and for modules without a
    __file__ attribute, which have no source file to parse.
    """
    mod_file = module.__dict__.get('__file__')
    if mod_file is None:
        # Could be a namespace package. These must be handled through
        # introspection, since there is no source file.
        return True
    return os.path.splitext(mod_file)[-1] in ('.so', '.pyd')


class InspectError(Exception):
    """Raised when importing or inspecting a module fails."""
def get_package_properties(package_id: str) -> ModuleProperties:
    """Use runtime introspection to get information about a module/package.

    Raise InspectError if the import fails for any reason (any exception,
    including BaseException subclasses like SystemExit, is converted).
    """
    try:
        package = importlib.import_module(package_id)
    except BaseException as e:
        raise InspectError(str(e))
    name = getattr(package, '__name__', None)
    file = getattr(package, '__file__', None)
    path = getattr(package, '__path__', None)  # type: Optional[List[str]]
    if not isinstance(path, list):
        path = None
    pkg_all = getattr(package, '__all__', None)
    if pkg_all is not None:
        try:
            # __all__ may be any iterable; normalize it to a plain list.
            pkg_all = list(pkg_all)
        except Exception:
            # Not iterable -- drop it rather than fail.
            pkg_all = None
    is_c = is_c_module(package)

    if path is None:
        # Object has no path; this means it's either a module inside a package
        # (and thus no sub-packages), or it could be a C extension package.
        if is_c:
            # This is a C extension module, now get the list of all sub-packages
            # using the inspect module
            subpackages = [package.__name__ + "." + name
                           for name, val in inspect.getmembers(package)
                           if inspect.ismodule(val)
                           and val.__name__ == package.__name__ + "." + name]
        else:
            # It's a module inside a package.  There's nothing else to walk/yield.
            subpackages = []
    else:
        # A package with a real __path__: walk it for submodules; import
        # errors during the walk are silently ignored via onerror.
        all_packages = pkgutil.walk_packages(path, prefix=package.__name__ + ".",
                                             onerror=lambda r: None)
        subpackages = [qualified_name for importer, qualified_name, ispkg
                       in all_packages]
    return ModuleProperties(name=name, file=file,
                            path=path, all=pkg_all,
                            is_c_module=is_c,
                            subpackages=subpackages)


def worker(tasks: 'Queue[str]',
           results: 'Queue[Union[str, ModuleProperties]]',
           sys_path: List[str]) -> None:
    """The main loop of a worker introspection process.

    Reads module ids from `tasks` forever; for each one puts either a
    ModuleProperties (success) or a plain error string (InspectError) on
    `results`.
    """
    sys.path = sys_path
    while True:
        mod = tasks.get()
        try:
            prop = get_package_properties(mod)
        except InspectError as e:
            # Report the failure as a string so the parent can distinguish
            # errors from results.
            results.put(str(e))
            continue
        results.put(prop)
self._start() raise InspectError('Process died when importing %r' % package_id) if isinstance(res, str): # Error importing module if self.counter > 0: # Also try with a fresh process. Maybe one of the previous imports has # corrupted some global state. self.close() self._start() return self.get_package_properties(package_id) raise InspectError(res) self.counter += 1 return res def _get_from_queue(self) -> Union[ModuleProperties, str, None]: """Get value from the queue. Return the value read from the queue, or None if the process unexpectedly died. """ max_iter = 100 n = 0 while True: if n == max_iter: raise RuntimeError('Timeout waiting for subprocess') try: return self.results.get(timeout=0.05) except queue.Empty: if not self.proc.is_alive(): return None n += 1 def __enter__(self) -> 'ModuleInspect': return self def __exit__(self, *args: object) -> None: self.close() mypy-0.761/mypy/mro.py0000644€tŠÔÚ€2›s®0000000406213576752246021067 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Callable, List from mypy.nodes import TypeInfo from mypy.types import Instance from mypy.typestate import TypeState def calculate_mro(info: TypeInfo, obj_type: Optional[Callable[[], Instance]] = None) -> None: """Calculate and set mro (method resolution order). Raise MroError if cannot determine mro. """ mro = linearize_hierarchy(info, obj_type) assert mro, "Could not produce a MRO at all for %s" % (info,) info.mro = mro # The property of falling back to Any is inherited. 
class MroError(Exception):
    """Raised if a consistent mro cannot be determined for a class."""


def linearize_hierarchy(info: TypeInfo,
                        obj_type: Optional[Callable[[], Instance]] = None) -> List[TypeInfo]:
    """Linearize the class hierarchy rooted at info (C3 linearization).

    Returns the cached info.mro if it is already computed.
    """
    if info.mro:
        return info.mro
    bases = info.direct_base_classes()
    if (not bases and info.fullname != 'builtins.object' and
            obj_type is not None):
        # Second pass in import cycle, add a dummy `object` base class,
        # otherwise MRO calculation may spuriously fail.
        # MRO will be re-calculated for real in the third pass.
        bases = [obj_type().type]
    linearizations = []
    for base in bases:
        assert base is not None, "Cannot linearize bases for %s %s" % (info.fullname, bases)
        linearizations.append(linearize_hierarchy(base, obj_type))
    linearizations.append(bases)
    return [info] + merge(linearizations)


def merge(seqs: List[List[TypeInfo]]) -> List[TypeInfo]:
    """C3-merge the given linearizations; raise MroError if inconsistent."""
    seqs = [list(s) for s in seqs]
    result = []  # type: List[TypeInfo]
    while True:
        # Discard exhausted sequences; done when nothing remains.
        seqs = [s for s in seqs if s]
        if not seqs:
            return result
        # Find the first head that appears in no sequence's tail.
        for seq in seqs:
            candidate = seq[0]
            if all(candidate not in s[1:] for s in seqs):
                break
        else:
            # Every head also occurs in some tail: no consistent order.
            raise MroError()
        result.append(candidate)
        for s in seqs:
            if s[0] is candidate:
                del s[0]
class Context:
    """Base type for objects that are valid as error message locations."""
    __slots__ = ('line', 'column', 'end_line')

    def __init__(self, line: int = -1, column: int = -1) -> None:
        self.line = line
        self.column = column
        self.end_line = None  # type: Optional[int]

    def set_line(self,
                 target: Union['Context', int],
                 column: Optional[int] = None,
                 end_line: Optional[int] = None) -> None:
        """Copy location information from target into this node.

        A plain int sets only the line; a Context also contributes its
        column and end_line.  The explicit column/end_line arguments, when
        given, override whatever came from target.
        """
        if isinstance(target, Context):
            self.line = target.line
            self.column = target.column
            self.end_line = target.end_line
        else:
            self.line = target
        if column is not None:
            self.column = column
        if end_line is not None:
            self.end_line = end_line

    def get_line(self) -> int:
        """Don't use. Use x.line."""
        return self.line

    def get_column(self) -> int:
        """Don't use. Use x.column."""
        return self.column
UNBOUND_IMPORTED = 3 # type: Final[int] # RevealExpr node kinds REVEAL_TYPE = 0 # type: Final[int] REVEAL_LOCALS = 1 # type: Final[int] LITERAL_YES = 2 # type: Final LITERAL_TYPE = 1 # type: Final LITERAL_NO = 0 # type: Final node_kinds = { LDEF: 'Ldef', GDEF: 'Gdef', MDEF: 'Mdef', UNBOUND_IMPORTED: 'UnboundImported', } # type: Final inverse_node_kinds = {_kind: _name for _name, _kind in node_kinds.items()} # type: Final implicit_module_attrs = {'__name__': '__builtins__.str', '__doc__': None, # depends on Python version, see semanal.py '__file__': '__builtins__.str', '__package__': '__builtins__.str'} # type: Final # These aliases exist because built-in class objects are not subscriptable. # For example `list[int]` fails at runtime. Instead List[int] should be used. type_aliases = { 'typing.List': 'builtins.list', 'typing.Dict': 'builtins.dict', 'typing.Set': 'builtins.set', 'typing.FrozenSet': 'builtins.frozenset', 'typing.ChainMap': 'collections.ChainMap', 'typing.Counter': 'collections.Counter', 'typing.DefaultDict': 'collections.defaultdict', 'typing.Deque': 'collections.deque', } # type: Final # This keeps track of the oldest supported Python version where the corresponding # alias _target_ is available. 
type_aliases_target_versions = { 'typing.List': (2, 7), 'typing.Dict': (2, 7), 'typing.Set': (2, 7), 'typing.FrozenSet': (2, 7), 'typing.ChainMap': (3, 3), 'typing.Counter': (2, 7), 'typing.DefaultDict': (2, 7), 'typing.Deque': (2, 7), } # type: Final reverse_builtin_aliases = { 'builtins.list': 'typing.List', 'builtins.dict': 'typing.Dict', 'builtins.set': 'typing.Set', 'builtins.frozenset': 'typing.FrozenSet', } # type: Final nongen_builtins = {'builtins.tuple': 'typing.Tuple', 'builtins.enumerate': ''} # type: Final nongen_builtins.update((name, alias) for alias, name in type_aliases.items()) RUNTIME_PROTOCOL_DECOS = ('typing.runtime_checkable', 'typing_extensions.runtime', 'typing_extensions.runtime_checkable') # type: Final class Node(Context): """Common base class for all non-type parse tree nodes.""" __slots__ = () def __str__(self) -> str: ans = self.accept(mypy.strconv.StrConv()) if ans is None: return repr(self) return ans def accept(self, visitor: NodeVisitor[T]) -> T: raise RuntimeError('Not implemented') @trait class Statement(Node): """A statement node.""" __slots__ = () def accept(self, visitor: StatementVisitor[T]) -> T: raise RuntimeError('Not implemented') @trait class Expression(Node): """An expression node.""" __slots__ = () def accept(self, visitor: ExpressionVisitor[T]) -> T: raise RuntimeError('Not implemented') class FakeExpression(Expression): """A dummy expression. We need a dummy expression in one place, and can't instantiate Expression because it is a trait and mypyc barfs. """ pass # TODO: # Lvalue = Union['NameExpr', 'MemberExpr', 'IndexExpr', 'SuperExpr', 'StarExpr' # 'TupleExpr']; see #1783. Lvalue = Expression @trait class SymbolNode(Node): """Nodes that can be stored in a symbol table.""" __slots__ = () @property @abstractmethod def name(self) -> str: pass # fullname can often be None even though the type system # disagrees. We mark this with Bogus to let mypyc know not to # worry about it. 
# Items: fullname, related symbol table node, surrounding type (if any)
Definition = Tuple[str, 'SymbolTableNode', Optional['TypeInfo']]


class MypyFile(SymbolNode):
    """The abstract syntax tree of a single source file."""

    # Fully qualified module name
    _fullname = None  # type: Bogus[str]
    # Path to the file (empty string if not known)
    path = ''
    # Top-level definitions and statements
    defs = None  # type: List[Statement]
    # Type alias dependencies as mapping from target to set of alias full names
    alias_deps = None  # type: DefaultDict[str, Set[str]]
    # Is there a UTF-8 BOM at the start?
    is_bom = False
    # Symbol table for the module's names.
    names = None  # type: SymbolTable
    # All import nodes within the file (also ones within functions etc.)
    imports = None  # type: List[ImportBase]
    # Lines on which to ignore certain errors when checking.
    # If the value is empty, ignore all errors; otherwise, the list contains all
    # error codes to ignore.
    ignored_lines = None  # type: Dict[int, List[str]]
    # Is this file represented by a stub file (.pyi)?
    is_stub = False
    # Is this loaded from the cache and thus missing the actual body of the file?
    is_cache_skeleton = False
    # Does this represent an __init__.pyi stub with a module __getattr__
    # (i.e. a partial stub package), for such packages we suppress any missing
    # module errors in addition to missing attribute errors.
    is_partial_stub_package = False
    # Plugin-created dependencies
    plugin_deps = None  # type: Dict[str, Set[str]]

    def __init__(self,
                 defs: List[Statement],
                 imports: List['ImportBase'],
                 is_bom: bool = False,
                 ignored_lines: Optional[Dict[int, List[str]]] = None) -> None:
        super().__init__()
        self.defs = defs
        self.line = 1  # Dummy line number
        self.imports = imports
        self.is_bom = is_bom
        self.alias_deps = defaultdict(set)
        self.plugin_deps = {}
        if ignored_lines:
            self.ignored_lines = ignored_lines
        else:
            self.ignored_lines = {}

    def local_definitions(self) -> Iterator[Definition]:
        """Return all definitions within the module (including nested).

        This doesn't include imported definitions.
        """
        return local_definitions(self.names, self.fullname)

    @property
    def name(self) -> str:
        # Last component of the fullname; '' when the fullname is unset.
        return '' if not self._fullname else self._fullname.split('.')[-1]

    @property
    def fullname(self) -> Bogus[str]:
        return self._fullname

    def accept(self, visitor: NodeVisitor[T]) -> T:
        return visitor.visit_mypy_file(self)

    def is_package_init_file(self) -> bool:
        # True for package initializer files (__init__.py / __init__.pyi).
        return len(self.path) != 0 and os.path.basename(self.path).startswith('__init__.')

    def serialize(self) -> JsonDict:
        # NOTE: defs/imports are not serialized -- a deserialized file is a
        # cache skeleton (see deserialize below).
        return {'.class': 'MypyFile',
                '_fullname': self._fullname,
                'names': self.names.serialize(self._fullname),
                'is_stub': self.is_stub,
                'path': self.path,
                'is_partial_stub_package': self.is_partial_stub_package,
                }

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'MypyFile':
        assert data['.class'] == 'MypyFile', data
        tree = MypyFile([], [])
        tree._fullname = data['_fullname']
        tree.names = SymbolTable.deserialize(data['names'])
        tree.is_stub = data['is_stub']
        tree.path = data['path']
        tree.is_partial_stub_package = data['is_partial_stub_package']
        # The body was not serialized, so mark this as a skeleton.
        tree.is_cache_skeleton = True
        return tree
class Import(ImportBase):
    """A plain ``import m [as n]`` statement."""

    # One (module id, "as" id) pair per imported module; the second element
    # is None when no alias was given.
    ids = None  # type: List[Tuple[str, Optional[str]]]

    def __init__(self, ids: List[Tuple[str, Optional[str]]]) -> None:
        super().__init__()
        self.ids = ids

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_import(self)


class ImportFrom(ImportBase):
    """A ``from m import x [as y], ...`` statement."""

    id = None  # type: str
    relative = None  # type: int
    # One (name, "as" name) pair per imported name.
    names = None  # type: List[Tuple[str, Optional[str]]]

    def __init__(self, id: str, relative: int,
                 names: List[Tuple[str, Optional[str]]]) -> None:
        super().__init__()
        self.id = id
        self.relative = relative
        self.names = names

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_import_from(self)


class ImportAll(ImportBase):
    """A ``from m import *`` statement."""

    id = None  # type: str
    relative = None  # type: int
    # NOTE: Only filled and used by old semantic analyzer.
    imported_names = None  # type: List[str]

    def __init__(self, id: str, relative: int) -> None:
        super().__init__()
        self.imported_names = []
        self.id = id
        self.relative = relative

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_import_all(self)
After semantic analysis pass 2, these references should be replaced with direct reference to a real AST node. Note that this is neither a Statement nor an Expression so this can't be visited. """ def __init__(self, target_fullname: str) -> None: super().__init__() self.target_fullname = target_fullname @property def name(self) -> str: return self.target_fullname.split('.')[-1] @property def fullname(self) -> str: return self.target_fullname def serialize(self) -> JsonDict: assert False, "ImportedName leaked from semantic analysis" @classmethod def deserialize(cls, data: JsonDict) -> 'ImportedName': assert False, "ImportedName should never be serialized" def __str__(self) -> str: return 'ImportedName(%s)' % self.target_fullname FUNCBASE_FLAGS = [ 'is_property', 'is_class', 'is_static', 'is_final' ] # type: Final class FuncBase(Node): """Abstract base class for function-like nodes. N.B: Although this has SymbolNode subclasses (FuncDef, OverloadedFuncDef), avoid calling isinstance(..., FuncBase) on something that is typed as SymbolNode. This is to work around mypy bug #3603, in which mypy doesn't understand multiple inheritance very well, and will assume that a SymbolNode cannot be a FuncBase. Instead, test against SYMBOL_FUNCBASE_TYPES, which enumerates SymbolNode subclasses that are also FuncBase subclasses. """ __slots__ = ('type', 'unanalyzed_type', 'info', 'is_property', 'is_class', # Uses "@classmethod" (explicit or implicit) 'is_static', # Uses "@staticmethod" 'is_final', # Uses "@final" '_fullname', ) def __init__(self) -> None: super().__init__() # Type signature. This is usually CallableType or Overloaded, but it can be # something else for decorated functions. 
OverloadPart = Union['FuncDef', 'Decorator']


class OverloadedFuncDef(FuncBase, SymbolNode, Statement):
    """A logical node representing all the variants of a multi-declaration function.

    A multi-declaration function is often an @overload, but can also be a
    @property with a setter and a/or a deleter.

    This node has no explicit representation in the source program.
    Overloaded variants must be consecutive in the source file.
    """

    # The overload variants (possibly decorated).
    items = None  # type: List[OverloadPart]
    # Copy of the variants as originally parsed, before semantic analysis.
    unanalyzed_items = None  # type: List[OverloadPart]
    # The non-@overload implementation, if any.
    impl = None  # type: Optional[OverloadPart]

    def __init__(self, items: List['OverloadPart']) -> None:
        super().__init__()
        self.items = items
        self.unanalyzed_items = items.copy()
        self.impl = None
        if len(items) > 0:
            # Use the position of the first variant for this node.
            self.set_line(items[0].line, items[0].column)
        self.is_final = False

    @property
    def name(self) -> str:
        if self.items:
            return self.items[0].name
        else:
            # This may happen for malformed overload
            assert self.impl is not None
            return self.impl.name

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_overloaded_func_def(self)

    def serialize(self) -> JsonDict:
        return {'.class': 'OverloadedFuncDef',
                'items': [i.serialize() for i in self.items],
                'type': None if self.type is None else self.type.serialize(),
                'fullname': self._fullname,
                'impl': None if self.impl is None else self.impl.serialize(),
                'flags': get_flags(self, FUNCBASE_FLAGS),
                }

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'OverloadedFuncDef':
        assert data['.class'] == 'OverloadedFuncDef'
        res = OverloadedFuncDef([
            cast(OverloadPart, SymbolNode.deserialize(d))
            for d in data['items']])
        if data.get('impl') is not None:
            res.impl = cast(OverloadPart, SymbolNode.deserialize(data['impl']))
            # set line for empty overload items, as not set in __init__
            # NOTE(review): the comment above says "empty items" but the guard
            # below checks for non-empty items -- confirm the intended condition.
            if len(res.items) > 0:
                res.set_line(res.impl.line)
        if data.get('type') is not None:
            typ = mypy.types.deserialize_type(data['type'])
            assert isinstance(typ, mypy.types.ProperType)
            res.type = typ
        res._fullname = data['fullname']
        set_flags(res, data['flags'])
        # NOTE: res.info will be set in the fixup phase.
        return res
FUNCDEF_FLAGS = FUNCITEM_FLAGS + [
    'is_decorated', 'is_conditional', 'is_abstract',
]  # type: Final


class FuncDef(FuncItem, SymbolNode, Statement):
    """Function definition.

    This is a non-lambda function defined using 'def'.
    """

    __slots__ = ('_name',
                 'is_decorated',
                 'is_conditional',
                 'is_abstract',
                 'original_def',
                 )

    def __init__(self,
                 name: str,              # Function name
                 arguments: List[Argument],
                 body: 'Block',
                 typ: 'Optional[mypy.types.FunctionLike]' = None) -> None:
        super().__init__(arguments, body, typ)
        self._name = name
        self.is_decorated = False
        self.is_conditional = False             # Defined conditionally (within block)?
        self.is_abstract = False
        self.is_final = False
        # Original conditional definition
        self.original_def = None  # type: Union[None, FuncDef, Var, Decorator]

    @property
    def name(self) -> str:
        return self._name

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_func_def(self)

    def serialize(self) -> JsonDict:
        # We're deliberately omitting arguments and storing only arg_names and
        # arg_kinds for space-saving reasons (arguments is not used in later
        # stages of mypy).
        # TODO: After a FuncDef is deserialized, the only time we use `arg_names`
        # and `arg_kinds` is when `type` is None and we need to infer a type. Can
        # we store the inferred type ahead of time?
        return {'.class': 'FuncDef',
                'name': self._name,
                'fullname': self._fullname,
                'arg_names': self.arg_names,
                'arg_kinds': self.arg_kinds,
                'type': None if self.type is None else self.type.serialize(),
                'flags': get_flags(self, FUNCDEF_FLAGS),
                # TODO: Do we need expanded, original_def?
                }

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'FuncDef':
        assert data['.class'] == 'FuncDef'
        body = Block([])
        ret = FuncDef(data['name'],
                      [],
                      body,
                      (None if data['type'] is None
                       else cast(mypy.types.FunctionLike,
                                 mypy.types.deserialize_type(data['type']))))
        ret._fullname = data['fullname']
        set_flags(ret, data['flags'])
        # NOTE: ret.info is set in the fixup phase.
        ret.arg_names = data['arg_names']
        ret.arg_kinds = data['arg_kinds']
        # Leave these uninitialized so that future uses will trigger an error
        del ret.arguments
        del ret.max_pos
        del ret.min_args
        return ret


# All types that are both SymbolNodes and FuncBases. See the FuncBase
# docstring for the rationale.
SYMBOL_FUNCBASE_TYPES = (OverloadedFuncDef, FuncDef)
""" func = None # type: FuncDef # Decorated function decorators = None # type: List[Expression] # Decorators (may be empty) # Some decorators are removed by semanal, keep the original here. original_decorators = None # type: List[Expression] # TODO: This is mostly used for the type; consider replacing with a 'type' attribute var = None # type: Var # Represents the decorated function obj is_overload = False def __init__(self, func: FuncDef, decorators: List[Expression], var: 'Var') -> None: super().__init__() self.func = func self.decorators = decorators self.original_decorators = decorators.copy() self.var = var self.is_overload = False @property def name(self) -> str: return self.func.name @property def fullname(self) -> Bogus[str]: return self.func.fullname @property def is_final(self) -> bool: return self.func.is_final @property def info(self) -> 'TypeInfo': return self.func.info @property def type(self) -> 'Optional[mypy.types.Type]': return self.var.type def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_decorator(self) def serialize(self) -> JsonDict: return {'.class': 'Decorator', 'func': self.func.serialize(), 'var': self.var.serialize(), 'is_overload': self.is_overload, } @classmethod def deserialize(cls, data: JsonDict) -> 'Decorator': assert data['.class'] == 'Decorator' dec = Decorator(FuncDef.deserialize(data['func']), [], Var.deserialize(data['var'])) dec.is_overload = data['is_overload'] return dec VAR_FLAGS = [ 'is_self', 'is_initialized_in_class', 'is_staticmethod', 'is_classmethod', 'is_property', 'is_settable_property', 'is_suppressed_import', 'is_classvar', 'is_abstract_var', 'is_final', 'final_unset_in_class', 'final_set_in_init', 'explicit_self_type', 'is_ready', ] # type: Final class Var(SymbolNode): """A variable. It can refer to global/local variable or a data attribute. 
""" __slots__ = ('_name', '_fullname', 'info', 'type', 'final_value', 'is_self', 'is_ready', 'is_inferred', 'is_initialized_in_class', 'is_staticmethod', 'is_classmethod', 'is_property', 'is_settable_property', 'is_classvar', 'is_abstract_var', 'is_final', 'final_unset_in_class', 'final_set_in_init', 'is_suppressed_import', 'explicit_self_type', 'from_module_getattr', ) def __init__(self, name: str, type: 'Optional[mypy.types.Type]' = None) -> None: super().__init__() self._name = name # Name without module prefix # TODO: Should be Optional[str] self._fullname = cast('Bogus[str]', None) # Name with module prefix # TODO: Should be Optional[TypeInfo] self.info = VAR_NO_INFO self.type = type # type: Optional[mypy.types.Type] # Declared or inferred type, or None # Is this the first argument to an ordinary method (usually "self")? self.is_self = False self.is_ready = True # If inferred, is the inferred type available? self.is_inferred = (self.type is None) # Is this initialized explicitly to a non-None value in class body? self.is_initialized_in_class = False self.is_staticmethod = False self.is_classmethod = False self.is_property = False self.is_settable_property = False self.is_classvar = False self.is_abstract_var = False # Set to true when this variable refers to a module we were unable to # parse for some reason (eg a silenced module) self.is_suppressed_import = False # Was this "variable" (rather a constant) defined as Final[...]? self.is_final = False # If constant value is a simple literal, # store the literal value (unboxed) for the benefit of # tools like mypyc. 
self.final_value = None # type: Optional[Union[int, float, bool, str]] # Where the value was set (only for class attributes) self.final_unset_in_class = False self.final_set_in_init = False # This is True for a variable that was declared on self with an explicit type: # class C: # def __init__(self) -> None: # self.x: int # This case is important because this defines a new Var, even if there is one # present in a superclass (without explict type this doesn't create a new Var). # See SemanticAnalyzer.analyze_member_lvalue() for details. self.explicit_self_type = False # If True, this is an implicit Var created due to module-level __getattr__. self.from_module_getattr = False @property def name(self) -> str: return self._name @property def fullname(self) -> Bogus[str]: return self._fullname def accept(self, visitor: NodeVisitor[T]) -> T: return visitor.visit_var(self) def serialize(self) -> JsonDict: # TODO: Leave default values out? # NOTE: Sometimes self.is_ready is False here, but we don't care. 
class ClassDef(Statement):
    """Class definition."""

    name = None  # type: str                      # Name of the class without module prefix
    fullname = None  # type: Bogus[str]           # Fully qualified name of the class
    defs = None  # type: Block
    type_vars = None  # type: List[mypy.types.TypeVarDef]
    # Base class expressions (not semantically analyzed -- can be arbitrary expressions)
    base_type_exprs = None  # type: List[Expression]
    # Special base classes like Generic[...] get moved here during semantic analysis
    removed_base_type_exprs = None  # type: List[Expression]
    info = None  # type: TypeInfo                 # Related TypeInfo
    metaclass = None  # type: Optional[Expression]
    decorators = None  # type: List[Expression]
    keywords = None  # type: OrderedDict[str, Expression]
    analyzed = None  # type: Optional[Expression]
    has_incompatible_baseclass = False

    def __init__(self,
                 name: str,
                 defs: 'Block',
                 type_vars: Optional[List['mypy.types.TypeVarDef']] = None,
                 base_type_exprs: Optional[List[Expression]] = None,
                 metaclass: Optional[Expression] = None,
                 keywords: Optional[List[Tuple[str, Expression]]] = None) -> None:
        super().__init__()
        self.name = name
        self.defs = defs
        self.type_vars = type_vars or []
        self.base_type_exprs = base_type_exprs or []
        self.removed_base_type_exprs = []
        self.info = CLASSDEF_NO_INFO
        self.metaclass = metaclass
        self.decorators = []
        self.keywords = OrderedDict(keywords or [])

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_class_def(self)

    def is_generic(self) -> bool:
        return self.info.is_generic()

    def serialize(self) -> JsonDict:
        # Not serialized: defs, base_type_exprs, metaclass, decorators,
        # analyzed (for named tuples etc.)
        return {'.class': 'ClassDef',
                'name': self.name,
                'fullname': self.fullname,
                'type_vars': [v.serialize() for v in self.type_vars],
                }

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'ClassDef':
        # Fix: the first parameter of this classmethod was named 'self',
        # which is misleading for a @classmethod (PEP 8: use 'cls').
        # Purely an internal rename; callers are unaffected.
        assert data['.class'] == 'ClassDef'
        res = ClassDef(data['name'],
                       Block([]),
                       [mypy.types.TypeVarDef.deserialize(v) for v in data['type_vars']],
                       )
        res.fullname = data['fullname']
        return res
x = y = z), assignments that define new names, and assignments with explicit types ("# type: t" or "x: t [= ...]"). An lvalue can be NameExpr, TupleExpr, ListExpr, MemberExpr, or IndexExpr. """ lvalues = None # type: List[Lvalue] # This is a TempNode if and only if no rvalue (x: t). rvalue = None # type: Expression # Declared type in a comment, may be None. type = None # type: Optional[mypy.types.Type] # Original, not semantically analyzed type in annotation (used for reprocessing) unanalyzed_type = None # type: Optional[mypy.types.Type] # This indicates usage of PEP 526 type annotation syntax in assignment. new_syntax = False # type: bool # Does this assignment define a type alias? is_alias_def = False # Is this a final definition? # Final attributes can't be re-assigned once set, and can't be overridden # in a subclass. This flag is not set if an attempted declaration was found to # be invalid during semantic analysis. It is still set to `True` if # a final declaration overrides another final declaration (this is checked # during type checking when MROs are known). 
class OperatorAssignmentStmt(Statement):
    """Operator assignment statement such as ``x += 1``."""

    op = ''
    lvalue = None  # type: Lvalue
    rvalue = None  # type: Expression

    def __init__(self, op: str, lvalue: Lvalue, rvalue: Expression) -> None:
        super().__init__()
        self.rvalue = rvalue
        self.lvalue = lvalue
        self.op = op

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_operator_assignment_stmt(self)


class WhileStmt(Statement):
    """A ``while`` loop with an optional ``else`` block."""

    expr = None  # type: Expression
    body = None  # type: Block
    else_body = None  # type: Optional[Block]

    def __init__(self, expr: Expression, body: Block, else_body: Optional[Block]) -> None:
        super().__init__()
        self.else_body = else_body
        self.body = body
        self.expr = expr

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_while_stmt(self)
class ReturnStmt(Statement):
    """A ``return`` statement; the returned expression may be absent."""

    expr = None  # type: Optional[Expression]

    def __init__(self, expr: Optional[Expression]) -> None:
        super().__init__()
        self.expr = expr

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_return_stmt(self)


class AssertStmt(Statement):
    """An ``assert`` statement with an optional failure message."""

    expr = None  # type: Expression
    msg = None  # type: Optional[Expression]

    def __init__(self, expr: Expression, msg: Optional[Expression] = None) -> None:
        super().__init__()
        self.msg = msg
        self.expr = expr

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_assert_stmt(self)


class DelStmt(Statement):
    """A ``del`` statement; the target is an lvalue."""

    expr = None  # type: Lvalue

    def __init__(self, expr: Lvalue) -> None:
        super().__init__()
        self.expr = expr

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_del_stmt(self)


class BreakStmt(Statement):
    """A ``break`` statement."""

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_break_stmt(self)


class ContinueStmt(Statement):
    """A ``continue`` statement."""

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_continue_stmt(self)


class PassStmt(Statement):
    """A ``pass`` statement."""

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_pass_stmt(self)


class IfStmt(Statement):
    """An ``if`` statement; parallel lists hold each condition and its block."""

    expr = None  # type: List[Expression]
    body = None  # type: List[Block]
    else_body = None  # type: Optional[Block]

    def __init__(self, expr: List[Expression], body: List[Block],
                 else_body: Optional[Block]) -> None:
        super().__init__()
        self.else_body = else_body
        self.body = body
        self.expr = expr

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_if_stmt(self)
class TryStmt(Statement):
    """A ``try`` statement with handlers and optional else/finally blocks."""

    body = None  # type: Block                          # Try body
    # Plain 'except:' also possible
    types = None  # type: List[Optional[Expression]]    # Except type expressions
    vars = None  # type: List[Optional[NameExpr]]       # Except variable names
    handlers = None  # type: List[Block]                # Except bodies
    else_body = None  # type: Optional[Block]
    finally_body = None  # type: Optional[Block]

    def __init__(self, body: Block, vars: List['Optional[NameExpr]'],
                 types: List[Optional[Expression]],
                 handlers: List[Block], else_body: Optional[Block],
                 finally_body: Optional[Block]) -> None:
        super().__init__()
        # The types/vars/handlers lists are parallel: one entry per handler.
        self.finally_body = finally_body
        self.else_body = else_body
        self.handlers = handlers
        self.types = types
        self.vars = vars
        self.body = body

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_try_stmt(self)


class WithStmt(Statement):
    """A ``with`` statement; expr/target lists are parallel."""

    expr = None  # type: List[Expression]
    target = None  # type: List[Optional[Lvalue]]
    # Type given by type comments for target, can be None
    unanalyzed_type = None  # type: Optional[mypy.types.Type]
    # Semantically analyzed types from type comment (TypeList type expanded)
    analyzed_types = None  # type: List[mypy.types.Type]
    body = None  # type: Block
    is_async = False  # True if `async with ...` (PEP 492, Python 3.5)

    def __init__(self, expr: List[Expression], target: List[Optional[Lvalue]],
                 body: Block, target_type: 'Optional[mypy.types.Type]' = None) -> None:
        super().__init__()
        self.analyzed_types = []
        self.unanalyzed_type = target_type
        self.target = target
        self.expr = expr
        self.body = body

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_with_stmt(self)
class ExecStmt(Statement):
    """Python 2 exec statement"""

    expr = None  # type: Expression
    globals = None  # type: Optional[Expression]
    locals = None  # type: Optional[Expression]

    def __init__(self, expr: Expression,
                 globals: Optional[Expression],
                 locals: Optional[Expression]) -> None:
        super().__init__()
        self.locals = locals
        self.globals = globals
        self.expr = expr

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_exec_stmt(self)


# Expressions


class IntExpr(Expression):
    """Integer literal"""

    value = 0

    def __init__(self, value: int) -> None:
        super().__init__()
        self.value = value

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_int_expr(self)


# How mypy uses StrExpr, BytesExpr, and UnicodeExpr:
# In Python 2 mode:
# b'x', 'x' -> StrExpr
# u'x' -> UnicodeExpr
# BytesExpr is unused
#
# In Python 3 mode:
# b'x' -> BytesExpr
# 'x', u'x' -> StrExpr
# UnicodeExpr is unused
# # This field keeps track of that information. from_python_3 = True def __init__(self, value: str, from_python_3: bool = False) -> None: super().__init__() self.value = value self.from_python_3 = from_python_3 def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_str_expr(self) class BytesExpr(Expression): """Bytes literal""" # Note: we deliberately do NOT use bytes here because it ends up # unnecessarily complicating a lot of the result logic. For example, # we'd have to worry about converting the bytes into a format we can # easily serialize/deserialize to and from JSON, would have to worry # about turning the bytes into a human-readable representation in # error messages... # # It's more convenient to just store the human-readable representation # from the very start. value = '' def __init__(self, value: str) -> None: super().__init__() self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_bytes_expr(self) class UnicodeExpr(Expression): """Unicode literal (Python 2.x)""" value = '' def __init__(self, value: str) -> None: super().__init__() self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_unicode_expr(self) class FloatExpr(Expression): """Float literal""" value = 0.0 def __init__(self, value: float) -> None: super().__init__() self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_float_expr(self) class ComplexExpr(Expression): """Complex literal""" def __init__(self, value: complex) -> None: super().__init__() self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_complex_expr(self) class EllipsisExpr(Expression): """Ellipsis (...)""" def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_ellipsis(self) class StarExpr(Expression): """Star expression""" expr = None # type: Expression def __init__(self, expr: Expression) -> None: super().__init__() self.expr = 
expr # Whether this starred expression is used in a tuple/list and as lvalue self.valid = False def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_star_expr(self) class RefExpr(Expression): """Abstract base class for name-like constructs""" __slots__ = ('kind', 'node', 'fullname', 'is_new_def', 'is_inferred_def', 'is_alias_rvalue') def __init__(self) -> None: super().__init__() # LDEF/GDEF/MDEF/... (None if not available) self.kind = None # type: Optional[int] # Var, FuncDef or TypeInfo that describes this self.node = None # type: Optional[SymbolNode] # Fully qualified name (or name if not global) self.fullname = None # type: Optional[str] # Does this define a new name? self.is_new_def = False # Does this define a new name with inferred type? # # For members, after semantic analysis, this does not take base # classes into consideration at all; the type checker deals with these. self.is_inferred_def = False # Is this expression appears as an rvalue of a valid type alias definition? self.is_alias_rvalue = False class NameExpr(RefExpr): """Name expression This refers to a local name, global name or a module. """ __slots__ = ('name', 'is_special_form') def __init__(self, name: str) -> None: super().__init__() self.name = name # Name referred to (may be qualified) # Is this a l.h.s. of a special form assignment like typed dict or type variable? self.is_special_form = False def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_name_expr(self) def serialize(self) -> JsonDict: assert False, "Serializing NameExpr: %s" % (self,) class MemberExpr(RefExpr): """Member access expression x.y""" __slots__ = ('expr', 'name', 'def_var') def __init__(self, expr: Expression, name: str) -> None: super().__init__() self.expr = expr self.name = name # The variable node related to a definition through 'self.x = '. # The nodes of other kinds of member expressions are resolved during type checking. 
self.def_var = None # type: Optional[Var] def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_member_expr(self) # Kinds of arguments # Positional argument ARG_POS = 0 # type: Final[int] # Positional, optional argument (functions only, not calls) ARG_OPT = 1 # type: Final[int] # *arg argument ARG_STAR = 2 # type: Final[int] # Keyword argument x=y in call, or keyword-only function arg ARG_NAMED = 3 # type: Final[int] # **arg argument ARG_STAR2 = 4 # type: Final[int] # In an argument list, keyword-only and also optional ARG_NAMED_OPT = 5 # type: Final[int] class CallExpr(Expression): """Call expression. This can also represent several special forms that are syntactically calls such as cast(...) and None # type: .... """ __slots__ = ('callee', 'args', 'arg_kinds', 'arg_names', 'analyzed') def __init__(self, callee: Expression, args: List[Expression], arg_kinds: List[int], arg_names: List[Optional[str]], analyzed: Optional[Expression] = None) -> None: super().__init__() if not arg_names: arg_names = [None] * len(args) self.callee = callee self.args = args self.arg_kinds = arg_kinds # ARG_ constants # Each name can be None if not a keyword argument. self.arg_names = arg_names # type: List[Optional[str]] # If not None, the node that represents the meaning of the CallExpr. For # cast(...) this is a CastExpr. 
        self.analyzed = analyzed

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_call_expr(self)


class YieldFromExpr(Expression):
    """Yield from expression (yield from expr)."""

    expr = None  # type: Expression

    def __init__(self, expr: Expression) -> None:
        super().__init__()
        self.expr = expr

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_yield_from_expr(self)


class YieldExpr(Expression):
    """Yield expression (yield expr, or a bare yield when expr is None)."""

    expr = None  # type: Optional[Expression]

    def __init__(self, expr: Optional[Expression]) -> None:
        super().__init__()
        self.expr = expr

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_yield_expr(self)


class IndexExpr(Expression):
    """Index expression x[y].

    Also wraps type application such as List[int] as a special form.
    """

    base = None  # type: Expression
    index = None  # type: Expression
    # Inferred __getitem__ method type
    method_type = None  # type: Optional[mypy.types.Type]
    # If not None, this is actually semantically a type application
    # Class[type, ...] or a type alias initializer.
    analyzed = None  # type: Union[TypeApplication, TypeAliasExpr, None]

    def __init__(self, base: Expression, index: Expression) -> None:
        super().__init__()
        self.base = base
        self.index = index
        self.analyzed = None

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_index_expr(self)


class UnaryExpr(Expression):
    """Unary operation"""

    op = ''
    expr = None  # type: Expression
    # Inferred operator method type
    method_type = None  # type: Optional[mypy.types.Type]

    def __init__(self, op: str, expr: Expression) -> None:
        super().__init__()
        self.op = op
        self.expr = expr

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_unary_expr(self)


class AssignmentExpr(Expression):
    """Assignment expressions in Python 3.8+, like "a := 2"."""

    def __init__(self, target: Expression, value: Expression) -> None:
        super().__init__()
        self.target = target
        self.value = value

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_assignment_expr(self)


# Map from
# binary operator id to related method name (in Python 3).
op_methods = {
    '+': '__add__',
    '-': '__sub__',
    '*': '__mul__',
    '/': '__truediv__',
    '%': '__mod__',
    'divmod': '__divmod__',
    '//': '__floordiv__',
    '**': '__pow__',
    '@': '__matmul__',
    '&': '__and__',
    '|': '__or__',
    '^': '__xor__',
    '<<': '__lshift__',
    '>>': '__rshift__',
    '==': '__eq__',
    '!=': '__ne__',
    '<': '__lt__',
    '>=': '__ge__',
    '>': '__gt__',
    '<=': '__le__',
    'in': '__contains__',
}  # type: Final[Dict[str, str]]

# Reverse mapping: method name -> operator symbol. '__div__' (Python 2
# division) is added manually since op_methods only lists '__truediv__'.
op_methods_to_symbols = {v: k for (k, v) in op_methods.items()}  # type: Final
op_methods_to_symbols['__div__'] = '/'

# Python 2 three-way comparison fallback method.
comparison_fallback_method = '__cmp__'  # type: Final
ops_falling_back_to_cmp = {'__ne__', '__eq__',
                           '__lt__', '__le__',
                           '__gt__', '__ge__'}  # type: Final


ops_with_inplace_method = {
    '+', '-', '*', '/', '%', '//', '**', '@', '&', '|', '^', '<<', '>>'}  # type: Final

# In-place variants, e.g. '__iadd__' derived from '__add__'.
inplace_operator_methods = set(
    '__i' + op_methods[op][2:] for op in ops_with_inplace_method)  # type: Final

reverse_op_methods = {
    '__add__': '__radd__',
    '__sub__': '__rsub__',
    '__mul__': '__rmul__',
    '__truediv__': '__rtruediv__',
    '__mod__': '__rmod__',
    '__divmod__': '__rdivmod__',
    '__floordiv__': '__rfloordiv__',
    '__pow__': '__rpow__',
    '__matmul__': '__rmatmul__',
    '__and__': '__rand__',
    '__or__': '__ror__',
    '__xor__': '__rxor__',
    '__lshift__': '__rlshift__',
    '__rshift__': '__rrshift__',
    '__eq__': '__eq__',
    '__ne__': '__ne__',
    '__lt__': '__gt__',
    '__ge__': '__le__',
    '__gt__': '__lt__',
    '__le__': '__ge__',
}  # type: Final

# Suppose we have some class A. When we do A() + A(), Python will only check
# the output of A().__add__(A()) and skip calling the __radd__ method entirely.
# This shortcut is used only for the following methods: op_methods_that_shortcut = { '__add__', '__sub__', '__mul__', '__div__', '__truediv__', '__mod__', '__divmod__', '__floordiv__', '__pow__', '__matmul__', '__and__', '__or__', '__xor__', '__lshift__', '__rshift__', } # type: Final normal_from_reverse_op = dict((m, n) for n, m in reverse_op_methods.items()) # type: Final reverse_op_method_set = set(reverse_op_methods.values()) # type: Final unary_op_methods = { '-': '__neg__', '+': '__pos__', '~': '__invert__', } # type: Final class OpExpr(Expression): """Binary operation (other than . or [] or comparison operators, which have specific nodes).""" op = '' left = None # type: Expression right = None # type: Expression # Inferred type for the operator method type (when relevant). method_type = None # type: Optional[mypy.types.Type] # Is the right side going to be evaluated every time? right_always = False # Is the right side unreachable? right_unreachable = False def __init__(self, op: str, left: Expression, right: Expression) -> None: super().__init__() self.op = op self.left = left self.right = right def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_op_expr(self) class ComparisonExpr(Expression): """Comparison expression (e.g. a < b > c < d).""" operators = None # type: List[str] operands = None # type: List[Expression] # Inferred type for the operator methods (when relevant; None for 'is'). method_types = None # type: List[Optional[mypy.types.Type]] def __init__(self, operators: List[str], operands: List[Expression]) -> None: super().__init__() self.operators = operators self.operands = operands self.method_types = [] def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_comparison_expr(self) class SliceExpr(Expression): """Slice expression (e.g. 'x:y', 'x:', '::2' or ':'). This is only valid as index in index expressions. 
    """

    begin_index = None  # type: Optional[Expression]
    end_index = None  # type: Optional[Expression]
    stride = None  # type: Optional[Expression]

    def __init__(self, begin_index: Optional[Expression],
                 end_index: Optional[Expression],
                 stride: Optional[Expression]) -> None:
        super().__init__()
        self.begin_index = begin_index
        self.end_index = end_index
        self.stride = stride

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_slice_expr(self)


class CastExpr(Expression):
    """Cast expression cast(type, expr)."""

    expr = None  # type: Expression
    type = None  # type: mypy.types.Type

    def __init__(self, expr: Expression, typ: 'mypy.types.Type') -> None:
        super().__init__()
        self.expr = expr
        self.type = typ

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_cast_expr(self)


class RevealExpr(Expression):
    """Reveal type expression reveal_type(expr) or reveal_locals() expression."""

    expr = None  # type: Optional[Expression]
    # One of the REVEAL_* kind constants.
    kind = 0  # type: int
    local_nodes = None  # type: Optional[List[Var]]

    def __init__(
            self, kind: int,
            expr: Optional[Expression] = None,
            local_nodes: 'Optional[List[Var]]' = None) -> None:
        super().__init__()
        self.expr = expr
        self.kind = kind
        self.local_nodes = local_nodes

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_reveal_expr(self)


class SuperExpr(Expression):
    """Expression super().name"""

    name = ''
    info = None  # type: Optional[TypeInfo]  # Type that contains this super expression
    call = None  # type: CallExpr  # The expression super(...)
    def __init__(self, name: str, call: CallExpr) -> None:
        super().__init__()
        self.name = name
        self.call = call

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_super_expr(self)


class LambdaExpr(FuncItem, Expression):
    """Lambda expression"""

    @property
    def name(self) -> str:
        # Lambdas are anonymous; they have no name.
        return ''

    def expr(self) -> Expression:
        """Return the expression (the body) of the lambda."""
        ret = cast(ReturnStmt, self.body.body[-1])
        expr = ret.expr
        assert expr is not None  # lambda can't have empty body
        return expr

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_lambda_expr(self)

    def is_dynamic(self) -> bool:
        return False


class ListExpr(Expression):
    """List literal expression [...]."""

    items = None  # type: List[Expression]

    def __init__(self, items: List[Expression]) -> None:
        super().__init__()
        self.items = items

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_list_expr(self)


class DictExpr(Expression):
    """Dictionary literal expression {key: value, ...}."""

    # Key is None for '**'-unpacked entries.
    items = None  # type: List[Tuple[Optional[Expression], Expression]]

    def __init__(self, items: List[Tuple[Optional[Expression], Expression]]) -> None:
        super().__init__()
        self.items = items

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_dict_expr(self)


class TupleExpr(Expression):
    """Tuple literal expression (..., ...)

    Also lvalue sequences (..., ...) and [..., ...]"""

    items = None  # type: List[Expression]

    def __init__(self, items: List[Expression]) -> None:
        super().__init__()
        self.items = items

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_tuple_expr(self)


class SetExpr(Expression):
    """Set literal expression {value, ...}."""

    items = None  # type: List[Expression]

    def __init__(self, items: List[Expression]) -> None:
        super().__init__()
        self.items = items

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_set_expr(self)


class GeneratorExpr(Expression):
    """Generator expression ... for ...
    in ... [ for ... in ... ] [ if ... ]."""

    left_expr = None  # type: Expression
    # sequences, condlists, is_async and indices are parallel lists with
    # one entry per 'for' clause.
    sequences = None  # type: List[Expression]
    condlists = None  # type: List[List[Expression]]
    is_async = None  # type: List[bool]
    indices = None  # type: List[Lvalue]

    def __init__(self, left_expr: Expression, indices: List[Lvalue],
                 sequences: List[Expression], condlists: List[List[Expression]],
                 is_async: List[bool]) -> None:
        super().__init__()
        self.left_expr = left_expr
        self.sequences = sequences
        self.condlists = condlists
        self.indices = indices
        self.is_async = is_async

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_generator_expr(self)


class ListComprehension(Expression):
    """List comprehension (e.g. [x + 1 for x in a])"""

    generator = None  # type: GeneratorExpr

    def __init__(self, generator: GeneratorExpr) -> None:
        super().__init__()
        self.generator = generator

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_list_comprehension(self)


class SetComprehension(Expression):
    """Set comprehension (e.g. {x + 1 for x in a})"""

    generator = None  # type: GeneratorExpr

    def __init__(self, generator: GeneratorExpr) -> None:
        super().__init__()
        self.generator = generator

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_set_comprehension(self)


class DictionaryComprehension(Expression):
    """Dictionary comprehension (e.g.
    {k: v for k, v in a}"""

    key = None  # type: Expression
    value = None  # type: Expression
    # sequences, condlists, is_async and indices are parallel lists with
    # one entry per 'for' clause.
    sequences = None  # type: List[Expression]
    condlists = None  # type: List[List[Expression]]
    is_async = None  # type: List[bool]
    indices = None  # type: List[Lvalue]

    def __init__(self, key: Expression, value: Expression, indices: List[Lvalue],
                 sequences: List[Expression], condlists: List[List[Expression]],
                 is_async: List[bool]) -> None:
        super().__init__()
        self.key = key
        self.value = value
        self.sequences = sequences
        self.condlists = condlists
        self.indices = indices
        self.is_async = is_async

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_dictionary_comprehension(self)


class ConditionalExpr(Expression):
    """Conditional expression (e.g. x if y else z)"""

    cond = None  # type: Expression
    if_expr = None  # type: Expression
    else_expr = None  # type: Expression

    def __init__(self, cond: Expression, if_expr: Expression, else_expr: Expression) -> None:
        super().__init__()
        self.cond = cond
        self.if_expr = if_expr
        self.else_expr = else_expr

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_conditional_expr(self)


class BackquoteExpr(Expression):
    """Python 2 expression `...`."""

    expr = None  # type: Expression

    def __init__(self, expr: Expression) -> None:
        super().__init__()
        self.expr = expr

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_backquote_expr(self)


class TypeApplication(Expression):
    """Type application expr[type, ...]"""

    expr = None  # type: Expression
    types = None  # type: List[mypy.types.Type]

    def __init__(self, expr: Expression, types: List['mypy.types.Type']) -> None:
        super().__init__()
        self.expr = expr
        self.types = types

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_type_application(self)


# Variance of a type variable. For example, T in the definition of
# List[T] is invariant, so List[int] is not a subtype of List[object],
# and also List[object] is not a subtype of List[int].
# # The T in Iterable[T] is covariant, so Iterable[int] is a subtype of # Iterable[object], but not vice versa. # # If T is contravariant in Foo[T], Foo[object] is a subtype of # Foo[int], but not vice versa. INVARIANT = 0 # type: Final[int] COVARIANT = 1 # type: Final[int] CONTRAVARIANT = 2 # type: Final[int] class TypeVarExpr(SymbolNode, Expression): """Type variable expression TypeVar(...). This is also used to represent type variables in symbol tables. A type variable is not valid as a type unless bound in a TypeVarScope. That happens within: 1. a generic class that uses the type variable as a type argument or 2. a generic function that refers to the type variable in its signature. """ _name = '' _fullname = '' # Value restriction: only types in the list are valid as values. If the # list is empty, there is no restriction. values = None # type: List[mypy.types.Type] # Upper bound: only subtypes of upper_bound are valid as values. By default # this is 'object', meaning no restriction. upper_bound = None # type: mypy.types.Type # Variance of the type variable. Invariant is the default. # TypeVar(..., covariant=True) defines a covariant type variable. # TypeVar(..., contravariant=True) defines a contravariant type # variable. 
    variance = INVARIANT

    def __init__(self, name: str, fullname: str, values: List['mypy.types.Type'],
                 upper_bound: 'mypy.types.Type', variance: int = INVARIANT) -> None:
        super().__init__()
        self._name = name
        self._fullname = fullname
        self.values = values
        self.upper_bound = upper_bound
        self.variance = variance

    @property
    def name(self) -> str:
        return self._name

    @property
    def fullname(self) -> str:
        return self._fullname

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_type_var_expr(self)

    def serialize(self) -> JsonDict:
        return {'.class': 'TypeVarExpr',
                'name': self._name,
                'fullname': self._fullname,
                'values': [t.serialize() for t in self.values],
                'upper_bound': self.upper_bound.serialize(),
                'variance': self.variance,
                }

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'TypeVarExpr':
        assert data['.class'] == 'TypeVarExpr'
        return TypeVarExpr(data['name'],
                           data['fullname'],
                           [mypy.types.deserialize_type(v) for v in data['values']],
                           mypy.types.deserialize_type(data['upper_bound']),
                           data['variance'])


class TypeAliasExpr(Expression):
    """Type alias expression (rvalue)."""

    # The target type.
    type = None  # type: mypy.types.Type
    # Names of unbound type variables used to define the alias
    tvars = None  # type: List[str]
    # Whether this alias was defined in bare form. Used to distinguish
    # between
    #     A = List
    # and
    #     A = List[Any]
    no_args = False  # type: bool

    def __init__(self, node: 'TypeAlias') -> None:
        super().__init__()
        self.type = node.target
        self.tvars = node.alias_tvars
        self.no_args = node.no_args
        self.node = node

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_type_alias_expr(self)


class NamedTupleExpr(Expression):
    """Named tuple expression namedtuple(...)
    or NamedTuple(...)."""

    # The class representation of this named tuple (its tuple_type attribute contains
    # the tuple item types)
    info = None  # type: TypeInfo
    is_typed = False  # whether this class was created with typing.NamedTuple

    def __init__(self, info: 'TypeInfo', is_typed: bool = False) -> None:
        super().__init__()
        self.info = info
        self.is_typed = is_typed

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_namedtuple_expr(self)


class TypedDictExpr(Expression):
    """Typed dict expression TypedDict(...)."""

    # The class representation of this typed dict
    info = None  # type: TypeInfo

    def __init__(self, info: 'TypeInfo') -> None:
        super().__init__()
        self.info = info

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_typeddict_expr(self)


class EnumCallExpr(Expression):
    """Named tuple expression Enum('name', 'val1 val2 ...')."""

    # The class representation of this enumerated type
    info = None  # type: TypeInfo
    # The item names (for debugging)
    items = None  # type: List[str]
    values = None  # type: List[Optional[Expression]]

    def __init__(self, info: 'TypeInfo', items: List[str],
                 values: List[Optional[Expression]]) -> None:
        super().__init__()
        self.info = info
        self.items = items
        self.values = values

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_enum_call_expr(self)


class PromoteExpr(Expression):
    """Ducktype class decorator expression _promote(...)."""

    type = None  # type: mypy.types.Type

    def __init__(self, type: 'mypy.types.Type') -> None:
        super().__init__()
        self.type = type

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit__promote_expr(self)


class NewTypeExpr(Expression):
    """NewType expression NewType(...)."""

    name = None  # type: str
    # The base type (the second argument to NewType)
    old_type = None  # type: Optional[mypy.types.Type]
    # The synthesized class representing the new type (inherits old_type)
    info = None  # type: Optional[TypeInfo]

    def __init__(self, name: str, old_type:
                 'Optional[mypy.types.Type]', line: int,
                 column: int) -> None:
        super().__init__()
        self.name = name
        self.old_type = old_type
        self.line = line
        self.column = column

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_newtype_expr(self)


class AwaitExpr(Expression):
    """Await expression (await ...)."""

    expr = None  # type: Expression

    def __init__(self, expr: Expression) -> None:
        super().__init__()
        self.expr = expr

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_await_expr(self)


# Constants


class TempNode(Expression):
    """Temporary dummy node used during type checking.

    This node is not present in the original program; it is just an artifact
    of the type checker implementation. It only represents an opaque node with
    some fixed type.
    """

    type = None  # type: mypy.types.Type
    # Is this TempNode used to indicate absence of a right hand side in an annotated assignment?
    # (e.g. for 'x: int' the rvalue is TempNode(AnyType(TypeOfAny.special_form), no_rhs=True))
    no_rhs = False  # type: bool

    def __init__(self,
                 typ: 'mypy.types.Type',
                 no_rhs: bool = False,
                 *,
                 context: Optional[Context] = None) -> None:
        """Construct a dummy node; optionally borrow line/column from context object."""
        super().__init__()
        self.type = typ
        self.no_rhs = no_rhs
        if context is not None:
            self.line = context.line
            self.column = context.column

    def __repr__(self) -> str:
        return 'TempNode:%d(%s)' % (self.line, str(self.type))

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_temp_node(self)


class TypeInfo(SymbolNode):
    """The type structure of a single class.

    Each TypeInfo corresponds one-to-one to a ClassDef, which
    represents the AST of the class.

    In type-theory terms, this is a "type constructor", and if the
    class is generic then it will be a type constructor of higher kind.
    Where the class is used in an actual type, it's in the form of an
    Instance, which amounts to a type application of the tycon to
    the appropriate number of arguments.
""" _fullname = None # type: Bogus[str] # Fully qualified name # Fully qualified name for the module this type was defined in. This # information is also in the fullname, but is harder to extract in the # case of nested class definitions. module_name = None # type: str defn = None # type: ClassDef # Corresponding ClassDef # Method Resolution Order: the order of looking up attributes. The first # value always to refers to this class. mro = None # type: List[TypeInfo] # Used to stash the names of the mro classes temporarily between # deserialization and fixup. See deserialize() for why. _mro_refs = None # type: Optional[List[str]] bad_mro = False # Could not construct full MRO declared_metaclass = None # type: Optional[mypy.types.Instance] metaclass_type = None # type: Optional[mypy.types.Instance] names = None # type: SymbolTable # Names defined directly in this type is_abstract = False # Does the class have any abstract attributes? is_protocol = False # Is this a protocol class? runtime_protocol = False # Does this protocol support isinstance checks? abstract_attributes = None # type: List[str] # The attributes 'assuming' and 'assuming_proper' represent structural subtype matrices. # # In languages with structural subtyping, one can keep a global subtype matrix like this: # . A B C . # A 1 0 0 # B 1 1 1 # C 1 0 1 # . # where 1 indicates that the type in corresponding row is a subtype of the type # in corresponding column. This matrix typically starts filled with all 1's and # a typechecker tries to "disprove" every subtyping relation using atomic (or nominal) types. # However, we don't want to keep this huge global state. Instead, we keep the subtype # information in the form of list of pairs (subtype, supertype) shared by all 'Instance's # with given supertype's TypeInfo. When we enter a subtype check we push a pair in this list # thus assuming that we started with 1 in corresponding matrix element. 
Such algorithm allows # to treat recursive and mutually recursive protocols and other kinds of complex situations. # # If concurrent/parallel type checking will be added in future, # then there should be one matrix per thread/process to avoid false negatives # during the type checking phase. assuming = None # type: List[Tuple[mypy.types.Instance, mypy.types.Instance]] assuming_proper = None # type: List[Tuple[mypy.types.Instance, mypy.types.Instance]] # Ditto for temporary 'inferring' stack of recursive constraint inference. # It contains Instance's of protocol types that appeared as an argument to # constraints.infer_constraints(). We need 'inferring' to avoid infinite recursion for # recursive and mutually recursive protocols. # # We make 'assuming' and 'inferring' attributes here instead of passing they as kwargs, # since this would require to pass them in many dozens of calls. In particular, # there is a dependency infer_constraint -> is_subtype -> is_callable_subtype -> # -> infer_constraints. inferring = None # type: List[mypy.types.Instance] # 'inferring' and 'assuming' can't be made sets, since we need to use # is_same_type to correctly treat unions. # Classes inheriting from Enum shadow their true members with a __getattr__, so we # have to treat them as a special case. is_enum = False # If true, any unknown attributes should have type 'Any' instead # of generating a type error. This would be true if there is a # base class with type 'Any', but other use cases may be # possible. This is similar to having __getattr__ that returns Any # (and __setattr__), but without the __getattr__ method. fallback_to_any = False # Information related to type annotations. # Generic type variable names (full names) type_vars = None # type: List[str] # Direct base classes. bases = None # type: List[mypy.types.Instance] # Another type which this type will be treated as a subtype of, # even though it's not a subclass in Python. 
The non-standard # `@_promote` decorator introduces this, and there are also # several builtin examples, in particular `int` -> `float`. _promote = None # type: Optional[mypy.types.Type] # Representation of a Tuple[...] base class, if the class has any # (e.g., for named tuples). If this is not None, the actual Type # object used for this class is not an Instance but a TupleType; # the corresponding Instance is set as the fallback type of the # tuple type. tuple_type = None # type: Optional[mypy.types.TupleType] # Is this a named tuple type? is_named_tuple = False # If this class is defined by the TypedDict type constructor, # then this is not None. typeddict_type = None # type: Optional[mypy.types.TypedDictType] # Is this a newtype type? is_newtype = False # This is a dictionary that will be serialized and un-serialized as is. # It is useful for plugins to add their data to save in the cache. metadata = None # type: Dict[str, JsonDict] FLAGS = [ 'is_abstract', 'is_enum', 'fallback_to_any', 'is_named_tuple', 'is_newtype', 'is_protocol', 'runtime_protocol', 'is_final', ] # type: Final[List[str]] def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> None: """Initialize a TypeInfo.""" super().__init__() self.names = names self.defn = defn self.module_name = module_name self.type_vars = [] self.bases = [] self.mro = [] self._fullname = defn.fullname self.is_abstract = False self.abstract_attributes = [] self.assuming = [] self.assuming_proper = [] self.inferring = [] self.add_type_vars() self.metadata = {} self.is_final = False def add_type_vars(self) -> None: if self.defn.type_vars: for vd in self.defn.type_vars: self.type_vars.append(vd.fullname) @property def name(self) -> str: """Short name.""" return self.defn.name @property def fullname(self) -> Bogus[str]: return self._fullname def is_generic(self) -> bool: """Is the type generic (i.e. 
does it have type variables)?""" return len(self.type_vars) > 0 def get(self, name: str) -> 'Optional[SymbolTableNode]': for cls in self.mro: n = cls.names.get(name) if n: return n return None def get_containing_type_info(self, name: str) -> 'Optional[TypeInfo]': for cls in self.mro: if name in cls.names: return cls return None @property def protocol_members(self) -> List[str]: # Protocol members are names of all attributes/methods defined in a protocol # and in all its supertypes (except for 'object'). members = set() # type: Set[str] assert self.mro, "This property can be only accessed after MRO is (re-)calculated" for base in self.mro[:-1]: # we skip "object" since everyone implements it if base.is_protocol: for name in base.names: members.add(name) return sorted(list(members)) def __getitem__(self, name: str) -> 'SymbolTableNode': n = self.get(name) if n: return n else: raise KeyError(name) def __repr__(self) -> str: return '' % self.fullname def __bool__(self) -> bool: # We defined this here instead of just overriding it in # FakeInfo so that mypyc can generate a direct call instead of # using the generic bool handling. 
return not isinstance(self, FakeInfo) def has_readable_member(self, name: str) -> bool: return self.get(name) is not None def get_method(self, name: str) -> Optional[FuncBase]: for cls in self.mro: if name in cls.names: node = cls.names[name].node if isinstance(node, FuncBase): return node else: return None return None def calculate_metaclass_type(self) -> 'Optional[mypy.types.Instance]': declared = self.declared_metaclass if declared is not None and not declared.type.has_base('builtins.type'): return declared if self._fullname == 'builtins.type': return mypy.types.Instance(self, []) candidates = [s.declared_metaclass for s in self.mro if s.declared_metaclass is not None and s.declared_metaclass.type is not None] for c in candidates: if all(other.type in c.type.mro for other in candidates): return c return None def is_metaclass(self) -> bool: return (self.has_base('builtins.type') or self.fullname == 'abc.ABCMeta' or self.fallback_to_any) def has_base(self, fullname: str) -> bool: """Return True if type has a base type with the specified name. This can be either via extension or via implementation. """ for cls in self.mro: if cls.fullname == fullname: return True return False def direct_base_classes(self) -> 'List[TypeInfo]': """Return a direct base classes. Omit base classes of other base classes. """ return [base.type for base in self.bases] def __str__(self) -> str: """Return a string representation of the type. This includes the most important information about the type. 
""" return self.dump() def dump(self, str_conv: 'Optional[mypy.strconv.StrConv]' = None, type_str_conv: 'Optional[mypy.types.TypeStrVisitor]' = None) -> str: """Return a string dump of the contents of the TypeInfo.""" if not str_conv: str_conv = mypy.strconv.StrConv() base = '' # type: str def type_str(typ: 'mypy.types.Type') -> str: if type_str_conv: return typ.accept(type_str_conv) return str(typ) head = 'TypeInfo' + str_conv.format_id(self) if self.bases: base = 'Bases({})'.format(', '.join(type_str(base) for base in self.bases)) mro = 'Mro({})'.format(', '.join(item.fullname + str_conv.format_id(item) for item in self.mro)) names = [] for name in sorted(self.names): description = name + str_conv.format_id(self.names[name].node) node = self.names[name].node if isinstance(node, Var) and node.type: description += ' ({})'.format(type_str(node.type)) names.append(description) items = [ 'Name({})'.format(self.fullname), base, mro, ('Names', names), ] if self.declared_metaclass: items.append('DeclaredMetaclass({})'.format(type_str(self.declared_metaclass))) if self.metaclass_type: items.append('MetaclassType({})'.format(type_str(self.metaclass_type))) return mypy.strconv.dump_tagged( items, head, str_conv=str_conv) def serialize(self) -> JsonDict: # NOTE: This is where all ClassDefs originate, so there shouldn't be duplicates. 
        data = {'.class': 'TypeInfo',
                'module_name': self.module_name,
                'fullname': self.fullname,
                'names': self.names.serialize(self.fullname),
                'defn': self.defn.serialize(),
                'abstract_attributes': self.abstract_attributes,
                'type_vars': self.type_vars,
                'bases': [b.serialize() for b in self.bases],
                'mro': [c.fullname for c in self.mro],
                '_promote': None if self._promote is None else self._promote.serialize(),
                'declared_metaclass': (None if self.declared_metaclass is None
                                       else self.declared_metaclass.serialize()),
                'metaclass_type':
                    None if self.metaclass_type is None else self.metaclass_type.serialize(),
                'tuple_type': None if self.tuple_type is None else self.tuple_type.serialize(),
                'typeddict_type':
                    None if self.typeddict_type is None else self.typeddict_type.serialize(),
                'flags': get_flags(self, TypeInfo.FLAGS),
                'metadata': self.metadata,
                }
        return data

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'TypeInfo':
        """Reconstruct a TypeInfo from cache JSON (inverse of serialize())."""
        names = SymbolTable.deserialize(data['names'])
        defn = ClassDef.deserialize(data['defn'])
        module_name = data['module_name']
        ti = TypeInfo(names, defn, module_name)
        ti._fullname = data['fullname']
        # TODO: Is there a reason to reconstruct ti.subtypes?
        ti.abstract_attributes = data['abstract_attributes']
        ti.type_vars = data['type_vars']
        ti.bases = [mypy.types.Instance.deserialize(b) for b in data['bases']]
        ti._promote = (None if data['_promote'] is None
                       else mypy.types.deserialize_type(data['_promote']))
        ti.declared_metaclass = (None if data['declared_metaclass'] is None
                                 else mypy.types.Instance.deserialize(data['declared_metaclass']))
        ti.metaclass_type = (None if data['metaclass_type'] is None
                             else mypy.types.Instance.deserialize(data['metaclass_type']))
        # NOTE: ti.mro will be set in the fixup phase based on these
        # names.  The reason we need to store the mro instead of just
        # recomputing it from base classes has to do with a subtle
        # point about fine-grained incremental: the cache files might
        # not be loaded until after a class in the mro has changed its
        # bases, which causes the mro to change. If we recomputed our
        # mro, we would compute the *new* mro, which leaves us with no
        # way to detect that the mro has changed! Thus we need to make
        # sure to load the original mro so that once the class is
        # rechecked, it can tell that the mro has changed.
        ti._mro_refs = data['mro']
        ti.tuple_type = (None if data['tuple_type'] is None
                         else mypy.types.TupleType.deserialize(data['tuple_type']))
        ti.typeddict_type = (None if data['typeddict_type'] is None
                             else mypy.types.TypedDictType.deserialize(data['typeddict_type']))
        ti.metadata = data['metadata']
        set_flags(ti, data['flags'])
        return ti


class FakeInfo(TypeInfo):
    # types.py defines a single instance of this class, called types.NOT_READY.
    # This instance is used as a temporary placeholder in the process of de-serialization
    # of 'Instance' types. The de-serialization happens in two steps: In the first step,
    # Instance.type is set to NOT_READY. In the second step (in fixup.py) it is replaced by
    # an actual TypeInfo. If you see the assertion error below, then most probably something
    # went wrong during the second step and an 'Instance' that raised this error was not fixed.
    # Note:
    # 'None' is not used as a dummy value for two reasons:
    # 1. This will require around 80-100 asserts to make 'mypy --strict-optional mypy'
    #    pass cleanly.
    # 2. If NOT_READY value is accidentally used somewhere, it will be obvious where the value
    #    is from, whereas a 'None' value could come from anywhere.
    #
    # Additionally, this serves as a more general-purpose placeholder
    # for missing TypeInfos in a number of places where the excuses
    # for not being Optional are a little weaker.
    #
    # TypeInfo defines a __bool__ method that returns False for FakeInfo
    # so that it can be conveniently tested against in the same way that it
    # would be if things were properly optional.

    def __init__(self, msg: str) -> None:
        # 'msg' is raised by any attribute access on this placeholder.
        self.msg = msg

    def __getattribute__(self, attr: str) -> None:
        # Handle __class__ so that isinstance still works...
        if attr == '__class__':
            return object.__getattribute__(self, attr)
        raise AssertionError(object.__getattribute__(self, 'msg'))


VAR_NO_INFO = FakeInfo('Var is lacking info')  # type: Final[TypeInfo]
CLASSDEF_NO_INFO = FakeInfo('ClassDef is lacking info')  # type: Final[TypeInfo]
FUNC_NO_INFO = FakeInfo('FuncBase for non-methods lack info')  # type: Final[TypeInfo]


class TypeAlias(SymbolNode):
    """
    A symbol node representing a type alias.

    Type alias is a static concept, in contrast to variables with types
    like Type[...]. Namely:
        * type aliases
            - can be used in type context (annotations)
            - cannot be re-assigned
        * variables with type Type[...]
            - cannot be used in type context
            - but can be re-assigned

    An alias can be defined only by an assignment to a name (not any other lvalues).

    Such assignment defines an alias by default. To define a variable,
    an explicit Type[...] annotation is required. As an exception,
    at non-global scope non-subscripted rvalue creates a variable even without
    an annotation. This exception exists to accommodate the common use case of
    class-valued attributes. See SemanticAnalyzerPass2.check_and_set_up_type_alias
    for details.

    Aliases can be generic. Currently, mypy uses unbound type variables for
    generic aliases and identifies them by name. Essentially, type aliases
    work as macros that expand textually. The definition and expansion
    rules are following:

        1. An alias targeting a generic class without explicit variables act as
        the given class (this doesn't apply to Tuple and Callable, which are not
        proper classes but special type constructors):

            A = List
            AA = List[Any]

            x: A  # same as List[Any]
            x: A[int]  # same as List[int]

            x: AA  # same as List[Any]
            x: AA[int]  # Error!

            C = Callable  # Same as Callable[..., Any]
            T = Tuple  # Same as Tuple[Any, ...]

        2. An alias using explicit type variables in its rvalue expects
        replacements (type arguments) for these variables. If missing, they
        are treated as Any, like for other generics:

            B = List[Tuple[T, T]]

            x: B  # same as List[Tuple[Any, Any]]
            x: B[int]  # same as List[Tuple[int, int]]

            def f(x: B[T]) -> T: ...  # without T, Any would be used here

        3. An alias can be defined using another aliases. In the definition
        rvalue the Any substitution doesn't happen for top level unsubscripted
        generic classes:

            A = List
            B = A  # here A is expanded to List, _not_ List[Any],
                   # to match the Python runtime behaviour
            x: B[int]  # same as List[int]
            C = List[A]  # this expands to List[List[Any]]

            AA = List[T]
            D = AA  # here AA expands to List[Any]
            x: D[int]  # Error!

    Note: the fact that we support aliases like `A = List` means that the target
    type will be initially an instance type with wrong number of type arguments.
    Such instances are all fixed in the third pass of semantic analyzis.
    We therefore store the difference between `List` and `List[Any]` rvalues (targets)
    using the `no_args` flag. See also TypeAliasExpr.no_args.

    Meaning of other fields:

    target: The target type. For generic aliases contains unbound type variables
        as nested types.
    _fullname: Qualified name of this type alias. This is used in particular
        to track fine grained dependencies from aliases.
    alias_tvars: Names of unbound type variables used to define this alias.
    normalized: Used to distinguish between `A = List`, and `A = list`.
Both are internally stored using `builtins.list` (because `typing.List` is itself an alias), while the second cannot be subscripted because of Python runtime limitation. line and column: Line an column on the original alias definition. """ __slots__ = ('target', '_fullname', 'alias_tvars', 'no_args', 'normalized', 'line', 'column', '_is_recursive') def __init__(self, target: 'mypy.types.Type', fullname: str, line: int, column: int, *, alias_tvars: Optional[List[str]] = None, no_args: bool = False, normalized: bool = False) -> None: self._fullname = fullname self.target = target if alias_tvars is None: alias_tvars = [] self.alias_tvars = alias_tvars self.no_args = no_args self.normalized = normalized # This attribute is manipulated by TypeAliasType. If non-None, # it is the cached value. self._is_recursive = None # type: Optional[bool] super().__init__(line, column) @property def name(self) -> str: return self._fullname.split('.')[-1] @property def fullname(self) -> str: return self._fullname def serialize(self) -> JsonDict: data = {'.class': 'TypeAlias', 'fullname': self._fullname, 'target': self.target.serialize(), 'alias_tvars': self.alias_tvars, 'no_args': self.no_args, 'normalized': self.normalized, 'line': self.line, 'column': self.column } # type: JsonDict return data def accept(self, visitor: NodeVisitor[T]) -> T: return visitor.visit_type_alias(self) @classmethod def deserialize(cls, data: JsonDict) -> 'TypeAlias': assert data['.class'] == 'TypeAlias' fullname = data['fullname'] alias_tvars = data['alias_tvars'] target = mypy.types.deserialize_type(data['target']) no_args = data['no_args'] normalized = data['normalized'] line = data['line'] column = data['column'] return cls(target, fullname, line, column, alias_tvars=alias_tvars, no_args=no_args, normalized=normalized) class PlaceholderNode(SymbolNode): """Temporary symbol node that will later become a real SymbolNode. These are only present during semantic analysis when using the new semantic analyzer. 
    These are created if some essential dependencies of a definition are
    not yet complete.

    A typical use is for names imported from a module which is still
    incomplete (within an import cycle):

      from m import f  # Initially may create PlaceholderNode

    This is particularly important if the imported shadows a name from
    an enclosing scope or builtins:

      from m import int  # Placeholder avoids mixups with builtins.int

    Another case where this is useful is when there is another definition
    or assignment:

      from m import f
      def f() -> None: ...

    In the above example, the presence of PlaceholderNode allows us to
    handle the second definition as a redefinition.

    They are also used to create PlaceholderType instances for types
    that refer to incomplete types. Example:

      class C(Sequence[C]): ...

    We create a PlaceholderNode (with becomes_typeinfo=True) for C so
    that the type C in Sequence[C] can be bound.

    Attributes:

      fullname: Full name of the PlaceholderNode.
      node: AST node that contains the definition that caused this to
          be created. This is useful for tracking order of incomplete
          definitions and for debugging.
      becomes_typeinfo: If True, this refers something that could later
          become a TypeInfo. It can't be used with type variables, in
          particular, as this would cause issues with class type variable
          detection.

    The long-term purpose of placeholder nodes/types is to evolve into
    something that can support general recursive types.
    """

    def __init__(self, fullname: str, node: Node, line: int, *,
                 becomes_typeinfo: bool = False) -> None:
        self._fullname = fullname
        self.node = node
        self.becomes_typeinfo = becomes_typeinfo
        self.line = line

    @property
    def name(self) -> str:
        # Short (unqualified) name.
        return self._fullname.split('.')[-1]

    @property
    def fullname(self) -> str:
        return self._fullname

    def serialize(self) -> JsonDict:
        # Placeholders are transient and must never end up in a cache file.
        assert False, "PlaceholderNode can't be serialized"

    def accept(self, visitor: NodeVisitor[T]) -> T:
        return visitor.visit_placeholder_node(self)


class SymbolTableNode:
    """Description of a name binding in a symbol table.

    These are only used as values in module (global), function (local)
    and class symbol tables (see SymbolTable). The name that is bound is
    the key in SymbolTable.

    Symbol tables don't contain direct references to AST nodes primarily
    because there can be multiple symbol table references to a single
    AST node (due to imports and aliases), and different references can
    behave differently. This class describes the unique properties of
    each reference.

    The most fundamental attribute is 'node', which is the AST node that
    the name refers to.

    The kind is usually one of LDEF, GDEF or MDEF, depending on the scope
    of the definition. These three kinds can usually be used
    interchangeably and the difference between local, global and class
    scopes is mostly descriptive, with no semantic significance.
    However, some tools that consume mypy ASTs may care about these so
    they should be correct.

    Attributes:
        node: AST node of definition. Among others, this can be one of
            FuncDef, Var, TypeInfo, TypeVarExpr or MypyFile -- or None
            for cross_ref that hasn't been fixed up yet.
        kind: Kind of node. Possible values:
               - LDEF: local definition
               - GDEF: global (module-level) definition
               - MDEF: class member definition
               - UNBOUND_IMPORTED: temporary kind for imported names (we
                 don't know the final kind yet)
        module_public: If False, this name won't be imported via
            'from <module> import *'. This has no effect on names within
            classes.
        module_hidden: If True, the name will be never exported (needed for
            stub files)
        cross_ref: For deserialized MypyFile nodes, the referenced module
            name; for other nodes, optionally the name of the referenced object.
        implicit: Was this defined by assignment to self attribute?
        plugin_generated: Was this symbol generated by a plugin? (And
            therefore needs to be removed in aststrip.)
        no_serialize: Do not serialize this node if True. This is used to prevent
            keys in the cache that refer to modules on which this file does not
            depend. Currently this can happen if there is a module not in build
            used e.g. like this:
                import a.b.c # type: ignore
            This will add a submodule symbol to parent module `a` symbol table,
            but `a.b` is _not_ added as its dependency. Therefore, we should
            not serialize these symbols as they may not be found during fixup
            phase, instead they will be re-added during subsequent patch parents
            phase.
            TODO: Refactor build.py to make dependency tracking more transparent
            and/or refactor look-up functions to not require parent patching.

    NOTE: No other attributes should be added to this class unless they
    are shared by all node kinds.
    """

    __slots__ = ('kind',
                 'node',
                 'module_public',
                 'module_hidden',
                 'cross_ref',
                 'implicit',
                 'plugin_generated',
                 'no_serialize',
                 )

    def __init__(self,
                 kind: int,
                 node: Optional[SymbolNode],
                 module_public: bool = True,
                 implicit: bool = False,
                 module_hidden: bool = False,
                 *,
                 plugin_generated: bool = False,
                 no_serialize: bool = False) -> None:
        self.kind = kind
        self.node = node
        self.module_public = module_public
        self.implicit = implicit
        self.module_hidden = module_hidden
        # Filled in later for deserialized cross references.
        self.cross_ref = None  # type: Optional[str]
        self.plugin_generated = plugin_generated
        self.no_serialize = no_serialize

    @property
    def fullname(self) -> Optional[str]:
        if self.node is not None:
            return self.node.fullname
        else:
            return None

    @property
    def type(self) -> 'Optional[mypy.types.Type]':
        # Declared type of the referenced variable/function, if known.
        node = self.node
        if isinstance(node, (Var, SYMBOL_FUNCBASE_TYPES)) and node.type is not None:
            return node.type
        elif isinstance(node, Decorator):
            return node.var.type
        else:
            return None

    def copy(self) -> 'SymbolTableNode':
        # NOTE(review): plugin_generated and no_serialize are not carried over
        # here -- confirm whether that is intentional before relying on copy().
        new = SymbolTableNode(self.kind,
                              self.node,
                              self.module_public,
                              self.implicit,
                              self.module_hidden)
        new.cross_ref = self.cross_ref
        return new

    def __str__(self) -> str:
        s = '{}/{}'.format(node_kinds[self.kind], short_type(self.node))
        if isinstance(self.node, SymbolNode):
            s += ' ({})'.format(self.node.fullname)
        # Include declared type of variables and functions.
        if self.type is not None:
            s += ' : {}'.format(self.type)
        return s

    def serialize(self, prefix: str, name: str) -> JsonDict:
        """Serialize a SymbolTableNode.
Args: prefix: full name of the containing module or class; or None name: name of this object relative to the containing object """ data = {'.class': 'SymbolTableNode', 'kind': node_kinds[self.kind], } # type: JsonDict if self.module_hidden: data['module_hidden'] = True if not self.module_public: data['module_public'] = False if self.implicit: data['implicit'] = True if self.plugin_generated: data['plugin_generated'] = True if isinstance(self.node, MypyFile): data['cross_ref'] = self.node.fullname else: assert self.node is not None, '%s:%s' % (prefix, name) if prefix is not None: fullname = self.node.fullname if (fullname is not None and '.' in fullname and fullname != prefix + '.' + name and not (isinstance(self.node, Var) and self.node.from_module_getattr)): data['cross_ref'] = fullname return data data['node'] = self.node.serialize() return data @classmethod def deserialize(cls, data: JsonDict) -> 'SymbolTableNode': assert data['.class'] == 'SymbolTableNode' kind = inverse_node_kinds[data['kind']] if 'cross_ref' in data: # This will be fixed up later. stnode = SymbolTableNode(kind, None) stnode.cross_ref = data['cross_ref'] else: assert 'node' in data, data node = SymbolNode.deserialize(data['node']) stnode = SymbolTableNode(kind, node) if 'module_hidden' in data: stnode.module_hidden = data['module_hidden'] if 'module_public' in data: stnode.module_public = data['module_public'] if 'implicit' in data: stnode.implicit = data['implicit'] if 'plugin_generated' in data: stnode.plugin_generated = data['plugin_generated'] return stnode class SymbolTable(Dict[str, SymbolTableNode]): """Static representation of a namespace dictionary. This is used for module, class and function namespaces. """ def __str__(self) -> str: a = [] # type: List[str] for key, value in self.items(): # Filter out the implicit import of builtins. 
            if isinstance(value, SymbolTableNode):
                if (value.fullname != 'builtins' and
                        (value.fullname or '').split('.')[-1] not in implicit_module_attrs):
                    a.append(' ' + str(key) + ' : ' + str(value))
            else:
                # NOTE(review): upstream mypy appends '  <invalid item>' here;
                # the literal looks truncated in this copy -- confirm against
                # the repository.
                a.append(' ')
        a = sorted(a)
        a.insert(0, 'SymbolTable(')
        a[-1] += ')'
        return '\n'.join(a)

    def copy(self) -> 'SymbolTable':
        # Shallow-ish copy: the SymbolTableNode wrappers are copied, the
        # underlying AST nodes are shared.
        return SymbolTable([(key, node.copy())
                            for key, node in self.items()])

    def serialize(self, fullname: str) -> JsonDict:
        data = {'.class': 'SymbolTable'}  # type: JsonDict
        for key, value in self.items():
            # Skip __builtins__: it's a reference to the builtins
            # module that gets added to every module by
            # SemanticAnalyzerPass2.visit_file(), but it shouldn't be
            # accessed by users of the module.
            if key == '__builtins__' or value.no_serialize:
                continue
            data[key] = value.serialize(fullname, key)
        return data

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'SymbolTable':
        assert data['.class'] == 'SymbolTable'
        st = SymbolTable()
        for key, value in data.items():
            if key != '.class':
                st[key] = SymbolTableNode.deserialize(value)
        return st


def get_flags(node: Node, names: List[str]) -> List[str]:
    # Return the subset of flag names that are set (truthy) on 'node'.
    return [name for name in names if getattr(node, name)]


def set_flags(node: Node, flags: List[str]) -> None:
    # Set every named flag attribute on 'node' to True.
    for name in flags:
        setattr(node, name, True)


def get_member_expr_fullname(expr: MemberExpr) -> Optional[str]:
    """Return the qualified name representation of a member expression.

    Return a string of form foo.bar, foo.bar.baz, or similar, or None if the
    argument cannot be represented in this form.
    """
    initial = None  # type: Optional[str]
    if isinstance(expr.expr, NameExpr):
        initial = expr.expr.name
    elif isinstance(expr.expr, MemberExpr):
        initial = get_member_expr_fullname(expr.expr)
    else:
        return None
    return '{}.{}'.format(initial, expr.name)


# Map from '.class' tags to the deserialize method of each SymbolNode
# subclass defined in this module (used when loading cache files).
deserialize_map = {
    key: obj.deserialize
    for key, obj in globals().items()
    if type(obj) is not FakeInfo
    and isinstance(obj, type) and issubclass(obj, SymbolNode) and obj is not SymbolNode
}  # type: Final


def check_arg_kinds(arg_kinds: List[int], nodes: List[T], fail: Callable[[str, T], None]) -> None:
    # Validate the ordering of argument kinds in a signature, reporting the
    # first violation through 'fail' and stopping there.
    is_var_arg = False
    is_kw_arg = False
    seen_named = False
    seen_opt = False
    for kind, node in zip(arg_kinds, nodes):
        if kind == ARG_POS:
            if is_var_arg or is_kw_arg or seen_named or seen_opt:
                fail("Required positional args may not appear "
                     "after default, named or var args",
                     node)
                break
        elif kind == ARG_OPT:
            if is_var_arg or is_kw_arg or seen_named:
                fail("Positional default args may not appear after named or var args", node)
                break
            seen_opt = True
        elif kind == ARG_STAR:
            if is_var_arg or is_kw_arg or seen_named:
                fail("Var args may not appear after named or var args", node)
                break
            is_var_arg = True
        elif kind == ARG_NAMED or kind == ARG_NAMED_OPT:
            seen_named = True
            if is_kw_arg:
                fail("A **kwargs argument must be the last argument", node)
                break
        elif kind == ARG_STAR2:
            if is_kw_arg:
                fail("You may only have one **kwargs argument", node)
                break
            is_kw_arg = True


def check_arg_names(names: Sequence[Optional[str]], nodes: List[T],
                    fail: Callable[[str, T], None],
                    description: str = 'function definition') -> None:
    # Report (via 'fail') the first duplicated argument name, if any.
    seen_names = set()  # type: Set[Optional[str]]
    for name, node in zip(names, nodes):
        if name is not None and name in seen_names:
            fail("Duplicate argument '{}' in {}".format(name, description), node)
            break
        seen_names.add(name)


def is_class_var(expr: NameExpr) -> bool:
    """Return whether the expression is ClassVar[...]"""
    if isinstance(expr.node, Var):
        return expr.node.is_classvar
    return False


def is_final_node(node: Optional[SymbolNode])
-> bool: """Check whether `node` corresponds to a final attribute.""" return isinstance(node, (Var, FuncDef, OverloadedFuncDef, Decorator)) and node.is_final def local_definitions(names: SymbolTable, name_prefix: str, info: Optional[TypeInfo] = None) -> Iterator[Definition]: """Iterate over local definitions (not imported) in a symbol table. Recursively iterate over class members and nested classes. """ # TODO: What should the name be? Or maybe remove it? for name, symnode in names.items(): shortname = name if '-redef' in name: # Restore original name from mangled name of multiply defined function shortname = name.split('-redef')[0] fullname = name_prefix + '.' + shortname node = symnode.node if node and node.fullname == fullname: yield fullname, symnode, info if isinstance(node, TypeInfo): yield from local_definitions(node.names, fullname, node) mypy-0.761/mypy/options.py0000644€tŠÔÚ€2›s®0000003737313576752246022000 0ustar jukkaDROPBOX\Domain Users00000000000000from collections import OrderedDict import re import pprint import sys from typing_extensions import Final from typing import Dict, List, Mapping, Optional, Pattern, Set, Tuple, Callable, Any from mypy import defaults from mypy.util import get_class_descriptors, replace_object_state class BuildType: STANDARD = 0 # type: Final[int] MODULE = 1 # type: Final[int] PROGRAM_TEXT = 2 # type: Final[int] PER_MODULE_OPTIONS = { # Please keep this list sorted "allow_untyped_globals", "allow_redefinition", "strict_equality", "always_false", "always_true", "check_untyped_defs", "debug_cache", "disallow_any_decorated", "disallow_any_explicit", "disallow_any_expr", "disallow_any_generics", "disallow_any_unimported", "disallow_incomplete_defs", "disallow_subclassing_any", "disallow_untyped_calls", "disallow_untyped_decorators", "disallow_untyped_defs", "follow_imports", "follow_imports_for_stubs", "ignore_errors", "ignore_missing_imports", "local_partial_types", "mypyc", "no_implicit_optional", "implicit_reexport", 
"show_none_errors", "strict_optional", "strict_optional_whitelist", "warn_no_return", "warn_return_any", "warn_unreachable", "warn_unused_ignores", } # type: Final OPTIONS_AFFECTING_CACHE = ((PER_MODULE_OPTIONS | {"platform", "bazel", "plugins"}) - {"debug_cache"}) # type: Final class Options: """Options collected from flags.""" def __init__(self) -> None: # Cache for clone_for_module() self._per_module_cache = None # type: Optional[Dict[str, Options]] # -- build options -- self.build_type = BuildType.STANDARD self.python_version = sys.version_info[:2] # type: Tuple[int, int] # The executable used to search for PEP 561 packages. If this is None, # then mypy does not search for PEP 561 packages. self.python_executable = sys.executable # type: Optional[str] self.platform = sys.platform self.custom_typing_module = None # type: Optional[str] self.custom_typeshed_dir = None # type: Optional[str] self.mypy_path = [] # type: List[str] self.report_dirs = {} # type: Dict[str, str] # Show errors in PEP 561 packages/site-packages modules self.no_silence_site_packages = False self.ignore_missing_imports = False self.follow_imports = 'normal' # normal|silent|skip|error # Whether to respect the follow_imports setting even for stub files. # Intended to be used for disabling specific stubs. 
        self.follow_imports_for_stubs = False
        # PEP 420 namespace packages
        self.namespace_packages = False

        # disallow_any options
        self.disallow_any_generics = False
        self.disallow_any_unimported = False
        self.disallow_any_expr = False
        self.disallow_any_decorated = False
        self.disallow_any_explicit = False

        # Disallow calling untyped functions from typed ones
        self.disallow_untyped_calls = False

        # Disallow defining untyped (or incompletely typed) functions
        self.disallow_untyped_defs = False

        # Disallow defining incompletely typed functions
        self.disallow_incomplete_defs = False

        # Type check unannotated functions
        self.check_untyped_defs = False

        # Disallow decorating typed functions with untyped decorators
        self.disallow_untyped_decorators = False

        # Disallow subclassing values of type 'Any'
        self.disallow_subclassing_any = False

        # Also check typeshed for missing annotations
        self.warn_incomplete_stub = False

        # Warn about casting an expression to its inferred type
        self.warn_redundant_casts = False

        # Warn about falling off the end of a function returning non-None
        self.warn_no_return = True

        # Warn about returning objects of type Any when the function is
        # declared with a precise type
        self.warn_return_any = False

        # Warn about unused '# type: ignore' comments
        self.warn_unused_ignores = False

        # Warn about unused '[mypy-<pattern>]' config sections
        self.warn_unused_configs = False

        # Files in which to ignore all non-fatal errors
        self.ignore_errors = False

        # Apply strict None checking
        self.strict_optional = True

        # Show "note: In function "foo":" messages.
        self.show_error_context = False

        # Use nicer output (when possible).
        self.color_output = True
        self.error_summary = True

        # Files in which to allow strict-Optional related errors
        # TODO: Kill this in favor of show_none_errors
        self.strict_optional_whitelist = None  # type: Optional[List[str]]

        # Alternate way to show/hide strict-None-checking related errors
        self.show_none_errors = True

        # Don't assume arguments with default values of None are Optional
        self.no_implicit_optional = False

        # Don't re-export names unless they are imported with `from ... as ...`
        self.implicit_reexport = True

        # Suppress toplevel errors caused by missing annotations
        self.allow_untyped_globals = False

        # Allow variable to be redefined with an arbitrary type in the same block
        # and the same nesting level as the initialization
        self.allow_redefinition = False

        # Prohibit equality, identity, and container checks for non-overlapping types.
        # This makes 1 == '1', 1 in ['1'], and 1 is '1' errors.
        self.strict_equality = False

        # Report an error for any branches inferred to be unreachable as a result of
        # type analysis.
        self.warn_unreachable = False

        # Variable names considered True
        self.always_true = []  # type: List[str]

        # Variable names considered False
        self.always_false = []  # type: List[str]

        # Use script name instead of __main__
        self.scripts_are_modules = False

        # Config file name
        self.config_file = None  # type: Optional[str]

        # A filename containing a JSON mapping from filenames to
        # mtime/size/hash arrays, used to avoid having to recalculate
        # source hashes as often.
        self.quickstart_file = None  # type: Optional[str]

        # A comma-separated list of files/directories for mypy to type check;
        # supports globbing
        self.files = None  # type: Optional[List[str]]

        # Write junit.xml to given file
        self.junit_xml = None  # type: Optional[str]

        # Caching and incremental checking options
        self.incremental = True
        self.cache_dir = defaults.CACHE_DIR
        self.sqlite_cache = False
        self.debug_cache = False
        self.skip_version_check = False
        self.skip_cache_mtime_checks = False
        self.fine_grained_incremental = False
        # Include fine-grained dependencies in written cache files
        self.cache_fine_grained = False
        # Read cache files in fine-grained incremental mode (cache must include dependencies)
        self.use_fine_grained_cache = False

        # Tune certain behaviors when being used as a front-end to mypyc. Set per-module
        # in modules being compiled. Not in the config file or command line.
        self.mypyc = False

        # Disable the memory optimization of freeing ASTs when
        # possible. This isn't exposed as a command line option
        # because it is intended for software integrating with
        # mypy. (Like mypyc.)
        self.preserve_asts = False

        # Paths of user plugins
        self.plugins = []  # type: List[str]

        # Per-module options (raw)
        self.per_module_options = OrderedDict()  # type: OrderedDict[str, Dict[str, object]]
        self._glob_options = []  # type: List[Tuple[str, Pattern[str]]]
        self.unused_configs = set()  # type: Set[str]

        # -- development options --
        self.verbosity = 0  # More verbose messages (for troubleshooting)
        self.pdb = False
        self.show_traceback = False
        self.raise_exceptions = False
        self.dump_type_stats = False
        self.dump_inference_stats = False
        self.dump_build_stats = False

        # -- test options --
        # Stop after the semantic analysis phase
        self.semantic_analysis_only = False

        # Use stub builtins fixtures to speed up tests
        self.use_builtins_fixtures = False

        # -- experimental options --
        self.shadow_file = None  # type: Optional[List[List[str]]]
        self.show_column_numbers = False  # type: bool
        self.show_error_codes = False
        # Use soft word wrap and show trimmed source snippets with error location markers.
        self.pretty = False
        self.dump_graph = False
        self.dump_deps = False
        self.logical_deps = False
        # If True, partial types can't span a module top level and a function
        self.local_partial_types = False
        # Some behaviors are changed when using Bazel (https://bazel.build).
        self.bazel = False
        # If True, export inferred types for all expressions as BuildResult.types
        self.export_types = False
        # List of package roots -- directories under these are packages even
        # if they don't have __init__.py.
        self.package_root = []  # type: List[str]
        self.cache_map = {}  # type: Dict[str, Tuple[str, str]]
        # Don't properly free objects on exit, just kill the current process.
        self.fast_exit = False
        # Used to transform source code before parsing if not None
        # TODO: Make the type precise (AnyStr -> AnyStr)
        self.transform_source = None  # type: Optional[Callable[[Any], Any]]
        # Print full path to each file in the report.
        self.show_absolute_path = False  # type: bool

    # To avoid breaking plugin compatibility, keep providing new_semantic_analyzer
    @property
    def new_semantic_analyzer(self) -> bool:
        return True

    def snapshot(self) -> object:
        """Produce a comparable snapshot of this Option"""
        # Under mypyc, we don't have a __dict__, so we need to do worse things.
        d = dict(getattr(self, '__dict__', ()))
        for k in get_class_descriptors(Options):
            if hasattr(self, k) and k != "new_semantic_analyzer":
                d[k] = getattr(self, k)
        # Remove private attributes from snapshot
        d = {k: v for k, v in d.items() if not k.startswith('_')}
        return d

    def __repr__(self) -> str:
        return 'Options({})'.format(pprint.pformat(self.snapshot()))

    def apply_changes(self, changes: Dict[str, object]) -> 'Options':
        # Return a copy of self with the given option overrides applied.
        new_options = Options()
        # Under mypyc, we don't have a __dict__, so we need to do worse things.
        replace_object_state(new_options, self, copy_dict=True)
        for key, value in changes.items():
            setattr(new_options, key, value)
        return new_options

    def build_per_module_cache(self) -> None:
        self._per_module_cache = {}

        # Config precedence is as follows:
        #  1. Concrete section names: foo.bar.baz
        #  2. "Unstructured" glob patterns: foo.*.baz, in the order
        #     they appear in the file (last wins)
        #  3. "Well-structured" wildcard patterns: foo.bar.*, in specificity order.

        # Since structured configs inherit from structured configs above them in the hierarchy,
        # we need to process per-module configs in a careful order.
        # We have to process foo.* before foo.bar.* before foo.bar,
        # and we need to apply *.bar to foo.bar but not to foo.bar.*.
        # To do this, process all well-structured glob configs before non-glob configs and
        # exploit the fact that foo.* sorts earlier ASCIIbetically (unicodebetically?)
        # than foo.bar.*.
        # (A section being "processed last" results in its config "winning".)
        # Unstructured glob configs are stored and are all checked for each module.
        unstructured_glob_keys = [k for k in self.per_module_options.keys()
                                  if '*' in k[:-1]]
        structured_keys = [k for k in self.per_module_options.keys()
                           if '*' not in k[:-1]]
        wildcards = sorted(k for k in structured_keys if k.endswith('.*'))
        concrete = [k for k in structured_keys if not k.endswith('.*')]

        for glob in unstructured_glob_keys:
            self._glob_options.append((glob, self.compile_glob(glob)))

        # We (for ease of implementation) treat unstructured glob
        # sections as used if any real modules use them or if any
        # concrete config sections use them. This means we need to
        # track which get used while constructing.
        self.unused_configs = set(unstructured_glob_keys)

        for key in wildcards + concrete:
            # Find what the options for this key would be, just based
            # on inheriting from parent configs.
            options = self.clone_for_module(key)
            # And then update it with its per-module options.
            self._per_module_cache[key] = options.apply_changes(self.per_module_options[key])

        # Add the more structured sections into unused configs, since
        # they only count as used if actually used by a real module.
        self.unused_configs.update(structured_keys)

    def clone_for_module(self, module: str) -> 'Options':
        """Create an Options object that incorporates per-module options.

        NOTE: Once this method is called all Options objects should be
        considered read-only, else the caching might be incorrect.
        """
        if self._per_module_cache is None:
            self.build_per_module_cache()
        assert self._per_module_cache is not None

        # If the module just directly has a config entry, use it.
        if module in self._per_module_cache:
            self.unused_configs.discard(module)
            return self._per_module_cache[module]

        # If not, search for glob paths at all the parents. So if we are looking for
        # options for foo.bar.baz, we search foo.bar.baz.*, foo.bar.*, foo.*,
        # in that order, looking for an entry.
        # This is technically quadratic in the length of the path, but module paths
        # don't actually get all that long.
        options = self
        path = module.split('.')
        for i in range(len(path), 0, -1):
            key = '.'.join(path[:i] + ['*'])
            if key in self._per_module_cache:
                self.unused_configs.discard(key)
                options = self._per_module_cache[key]
                break

        # OK and *now* we need to look for unstructured glob matches.
        # We only do this for concrete modules, not structured wildcards.
        if not module.endswith('.*'):
            for key, pattern in self._glob_options:
                if pattern.match(module):
                    self.unused_configs.discard(key)
                    options = options.apply_changes(self.per_module_options[key])

        # We could update the cache to directly point to modules once
        # they have been looked up, but in testing this made things
        # slower and not faster, so we don't bother.

        return options

    def compile_glob(self, s: str) -> Pattern[str]:
        # Compile one of the glob patterns to a regex so that '.*' can
        # match *zero or more* module sections. This means we compile
        # '.*' into '(\..*)?'.
        parts = s.split('.')
        expr = re.escape(parts[0]) if parts[0] != '*' else '.*'
        for part in parts[1:]:
            expr += re.escape('.' + part) if part != '*' else r'(\..*)?'
        return re.compile(expr + '\\Z')

    def select_options_affecting_cache(self) -> Mapping[str, object]:
        # Snapshot of just the options whose changes invalidate the cache.
        return {opt: getattr(self, opt) for opt in OPTIONS_AFFECTING_CACHE}
mypy-0.761/mypy/parse.py0000644€tŠÔÚ€2›s®0000000252313576752246021404 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Union, Optional

from mypy.errors import Errors
from mypy.options import Options
from mypy.nodes import MypyFile


def parse(source: Union[str, bytes],
          fnam: str,
          module: Optional[str],
          errors: Optional[Errors],
          options: Options) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree. If errors is not provided, raise ParseError
    on failure. Otherwise, use the errors object to report parse errors.

    The python_version (major, minor) option determines the Python syntax variant.
""" is_stub_file = fnam.endswith('.pyi') if options.transform_source is not None: source = options.transform_source(source) if options.python_version[0] >= 3 or is_stub_file: import mypy.fastparse return mypy.fastparse.parse(source, fnam=fnam, module=module, errors=errors, options=options) else: import mypy.fastparse2 return mypy.fastparse2.parse(source, fnam=fnam, module=module, errors=errors, options=options) mypy-0.761/mypy/plugin.py0000644€tŠÔÚ€2›s®0000007727613576752246021611 0ustar jukkaDROPBOX\Domain Users00000000000000"""Plugin system for extending mypy. At large scale the plugin system works as following: * Plugins are collected from the corresponding mypy config file option (either via paths to Python files, or installed Python modules) and imported using importlib. * Every module should get an entry point function (called 'plugin' by default, but may be overridden in the config file) that should accept a single string argument that is a full mypy version (includes git commit hash for dev versions) and return a subclass of mypy.plugins.Plugin. * All plugin class constructors should match the signature of mypy.plugin.Plugin (i.e. should accept an mypy.options.Options object), and *must* call super().__init__(). * At several steps during semantic analysis and type checking mypy calls special `get_xxx` methods on user plugins with a single string argument that is a fully qualified name (full name) of a relevant definition (see mypy.plugin.Plugin method docstrings for details). * The plugins are called in the order they are passed in the config option. Every plugin must decide whether to act on a given full name. The first plugin that returns non-None object will be used. * The above decision should be made using the limited common API specified by mypy.plugin.CommonPluginApi. * The callback returned by the plugin will be called with a larger context that includes relevant current state (e.g. 
a default return type, or a default attribute type) and a wider relevant API provider (e.g. SemanticAnalyzerPluginInterface or CheckerPluginInterface). * The result of this is used for further processing. See various `XxxContext` named tuples for details about which information is given to each hook. Plugin developers should ensure that their plugins work well in incremental and daemon modes. In particular, plugins should not hold global state, and should always call add_plugin_dependency() in plugin hooks called during semantic analysis. See the method docstring for more details. There is no dedicated cache storage for plugins, but plugins can store per-TypeInfo data in a special .metadata attribute that is serialized to the mypy caches between incremental runs. To avoid collisions between plugins, they are encouraged to store their state under a dedicated key coinciding with plugin name in the metadata dictionary. Every value stored there must be JSON-serializable. ## Notes about the semantic analyzer Mypy 0.710 introduced a new semantic analyzer that changed how plugins are expected to work in several notable ways (from mypy 0.730 the old semantic analyzer is no longer available): 1. The order of processing AST nodes in modules is different. The old semantic analyzer processed modules in textual order, one module at a time. The new semantic analyzer first processes the module top levels, including bodies of any top-level classes and classes nested within classes. ("Top-level" here means "not nested within a function/method".) Functions and methods are processed only after module top levels have been finished. If there is an import cycle, all module top levels in the cycle are processed before processing any functions or methods. Each unit of processing (a module top level or a function/method) is called a *target*. This also means that function signatures in the same module have not been analyzed yet when analyzing the module top level. 
If you need access to a function signature, you'll need to explicitly analyze the signature first using `anal_type()`. 2. Each target can be processed multiple times. This may happen if some forward references are not ready yet, for example. This means that semantic analyzer related plugin hooks can be called multiple times for the same full name. These plugin methods must thus be idempotent. 3. The `anal_type` API function returns None if some part of the type is not available yet. If this happens, the current target being analyzed will be *deferred*, which means that it will be processed again soon, in the hope that additional dependencies will be available. This may happen if there are forward references to types or inter-module references to types within an import cycle. Note that if there is a circular definition, mypy may decide to stop processing to avoid an infinite number of iterations. When this happens, `anal_type` will generate an error and return an `AnyType` type object during the final iteration (instead of None). 4. There is a new API method `defer()`. This can be used to explicitly request the current target to be reprocessed one more time. You don't need this to call this if `anal_type` returns None, however. 5. There is a new API property `final_iteration`, which is true once mypy detected no progress during the previous iteration or if the maximum semantic analysis iteration count has been reached. You must never defer during the final iteration, as it will cause a crash. 6. The `node` attribute of SymbolTableNode objects may contain a reference to a PlaceholderNode object. This object means that this definition has not been fully processed yet. If you encounter a PlaceholderNode, you should defer unless it's the final iteration. If it's the final iteration, you should generate an error message. It usually means that there's a cyclic definition that cannot be resolved by mypy. PlaceholderNodes can only refer to references inside an import cycle. 
If you are looking up things from another module, such as the builtins, that is outside the current module or import cycle, you can safely assume that you won't receive a placeholder. When testing your plugin, you should have a test case that forces a module top level to be processed multiple times. The easiest way to do this is to include a forward reference to a class in a top-level annotation. Example: c: C # Forward reference causes second analysis pass class C: pass Note that a forward reference in a function signature won't trigger another pass, since all functions are processed only after the top level has been fully analyzed. You can use `api.options.new_semantic_analyzer` to check whether the new semantic analyzer is enabled (it's always true in mypy 0.730 and later). """ from abc import abstractmethod, abstractproperty from typing import Any, Callable, List, Tuple, Optional, NamedTuple, TypeVar, Dict from mypy_extensions import trait, mypyc_attr from mypy.nodes import ( Expression, Context, ClassDef, SymbolTableNode, MypyFile, CallExpr ) from mypy.tvar_scope import TypeVarScope from mypy.types import Type, Instance, CallableType, TypeList, UnboundType, ProperType from mypy.messages import MessageBuilder from mypy.options import Options from mypy.lookup import lookup_fully_qualified from mypy.errorcodes import ErrorCode @trait class TypeAnalyzerPluginInterface: """Interface for accessing semantic analyzer functionality in plugins. Methods docstrings contain only basic info. Look for corresponding implementation docstrings in typeanal.py for more details. """ # An options object. Note: these are the cloned options for the current file. # This might be different from Plugin.options (that contains default/global options) # if there are per-file options in the config. This applies to all other interfaces # in this file. 
options = None # type: Options @abstractmethod def fail(self, msg: str, ctx: Context, *, code: Optional[ErrorCode] = None) -> None: """Emit an error message at given location.""" raise NotImplementedError @abstractmethod def named_type(self, name: str, args: List[Type]) -> Instance: """Construct an instance of a builtin type with given name.""" raise NotImplementedError @abstractmethod def analyze_type(self, typ: Type) -> Type: """Ananlyze an unbound type using the default mypy logic.""" raise NotImplementedError @abstractmethod def analyze_callable_args(self, arglist: TypeList) -> Optional[Tuple[List[Type], List[int], List[Optional[str]]]]: """Find types, kinds, and names of arguments from extended callable syntax.""" raise NotImplementedError # A context for a hook that semantically analyzes an unbound type. AnalyzeTypeContext = NamedTuple( 'AnalyzeTypeContext', [ ('type', UnboundType), # Type to analyze ('context', Context), # Relevant location context (e.g. for error messages) ('api', TypeAnalyzerPluginInterface)]) @mypyc_attr(allow_interpreted_subclasses=True) class CommonPluginApi: """ A common plugin API (shared between semantic analysis and type checking phases) that all plugin hooks get independently of the context. """ # Global mypy options. # Per-file options can be only accessed on various # XxxPluginInterface classes. options = None # type: Options @abstractmethod def lookup_fully_qualified(self, fullname: str) -> Optional[SymbolTableNode]: """Lookup a symbol by its full name (including module). This lookup function available for all plugins. Return None if a name is not found. This function doesn't support lookup from current scope. Use SemanticAnalyzerPluginInterface.lookup_qualified() for this.""" raise NotImplementedError @trait class CheckerPluginInterface: """Interface for accessing type checker functionality in plugins. Methods docstrings contain only basic info. Look for corresponding implementation docstrings in checker.py for more details. 
""" msg = None # type: MessageBuilder options = None # type: Options path = None # type: str # Type context for type inference @abstractproperty def type_context(self) -> List[Optional[Type]]: """Return the type context of the plugin""" raise NotImplementedError @abstractmethod def fail(self, msg: str, ctx: Context, *, code: Optional[ErrorCode] = None) -> None: """Emit an error message at given location.""" raise NotImplementedError @abstractmethod def named_generic_type(self, name: str, args: List[Type]) -> Instance: """Construct an instance of a builtin type with given type arguments.""" raise NotImplementedError @trait class SemanticAnalyzerPluginInterface: """Interface for accessing semantic analyzer functionality in plugins. Methods docstrings contain only basic info. Look for corresponding implementation docstrings in semanal.py for more details. # TODO: clean-up lookup functions. """ modules = None # type: Dict[str, MypyFile] # Options for current file. options = None # type: Options cur_mod_id = None # type: str msg = None # type: MessageBuilder @abstractmethod def named_type(self, qualified_name: str, args: Optional[List[Type]] = None) -> Instance: """Construct an instance of a builtin type with given type arguments.""" raise NotImplementedError @abstractmethod def parse_bool(self, expr: Expression) -> Optional[bool]: """Parse True/False literals.""" raise NotImplementedError @abstractmethod def fail(self, msg: str, ctx: Context, serious: bool = False, *, blocker: bool = False, code: Optional[ErrorCode] = None) -> None: """Emit an error message at given location.""" raise NotImplementedError @abstractmethod def anal_type(self, t: Type, *, tvar_scope: Optional[TypeVarScope] = None, allow_tuple_literal: bool = False, allow_unbound_tvars: bool = False, report_invalid_types: bool = True, third_pass: bool = False) -> Optional[Type]: """Analyze an unbound type. Return None if some part of the type is not ready yet. 
In this case the current target being analyzed will be deferred and analyzed again. """ raise NotImplementedError @abstractmethod def class_type(self, self_type: Type) -> Type: """Generate type of first argument of class methods from type of self.""" raise NotImplementedError @abstractmethod def builtin_type(self, fully_qualified_name: str) -> Instance: """Deprecated: use named_type instead.""" raise NotImplementedError @abstractmethod def lookup_fully_qualified(self, name: str) -> SymbolTableNode: """Lookup a symbol by its fully qualified name. Raise an error if not found. """ raise NotImplementedError @abstractmethod def lookup_fully_qualified_or_none(self, name: str) -> Optional[SymbolTableNode]: """Lookup a symbol by its fully qualified name. Return None if not found. """ raise NotImplementedError @abstractmethod def lookup_qualified(self, name: str, ctx: Context, suppress_errors: bool = False) -> Optional[SymbolTableNode]: """Lookup symbol using a name in current scope. This follows Python local->non-local->global->builtins rules. """ raise NotImplementedError @abstractmethod def add_plugin_dependency(self, trigger: str, target: Optional[str] = None) -> None: """Specify semantic dependencies for generated methods/variables. If the symbol with full name given by trigger is found to be stale by mypy, then the body of node with full name given by target will be re-checked. By default, this is the node that is currently analyzed. For example, the dataclass plugin adds a generated __init__ method with a signature that depends on types of attributes in ancestor classes. If any attribute in an ancestor class gets stale (modified), we need to reprocess the subclasses (and thus regenerate __init__ methods). This is used by fine-grained incremental mode (mypy daemon). See mypy/server/deps.py for more details. 
""" raise NotImplementedError @abstractmethod def add_symbol_table_node(self, name: str, stnode: SymbolTableNode) -> Any: """Add node to global symbol table (or to nearest class if there is one).""" raise NotImplementedError @abstractmethod def qualified_name(self, n: str) -> str: """Make qualified name using current module and enclosing class (if any).""" raise NotImplementedError @abstractmethod def defer(self) -> None: """Call this to defer the processing of the current node. This will request an additional iteration of semantic analysis. """ raise NotImplementedError @abstractproperty def final_iteration(self) -> bool: """Is this the final iteration of semantic analysis?""" raise NotImplementedError # A context for querying for configuration data about a module for # cache invalidation purposes. ReportConfigContext = NamedTuple( 'DynamicClassDefContext', [ ('id', str), # Module name ('path', str), # Module file path ('is_check', bool) # Is this invocation for checking whether the config matches ]) # A context for a function hook that infers the return type of a function with # a special signature. # # A no-op callback would just return the inferred return type, but a useful # callback at least sometimes can infer a more precise type. FunctionContext = NamedTuple( 'FunctionContext', [ ('arg_types', List[List[Type]]), # List of actual caller types for each formal argument ('arg_kinds', List[List[int]]), # Ditto for argument kinds, see nodes.ARG_* constants # Names of formal parameters from the callee definition, # these will be sufficient in most cases. ('callee_arg_names', List[Optional[str]]), # Names of actual arguments in the call expression. For example, # in a situation like this: # def func(**kwargs) -> None: # pass # func(kw1=1, kw2=2) # callee_arg_names will be ['kwargs'] and arg_names will be [['kw1', 'kw2']]. 
('arg_names', List[List[Optional[str]]]), ('default_return_type', Type), # Return type inferred from signature ('args', List[List[Expression]]), # Actual expressions for each formal argument ('context', Context), # Relevant location context (e.g. for error messages) ('api', CheckerPluginInterface)]) # A context for a method signature hook that infers a better signature for a # method. Note that argument types aren't available yet. If you need them, # you have to use a method hook instead. # TODO: document ProperType in the plugin changelog/update issue. MethodSigContext = NamedTuple( 'MethodSigContext', [ ('type', ProperType), # Base object type for method call ('args', List[List[Expression]]), # Actual expressions for each formal argument ('default_signature', CallableType), # Original signature of the method ('context', Context), # Relevant location context (e.g. for error messages) ('api', CheckerPluginInterface)]) # A context for a method hook that infers the return type of a method with a # special signature. # # This is very similar to FunctionContext (only differences are documented). MethodContext = NamedTuple( 'MethodContext', [ ('type', ProperType), # Base object type for method call ('arg_types', List[List[Type]]), # List of actual caller types for each formal argument # see FunctionContext for details about names and kinds ('arg_kinds', List[List[int]]), ('callee_arg_names', List[Optional[str]]), ('arg_names', List[List[Optional[str]]]), ('default_return_type', Type), # Return type inferred by mypy ('args', List[List[Expression]]), # Lists of actual expressions for every formal argument ('context', Context), ('api', CheckerPluginInterface)]) # A context for an attribute type hook that infers the type of an attribute. AttributeContext = NamedTuple( 'AttributeContext', [ ('type', ProperType), # Type of object with attribute ('default_attr_type', Type), # Original attribute type ('context', Context), # Relevant location context (e.g. 
for error messages) ('api', CheckerPluginInterface)]) # A context for a class hook that modifies the class definition. ClassDefContext = NamedTuple( 'ClassDefContext', [ ('cls', ClassDef), # The class definition ('reason', Expression), # The expression being applied (decorator, metaclass, base class) ('api', SemanticAnalyzerPluginInterface) ]) # A context for dynamic class definitions like # Base = declarative_base() DynamicClassDefContext = NamedTuple( 'DynamicClassDefContext', [ ('call', CallExpr), # The r.h.s. of dynamic class definition ('name', str), # The name this class is being assigned to ('api', SemanticAnalyzerPluginInterface) ]) @mypyc_attr(allow_interpreted_subclasses=True) class Plugin(CommonPluginApi): """Base class of all type checker plugins. This defines a no-op plugin. Subclasses can override some methods to provide some actual functionality. All get_ methods are treated as pure functions (you should assume that results might be cached). A plugin should return None from a get_ method to give way to other plugins. Look at the comments of various *Context objects for additional information on various hooks. """ def __init__(self, options: Options) -> None: self.options = options self.python_version = options.python_version # This can't be set in __init__ because it is executed too soon in build.py. # Therefore, build.py *must* set it later before graph processing starts # by calling set_modules(). self._modules = None # type: Optional[Dict[str, MypyFile]] def set_modules(self, modules: Dict[str, MypyFile]) -> None: self._modules = modules def lookup_fully_qualified(self, fullname: str) -> Optional[SymbolTableNode]: assert self._modules is not None return lookup_fully_qualified(fullname, self._modules) def report_config_data(self, ctx: ReportConfigContext) -> Any: """Get representation of configuration data for a module. The data must be encodable as JSON and will be stored in the cache metadata for the module. 
A mismatch between the cached values and the returned will result in that module's cache being invalidated and the module being rechecked. This can be called twice for each module, once after loading the cache to check if it is valid and once while writing new cache information. If is_check in the context is true, then the return of this call will be checked against the cached version. Otherwise the call is being made to determine what to put in the cache. This can be used to allow consulting extra cache files in certain complex situations. This can be used to incorporate external configuration information that might require changes to typechecking. """ return None def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]: """Customize dependencies for a module. This hook allows adding in new dependencies for a module. It is called after parsing a file but before analysis. This can be useful if a library has dependencies that are dynamic based on configuration information, for example. Returns a list of (priority, module name, line number) tuples. The line number can be -1 when there is not a known real line number. Priorities are defined in mypy.build (but maybe shouldn't be). 10 is a good choice for priority. """ return [] def get_type_analyze_hook(self, fullname: str ) -> Optional[Callable[[AnalyzeTypeContext], Type]]: """Customize behaviour of the type analyzer for given full names. This method is called during the semantic analysis pass whenever mypy sees an unbound type. For example, while analysing this code: from lib import Special, Other var: Special def func(x: Other[int]) -> None: ... this method will be called with 'lib.Special', and then with 'lib.Other'. The callback returned by plugin must return an analyzed type, i.e. an instance of `mypy.types.Type`. """ return None def get_function_hook(self, fullname: str ) -> Optional[Callable[[FunctionContext], Type]]: """Adjust the return type of a function call. 
This method is called after type checking a call. Plugin may adjust the return type inferred by mypy, and/or emit some error messages. Note, this hook is also called for class instantiation calls, so that in this example: from lib import Class, do_stuff do_stuff(42) Class() This method will be called with 'lib.do_stuff' and then with 'lib.Class'. """ return None def get_method_signature_hook(self, fullname: str ) -> Optional[Callable[[MethodSigContext], CallableType]]: """Adjust the signature of a method. This method is called before type checking a method call. Plugin may infer a better type for the method. The hook is also called for special Python dunder methods except __init__ and __new__ (use get_function_hook to customize class instantiation). This function is called with the method full name using the class where it was _defined_. For example, in this code: from lib import Special class Base: def method(self, arg: Any) -> Any: ... class Derived(Base): ... var: Derived var.method(42) x: Special y = x[0] this method is called with '__main__.Base.method', and then with 'lib.Special.__getitem__'. """ return None def get_method_hook(self, fullname: str ) -> Optional[Callable[[MethodContext], Type]]: """Adjust return type of a method call. This is the same as get_function_hook(), but is called with the method full name (again, using the class where the method is defined). """ return None def get_attribute_hook(self, fullname: str ) -> Optional[Callable[[AttributeContext], Type]]: """Adjust type of a class attribute. This method is called with attribute full name using the class where the attribute was defined (or Var.info.fullname for generated attributes). For classes without __getattr__ or __getattribute__, this hook is only called for names of fields/properties (but not methods) that exist in the instance MRO. 
For classes that implement __getattr__ or __getattribute__, this hook is called for all fields/properties, including nonexistent ones (but still not methods). For example: class Base: x: Any def __getattr__(self, attr: str) -> Any: ... class Derived(Base): ... var: Derived var.x var.y get_attribute_hook is called with '__main__.Base.x' and '__main__.Base.y'. However, if we had not implemented __getattr__ on Base, you would only get the callback for 'var.x'; 'var.y' would produce an error without calling the hook. """ return None def get_class_decorator_hook(self, fullname: str ) -> Optional[Callable[[ClassDefContext], None]]: """Update class definition for given class decorators. The plugin can modify a TypeInfo _in place_ (for example add some generated methods to the symbol table). This hook is called after the class body was semantically analyzed. The hook is called with full names of all class decorators, for example """ return None def get_metaclass_hook(self, fullname: str ) -> Optional[Callable[[ClassDefContext], None]]: """Update class definition for given declared metaclasses. Same as get_class_decorator_hook() but for metaclasses. Note: this hook will be only called for explicit metaclasses, not for inherited ones. TODO: probably it should also be called on inherited metaclasses. """ return None def get_base_class_hook(self, fullname: str ) -> Optional[Callable[[ClassDefContext], None]]: """Update class definition for given base classes. Same as get_class_decorator_hook() but for base classes. Base classes don't need to refer to TypeInfos, if a base class refers to a variable with Any type, this hook will still be called. """ return None def get_customize_class_mro_hook(self, fullname: str ) -> Optional[Callable[[ClassDefContext], None]]: """Customize MRO for given classes. The plugin can modify the class MRO _in place_. This method is called with the class full name before its body was semantically analyzed. 
""" return None def get_dynamic_class_hook(self, fullname: str ) -> Optional[Callable[[DynamicClassDefContext], None]]: """Semantically analyze a dynamic class definition. This plugin hook allows one to semantically analyze dynamic class definitions like: from lib import dynamic_class X = dynamic_class('X', []) For such definition, this hook will be called with 'lib.dynamic_class'. The plugin should create the corresponding TypeInfo, and place it into a relevant symbol table, e.g. using ctx.api.add_symbol_table_node(). """ return None T = TypeVar('T') class ChainedPlugin(Plugin): """A plugin that represents a sequence of chained plugins. Each lookup method returns the hook for the first plugin that reports a match. This class should not be subclassed -- use Plugin as the base class for all plugins. """ # TODO: Support caching of lookup results (through a LRU cache, for example). def __init__(self, options: Options, plugins: List[Plugin]) -> None: """Initialize chained plugin. Assume that the child plugins aren't mutated (results may be cached). 
""" super().__init__(options) self._plugins = plugins def set_modules(self, modules: Dict[str, MypyFile]) -> None: for plugin in self._plugins: plugin.set_modules(modules) def report_config_data(self, ctx: ReportConfigContext) -> Any: config_data = [plugin.report_config_data(ctx) for plugin in self._plugins] return config_data if any(x is not None for x in config_data) else None def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]: deps = [] for plugin in self._plugins: deps.extend(plugin.get_additional_deps(file)) return deps def get_type_analyze_hook(self, fullname: str ) -> Optional[Callable[[AnalyzeTypeContext], Type]]: return self._find_hook(lambda plugin: plugin.get_type_analyze_hook(fullname)) def get_function_hook(self, fullname: str ) -> Optional[Callable[[FunctionContext], Type]]: return self._find_hook(lambda plugin: plugin.get_function_hook(fullname)) def get_method_signature_hook(self, fullname: str ) -> Optional[Callable[[MethodSigContext], CallableType]]: return self._find_hook(lambda plugin: plugin.get_method_signature_hook(fullname)) def get_method_hook(self, fullname: str ) -> Optional[Callable[[MethodContext], Type]]: return self._find_hook(lambda plugin: plugin.get_method_hook(fullname)) def get_attribute_hook(self, fullname: str ) -> Optional[Callable[[AttributeContext], Type]]: return self._find_hook(lambda plugin: plugin.get_attribute_hook(fullname)) def get_class_decorator_hook(self, fullname: str ) -> Optional[Callable[[ClassDefContext], None]]: return self._find_hook(lambda plugin: plugin.get_class_decorator_hook(fullname)) def get_metaclass_hook(self, fullname: str ) -> Optional[Callable[[ClassDefContext], None]]: return self._find_hook(lambda plugin: plugin.get_metaclass_hook(fullname)) def get_base_class_hook(self, fullname: str ) -> Optional[Callable[[ClassDefContext], None]]: return self._find_hook(lambda plugin: plugin.get_base_class_hook(fullname)) def get_customize_class_mro_hook(self, fullname: str ) -> 
Optional[Callable[[ClassDefContext], None]]: return self._find_hook(lambda plugin: plugin.get_customize_class_mro_hook(fullname)) def get_dynamic_class_hook(self, fullname: str ) -> Optional[Callable[[DynamicClassDefContext], None]]: return self._find_hook(lambda plugin: plugin.get_dynamic_class_hook(fullname)) def _find_hook(self, lookup: Callable[[Plugin], T]) -> Optional[T]: for plugin in self._plugins: hook = lookup(plugin) if hook: return hook return None mypy-0.761/mypy/plugins/0000755€tŠÔÚ€2›s®0000000000013576752266021401 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/plugins/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246023476 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/plugins/attrs.py0000644€tŠÔÚ€2›s®0000007056713576752246023125 0ustar jukkaDROPBOX\Domain Users00000000000000"""Plugin for supporting the attrs library (http://www.attrs.org)""" from collections import OrderedDict from typing import Optional, Dict, List, cast, Tuple, Iterable from typing_extensions import Final import mypy.plugin # To avoid circular imports. 
from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError from mypy.fixup import lookup_qualified_stnode from mypy.nodes import ( Context, Argument, Var, ARG_OPT, ARG_POS, TypeInfo, AssignmentStmt, TupleExpr, ListExpr, NameExpr, CallExpr, RefExpr, FuncDef, is_class_var, TempNode, Decorator, MemberExpr, Expression, SymbolTableNode, MDEF, JsonDict, OverloadedFuncDef, ARG_NAMED_OPT, ARG_NAMED, TypeVarExpr, PlaceholderNode ) from mypy.plugins.common import ( _get_argument, _get_bool_argument, _get_decorator_bool_argument, add_method ) from mypy.types import ( Type, AnyType, TypeOfAny, CallableType, NoneType, TypeVarDef, TypeVarType, Overloaded, UnionType, FunctionLike, get_proper_type ) from mypy.typeops import make_simplified_union from mypy.typevars import fill_typevars from mypy.util import unmangle from mypy.server.trigger import make_wildcard_trigger KW_ONLY_PYTHON_2_UNSUPPORTED = "kw_only is not supported in Python 2" # The names of the different functions that create classes or arguments. 
attr_class_makers = { 'attr.s', 'attr.attrs', 'attr.attributes', } # type: Final attr_dataclass_makers = { 'attr.dataclass', } # type: Final attr_attrib_makers = { 'attr.ib', 'attr.attrib', 'attr.attr', } # type: Final SELF_TVAR_NAME = '_AT' # type: Final class Converter: """Holds information about a `converter=` argument""" def __init__(self, name: Optional[str] = None, is_attr_converters_optional: bool = False) -> None: self.name = name self.is_attr_converters_optional = is_attr_converters_optional class Attribute: """The value of an attr.ib() call.""" def __init__(self, name: str, info: TypeInfo, has_default: bool, init: bool, kw_only: bool, converter: Converter, context: Context) -> None: self.name = name self.info = info self.has_default = has_default self.init = init self.kw_only = kw_only self.converter = converter self.context = context def argument(self, ctx: 'mypy.plugin.ClassDefContext') -> Argument: """Return this attribute as an argument to __init__.""" assert self.init init_type = self.info[self.name].type if self.converter.name: # When a converter is set the init_type is overridden by the first argument # of the converter method. converter = lookup_qualified_stnode(ctx.api.modules, self.converter.name, True) if not converter: # The converter may be a local variable. Check there too. converter = ctx.api.lookup_qualified(self.converter.name, self.info, True) # Get the type of the converter. converter_type = None # type: Optional[Type] if converter and isinstance(converter.node, TypeInfo): from mypy.checkmember import type_object_type # To avoid import cycle. 
converter_type = type_object_type(converter.node, ctx.api.builtin_type) elif converter and isinstance(converter.node, OverloadedFuncDef): converter_type = converter.node.type elif converter and converter.type: converter_type = converter.type init_type = None converter_type = get_proper_type(converter_type) if isinstance(converter_type, CallableType) and converter_type.arg_types: init_type = ctx.api.anal_type(converter_type.arg_types[0]) elif isinstance(converter_type, Overloaded): types = [] # type: List[Type] for item in converter_type.items(): # Walk the overloads looking for methods that can accept one argument. num_arg_types = len(item.arg_types) if not num_arg_types: continue if num_arg_types > 1 and any(kind == ARG_POS for kind in item.arg_kinds[1:]): continue types.append(item.arg_types[0]) # Make a union of all the valid types. if types: args = make_simplified_union(types) init_type = ctx.api.anal_type(args) if self.converter.is_attr_converters_optional and init_type: # If the converter was attr.converter.optional(type) then add None to # the allowed init_type. init_type = UnionType.make_union([init_type, NoneType()]) if not init_type: ctx.api.fail("Cannot determine __init__ type from converter", self.context) init_type = AnyType(TypeOfAny.from_error) elif self.converter.name == '': # This means we had a converter but it's not of a type we can infer. # Error was shown in _get_converter_name init_type = AnyType(TypeOfAny.from_error) if init_type is None: if ctx.api.options.disallow_untyped_defs: # This is a compromise. If you don't have a type here then the # __init__ will be untyped. But since the __init__ is added it's # pointing at the decorator. So instead we also show the error in the # assignment, which is where you would fix the issue. node = self.info[self.name].node assert node is not None ctx.api.msg.need_annotation_for_var(node, self.context) # Convert type not set to Any. 
init_type = AnyType(TypeOfAny.unannotated) if self.kw_only: arg_kind = ARG_NAMED_OPT if self.has_default else ARG_NAMED else: arg_kind = ARG_OPT if self.has_default else ARG_POS # Attrs removes leading underscores when creating the __init__ arguments. return Argument(Var(self.name.lstrip("_"), init_type), init_type, None, arg_kind) def serialize(self) -> JsonDict: """Serialize this object so it can be saved and restored.""" return { 'name': self.name, 'has_default': self.has_default, 'init': self.init, 'kw_only': self.kw_only, 'converter_name': self.converter.name, 'converter_is_attr_converters_optional': self.converter.is_attr_converters_optional, 'context_line': self.context.line, 'context_column': self.context.column, } @classmethod def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'Attribute': """Return the Attribute that was serialized.""" return Attribute( data['name'], info, data['has_default'], data['init'], data['kw_only'], Converter(data['converter_name'], data['converter_is_attr_converters_optional']), Context(line=data['context_line'], column=data['context_column']) ) def _determine_eq_order(ctx: 'mypy.plugin.ClassDefContext') -> Tuple[bool, bool]: """ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective values of eq and order. """ cmp = _get_decorator_optional_bool_argument(ctx, 'cmp') eq = _get_decorator_optional_bool_argument(ctx, 'eq') order = _get_decorator_optional_bool_argument(ctx, 'order') if cmp is not None and any((eq is not None, order is not None)): ctx.api.fail("Don't mix `cmp` with `eq' and `order`", ctx.reason) # cmp takes precedence due to bw-compatibility. if cmp is not None: return cmp, cmp # If left None, equality is on and ordering mirrors equality. 
if eq is None: eq = True if order is None: order = eq if eq is False and order is True: ctx.api.fail("eq must be True if order is True", ctx.reason) return eq, order def _get_decorator_optional_bool_argument( ctx: 'mypy.plugin.ClassDefContext', name: str, default: Optional[bool] = None, ) -> Optional[bool]: """Return the Optional[bool] argument for the decorator. This handles both @decorator(...) and @decorator. """ if isinstance(ctx.reason, CallExpr): attr_value = _get_argument(ctx.reason, name) if attr_value: if isinstance(attr_value, NameExpr): if attr_value.fullname == 'builtins.True': return True if attr_value.fullname == 'builtins.False': return False if attr_value.fullname == 'builtins.None': return None ctx.api.fail('"{}" argument must be True or False.'.format(name), ctx.reason) return default return default else: return default def attr_class_maker_callback(ctx: 'mypy.plugin.ClassDefContext', auto_attribs_default: bool = False) -> None: """Add necessary dunder methods to classes decorated with attr.s. attrs is a package that lets you define classes without writing dull boilerplate code. At a quick glance, the decorator searches the class body for assignments of `attr.ib`s (or annotated variables if auto_attribs=True), then depending on how the decorator is called, it will add an __init__ or all the __cmp__ methods. For frozen=True it will turn the attrs into properties. See http://www.attrs.org/en/stable/how-does-it-work.html for information on how attrs works. 
""" info = ctx.cls.info init = _get_decorator_bool_argument(ctx, 'init', True) frozen = _get_frozen(ctx) eq, order = _determine_eq_order(ctx) auto_attribs = _get_decorator_bool_argument(ctx, 'auto_attribs', auto_attribs_default) kw_only = _get_decorator_bool_argument(ctx, 'kw_only', False) if ctx.api.options.python_version[0] < 3: if auto_attribs: ctx.api.fail("auto_attribs is not supported in Python 2", ctx.reason) return if not info.defn.base_type_exprs: # Note: This will not catch subclassing old-style classes. ctx.api.fail("attrs only works with new-style classes", info.defn) return if kw_only: ctx.api.fail(KW_ONLY_PYTHON_2_UNSUPPORTED, ctx.reason) return attributes = _analyze_class(ctx, auto_attribs, kw_only) # Check if attribute types are ready. for attr in attributes: node = info.get(attr.name) if node is None: # This name is likely blocked by a star import. We don't need to defer because # defer() is already called by mark_incomplete(). return if node.type is None and not ctx.api.final_iteration: ctx.api.defer() return # Save the attributes so that subclasses can reuse them. ctx.cls.info.metadata['attrs'] = { 'attributes': [attr.serialize() for attr in attributes], 'frozen': frozen, } adder = MethodAdder(ctx) if init: _add_init(ctx, attributes, adder) if eq: _add_eq(ctx, adder) if order: _add_order(ctx, adder) if frozen: _make_frozen(ctx, attributes) def _get_frozen(ctx: 'mypy.plugin.ClassDefContext') -> bool: """Return whether this class is frozen.""" if _get_decorator_bool_argument(ctx, 'frozen', False): return True # Subclasses of frozen classes are frozen so check that. for super_info in ctx.cls.info.mro[1:-1]: if 'attrs' in super_info.metadata and super_info.metadata['attrs']['frozen']: return True return False def _analyze_class(ctx: 'mypy.plugin.ClassDefContext', auto_attribs: bool, kw_only: bool) -> List[Attribute]: """Analyze the class body of an attr maker, its parents, and return the Attributes found. 
auto_attribs=True means we'll generate attributes from type annotations also. kw_only=True means that all attributes created here will be keyword only args in __init__. """ own_attrs = OrderedDict() # type: OrderedDict[str, Attribute] # Walk the body looking for assignments and decorators. for stmt in ctx.cls.defs.body: if isinstance(stmt, AssignmentStmt): for attr in _attributes_from_assignment(ctx, stmt, auto_attribs, kw_only): # When attrs are defined twice in the same body we want to use the 2nd definition # in the 2nd location. So remove it from the OrderedDict. # Unless it's auto_attribs in which case we want the 2nd definition in the # 1st location. if not auto_attribs and attr.name in own_attrs: del own_attrs[attr.name] own_attrs[attr.name] = attr elif isinstance(stmt, Decorator): _cleanup_decorator(stmt, own_attrs) for attribute in own_attrs.values(): # Even though these look like class level assignments we want them to look like # instance level assignments. if attribute.name in ctx.cls.info.names: node = ctx.cls.info.names[attribute.name].node if isinstance(node, PlaceholderNode): # This node is not ready yet. continue assert isinstance(node, Var) node.is_initialized_in_class = False # Traverse the MRO and collect attributes from the parents. taken_attr_names = set(own_attrs) super_attrs = [] for super_info in ctx.cls.info.mro[1:-1]: if 'attrs' in super_info.metadata: # Each class depends on the set of attributes in its attrs ancestors. ctx.api.add_plugin_dependency(make_wildcard_trigger(super_info.fullname)) for data in super_info.metadata['attrs']['attributes']: # Only add an attribute if it hasn't been defined before. This # allows for overwriting attribute definitions by subclassing. if data['name'] not in taken_attr_names: a = Attribute.deserialize(super_info, data) super_attrs.append(a) taken_attr_names.add(a.name) attributes = super_attrs + list(own_attrs.values()) # Check the init args for correct default-ness. 
Note: This has to be done after all the # attributes for all classes have been read, because subclasses can override parents. last_default = False last_kw_only = False for i, attribute in enumerate(attributes): if not attribute.init: continue if attribute.kw_only: # Keyword-only attributes don't care whether they are default or not. last_kw_only = True continue # If the issue comes from merging different classes, report it # at the class definition point. context = attribute.context if i >= len(super_attrs) else ctx.cls if not attribute.has_default and last_default: ctx.api.fail( "Non-default attributes not allowed after default attributes.", context) if last_kw_only: ctx.api.fail( "Non keyword-only attributes are not allowed after a keyword-only attribute.", context ) last_default |= attribute.has_default return attributes def _attributes_from_assignment(ctx: 'mypy.plugin.ClassDefContext', stmt: AssignmentStmt, auto_attribs: bool, kw_only: bool) -> Iterable[Attribute]: """Return Attribute objects that are created by this assignment. The assignments can look like this: x = attr.ib() x = y = attr.ib() x, y = attr.ib(), attr.ib() or if auto_attribs is enabled also like this: x: type x: type = default_value """ for lvalue in stmt.lvalues: lvalues, rvalues = _parse_assignments(lvalue, stmt) if len(lvalues) != len(rvalues): # This means we have some assignment that isn't 1 to 1. # It can't be an attrib. continue for lhs, rvalue in zip(lvalues, rvalues): # Check if the right hand side is a call to an attribute maker. 
            if (isinstance(rvalue, CallExpr)
                    and isinstance(rvalue.callee, RefExpr)
                    and rvalue.callee.fullname in attr_attrib_makers):
                attr = _attribute_from_attrib_maker(ctx, auto_attribs, kw_only, lhs, rvalue, stmt)
                if attr:
                    yield attr
            elif auto_attribs and stmt.type and stmt.new_syntax and not is_class_var(lhs):
                # auto_attribs: any new-style annotated assignment is an attribute,
                # unless it is a ClassVar.
                yield _attribute_from_auto_attrib(ctx, kw_only, lhs, rvalue, stmt)


def _cleanup_decorator(stmt: Decorator, attr_map: Dict[str, Attribute]) -> None:
    """Handle decorators in class bodies.

    `x.default` will set a default value on x
    `x.validator` and `x.default` will get removed to avoid throwing a type error.
    """
    remove_me = []
    for func_decorator in stmt.decorators:
        if (isinstance(func_decorator, MemberExpr)
                and isinstance(func_decorator.expr, NameExpr)
                and func_decorator.expr.name in attr_map):

            if func_decorator.name == 'default':
                attr_map[func_decorator.expr.name].has_default = True

            if func_decorator.name in ('default', 'validator'):
                # These are decorators on the attrib object that only exist during
                # class creation time.  In order to not trigger a type error later we
                # just remove them.  This might leave us with a Decorator with no
                # decorators (Emperor's new clothes?)
                # TODO: It would be nice to type-check these rather than remove them.
                #       default should be Callable[[], T]
                #       validator should be Callable[[Any, 'Attribute', T], Any]
                #       where T is the type of the attribute.
                remove_me.append(func_decorator)
    # Remove after iteration: never mutate the list being iterated.
    for dec in remove_me:
        stmt.decorators.remove(dec)


def _attribute_from_auto_attrib(ctx: 'mypy.plugin.ClassDefContext',
                                kw_only: bool,
                                lhs: NameExpr,
                                rvalue: Expression,
                                stmt: AssignmentStmt) -> Attribute:
    """Return an Attribute for a new type assignment."""
    name = unmangle(lhs.name)
    # `x: int` (without equal sign) assigns rvalue to TempNode(AnyType())
    has_rhs = not isinstance(rvalue, TempNode)
    return Attribute(name, ctx.cls.info, has_rhs, True, kw_only, Converter(), stmt)


def _attribute_from_attrib_maker(ctx: 'mypy.plugin.ClassDefContext',
                                 auto_attribs: bool,
                                 kw_only: bool,
                                 lhs: NameExpr,
                                 rvalue: CallExpr,
                                 stmt: AssignmentStmt) -> Optional[Attribute]:
    """Return an Attribute from the assignment or None if you can't make one."""
    if auto_attribs and not stmt.new_syntax:
        # auto_attribs requires an annotation on *every* attr.ib.
        assert lhs.node is not None
        ctx.api.msg.need_annotation_for_var(lhs.node, stmt)
        return None

    if len(stmt.lvalues) > 1:
        ctx.api.fail("Too many names for one attribute", stmt)
        return None

    # This is the type that belongs in the __init__ method for this attrib.
    init_type = stmt.type

    # Read all the arguments from the call.
    init = _get_bool_argument(ctx, rvalue, 'init', True)
    # Note: If the class decorator says kw_only=True the attribute is ignored.
    # See https://github.com/python-attrs/attrs/issues/481 for explanation.
    kw_only |= _get_bool_argument(ctx, rvalue, 'kw_only', False)
    if kw_only and ctx.api.options.python_version[0] < 3:
        ctx.api.fail(KW_ONLY_PYTHON_2_UNSUPPORTED, stmt)
        return None

    # TODO: Check for attr.NOTHING
    attr_has_default = bool(_get_argument(rvalue, 'default'))
    attr_has_factory = bool(_get_argument(rvalue, 'factory'))

    if attr_has_default and attr_has_factory:
        ctx.api.fail("Can't pass both `default` and `factory`.", rvalue)
    elif attr_has_factory:
        # A factory counts as having a default value.
        attr_has_default = True

    # If the type isn't set through annotation but is passed through `type=` use that.
    type_arg = _get_argument(rvalue, 'type')
    if type_arg and not init_type:
        try:
            un_type = expr_to_unanalyzed_type(type_arg)
        except TypeTranslationError:
            ctx.api.fail('Invalid argument to type', type_arg)
        else:
            init_type = ctx.api.anal_type(un_type)
            if init_type and isinstance(lhs.node, Var) and not lhs.node.type:
                # If there is no annotation, add one.
                lhs.node.type = init_type
                lhs.is_inferred_def = False

    # Note: convert is deprecated but works the same as converter.
    converter = _get_argument(rvalue, 'converter')
    convert = _get_argument(rvalue, 'convert')
    if convert and converter:
        ctx.api.fail("Can't pass both `convert` and `converter`.", rvalue)
    elif convert:
        ctx.api.fail("convert is deprecated, use converter", rvalue)
        converter = convert
    converter_info = _parse_converter(ctx, converter)

    name = unmangle(lhs.name)
    return Attribute(name, ctx.cls.info,
                     attr_has_default, init, kw_only, converter_info, stmt)


def _parse_converter(ctx: 'mypy.plugin.ClassDefContext',
                     converter: Optional[Expression]) -> Converter:
    """Return the Converter object from an Expression."""
    # TODO: Support complex converters, e.g. lambdas, calls, etc.
    if converter:
        if isinstance(converter, RefExpr) and converter.node:
            # Named functions, overloads with usable signatures, and classes
            # are the supported converter forms.
            if (isinstance(converter.node, FuncDef)
                    and converter.node.type
                    and isinstance(converter.node.type, FunctionLike)):
                return Converter(converter.node.fullname)
            elif (isinstance(converter.node, OverloadedFuncDef)
                    and is_valid_overloaded_converter(converter.node)):
                return Converter(converter.node.fullname)
            elif isinstance(converter.node, TypeInfo):
                return Converter(converter.node.fullname)

        if (isinstance(converter, CallExpr)
                and isinstance(converter.callee, RefExpr)
                and converter.callee.fullname == "attr.converters.optional"
                and converter.args
                and converter.args[0]):
            # Special handling for attr.converters.optional(type)
            # We extract the type and add make the init_args Optional in Attribute.argument
            argument = _parse_converter(ctx, converter.args[0])
            argument.is_attr_converters_optional = True
            return argument

        # Signal that we have an unsupported converter.
        ctx.api.fail(
            "Unsupported converter, only named functions and types are currently supported",
            converter
        )
        # Converter(''): "converter present but uninferable" sentinel.
        return Converter('')
    return Converter(None)


def is_valid_overloaded_converter(defn: OverloadedFuncDef) -> bool:
    """Return True if every decorated overload item has a usable callable type."""
    return all((not isinstance(item, Decorator)
                or isinstance(item.func.type, FunctionLike))
               for item in defn.items)


def _parse_assignments(
        lvalue: Expression,
        stmt: AssignmentStmt) -> Tuple[List[NameExpr], List[Expression]]:
    """Convert a possibly complex assignment expression into lists of lvalues and rvalues."""
    lvalues = []  # type: List[NameExpr]
    rvalues = []  # type: List[Expression]
    if isinstance(lvalue, (TupleExpr, ListExpr)):
        # `x, y = ...` / `[x, y] = ...`: only accept all-name targets.
        if all(isinstance(item, NameExpr) for item in lvalue.items):
            lvalues = cast(List[NameExpr], lvalue.items)
        if isinstance(stmt.rvalue, (TupleExpr, ListExpr)):
            rvalues = stmt.rvalue.items
    elif isinstance(lvalue, NameExpr):
        lvalues = [lvalue]
        rvalues = [stmt.rvalue]
    return lvalues, rvalues


def _add_eq(ctx: 'mypy.plugin.ClassDefContext', adder: 'MethodAdder') -> None:
    """Generate __eq__ and __ne__ for this class."""
    # For __ne__ and __eq__ the type is:
    #     def __ne__(self, other: object) -> bool
    bool_type = ctx.api.named_type('__builtins__.bool')
    object_type = ctx.api.named_type('__builtins__.object')
    args = [Argument(Var('other', object_type), object_type, None, ARG_POS)]
    for method in ['__ne__', '__eq__']:
        adder.add_method(method, args, bool_type)


def _add_order(ctx: 'mypy.plugin.ClassDefContext', adder: 'MethodAdder') -> None:
    """Generate all the ordering methods for this class."""
    bool_type = ctx.api.named_type('__builtins__.bool')
    object_type = ctx.api.named_type('__builtins__.object')
    # Make the types be:
    #    AT = TypeVar('AT')
    #    def __lt__(self: AT, other: AT) -> bool
    # This way comparisons with subclasses will work correctly.
    tvd = TypeVarDef(SELF_TVAR_NAME, ctx.cls.info.fullname + '.' + SELF_TVAR_NAME, -1, [],
                     object_type)
    tvd_type = TypeVarType(tvd)
    self_tvar_expr = TypeVarExpr(SELF_TVAR_NAME, ctx.cls.info.fullname + '.' + SELF_TVAR_NAME,
                                 [], object_type)
    # Register the synthetic type variable in the class's symbol table.
    ctx.cls.info.names[SELF_TVAR_NAME] = SymbolTableNode(MDEF, self_tvar_expr)

    args = [Argument(Var('other', tvd_type), tvd_type, None, ARG_POS)]
    for method in ['__lt__', '__le__', '__gt__', '__ge__']:
        adder.add_method(method, args, bool_type, self_type=tvd_type, tvd=tvd)


def _make_frozen(ctx: 'mypy.plugin.ClassDefContext', attributes: List[Attribute]) -> None:
    """Turn all the attributes into properties to simulate frozen classes."""
    for attribute in attributes:
        if attribute.name in ctx.cls.info.names:
            # This variable belongs to this class so we can modify it.
            node = ctx.cls.info.names[attribute.name].node
            assert isinstance(node, Var)
            node.is_property = True
        else:
            # This variable belongs to a super class so create new Var so we
            # can modify it.
            var = Var(attribute.name, ctx.cls.info[attribute.name].type)
            var.info = ctx.cls.info
            var._fullname = '%s.%s' % (ctx.cls.info.fullname, var.name)
            ctx.cls.info.names[var.name] = SymbolTableNode(MDEF, var)
            var.is_property = True


def _add_init(ctx: 'mypy.plugin.ClassDefContext', attributes: List[Attribute],
              adder: 'MethodAdder') -> None:
    """Generate an __init__ method for the attributes and add it to the class."""
    args = [attribute.argument(ctx) for attribute in attributes if attribute.init]
    if all(
        # We use getattr rather than instance checks because the variable.type
        # might be wrapped into a Union or some other type, but even non-Any
        # types reliably track the fact that the argument was not annotated.
        getattr(arg.variable.type, "type_of_any", None) == TypeOfAny.unannotated
        for arg in args
    ):
        # This workaround makes --disallow-incomplete-defs usable with attrs,
        # but is definitely suboptimal as a long-term solution.
        # See https://github.com/python/mypy/issues/5954 for discussion.
        for a in args:
            a.variable.type = AnyType(TypeOfAny.implementation_artifact)
            a.type_annotation = AnyType(TypeOfAny.implementation_artifact)
    adder.add_method('__init__', args, NoneType())


class MethodAdder:
    """Helper to add methods to a TypeInfo.

    ctx: The ClassDefCtx we are using on which we will add methods.
    """

    # TODO: Combine this with the code build_namedtuple_typeinfo to support both.

    def __init__(self, ctx: 'mypy.plugin.ClassDefContext') -> None:
        self.ctx = ctx
        # Default self type, with the class's own type variables filled in.
        self.self_type = fill_typevars(ctx.cls.info)

    def add_method(self,
                   method_name: str, args: List[Argument], ret_type: Type,
                   self_type: Optional[Type] = None,
                   tvd: Optional[TypeVarDef] = None) -> None:
        """Add a method: def <method_name>(self, <args>) -> <ret_type>): ... to info.

        self_type: The type to use for the self argument or None to use the inferred self type.
        tvd: If the method is generic these should be the type variables.
""" self_type = self_type if self_type is not None else self.self_type add_method(self.ctx, method_name, args, ret_type, self_type, tvd) mypy-0.761/mypy/plugins/common.py0000644€tŠÔÚ€2›s®0000001104113576752246023236 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Optional from mypy.nodes import ( ARG_POS, MDEF, Argument, Block, CallExpr, Expression, SYMBOL_FUNCBASE_TYPES, FuncDef, PassStmt, RefExpr, SymbolTableNode, Var ) from mypy.plugin import ClassDefContext from mypy.semanal import set_callable_name from mypy.types import CallableType, Overloaded, Type, TypeVarDef, get_proper_type from mypy.typevars import fill_typevars from mypy.util import get_unique_redefinition_name from mypy.typeops import try_getting_str_literals # noqa: F401 # Part of public API def _get_decorator_bool_argument( ctx: ClassDefContext, name: str, default: bool, ) -> bool: """Return the bool argument for the decorator. This handles both @decorator(...) and @decorator. """ if isinstance(ctx.reason, CallExpr): return _get_bool_argument(ctx, ctx.reason, name, default) else: return default def _get_bool_argument(ctx: ClassDefContext, expr: CallExpr, name: str, default: bool) -> bool: """Return the boolean value for an argument to a call or the default if it's not found. """ attr_value = _get_argument(expr, name) if attr_value: ret = ctx.api.parse_bool(attr_value) if ret is None: ctx.api.fail('"{}" argument must be True or False.'.format(name), expr) return default return ret return default def _get_argument(call: CallExpr, name: str) -> Optional[Expression]: """Return the expression for the specific argument.""" # To do this we use the CallableType of the callee to find the FormalArgument, # then walk the actual CallExpr looking for the appropriate argument. # # Note: I'm not hard-coding the index so that in the future we can support other # attrib and class makers. 
if not isinstance(call.callee, RefExpr): return None callee_type = None callee_node = call.callee.node if (isinstance(callee_node, (Var, SYMBOL_FUNCBASE_TYPES)) and callee_node.type): callee_node_type = get_proper_type(callee_node.type) if isinstance(callee_node_type, Overloaded): # We take the last overload. callee_type = callee_node_type.items()[-1] elif isinstance(callee_node_type, CallableType): callee_type = callee_node_type if not callee_type: return None argument = callee_type.argument_by_name(name) if not argument: return None assert argument.name for i, (attr_name, attr_value) in enumerate(zip(call.arg_names, call.args)): if argument.pos is not None and not attr_name and i == argument.pos: return attr_value if attr_name == argument.name: return attr_value return None def add_method( ctx: ClassDefContext, name: str, args: List[Argument], return_type: Type, self_type: Optional[Type] = None, tvar_def: Optional[TypeVarDef] = None, ) -> None: """Adds a new method to a class. """ info = ctx.cls.info # First remove any previously generated methods with the same name # to avoid clashes and problems in the semantic analyzer. if name in info.names: sym = info.names[name] if sym.plugin_generated and isinstance(sym.node, FuncDef): ctx.cls.defs.body.remove(sym.node) self_type = self_type or fill_typevars(info) function_type = ctx.api.named_type('__builtins__.function') args = [Argument(Var('self'), self_type, None, ARG_POS)] + args arg_types, arg_names, arg_kinds = [], [], [] for arg in args: assert arg.type_annotation, 'All arguments must be fully typed.' arg_types.append(arg.type_annotation) arg_names.append(arg.variable.name) arg_kinds.append(arg.kind) signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) if tvar_def: signature.variables = [tvar_def] func = FuncDef(name, args, Block([PassStmt()])) func.info = info func.type = set_callable_name(signature, func) func._fullname = info.fullname + '.' 
+ name func.line = info.line # NOTE: we would like the plugin generated node to dominate, but we still # need to keep any existing definitions so they get semantically analyzed. if name in info.names: # Get a nice unique name instead. r_name = get_unique_redefinition_name(name, info.names) info.names[r_name] = info.names[name] info.names[name] = SymbolTableNode(MDEF, func, plugin_generated=True) info.defn.defs.body.append(func) mypy-0.761/mypy/plugins/ctypes.py0000644€tŠÔÚ€2›s®0000002461713576752246023272 0ustar jukkaDROPBOX\Domain Users00000000000000"""Plugin to provide accurate types for some parts of the ctypes module.""" from typing import List, Optional # Fully qualified instead of "from mypy.plugin import ..." to avoid circular import problems. import mypy.plugin from mypy import nodes from mypy.maptype import map_instance_to_supertype from mypy.messages import format_type from mypy.subtypes import is_subtype from mypy.types import ( AnyType, CallableType, Instance, NoneType, Type, TypeOfAny, UnionType, union_items, ProperType, get_proper_type ) from mypy.typeops import make_simplified_union def _get_bytes_type(api: 'mypy.plugin.CheckerPluginInterface') -> Instance: """Return the type corresponding to bytes on the current Python version. This is bytes in Python 3, and str in Python 2. """ return api.named_generic_type( 'builtins.bytes' if api.options.python_version >= (3,) else 'builtins.str', []) def _get_text_type(api: 'mypy.plugin.CheckerPluginInterface') -> Instance: """Return the type corresponding to Text on the current Python version. This is str in Python 3, and unicode in Python 2. """ return api.named_generic_type( 'builtins.str' if api.options.python_version >= (3,) else 'builtins.unicode', []) def _find_simplecdata_base_arg(tp: Instance, api: 'mypy.plugin.CheckerPluginInterface' ) -> Optional[ProperType]: """Try to find a parametrized _SimpleCData in tp's bases and return its single type argument. 
    None is returned if _SimpleCData appears nowhere in tp's (direct or indirect) bases.
    """
    if tp.type.has_base('ctypes._SimpleCData'):
        simplecdata_base = map_instance_to_supertype(
            tp,
            api.named_generic_type('ctypes._SimpleCData',
                                   [AnyType(TypeOfAny.special_form)]).type)
        assert len(simplecdata_base.args) == 1, '_SimpleCData takes exactly one type argument'
        return get_proper_type(simplecdata_base.args[0])
    return None


def _autoconvertible_to_cdata(tp: Type, api: 'mypy.plugin.CheckerPluginInterface') -> Type:
    """Get a type that is compatible with all types that can be implicitly converted to the given
    CData type.

    Examples:
    * c_int -> Union[c_int, int]
    * c_char_p -> Union[c_char_p, bytes, int, NoneType]
    * MyStructure -> MyStructure
    """
    allowed_types = []
    # If tp is a union, we allow all types that are convertible to at least one of the union
    # items. This is not quite correct - strictly speaking, only types convertible to *all* of the
    # union items should be allowed. This may be worth changing in the future, but the more
    # correct algorithm could be too strict to be useful.
    for t in union_items(tp):
        # Every type can be converted from itself (obviously).
        allowed_types.append(t)
        if isinstance(t, Instance):
            unboxed = _find_simplecdata_base_arg(t, api)
            if unboxed is not None:
                # If _SimpleCData appears in tp's (direct or indirect) bases, its type argument
                # specifies the type's "unboxed" version, which can always be converted back to
                # the original "boxed" type.
                allowed_types.append(unboxed)

                if t.type.has_base('ctypes._PointerLike'):
                    # Pointer-like _SimpleCData subclasses can also be converted from
                    # an int or None.
                    allowed_types.append(api.named_generic_type('builtins.int', []))
                    allowed_types.append(NoneType())

    return make_simplified_union(allowed_types)


def _autounboxed_cdata(tp: Type) -> ProperType:
    """Get the auto-unboxed version of a CData type, if applicable.

    For *direct* _SimpleCData subclasses, the only type argument of _SimpleCData in the bases
    list is returned.
    For all other CData types, including indirect _SimpleCData subclasses, tp is returned as-is.
    """
    tp = get_proper_type(tp)

    if isinstance(tp, UnionType):
        # Unbox each union item recursively and re-simplify.
        return make_simplified_union([_autounboxed_cdata(t) for t in tp.items])
    elif isinstance(tp, Instance):
        for base in tp.type.bases:
            if base.type.fullname == 'ctypes._SimpleCData':
                # If tp has _SimpleCData as a direct base class,
                # the auto-unboxed type is the single type argument of the _SimpleCData type.
                assert len(base.args) == 1
                return get_proper_type(base.args[0])
    # If tp is not a concrete type, or if there is no _SimpleCData in the bases,
    # the type is not auto-unboxed.
    return tp


def _get_array_element_type(tp: Type) -> Optional[ProperType]:
    """Get the element type of the Array type tp, or None if not specified."""
    tp = get_proper_type(tp)
    if isinstance(tp, Instance):
        assert tp.type.fullname == 'ctypes.Array'
        if len(tp.args) == 1:
            return get_proper_type(tp.args[0])
    return None


def array_constructor_callback(ctx: 'mypy.plugin.FunctionContext') -> Type:
    """Callback to provide an accurate signature for the ctypes.Array constructor."""
    # Extract the element type from the constructor's return type, i. e. the type of the array
    # being constructed.
    et = _get_array_element_type(ctx.default_return_type)
    if et is not None:
        allowed = _autoconvertible_to_cdata(et, ctx.api)
        assert len(ctx.arg_types) == 1, \
            "The stub of the ctypes.Array constructor should have a single vararg parameter"
        for arg_num, (arg_kind, arg_type) in enumerate(
                zip(ctx.arg_kinds[0], ctx.arg_types[0]), 1):
            if arg_kind == nodes.ARG_POS and not is_subtype(arg_type, allowed):
                ctx.api.msg.fail(
                    'Array constructor argument {} of type {}'
                    ' is not convertible to the array element type {}'
                    .format(arg_num, format_type(arg_type), format_type(et)), ctx.context)
            elif arg_kind == nodes.ARG_STAR:
                # A *args actual must be an iterable of convertible values.
                ty = ctx.api.named_generic_type("typing.Iterable", [allowed])
                if not is_subtype(arg_type, ty):
                    it = ctx.api.named_generic_type("typing.Iterable", [et])
                    ctx.api.msg.fail(
                        'Array constructor argument {} of type {}'
                        ' is not convertible to the array element type {}'
                        .format(arg_num, format_type(arg_type), format_type(it)), ctx.context)

    return ctx.default_return_type


def array_getitem_callback(ctx: 'mypy.plugin.MethodContext') -> Type:
    """Callback to provide an accurate return type for ctypes.Array.__getitem__."""
    et = _get_array_element_type(ctx.type)
    if et is not None:
        unboxed = _autounboxed_cdata(et)
        assert len(ctx.arg_types) == 1, \
            'The stub of ctypes.Array.__getitem__ should have exactly one parameter'
        assert len(ctx.arg_types[0]) == 1, \
            "ctypes.Array.__getitem__'s parameter should not be variadic"
        index_type = get_proper_type(ctx.arg_types[0][0])
        if isinstance(index_type, Instance):
            # int index -> single (unboxed) element; slice -> list of them.
            if index_type.type.has_base('builtins.int'):
                return unboxed
            elif index_type.type.has_base('builtins.slice'):
                return ctx.api.named_generic_type('builtins.list', [unboxed])
    return ctx.default_return_type


def array_setitem_callback(ctx: 'mypy.plugin.MethodSigContext') -> CallableType:
    """Callback to provide an accurate signature for ctypes.Array.__setitem__."""
    et = _get_array_element_type(ctx.type)
    if et is not None:
        allowed = _autoconvertible_to_cdata(et, ctx.api)
        assert
len(ctx.default_signature.arg_types) == 2 index_type = get_proper_type(ctx.default_signature.arg_types[0]) if isinstance(index_type, Instance): arg_type = None if index_type.type.has_base('builtins.int'): arg_type = allowed elif index_type.type.has_base('builtins.slice'): arg_type = ctx.api.named_generic_type('builtins.list', [allowed]) if arg_type is not None: # Note: arg_type can only be None if index_type is invalid, in which case we use # the default signature and let mypy report an error about it. return ctx.default_signature.copy_modified( arg_types=ctx.default_signature.arg_types[:1] + [arg_type], ) return ctx.default_signature def array_iter_callback(ctx: 'mypy.plugin.MethodContext') -> Type: """Callback to provide an accurate return type for ctypes.Array.__iter__.""" et = _get_array_element_type(ctx.type) if et is not None: unboxed = _autounboxed_cdata(et) return ctx.api.named_generic_type('typing.Iterator', [unboxed]) return ctx.default_return_type def array_value_callback(ctx: 'mypy.plugin.AttributeContext') -> Type: """Callback to provide an accurate type for ctypes.Array.value.""" et = _get_array_element_type(ctx.type) if et is not None: types = [] # type: List[Type] for tp in union_items(et): if isinstance(tp, AnyType): types.append(AnyType(TypeOfAny.from_another_any, source_any=tp)) elif isinstance(tp, Instance) and tp.type.fullname == 'ctypes.c_char': types.append(_get_bytes_type(ctx.api)) elif isinstance(tp, Instance) and tp.type.fullname == 'ctypes.c_wchar': types.append(_get_text_type(ctx.api)) else: ctx.api.msg.fail( 'Array attribute "value" is only available' ' with element type "c_char" or "c_wchar", not {}' .format(format_type(et)), ctx.context) return make_simplified_union(types) return ctx.default_attr_type def array_raw_callback(ctx: 'mypy.plugin.AttributeContext') -> Type: """Callback to provide an accurate type for ctypes.Array.raw.""" et = _get_array_element_type(ctx.type) if et is not None: types = [] # type: List[Type] for tp in 
union_items(et): if (isinstance(tp, AnyType) or isinstance(tp, Instance) and tp.type.fullname == 'ctypes.c_char'): types.append(_get_bytes_type(ctx.api)) else: ctx.api.msg.fail( 'Array attribute "raw" is only available' ' with element type "c_char", not {}' .format(format_type(et)), ctx.context) return make_simplified_union(types) return ctx.default_attr_type mypy-0.761/mypy/plugins/dataclasses.py0000644€tŠÔÚ€2›s®0000004043613576752246024247 0ustar jukkaDROPBOX\Domain Users00000000000000"""Plugin that provides support for dataclasses.""" from typing import Dict, List, Set, Tuple, Optional from typing_extensions import Final from mypy.nodes import ( ARG_OPT, ARG_POS, MDEF, Argument, AssignmentStmt, CallExpr, Context, Expression, FuncDef, JsonDict, NameExpr, RefExpr, SymbolTableNode, TempNode, TypeInfo, Var, TypeVarExpr, PlaceholderNode ) from mypy.plugin import ClassDefContext from mypy.plugins.common import add_method, _get_decorator_bool_argument from mypy.types import Instance, NoneType, TypeVarDef, TypeVarType, get_proper_type from mypy.server.trigger import make_wildcard_trigger # The set of decorators that generate dataclasses. 
dataclass_makers = { 'dataclass', 'dataclasses.dataclass', } # type: Final SELF_TVAR_NAME = '_DT' # type: Final class DataclassAttribute: def __init__( self, name: str, is_in_init: bool, is_init_var: bool, has_default: bool, line: int, column: int, ) -> None: self.name = name self.is_in_init = is_in_init self.is_init_var = is_init_var self.has_default = has_default self.line = line self.column = column def to_argument(self, info: TypeInfo) -> Argument: return Argument( variable=self.to_var(info), type_annotation=info[self.name].type, initializer=None, kind=ARG_OPT if self.has_default else ARG_POS, ) def to_var(self, info: TypeInfo) -> Var: return Var(self.name, info[self.name].type) def serialize(self) -> JsonDict: return { 'name': self.name, 'is_in_init': self.is_in_init, 'is_init_var': self.is_init_var, 'has_default': self.has_default, 'line': self.line, 'column': self.column, } @classmethod def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'DataclassAttribute': return cls(**data) class DataclassTransformer: def __init__(self, ctx: ClassDefContext) -> None: self._ctx = ctx def transform(self) -> None: """Apply all the necessary transformations to the underlying dataclass so as to ensure it is fully type checked according to the rules in PEP 557. """ ctx = self._ctx info = self._ctx.cls.info attributes = self.collect_attributes() if attributes is None: # Some definitions are not ready, defer() should be already called. return for attr in attributes: node = info.get(attr.name) if node is None: # Nodes of superclass InitVars not used in __init__ cannot be reached. 
assert attr.is_init_var and not attr.is_in_init continue if node.type is None: ctx.api.defer() return decorator_arguments = { 'init': _get_decorator_bool_argument(self._ctx, 'init', True), 'eq': _get_decorator_bool_argument(self._ctx, 'eq', True), 'order': _get_decorator_bool_argument(self._ctx, 'order', False), 'frozen': _get_decorator_bool_argument(self._ctx, 'frozen', False), } # If there are no attributes, it may be that the semantic analyzer has not # processed them yet. In order to work around this, we can simply skip generating # __init__ if there are no attributes, because if the user truly did not define any, # then the object default __init__ with an empty signature will be present anyway. if (decorator_arguments['init'] and ('__init__' not in info.names or info.names['__init__'].plugin_generated) and attributes): add_method( ctx, '__init__', args=[attr.to_argument(info) for attr in attributes if attr.is_in_init], return_type=NoneType(), ) if (decorator_arguments['eq'] and info.get('__eq__') is None or decorator_arguments['order']): # Type variable for self types in generated methods. obj_type = ctx.api.named_type('__builtins__.object') self_tvar_expr = TypeVarExpr(SELF_TVAR_NAME, info.fullname + '.' + SELF_TVAR_NAME, [], obj_type) info.names[SELF_TVAR_NAME] = SymbolTableNode(MDEF, self_tvar_expr) # Add an eq method, but only if the class doesn't already have one. if decorator_arguments['eq'] and info.get('__eq__') is None: for method_name in ['__eq__', '__ne__']: # The TVar is used to enforce that "other" must have # the same type as self (covariant). Note the # "self_type" parameter to add_method. obj_type = ctx.api.named_type('__builtins__.object') cmp_tvar_def = TypeVarDef(SELF_TVAR_NAME, info.fullname + '.' 
+ SELF_TVAR_NAME, -1, [], obj_type) cmp_other_type = TypeVarType(cmp_tvar_def) cmp_return_type = ctx.api.named_type('__builtins__.bool') add_method( ctx, method_name, args=[Argument(Var('other', cmp_other_type), cmp_other_type, None, ARG_POS)], return_type=cmp_return_type, self_type=cmp_other_type, tvar_def=cmp_tvar_def, ) # Add <, >, <=, >=, but only if the class has an eq method. if decorator_arguments['order']: if not decorator_arguments['eq']: ctx.api.fail('eq must be True if order is True', ctx.cls) for method_name in ['__lt__', '__gt__', '__le__', '__ge__']: # Like for __eq__ and __ne__, we want "other" to match # the self type. obj_type = ctx.api.named_type('__builtins__.object') order_tvar_def = TypeVarDef(SELF_TVAR_NAME, info.fullname + '.' + SELF_TVAR_NAME, -1, [], obj_type) order_other_type = TypeVarType(order_tvar_def) order_return_type = ctx.api.named_type('__builtins__.bool') order_args = [ Argument(Var('other', order_other_type), order_other_type, None, ARG_POS) ] existing_method = info.get(method_name) if existing_method is not None and not existing_method.plugin_generated: assert existing_method.node ctx.api.fail( 'You may not have a custom %s method when order=True' % method_name, existing_method.node, ) add_method( ctx, method_name, args=order_args, return_type=order_return_type, self_type=order_other_type, tvar_def=order_tvar_def, ) if decorator_arguments['frozen']: self._freeze(attributes) self.reset_init_only_vars(info, attributes) info.metadata['dataclass'] = { 'attributes': [attr.serialize() for attr in attributes], 'frozen': decorator_arguments['frozen'], } def reset_init_only_vars(self, info: TypeInfo, attributes: List[DataclassAttribute]) -> None: """Remove init-only vars from the class and reset init var declarations.""" for attr in attributes: if attr.is_init_var: if attr.name in info.names: del info.names[attr.name] else: # Nodes of superclass InitVars not used in __init__ cannot be reached. 
assert attr.is_init_var and not attr.is_in_init for stmt in info.defn.defs.body: if isinstance(stmt, AssignmentStmt) and stmt.unanalyzed_type: lvalue = stmt.lvalues[0] if isinstance(lvalue, NameExpr) and lvalue.name == attr.name: # Reset node so that another semantic analysis pass will # recreate a symbol node for this attribute. lvalue.node = None def collect_attributes(self) -> Optional[List[DataclassAttribute]]: """Collect all attributes declared in the dataclass and its parents. All assignments of the form a: SomeType b: SomeOtherType = ... are collected. """ # First, collect attributes belonging to the current class. ctx = self._ctx cls = self._ctx.cls attrs = [] # type: List[DataclassAttribute] known_attrs = set() # type: Set[str] for stmt in cls.defs.body: # Any assignment that doesn't use the new type declaration # syntax can be ignored out of hand. if not (isinstance(stmt, AssignmentStmt) and stmt.new_syntax): continue # a: int, b: str = 1, 'foo' is not supported syntax so we # don't have to worry about it. lhs = stmt.lvalues[0] if not isinstance(lhs, NameExpr): continue sym = cls.info.names.get(lhs.name) if sym is None: # This name is likely blocked by a star import. We don't need to defer because # defer() is already called by mark_incomplete(). continue node = sym.node if isinstance(node, PlaceholderNode): # This node is not ready yet. return None assert isinstance(node, Var) # x: ClassVar[int] is ignored by dataclasses. if node.is_classvar: continue # x: InitVar[int] is turned into x: int and is removed from the class. 
is_init_var = False node_type = get_proper_type(node.type) if (isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar'): is_init_var = True node.type = node_type.args[0] has_field_call, field_args = _collect_field_args(stmt.rvalue) is_in_init_param = field_args.get('init') if is_in_init_param is None: is_in_init = True else: is_in_init = bool(ctx.api.parse_bool(is_in_init_param)) has_default = False # Ensure that something like x: int = field() is rejected # after an attribute with a default. if has_field_call: has_default = 'default' in field_args or 'default_factory' in field_args # All other assignments are already type checked. elif not isinstance(stmt.rvalue, TempNode): has_default = True if not has_default: # Make all non-default attributes implicit because they are de-facto set # on self in the generated __init__(), not in the class body. sym.implicit = True known_attrs.add(lhs.name) attrs.append(DataclassAttribute( name=lhs.name, is_in_init=is_in_init, is_init_var=is_init_var, has_default=has_default, line=stmt.line, column=stmt.column, )) # Next, collect attributes belonging to any class in the MRO # as long as those attributes weren't already collected. This # makes it possible to overwrite attributes in subclasses. # copy() because we potentially modify all_attrs below and if this code requires debugging # we'll have unmodified attrs laying around. all_attrs = attrs.copy() init_method = cls.info.get_method('__init__') for info in cls.info.mro[1:-1]: if 'dataclass' not in info.metadata: continue super_attrs = [] # Each class depends on the set of attributes in its dataclass ancestors. 
ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) for data in info.metadata['dataclass']['attributes']: name = data['name'] # type: str if name not in known_attrs: attr = DataclassAttribute.deserialize(info, data) if attr.is_init_var and isinstance(init_method, FuncDef): # InitVars are removed from classes so, in order for them to be inherited # properly, we need to re-inject them into subclasses' sym tables here. # To do that, we look 'em up from the parents' __init__. These variables # are subsequently removed from the sym table at the end of # DataclassTransformer.transform. for arg, arg_name in zip(init_method.arguments, init_method.arg_names): if arg_name == attr.name: cls.info.names[attr.name] = SymbolTableNode(MDEF, arg.variable) known_attrs.add(name) super_attrs.append(attr) elif all_attrs: # How early in the attribute list an attribute appears is determined by the # reverse MRO, not simply MRO. # See https://docs.python.org/3/library/dataclasses.html#inheritance for # details. for attr in all_attrs: if attr.name == name: all_attrs.remove(attr) super_attrs.append(attr) break all_attrs = super_attrs + all_attrs # Ensure that arguments without a default don't follow # arguments that have a default. found_default = False for attr in all_attrs: # If we find any attribute that is_in_init but that # doesn't have a default after one that does have one, # then that's an error. if found_default and attr.is_in_init and not attr.has_default: # If the issue comes from merging different classes, report it # at the class definition point. context = (Context(line=attr.line, column=attr.column) if attr in attrs else ctx.cls) ctx.api.fail( 'Attributes without a default cannot follow attributes with one', context, ) found_default = found_default or (attr.has_default and attr.is_in_init) return all_attrs def _freeze(self, attributes: List[DataclassAttribute]) -> None: """Converts all attributes to @property methods in order to emulate frozen classes. 
""" info = self._ctx.cls.info for attr in attributes: sym_node = info.names.get(attr.name) if sym_node is not None: var = sym_node.node assert isinstance(var, Var) var.is_property = True else: var = attr.to_var(info) var.info = info var.is_property = True var._fullname = info.fullname + '.' + var.name info.names[var.name] = SymbolTableNode(MDEF, var) def dataclass_class_maker_callback(ctx: ClassDefContext) -> None: """Hooks into the class typechecking process to add support for dataclasses. """ transformer = DataclassTransformer(ctx) transformer.transform() def _collect_field_args(expr: Expression) -> Tuple[bool, Dict[str, Expression]]: """Returns a tuple where the first value represents whether or not the expression is a call to dataclass.field and the second is a dictionary of the keyword arguments that field() was called with. """ if ( isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == 'dataclasses.field' ): # field() only takes keyword arguments. 
args = {} for name, arg in zip(expr.arg_names, expr.args): assert name is not None args[name] = arg return True, args return False, {} mypy-0.761/mypy/plugins/default.py0000644€tŠÔÚ€2›s®0000004517313576752246023407 0ustar jukkaDROPBOX\Domain Users00000000000000from functools import partial from typing import Callable, Optional, List from mypy import message_registry from mypy.nodes import Expression, StrExpr, IntExpr, DictExpr, UnaryExpr from mypy.plugin import ( Plugin, FunctionContext, MethodContext, MethodSigContext, AttributeContext, ClassDefContext, CheckerPluginInterface, ) from mypy.plugins.common import try_getting_str_literals from mypy.types import ( Type, Instance, AnyType, TypeOfAny, CallableType, NoneType, TypedDictType, TypeVarType, TPDICT_FB_NAMES, get_proper_type, LiteralType ) from mypy.subtypes import is_subtype from mypy.typeops import make_simplified_union from mypy.checkexpr import is_literal_type_like class DefaultPlugin(Plugin): """Type checker plugin that is enabled by default.""" def get_function_hook(self, fullname: str ) -> Optional[Callable[[FunctionContext], Type]]: from mypy.plugins import ctypes if fullname == 'contextlib.contextmanager': return contextmanager_callback elif fullname == 'builtins.open' and self.python_version[0] == 3: return open_callback elif fullname == 'ctypes.Array': return ctypes.array_constructor_callback return None def get_method_signature_hook(self, fullname: str ) -> Optional[Callable[[MethodSigContext], CallableType]]: from mypy.plugins import ctypes if fullname == 'typing.Mapping.get': return typed_dict_get_signature_callback elif fullname in set(n + '.setdefault' for n in TPDICT_FB_NAMES): return typed_dict_setdefault_signature_callback elif fullname in set(n + '.pop' for n in TPDICT_FB_NAMES): return typed_dict_pop_signature_callback elif fullname in set(n + '.update' for n in TPDICT_FB_NAMES): return typed_dict_update_signature_callback elif fullname in set(n + '.__delitem__' for n in TPDICT_FB_NAMES): 
return typed_dict_delitem_signature_callback elif fullname == 'ctypes.Array.__setitem__': return ctypes.array_setitem_callback return None def get_method_hook(self, fullname: str ) -> Optional[Callable[[MethodContext], Type]]: from mypy.plugins import ctypes if fullname == 'typing.Mapping.get': return typed_dict_get_callback elif fullname == 'builtins.int.__pow__': return int_pow_callback elif fullname == 'builtins.int.__neg__': return int_neg_callback elif fullname in set(n + '.setdefault' for n in TPDICT_FB_NAMES): return typed_dict_setdefault_callback elif fullname in set(n + '.pop' for n in TPDICT_FB_NAMES): return typed_dict_pop_callback elif fullname in set(n + '.__delitem__' for n in TPDICT_FB_NAMES): return typed_dict_delitem_callback elif fullname == 'ctypes.Array.__getitem__': return ctypes.array_getitem_callback elif fullname == 'ctypes.Array.__iter__': return ctypes.array_iter_callback elif fullname == 'pathlib.Path.open': return path_open_callback return None def get_attribute_hook(self, fullname: str ) -> Optional[Callable[[AttributeContext], Type]]: from mypy.plugins import ctypes from mypy.plugins import enums if fullname == 'ctypes.Array.value': return ctypes.array_value_callback elif fullname == 'ctypes.Array.raw': return ctypes.array_raw_callback elif fullname in enums.ENUM_NAME_ACCESS: return enums.enum_name_callback elif fullname in enums.ENUM_VALUE_ACCESS: return enums.enum_value_callback return None def get_class_decorator_hook(self, fullname: str ) -> Optional[Callable[[ClassDefContext], None]]: from mypy.plugins import attrs from mypy.plugins import dataclasses if fullname in attrs.attr_class_makers: return attrs.attr_class_maker_callback elif fullname in attrs.attr_dataclass_makers: return partial( attrs.attr_class_maker_callback, auto_attribs_default=True ) elif fullname in dataclasses.dataclass_makers: return dataclasses.dataclass_class_maker_callback return None def open_callback(ctx: FunctionContext) -> Type: """Infer a better return 
type for 'open'.""" return _analyze_open_signature( arg_types=ctx.arg_types, args=ctx.args, mode_arg_index=1, default_return_type=ctx.default_return_type, api=ctx.api, ) def path_open_callback(ctx: MethodContext) -> Type: """Infer a better return type for 'pathlib.Path.open'.""" return _analyze_open_signature( arg_types=ctx.arg_types, args=ctx.args, mode_arg_index=0, default_return_type=ctx.default_return_type, api=ctx.api, ) def _analyze_open_signature(arg_types: List[List[Type]], args: List[List[Expression]], mode_arg_index: int, default_return_type: Type, api: CheckerPluginInterface, ) -> Type: """A helper for analyzing any function that has approximately the same signature as the builtin 'open(...)' function. Currently, the only thing the caller can customize is the index of the 'mode' argument. If the mode argument is omitted or is a string literal, we refine the return type to either 'TextIO' or 'BinaryIO' as appropriate. """ mode = None if not arg_types or len(arg_types[mode_arg_index]) != 1: mode = 'r' else: mode_expr = args[mode_arg_index][0] if isinstance(mode_expr, StrExpr): mode = mode_expr.value if mode is not None: assert isinstance(default_return_type, Instance) # type: ignore if 'b' in mode: return api.named_generic_type('typing.BinaryIO', []) else: return api.named_generic_type('typing.TextIO', []) return default_return_type def contextmanager_callback(ctx: FunctionContext) -> Type: """Infer a better return type for 'contextlib.contextmanager'.""" # Be defensive, just in case. if ctx.arg_types and len(ctx.arg_types[0]) == 1: arg_type = get_proper_type(ctx.arg_types[0][0]) default_return = get_proper_type(ctx.default_return_type) if (isinstance(arg_type, CallableType) and isinstance(default_return, CallableType)): # The stub signature doesn't preserve information about arguments so # add them back here. 
return default_return.copy_modified( arg_types=arg_type.arg_types, arg_kinds=arg_type.arg_kinds, arg_names=arg_type.arg_names, variables=arg_type.variables, is_ellipsis_args=arg_type.is_ellipsis_args) return ctx.default_return_type def typed_dict_get_signature_callback(ctx: MethodSigContext) -> CallableType: """Try to infer a better signature type for TypedDict.get. This is used to get better type context for the second argument that depends on a TypedDict value type. """ signature = ctx.default_signature if (isinstance(ctx.type, TypedDictType) and len(ctx.args) == 2 and len(ctx.args[0]) == 1 and isinstance(ctx.args[0][0], StrExpr) and len(signature.arg_types) == 2 and len(signature.variables) == 1 and len(ctx.args[1]) == 1): key = ctx.args[0][0].value value_type = get_proper_type(ctx.type.items.get(key)) ret_type = signature.ret_type if value_type: default_arg = ctx.args[1][0] if (isinstance(value_type, TypedDictType) and isinstance(default_arg, DictExpr) and len(default_arg.items) == 0): # Caller has empty dict {} as default for typed dict. value_type = value_type.copy_modified(required_keys=set()) # Tweak the signature to include the value type as context. It's # only needed for type inference since there's a union with a type # variable that accepts everything. 
tv = TypeVarType(signature.variables[0]) return signature.copy_modified( arg_types=[signature.arg_types[0], make_simplified_union([value_type, tv])], ret_type=ret_type) return signature def typed_dict_get_callback(ctx: MethodContext) -> Type: """Infer a precise return type for TypedDict.get with literal first argument.""" if (isinstance(ctx.type, TypedDictType) and len(ctx.arg_types) >= 1 and len(ctx.arg_types[0]) == 1): keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: return ctx.default_return_type output_types = [] # type: List[Type] for key in keys: value_type = get_proper_type(ctx.type.items.get(key)) if value_type is None: ctx.api.msg.typeddict_key_not_found(ctx.type, key, ctx.context) return AnyType(TypeOfAny.from_error) if len(ctx.arg_types) == 1: output_types.append(value_type) elif (len(ctx.arg_types) == 2 and len(ctx.arg_types[1]) == 1 and len(ctx.args[1]) == 1): default_arg = ctx.args[1][0] if (isinstance(default_arg, DictExpr) and len(default_arg.items) == 0 and isinstance(value_type, TypedDictType)): # Special case '{}' as the default for a typed dict type. output_types.append(value_type.copy_modified(required_keys=set())) else: output_types.append(value_type) output_types.append(ctx.arg_types[1][0]) if len(ctx.arg_types) == 1: output_types.append(NoneType()) return make_simplified_union(output_types) return ctx.default_return_type def typed_dict_pop_signature_callback(ctx: MethodSigContext) -> CallableType: """Try to infer a better signature type for TypedDict.pop. This is used to get better type context for the second argument that depends on a TypedDict value type. 
""" signature = ctx.default_signature str_type = ctx.api.named_generic_type('builtins.str', []) if (isinstance(ctx.type, TypedDictType) and len(ctx.args) == 2 and len(ctx.args[0]) == 1 and isinstance(ctx.args[0][0], StrExpr) and len(signature.arg_types) == 2 and len(signature.variables) == 1 and len(ctx.args[1]) == 1): key = ctx.args[0][0].value value_type = ctx.type.items.get(key) if value_type: # Tweak the signature to include the value type as context. It's # only needed for type inference since there's a union with a type # variable that accepts everything. tv = TypeVarType(signature.variables[0]) typ = make_simplified_union([value_type, tv]) return signature.copy_modified( arg_types=[str_type, typ], ret_type=typ) return signature.copy_modified(arg_types=[str_type, signature.arg_types[1]]) def typed_dict_pop_callback(ctx: MethodContext) -> Type: """Type check and infer a precise return type for TypedDict.pop.""" if (isinstance(ctx.type, TypedDictType) and len(ctx.arg_types) >= 1 and len(ctx.arg_types[0]) == 1): keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: ctx.api.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, ctx.context) return AnyType(TypeOfAny.from_error) value_types = [] for key in keys: if key in ctx.type.required_keys: ctx.api.msg.typeddict_key_cannot_be_deleted(ctx.type, key, ctx.context) value_type = ctx.type.items.get(key) if value_type: value_types.append(value_type) else: ctx.api.msg.typeddict_key_not_found(ctx.type, key, ctx.context) return AnyType(TypeOfAny.from_error) if len(ctx.args[1]) == 0: return make_simplified_union(value_types) elif (len(ctx.arg_types) == 2 and len(ctx.arg_types[1]) == 1 and len(ctx.args[1]) == 1): return make_simplified_union([*value_types, ctx.arg_types[1][0]]) return ctx.default_return_type def typed_dict_setdefault_signature_callback(ctx: MethodSigContext) -> CallableType: """Try to infer a better signature type for TypedDict.setdefault. 
This is used to get better type context for the second argument that depends on a TypedDict value type. """ signature = ctx.default_signature str_type = ctx.api.named_generic_type('builtins.str', []) if (isinstance(ctx.type, TypedDictType) and len(ctx.args) == 2 and len(ctx.args[0]) == 1 and isinstance(ctx.args[0][0], StrExpr) and len(signature.arg_types) == 2 and len(ctx.args[1]) == 1): key = ctx.args[0][0].value value_type = ctx.type.items.get(key) if value_type: return signature.copy_modified(arg_types=[str_type, value_type]) return signature.copy_modified(arg_types=[str_type, signature.arg_types[1]]) def typed_dict_setdefault_callback(ctx: MethodContext) -> Type: """Type check TypedDict.setdefault and infer a precise return type.""" if (isinstance(ctx.type, TypedDictType) and len(ctx.arg_types) == 2 and len(ctx.arg_types[0]) == 1 and len(ctx.arg_types[1]) == 1): keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: ctx.api.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, ctx.context) return AnyType(TypeOfAny.from_error) default_type = ctx.arg_types[1][0] value_types = [] for key in keys: value_type = ctx.type.items.get(key) if value_type is None: ctx.api.msg.typeddict_key_not_found(ctx.type, key, ctx.context) return AnyType(TypeOfAny.from_error) # The signature_callback above can't always infer the right signature # (e.g. when the expression is a variable that happens to be a Literal str) # so we need to handle the check ourselves here and make sure the provided # default can be assigned to all key-value pairs we're updating. 
if not is_subtype(default_type, value_type): ctx.api.msg.typeddict_setdefault_arguments_inconsistent( default_type, value_type, ctx.context) return AnyType(TypeOfAny.from_error) value_types.append(value_type) return make_simplified_union(value_types) return ctx.default_return_type def typed_dict_delitem_signature_callback(ctx: MethodSigContext) -> CallableType: # Replace NoReturn as the argument type. str_type = ctx.api.named_generic_type('builtins.str', []) return ctx.default_signature.copy_modified(arg_types=[str_type]) def typed_dict_delitem_callback(ctx: MethodContext) -> Type: """Type check TypedDict.__delitem__.""" if (isinstance(ctx.type, TypedDictType) and len(ctx.arg_types) == 1 and len(ctx.arg_types[0]) == 1): keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: ctx.api.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, ctx.context) return AnyType(TypeOfAny.from_error) for key in keys: if key in ctx.type.required_keys: ctx.api.msg.typeddict_key_cannot_be_deleted(ctx.type, key, ctx.context) elif key not in ctx.type.items: ctx.api.msg.typeddict_key_not_found(ctx.type, key, ctx.context) return ctx.default_return_type def typed_dict_update_signature_callback(ctx: MethodSigContext) -> CallableType: """Try to infer a better signature type for TypedDict.update.""" signature = ctx.default_signature if (isinstance(ctx.type, TypedDictType) and len(signature.arg_types) == 1): arg_type = get_proper_type(signature.arg_types[0]) assert isinstance(arg_type, TypedDictType) arg_type = arg_type.as_anonymous() arg_type = arg_type.copy_modified(required_keys=set()) return signature.copy_modified(arg_types=[arg_type]) return signature def int_pow_callback(ctx: MethodContext) -> Type: """Infer a more precise return type for int.__pow__.""" # int.__pow__ has an optional modulo argument, # so we expect 2 argument positions if (len(ctx.arg_types) == 2 and len(ctx.arg_types[0]) == 1 and len(ctx.arg_types[1]) == 0): arg = ctx.args[0][0] if 
isinstance(arg, IntExpr): exponent = arg.value elif isinstance(arg, UnaryExpr) and arg.op == '-' and isinstance(arg.expr, IntExpr): exponent = -arg.expr.value else: # Right operand not an int literal or a negated literal -- give up. return ctx.default_return_type if exponent >= 0: return ctx.api.named_generic_type('builtins.int', []) else: return ctx.api.named_generic_type('builtins.float', []) return ctx.default_return_type def int_neg_callback(ctx: MethodContext) -> Type: """Infer a more precise return type for int.__neg__. This is mainly used to infer the return type as LiteralType if the original underlying object is a LiteralType object """ if isinstance(ctx.type, Instance) and ctx.type.last_known_value is not None: value = ctx.type.last_known_value.value fallback = ctx.type.last_known_value.fallback if isinstance(value, int): if is_literal_type_like(ctx.api.type_context[-1]): return LiteralType(value=-value, fallback=fallback) else: return ctx.type.copy_modified(last_known_value=LiteralType( value=-value, fallback=ctx.type, line=ctx.type.line, column=ctx.type.column, )) elif isinstance(ctx.type, LiteralType): value = ctx.type.value fallback = ctx.type.fallback if isinstance(value, int): return LiteralType(value=-value, fallback=fallback) return ctx.default_return_type mypy-0.761/mypy/plugins/enums.py0000644€tŠÔÚ€2›s®0000001236513576752246023107 0ustar jukkaDROPBOX\Domain Users00000000000000""" This file contains a variety of plugins for refining how mypy infers types of expressions involving Enums. Currently, this file focuses on providing better inference for expressions like 'SomeEnum.FOO.name' and 'SomeEnum.FOO.value'. Note that the type of both expressions will vary depending on exactly which instance of SomeEnum we're looking at. Note that this file does *not* contain all special-cased logic related to enums: we actually bake some of it directly in to the semantic analysis layer (see semanal_enum.py). 
""" from typing import Optional from typing_extensions import Final import mypy.plugin # To avoid circular imports. from mypy.types import Type, Instance, LiteralType, get_proper_type # Note: 'enum.EnumMeta' is deliberately excluded from this list. Classes that directly use # enum.EnumMeta do not necessarily automatically have the 'name' and 'value' attributes. ENUM_PREFIXES = {'enum.Enum', 'enum.IntEnum', 'enum.Flag', 'enum.IntFlag'} # type: Final ENUM_NAME_ACCESS = ( {'{}.name'.format(prefix) for prefix in ENUM_PREFIXES} | {'{}._name_'.format(prefix) for prefix in ENUM_PREFIXES} ) # type: Final ENUM_VALUE_ACCESS = ( {'{}.value'.format(prefix) for prefix in ENUM_PREFIXES} | {'{}._value_'.format(prefix) for prefix in ENUM_PREFIXES} ) # type: Final def enum_name_callback(ctx: 'mypy.plugin.AttributeContext') -> Type: """This plugin refines the 'name' attribute in enums to act as if they were declared to be final. For example, the expression 'MyEnum.FOO.name' normally is inferred to be of type 'str'. This plugin will instead make the inferred type be a 'str' where the last known value is 'Literal["FOO"]'. This means it would be legal to use 'MyEnum.FOO.name' in contexts that expect a Literal type, just like any other Final variable or attribute. This plugin assumes that the provided context is an attribute access matching one of the strings found in 'ENUM_NAME_ACCESS'. """ enum_field_name = _extract_underlying_field_name(ctx.type) if enum_field_name is None: return ctx.default_attr_type else: str_type = ctx.api.named_generic_type('builtins.str', []) literal_type = LiteralType(enum_field_name, fallback=str_type) return str_type.copy_modified(last_known_value=literal_type) def enum_value_callback(ctx: 'mypy.plugin.AttributeContext') -> Type: """This plugin refines the 'value' attribute in enums to refer to the original underlying value. 
For example, suppose we have the following: class SomeEnum: FOO = A() BAR = B() By default, mypy will infer that 'SomeEnum.FOO.value' and 'SomeEnum.BAR.value' both are of type 'Any'. This plugin refines this inference so that mypy understands the expressions are actually of types 'A' and 'B' respectively. This better reflects the actual runtime behavior. This plugin works simply by looking up the original value assigned to the enum. For example, when this plugin sees 'SomeEnum.BAR.value', it will look up whatever type 'BAR' had in the SomeEnum TypeInfo and use that as the inferred type of the overall expression. This plugin assumes that the provided context is an attribute access matching one of the strings found in 'ENUM_VALUE_ACCESS'. """ enum_field_name = _extract_underlying_field_name(ctx.type) if enum_field_name is None: return ctx.default_attr_type assert isinstance(ctx.type, Instance) info = ctx.type.type stnode = info.get(enum_field_name) if stnode is None: return ctx.default_attr_type underlying_type = get_proper_type(stnode.type) if underlying_type is None: # TODO: Deduce the inferred type if the user omits adding their own default types. # TODO: Consider using the return type of `Enum._generate_next_value_` here? return ctx.default_attr_type if isinstance(underlying_type, Instance) and underlying_type.type.fullname == 'enum.auto': # TODO: Deduce the correct inferred type when the user uses 'enum.auto'. # We should use the same strategy we end up picking up above. return ctx.default_attr_type return underlying_type def _extract_underlying_field_name(typ: Type) -> Optional[str]: """If the given type corresponds to some Enum instance, returns the original name of that enum. For example, if we receive in the type corresponding to 'SomeEnum.FOO', we return the string "SomeEnum.Foo". This helper takes advantage of the fact that Enum instances are valid to use inside Literal[...] types. 
An expression like 'SomeEnum.FOO' is actually represented by an Instance type with a Literal enum fallback. We can examine this Literal fallback to retrieve the string. """ typ = get_proper_type(typ) if not isinstance(typ, Instance): return None if not typ.type.is_enum: return None underlying_literal = typ.last_known_value if underlying_literal is None: return None # The checks above have verified this LiteralType is representing an enum value, # which means the 'value' field is guaranteed to be the name of the enum field # as a string. assert isinstance(underlying_literal.value, str) return underlying_literal.value mypy-0.761/mypy/reachability.py0000644€tŠÔÚ€2›s®0000002561413576752246022740 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities related to determining the reachability of code (in semantic analysis).""" from typing import Tuple, TypeVar, Union, Optional from typing_extensions import Final from mypy.nodes import ( Expression, IfStmt, Block, AssertStmt, NameExpr, UnaryExpr, MemberExpr, OpExpr, ComparisonExpr, StrExpr, UnicodeExpr, CallExpr, IntExpr, TupleExpr, IndexExpr, SliceExpr, Import, ImportFrom, ImportAll, LITERAL_YES ) from mypy.options import Options from mypy.traverser import TraverserVisitor from mypy.literals import literal # Inferred truth value of an expression. ALWAYS_TRUE = 1 # type: Final MYPY_TRUE = 2 # type: Final # True in mypy, False at runtime ALWAYS_FALSE = 3 # type: Final MYPY_FALSE = 4 # type: Final # False in mypy, True at runtime TRUTH_VALUE_UNKNOWN = 5 # type: Final inverted_truth_mapping = { ALWAYS_TRUE: ALWAYS_FALSE, ALWAYS_FALSE: ALWAYS_TRUE, TRUTH_VALUE_UNKNOWN: TRUTH_VALUE_UNKNOWN, MYPY_TRUE: MYPY_FALSE, MYPY_FALSE: MYPY_TRUE, } # type: Final def infer_reachability_of_if_statement(s: IfStmt, options: Options) -> None: for i in range(len(s.expr)): result = infer_condition_value(s.expr[i], options) if result in (ALWAYS_FALSE, MYPY_FALSE): # The condition is considered always false, so we skip the if/elif body. 
            # (continuation of infer_reachability_of_if_statement)
            mark_block_unreachable(s.body[i])
        elif result in (ALWAYS_TRUE, MYPY_TRUE):
            # This condition is considered always true, so all of the remaining
            # elif/else bodies should not be checked.
            if result == MYPY_TRUE:
                # This condition is false at runtime; this will affect
                # import priorities.
                mark_block_mypy_only(s.body[i])
            for body in s.body[i + 1:]:
                mark_block_unreachable(body)

            # Make sure else body always exists and is marked as
            # unreachable so the type checker always knows that
            # all control flow paths will flow through the if
            # statement body.
            if not s.else_body:
                s.else_body = Block([])
            mark_block_unreachable(s.else_body)
            break


def assert_will_always_fail(s: AssertStmt, options: Options) -> bool:
    """Return True if the assert's condition is statically known to be false."""
    return infer_condition_value(s.expr, options) in (ALWAYS_FALSE, MYPY_FALSE)


def infer_condition_value(expr: Expression, options: Options) -> int:
    """Infer whether the given condition is always true/false.

    Return ALWAYS_TRUE if always true, ALWAYS_FALSE if always false,
    MYPY_TRUE if true under mypy and false at runtime, MYPY_FALSE if
    false under mypy and true at runtime, else TRUTH_VALUE_UNKNOWN.
    """
    pyversion = options.python_version
    name = ''
    negated = False
    alias = expr
    if isinstance(alias, UnaryExpr):
        # Peel off a leading 'not' and remember to invert the final result.
        if alias.op == 'not':
            expr = alias.expr
            negated = True
    result = TRUTH_VALUE_UNKNOWN
    if isinstance(expr, NameExpr):
        name = expr.name
    elif isinstance(expr, MemberExpr):
        name = expr.name
    elif isinstance(expr, OpExpr) and expr.op in ('and', 'or'):
        left = infer_condition_value(expr.left, options)
        if ((left in (ALWAYS_TRUE, MYPY_TRUE) and expr.op == 'and') or
                (left in (ALWAYS_FALSE, MYPY_FALSE) and expr.op == 'or')):
            # Either 'True and <other>' or 'False or <other>': the result will
            # always be the right-hand-side.
            return infer_condition_value(expr.right, options)
        else:
            # The result will always be the left-hand-side (e.g. ALWAYS_* or
            # TRUTH_VALUE_UNKNOWN).
            return left
    else:
        result = consider_sys_version_info(expr, pyversion)
        if result == TRUTH_VALUE_UNKNOWN:
            result = consider_sys_platform(expr, options.platform)
    if result == TRUTH_VALUE_UNKNOWN:
        # Fall back to well-known names and the user-configured
        # always_true/always_false name lists.
        if name == 'PY2':
            result = ALWAYS_TRUE if pyversion[0] == 2 else ALWAYS_FALSE
        elif name == 'PY3':
            result = ALWAYS_TRUE if pyversion[0] == 3 else ALWAYS_FALSE
        elif name == 'MYPY' or name == 'TYPE_CHECKING':
            result = MYPY_TRUE
        elif name in options.always_true:
            result = ALWAYS_TRUE
        elif name in options.always_false:
            result = ALWAYS_FALSE
    if negated:
        result = inverted_truth_mapping[result]
    return result


def consider_sys_version_info(expr: Expression, pyversion: Tuple[int, ...]) -> int:
    """Consider whether expr is a comparison involving sys.version_info.

    Return ALWAYS_TRUE, ALWAYS_FALSE, or TRUTH_VALUE_UNKNOWN.
    """
    # Cases supported:
    # - sys.version_info[<int>] <compare_op> <int>
    # - sys.version_info[:<int>] <compare_op> <tuple of ints>
    # - sys.version_info <compare_op> <tuple of ints>
    #   (in this case <compare_op> must be >, >=, <, <=, but cannot be ==, !=)
    if not isinstance(expr, ComparisonExpr):
        return TRUTH_VALUE_UNKNOWN
    # Let's not yet support chained comparisons.
    # (continuation of consider_sys_version_info)
    if len(expr.operators) > 1:
        return TRUTH_VALUE_UNKNOWN
    op = expr.operators[0]
    if op not in ('==', '!=', '<=', '>=', '<', '>'):
        return TRUTH_VALUE_UNKNOWN

    thing = contains_int_or_tuple_of_ints(expr.operands[1])
    if thing is None:
        return TRUTH_VALUE_UNKNOWN
    index = contains_sys_version_info(expr.operands[0])
    if isinstance(index, int) and isinstance(thing, int):
        # sys.version_info[i] <compare_op> k
        if 0 <= index <= 1:
            return fixed_comparison(pyversion[index], op, thing)
        else:
            return TRUTH_VALUE_UNKNOWN
    elif isinstance(index, tuple) and isinstance(thing, tuple):
        lo, hi = index
        if lo is None:
            lo = 0
        if hi is None:
            hi = 2
        if 0 <= lo < hi <= 2:
            val = pyversion[lo:hi]
            if len(val) == len(thing) or len(val) > len(thing) and op not in ('==', '!='):
                return fixed_comparison(val, op, thing)
    return TRUTH_VALUE_UNKNOWN


def consider_sys_platform(expr: Expression, platform: str) -> int:
    """Consider whether expr is a comparison involving sys.platform.

    Return ALWAYS_TRUE, ALWAYS_FALSE, or TRUTH_VALUE_UNKNOWN.
    """
    # Cases supported:
    # - sys.platform == 'posix'
    # - sys.platform != 'win32'
    # - sys.platform.startswith('win')
    if isinstance(expr, ComparisonExpr):
        # Let's not yet support chained comparisons.
        if len(expr.operators) > 1:
            return TRUTH_VALUE_UNKNOWN
        op = expr.operators[0]
        if op not in ('==', '!='):
            return TRUTH_VALUE_UNKNOWN
        if not is_sys_attr(expr.operands[0], 'platform'):
            return TRUTH_VALUE_UNKNOWN
        right = expr.operands[1]
        if not isinstance(right, (StrExpr, UnicodeExpr)):
            return TRUTH_VALUE_UNKNOWN
        return fixed_comparison(platform, op, right.value)
    elif isinstance(expr, CallExpr):
        # Only 'sys.platform.startswith(<string literal>)' is recognized here.
        if not isinstance(expr.callee, MemberExpr):
            return TRUTH_VALUE_UNKNOWN
        if len(expr.args) != 1 or not isinstance(expr.args[0], (StrExpr, UnicodeExpr)):
            return TRUTH_VALUE_UNKNOWN
        if not is_sys_attr(expr.callee.expr, 'platform'):
            return TRUTH_VALUE_UNKNOWN
        if expr.callee.name != 'startswith':
            return TRUTH_VALUE_UNKNOWN
        if platform.startswith(expr.args[0].value):
            return ALWAYS_TRUE
        else:
            return ALWAYS_FALSE
    else:
        return TRUTH_VALUE_UNKNOWN


Targ = TypeVar('Targ', int, str, Tuple[int, ...])


def fixed_comparison(left: Targ, op: str, right: Targ) -> int:
    """Compare two concrete values and map the result to a truth constant."""
    rmap = {False: ALWAYS_FALSE, True: ALWAYS_TRUE}
    if op == '==':
        return rmap[left == right]
    if op == '!=':
        return rmap[left != right]
    if op == '<=':
        return rmap[left <= right]
    if op == '>=':
        return rmap[left >= right]
    if op == '<':
        return rmap[left < right]
    if op == '>':
        return rmap[left > right]
    return TRUTH_VALUE_UNKNOWN


def contains_int_or_tuple_of_ints(expr: Expression
                                  ) -> Union[None, int, Tuple[int], Tuple[int, ...]]:
    """Return the int (or literal tuple of ints) that expr denotes, else None."""
    if isinstance(expr, IntExpr):
        return expr.value
    if isinstance(expr, TupleExpr):
        if literal(expr) == LITERAL_YES:
            thing = []
            for x in expr.items:
                if not isinstance(x, IntExpr):
                    return None
                thing.append(x.value)
            return tuple(thing)
    return None


def contains_sys_version_info(expr: Expression
                              ) -> Union[None, int, Tuple[Optional[int], Optional[int]]]:
    """Return the index or (begin, end) slice applied to sys.version_info, if any."""
    if is_sys_attr(expr, 'version_info'):
        return (None, None)  # Same as sys.version_info[:]
    if isinstance(expr, IndexExpr) and is_sys_attr(expr.base, 'version_info'):
        index = expr.index
        if isinstance(index, IntExpr):
            return index.value
        if isinstance(index, SliceExpr):
            # Only a stride of 1 (or no stride) is supported.
            if index.stride is not None:
                if not isinstance(index.stride, IntExpr) or index.stride.value != 1:
                    return None
            begin = end = None
            if index.begin_index is not None:
                if not isinstance(index.begin_index, IntExpr):
                    return None
                begin = index.begin_index.value
            if index.end_index is not None:
                if not isinstance(index.end_index, IntExpr):
                    return None
                end = index.end_index.value
            return (begin, end)
    return None


def is_sys_attr(expr: Expression, name: str) -> bool:
    """Return True if expr looks like a reference to 'sys.<name>'."""
    # TODO: This currently doesn't work with code like this:
    # - import sys as _sys
    # - from sys import version_info
    if isinstance(expr, MemberExpr) and expr.name == name:
        if isinstance(expr.expr, NameExpr) and expr.expr.name == 'sys':
            # TODO: Guard against a local named sys, etc.
            # (Though later passes will still do most checking.)
            return True
    return False


def mark_block_unreachable(block: Block) -> None:
    """Flag the block, and all imports nested within it, as unreachable."""
    block.is_unreachable = True
    block.accept(MarkImportsUnreachableVisitor())


class MarkImportsUnreachableVisitor(TraverserVisitor):
    """Visitor that flags all imports nested within a node as unreachable."""

    def visit_import(self, node: Import) -> None:
        node.is_unreachable = True

    def visit_import_from(self, node: ImportFrom) -> None:
        node.is_unreachable = True

    def visit_import_all(self, node: ImportAll) -> None:
        node.is_unreachable = True


def mark_block_mypy_only(block: Block) -> None:
    """Flag all imports nested within the block as mypy-only."""
    block.accept(MarkImportsMypyOnlyVisitor())


class MarkImportsMypyOnlyVisitor(TraverserVisitor):
    """Visitor that sets is_mypy_only (which affects priority)."""

    def visit_import(self, node: Import) -> None:
        node.is_mypy_only = True

    def visit_import_from(self, node: ImportFrom) -> None:
        node.is_mypy_only = True

    def visit_import_all(self, node: ImportAll) -> None:
        node.is_mypy_only = True
mypy-0.761/mypy/renaming.py0000644€tŠÔÚ€2›s®0000003256413576752246022072 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, List
from typing_extensions import Final

from mypy.nodes import (
    Block, AssignmentStmt, NameExpr, MypyFile, FuncDef,
    Lvalue, ListExpr, TupleExpr, WhileStmt, ForStmt, BreakStmt, ContinueStmt, TryStmt,
    WithStmt, StarExpr, ImportFrom, MemberExpr, IndexExpr, Import, ClassDef
)
from mypy.traverser import TraverserVisitor

# Scope kinds
FILE = 0  # type: Final
FUNCTION = 1  # type: Final
CLASS = 2  # type: Final


class VariableRenameVisitor(TraverserVisitor):
    """Rename variables to allow redefinition of variables.

    For example, consider this code:

      x = 0
      f(x)

      x = "a"
      g(x)

    It will be transformed like this:

      x' = 0
      f(x')

      x = "a"
      g(x)

    There will be two independent variables (x' and x) that will have separate
    inferred types. The publicly exposed variant will get the non-suffixed name.
    This is the last definition at module top level and the first definition
    (argument) within a function.

    Renaming only happens for assignments within the same block. Renaming is
    performed before semantic analysis, immediately after parsing.

    The implementation performs a rudimentary static analysis. The analysis is
    overly conservative to keep things simple.
    """

    def __init__(self) -> None:
        # Counter for labeling new blocks
        self.block_id = 0
        # Number of surrounding try statements that disallow variable redefinition
        self.disallow_redef_depth = 0
        # Number of surrounding loop statements
        self.loop_depth = 0
        # Map block id to loop depth.
        self.block_loop_depth = {}  # type: Dict[int, int]
        # Stack of block ids being processed.
        self.blocks = []  # type: List[int]
        # List of scopes; each scope maps short (unqualified) name to block id.
        self.var_blocks = []  # type: List[Dict[str, int]]

        # References to variables that we may need to rename. List of
        # scopes; each scope is a mapping from name to list of collections
        # of names that refer to the same logical variable.
        self.refs = []  # type: List[Dict[str, List[List[NameExpr]]]]
        # Number of reads of the most recent definition of a variable (per scope)
        self.num_reads = []  # type: List[Dict[str, int]]
        # Kinds of nested scopes (FILE, FUNCTION or CLASS)
        self.scope_kinds = []  # type: List[int]

    def visit_mypy_file(self, file_node: MypyFile) -> None:
        """Rename variables within a file.

        This is the main entry point to this class.
        """
        self.clear()
        self.enter_scope(FILE)
        self.enter_block()

        for d in file_node.defs:
            d.accept(self)

        self.leave_block()
        self.leave_scope()

    def visit_func_def(self, fdef: FuncDef) -> None:
        # Conservatively do not allow variable defined before a function to
        # be redefined later, since function could refer to either definition.
        self.reject_redefinition_of_vars_in_scope()

        self.enter_scope(FUNCTION)
        self.enter_block()

        for arg in fdef.arguments:
            name = arg.variable.name
            # 'self' can't be redefined since it's special as it allows definition of
            # attributes. 'cls' can't be used to define attributes so we can ignore it.
            can_be_redefined = name != 'self'  # TODO: Proper check
            self.record_assignment(arg.variable.name, can_be_redefined)
            self.handle_arg(name)

        for stmt in fdef.body.body:
            stmt.accept(self)

        self.leave_block()
        self.leave_scope()

    def visit_class_def(self, cdef: ClassDef) -> None:
        self.reject_redefinition_of_vars_in_scope()
        self.enter_scope(CLASS)
        super().visit_class_def(cdef)
        self.leave_scope()

    def visit_block(self, block: Block) -> None:
        self.enter_block()
        super().visit_block(block)
        self.leave_block()

    def visit_while_stmt(self, stmt: WhileStmt) -> None:
        self.enter_loop()
        super().visit_while_stmt(stmt)
        self.leave_loop()

    def visit_for_stmt(self, stmt: ForStmt) -> None:
        stmt.expr.accept(self)
        self.analyze_lvalue(stmt.index, True)
        # Also analyze as non-lvalue so that every for loop index variable is
        # assumed to be read.
        stmt.index.accept(self)
        self.enter_loop()
        stmt.body.accept(self)
        self.leave_loop()
        if stmt.else_body:
            stmt.else_body.accept(self)

    def visit_break_stmt(self, stmt: BreakStmt) -> None:
        self.reject_redefinition_of_vars_in_loop()

    def visit_continue_stmt(self, stmt: ContinueStmt) -> None:
        self.reject_redefinition_of_vars_in_loop()

    def visit_try_stmt(self, stmt: TryStmt) -> None:
        # Variables defined by a try statement get special treatment in the
        # type checker which allows them to be always redefined, so no need to
        # do renaming here.
        self.enter_try()
        super().visit_try_stmt(stmt)
        self.leave_try()

    def visit_with_stmt(self, stmt: WithStmt) -> None:
        for expr in stmt.expr:
            expr.accept(self)
        for target in stmt.target:
            if target is not None:
                self.analyze_lvalue(target)
        # We allow redefinitions in the body of a with statement for
        # convenience.  This is unsafe since with statements can affect control
        # flow by catching exceptions, but this is rare except for
        # assertRaises() and other similar functions, where the exception is
        # raised by the last statement in the body, which usually isn't a
        # problem.
        stmt.body.accept(self)

    def visit_import(self, imp: Import) -> None:
        # Imported names bind like assignments but can never be redefined.
        for id, as_id in imp.ids:
            self.record_assignment(as_id or id, False)

    def visit_import_from(self, imp: ImportFrom) -> None:
        for id, as_id in imp.names:
            self.record_assignment(as_id or id, False)

    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
        s.rvalue.accept(self)
        for lvalue in s.lvalues:
            self.analyze_lvalue(lvalue)

    def analyze_lvalue(self, lvalue: Lvalue, is_nested: bool = False) -> None:
        """Process assignment; in particular, keep track of (re)defined names.

        Args:
            is_nested: True for non-outermost Lvalue in a multiple assignment
                such as "x, y = ..."
        """
        if isinstance(lvalue, NameExpr):
            name = lvalue.name
            is_new = self.record_assignment(name, True)
            if is_new:
                self.handle_def(lvalue)
            else:
                self.handle_refine(lvalue)
            if is_nested:
                # This allows these to be redefined freely even if never read.
                # Multiple assignment like "x, _, _ = y" defines dummy variables
                # that are never read.
                self.handle_ref(lvalue)
        elif isinstance(lvalue, (ListExpr, TupleExpr)):
            for item in lvalue.items:
                self.analyze_lvalue(item, is_nested=True)
        elif isinstance(lvalue, MemberExpr):
            lvalue.expr.accept(self)
        elif isinstance(lvalue, IndexExpr):
            lvalue.base.accept(self)
            lvalue.index.accept(self)
        elif isinstance(lvalue, StarExpr):
            # Propagate is_nested since in a typical use case like "x, *rest = ..."
            # 'rest' may be freely reused.
            self.analyze_lvalue(lvalue.expr, is_nested=is_nested)

    def visit_name_expr(self, expr: NameExpr) -> None:
        self.handle_ref(expr)

    # Helpers for renaming references

    def handle_arg(self, name: str) -> None:
        """Store function argument."""
        self.refs[-1][name] = [[]]
        self.num_reads[-1][name] = 0

    def handle_def(self, expr: NameExpr) -> None:
        """Store new name definition."""
        name = expr.name
        names = self.refs[-1].setdefault(name, [])
        names.append([expr])
        self.num_reads[-1][name] = 0

    def handle_refine(self, expr: NameExpr) -> None:
        """Store assignment to an existing name (that replaces previous value, if any)."""
        name = expr.name
        if name in self.refs[-1]:
            names = self.refs[-1][name]
            if not names:
                names.append([])
            names[-1].append(expr)

    def handle_ref(self, expr: NameExpr) -> None:
        """Store reference to defined name."""
        name = expr.name
        if name in self.refs[-1]:
            names = self.refs[-1][name]
            if not names:
                names.append([])
            names[-1].append(expr)
        num_reads = self.num_reads[-1]
        num_reads[name] = num_reads.get(name, 0) + 1

    def flush_refs(self) -> None:
        """Rename all references within the current scope.

        This will be called at the end of a scope.
        """
        is_func = self.scope_kinds[-1] == FUNCTION
        for name, refs in self.refs[-1].items():
            if len(refs) == 1:
                # Only one definition -- no renaming needed.
                continue
            if is_func:
                # In a function, don't rename the first definition, as it
                # may be an argument that must preserve the name.
                to_rename = refs[1:]
            else:
                # At module top level, don't rename the final definition,
                # as it will be publicly visible outside the module.
                to_rename = refs[:-1]
            for i, item in enumerate(to_rename):
                self.rename_refs(item, i)
        self.refs.pop()

    def rename_refs(self, names: List[NameExpr], index: int) -> None:
        # Append one prime (') per renaming level; primes are not valid in
        # user-written identifiers, so collisions are impossible.
        name = names[0].name
        new_name = name + "'" * (index + 1)
        for expr in names:
            expr.name = new_name

    # Helpers for determining which assignments define new variables

    def clear(self) -> None:
        self.blocks = []
        self.var_blocks = []

    def enter_block(self) -> None:
        self.block_id += 1
        self.blocks.append(self.block_id)
        self.block_loop_depth[self.block_id] = self.loop_depth

    def leave_block(self) -> None:
        self.blocks.pop()

    def enter_try(self) -> None:
        self.disallow_redef_depth += 1

    def leave_try(self) -> None:
        self.disallow_redef_depth -= 1

    def enter_loop(self) -> None:
        self.loop_depth += 1

    def leave_loop(self) -> None:
        self.loop_depth -= 1

    def current_block(self) -> int:
        return self.blocks[-1]

    def enter_scope(self, kind: int) -> None:
        self.var_blocks.append({})
        self.refs.append({})
        self.num_reads.append({})
        self.scope_kinds.append(kind)

    def leave_scope(self) -> None:
        self.flush_refs()
        self.var_blocks.pop()
        self.num_reads.pop()
        self.scope_kinds.pop()

    def is_nested(self) -> int:
        return len(self.var_blocks) > 1

    def reject_redefinition_of_vars_in_scope(self) -> None:
        """Make it impossible to redefine defined variables in the current scope.

        This is used if we encounter a function definition that
        can make it ambiguous which definition is live. Example:

          x = 0

          def f() -> int:
              return x

          x = ''  # Error -- cannot redefine x across function definition
        """
        var_blocks = self.var_blocks[-1]
        for key in var_blocks:
            var_blocks[key] = -1

    def reject_redefinition_of_vars_in_loop(self) -> None:
        """Reject redefinition of variables in the innermost loop.

        If there is an early exit from a loop, there may be ambiguity about which
        value may escape the loop. Example where this matters:

          while f():
              x = 0
              if g():
                  break
              x = ''  # Error -- not a redefinition
          reveal_type(x)  # int

        This method ensures that the second assignment to 'x' doesn't introduce
        a new variable.
        """
        var_blocks = self.var_blocks[-1]
        for key, block in var_blocks.items():
            if self.block_loop_depth.get(block) == self.loop_depth:
                var_blocks[key] = -1

    def record_assignment(self, name: str, can_be_redefined: bool) -> bool:
        """Record assignment to given name and return True if it defines a new variable.

        Args:
            can_be_redefined: If True, allows assignment in the same block to redefine
                this name (if this is a new definition)
        """
        if self.num_reads[-1].get(name, -1) == 0:
            # Only set, not read, so no reason to redefine
            return False
        if self.disallow_redef_depth > 0:
            # Can't redefine within try/with a block.
            can_be_redefined = False
        block = self.current_block()
        var_blocks = self.var_blocks[-1]
        if name not in var_blocks:
            # New definition in this scope.
            if can_be_redefined:
                # Store the block where this was defined to allow redefinition in
                # the same block only.
                var_blocks[name] = block
            else:
                # This doesn't support arbitrary redefinition.
                var_blocks[name] = -1
            return True
        elif var_blocks[name] == block:
            # Redefinition -- defines a new variable with the same name.
            return True
        else:
            # Assigns to an existing variable.
            return False
mypy-0.761/mypy/report.py0000644€tŠÔÚ€2›s®0000010550113576752246021605 0ustar jukkaDROPBOX\Domain Users00000000000000"""Classes for producing HTML reports about imprecision."""

from abc import ABCMeta, abstractmethod
import collections
import json
import os
import shutil
import tokenize
import time
import sys
import itertools

from operator import attrgetter
from urllib.request import pathname2url

import typing
from typing import Any, Callable, Dict, List, Optional, Tuple, cast, Iterator
from typing_extensions import Final

from mypy.nodes import MypyFile, Expression, FuncDef
from mypy import stats
from mypy.options import Options
from mypy.traverser import TraverserVisitor
from mypy.types import Type, TypeOfAny
from mypy.version import __version__
from mypy.defaults import REPORTER_NAMES

try:
    # mypyc doesn't properly handle import from of submodules that we
    # don't have stubs for, hence the hacky double import
    import lxml.etree  # type: ignore  # noqa: F401
    from lxml import etree
    LXML_INSTALLED = True
except ImportError:
    LXML_INSTALLED = False

# Human-readable labels for each TypeOfAny kind, in report-column order.
type_of_any_name_map = collections.OrderedDict([
    (TypeOfAny.unannotated, "Unannotated"),
    (TypeOfAny.explicit, "Explicit"),
    (TypeOfAny.from_unimported_type, "Unimported"),
    (TypeOfAny.from_omitted_generics, "Omitted Generics"),
    (TypeOfAny.from_error, "Error"),
    (TypeOfAny.special_form, "Special Form"),
    (TypeOfAny.implementation_artifact, "Implementation Artifact"),
])  # type: Final[collections.OrderedDict[int, str]]

# Maps report name to (reporter factory, whether the reporter needs lxml).
ReporterClasses = Dict[str,
                       Tuple[Callable[['Reports', str], 'AbstractReporter'], bool]]

reporter_classes = {}  # type: Final[ReporterClasses]


class Reports:
    """Container that creates the configured reporters and fans events out to them."""

    def __init__(self, data_dir: str, report_dirs: Dict[str, str]) -> None:
        self.data_dir = data_dir
        self.reporters = []  # type: List[AbstractReporter]
        self.named_reporters = {}  # type: Dict[str, AbstractReporter]

        for report_type, report_dir in sorted(report_dirs.items()):
            self.add_report(report_type, report_dir)

    def add_report(self, report_type: str,
                   report_dir: str) -> 'AbstractReporter':
        try:
            # Reuse an existing reporter if one was already registered for this type.
            return self.named_reporters[report_type]
        except KeyError:
            pass
        reporter_cls, needs_lxml = reporter_classes[report_type]
        if needs_lxml and not LXML_INSTALLED:
            print(('You must install the lxml package before you can run mypy'
                   ' with `--{}-report`.\n'
                   'You can do this with `python3 -m pip install lxml`.').format(report_type),
                  file=sys.stderr)
            raise ImportError
        reporter = reporter_cls(self, report_dir)
        self.reporters.append(reporter)
        self.named_reporters[report_type] = reporter
        return reporter

    def file(self,
             tree: MypyFile,
             modules: Dict[str, MypyFile],
             type_map: Dict[Expression, Type],
             options: Options) -> None:
        for reporter in self.reporters:
            reporter.on_file(tree, modules, type_map, options)

    def finish(self) -> None:
        for reporter in self.reporters:
            reporter.on_finish()


class AbstractReporter(metaclass=ABCMeta):
    """Base class for all reporters; subclasses receive per-file and end-of-run events."""

    def __init__(self, reports: Reports, output_dir: str) -> None:
        self.output_dir = output_dir
        if output_dir != '':
            stats.ensure_dir_exists(output_dir)

    @abstractmethod
    def on_file(self,
                tree: MypyFile,
                modules: Dict[str, MypyFile],
                type_map: Dict[Expression, Type],
                options: Options) -> None:
        pass

    @abstractmethod
    def on_finish(self) -> None:
        pass


def register_reporter(report_name: str,
                      reporter: Callable[[Reports, str], AbstractReporter],
                      needs_lxml: bool = False) -> None:
    """Register a reporter factory under the given report name."""
    reporter_classes[report_name] = (reporter, needs_lxml)


def alias_reporter(source_reporter: str, target_reporter: str) -> None:
    """Make target_reporter another name for an already-registered reporter."""
    reporter_classes[target_reporter] = reporter_classes[source_reporter]


def should_skip_path(path: str) -> bool:
    """Return True for paths that reports should not include (specials, stubs, ..)."""
    if stats.is_special_module(path):
        return True
    if path.startswith('..'):
        return True
    if 'stubs' in path.split('/') or 'stubs' in path.split(os.sep):
        return True
    return False


def iterate_python_lines(path: str) -> Iterator[Tuple[int, str]]:
    """Return an iterator over (line number, line text) from a Python file."""
    with tokenize.open(path) as input_file:
        for line_info in enumerate(input_file, 1):
            yield line_info


class FuncCounterVisitor(TraverserVisitor):
    """Count unannotated (counts[0]) and annotated (counts[1]) function definitions."""

    def __init__(self) -> None:
        super().__init__()
        self.counts = [0, 0]

    def visit_func_def(self, defn: FuncDef) -> None:
        # Index by the boolean 'has a type annotation'.
        self.counts[defn.type is not None] += 1


class LineCountReporter(AbstractReporter):
    def __init__(self, reports: Reports, output_dir: str) -> None:
        super().__init__(reports, output_dir)
        self.counts = {}  # type: Dict[str, Tuple[int, int, int, int]]

    def on_file(self,
                tree: MypyFile,
                modules: Dict[str, MypyFile],
                type_map: Dict[Expression, Type],
                options: Options) -> None:
        # Count physical lines.  This assumes the file's encoding is a
        # superset of ASCII (or at least uses \n in its line endings).
        with open(tree.path, 'rb') as f:
            physical_lines = len(f.readlines())

        func_counter = FuncCounterVisitor()
        tree.accept(func_counter)
        unannotated_funcs, annotated_funcs = func_counter.counts
        total_funcs = annotated_funcs + unannotated_funcs

        # Don't count lines or functions as annotated if they have their errors ignored.
        if options.ignore_errors:
            annotated_funcs = 0

        imputed_annotated_lines = (physical_lines * annotated_funcs // total_funcs
                                   if total_funcs else physical_lines)

        self.counts[tree._fullname] = (imputed_annotated_lines, physical_lines,
                                       annotated_funcs, total_funcs)

    def on_finish(self) -> None:
        counts = sorted(((c, p) for p, c in self.counts.items()),
                        reverse=True)  # type: List[Tuple[Tuple[int, int, int, int], str]]
        total_counts = tuple(sum(c[i] for c, p in counts)
                             for i in range(4))
        with open(os.path.join(self.output_dir, 'linecount.txt'), 'w') as f:
            f.write('{:7} {:7} {:6} {:6} total\n'.format(*total_counts))
            for c, p in counts:
                f.write('{:7} {:7} {:6} {:6} {}\n'.format(
                    c[0], c[1], c[2], c[3], p))


register_reporter('linecount', LineCountReporter)


class AnyExpressionsReporter(AbstractReporter):
    """Report frequencies of different kinds of Any types."""

    def __init__(self, reports: Reports, output_dir: str) -> None:
        super().__init__(reports, output_dir)
        self.counts = {}  # type: Dict[str, Tuple[int, int]]
        self.any_types_counter = {}  # type: Dict[str, typing.Counter[int]]

    def on_file(self,
                tree: MypyFile,
                modules: Dict[str, MypyFile],
                type_map: Dict[Expression, Type],
                options: Options) -> None:
        visitor = stats.StatisticsVisitor(inferred=True,
                                          filename=tree.fullname,
                                          modules=modules,
                                          typemap=type_map,
                                          all_nodes=True,
                                          visit_untyped_defs=False)
        tree.accept(visitor)
        self.any_types_counter[tree.fullname] = visitor.type_of_any_counter
        num_unanalyzed_lines = list(visitor.line_map.values()).count(stats.TYPE_UNANALYZED)
        # count each line of dead code as one expression of type "Any"
        num_any = visitor.num_any_exprs + num_unanalyzed_lines
        num_total = visitor.num_imprecise_exprs + visitor.num_precise_exprs + num_any
        if num_total > 0:
            self.counts[tree.fullname] = (num_any, num_total)

    def on_finish(self) -> None:
        self._report_any_exprs()
        self._report_types_of_anys()

    def _write_out_report(self,
                          filename: str,
                          header: List[str],
                          rows: List[List[str]],
                          footer: List[str],
                          ) -> None:
        # Writes a right-aligned plain-text table with a header and footer row.
        row_len = len(header)
        assert all(len(row) == row_len for row in rows + [header, footer])
        min_column_distance = 3  # minimum distance between numbers in two columns
        widths = [-1] * row_len
        for row in rows + [header, footer]:
            for i, value in enumerate(row):
                widths[i] = max(widths[i], len(value))
        for i, w in enumerate(widths):
            # Do not add min_column_distance to the first column.
            if i > 0:
                widths[i] = w + min_column_distance
        with open(os.path.join(self.output_dir, filename), 'w') as f:
            header_str = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(header, widths)))
            separator = '-' * len(header_str)
            f.write(header_str + '\n')
            f.write(separator + '\n')
            for row_values in rows:
                r = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(row_values, widths)))
                f.writelines(r + '\n')
            f.write(separator + '\n')
            footer_str = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(footer, widths)))
            f.writelines(footer_str + '\n')

    def _report_any_exprs(self) -> None:
        total_any = sum(num_any for num_any, _ in self.counts.values())
        total_expr = sum(total for _, total in self.counts.values())
        total_coverage = 100.0
        if total_expr > 0:
            total_coverage = (float(total_expr - total_any) / float(total_expr)) * 100

        column_names = ["Name", "Anys", "Exprs", "Coverage"]
        rows = []  # type: List[List[str]]
        for filename in sorted(self.counts):
            (num_any, num_total) = self.counts[filename]
            coverage = (float(num_total - num_any) / float(num_total)) * 100
            coverage_str = '{:.2f}%'.format(coverage)
            rows.append([filename, str(num_any), str(num_total), coverage_str])
        rows.sort(key=lambda x: x[0])
        total_row = ["Total", str(total_any), str(total_expr), '{:.2f}%'.format(total_coverage)]
        self._write_out_report('any-exprs.txt', column_names, rows, total_row)

    def _report_types_of_anys(self) -> None:
        total_counter = collections.Counter()  # type: typing.Counter[int]
        for counter in self.any_types_counter.values():
            for any_type, value in counter.items():
                total_counter[any_type] += value
        file_column_name = "Name"
        total_row_name = "Total"
        column_names = [file_column_name] + list(type_of_any_name_map.values())
        rows = []  # type: List[List[str]]
        for filename, counter in self.any_types_counter.items():
            rows.append([filename] + [str(counter[typ]) for typ in type_of_any_name_map])
        rows.sort(key=lambda x: x[0])
        total_row = [total_row_name] + [str(total_counter[typ])
                                        for typ in type_of_any_name_map]
        self._write_out_report('types-of-anys.txt', column_names, rows, total_row)


register_reporter('any-exprs', AnyExpressionsReporter)


class LineCoverageVisitor(TraverserVisitor):
    def __init__(self, source: List[str]) -> None:
        self.source = source

        # For each line of source, we maintain a pair of
        #  * the indentation level of the surrounding function
        #    (-1 if not inside a function), and
        #  * whether the surrounding function is typed.
        # Initially, everything is covered at indentation level -1.
        self.lines_covered = [(-1, True) for l in source]

    # The Python AST has position information for the starts of
    # elements, but not for their ends. Fortunately the
    # indentation-based syntax makes it pretty easy to find where a
    # block ends without doing any real parsing.

    # TODO: Handle line continuations (explicit and implicit) and
    # multi-line string literals. (But at least line continuations
    # are normally more indented than their surrounding block anyways,
    # by PEP 8.)

    def indentation_level(self, line_number: int) -> Optional[int]:
        """Return the indentation of a line of the source (specified by
        zero-indexed line number). Returns None for blank lines or comments."""
        line = self.source[line_number]
        indent = 0
        for char in list(line):
            if char == ' ':
                indent += 1
            elif char == '\t':
                # Tabs advance to the next multiple of 8 columns.
                indent = 8 * ((indent + 8) // 8)
            elif char == '#':
                # Line is a comment; ignore it
                return None
            elif char == '\n':
                # Line is entirely whitespace; ignore it
                return None
            # TODO line continuation (\)
            else:
                # Found a non-whitespace character
                return indent
        # Line is entirely whitespace, and at end of file
        # with no trailing newline; ignore it
        return None

    def visit_func_def(self, defn: FuncDef) -> None:
        start_line = defn.get_line() - 1
        start_indent = None
        # When a function is decorated, sometimes the start line will point to
        # whitespace or comments between the decorator and the function, so
        # we have to look for the start.
class LineCoverageVisitor(TraverserVisitor):
    """AST visitor that marks which source lines belong to typed functions."""

    def __init__(self, source: List[str]) -> None:
        self.source = source

        # For every source line we keep a pair of
        #   * the indentation level of the surrounding function
        #     (-1 when not inside any function), and
        #   * whether that surrounding function is typed.
        # Everything starts out "covered" at indentation level -1.
        self.lines_covered = [(-1, True)] * len(source)

    # The Python AST records where elements start but not where they end.
    # Fortunately the indentation-based syntax makes it easy to find the end
    # of a block without real parsing.

    # TODO: Handle line continuations (explicit and implicit) and multi-line
    # string literals.  (At least continuations are normally indented deeper
    # than their surrounding block anyway, per PEP 8.)

    def indentation_level(self, line_number: int) -> Optional[int]:
        """Return the indentation of a source line (zero-indexed).

        Returns None for blank lines or comments.
        """
        column = 0
        for ch in self.source[line_number]:
            if ch == ' ':
                column += 1
            elif ch == '\t':
                # Advance to the next multiple-of-8 tab stop.
                column = 8 * ((column + 8) // 8)
            elif ch == '#':
                # Comment line; ignore it.
                return None
            elif ch == '\n':
                # Entirely whitespace; ignore it.
                return None
            # TODO line continuation (\)
            else:
                # First non-whitespace character.
                return column
        # Entirely whitespace at end of file with no trailing newline.
        return None

    def visit_func_def(self, defn: FuncDef) -> None:
        start_line = defn.get_line() - 1
        start_indent = None
        # For decorated functions the recorded start line can point at
        # whitespace or comments between the decorator and the def, so scan
        # forward for the real start.
        while start_line < len(self.source):
            start_indent = self.indentation_level(start_line)
            if start_indent is not None:
                break
            start_line += 1
        if start_indent is None:
            # Couldn't find the function.  Our line numbers are not reliable
            # enough to assert on, so just give up on this one.
            return

        cur_line = start_line + 1
        end_line = cur_line
        # After this loop the function body is the range [start_line, end_line).
        while cur_line < len(self.source):
            cur_indent = self.indentation_level(cur_line)
            if cur_indent is None:
                # Blank/comment line: consume it, but don't yet attribute it
                # to the function.
                cur_line += 1
            elif cur_indent > start_indent:
                # A non-blank line that belongs to the function.
                cur_line += 1
                end_line = cur_line
            else:
                # First line outside the function definition.
                break

        is_typed = defn.type is not None
        for lineno in range(start_line, end_line):
            previous_indent, _ = self.lines_covered[lineno]
            # Only overwrite when the indentation level is not decreasing;
            # this is defensive against funniness in our line numbers, which
            # are not always reliable.
            if previous_indent <= start_indent:
                self.lines_covered[lineno] = (start_indent, is_typed)

        # Visit the body, in case there are nested functions.
        super().visit_func_def(defn)


class LineCoverageReporter(AbstractReporter):
    """Exact line coverage reporter.

    Writes a JSON dictionary with a single field 'lines' to the file
    'coverage.json' in the report directory.  That field maps each source
    file's absolute pathname to the list of line numbers belonging to typed
    functions in that file.
    """

    def __init__(self, reports: Reports, output_dir: str) -> None:
        super().__init__(reports, output_dir)
        self.lines_covered = {}  # type: Dict[str, List[int]]

    def on_file(self,
                tree: MypyFile,
                modules: Dict[str, MypyFile],
                type_map: Dict[Expression, Type],
                options: Options) -> None:
        with open(tree.path) as f:
            tree_source = f.readlines()

        coverage_visitor = LineCoverageVisitor(tree_source)
        tree.accept(coverage_visitor)

        # Convert zero-based visitor indices into one-based line numbers.
        covered_lines = [index + 1
                         for index, (_, typed) in enumerate(coverage_visitor.lines_covered)
                         if typed]
        self.lines_covered[os.path.abspath(tree.path)] = covered_lines

    def on_finish(self) -> None:
        with open(os.path.join(self.output_dir, 'coverage.json'), 'w') as f:
            json.dump({'lines': self.lines_covered}, f)


register_reporter('linecoverage', LineCoverageReporter)


class FileInfo:
    """Per-file precision counters keyed by stats.precision_names order."""

    def __init__(self, name: str, module: str) -> None:
        self.name = name
        self.module = module
        self.counts = [0] * len(stats.precision_names)

    def total(self) -> int:
        return sum(self.counts)

    def attrib(self) -> Dict[str, str]:
        return {name: str(val)
                for name, val in sorted(zip(stats.precision_names, self.counts))}
class MemoryXmlReporter(AbstractReporter):
    """Internal reporter that generates XML in memory.

    This is used by all other XML-based reporters to avoid duplication.
    """

    def __init__(self, reports: Reports, output_dir: str) -> None:
        super().__init__(reports, output_dir)
        self.xslt_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.xslt')
        self.xslt_txt_path = os.path.join(reports.data_dir, 'xml', 'mypy-txt.xslt')
        self.css_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.css')
        xsd_path = os.path.join(reports.data_dir, 'xml', 'mypy.xsd')
        self.schema = etree.XMLSchema(etree.parse(xsd_path))
        self.last_xml = None  # type: Optional[Any]
        self.files = []  # type: List[FileInfo]

    # XML doesn't like control characters, but they are sometimes
    # legal in source code (e.g. comments, string literals).
    # Tabs (#x09) are allowed in XML content.
    control_fixer = str.maketrans(
        ''.join(chr(i) for i in range(32) if i != 9), '?' * 31)  # type: Final

    def on_file(self,
                tree: MypyFile,
                modules: Dict[str, MypyFile],
                type_map: Dict[Expression, Type],
                options: Options) -> None:
        self.last_xml = None

        try:
            path = os.path.relpath(tree.path)
        except ValueError:
            # relpath can fail on Windows for paths on a different drive.
            return

        if should_skip_path(path):
            return

        visitor = stats.StatisticsVisitor(inferred=True,
                                          filename=tree.fullname,
                                          modules=modules,
                                          typemap=type_map,
                                          all_nodes=True)
        tree.accept(visitor)

        # Consistency fix: use the public 'fullname' accessor as elsewhere in
        # this module (was the private 'tree._fullname', twice below).
        root = etree.Element('mypy-report-file', name=path, module=tree.fullname)
        doc = etree.ElementTree(root)
        file_info = FileInfo(path, tree.fullname)

        for lineno, line_text in iterate_python_lines(path):
            status = visitor.line_map.get(lineno, stats.TYPE_EMPTY)
            file_info.counts[status] += 1
            etree.SubElement(root, 'line',
                             any_info=self._get_any_info_for_line(visitor, lineno),
                             content=line_text.rstrip('\n').translate(self.control_fixer),
                             number=str(lineno),
                             precision=stats.precision_names[status])
        # Assumes a layout similar to what XmlReporter uses.
        xslt_path = os.path.relpath('mypy-html.xslt', path)
        transform_pi = etree.ProcessingInstruction(
            'xml-stylesheet',
            'type="text/xsl" href="%s"' % pathname2url(xslt_path))
        root.addprevious(transform_pi)
        self.schema.assertValid(doc)

        self.last_xml = doc
        self.files.append(file_info)

    @staticmethod
    def _get_any_info_for_line(visitor: stats.StatisticsVisitor, lineno: int) -> str:
        """Summarize the kinds of Any types recorded on a line, if any."""
        if lineno in visitor.any_line_map:
            result = "Any Types on this line: "
            counter = collections.Counter()  # type: typing.Counter[int]
            for typ in visitor.any_line_map[lineno]:
                counter[typ.type_of_any] += 1
            for any_type, occurrences in counter.items():
                result += "\n{} (x{})".format(type_of_any_name_map[any_type], occurrences)
            return result
        else:
            return "No Anys on this line!"

    def on_finish(self) -> None:
        self.last_xml = None
        output_files = sorted(self.files, key=lambda x: x.module)

        root = etree.Element('mypy-report-index', name='index')
        doc = etree.ElementTree(root)

        for file_info in output_files:
            etree.SubElement(root, 'file',
                             file_info.attrib(),
                             module=file_info.module,
                             name=file_info.name,
                             total=str(file_info.total()))
        xslt_path = os.path.relpath('mypy-html.xslt', '.')
        transform_pi = etree.ProcessingInstruction(
            'xml-stylesheet',
            'type="text/xsl" href="%s"' % pathname2url(xslt_path))
        root.addprevious(transform_pi)
        self.schema.assertValid(doc)

        self.last_xml = doc


register_reporter('memory-xml', MemoryXmlReporter, needs_lxml=True)


def get_line_rate(covered_lines: int, total_lines: int) -> str:
    """Return the coverage ratio formatted for Cobertura ('1.0' when empty)."""
    if total_lines == 0:
        return str(1.0)
    else:
        return '{:.4f}'.format(covered_lines / total_lines)


class CoberturaPackage(object):
    """Container for XML and statistics mapping python modules to Cobertura package."""

    def __init__(self, name: str) -> None:
        self.name = name
        self.classes = {}  # type: Dict[str, Any]
        self.packages = {}  # type: Dict[str, CoberturaPackage]
        self.total_lines = 0
        self.covered_lines = 0

    def as_xml(self) -> Any:
        package_element = etree.Element('package',
                                        complexity='1.0',
                                        name=self.name)
        package_element.attrib['branch-rate'] = '0'
        package_element.attrib['line-rate'] = get_line_rate(self.covered_lines,
                                                            self.total_lines)
        classes_element = etree.SubElement(package_element, 'classes')
        for class_name in sorted(self.classes):
            classes_element.append(self.classes[class_name])
        self.add_packages(package_element)
        return package_element

    def add_packages(self, parent_element: Any) -> None:
        if self.packages:
            packages_element = etree.SubElement(parent_element, 'packages')
            for package in sorted(self.packages.values(), key=attrgetter('name')):
                packages_element.append(package.as_xml())
class CoberturaXmlReporter(AbstractReporter):
    """Reporter for generating Cobertura compliant XML."""

    def __init__(self, reports: Reports, output_dir: str) -> None:
        super().__init__(reports, output_dir)
        self.root = etree.Element('coverage',
                                  timestamp=str(int(time.time())),
                                  version=__version__)
        self.doc = etree.ElementTree(self.root)
        self.root_package = CoberturaPackage('.')

    def on_file(self,
                tree: MypyFile,
                modules: Dict[str, MypyFile],
                type_map: Dict[Expression, Type],
                options: Options) -> None:
        path = os.path.relpath(tree.path)
        visitor = stats.StatisticsVisitor(inferred=True,
                                          filename=tree.fullname,
                                          modules=modules,
                                          typemap=type_map,
                                          all_nodes=True)
        tree.accept(visitor)

        class_name = os.path.basename(path)
        # Consistency fix: public 'fullname' accessor (was 'tree._fullname').
        file_info = FileInfo(path, tree.fullname)
        class_element = etree.Element('class',
                                      complexity='1.0',
                                      filename=path,
                                      name=class_name)
        etree.SubElement(class_element, 'methods')
        lines_element = etree.SubElement(class_element, 'lines')

        with tokenize.open(path) as input_file:
            class_lines_covered = 0
            class_total_lines = 0
            for lineno, _ in enumerate(input_file, 1):
                status = visitor.line_map.get(lineno, stats.TYPE_EMPTY)
                hits = 0
                branch = False
                if status == stats.TYPE_EMPTY:
                    continue
                class_total_lines += 1
                if status != stats.TYPE_ANY:
                    class_lines_covered += 1
                    hits = 1
                if status == stats.TYPE_IMPRECISE:
                    branch = True
                file_info.counts[status] += 1
                line_element = etree.SubElement(lines_element, 'line',
                                                branch=str(branch).lower(),
                                                hits=str(hits),
                                                number=str(lineno),
                                                precision=stats.precision_names[status])
                if branch:
                    line_element.attrib['condition-coverage'] = '50% (1/2)'
            class_element.attrib['branch-rate'] = '0'
            class_element.attrib['line-rate'] = get_line_rate(class_lines_covered,
                                                              class_total_lines)

        # parent_module is set to whichever module contains this file.  For most
        # files, we want to simply strip the last element off of the module.
        # But for __init__.py files, the module == the parent module.
        parent_module = file_info.module.rsplit('.', 1)[0]
        if file_info.name.endswith('__init__.py'):
            parent_module = file_info.module

        if parent_module not in self.root_package.packages:
            self.root_package.packages[parent_module] = CoberturaPackage(parent_module)
        current_package = self.root_package.packages[parent_module]
        packages_to_update = [self.root_package, current_package]
        for package in packages_to_update:
            package.total_lines += class_total_lines
            package.covered_lines += class_lines_covered
        current_package.classes[class_name] = class_element

    def on_finish(self) -> None:
        self.root.attrib['line-rate'] = get_line_rate(self.root_package.covered_lines,
                                                      self.root_package.total_lines)
        self.root.attrib['branch-rate'] = '0'
        sources = etree.SubElement(self.root, 'sources')
        source_element = etree.SubElement(sources, 'source')
        source_element.text = os.getcwd()
        self.root_package.add_packages(self.root)
        out_path = os.path.join(self.output_dir, 'cobertura.xml')
        self.doc.write(out_path, encoding='utf-8', pretty_print=True)
        print('Generated Cobertura report:', os.path.abspath(out_path))


register_reporter('cobertura-xml', CoberturaXmlReporter, needs_lxml=True)


class AbstractXmlReporter(AbstractReporter):
    """Internal abstract class for reporters that work via XML."""

    def __init__(self, reports: Reports, output_dir: str) -> None:
        super().__init__(reports, output_dir)

        memory_reporter = reports.add_report('memory-xml', '')
        # The dependency will be called first.
        self.memory_xml = cast(MemoryXmlReporter, memory_reporter)
""" def on_file(self, tree: MypyFile, modules: Dict[str, MypyFile], type_map: Dict[Expression, Type], options: Options) -> None: last_xml = self.memory_xml.last_xml if last_xml is None: return path = os.path.relpath(tree.path) if path.startswith('..'): return out_path = os.path.join(self.output_dir, 'xml', path + '.xml') stats.ensure_dir_exists(os.path.dirname(out_path)) last_xml.write(out_path, encoding='utf-8') def on_finish(self) -> None: last_xml = self.memory_xml.last_xml assert last_xml is not None out_path = os.path.join(self.output_dir, 'index.xml') out_xslt = os.path.join(self.output_dir, 'mypy-html.xslt') out_css = os.path.join(self.output_dir, 'mypy-html.css') last_xml.write(out_path, encoding='utf-8') shutil.copyfile(self.memory_xml.xslt_html_path, out_xslt) shutil.copyfile(self.memory_xml.css_html_path, out_css) print('Generated XML report:', os.path.abspath(out_path)) register_reporter('xml', XmlReporter, needs_lxml=True) class XsltHtmlReporter(AbstractXmlReporter): """Public reporter that exports HTML via XSLT. This is slightly different than running `xsltproc` on the .xml files, because it passes a parameter to rewrite the links. 
""" def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.xslt_html = etree.XSLT(etree.parse(self.memory_xml.xslt_html_path)) self.param_html = etree.XSLT.strparam('html') def on_file(self, tree: MypyFile, modules: Dict[str, MypyFile], type_map: Dict[Expression, Type], options: Options) -> None: last_xml = self.memory_xml.last_xml if last_xml is None: return path = os.path.relpath(tree.path) if path.startswith('..'): return out_path = os.path.join(self.output_dir, 'html', path + '.html') stats.ensure_dir_exists(os.path.dirname(out_path)) transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html)) with open(out_path, 'wb') as out_file: out_file.write(transformed_html) def on_finish(self) -> None: last_xml = self.memory_xml.last_xml assert last_xml is not None out_path = os.path.join(self.output_dir, 'index.html') out_css = os.path.join(self.output_dir, 'mypy-html.css') transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html)) with open(out_path, 'wb') as out_file: out_file.write(transformed_html) shutil.copyfile(self.memory_xml.css_html_path, out_css) print('Generated HTML report (via XSLT):', os.path.abspath(out_path)) register_reporter('xslt-html', XsltHtmlReporter, needs_lxml=True) class XsltTxtReporter(AbstractXmlReporter): """Public reporter that exports TXT via XSLT. Currently this only does the summary, not the individual reports. 
""" def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.xslt_txt = etree.XSLT(etree.parse(self.memory_xml.xslt_txt_path)) def on_file(self, tree: MypyFile, modules: Dict[str, MypyFile], type_map: Dict[Expression, Type], options: Options) -> None: pass def on_finish(self) -> None: last_xml = self.memory_xml.last_xml assert last_xml is not None out_path = os.path.join(self.output_dir, 'index.txt') transformed_txt = bytes(self.xslt_txt(last_xml)) with open(out_path, 'wb') as out_file: out_file.write(transformed_txt) print('Generated TXT report (via XSLT):', os.path.abspath(out_path)) register_reporter('xslt-txt', XsltTxtReporter, needs_lxml=True) alias_reporter('xslt-html', 'html') alias_reporter('xslt-txt', 'txt') class LinePrecisionReporter(AbstractReporter): """Report per-module line counts for typing precision. Each line is classified into one of these categories: * precise (fully type checked) * imprecise (Any types in a type component, such as List[Any]) * any (something with an Any type, implicit or explicit) * empty (empty line, comment or docstring) * unanalyzed (mypy considers line unreachable) The meaning of these categories varies slightly depending on context. 
""" def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.files = [] # type: List[FileInfo] def on_file(self, tree: MypyFile, modules: Dict[str, MypyFile], type_map: Dict[Expression, Type], options: Options) -> None: try: path = os.path.relpath(tree.path) except ValueError: return if should_skip_path(path): return visitor = stats.StatisticsVisitor(inferred=True, filename=tree.fullname, modules=modules, typemap=type_map, all_nodes=True) tree.accept(visitor) file_info = FileInfo(path, tree._fullname) for lineno, _ in iterate_python_lines(path): status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) file_info.counts[status] += 1 self.files.append(file_info) def on_finish(self) -> None: if not self.files: # Nothing to do. return output_files = sorted(self.files, key=lambda x: x.module) report_file = os.path.join(self.output_dir, 'lineprecision.txt') width = max(4, max(len(info.module) for info in output_files)) titles = ('Lines', 'Precise', 'Imprecise', 'Any', 'Empty', 'Unanalyzed') widths = (width,) + tuple(len(t) for t in titles) fmt = '{:%d} {:%d} {:%d} {:%d} {:%d} {:%d} {:%d}\n' % widths with open(report_file, 'w') as f: f.write( fmt.format('Name', *titles)) f.write('-' * (width + 51) + '\n') for file_info in output_files: counts = file_info.counts f.write(fmt.format(file_info.module.ljust(width), file_info.total(), counts[stats.TYPE_PRECISE], counts[stats.TYPE_IMPRECISE], counts[stats.TYPE_ANY], counts[stats.TYPE_EMPTY], counts[stats.TYPE_UNANALYZED])) register_reporter('lineprecision', LinePrecisionReporter) # Reporter class names are defined twice to speed up mypy startup, as this # module is slow to import. Ensure that the two definitions match. 
assert set(reporter_classes) == set(REPORTER_NAMES)

# ===== archive member boundary: mypy-0.761/mypy/sametypes.py =====

from typing import Sequence

from mypy.types import (
    Type, UnboundType, AnyType, NoneType, TupleType, TypedDictType,
    UnionType, CallableType, TypeVarType, Instance, TypeVisitor, ErasedType,
    Overloaded, PartialType, DeletedType, UninhabitedType, TypeType, LiteralType,
    ProperType, get_proper_type, TypeAliasType)
from mypy.typeops import tuple_fallback, make_simplified_union


def is_same_type(left: Type, right: Type) -> bool:
    """Is 'left' the same type as 'right'?"""
    left = get_proper_type(left)
    right = get_proper_type(right)

    if isinstance(right, UnboundType):
        # Treat unbound types as the same as anything else to reduce the
        # number of generated spurious error messages.
        return True
    # Canonicalize before comparing: several distinct union types denote the
    # same type (e.g. Union[int, bool, str] vs Union[int, str]), and some
    # unions collapse to non-union types (Union[int, bool] -> int).  We don't
    # always have simplified unions by this point, though we often do.
    return simplify_union(left).accept(SameTypeVisitor(simplify_union(right)))


def simplify_union(t: Type) -> ProperType:
    """Reduce a union to its canonical, simplified form (no-op otherwise)."""
    t = get_proper_type(t)
    if isinstance(t, UnionType):
        return make_simplified_union(t.items)
    return t


def is_same_types(a1: Sequence[Type], a2: Sequence[Type]) -> bool:
    """Are two type sequences element-wise the same (and the same length)?"""
    return (len(a1) == len(a2)
            and all(is_same_type(t1, t2) for t1, t2 in zip(a1, a2)))


class SameTypeVisitor(TypeVisitor[bool]):
    """Visitor for checking whether two types are the 'same' type."""

    def __init__(self, right: ProperType) -> None:
        self.right = right

    # visit_x(left) means: is left (which is an instance of X) the same type
    # as right?

    def visit_unbound_type(self, left: UnboundType) -> bool:
        return True

    def visit_any(self, left: AnyType) -> bool:
        return isinstance(self.right, AnyType)

    def visit_none_type(self, left: NoneType) -> bool:
        return isinstance(self.right, NoneType)

    def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
        return isinstance(self.right, UninhabitedType)

    def visit_erased_type(self, left: ErasedType) -> bool:
        # We can get here when isinstance is used inside a lambda whose type
        # is being inferred.  There is no reason to think an ErasedType will
        # equal anything except another ErasedType (for protocols).
        return isinstance(self.right, ErasedType)

    def visit_deleted_type(self, left: DeletedType) -> bool:
        return isinstance(self.right, DeletedType)

    def visit_instance(self, left: Instance) -> bool:
        return (isinstance(self.right, Instance) and
                left.type == self.right.type and
                is_same_types(left.args, self.right.args) and
                left.last_known_value == self.right.last_known_value)

    def visit_type_alias_type(self, left: TypeAliasType) -> bool:
        # Similar to protocols, two aliases with the same targets return
        # False here, but both is_subtype(t, s) and is_subtype(s, t) return
        # True.
        return (isinstance(self.right, TypeAliasType) and
                left.alias == self.right.alias and
                is_same_types(left.args, self.right.args))

    def visit_type_var(self, left: TypeVarType) -> bool:
        return (isinstance(self.right, TypeVarType) and
                left.id == self.right.id)

    def visit_callable_type(self, left: CallableType) -> bool:
        # FIX generics
        if not isinstance(self.right, CallableType):
            return False
        cright = self.right
        return (is_same_type(left.ret_type, cright.ret_type) and
                is_same_types(left.arg_types, cright.arg_types) and
                left.arg_names == cright.arg_names and
                left.arg_kinds == cright.arg_kinds and
                left.is_type_obj() == cright.is_type_obj() and
                left.is_ellipsis_args == cright.is_ellipsis_args)

    def visit_tuple_type(self, left: TupleType) -> bool:
        if not isinstance(self.right, TupleType):
            return False
        return (is_same_type(tuple_fallback(left), tuple_fallback(self.right))
                and is_same_types(left.items, self.right.items))

    def visit_typeddict_type(self, left: TypedDictType) -> bool:
        if not isinstance(self.right, TypedDictType):
            return False
        if left.items.keys() != self.right.items.keys():
            return False
        # Same keys; now every per-key item type must match.
        return all(is_same_type(left_item_type, right_item_type)
                   for _, left_item_type, right_item_type in left.zip(self.right))

    def visit_literal_type(self, left: LiteralType) -> bool:
        if not isinstance(self.right, LiteralType):
            return False
        if left.value != self.right.value:
            return False
        return is_same_type(left.fallback, self.right.fallback)

    def visit_union_type(self, left: UnionType) -> bool:
        if not isinstance(self.right, UnionType):
            return False
        right_items = self.right.items
        # Set-like equality: each side must be covered by the other.
        left_covered = all(any(is_same_type(li, ri) for ri in right_items)
                           for li in left.items)
        right_covered = all(any(is_same_type(ri, li) for li in left.items)
                            for ri in right_items)
        return left_covered and right_covered

    def visit_overloaded(self, left: Overloaded) -> bool:
        if not isinstance(self.right, Overloaded):
            return False
        return is_same_types(left.items(), self.right.items())

    def visit_partial_type(self, left: PartialType) -> bool:
        # A partial type is not fully defined, so the result is
        # indeterminate.  We shouldn't get here.
        raise RuntimeError

    def visit_type_type(self, left: TypeType) -> bool:
        if not isinstance(self.right, TypeType):
            return False
        return is_same_type(left.item, self.right.item)

# ===== archive member boundary: mypy-0.761/mypy/scope.py =====

"""Track current scope to easily calculate the corresponding fine-grained target.

TODO: Use everywhere where we track targets, including in mypy.errors.
"""

from contextlib import contextmanager
from typing import List, Optional, Iterator, Tuple

from mypy.nodes import TypeInfo, FuncBase

SavedScope = Tuple[str, Optional[TypeInfo], Optional[FuncBase]]
class Scope:
    """Track which target we are processing at any given time."""

    def __init__(self) -> None:
        self.module = None  # type: Optional[str]
        self.classes = []  # type: List[TypeInfo]
        self.function = None  # type: Optional[FuncBase]
        # Number of nested scopes ignored (that don't get their own separate targets)
        self.ignored = 0

    def current_module_id(self) -> str:
        assert self.module
        return self.module

    def current_target(self) -> str:
        """Return the current target (non-class; for a class return enclosing module)."""
        assert self.module
        if self.function:
            fullname = self.function.fullname
            return fullname or ''
        return self.module

    def current_full_target(self) -> str:
        """Return the current target (may be a class)."""
        assert self.module
        if self.function:
            return self.function.fullname
        if self.classes:
            return self.classes[-1].fullname
        return self.module

    def current_type_name(self) -> Optional[str]:
        """Return the current type's short name if it exists"""
        return self.classes[-1].name if self.classes else None

    def current_function_name(self) -> Optional[str]:
        """Return the current function's short name if it exists"""
        return self.function.name if self.function else None

    def enter_file(self, prefix: str) -> None:
        """Reset all state and enter a module scope."""
        self.module = prefix
        self.classes = []
        self.function = None
        self.ignored = 0

    def enter_function(self, fdef: FuncBase) -> None:
        if not self.function:
            self.function = fdef
        else:
            # Nested functions are part of the topmost function target.
            self.ignored += 1

    def enter_class(self, info: TypeInfo) -> None:
        """Enter a class target scope."""
        if not self.function:
            self.classes.append(info)
        else:
            # Classes within functions are part of the enclosing function target.
            self.ignored += 1

    def leave(self) -> None:
        """Leave the innermost scope (can be any kind of scope)."""
        if self.ignored:
            # Leave a scope that's included in the enclosing target.
            self.ignored -= 1
        elif self.function:
            # Function is always the innermost target.
            self.function = None
        elif self.classes:
            # Leave the innermost class.
            self.classes.pop()
        else:
            # Leave module.
            assert self.module
            self.module = None

    def save(self) -> SavedScope:
        """Produce a saved scope that can be entered with saved_scope()"""
        assert self.module
        # Only the innermost class is saved; that is sufficient since the
        # rest are only needed when classes are left.
        innermost_class = self.classes[-1] if self.classes else None
        return (self.module, innermost_class, self.function)

    @contextmanager
    def function_scope(self, fdef: FuncBase) -> Iterator[None]:
        self.enter_function(fdef)
        yield
        self.leave()

    @contextmanager
    def class_scope(self, info: TypeInfo) -> Iterator[None]:
        self.enter_class(info)
        yield
        self.leave()

    @contextmanager
    def saved_scope(self, saved: SavedScope) -> Iterator[None]:
        """Temporarily re-enter a previously saved scope."""
        module, info, function = saved
        self.enter_file(module)
        if info:
            self.enter_class(info)
        if function:
            self.enter_function(function)
        yield
        if function:
            self.leave()
        if info:
            self.leave()
        self.leave()
Bind names to definitions and do various other simple consistency checks. Populate symbol tables. The semantic analyzer also detects special forms which reuse generic syntax such as NamedTuple and cast(). Multiple analysis iterations may be needed to analyze forward references and import cycles. Each iteration "fills in" additional bindings and references until everything has been bound. For example, consider this program: x = 1 y = x Here semantic analysis would detect that the assignment 'x = 1' defines a new variable, the type of which is to be inferred (in a later pass; type inference or type checking is not part of semantic analysis). Also, it would bind both references to 'x' to the same module-level variable (Var) node. The second assignment would also be analyzed, and the type of 'y' marked as being inferred. Semantic analysis of types is implemented in typeanal.py. See semanal_main.py for the top-level logic. Some important properties: * After semantic analysis is complete, no PlaceholderNode and PlaceholderType instances should remain. During semantic analysis, if we encounter one of these, the current target should be deferred. * A TypeInfo is only created once we know certain basic information about a type, such as the MRO, existence of a Tuple base class (e.g., for named tuples), and whether we have a TypedDict. We use a temporary PlaceholderNode node in the symbol table if some such information is missing. * For assignments, we only add a non-placeholder symbol table entry once we know the sort of thing being defined (variable, NamedTuple, type alias, etc.). * Every part of the analysis step must support multiple iterations over the same AST nodes, and each iteration must be able to fill in arbitrary things that were missing or incomplete in previous iterations. * Changes performed by the analysis need to be reversible, since mypy daemon strips and reuses existing ASTs (to improve performance and/or reduce memory use). 
""" from contextlib import contextmanager from typing import ( List, Dict, Set, Tuple, cast, TypeVar, Union, Optional, Callable, Iterator, Iterable ) from typing_extensions import Final from mypy.nodes import ( MypyFile, TypeInfo, Node, AssignmentStmt, FuncDef, OverloadedFuncDef, ClassDef, Var, GDEF, FuncItem, Import, Expression, Lvalue, ImportFrom, ImportAll, Block, LDEF, NameExpr, MemberExpr, IndexExpr, TupleExpr, ListExpr, ExpressionStmt, ReturnStmt, RaiseStmt, AssertStmt, OperatorAssignmentStmt, WhileStmt, ForStmt, BreakStmt, ContinueStmt, IfStmt, TryStmt, WithStmt, DelStmt, GlobalDecl, SuperExpr, DictExpr, CallExpr, RefExpr, OpExpr, UnaryExpr, SliceExpr, CastExpr, RevealExpr, TypeApplication, Context, SymbolTable, SymbolTableNode, ListComprehension, GeneratorExpr, LambdaExpr, MDEF, Decorator, SetExpr, TypeVarExpr, StrExpr, BytesExpr, PrintStmt, ConditionalExpr, PromoteExpr, ComparisonExpr, StarExpr, ARG_POS, ARG_NAMED, type_aliases, YieldFromExpr, NamedTupleExpr, NonlocalDecl, SymbolNode, SetComprehension, DictionaryComprehension, TypeAlias, TypeAliasExpr, YieldExpr, ExecStmt, BackquoteExpr, ImportBase, AwaitExpr, IntExpr, FloatExpr, UnicodeExpr, TempNode, OverloadPart, PlaceholderNode, COVARIANT, CONTRAVARIANT, INVARIANT, nongen_builtins, get_member_expr_fullname, REVEAL_TYPE, REVEAL_LOCALS, is_final_node, TypedDictExpr, type_aliases_target_versions, EnumCallExpr, RUNTIME_PROTOCOL_DECOS, FakeExpression, Statement, AssignmentExpr, ) from mypy.tvar_scope import TypeVarScope from mypy.typevars import fill_typevars from mypy.visitor import NodeVisitor from mypy.errors import Errors, report_internal_error from mypy.messages import best_matches, MessageBuilder, pretty_or from mypy.errorcodes import ErrorCode from mypy import message_registry, errorcodes as codes from mypy.types import ( FunctionLike, UnboundType, TypeVarDef, TupleType, UnionType, StarType, CallableType, Overloaded, Instance, Type, AnyType, LiteralType, LiteralValue, TypeTranslator, TypeOfAny, 
TypeType, NoneType, PlaceholderType, TPDICT_NAMES, ProperType, get_proper_type, get_proper_types, TypeAliasType) from mypy.typeops import function_type from mypy.type_visitor import TypeQuery from mypy.nodes import implicit_module_attrs from mypy.typeanal import ( TypeAnalyser, analyze_type_alias, no_subscript_builtin_alias, TypeVariableQuery, TypeVarList, remove_dups, has_any_from_unimported_type, check_for_explicit_any, type_constructors, fix_instance_types ) from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError from mypy.options import Options from mypy.plugin import ( Plugin, ClassDefContext, SemanticAnalyzerPluginInterface, DynamicClassDefContext ) from mypy.util import correct_relative_import, unmangle, module_prefix from mypy.scope import Scope from mypy.semanal_shared import ( SemanticAnalyzerInterface, set_callable_name, calculate_tuple_fallback, PRIORITY_FALLBACKS ) from mypy.semanal_namedtuple import NamedTupleAnalyzer from mypy.semanal_typeddict import TypedDictAnalyzer from mypy.semanal_enum import EnumCallAnalyzer from mypy.semanal_newtype import NewTypeAnalyzer from mypy.reachability import ( infer_reachability_of_if_statement, infer_condition_value, ALWAYS_FALSE, ALWAYS_TRUE, MYPY_TRUE, MYPY_FALSE ) from mypy.mro import calculate_mro, MroError T = TypeVar('T') # Map from the full name of a missing definition to the test fixture (under # test-data/unit/fixtures/) that provides the definition. This is used for # generating better error messages when running mypy tests only. 
SUGGESTED_TEST_FIXTURES = { 'builtins.list': 'list.pyi', 'builtins.dict': 'dict.pyi', 'builtins.set': 'set.pyi', 'builtins.bool': 'bool.pyi', 'builtins.Exception': 'exception.pyi', 'builtins.BaseException': 'exception.pyi', 'builtins.isinstance': 'isinstancelist.pyi', 'builtins.property': 'property.pyi', 'builtins.classmethod': 'classmethod.pyi', } # type: Final TYPES_FOR_UNIMPORTED_HINTS = { 'typing.Any', 'typing.Callable', 'typing.Dict', 'typing.Iterable', 'typing.Iterator', 'typing.List', 'typing.Optional', 'typing.Set', 'typing.Tuple', 'typing.TypeVar', 'typing.Union', 'typing.cast', } # type: Final # Special cased built-in classes that are needed for basic functionality and need to be # available very early on. CORE_BUILTIN_CLASSES = ['object', 'bool', 'function'] # type: Final # Used for tracking incomplete references Tag = int class SemanticAnalyzer(NodeVisitor[None], SemanticAnalyzerInterface, SemanticAnalyzerPluginInterface): """Semantically analyze parsed mypy files. The analyzer binds names and does various consistency checks for an AST. Note that type checking is performed as a separate pass. """ # Module name space modules = None # type: Dict[str, MypyFile] # Global name space for current module globals = None # type: SymbolTable # Names declared using "global" (separate set for each scope) global_decls = None # type: List[Set[str]] # Names declated using "nonlocal" (separate set for each scope) nonlocal_decls = None # type: List[Set[str]] # Local names of function scopes; None for non-function scopes. locals = None # type: List[Optional[SymbolTable]] # Whether each scope is a comprehension scope. is_comprehension_stack = None # type: List[bool] # Nested block depths of scopes block_depth = None # type: List[int] # TypeInfo of directly enclosing class (or None) type = None # type: Optional[TypeInfo] # Stack of outer classes (the second tuple item contains tvars). 
type_stack = None # type: List[Optional[TypeInfo]] # Type variables bound by the current scope, be it class or function tvar_scope = None # type: TypeVarScope # Per-module options options = None # type: Options # Stack of functions being analyzed function_stack = None # type: List[FuncItem] # Set to True if semantic analysis defines a name, or replaces a # placeholder definition. If some iteration makes no progress, # there can be at most one additional final iteration (see below). progress = False deferred = False # Set to true if another analysis pass is needed incomplete = False # Set to true if current module namespace is missing things # Is this the final iteration of semantic analysis (where we report # unbound names due to cyclic definitions and should not defer)? _final_iteration = False # These names couldn't be added to the symbol table due to incomplete deps. # Note that missing names are per module, _not_ per namespace. This means that e.g. # a missing name at global scope will block adding same name at a class scope. # This should not affect correctness and is purely a performance issue, # since it can cause unnecessary deferrals. These are represented as # PlaceholderNodes in the symbol table. We use this to ensure that the first # definition takes precedence even if it's incomplete. # # Note that a star import adds a special name '*' to the set, this blocks # adding _any_ names in the current file. missing_names = None # type: Set[str] # Callbacks that will be called after semantic analysis to tweak things. patches = None # type: List[Tuple[int, Callable[[], None]]] loop_depth = 0 # Depth of breakable loops cur_mod_id = '' # Current module id (or None) (phase 2) is_stub_file = False # Are we analyzing a stub file? _is_typeshed_stub_file = False # Are we analyzing a typeshed stub file? 
    imports = None  # type: Set[str]  # Imported modules (during phase 2 analysis)
    # Note: some imports (and therefore dependencies) might
    # not be found in phase 1, for example due to * imports.
    errors = None  # type: Errors  # Keeps track of generated errors
    plugin = None  # type: Plugin  # Mypy plugin for special casing of library features
    statement = None  # type: Optional[Statement]  # Statement/definition being analyzed

    def __init__(self,
                 modules: Dict[str, MypyFile],
                 missing_modules: Set[str],
                 incomplete_namespaces: Set[str],
                 errors: Errors,
                 plugin: Plugin) -> None:
        """Construct semantic analyzer.

        We reuse the same semantic analyzer instance across multiple modules.

        Args:
            modules: Global modules dictionary
            missing_modules: Modules that could not be imported encountered so far
            incomplete_namespaces: Namespaces that are being populated during semantic
                analysis (can contain modules and classes within the current SCC;
                mutated by the caller)
            errors: Report analysis errors using this instance
        """
        self.locals = [None]
        self.is_comprehension_stack = [False]
        # Saved namespaces from previous iteration. Every top-level function/method body is
        # analyzed in several iterations until all names are resolved. We need to save
        # the local namespaces for the top level function and all nested functions between
        # these iterations. See also semanal_main.process_top_level_function().
        self.saved_locals = {} \
            # type: Dict[Union[FuncItem, GeneratorExpr, DictionaryComprehension], SymbolTable]
        self.imports = set()
        self.type = None
        self.type_stack = []
        self.tvar_scope = TypeVarScope()
        self.function_stack = []
        self.block_depth = [0]
        self.loop_depth = 0
        self.errors = errors
        self.modules = modules
        self.msg = MessageBuilder(errors, modules)
        self.missing_modules = missing_modules
        # These namespaces are still in process of being populated. If we encounter a
        # missing name in these namespaces, we need to defer the current analysis target,
        # since it's possible that the name will be there once the namespace is complete.
        self.incomplete_namespaces = incomplete_namespaces
        self.all_exports = []  # type: List[str]
        # Map from module id to list of explicitly exported names (i.e. names in __all__).
        self.export_map = {}  # type: Dict[str, List[str]]
        self.plugin = plugin
        # If True, process function definitions. If False, don't. This is used
        # for processing module top levels in fine-grained incremental mode.
        self.recurse_into_functions = True
        self.scope = Scope()
        # Trace line numbers for every file where deferral happened during analysis of
        # current SCC or top-level function.
        self.deferral_debug_context = []  # type: List[Tuple[str, int]]

    # mypyc doesn't properly handle implementing an abstractproperty
    # with a regular attribute so we make them properties
    @property
    def is_typeshed_stub_file(self) -> bool:
        return self._is_typeshed_stub_file

    @property
    def final_iteration(self) -> bool:
        return self._final_iteration

    #
    # Preparing module (performed before semantic analysis)
    #

    def prepare_file(self, file_node: MypyFile) -> None:
        """Prepare a freshly parsed file for semantic analysis."""
        if 'builtins' in self.modules:
            file_node.names['__builtins__'] = SymbolTableNode(GDEF,
                                                              self.modules['builtins'])
        if file_node.fullname == 'builtins':
            self.prepare_builtins_namespace(file_node)
        if file_node.fullname == 'typing':
            self.prepare_typing_namespace(file_node)

    def prepare_typing_namespace(self, file_node: MypyFile) -> None:
        """Remove dummy alias definitions such as List = TypeAlias(object) from typing.

        They will be replaced with real aliases when corresponding targets are ready.
        """
        for stmt in file_node.defs.copy():
            if (isinstance(stmt, AssignmentStmt) and len(stmt.lvalues) == 1 and
                    isinstance(stmt.lvalues[0], NameExpr)):
                # Assignment to a simple name, remove it if it is a dummy alias.
                if 'typing.' + stmt.lvalues[0].name in type_aliases:
                    file_node.defs.remove(stmt)

    def prepare_builtins_namespace(self, file_node: MypyFile) -> None:
        """Add certain special-cased definitions to the builtins module.

        Some definitions are too special or fundamental to be processed
        normally from the AST.
        """
        names = file_node.names

        # Add empty definition for core built-in classes, since they are required for basic
        # operation. These will be completed later on.
        for name in CORE_BUILTIN_CLASSES:
            cdef = ClassDef(name, Block([]))  # Dummy ClassDef, will be replaced later
            info = TypeInfo(SymbolTable(), cdef, 'builtins')
            info._fullname = 'builtins.%s' % name
            names[name] = SymbolTableNode(GDEF, info)

        bool_info = names['bool'].node
        assert isinstance(bool_info, TypeInfo)
        bool_type = Instance(bool_info, [])

        special_var_types = [
            ('None', NoneType()),
            # reveal_type is a mypy-only function that gives an error with
            # the type of its arg.
            ('reveal_type', AnyType(TypeOfAny.special_form)),
            # reveal_locals is a mypy-only function that gives an error with the types of
            # locals
            ('reveal_locals', AnyType(TypeOfAny.special_form)),
            ('True', bool_type),
            ('False', bool_type),
            ('__debug__', bool_type),
        ]  # type: List[Tuple[str, Type]]

        for name, typ in special_var_types:
            v = Var(name, typ)
            v._fullname = 'builtins.%s' % name
            file_node.names[name] = SymbolTableNode(GDEF, v)

    #
    # Analyzing a target
    #

    def refresh_partial(self,
                        node: Union[MypyFile, FuncDef, OverloadedFuncDef],
                        patches: List[Tuple[int, Callable[[], None]]],
                        final_iteration: bool,
                        file_node: MypyFile,
                        options: Options,
                        active_type: Optional[TypeInfo] = None) -> None:
        """Refresh a stale target in fine-grained incremental mode."""
        self.patches = patches
        self.deferred = False
        self.incomplete = False
        self._final_iteration = final_iteration
        self.missing_names = set()

        with self.file_context(file_node, options, active_type):
            if isinstance(node, MypyFile):
                self.refresh_top_level(node)
            else:
                self.recurse_into_functions = True
                self.accept(node)
        del self.patches

    def refresh_top_level(self, file_node: MypyFile) -> None:
        """Reanalyze a stale module top-level in fine-grained incremental mode."""
        self.recurse_into_functions = False
        self.add_implicit_module_attrs(file_node)
        for d in file_node.defs:
            self.accept(d)
        if file_node.fullname == 'typing':
            self.add_builtin_aliases(file_node)
        self.adjust_public_exports()
        self.export_map[self.cur_mod_id] = self.all_exports
        self.all_exports = []

    def add_implicit_module_attrs(self, file_node: MypyFile) -> None:
        """Manually add implicit definitions of module '__name__' etc."""
        for name, t in implicit_module_attrs.items():
            # unicode docstrings should be accepted in Python 2
            if name == '__doc__':
                if self.options.python_version >= (3, 0):
                    typ = UnboundType('__builtins__.str')  # type: Type
                else:
                    typ = UnionType([UnboundType('__builtins__.str'),
                                     UnboundType('__builtins__.unicode')])
            else:
                assert t is not None, 'type should be specified for {}'.format(name)
                typ = UnboundType(t)
            existing = file_node.names.get(name)
            if existing is not None and not isinstance(existing.node, PlaceholderNode):
                # Already exists.
                continue
            an_type = self.anal_type(typ)
            if an_type:
                var = Var(name, an_type)
                var._fullname = self.qualified_name(name)
                var.is_ready = True
                self.add_symbol(name, var, dummy_context())
            else:
                self.add_symbol(name,
                                PlaceholderNode(self.qualified_name(name), file_node, -1),
                                dummy_context())

    def add_builtin_aliases(self, tree: MypyFile) -> None:
        """Add builtin type aliases to typing module.

        For historical reasons, the aliases like `List = list` are not defined
        in typeshed stubs for typing module. Instead we need to manually add the
        corresponding nodes on the fly. We explicitly mark these aliases as normalized,
        so that a user can write `typing.List[int]`.
        """
        assert tree.fullname == 'typing'
        for alias, target_name in type_aliases.items():
            if type_aliases_target_versions[alias] > self.options.python_version:
                # This alias is not available on this Python version.
                continue
            name = alias.split('.')[-1]
            if name in tree.names and not isinstance(tree.names[name].node, PlaceholderNode):
                continue
            tag = self.track_incomplete_refs()
            n = self.lookup_fully_qualified_or_none(target_name)
            if n:
                if isinstance(n.node, PlaceholderNode):
                    self.mark_incomplete(name, tree)
                else:
                    # Found built-in class target. Create alias.
                    target = self.named_type_or_none(target_name, [])
                    assert target is not None
                    # Transform List to List[Any], etc.
                    fix_instance_types(target, self.fail, self.note)
                    alias_node = TypeAlias(target, alias,
                                           line=-1, column=-1,  # there is no context
                                           no_args=True, normalized=True)
                    self.add_symbol(name, alias_node, tree)
            elif self.found_incomplete_ref(tag):
                # Built-in class target may not be ready yet -- defer.
                self.mark_incomplete(name, tree)
            else:
                # Test fixtures may be missing some builtin classes, which is okay.
                # Kill the placeholder if there is one.
                if name in tree.names:
                    assert isinstance(tree.names[name].node, PlaceholderNode)
                    del tree.names[name]

    def adjust_public_exports(self) -> None:
        """Adjust the module visibility of globals due to __all__."""
        if '__all__' in self.globals:
            for name, g in self.globals.items():
                # Being included in __all__ explicitly exports and makes public.
                if name in self.all_exports:
                    g.module_public = True
                    g.module_hidden = False
                # But when __all__ is defined, and a symbol is not included in it,
                # it cannot be public.
                else:
                    g.module_public = False

    @contextmanager
    def file_context(self,
                     file_node: MypyFile,
                     options: Options,
                     active_type: Optional[TypeInfo] = None) -> Iterator[None]:
        """Configure analyzer for analyzing targets within a file/class.

        Args:
            file_node: target file
            options: options specific to the file
            active_type: must be the surrounding class to analyze method targets
        """
        scope = self.scope
        self.options = options
        self.errors.set_file(file_node.path, file_node.fullname, scope=scope)
        self.cur_mod_node = file_node
        self.cur_mod_id = file_node.fullname
        scope.enter_file(self.cur_mod_id)
        self.is_stub_file = file_node.path.lower().endswith('.pyi')
        self._is_typeshed_stub_file = self.errors.is_typeshed_file(file_node.path)
        self.globals = file_node.names
        self.tvar_scope = TypeVarScope()

        self.named_tuple_analyzer = NamedTupleAnalyzer(options, self)
        self.typed_dict_analyzer = TypedDictAnalyzer(options, self, self.msg)
        self.enum_call_analyzer = EnumCallAnalyzer(options, self)
        self.newtype_analyzer = NewTypeAnalyzer(options, self, self.msg)

        # Counter that keeps track of references to undefined things potentially caused by
        # incomplete namespaces.
        self.num_incomplete_refs = 0

        if active_type:
            scope.enter_class(active_type)
            self.enter_class(active_type.defn.info)
            for tvar in active_type.defn.type_vars:
                self.tvar_scope.bind_existing(tvar)

        yield

        if active_type:
            scope.leave()
            self.leave_class()
            self.type = None
        scope.leave()
        del self.options

    #
    # Functions
    #

    def visit_func_def(self, defn: FuncDef) -> None:
        self.statement = defn

        # Visit default values because they may contain assignment expressions.
        for arg in defn.arguments:
            if arg.initializer:
                arg.initializer.accept(self)

        defn.is_conditional = self.block_depth[-1] > 0

        # Set full names even for those definitions that aren't added
        # to a symbol table. For example, for overload items.
        defn._fullname = self.qualified_name(defn.name)

        # We don't add module top-level functions to symbol tables
        # when we analyze their bodies in the second phase on analysis,
        # since they were added in the first phase. Nested functions
        # get always added, since they aren't separate targets.
        if not self.recurse_into_functions or len(self.function_stack) > 0:
            if not defn.is_decorated and not defn.is_overload:
                self.add_function_to_symbol_table(defn)

        if not self.recurse_into_functions:
            return

        with self.scope.function_scope(defn):
            self.analyze_func_def(defn)

    def analyze_func_def(self, defn: FuncDef) -> None:
        self.function_stack.append(defn)

        if defn.type:
            assert isinstance(defn.type, CallableType)
            self.update_function_type_variables(defn.type, defn)
        self.function_stack.pop()

        if self.is_class_scope():
            # Method definition
            assert self.type is not None
            defn.info = self.type
            if defn.type is not None and defn.name in ('__init__', '__init_subclass__'):
                assert isinstance(defn.type, CallableType)
                if isinstance(get_proper_type(defn.type.ret_type), AnyType):
                    defn.type = defn.type.copy_modified(ret_type=NoneType())
            self.prepare_method_signature(defn, self.type)

        # Analyze function signature
        with self.tvar_scope_frame(self.tvar_scope.method_frame()):
            if defn.type:
                self.check_classvar_in_signature(defn.type)
                assert isinstance(defn.type, CallableType)
                # Signature must be analyzed in the surrounding scope so that
                # class-level imported names and type variables are in scope.
                analyzer = self.type_analyzer()
                tag = self.track_incomplete_refs()
                result = analyzer.visit_callable_type(defn.type, nested=False)
                # Don't store not ready types (including placeholders).
                if self.found_incomplete_ref(tag) or has_placeholder(result):
                    self.defer(defn)
                    return
                assert isinstance(result, ProperType)
                defn.type = result
                self.add_type_alias_deps(analyzer.aliases_used)
                self.check_function_signature(defn)
                if isinstance(defn, FuncDef):
                    assert isinstance(defn.type, CallableType)
                    defn.type = set_callable_name(defn.type, defn)

        self.analyze_arg_initializers(defn)
        self.analyze_function_body(defn)
        if defn.is_coroutine and isinstance(defn.type, CallableType) and not self.deferred:
            if defn.is_async_generator:
                # Async generator types are handled elsewhere
                pass
            else:
                # A coroutine defined as `async def foo(...) -> T: ...`
                # has external return type `Coroutine[Any, Any, T]`.
                any_type = AnyType(TypeOfAny.special_form)
                ret_type = self.named_type_or_none('typing.Coroutine',
                                                   [any_type, any_type, defn.type.ret_type])
                assert ret_type is not None, "Internal error: typing.Coroutine not found"
                defn.type = defn.type.copy_modified(ret_type=ret_type)

    def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None:
        """Check basic signature validity and tweak annotation of self/cls argument."""
        # Only non-static methods are special.
        functype = func.type
        if not func.is_static:
            if func.name in ['__init_subclass__', '__class_getitem__']:
                func.is_class = True
            if not func.arguments:
                self.fail('Method must have at least one argument', func)
            elif isinstance(functype, CallableType):
                self_type = get_proper_type(functype.arg_types[0])
                if isinstance(self_type, AnyType):
                    leading_type = fill_typevars(info)  # type: Type
                    if func.is_class or func.name == '__new__':
                        leading_type = self.class_type(leading_type)
                    func.type = replace_implicit_first_type(functype, leading_type)

    def set_original_def(self, previous: Optional[Node],
                         new: Union[FuncDef, Decorator]) -> bool:
        """If 'new' conditionally redefine 'previous', set 'previous' as original

        We reject straight redefinitions of functions, as they are usually
        a programming error. For example:

          def f(): ...
          def f(): ...  # Error: 'f' redefined
        """
        if isinstance(new, Decorator):
            new = new.func
        if isinstance(previous, (FuncDef, Var, Decorator)) and new.is_conditional:
            new.original_def = previous
            return True
        else:
            return False

    def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> None:
        """Make any type variables in the signature of defn explicit.

        Update the signature of defn to contain type variable definitions
        if defn is generic.
        """
        with self.tvar_scope_frame(self.tvar_scope.method_frame()):
            a = self.type_analyzer()
            fun_type.variables = a.bind_function_type_variables(fun_type, defn)

    def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
        self.statement = defn
        self.add_function_to_symbol_table(defn)

        if not self.recurse_into_functions:
            return

        # NB: Since _visit_overloaded_func_def will call accept on the
        # underlying FuncDefs, the function might get entered twice.
        # This is fine, though, because only the outermost function is
        # used to compute targets.
        with self.scope.function_scope(defn):
            self.analyze_overloaded_func_def(defn)

    def analyze_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
        # OverloadedFuncDef refers to any legitimate situation where you have
        # more than one declaration for the same function in a row. This occurs
        # with a @property with a setter or a deleter, and for a classic
        # @overload.

        defn._fullname = self.qualified_name(defn.name)
        # TODO: avoid modifying items.
        defn.items = defn.unanalyzed_items.copy()

        first_item = defn.items[0]
        first_item.is_overload = True
        first_item.accept(self)

        if isinstance(first_item, Decorator) and first_item.func.is_property:
            # This is a property.
            first_item.func.is_overload = True
            self.analyze_property_with_multi_part_definition(defn)
            typ = function_type(first_item.func, self.builtin_type('builtins.function'))
            assert isinstance(typ, CallableType)
            types = [typ]
        else:
            # This is a normal overload. Find the item signatures, the
            # implementation (if outside a stub), and any missing @overload
            # decorators.
            types, impl, non_overload_indexes = self.analyze_overload_sigs_and_impl(defn)
            defn.impl = impl
            if non_overload_indexes:
                self.handle_missing_overload_decorators(defn, non_overload_indexes,
                                                        some_overload_decorators=len(types) > 0)
            # If we found an implementation, remove it from the overload item list,
            # as it's special.
            if impl is not None:
                assert impl is defn.items[-1]
                defn.items = defn.items[:-1]
            elif not non_overload_indexes:
                self.handle_missing_overload_implementation(defn)

        if types:
            defn.type = Overloaded(types)
            defn.type.line = defn.line

        if not defn.items:
            # It was not a real overload after all, but function redefinition. We've
            # visited the redefinition(s) already.
            if not defn.impl:
                # For really broken overloads with no items and no implementation we need to keep
                # at least one item to hold basic information like function name.
                defn.impl = defn.unanalyzed_items[-1]
            return

        # We know this is an overload def. Infer properties and perform some checks.
        self.process_final_in_overload(defn)
        self.process_static_or_class_method_in_overload(defn)

    def analyze_overload_sigs_and_impl(
            self,
            defn: OverloadedFuncDef) -> Tuple[List[CallableType],
                                              Optional[OverloadPart],
                                              List[int]]:
        """Find overload signatures, the implementation, and items with missing @overload.

        Assume that the first was already analyzed. As a side effect:
        analyzes remaining items and updates 'is_overload' flags.
        """
        types = []
        non_overload_indexes = []
        impl = None  # type: Optional[OverloadPart]
        for i, item in enumerate(defn.items):
            if i != 0:
                # Assume that the first item was already visited
                item.is_overload = True
                item.accept(self)
            # TODO: support decorated overloaded functions properly
            if isinstance(item, Decorator):
                callable = function_type(item.func, self.builtin_type('builtins.function'))
                assert isinstance(callable, CallableType)
                if not any(refers_to_fullname(dec, 'typing.overload')
                           for dec in item.decorators):
                    if i == len(defn.items) - 1 and not self.is_stub_file:
                        # Last item outside a stub is impl
                        impl = item
                    else:
                        # Oops it wasn't an overload after all. A clear error
                        # will vary based on where in the list it is, record
                        # that.
                        non_overload_indexes.append(i)
                else:
                    item.func.is_overload = True
                    types.append(callable)
            elif isinstance(item, FuncDef):
                if i == len(defn.items) - 1 and not self.is_stub_file:
                    impl = item
                else:
                    non_overload_indexes.append(i)
        return types, impl, non_overload_indexes

    def handle_missing_overload_decorators(self,
                                           defn: OverloadedFuncDef,
                                           non_overload_indexes: List[int],
                                           some_overload_decorators: bool) -> None:
        """Generate errors for overload items without @overload.

        Side effect: remove non-overload items.
        """
        if some_overload_decorators:
            # Some of them were overloads, but not all.
            for idx in non_overload_indexes:
                if self.is_stub_file:
                    self.fail("An implementation for an overloaded function "
                              "is not allowed in a stub file", defn.items[idx])
                else:
                    self.fail("The implementation for an overloaded function "
                              "must come last", defn.items[idx])
        else:
            for idx in non_overload_indexes[1:]:
                self.name_already_defined(defn.name, defn.items[idx], defn.items[0])
            if defn.impl:
                self.name_already_defined(defn.name, defn.impl, defn.items[0])
        # Remove the non-overloads
        for idx in reversed(non_overload_indexes):
            del defn.items[idx]

    def handle_missing_overload_implementation(self, defn: OverloadedFuncDef) -> None:
        """Generate error about missing overload implementation (only if needed)."""
        if not self.is_stub_file:
            if self.type and self.type.is_protocol and not self.is_func_scope():
                # An overloaded protocol method doesn't need an implementation.
                for item in defn.items:
                    if isinstance(item, Decorator):
                        item.func.is_abstract = True
                    else:
                        item.is_abstract = True
            else:
                self.fail(
                    "An overloaded function outside a stub file must have an implementation",
                    defn)

    def process_final_in_overload(self, defn: OverloadedFuncDef) -> None:
        """Detect the @final status of an overloaded function (and perform checks)."""
        # If the implementation is marked as @final (or the first overload in
        # stubs), then the whole overloaded definition is @final.
        if any(item.is_final for item in defn.items):
            # We anyway mark it as final because it was probably the intention.
            defn.is_final = True
            # Only show the error once per overload
            bad_final = next(ov for ov in defn.items if ov.is_final)
            if not self.is_stub_file:
                self.fail("@final should be applied only to overload implementation",
                          bad_final)
            elif any(item.is_final for item in defn.items[1:]):
                bad_final = next(ov for ov in defn.items[1:] if ov.is_final)
                self.fail("In a stub file @final must be applied only to the first overload",
                          bad_final)
        if defn.impl is not None and defn.impl.is_final:
            defn.is_final = True

    def process_static_or_class_method_in_overload(self, defn: OverloadedFuncDef) -> None:
        class_status = []
        static_status = []
        for item in defn.items:
            if isinstance(item, Decorator):
                inner = item.func
            elif isinstance(item, FuncDef):
                inner = item
            else:
                assert False, "The 'item' variable is an unexpected type: {}".format(type(item))
            class_status.append(inner.is_class)
            static_status.append(inner.is_static)

        if defn.impl is not None:
            if isinstance(defn.impl, Decorator):
                inner = defn.impl.func
            elif isinstance(defn.impl, FuncDef):
                inner = defn.impl
            else:
                assert False, "Unexpected impl type: {}".format(type(defn.impl))
            class_status.append(inner.is_class)
            static_status.append(inner.is_static)

        if len(set(class_status)) != 1:
            self.msg.overload_inconsistently_applies_decorator('classmethod', defn)
        elif len(set(static_status)) != 1:
            self.msg.overload_inconsistently_applies_decorator('staticmethod', defn)
        else:
            defn.is_class = class_status[0]
            defn.is_static = static_status[0]

    def analyze_property_with_multi_part_definition(self, defn: OverloadedFuncDef) -> None:
        """Analyze a property defined using multiple methods (e.g., using @x.setter).

        Assume that the first method (@property) has already been analyzed.
        """
        defn.is_property = True
        items = defn.items
        first_item = cast(Decorator, defn.items[0])
        deleted_items = []
        for i, item in enumerate(items[1:]):
            if isinstance(item, Decorator):
                if len(item.decorators) == 1:
                    node = item.decorators[0]
                    if isinstance(node, MemberExpr):
                        if node.name == 'setter':
                            # The first item represents the entire property.
                            first_item.var.is_settable_property = True
                            # Get abstractness from the original definition.
                            item.func.is_abstract = first_item.func.is_abstract
                else:
                    self.fail("Decorated property not supported", item)
                item.func.accept(self)
            else:
                self.fail('Unexpected definition for property "{}"'.format(first_item.func.name),
                          item)
                deleted_items.append(i + 1)
        for i in reversed(deleted_items):
            del items[i]

    def add_function_to_symbol_table(self, func: Union[FuncDef, OverloadedFuncDef]) -> None:
        if self.is_class_scope():
            assert self.type is not None
            func.info = self.type
        func._fullname = self.qualified_name(func.name)
        self.add_symbol(func.name, func, func)

    def analyze_arg_initializers(self, defn: FuncItem) -> None:
        with self.tvar_scope_frame(self.tvar_scope.method_frame()):
            # Analyze default arguments
            for arg in defn.arguments:
                if arg.initializer:
                    arg.initializer.accept(self)

    def analyze_function_body(self, defn: FuncItem) -> None:
        is_method = self.is_class_scope()
        with self.tvar_scope_frame(self.tvar_scope.method_frame()):
            # Bind the type variables again to visit the body.
            if defn.type:
                a = self.type_analyzer()
                a.bind_function_type_variables(cast(CallableType, defn.type), defn)
            self.function_stack.append(defn)
            self.enter(defn)
            for arg in defn.arguments:
                self.add_local(arg.variable, defn)

            # The first argument of a non-static, non-class method is like 'self'
            # (though the name could be different), having the enclosing class's
            # instance type.
            if is_method and not defn.is_static and not defn.is_class and defn.arguments:
                defn.arguments[0].variable.is_self = True

            defn.body.accept(self)
            self.leave()
            self.function_stack.pop()

    def check_classvar_in_signature(self, typ: ProperType) -> None:
        if isinstance(typ, Overloaded):
            for t in typ.items():  # type: ProperType
                self.check_classvar_in_signature(t)
            return
        if not isinstance(typ, CallableType):
            return
        for t in get_proper_types(typ.arg_types) + [get_proper_type(typ.ret_type)]:
            if self.is_classvar(t):
                self.fail_invalid_classvar(t)
                # Show only one error per signature
                break

    def check_function_signature(self, fdef: FuncItem) -> None:
        sig = fdef.type
        assert isinstance(sig, CallableType)
        if len(sig.arg_types) < len(fdef.arguments):
            self.fail('Type signature has too few arguments', fdef)
            # Add dummy Any arguments to prevent crashes later.
            num_extra_anys = len(fdef.arguments) - len(sig.arg_types)
            extra_anys = [AnyType(TypeOfAny.from_error)] * num_extra_anys
            sig.arg_types.extend(extra_anys)
        elif len(sig.arg_types) > len(fdef.arguments):
            self.fail('Type signature has too many arguments', fdef, blocker=True)

    def visit_decorator(self, dec: Decorator) -> None:
        self.statement = dec
        # TODO: better don't modify them at all.
        dec.decorators = dec.original_decorators.copy()
        dec.func.is_conditional = self.block_depth[-1] > 0
        if not dec.is_overload:
            self.add_symbol(dec.name, dec, dec)
        dec.func._fullname = self.qualified_name(dec.name)
        for d in dec.decorators:
            d.accept(self)
        removed = []  # type: List[int]
        no_type_check = False
        for i, d in enumerate(dec.decorators):
            # A bunch of decorators are special cased here.
if refers_to_fullname(d, 'abc.abstractmethod'): removed.append(i) dec.func.is_abstract = True self.check_decorated_function_is_method('abstractmethod', dec) elif (refers_to_fullname(d, 'asyncio.coroutines.coroutine') or refers_to_fullname(d, 'types.coroutine')): removed.append(i) dec.func.is_awaitable_coroutine = True elif refers_to_fullname(d, 'builtins.staticmethod'): removed.append(i) dec.func.is_static = True dec.var.is_staticmethod = True self.check_decorated_function_is_method('staticmethod', dec) elif refers_to_fullname(d, 'builtins.classmethod'): removed.append(i) dec.func.is_class = True dec.var.is_classmethod = True self.check_decorated_function_is_method('classmethod', dec) elif (refers_to_fullname(d, 'builtins.property') or refers_to_fullname(d, 'abc.abstractproperty')): removed.append(i) dec.func.is_property = True dec.var.is_property = True if refers_to_fullname(d, 'abc.abstractproperty'): dec.func.is_abstract = True self.check_decorated_function_is_method('property', dec) if len(dec.func.arguments) > 1: self.fail('Too many arguments', dec.func) elif refers_to_fullname(d, 'typing.no_type_check'): dec.var.type = AnyType(TypeOfAny.special_form) no_type_check = True elif (refers_to_fullname(d, 'typing.final') or refers_to_fullname(d, 'typing_extensions.final')): if self.is_class_scope(): assert self.type is not None, "No type set at class scope" if self.type.is_protocol: self.msg.protocol_members_cant_be_final(d) else: dec.func.is_final = True dec.var.is_final = True removed.append(i) else: self.fail("@final cannot be used with non-method functions", d) for i in reversed(removed): del dec.decorators[i] if (not dec.is_overload or dec.var.is_property) and self.type: dec.var.info = self.type dec.var.is_initialized_in_class = True if not no_type_check and self.recurse_into_functions: dec.func.accept(self) if dec.decorators and dec.var.is_property: self.fail('Decorated property not supported', dec) def check_decorated_function_is_method(self, decorator: str, 
                                           context: Context) -> None:
        """Complain if a method-only decorator is used outside a class body."""
        if not self.type or self.is_func_scope():
            self.fail("'%s' used with a non-method" % decorator, context)

    #
    # Classes
    #

    def visit_class_def(self, defn: ClassDef) -> None:
        """Analyze a class definition inside a fresh class-level type variable scope."""
        self.statement = defn
        with self.tvar_scope_frame(self.tvar_scope.class_frame()):
            self.analyze_class(defn)

    def analyze_class(self, defn: ClassDef) -> None:
        """Main entry point for class semantic analysis.

        Handles placeholder creation for self-referential bases, metaclass
        updates, type variable inference from bases, and dispatch to the
        TypedDict / named tuple special-case analyzers before falling through
        to ordinary class processing.
        """
        fullname = self.qualified_name(defn.name)
        if not defn.info and not self.is_core_builtin_class(defn):
            # Add placeholder so that self-references in base classes can be
            # resolved. We don't want this to cause a deferral, since if there
            # are no incomplete references, we'll replace this with a TypeInfo
            # before returning.
            placeholder = PlaceholderNode(fullname, defn, defn.line, becomes_typeinfo=True)
            self.add_symbol(defn.name, placeholder, defn, can_defer=False)

        tag = self.track_incomplete_refs()

        # Restore base classes after previous iteration (things like Generic[T] might be removed).
        defn.base_type_exprs.extend(defn.removed_base_type_exprs)
        defn.removed_base_type_exprs.clear()

        self.update_metaclass(defn)

        bases = defn.base_type_exprs
        bases, tvar_defs, is_protocol = self.clean_up_bases_and_infer_type_variables(defn, bases,
                                                                                    context=defn)

        for tvd in tvar_defs:
            if any(has_placeholder(t) for t in [tvd.upper_bound] + tvd.values):
                # Some type variable bounds or values are not ready, we need
                # to re-analyze this class.
                self.defer()

        self.analyze_class_keywords(defn)
        result = self.analyze_base_classes(bases)

        if result is None or self.found_incomplete_ref(tag):
            # Something was incomplete. Defer current target.
            self.mark_incomplete(defn.name, defn)
            return

        base_types, base_error = result
        if any(isinstance(base, PlaceholderType) for base, _ in base_types):
            # We need to know the TypeInfo of each base to construct the MRO. Placeholder types
            # are okay in nested positions, since they can't affect the MRO.
            self.mark_incomplete(defn.name, defn)
            return

        is_typeddict, info = self.typed_dict_analyzer.analyze_typeddict_classdef(defn)
        if is_typeddict:
            # TypedDict class: decorators are still analyzed so that errors
            # such as @final misuse get reported.
            for decorator in defn.decorators:
                decorator.accept(self)
                if isinstance(decorator, RefExpr):
                    if decorator.fullname in ('typing.final', 'typing_extensions.final'):
                        self.fail("@final cannot be used with TypedDict", decorator)
            if info is None:
                self.mark_incomplete(defn.name, defn)
            else:
                self.prepare_class_def(defn, info)
            return

        if self.analyze_namedtuple_classdef(defn):
            return

        # Create TypeInfo for class now that base classes and the MRO can be calculated.
        self.prepare_class_def(defn)

        defn.type_vars = tvar_defs
        defn.info.type_vars = [tvar.name for tvar in tvar_defs]
        if base_error:
            defn.info.fallback_to_any = True

        with self.scope.class_scope(defn.info):
            self.configure_base_classes(defn, base_types)
            defn.info.is_protocol = is_protocol
            self.analyze_metaclass(defn)
            defn.info.runtime_protocol = False
            for decorator in defn.decorators:
                self.analyze_class_decorator(defn, decorator)
            self.analyze_class_body_common(defn)

    def is_core_builtin_class(self, defn: ClassDef) -> bool:
        """Is this one of the special classes in builtins listed in CORE_BUILTIN_CLASSES?"""
        return self.cur_mod_id == 'builtins' and defn.name in CORE_BUILTIN_CLASSES

    def analyze_class_body_common(self, defn: ClassDef) -> None:
        """Parts of class body analysis that are common to all kinds of class defs."""
        self.enter_class(defn.info)
        defn.defs.accept(self)
        self.apply_class_plugin_hooks(defn)
        self.leave_class()

    def analyze_namedtuple_classdef(self, defn: ClassDef) -> bool:
        """Check if this class can define a named tuple.

        Return True if the class was handled as a named tuple (even if
        the definition was incomplete and had to be deferred).
        """
        if defn.info and defn.info.is_named_tuple:
            # Don't reprocess everything. We just need to process methods defined
            # in the named tuple class body.
            is_named_tuple, info = True, defn.info  # type: bool, Optional[TypeInfo]
        else:
            is_named_tuple, info = self.named_tuple_analyzer.analyze_namedtuple_classdef(
                defn, self.is_stub_file)
        if is_named_tuple:
            if info is None:
                # Definition is not complete yet; try again on a later iteration.
                self.mark_incomplete(defn.name, defn)
            else:
                self.prepare_class_def(defn, info)
                with self.scope.class_scope(defn.info):
                    with self.named_tuple_analyzer.save_namedtuple_body(info):
                        self.analyze_class_body_common(defn)
            return True
        return False

    def apply_class_plugin_hooks(self, defn: ClassDef) -> None:
        """Apply a plugin hook that may infer a more precise definition for a class."""
        def get_fullname(expr: Expression) -> Optional[str]:
            # Resolve the fully qualified name of a decorator, metaclass or
            # base class expression, looking through calls and indexing.
            if isinstance(expr, CallExpr):
                return get_fullname(expr.callee)
            elif isinstance(expr, IndexExpr):
                return get_fullname(expr.base)
            elif isinstance(expr, RefExpr):
                if expr.fullname:
                    return expr.fullname
                # If we don't have a fullname look it up. This happens because base classes are
                # analyzed in a different manner (see exprtotype.py) and therefore those AST
                # nodes will not have full names.
                sym = self.lookup_type_node(expr)
                if sym:
                    return sym.fullname
            return None

        # Class decorator hooks.
        for decorator in defn.decorators:
            decorator_name = get_fullname(decorator)
            if decorator_name:
                hook = self.plugin.get_class_decorator_hook(decorator_name)
                if hook:
                    hook(ClassDefContext(defn, decorator, self))

        # Metaclass hook.
        if defn.metaclass:
            metaclass_name = get_fullname(defn.metaclass)
            if metaclass_name:
                hook = self.plugin.get_metaclass_hook(metaclass_name)
                if hook:
                    hook(ClassDefContext(defn, defn.metaclass, self))

        # Base class hooks.
        for base_expr in defn.base_type_exprs:
            base_name = get_fullname(base_expr)
            if base_name:
                hook = self.plugin.get_base_class_hook(base_name)
                if hook:
                    hook(ClassDefContext(defn, base_expr, self))

    def analyze_class_keywords(self, defn: ClassDef) -> None:
        """Analyze the keyword arguments in the class header (e.g. metaclass=...)."""
        for value in defn.keywords.values():
            value.accept(self)

    def enter_class(self, info: TypeInfo) -> None:
        """Push class scope state before analyzing a class body."""
        # Remember previous active class
        self.type_stack.append(self.type)
        self.locals.append(None)  # Add class scope
        self.is_comprehension_stack.append(False)
        self.block_depth.append(-1)  # The class body increments this to 0
        self.type = info

    def leave_class(self) -> None:
        """Restore analyzer state saved by enter_class()."""
        self.block_depth.pop()
        self.locals.pop()
        self.is_comprehension_stack.pop()
        self.type = self.type_stack.pop()

    def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None:
        """Analyze a single class decorator; recognize @runtime_checkable and @final."""
        decorator.accept(self)
        if isinstance(decorator, RefExpr):
            if decorator.fullname in RUNTIME_PROTOCOL_DECOS:
                if defn.info.is_protocol:
                    defn.info.runtime_protocol = True
                else:
                    self.fail('@runtime_checkable can only be used with protocol classes',
                              defn)
            elif decorator.fullname in ('typing.final',
                                        'typing_extensions.final'):
                defn.info.is_final = True

    def clean_up_bases_and_infer_type_variables(
            self,
            defn: ClassDef,
            base_type_exprs: List[Expression],
            context: Context) -> Tuple[List[Expression],
                                       List[TypeVarDef],
                                       bool]:
        """Remove extra base classes such as Generic and infer type vars.

        For example, consider this class:

          class Foo(Bar, Generic[T]): ...

        Now we will remove Generic[T] from bases of Foo and infer that the
        type variable 'T' is a type argument of Foo.

        Note that this is performed *before* semantic analysis.

        Returns (remaining base expressions, inferred type variables,
        is protocol).
        """
        removed = []  # type: List[int]
        declared_tvars = []  # type: TypeVarList
        is_protocol = False
        for i, base_expr in enumerate(base_type_exprs):
            self.analyze_type_expr(base_expr)

            try:
                base = expr_to_unanalyzed_type(base_expr)
            except TypeTranslationError:
                # This error will be caught later.
                continue
            result = self.analyze_class_typevar_declaration(base)
            if result is not None:
                # This base is Generic[...] or Protocol[...] with arguments.
                if declared_tvars:
                    self.fail('Only single Generic[...] or Protocol[...] can be in bases', context)
                removed.append(i)
                tvars = result[0]
                is_protocol |= result[1]
                declared_tvars.extend(tvars)
            if isinstance(base, UnboundType):
                sym = self.lookup_qualified(base.name, base)
                if sym is not None and sym.node is not None:
                    if (sym.node.fullname in ('typing.Protocol', 'typing_extensions.Protocol')
                            and i not in removed):
                        # also remove bare 'Protocol' bases
                        removed.append(i)
                        is_protocol = True

        all_tvars = self.get_all_bases_tvars(base_type_exprs, removed)
        if declared_tvars:
            if len(remove_dups(declared_tvars)) < len(declared_tvars):
                self.fail("Duplicate type variables in Generic[...] or Protocol[...]", context)
            declared_tvars = remove_dups(declared_tvars)
            if not set(all_tvars).issubset(set(declared_tvars)):
                self.fail("If Generic[...] or Protocol[...] is present"
                          " it should list all type variables", context)
                # In case of error, Generic tvars will go first
                declared_tvars = remove_dups(declared_tvars + all_tvars)
        else:
            declared_tvars = all_tvars
        for i in reversed(removed):
            # We need to actually remove the base class expressions like Generic[T],
            # mostly because otherwise they will create spurious dependencies in fine
            # grained incremental mode.
            # Keep the removed expression around so that a later semanal
            # iteration can restore it (see analyze_class).
            defn.removed_base_type_exprs.append(defn.base_type_exprs[i])
            del base_type_exprs[i]
        tvar_defs = []  # type: List[TypeVarDef]
        for name, tvar_expr in declared_tvars:
            tvar_def = self.tvar_scope.bind_new(name, tvar_expr)
            tvar_defs.append(tvar_def)
        return base_type_exprs, tvar_defs, is_protocol

    def analyze_class_typevar_declaration(self, base: Type) -> Optional[Tuple[TypeVarList, bool]]:
        """Analyze type variables declared using Generic[...] or Protocol[...].

        Args:
            base: Non-analyzed base class

        Return None if the base class does not declare type variables. Otherwise,
        return the type variables.
        """
        if not isinstance(base, UnboundType):
            return None
        unbound = base
        sym = self.lookup_qualified(unbound.name, unbound)
        if sym is None or sym.node is None:
            return None
        if (sym.node.fullname == 'typing.Generic'
                or sym.node.fullname == 'typing.Protocol' and base.args
                or sym.node.fullname == 'typing_extensions.Protocol' and base.args):
            # Bare 'Generic' counts, but bare 'Protocol' (no args) is handled
            # separately by the caller.
            is_proto = sym.node.fullname != 'typing.Generic'
            tvars = []  # type: TypeVarList
            for arg in unbound.args:
                tag = self.track_incomplete_refs()
                tvar = self.analyze_unbound_tvar(arg)
                if tvar:
                    tvars.append(tvar)
                elif not self.found_incomplete_ref(tag):
                    self.fail('Free type variable expected in %s[...]' %
                              sym.node.name, base)
            return tvars, is_proto
        return None

    def analyze_unbound_tvar(self, t: Type) -> Optional[Tuple[str, TypeVarExpr]]:
        """Return (name, TypeVarExpr) if 't' refers to an unbound type variable, else None."""
        if not isinstance(t, UnboundType):
            return None
        unbound = t
        sym = self.lookup_qualified(unbound.name, unbound)
        if sym and isinstance(sym.node, PlaceholderNode):
            # The name may still become a type variable; record so the caller
            # can defer instead of reporting an error.
            self.record_incomplete_ref()
        if sym is None or not isinstance(sym.node, TypeVarExpr):
            return None
        elif sym.fullname and not self.tvar_scope.allow_binding(sym.fullname):
            # It's bound by our type variable scope
            return None
        else:
            assert isinstance(sym.node, TypeVarExpr)
            return unbound.name, sym.node

    def get_all_bases_tvars(self,
                            base_type_exprs: List[Expression],
                            removed: List[int]) -> TypeVarList:
        """Return all type variable references in bases."""
        tvars = []  # type: TypeVarList
        for i, base_expr in enumerate(base_type_exprs):
            if i not in removed:
                try:
                    base = expr_to_unanalyzed_type(base_expr)
                except TypeTranslationError:
                    # This error will be caught later.
                    continue
                base_tvars = base.accept(TypeVariableQuery(self.lookup_qualified,
                                                           self.tvar_scope))
                tvars.extend(base_tvars)
        return remove_dups(tvars)

    def prepare_class_def(self, defn: ClassDef, info: Optional[TypeInfo] = None) -> None:
        """Prepare for the analysis of a class definition.

        Create an empty TypeInfo and store it in a symbol table, or if the 'info'
        argument is provided, store it instead (used for magic type definitions).
        """
        if not defn.info:
            defn.fullname = self.qualified_name(defn.name)
            # TODO: Nested classes
            info = info or self.make_empty_type_info(defn)
            defn.info = info
            info.defn = defn
            if not self.is_func_scope():
                info._fullname = self.qualified_name(defn.name)
            else:
                info._fullname = info.name
        self.add_symbol(defn.name, defn.info, defn)
        if self.is_nested_within_func_scope():
            # We need to preserve local classes, let's store them
            # in globals under mangled unique names
            #
            # TODO: Putting local classes into globals breaks assumptions in fine-grained
            #       incremental mode and we should avoid it. In general, this logic is too
            #       ad-hoc and needs to be removed/refactored.
            if '@' not in defn.info._fullname:
                local_name = defn.info._fullname + '@' + str(defn.line)
                if defn.info.is_named_tuple:
                    # Module is already correctly set in _fullname for named tuples.
                    defn.info._fullname += '@' + str(defn.line)
                else:
                    defn.info._fullname = self.cur_mod_id + '.' + local_name
            else:
                # Preserve name from previous fine-grained incremental run.
                local_name = defn.info._fullname
            defn.fullname = defn.info._fullname
            self.globals[local_name] = SymbolTableNode(GDEF, defn.info)

    def make_empty_type_info(self, defn: ClassDef) -> TypeInfo:
        """Return the TypeInfo to use for a class being analyzed for the first time."""
        if (self.is_module_scope()
                and self.cur_mod_id == 'builtins'
                and defn.name in CORE_BUILTIN_CLASSES):
            # Special case core built-in classes.
            # A TypeInfo was already
            # created for it before semantic analysis, but with a dummy
            # ClassDef. Patch the real ClassDef object.
            info = self.globals[defn.name].node
            assert isinstance(info, TypeInfo)
        else:
            info = TypeInfo(SymbolTable(), defn, self.cur_mod_id)
            info.set_line(defn)
        return info

    def get_name_repr_of_expr(self, expr: Expression) -> Optional[str]:
        """Try finding a short simplified textual representation of a base class expression."""
        if isinstance(expr, NameExpr):
            return expr.name
        if isinstance(expr, MemberExpr):
            return get_member_expr_fullname(expr)
        if isinstance(expr, IndexExpr):
            return self.get_name_repr_of_expr(expr.base)
        if isinstance(expr, CallExpr):
            return self.get_name_repr_of_expr(expr.callee)
        return None

    def analyze_base_classes(
            self,
            base_type_exprs: List[Expression]) -> Optional[Tuple[List[Tuple[ProperType,
                                                                            Expression]],
                                                                 bool]]:
        """Analyze base class types.

        Return None if some definition was incomplete. Otherwise, return a tuple
        with these items:

         * List of (analyzed type, original expression) tuples
         * Boolean indicating whether one of the bases had a semantic analysis error
        """
        is_error = False
        bases = []
        for base_expr in base_type_exprs:
            if (isinstance(base_expr, RefExpr) and
                    base_expr.fullname in ('typing.NamedTuple',) + TPDICT_NAMES):
                # Ignore magic bases for now.
                continue
            try:
                base = self.expr_to_analyzed_type(base_expr, allow_placeholder=True)
            except TypeTranslationError:
                name = self.get_name_repr_of_expr(base_expr)
                if isinstance(base_expr, CallExpr):
                    msg = 'Unsupported dynamic base class'
                else:
                    msg = 'Invalid base class'
                if name:
                    msg += ' "{}"'.format(name)
                self.fail(msg, base_expr)
                is_error = True
                continue
            if base is None:
                # Incomplete definition encountered; caller must defer.
                return None
            base = get_proper_type(base)
            bases.append((base, base_expr))
        return bases, is_error

    def configure_base_classes(self,
                               defn: ClassDef,
                               bases: List[Tuple[ProperType, Expression]]) -> None:
        """Set up base classes.

        This computes several attributes on the corresponding TypeInfo defn.info
        related to the base classes: defn.info.bases, defn.info.mro, and
        miscellaneous others (at least tuple_type, fallback_to_any, and is_enum.)
        """
        base_types = []  # type: List[Instance]
        info = defn.info

        info.tuple_type = None
        for base, base_expr in bases:
            if isinstance(base, TupleType):
                actual_base = self.configure_tuple_base_class(defn, base, base_expr)
                base_types.append(actual_base)
            elif isinstance(base, Instance):
                if base.type.is_newtype:
                    self.fail("Cannot subclass NewType", defn)
                base_types.append(base)
            elif isinstance(base, AnyType):
                if self.options.disallow_subclassing_any:
                    if isinstance(base_expr, (NameExpr, MemberExpr)):
                        msg = "Class cannot subclass '{}' (has type 'Any')".format(base_expr.name)
                    else:
                        msg = "Class cannot subclass value of type 'Any'"
                    self.fail(msg, base_expr)
                info.fallback_to_any = True
            else:
                msg = 'Invalid base class'
                name = self.get_name_repr_of_expr(base_expr)
                if name:
                    msg += ' "{}"'.format(name)
                self.fail(msg, base_expr)
                info.fallback_to_any = True
            if self.options.disallow_any_unimported and has_any_from_unimported_type(base):
                if isinstance(base_expr, (NameExpr, MemberExpr)):
                    prefix = "Base type {}".format(base_expr.name)
                else:
                    prefix = "Base type"
                self.msg.unimported_type_becomes_any(prefix, base, base_expr)
            check_for_explicit_any(base, self.options, self.is_typeshed_stub_file, self.msg,
                                   context=base_expr)

        # Add 'object' as implicit base if there is no other base class.
        if not base_types and defn.fullname != 'builtins.object':
            base_types.append(self.object_type())

        info.bases = base_types

        # Calculate the MRO.
        if not self.verify_base_classes(defn):
            self.set_dummy_mro(defn.info)
            return
        self.calculate_class_mro(defn, self.object_type)

    def configure_tuple_base_class(self,
                                   defn: ClassDef,
                                   base: TupleType,
                                   base_expr: Expression) -> Instance:
        """Record a tuple (e.g. NamedTuple) base class; return its fallback Instance."""
        info = defn.info

        # There may be an existing valid tuple type from previous semanal iterations.
        # Use equality to check if it is the case.
        if info.tuple_type and info.tuple_type != base:
            self.fail("Class has two incompatible bases derived from tuple", defn)
            defn.has_incompatible_baseclass = True
        info.tuple_type = base
        if isinstance(base_expr, CallExpr):
            defn.analyzed = NamedTupleExpr(base.partial_fallback.type)
            defn.analyzed.line = defn.line
            defn.analyzed.column = defn.column

        if base.partial_fallback.type.fullname == 'builtins.tuple':
            # Fallback can only be safely calculated after semantic analysis, since base
            # classes may be incomplete. Postpone the calculation.
            self.schedule_patch(PRIORITY_FALLBACKS, lambda: calculate_tuple_fallback(base))

        return base.partial_fallback

    def set_dummy_mro(self, info: TypeInfo) -> None:
        """Give a class a degenerate MRO after an MRO calculation failure."""
        # Give it an MRO consisting of just the class itself and object.
        info.mro = [info, self.object_type().type]
        info.bad_mro = True

    def calculate_class_mro(self, defn: ClassDef,
                            obj_type: Optional[Callable[[], Instance]] = None) -> None:
        """Calculate method resolution order for a class.

        `obj_type` may be omitted in the third pass when all classes are already analyzed.
        It exists just to fill in empty base class list during second pass in case of
        an import cycle.
        """
        try:
            calculate_mro(defn.info, obj_type)
        except MroError:
            self.fail('Cannot determine consistent method resolution '
                      'order (MRO) for "%s"' % defn.name, defn)
            self.set_dummy_mro(defn.info)
        # Allow plugins to alter the MRO to handle the fact that `def mro()`
        # on metaclasses permits MRO rewriting.
        if defn.fullname:
            hook = self.plugin.get_customize_class_mro_hook(defn.fullname)
            if hook:
                hook(ClassDefContext(defn, FakeExpression(), self))

    def update_metaclass(self, defn: ClassDef) -> None:
        """Lookup for special metaclass declarations, and update defn fields accordingly.

        * __metaclass__ attribute in Python 2
        * six.with_metaclass(M, B1, B2, ...)
        * @six.add_metaclass(M)
        * future.utils.with_metaclass(M, B1, B2, ...)
        * past.utils.with_metaclass(M, B1, B2, ...)
""" # Look for "__metaclass__ = " in Python 2 python2_meta_expr = None # type: Optional[Expression] if self.options.python_version[0] == 2: for body_node in defn.defs.body: if isinstance(body_node, ClassDef) and body_node.name == "__metaclass__": self.fail("Metaclasses defined as inner classes are not supported", body_node) break elif isinstance(body_node, AssignmentStmt) and len(body_node.lvalues) == 1: lvalue = body_node.lvalues[0] if isinstance(lvalue, NameExpr) and lvalue.name == "__metaclass__": python2_meta_expr = body_node.rvalue # Look for six.with_metaclass(M, B1, B2, ...) with_meta_expr = None # type: Optional[Expression] if len(defn.base_type_exprs) == 1: base_expr = defn.base_type_exprs[0] if isinstance(base_expr, CallExpr) and isinstance(base_expr.callee, RefExpr): base_expr.accept(self) if (base_expr.callee.fullname in {'six.with_metaclass', 'future.utils.with_metaclass', 'past.utils.with_metaclass'} and len(base_expr.args) >= 1 and all(kind == ARG_POS for kind in base_expr.arg_kinds)): with_meta_expr = base_expr.args[0] defn.base_type_exprs = base_expr.args[1:] # Look for @six.add_metaclass(M) add_meta_expr = None # type: Optional[Expression] for dec_expr in defn.decorators: if isinstance(dec_expr, CallExpr) and isinstance(dec_expr.callee, RefExpr): dec_expr.callee.accept(self) if (dec_expr.callee.fullname == 'six.add_metaclass' and len(dec_expr.args) == 1 and dec_expr.arg_kinds[0] == ARG_POS): add_meta_expr = dec_expr.args[0] break metas = {defn.metaclass, python2_meta_expr, with_meta_expr, add_meta_expr} - {None} if len(metas) == 0: return if len(metas) > 1: self.fail("Multiple metaclass definitions", defn) return defn.metaclass = metas.pop() def verify_base_classes(self, defn: ClassDef) -> bool: info = defn.info cycle = False for base in info.bases: baseinfo = base.type if self.is_base_class(info, baseinfo): self.fail('Cycle in inheritance hierarchy', defn) cycle = True if baseinfo.fullname == 'builtins.bool': self.fail("'%s' is not a valid base 
class" % baseinfo.name, defn, blocker=True) return False dup = find_duplicate(info.direct_base_classes()) if dup: self.fail('Duplicate base class "%s"' % dup.name, defn, blocker=True) return False return not cycle def is_base_class(self, t: TypeInfo, s: TypeInfo) -> bool: """Determine if t is a base class of s (but do not use mro).""" # Search the base class graph for t, starting from s. worklist = [s] visited = {s} while worklist: nxt = worklist.pop() if nxt == t: return True for base in nxt.bases: if base.type not in visited: worklist.append(base.type) visited.add(base.type) return False def analyze_metaclass(self, defn: ClassDef) -> None: if defn.metaclass: metaclass_name = None if isinstance(defn.metaclass, NameExpr): metaclass_name = defn.metaclass.name elif isinstance(defn.metaclass, MemberExpr): metaclass_name = get_member_expr_fullname(defn.metaclass) if metaclass_name is None: self.fail("Dynamic metaclass not supported for '%s'" % defn.name, defn.metaclass) return sym = self.lookup_qualified(metaclass_name, defn.metaclass) if sym is None: # Probably a name error - it is already handled elsewhere return if isinstance(sym.node, Var) and isinstance(get_proper_type(sym.node.type), AnyType): # 'Any' metaclass -- just ignore it. # # TODO: A better approach would be to record this information # and assume that the type object supports arbitrary # attributes, similar to an 'Any' base class. 
                return
            if isinstance(sym.node, PlaceholderNode):
                # Metaclass not fully analyzed yet; try again later.
                self.defer(defn)
                return
            if not isinstance(sym.node, TypeInfo) or sym.node.tuple_type is not None:
                self.fail("Invalid metaclass '%s'" % metaclass_name, defn.metaclass)
                return
            if not sym.node.is_metaclass():
                self.fail("Metaclasses not inheriting from 'type' are not supported",
                          defn.metaclass)
                return
            inst = fill_typevars(sym.node)
            assert isinstance(inst, Instance)
            defn.info.declared_metaclass = inst
        defn.info.metaclass_type = defn.info.calculate_metaclass_type()
        if any(info.is_protocol for info in defn.info.mro):
            if (not defn.info.metaclass_type or
                    defn.info.metaclass_type.type.fullname == 'builtins.type'):
                # All protocols and their subclasses have ABCMeta metaclass by default.
                # TODO: add a metaclass conflict check if there is another metaclass.
                abc_meta = self.named_type_or_none('abc.ABCMeta', [])
                if abc_meta is not None:  # May be None in tests with incomplete lib-stub.
                    defn.info.metaclass_type = abc_meta
        if defn.info.metaclass_type is None:
            # Inconsistency may happen due to multiple baseclasses even in classes that
            # do not declare explicit metaclass, but it's harder to catch at this stage
            if defn.metaclass is not None:
                self.fail("Inconsistent metaclass structure for '%s'" % defn.name, defn)
        else:
            if defn.info.metaclass_type.type.has_base('enum.EnumMeta'):
                defn.info.is_enum = True
                if defn.type_vars:
                    self.fail("Enum class cannot be generic", defn)

    #
    # Imports
    #

    def visit_import(self, i: Import) -> None:
        """Analyze a plain 'import m' / 'import m as n' statement."""
        self.statement = i
        for id, as_id in i.ids:
            if as_id is not None:
                self.add_module_symbol(id, as_id, module_public=True, context=i)
            else:
                # Modules imported in a stub file without using 'as x' won't get exported
                # When implicit re-exporting is disabled, we have the same behavior as stubs.
                module_public = (
                    not self.is_stub_file
                    and self.options.implicit_reexport
                )
                base = id.split('.')[0]
                self.add_module_symbol(base, base, module_public=module_public,
                                       context=i, module_hidden=not module_public)

    def visit_import_from(self, imp: ImportFrom) -> None:
        """Analyze a 'from m import x [as y]' statement."""
        self.statement = imp
        module_id = self.correct_relative_import(imp)
        module = self.modules.get(module_id)
        for id, as_id in imp.names:
            fullname = module_id + '.' + id
            if module is None:
                node = None
            elif module_id == self.cur_mod_id and fullname in self.modules:
                # Submodule takes precedence over definition in surround package, for
                # compatibility with runtime semantics in typical use cases. This
                # could more precisely model runtime semantics by taking into account
                # the line number beyond which the local definition should take
                # precedence, but doesn't seem to be important in most use cases.
                node = SymbolTableNode(GDEF, self.modules[fullname])
            else:
                node = module.names.get(id)
            missing_submodule = False
            imported_id = as_id or id

            # If the module does not contain a symbol with the name 'id',
            # try checking if it's a module instead.
            if not node:
                mod = self.modules.get(fullname)
                if mod is not None:
                    kind = self.current_symbol_kind()
                    node = SymbolTableNode(kind, mod)
                elif fullname in self.missing_modules:
                    missing_submodule = True
            # If it is still not resolved, check for a module level __getattr__
            if (module and not node and (module.is_stub or self.options.python_version >= (3, 7))
                    and '__getattr__' in module.names):
                # We store the fullname of the original definition so that we can
                # detect whether two imported names refer to the same thing.
                fullname = module_id + '.' + id
                gvar = self.create_getattr_var(module.names['__getattr__'], imported_id, fullname)
                if gvar:
                    self.add_symbol(imported_id, gvar, imp)
                    continue
            if node and not node.module_hidden:
                self.process_imported_symbol(node, module_id, id, as_id, fullname, imp)
            elif module and not missing_submodule:
                # Target module exists but the imported name is missing or hidden.
                self.report_missing_module_attribute(module_id, id, imported_id, imp)
            else:
                # Import of a missing (sub)module.
                self.add_unknown_imported_symbol(imported_id, imp, target_name=fullname)

    def process_imported_symbol(self,
                                node: SymbolTableNode,
                                module_id: str,
                                id: str,
                                as_id: Optional[str],
                                fullname: str,
                                context: ImportBase) -> None:
        """Bind an imported name in the current scope, handling placeholders and redefinitions."""
        imported_id = as_id or id
        if isinstance(node.node, PlaceholderNode):
            if self.final_iteration:
                self.report_missing_module_attribute(module_id, id, imported_id, context)
                return
            else:
                # This might become a type.
                self.mark_incomplete(imported_id, node.node,
                                     becomes_typeinfo=True)
        existing_symbol = self.globals.get(imported_id)
        if (existing_symbol and not isinstance(existing_symbol.node, PlaceholderNode) and
                not isinstance(node.node, PlaceholderNode)):
            # Import can redefine a variable. They get special treatment.
            if self.process_import_over_existing_name(
                    imported_id, existing_symbol, node, context):
                return
        if existing_symbol and isinstance(node.node, PlaceholderNode):
            # Imports are special, some redefinitions are allowed, so wait until
            # we know what is the new symbol node.
            return
        # 'from m import x as x' exports x in a stub file or when implicit
        # re-exports are disabled.
        module_public = (
            not self.is_stub_file
            and self.options.implicit_reexport
            or as_id is not None
        )
        module_hidden = not module_public and fullname not in self.modules
        # NOTE: we take the original node even for final `Var`s. This is to support
        # a common pattern when constants are re-exported (same applies to import *).
        self.add_imported_symbol(imported_id, node, context,
                                 module_public=module_public,
                                 module_hidden=module_hidden)

    def report_missing_module_attribute(self, import_id: str, source_id: str, imported_id: str,
                                        context: Node) -> None:
        """Report that a module has no attribute, suggesting close matches when possible."""
        # Missing attribute.
        if self.is_incomplete_namespace(import_id):
            # We don't know whether the name will be there, since the namespace
            # is incomplete. Defer the current target.
            self.mark_incomplete(imported_id, context)
            return
        message = "Module '{}' has no attribute '{}'".format(import_id, source_id)
        # Suggest alternatives, if any match is found.
        module = self.modules.get(import_id)
        if module:
            alternatives = set(module.names.keys()).difference({source_id})
            matches = best_matches(source_id, alternatives)[:3]
            if matches:
                suggestion = "; maybe {}?".format(pretty_or(matches))
                message += "{}".format(suggestion)
        self.fail(message, context, code=codes.ATTR_DEFINED)
        self.add_unknown_imported_symbol(imported_id, context)

        if import_id == 'typing':
            # The user probably has a missing definition in a test fixture. Let's verify.
            fullname = 'builtins.{}'.format(source_id.lower())
            if (self.lookup_fully_qualified_or_none(fullname) is None and
                    fullname in SUGGESTED_TEST_FIXTURES):
                # Yes. Generate a helpful note.
                self.add_fixture_note(fullname, context)

    def process_import_over_existing_name(self,
                                          imported_id: str, existing_symbol: SymbolTableNode,
                                          module_symbol: SymbolTableNode,
                                          import_node: ImportBase) -> bool:
        """Handle an import that shadows an existing definition in the same file.

        Return True if the import was treated as a (type-checked) redefinition.
        """
        if existing_symbol.node is module_symbol.node:
            # We added this symbol on previous iteration.
            return False
        if (existing_symbol.kind in (LDEF, GDEF, MDEF) and
                isinstance(existing_symbol.node, (Var, FuncDef, TypeInfo, Decorator, TypeAlias))):
            # This is a valid import over an existing definition in the file. Construct a dummy
            # assignment that we'll use to type check the import.
            lvalue = NameExpr(imported_id)
            lvalue.kind = existing_symbol.kind
            lvalue.node = existing_symbol.node
            rvalue = NameExpr(imported_id)
            rvalue.kind = module_symbol.kind
            rvalue.node = module_symbol.node
            if isinstance(rvalue.node, TypeAlias):
                # Suppress bogus errors from the dummy assignment if rvalue is an alias.
                # Otherwise mypy may complain that alias is invalid in runtime context.
                rvalue.is_alias_rvalue = True
            assignment = AssignmentStmt([lvalue], rvalue)
            for node in assignment, lvalue, rvalue:
                node.set_line(import_node)
            import_node.assignments.append(assignment)
            return True
        return False

    def add_fixture_note(self, fullname: str, ctx: Context) -> None:
        """Emit a note pointing at the test fixture that likely should define 'fullname'."""
        self.note('Maybe your test fixture does not define "{}"?'.format(fullname), ctx)
        if fullname in SUGGESTED_TEST_FIXTURES:
            self.note(
                'Consider adding [builtins fixtures/{}] to your test description'.format(
                    SUGGESTED_TEST_FIXTURES[fullname]), ctx)

    def correct_relative_import(self, node: Union[ImportFrom, ImportAll]) -> str:
        """Resolve a (possibly relative) import to an absolute module id."""
        import_id, ok = correct_relative_import(self.cur_mod_id, node.relative, node.id,
                                                self.cur_mod_node.is_package_init_file())
        if not ok:
            self.fail("Relative import climbs too many namespaces", node)
        return import_id

    def visit_import_all(self, i: ImportAll) -> None:
        """Analyze a 'from m import *' statement."""
        i_id = self.correct_relative_import(i)
        if i_id in self.modules:
            m = self.modules[i_id]
            if self.is_incomplete_namespace(i_id):
                # Any names could be missing from the current namespace if the target module
                # namespace is incomplete.
                self.mark_incomplete('*', i)
            for name, node in m.names.items():
                if node is None:
                    continue
                # if '__all__' exists, all nodes not included have had module_public set to
                # False, and we can skip checking '_' because it's been explicitly included.
                if node.module_public and (not name.startswith('_') or '__all__' in m.names):
                    if isinstance(node.node, MypyFile):
                        # Star import of submodule from a package, add it as a dependency.
                        self.imports.add(node.node.fullname)
                    existing_symbol = self.lookup_current_scope(name)
                    if existing_symbol and not isinstance(node.node, PlaceholderNode):
                        # Import can redefine a variable. They get special treatment.
                        if self.process_import_over_existing_name(
                                name, existing_symbol, node, i):
                            continue
                    # In stub files, `from x import *` always reexports the symbols.
                    # In regular files, only if implicit reexports are enabled.
                    module_public = self.is_stub_file or self.options.implicit_reexport
                    self.add_imported_symbol(name, node, i,
                                             module_public=module_public,
                                             module_hidden=not module_public)
        else:
            # Don't add any dummy symbols for 'from x import *' if 'x' is unknown.
            pass

    #
    # Assignment
    #

    def visit_assignment_expr(self, s: AssignmentExpr) -> None:
        """Analyze a walrus (':=') assignment expression."""
        s.value.accept(self)
        # Walrus targets in comprehensions bind in the enclosing scope.
        self.analyze_lvalue(s.target, escape_comprehensions=True)

    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
        """Analyze an assignment statement, classifying special forms first."""
        self.statement = s

        # Special case assignment like X = X.
        if self.analyze_identity_global_assignment(s):
            return

        tag = self.track_incomplete_refs()
        s.rvalue.accept(self)
        if self.found_incomplete_ref(tag) or self.should_wait_rhs(s.rvalue):
            # Initializer couldn't be fully analyzed. Defer the current node and give up.
            # Make sure that if we skip the definition of some local names, they can't be
            # added later in this scope, since an earlier definition should take precedence.
            for expr in names_modified_by_assignment(s):
                self.mark_incomplete(expr.name, expr)
            return

        # The r.h.s.
is now ready to be classified, first check if it is a special form: special_form = False # * type alias if self.check_and_set_up_type_alias(s): s.is_alias_def = True special_form = True # * type variable definition elif self.process_typevar_declaration(s): special_form = True # * type constructors elif self.analyze_namedtuple_assign(s): special_form = True elif self.analyze_typeddict_assign(s): special_form = True elif self.newtype_analyzer.process_newtype_declaration(s): special_form = True elif self.analyze_enum_assign(s): special_form = True if special_form: self.record_special_form_lvalue(s) return # OK, this is a regular assignment, perform the necessary analysis steps. s.is_final_def = self.unwrap_final(s) self.analyze_lvalues(s) self.check_final_implicit_def(s) self.check_classvar(s) self.process_type_annotation(s) self.apply_dynamic_class_hook(s) self.store_final_status(s) if not s.type: self.process_module_assignment(s.lvalues, s.rvalue, s) self.process__all__(s) def analyze_identity_global_assignment(self, s: AssignmentStmt) -> bool: """Special case 'X = X' in global scope. This allows supporting some important use cases. Return true if special casing was applied. """ if not isinstance(s.rvalue, NameExpr) or len(s.lvalues) != 1: # Not of form 'X = X' return False lvalue = s.lvalues[0] if not isinstance(lvalue, NameExpr) or s.rvalue.name != lvalue.name: # Not of form 'X = X' return False if self.type is not None or self.is_func_scope(): # Not in global scope return False # It's an assignment like 'X = X' in the global scope. name = lvalue.name sym = self.lookup(name, s) if sym is None: if self.final_iteration: # Fall back to normal assignment analysis. return False else: self.defer() return True else: if sym.node is None: # Something special -- fall back to normal assignment analysis. return False if name not in self.globals: # The name is from builtins. Add an alias to the current module. 
self.add_symbol(name, sym.node, s) if not isinstance(sym.node, PlaceholderNode): for node in s.rvalue, lvalue: node.node = sym.node node.kind = GDEF node.fullname = sym.node.fullname return True def should_wait_rhs(self, rv: Expression) -> bool: """Can we already classify this r.h.s. of an assignment or should we wait? This returns True if we don't have enough information to decide whether an assignment is just a normal variable definition or a special form. Always return False if this is a final iteration. This will typically cause the lvalue to be classified as a variable plus emit an error. """ if self.final_iteration: # No chance, nothing has changed. return False if isinstance(rv, NameExpr): n = self.lookup(rv.name, rv) if n and isinstance(n.node, PlaceholderNode) and not n.node.becomes_typeinfo: return True elif isinstance(rv, MemberExpr): fname = get_member_expr_fullname(rv) if fname: n = self.lookup_qualified(fname, rv, suppress_errors=True) if n and isinstance(n.node, PlaceholderNode) and not n.node.becomes_typeinfo: return True elif isinstance(rv, IndexExpr) and isinstance(rv.base, RefExpr): return self.should_wait_rhs(rv.base) elif isinstance(rv, CallExpr) and isinstance(rv.callee, RefExpr): # This is only relevant for builtin SCC where things like 'TypeVar' # may be not ready. return self.should_wait_rhs(rv.callee) return False def can_be_type_alias(self, rv: Expression) -> bool: """Is this a valid r.h.s. for an alias definition? Note: this function should be only called for expressions where self.should_wait_rhs() returns False. """ if isinstance(rv, RefExpr) and self.is_type_ref(rv, bare=True): return True if isinstance(rv, IndexExpr) and self.is_type_ref(rv.base, bare=False): return True if self.is_none_alias(rv): return True return False def is_type_ref(self, rv: Expression, bare: bool = False) -> bool: """Does this expression refer to a type? 
This includes: * Special forms, like Any or Union * Classes (except subscripted enums) * Other type aliases * PlaceholderNodes with becomes_typeinfo=True (these can be not ready class definitions, and not ready aliases). If bare is True, this is not a base of an index expression, so some special forms are not valid (like a bare Union). Note: This method should be only used in context of a type alias definition. This method can only return True for RefExprs, to check if C[int] is a valid target for type alias call this method on expr.base (i.e. on C in C[int]). See also can_be_type_alias(). """ if not isinstance(rv, RefExpr): return False if isinstance(rv.node, TypeVarExpr): self.fail('Type variable "{}" is invalid as target for type alias'.format( rv.fullname), rv) return False if bare: # These three are valid even if bare, for example # A = Tuple is just equivalent to A = Tuple[Any, ...]. valid_refs = {'typing.Any', 'typing.Tuple', 'typing.Callable'} else: valid_refs = type_constructors if isinstance(rv.node, TypeAlias) or rv.fullname in valid_refs: return True if isinstance(rv.node, TypeInfo): if bare: return True # Assignment color = Color['RED'] defines a variable, not an alias. return not rv.node.is_enum if isinstance(rv, NameExpr): n = self.lookup(rv.name, rv) if n and isinstance(n.node, PlaceholderNode) and n.node.becomes_typeinfo: return True elif isinstance(rv, MemberExpr): fname = get_member_expr_fullname(rv) if fname: # The r.h.s. for variable definitions may not be a type reference but just # an instance attribute, so suppress the errors. n = self.lookup_qualified(fname, rv, suppress_errors=True) if n and isinstance(n.node, PlaceholderNode) and n.node.becomes_typeinfo: return True return False def is_none_alias(self, node: Expression) -> bool: """Is this a r.h.s. for a None alias? We special case the assignments like Void = type(None), to allow using Void in type annotations. 
""" if isinstance(node, CallExpr): if (isinstance(node.callee, NameExpr) and len(node.args) == 1 and isinstance(node.args[0], NameExpr)): call = self.lookup_qualified(node.callee.name, node.callee) arg = self.lookup_qualified(node.args[0].name, node.args[0]) if (call is not None and call.node and call.node.fullname == 'builtins.type' and arg is not None and arg.node and arg.node.fullname == 'builtins.None'): return True return False def record_special_form_lvalue(self, s: AssignmentStmt) -> None: """Record minimal necessary information about l.h.s. of a special form. This exists mostly for compatibility with the old semantic analyzer. """ lvalue = s.lvalues[0] assert isinstance(lvalue, NameExpr) lvalue.is_special_form = True if self.current_symbol_kind() == GDEF: lvalue.fullname = self.qualified_name(lvalue.name) lvalue.kind = self.current_symbol_kind() def analyze_enum_assign(self, s: AssignmentStmt) -> bool: """Check if s defines an Enum.""" if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.analyzed, EnumCallExpr): # Already analyzed enum -- nothing to do here. return True return self.enum_call_analyzer.process_enum_call(s, self.is_func_scope()) def analyze_namedtuple_assign(self, s: AssignmentStmt) -> bool: """Check if s defines a namedtuple.""" if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.analyzed, NamedTupleExpr): return True # This is a valid and analyzed named tuple definition, nothing to do here. if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], (NameExpr, MemberExpr)): return False lvalue = s.lvalues[0] name = lvalue.name is_named_tuple, info = self.named_tuple_analyzer.check_namedtuple(s.rvalue, name, self.is_func_scope()) if not is_named_tuple: return False if isinstance(lvalue, MemberExpr): self.fail("NamedTuple type as an attribute is not supported", lvalue) return False # Yes, it's a valid namedtuple, but defer if it is not ready. 
        # (continuation of analyze_namedtuple_assign) -- defer if the TypeInfo
        # is not ready yet, but still report this as a namedtuple definition.
        if not info:
            self.mark_incomplete(name, lvalue, becomes_typeinfo=True)
        return True

    def analyze_typeddict_assign(self, s: AssignmentStmt) -> bool:
        """Check if s defines a typed dict."""
        if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.analyzed, TypedDictExpr):
            return True  # This is a valid and analyzed typed dict definition, nothing to do here.
        if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], (NameExpr, MemberExpr)):
            return False
        lvalue = s.lvalues[0]
        name = lvalue.name
        is_typed_dict, info = self.typed_dict_analyzer.check_typeddict(s.rvalue, name,
                                                                       self.is_func_scope())
        if not is_typed_dict:
            return False
        if isinstance(lvalue, MemberExpr):
            self.fail("TypedDict type as attribute is not supported", lvalue)
            return False
        # Yes, it's a valid typed dict, but defer if it is not ready.
        if not info:
            self.mark_incomplete(name, lvalue, becomes_typeinfo=True)
        return True

    def analyze_lvalues(self, s: AssignmentStmt) -> None:
        """Analyze all lvalues of a (non-special-form) assignment."""
        # We cannot use s.type, because analyze_simple_literal_type() will set it.
        explicit = s.unanalyzed_type is not None
        if self.is_final_type(s.unanalyzed_type):
            # We need to exclude bare Final.
            assert isinstance(s.unanalyzed_type, UnboundType)
            if not s.unanalyzed_type.args:
                explicit = False
        for lval in s.lvalues:
            self.analyze_lvalue(lval,
                                explicit_type=explicit,
                                is_final=s.is_final_def)

    def apply_dynamic_class_hook(self, s: AssignmentStmt) -> None:
        """Invoke a plugin's dynamic class hook for 'X = make_class(...)' assignments."""
        if len(s.lvalues) > 1:
            return
        lval = s.lvalues[0]
        if not isinstance(lval, NameExpr) or not isinstance(s.rvalue, CallExpr):
            return
        call = s.rvalue
        fname = None
        if isinstance(call.callee, RefExpr):
            fname = call.callee.fullname
        # check if method call
        if fname is None and isinstance(call.callee, MemberExpr):
            callee_expr = call.callee.expr
            if isinstance(callee_expr, RefExpr) and callee_expr.fullname:
                method_name = call.callee.name
                fname = callee_expr.fullname + '.' + method_name
        if fname:
            hook = self.plugin.get_dynamic_class_hook(fname)
            if hook:
                hook(DynamicClassDefContext(call, lval.name, self))

    def unwrap_final(self, s: AssignmentStmt) -> bool:
        """Strip Final[...] if present in an assignment.

        This is done to invoke type inference during type checking phase for this
        assignment.  Also, Final[...] doesn't affect type in any way -- it is rather an
        access qualifier for given `Var`.

        Also perform various consistency checks.

        Returns True if Final[...] was present.
        """
        if not s.unanalyzed_type or not self.is_final_type(s.unanalyzed_type):
            return False
        assert isinstance(s.unanalyzed_type, UnboundType)
        if len(s.unanalyzed_type.args) > 1:
            self.fail("Final[...] takes at most one type argument", s.unanalyzed_type)
        invalid_bare_final = False
        if not s.unanalyzed_type.args:
            # Bare Final -- type will be inferred, but only with an initializer.
            s.type = None
            if isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs:
                invalid_bare_final = True
                self.fail("Type in Final[...] can only be omitted if there is an initializer", s)
        else:
            s.type = s.unanalyzed_type.args[0]
        if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], RefExpr):
            self.fail("Invalid final declaration", s)
            return False
        lval = s.lvalues[0]
        assert isinstance(lval, RefExpr)
        # Reset inferred status if it was set due to simple literal rvalue on previous iteration.
        # TODO: this is a best-effort quick fix, we should avoid the need to manually sync this,
        # see https://github.com/python/mypy/issues/6458.
        if lval.is_new_def:
            lval.is_inferred_def = s.type is None
        if self.loop_depth > 0:
            self.fail("Cannot use Final inside a loop", s)
        if self.type and self.type.is_protocol:
            self.msg.protocol_members_cant_be_final(s)
        if (isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs and
                not self.is_stub_file and not self.is_class_scope()):
            if not invalid_bare_final:  # Skip extra error messages.
                self.msg.final_without_value(s)
        return True

    def check_final_implicit_def(self, s: AssignmentStmt) -> None:
        """Do basic checks for final declaration on self in __init__.

        Additional re-definition checks are performed by `analyze_lvalue`.
        """
        if not s.is_final_def:
            return
        lval = s.lvalues[0]
        assert isinstance(lval, RefExpr)
        if isinstance(lval, MemberExpr):
            if not self.is_self_member_ref(lval):
                self.fail("Final can be only applied to a name or an attribute on self", s)
                s.is_final_def = False
                return
        else:
            assert self.function_stack
            if self.function_stack[-1].name != '__init__':
                self.fail("Can only declare a final attribute in class body or __init__", s)
                s.is_final_def = False
                return

    def store_final_status(self, s: AssignmentStmt) -> None:
        """If this is a locally valid final declaration, set the corresponding flag on `Var`."""
        if s.is_final_def:
            if len(s.lvalues) == 1 and isinstance(s.lvalues[0], RefExpr):
                node = s.lvalues[0].node
                if isinstance(node, Var):
                    node.is_final = True
                    node.final_value = self.unbox_literal(s.rvalue)
                    if (self.is_class_scope() and
                            (isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs)):
                        node.final_unset_in_class = True
        else:
            # Special case: deferred initialization of a final attribute in __init__.
            # In this case we just pretend this is a valid final definition to suppress
            # errors about assigning to final attribute.
            # (continuation of store_final_status) -- mark 'self.x = ...' in __init__
            # as a valid final set when the class body declared it final without a value.
            for lval in self.flatten_lvalues(s.lvalues):
                if isinstance(lval, MemberExpr) and self.is_self_member_ref(lval):
                    assert self.type, "Self member outside a class"
                    cur_node = self.type.names.get(lval.name, None)
                    if cur_node and isinstance(cur_node.node, Var) and cur_node.node.is_final:
                        assert self.function_stack
                        top_function = self.function_stack[-1]
                        if (top_function.name == '__init__' and
                                cur_node.node.final_unset_in_class and
                                not cur_node.node.final_set_in_init and
                                not (isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs)):
                            cur_node.node.final_set_in_init = True
                            s.is_final_def = True

    def flatten_lvalues(self, lvalues: List[Expression]) -> List[Expression]:
        """Recursively flatten tuple/list lvalues into a flat list of targets."""
        res = []  # type: List[Expression]
        for lv in lvalues:
            if isinstance(lv, (TupleExpr, ListExpr)):
                res.extend(self.flatten_lvalues(lv.items))
            else:
                res.append(lv)
        return res

    def unbox_literal(self, e: Expression) -> Optional[Union[int, float, bool, str]]:
        """Return the Python value of a literal expression, or None if not a literal."""
        if isinstance(e, (IntExpr, FloatExpr, StrExpr)):
            return e.value
        elif isinstance(e, NameExpr) and e.name in ('True', 'False'):
            return True if e.name == 'True' else False
        return None

    def process_type_annotation(self, s: AssignmentStmt) -> None:
        """Analyze type annotation or infer simple literal type."""
        if s.type:
            lvalue = s.lvalues[-1]
            allow_tuple_literal = isinstance(lvalue, TupleExpr)
            analyzed = self.anal_type(s.type, allow_tuple_literal=allow_tuple_literal)
            # Don't store not ready types (including placeholders).
if analyzed is None or has_placeholder(analyzed): return s.type = analyzed if (self.type and self.type.is_protocol and isinstance(lvalue, NameExpr) and isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs): if isinstance(lvalue.node, Var): lvalue.node.is_abstract_var = True else: if (any(isinstance(lv, NameExpr) and lv.is_inferred_def for lv in s.lvalues) and self.type and self.type.is_protocol and not self.is_func_scope()): self.fail('All protocol members must have explicitly declared types', s) # Set the type if the rvalue is a simple literal (even if the above error occurred). if len(s.lvalues) == 1 and isinstance(s.lvalues[0], RefExpr): if s.lvalues[0].is_inferred_def: s.type = self.analyze_simple_literal_type(s.rvalue, s.is_final_def) if s.type: # Store type into nodes. for lvalue in s.lvalues: self.store_declared_types(lvalue, s.type) def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Optional[Type]: """Return builtins.int if rvalue is an int literal, etc. If this is a 'Final' context, we return "Literal[...]" instead.""" if self.options.semantic_analysis_only or self.function_stack: # Skip this if we're only doing the semantic analysis pass. # This is mostly to avoid breaking unit tests. # Also skip inside a function; this is to avoid confusing # the code that handles dead code due to isinstance() # inside type variables with value restrictions (like # AnyStr). 
            # (continuation of analyze_simple_literal_type) -- see the skip conditions
            # documented just above in its body.
            return None
        if isinstance(rvalue, FloatExpr):
            return self.named_type_or_none('builtins.float')

        value = None  # type: Optional[LiteralValue]
        type_name = None  # type: Optional[str]
        if isinstance(rvalue, IntExpr):
            value, type_name = rvalue.value, 'builtins.int'
        if isinstance(rvalue, StrExpr):
            value, type_name = rvalue.value, 'builtins.str'
        if isinstance(rvalue, BytesExpr):
            value, type_name = rvalue.value, 'builtins.bytes'
        if isinstance(rvalue, UnicodeExpr):
            value, type_name = rvalue.value, 'builtins.unicode'

        if type_name is not None:
            assert value is not None
            typ = self.named_type_or_none(type_name)
            if typ and is_final:
                # In a Final context, attach the literal as the last known value.
                return typ.copy_modified(last_known_value=LiteralType(
                    value=value,
                    fallback=typ,
                    line=typ.line,
                    column=typ.column,
                ))
            return typ

        return None

    def analyze_alias(self, rvalue: Expression,
                      allow_placeholder: bool = False) -> Tuple[Optional[Type], List[str],
                                                                Set[str], List[str]]:
        """Check if 'rvalue' is a valid type allowed for aliasing (e.g. not a type variable).

        If yes, return the corresponding type, a list of
        qualified type variable names for generic aliases, a set of names the alias depends on,
        and a list of type variables if the alias is generic.

        A schematic example for the dependencies:
            A = int
            B = str
            analyze_alias(Dict[A, B])[2] == {'__main__.A', '__main__.B'}
        """
        dynamic = bool(self.function_stack and self.function_stack[-1].is_dynamic())
        global_scope = not self.type and not self.function_stack
        res = analyze_type_alias(rvalue,
                                 self,
                                 self.tvar_scope,
                                 self.plugin,
                                 self.options,
                                 self.is_typeshed_stub_file,
                                 allow_unnormalized=self.is_stub_file,
                                 allow_placeholder=allow_placeholder,
                                 in_dynamic_func=dynamic,
                                 global_scope=global_scope)
        typ = None  # type: Optional[Type]
        if res:
            typ, depends_on = res
            found_type_vars = typ.accept(TypeVariableQuery(self.lookup_qualified, self.tvar_scope))
            alias_tvars = [name for (name, node) in found_type_vars]
            qualified_tvars = [node.fullname for (name, node) in found_type_vars]
        else:
            alias_tvars = []
            depends_on = set()
            qualified_tvars = []
        return typ, alias_tvars, depends_on, qualified_tvars

    def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool:
        """Check if assignment creates a type alias and set it up as needed.

        Return True if it is a type alias (even if the target is not ready),
        or False otherwise.

        Note: the resulting types for subscripted (including generic) aliases
        are also stored in rvalue.analyzed.
        """
        lvalue = s.lvalues[0]
        if len(s.lvalues) > 1 or not isinstance(lvalue, NameExpr):
            # First rule: Only simple assignments like Alias = ... create aliases.
            return False
        if s.unanalyzed_type is not None:
            # Second rule: Explicit type (cls: Type[A] = A) always creates variable, not alias.
            return False
        existing = self.current_symbol_table().get(lvalue.name)
        # Third rule: type aliases can't be re-defined. For example:
        #     A: Type[float] = int
        #     A = float  # OK, but this doesn't define an alias
        #     B = int
        #     B = float  # Error!
# Don't create an alias in these cases: if (existing and (isinstance(existing.node, Var) # existing variable or (isinstance(existing.node, TypeAlias) and not s.is_alias_def) # existing alias or (isinstance(existing.node, PlaceholderNode) and existing.node.node.line < s.line))): # previous incomplete definition # TODO: find a more robust way to track the order of definitions. # Note: if is_alias_def=True, this is just a node from previous iteration. if isinstance(existing.node, TypeAlias) and not s.is_alias_def: self.fail('Cannot assign multiple types to name "{}"' ' without an explicit "Type[...]" annotation' .format(lvalue.name), lvalue) return False non_global_scope = self.type or self.is_func_scope() if isinstance(s.rvalue, RefExpr) and non_global_scope: # Fourth rule (special case): Non-subscripted right hand side creates a variable # at class and function scopes. For example: # # class Model: # ... # class C: # model = Model # this is automatically a variable with type 'Type[Model]' # # without this rule, this typical use case will require a lot of explicit # annotations (see the second rule). return False rvalue = s.rvalue if not self.can_be_type_alias(rvalue): return False if existing and not isinstance(existing.node, (PlaceholderNode, TypeAlias)): # Cannot redefine existing node as type alias. return False res = None # type: Optional[Type] if self.is_none_alias(rvalue): res = NoneType() alias_tvars, depends_on, qualified_tvars = \ [], set(), [] # type: List[str], Set[str], List[str] else: tag = self.track_incomplete_refs() res, alias_tvars, depends_on, qualified_tvars = \ self.analyze_alias(rvalue, allow_placeholder=True) if not res: return False # TODO: Maybe we only need to reject top-level placeholders, similar # to base classes. if self.found_incomplete_ref(tag) or has_placeholder(res): # Since we have got here, we know this must be a type alias (incomplete refs # may appear in nested positions), therefore use becomes_typeinfo=True. 
self.mark_incomplete(lvalue.name, rvalue, becomes_typeinfo=True) return True self.add_type_alias_deps(depends_on) # In addition to the aliases used, we add deps on unbound # type variables, since they are erased from target type. self.add_type_alias_deps(qualified_tvars) # The above are only direct deps on other aliases. # For subscripted aliases, type deps from expansion are added in deps.py # (because the type is stored). check_for_explicit_any(res, self.options, self.is_typeshed_stub_file, self.msg, context=s) # When this type alias gets "inlined", the Any is not explicit anymore, # so we need to replace it with non-explicit Anys. if not has_placeholder(res): res = make_any_non_explicit(res) # Note: with the new (lazy) type alias representation we only need to set no_args to True # if the expected number of arguments is non-zero, so that aliases like A = List work. # However, eagerly expanding aliases like Text = str is a nice performance optimization. no_args = isinstance(res, Instance) and not res.args # type: ignore fix_instance_types(res, self.fail, self.note) alias_node = TypeAlias(res, self.qualified_name(lvalue.name), s.line, s.column, alias_tvars=alias_tvars, no_args=no_args) if isinstance(s.rvalue, (IndexExpr, CallExpr)): # CallExpr is for `void = type(None)` s.rvalue.analyzed = TypeAliasExpr(alias_node) s.rvalue.analyzed.line = s.line # we use the column from resulting target, to get better location for errors s.rvalue.analyzed.column = res.column elif isinstance(s.rvalue, RefExpr): s.rvalue.is_alias_rvalue = True if existing: # An alias gets updated. updated = False if isinstance(existing.node, TypeAlias): if existing.node.target != res: # Copy expansion to the existing alias, this matches how we update base classes # for a TypeInfo _in place_ if there are nested placeholders. 
existing.node.target = res existing.node.alias_tvars = alias_tvars existing.node.no_args = no_args updated = True else: # Otherwise just replace existing placeholder with type alias. existing.node = alias_node updated = True if updated: if self.final_iteration: self.cannot_resolve_name(lvalue.name, 'name', s) return True else: self.progress = True # We need to defer so that this change can get propagated to base classes. self.defer(s) else: self.add_symbol(lvalue.name, alias_node, s) if isinstance(rvalue, RefExpr) and isinstance(rvalue.node, TypeAlias): alias_node.normalized = rvalue.node.normalized return True def analyze_lvalue(self, lval: Lvalue, nested: bool = False, explicit_type: bool = False, is_final: bool = False, escape_comprehensions: bool = False) -> None: """Analyze an lvalue or assignment target. Args: lval: The target lvalue nested: If true, the lvalue is within a tuple or list lvalue expression explicit_type: Assignment has type annotation escape_comprehensions: If we are inside a comprehension, set the variable in the enclosing scope instead. 
This implements https://www.python.org/dev/peps/pep-0572/#scope-of-the-target """ if escape_comprehensions: assert isinstance(lval, NameExpr), "assignment expression target must be NameExpr" if isinstance(lval, NameExpr): self.analyze_name_lvalue(lval, explicit_type, is_final, escape_comprehensions) elif isinstance(lval, MemberExpr): self.analyze_member_lvalue(lval, explicit_type, is_final) if explicit_type and not self.is_self_member_ref(lval): self.fail('Type cannot be declared in assignment to non-self ' 'attribute', lval) elif isinstance(lval, IndexExpr): if explicit_type: self.fail('Unexpected type declaration', lval) lval.accept(self) elif isinstance(lval, TupleExpr): items = lval.items if len(items) == 0 and isinstance(lval, TupleExpr): self.fail("can't assign to ()", lval) self.analyze_tuple_or_list_lvalue(lval, explicit_type) elif isinstance(lval, StarExpr): if nested: self.analyze_lvalue(lval.expr, nested, explicit_type) else: self.fail('Starred assignment target must be in a list or tuple', lval) else: self.fail('Invalid assignment target', lval) def analyze_name_lvalue(self, lvalue: NameExpr, explicit_type: bool, is_final: bool, escape_comprehensions: bool) -> None: """Analyze an lvalue that targets a name expression. Arguments are similar to "analyze_lvalue". """ if lvalue.node: # This has been bound already in a previous iteration. return name = lvalue.name if self.is_alias_for_final_name(name): if is_final: self.fail("Cannot redefine an existing name as final", lvalue) else: self.msg.cant_assign_to_final(name, self.type is not None, lvalue) kind = self.current_symbol_kind() names = self.current_symbol_table() existing = names.get(name) outer = self.is_global_or_nonlocal(name) if (not existing or isinstance(existing.node, PlaceholderNode)) and not outer: # Define new variable. 
var = self.make_name_lvalue_var(lvalue, kind, not explicit_type) added = self.add_symbol(name, var, lvalue, escape_comprehensions=escape_comprehensions) # Only bind expression if we successfully added name to symbol table. if added: lvalue.is_new_def = True lvalue.is_inferred_def = True lvalue.kind = kind lvalue.node = var if kind == GDEF: lvalue.fullname = var._fullname else: lvalue.fullname = lvalue.name if self.is_func_scope(): if unmangle(name) == '_': # Special case for assignment to local named '_': always infer 'Any'. typ = AnyType(TypeOfAny.special_form) self.store_declared_types(lvalue, typ) if is_final and self.is_final_redefinition(kind, name): self.fail("Cannot redefine an existing name as final", lvalue) else: self.make_name_lvalue_point_to_existing_def(lvalue, explicit_type, is_final) def is_final_redefinition(self, kind: int, name: str) -> bool: if kind == GDEF: return self.is_mangled_global(name) and not self.is_initial_mangled_global(name) elif kind == MDEF and self.type: return unmangle(name) + "'" in self.type.names return False def is_alias_for_final_name(self, name: str) -> bool: if self.is_func_scope(): if not name.endswith("'"): # Not a mangled name -- can't be an alias return False name = unmangle(name) assert self.locals[-1] is not None, "No locals at function scope" existing = self.locals[-1].get(name) return existing is not None and is_final_node(existing.node) elif self.type is not None: orig_name = unmangle(name) + "'" if name == orig_name: return False existing = self.type.names.get(orig_name) return existing is not None and is_final_node(existing.node) else: orig_name = unmangle(name) + "'" if name == orig_name: return False existing = self.globals.get(orig_name) return existing is not None and is_final_node(existing.node) def make_name_lvalue_var(self, lvalue: NameExpr, kind: int, inferred: bool) -> Var: """Return a Var node for an lvalue that is a name expression.""" v = Var(lvalue.name) v.set_line(lvalue) v.is_inferred = inferred 
        # (continuation of make_name_lvalue_var) -- finish initializing the new Var.
        if kind == MDEF:
            assert self.type is not None
            v.info = self.type
            v.is_initialized_in_class = True
        if kind != LDEF:
            v._fullname = self.qualified_name(lvalue.name)
        else:
            # fullname should never stay None
            v._fullname = lvalue.name
        v.is_ready = False  # Type not inferred yet
        return v

    def make_name_lvalue_point_to_existing_def(
            self,
            lval: NameExpr,
            explicit_type: bool,
            is_final: bool) -> None:
        """Update an lvalue to point to existing definition in the same scope.

        Arguments are similar to "analyze_lvalue".

        Assume that an existing name exists.
        """
        if is_final:
            # Redefining an existing name with final is always an error.
            self.fail("Cannot redefine an existing name as final", lval)
        original_def = self.lookup(lval.name, lval, suppress_errors=True)
        if original_def is None and self.type and not self.is_func_scope():
            # Workaround to allow "x, x = ..." in class body.
            original_def = self.type.get(lval.name)
        if explicit_type:
            # Don't re-bind if there is a type annotation.
            self.name_already_defined(lval.name, lval, original_def)
        else:
            # Bind to an existing name.
            if original_def:
                self.bind_name_expr(lval, original_def)
            else:
                self.name_not_defined(lval.name, lval)
            self.check_lvalue_validity(lval.node, lval)

    def analyze_tuple_or_list_lvalue(self, lval: TupleExpr,
                                     explicit_type: bool = False) -> None:
        """Analyze an lvalue or assignment target that is a list or tuple."""
        items = lval.items
        star_exprs = [item for item in items if isinstance(item, StarExpr)]

        if len(star_exprs) > 1:
            self.fail('Two starred expressions in assignment', lval)
        else:
            if len(star_exprs) == 1:
                star_exprs[0].valid = True
            for i in items:
                self.analyze_lvalue(i, nested=True, explicit_type=explicit_type)

    def analyze_member_lvalue(self, lval: MemberExpr, explicit_type: bool,
                              is_final: bool) -> None:
        """Analyze lvalue that is a member expression.

        Arguments:
            lval: The target lvalue
            explicit_type: Assignment has type annotation
            is_final: Is the target final
        """
        if lval.node:
            # This has been bound already in a previous iteration.
            return
        lval.accept(self)
        if self.is_self_member_ref(lval):
            assert self.type, "Self member outside a class"
            cur_node = self.type.names.get(lval.name)
            node = self.type.get(lval.name)
            if cur_node and is_final:
                # Overrides will be checked in type checker.
                self.fail("Cannot redefine an existing name as final", lval)
            # On first encounter with this definition, if this attribute was defined before
            # with an inferred type and it's marked with an explicit type now, give an error.
            if (not lval.node and cur_node and isinstance(cur_node.node, Var) and
                    cur_node.node.is_inferred and explicit_type):
                self.attribute_already_defined(lval.name, lval, cur_node)
            # If the attribute of self is not defined in superclasses, create a new Var, ...
            if (node is None
                    or (isinstance(node.node, Var) and node.node.is_abstract_var)
                    # ... also an explicit declaration on self also creates a new Var.
                    # Note that `explicit_type` might have been erased for bare `Final`,
                    # so we also check if `is_final` is passed.
                    or (cur_node is None and (explicit_type or is_final))):
                if self.type.is_protocol and node is None:
                    self.fail("Protocol members cannot be defined via assignment to self", lval)
                else:
                    # Implicit attribute definition in __init__.
                    lval.is_new_def = True
                    lval.is_inferred_def = True
                    v = Var(lval.name)
                    v.set_line(lval)
                    v._fullname = self.qualified_name(lval.name)
                    v.info = self.type
                    v.is_ready = False
                    v.explicit_self_type = explicit_type or is_final
                    lval.def_var = v
                    lval.node = v
                    # TODO: should we also set lval.kind = MDEF?
                    # (continuation of analyze_member_lvalue) -- register the implicit
                    # attribute in the class symbol table.
                    self.type.names[lval.name] = SymbolTableNode(MDEF, v, implicit=True)
        self.check_lvalue_validity(lval.node, lval)

    def is_self_member_ref(self, memberexpr: MemberExpr) -> bool:
        """Does memberexpr refer to an attribute of self?"""
        if not isinstance(memberexpr.expr, NameExpr):
            return False
        node = memberexpr.expr.node
        return isinstance(node, Var) and node.is_self

    def check_lvalue_validity(self, node: Union[Expression, SymbolNode, None],
                              ctx: Context) -> None:
        """Report an error if the assignment target is a type variable or a class."""
        if isinstance(node, TypeVarExpr):
            self.fail('Invalid assignment target', ctx)
        elif isinstance(node, TypeInfo):
            self.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, ctx)

    def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None:
        """Store an analyzed declared type on the lvalue's Var node(s)."""
        if isinstance(typ, StarType) and not isinstance(lvalue, StarExpr):
            self.fail('Star type only allowed for starred expressions', lvalue)
        if isinstance(lvalue, RefExpr):
            lvalue.is_inferred_def = False
            if isinstance(lvalue.node, Var):
                var = lvalue.node
                var.type = typ
                var.is_ready = True
            # If node is not a variable, we'll catch it elsewhere.
        elif isinstance(lvalue, TupleExpr):
            typ = get_proper_type(typ)
            if isinstance(typ, TupleType):
                if len(lvalue.items) != len(typ.items):
                    self.fail('Incompatible number of tuple items', lvalue)
                    return
                for item, itemtype in zip(lvalue.items, typ.items):
                    self.store_declared_types(item, itemtype)
            else:
                self.fail('Tuple type expected for multiple variables', lvalue)
        elif isinstance(lvalue, StarExpr):
            # Historical behavior for the old parser
            if isinstance(typ, StarType):
                self.store_declared_types(lvalue.expr, typ.type)
            else:
                self.store_declared_types(lvalue.expr, typ)
        else:
            # This has been flagged elsewhere as an error, so just ignore here.
            pass

    def process_typevar_declaration(self, s: AssignmentStmt) -> bool:
        """Check if s declares a TypeVar; if yes, store it in symbol table.

        Return True if this looks like a type variable declaration (but maybe
        with errors), otherwise return False.
        """
        call = self.get_typevar_declaration(s)
        if not call:
            return False

        lvalue = s.lvalues[0]
        assert isinstance(lvalue, NameExpr)
        if s.type:
            self.fail("Cannot declare the type of a type variable", s)
            return False

        name = lvalue.name
        if not self.check_typevar_name(call, name, s):
            return False

        # Constraining types
        n_values = call.arg_kinds[1:].count(ARG_POS)
        values = self.analyze_value_types(call.args[1:1 + n_values])

        res = self.process_typevar_parameters(call.args[1 + n_values:],
                                              call.arg_names[1 + n_values:],
                                              call.arg_kinds[1 + n_values:],
                                              n_values,
                                              s)
        if res is None:
            return False
        variance, upper_bound = res

        existing = self.current_symbol_table().get(name)
        if existing and not (isinstance(existing.node, PlaceholderNode) or
                             # Also give error for another type variable with the same name.
                             (isinstance(existing.node, TypeVarExpr) and
                              existing.node is call.analyzed)):
            self.fail("Cannot redefine '%s' as a type variable" % name, s)
            return False

        if self.options.disallow_any_unimported:
            for idx, constraint in enumerate(values, start=1):
                if has_any_from_unimported_type(constraint):
                    prefix = "Constraint {}".format(idx)
                    self.msg.unimported_type_becomes_any(prefix, constraint, s)

            if has_any_from_unimported_type(upper_bound):
                prefix = "Upper bound of type variable"
                self.msg.unimported_type_becomes_any(prefix, upper_bound, s)

        for t in values + [upper_bound]:
            check_for_explicit_any(t, self.options, self.is_typeshed_stub_file, self.msg,
                                   context=s)

        # mypyc suppresses making copies of a function to check each
        # possible type, so set the upper bound to Any to prevent that
        # from causing errors.
        if values and self.options.mypyc:
            upper_bound = AnyType(TypeOfAny.implementation_artifact)

        # Yes, it's a valid type variable definition! Add it to the symbol table.
if not call.analyzed: type_var = TypeVarExpr(name, self.qualified_name(name), values, upper_bound, variance) type_var.line = call.line call.analyzed = type_var else: assert isinstance(call.analyzed, TypeVarExpr) if call.analyzed.values != values or call.analyzed.upper_bound != upper_bound: self.progress = True call.analyzed.upper_bound = upper_bound call.analyzed.values = values self.add_symbol(name, call.analyzed, s) return True def check_typevar_name(self, call: CallExpr, name: str, context: Context) -> bool: name = unmangle(name) if len(call.args) < 1: self.fail("Too few arguments for TypeVar()", context) return False if (not isinstance(call.args[0], (StrExpr, BytesExpr, UnicodeExpr)) or not call.arg_kinds[0] == ARG_POS): self.fail("TypeVar() expects a string literal as first argument", context) return False elif call.args[0].value != name: msg = "String argument 1 '{}' to TypeVar(...) does not match variable name '{}'" self.fail(msg.format(call.args[0].value, name), context) return False return True def get_typevar_declaration(self, s: AssignmentStmt) -> Optional[CallExpr]: """Returns the TypeVar() call expression if `s` is a type var declaration or None otherwise. 
""" if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr): return None if not isinstance(s.rvalue, CallExpr): return None call = s.rvalue callee = call.callee if not isinstance(callee, RefExpr): return None if callee.fullname != 'typing.TypeVar': return None return call def process_typevar_parameters(self, args: List[Expression], names: List[Optional[str]], kinds: List[int], num_values: int, context: Context) -> Optional[Tuple[int, Type]]: has_values = (num_values > 0) covariant = False contravariant = False upper_bound = self.object_type() # type: Type for param_value, param_name, param_kind in zip(args, names, kinds): if not param_kind == ARG_NAMED: self.fail("Unexpected argument to TypeVar()", context) return None if param_name == 'covariant': if isinstance(param_value, NameExpr): if param_value.name == 'True': covariant = True else: self.fail("TypeVar 'covariant' may only be 'True'", context) return None else: self.fail("TypeVar 'covariant' may only be 'True'", context) return None elif param_name == 'contravariant': if isinstance(param_value, NameExpr): if param_value.name == 'True': contravariant = True else: self.fail("TypeVar 'contravariant' may only be 'True'", context) return None else: self.fail("TypeVar 'contravariant' may only be 'True'", context) return None elif param_name == 'bound': if has_values: self.fail("TypeVar cannot have both values and an upper bound", context) return None try: # We want to use our custom error message below, so we suppress # the default error message for invalid types here. analyzed = self.expr_to_analyzed_type(param_value, allow_placeholder=True, report_invalid_types=False) if analyzed is None: # Type variables are special: we need to place them in the symbol table # soon, even if upper bound is not ready yet. Otherwise avoiding # a "deadlock" in this common pattern would be tricky: # T = TypeVar('T', bound=Custom[Any]) # class Custom(Generic[T]): # ... 
analyzed = PlaceholderType(None, [], context.line) upper_bound = get_proper_type(analyzed) if isinstance(upper_bound, AnyType) and upper_bound.is_from_error: self.fail("TypeVar 'bound' must be a type", param_value) # Note: we do not return 'None' here -- we want to continue # using the AnyType as the upper bound. except TypeTranslationError: self.fail("TypeVar 'bound' must be a type", param_value) return None elif param_name == 'values': # Probably using obsolete syntax with values=(...). Explain the current syntax. self.fail("TypeVar 'values' argument not supported", context) self.fail("Use TypeVar('T', t, ...) instead of TypeVar('T', values=(t, ...))", context) return None else: self.fail("Unexpected argument to TypeVar(): {}".format(param_name), context) return None if covariant and contravariant: self.fail("TypeVar cannot be both covariant and contravariant", context) return None elif num_values == 1: self.fail("TypeVar cannot have only a single constraint", context) return None elif covariant: variance = COVARIANT elif contravariant: variance = CONTRAVARIANT else: variance = INVARIANT return variance, upper_bound def basic_new_typeinfo(self, name: str, basetype_or_fallback: Instance) -> TypeInfo: class_def = ClassDef(name, Block([])) if self.is_func_scope() and not self.type: # Full names of generated classes should always be prefixed with the module names # even if they are nested in a function, since these classes will be (de-)serialized. # (Note that the caller should append @line to the name to avoid collisions.) # TODO: clean this up, see #6422. class_def.fullname = self.cur_mod_id + '.' + self.qualified_name(name) else: class_def.fullname = self.qualified_name(name) info = TypeInfo(SymbolTable(), class_def, self.cur_mod_id) class_def.info = info mro = basetype_or_fallback.type.mro if not mro: # Forward reference, MRO should be recalculated in third pass. 
mro = [basetype_or_fallback.type, self.object_type().type] info.mro = [info] + mro info.bases = [basetype_or_fallback] return info def analyze_value_types(self, items: List[Expression]) -> List[Type]: """Analyze types from values expressions in type variable definition.""" result = [] # type: List[Type] for node in items: try: analyzed = self.anal_type(expr_to_unanalyzed_type(node), allow_placeholder=True) if analyzed is None: # Type variables are special: we need to place them in the symbol table # soon, even if some value is not ready yet, see process_typevar_parameters() # for an example. analyzed = PlaceholderType(None, [], node.line) result.append(analyzed) except TypeTranslationError: self.fail('Type expected', node) result.append(AnyType(TypeOfAny.from_error)) return result def check_classvar(self, s: AssignmentStmt) -> None: """Check if assignment defines a class variable.""" lvalue = s.lvalues[0] if len(s.lvalues) != 1 or not isinstance(lvalue, RefExpr): return if not s.type or not self.is_classvar(s.type): return if self.is_class_scope() and isinstance(lvalue, NameExpr): node = lvalue.node if isinstance(node, Var): node.is_classvar = True elif not isinstance(lvalue, MemberExpr) or self.is_self_member_ref(lvalue): # In case of member access, report error only when assigning to self # Other kinds of member assignments should be already reported self.fail_invalid_classvar(lvalue) def is_classvar(self, typ: Type) -> bool: if not isinstance(typ, UnboundType): return False sym = self.lookup_qualified(typ.name, typ) if not sym or not sym.node: return False return sym.node.fullname == 'typing.ClassVar' def is_final_type(self, typ: Optional[Type]) -> bool: if not isinstance(typ, UnboundType): return False sym = self.lookup_qualified(typ.name, typ) if not sym or not sym.node: return False return sym.node.fullname in ('typing.Final', 'typing_extensions.Final') def fail_invalid_classvar(self, context: Context) -> None: self.fail('ClassVar can only be used for 
assignments in class body', context) def process_module_assignment(self, lvals: List[Lvalue], rval: Expression, ctx: AssignmentStmt) -> None: """Propagate module references across assignments. Recursively handles the simple form of iterable unpacking; doesn't handle advanced unpacking with *rest, dictionary unpacking, etc. In an expression like x = y = z, z is the rval and lvals will be [x, y]. """ if (isinstance(rval, (TupleExpr, ListExpr)) and all(isinstance(v, TupleExpr) for v in lvals)): # rval and all lvals are either list or tuple, so we are dealing # with unpacking assignment like `x, y = a, b`. Mypy didn't # understand our all(isinstance(...)), so cast them as TupleExpr # so mypy knows it is safe to access their .items attribute. seq_lvals = cast(List[TupleExpr], lvals) # given an assignment like: # (x, y) = (m, n) = (a, b) # we now have: # seq_lvals = [(x, y), (m, n)] # seq_rval = (a, b) # We now zip this into: # elementwise_assignments = [(a, x, m), (b, y, n)] # where each elementwise assignment includes one element of rval and the # corresponding element of each lval. Basically we unpack # (x, y) = (m, n) = (a, b) # into elementwise assignments # x = m = a # y = n = b # and then we recursively call this method for each of those assignments. # If the rval and all lvals are not all of the same length, zip will just ignore # extra elements, so no error will be raised here; mypy will later complain # about the length mismatch in type-checking. 
elementwise_assignments = zip(rval.items, *[v.items for v in seq_lvals]) for rv, *lvs in elementwise_assignments: self.process_module_assignment(lvs, rv, ctx) elif isinstance(rval, RefExpr): rnode = self.lookup_type_node(rval) if rnode and isinstance(rnode.node, MypyFile): for lval in lvals: if not isinstance(lval, NameExpr): continue # respect explicitly annotated type if (isinstance(lval.node, Var) and lval.node.type is not None): continue lnode = self.current_symbol_table().get(lval.name) if lnode: if isinstance(lnode.node, MypyFile) and lnode.node is not rnode.node: self.fail( "Cannot assign multiple modules to name '{}' " "without explicit 'types.ModuleType' annotation".format(lval.name), ctx) # never create module alias except on initial var definition elif lval.is_inferred_def: lnode.kind = self.current_symbol_kind() assert rnode.node is not None lnode.node = rnode.node def process__all__(self, s: AssignmentStmt) -> None: """Export names if argument is a __all__ assignment.""" if (len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr) and s.lvalues[0].name == '__all__' and s.lvalues[0].kind == GDEF and isinstance(s.rvalue, (ListExpr, TupleExpr))): self.add_exports(s.rvalue.items) # # Misc statements # def visit_block(self, b: Block) -> None: if b.is_unreachable: return self.block_depth[-1] += 1 for s in b.body: self.accept(s) self.block_depth[-1] -= 1 def visit_block_maybe(self, b: Optional[Block]) -> None: if b: self.visit_block(b) def visit_expression_stmt(self, s: ExpressionStmt) -> None: self.statement = s s.expr.accept(self) def visit_return_stmt(self, s: ReturnStmt) -> None: self.statement = s if not self.is_func_scope(): self.fail("'return' outside function", s) if s.expr: s.expr.accept(self) def visit_raise_stmt(self, s: RaiseStmt) -> None: self.statement = s if s.expr: s.expr.accept(self) if s.from_expr: s.from_expr.accept(self) def visit_assert_stmt(self, s: AssertStmt) -> None: self.statement = s if s.expr: s.expr.accept(self) if s.msg: 
s.msg.accept(self) def visit_operator_assignment_stmt(self, s: OperatorAssignmentStmt) -> None: self.statement = s s.lvalue.accept(self) s.rvalue.accept(self) if (isinstance(s.lvalue, NameExpr) and s.lvalue.name == '__all__' and s.lvalue.kind == GDEF and isinstance(s.rvalue, (ListExpr, TupleExpr))): self.add_exports(s.rvalue.items) def visit_while_stmt(self, s: WhileStmt) -> None: self.statement = s s.expr.accept(self) self.loop_depth += 1 s.body.accept(self) self.loop_depth -= 1 self.visit_block_maybe(s.else_body) def visit_for_stmt(self, s: ForStmt) -> None: self.statement = s s.expr.accept(self) # Bind index variables and check if they define new names. self.analyze_lvalue(s.index, explicit_type=s.index_type is not None) if s.index_type: if self.is_classvar(s.index_type): self.fail_invalid_classvar(s.index) allow_tuple_literal = isinstance(s.index, TupleExpr) analyzed = self.anal_type(s.index_type, allow_tuple_literal=allow_tuple_literal) if analyzed is not None: self.store_declared_types(s.index, analyzed) s.index_type = analyzed self.loop_depth += 1 self.visit_block(s.body) self.loop_depth -= 1 self.visit_block_maybe(s.else_body) def visit_break_stmt(self, s: BreakStmt) -> None: self.statement = s if self.loop_depth == 0: self.fail("'break' outside loop", s, serious=True, blocker=True) def visit_continue_stmt(self, s: ContinueStmt) -> None: self.statement = s if self.loop_depth == 0: self.fail("'continue' outside loop", s, serious=True, blocker=True) def visit_if_stmt(self, s: IfStmt) -> None: self.statement = s infer_reachability_of_if_statement(s, self.options) for i in range(len(s.expr)): s.expr[i].accept(self) self.visit_block(s.body[i]) self.visit_block_maybe(s.else_body) def visit_try_stmt(self, s: TryStmt) -> None: self.statement = s self.analyze_try_stmt(s, self) def analyze_try_stmt(self, s: TryStmt, visitor: NodeVisitor[None]) -> None: s.body.accept(visitor) for type, var, handler in zip(s.types, s.vars, s.handlers): if type: type.accept(visitor) if 
var: self.analyze_lvalue(var) handler.accept(visitor) if s.else_body: s.else_body.accept(visitor) if s.finally_body: s.finally_body.accept(visitor) def visit_with_stmt(self, s: WithStmt) -> None: self.statement = s types = [] # type: List[Type] if s.unanalyzed_type: assert isinstance(s.unanalyzed_type, ProperType) actual_targets = [t for t in s.target if t is not None] if len(actual_targets) == 0: # We have a type for no targets self.fail('Invalid type comment: "with" statement has no targets', s) elif len(actual_targets) == 1: # We have one target and one type types = [s.unanalyzed_type] elif isinstance(s.unanalyzed_type, TupleType): # We have multiple targets and multiple types if len(actual_targets) == len(s.unanalyzed_type.items): types = s.unanalyzed_type.items.copy() else: # But it's the wrong number of items self.fail('Incompatible number of types for "with" targets', s) else: # We have multiple targets and one type self.fail('Multiple types expected for multiple "with" targets', s) new_types = [] # type: List[Type] for e, n in zip(s.expr, s.target): e.accept(self) if n: self.analyze_lvalue(n, explicit_type=s.unanalyzed_type is not None) # Since we have a target, pop the next type from types if types: t = types.pop(0) if self.is_classvar(t): self.fail_invalid_classvar(n) allow_tuple_literal = isinstance(n, TupleExpr) analyzed = self.anal_type(t, allow_tuple_literal=allow_tuple_literal) if analyzed is not None: # TODO: Deal with this better new_types.append(analyzed) self.store_declared_types(n, analyzed) s.analyzed_types = new_types self.visit_block(s.body) def visit_del_stmt(self, s: DelStmt) -> None: self.statement = s s.expr.accept(self) if not self.is_valid_del_target(s.expr): self.fail('Invalid delete target', s) def is_valid_del_target(self, s: Expression) -> bool: if isinstance(s, (IndexExpr, NameExpr, MemberExpr)): return True elif isinstance(s, (TupleExpr, ListExpr)): return all(self.is_valid_del_target(item) for item in s.items) else: return False 
    def visit_global_decl(self, g: GlobalDecl) -> None:
        self.statement = g
        for name in g.names:
            if name in self.nonlocal_decls[-1]:
                self.fail("Name '{}' is nonlocal and global".format(name), g)
            self.global_decls[-1].add(name)

    def visit_nonlocal_decl(self, d: NonlocalDecl) -> None:
        self.statement = d
        if not self.is_func_scope():
            self.fail("nonlocal declaration not allowed at module level", d)
        else:
            for name in d.names:
                # Look for a binding in any enclosing (non-innermost) local scope.
                for table in reversed(self.locals[:-1]):
                    if table is not None and name in table:
                        break
                else:
                    self.fail("No binding for nonlocal '{}' found".format(name), d)

                if self.locals[-1] is not None and name in self.locals[-1]:
                    self.fail("Name '{}' is already defined in local "
                              "scope before nonlocal declaration".format(name), d)

                if name in self.global_decls[-1]:
                    self.fail("Name '{}' is nonlocal and global".format(name), d)
                self.nonlocal_decls[-1].add(name)

    def visit_print_stmt(self, s: PrintStmt) -> None:
        self.statement = s
        for arg in s.args:
            arg.accept(self)
        if s.target:
            s.target.accept(self)

    def visit_exec_stmt(self, s: ExecStmt) -> None:
        self.statement = s
        s.expr.accept(self)
        if s.globals:
            s.globals.accept(self)
        if s.locals:
            s.locals.accept(self)

    #
    # Expressions
    #

    def visit_name_expr(self, expr: NameExpr) -> None:
        n = self.lookup(expr.name, expr)
        if n:
            self.bind_name_expr(expr, n)

    def bind_name_expr(self, expr: NameExpr, sym: SymbolTableNode) -> None:
        """Bind name expression to a symbol table node."""
        if isinstance(sym.node, TypeVarExpr) and self.tvar_scope.get_binding(sym):
            self.fail("'{}' is a type variable and only valid in type "
                      "context".format(expr.name), expr)
        elif isinstance(sym.node, PlaceholderNode):
            self.process_placeholder(expr.name, 'name', expr)
        else:
            expr.kind = sym.kind
            expr.node = sym.node
            expr.fullname = sym.fullname

    def visit_super_expr(self, expr: SuperExpr) -> None:
        if not self.type and not expr.call.args:
            self.fail('"super" used outside class', expr)
            return
        expr.info = self.type
        for arg in expr.call.args:
            arg.accept(self)

    def visit_tuple_expr(self, expr: TupleExpr) -> None:
        for item in expr.items:
            if isinstance(item, StarExpr):
                item.valid = True
            item.accept(self)

    def visit_list_expr(self, expr: ListExpr) -> None:
        for item in expr.items:
            if isinstance(item, StarExpr):
                item.valid = True
            item.accept(self)

    def visit_set_expr(self, expr: SetExpr) -> None:
        for item in expr.items:
            if isinstance(item, StarExpr):
                item.valid = True
            item.accept(self)

    def visit_dict_expr(self, expr: DictExpr) -> None:
        for key, value in expr.items:
            if key is not None:
                key.accept(self)
            value.accept(self)

    def visit_star_expr(self, expr: StarExpr) -> None:
        if not expr.valid:
            # XXX TODO Change this error message
            self.fail('Can use starred expression only as assignment target', expr)
        else:
            expr.expr.accept(self)

    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
        if not self.is_func_scope():  # not sure
            self.fail("'yield from' outside function", e, serious=True, blocker=True)
        else:
            if self.function_stack[-1].is_coroutine:
                self.fail("'yield from' in async function", e, serious=True, blocker=True)
            else:
                self.function_stack[-1].is_generator = True
        if e.expr:
            e.expr.accept(self)

    def visit_call_expr(self, expr: CallExpr) -> None:
        """Analyze a call expression.

        Some call expressions are recognized as special forms, including
        cast(...).
        """
        expr.callee.accept(self)
        if refers_to_fullname(expr.callee, 'typing.cast'):
            # Special form cast(...).
            if not self.check_fixed_args(expr, 2, 'cast'):
                return
            # Translate first argument to an unanalyzed type.
            try:
                target = expr_to_unanalyzed_type(expr.args[0])
            except TypeTranslationError:
                self.fail('Cast target is not a type', expr)
                return
            # Piggyback CastExpr object to the CallExpr object; it takes
            # precedence over the CallExpr semantics.
            expr.analyzed = CastExpr(expr.args[1], target)
            expr.analyzed.line = expr.line
            expr.analyzed.column = expr.column
            expr.analyzed.accept(self)
        elif refers_to_fullname(expr.callee, 'builtins.reveal_type'):
            if not self.check_fixed_args(expr, 1, 'reveal_type'):
                return
            expr.analyzed = RevealExpr(kind=REVEAL_TYPE, expr=expr.args[0])
            expr.analyzed.line = expr.line
            expr.analyzed.column = expr.column
            expr.analyzed.accept(self)
        elif refers_to_fullname(expr.callee, 'builtins.reveal_locals'):
            # Store the local variable names into the RevealExpr for use in the
            # type checking pass
            local_nodes = []  # type: List[Var]
            if self.is_module_scope():
                # try to determine just the variable declarations in module scope
                # self.globals.values() contains SymbolTableNode's
                # Each SymbolTableNode has an attribute node that is nodes.Var
                # look for variable nodes that marked as is_inferred
                # Each symboltable node has a Var node as .node
                local_nodes = [n.node
                               for name, n in self.globals.items()
                               if getattr(n.node, 'is_inferred', False)
                               and isinstance(n.node, Var)]
            elif self.is_class_scope():
                # type = None  # type: Optional[TypeInfo]
                if self.type is not None:
                    local_nodes = [st.node
                                   for st in self.type.names.values()
                                   if isinstance(st.node, Var)]
            elif self.is_func_scope():
                # locals = None  # type: List[Optional[SymbolTable]]
                if self.locals is not None:
                    symbol_table = self.locals[-1]
                    if symbol_table is not None:
                        local_nodes = [st.node
                                       for st in symbol_table.values()
                                       if isinstance(st.node, Var)]
            expr.analyzed = RevealExpr(kind=REVEAL_LOCALS, local_nodes=local_nodes)
            expr.analyzed.line = expr.line
            expr.analyzed.column = expr.column
            expr.analyzed.accept(self)
        elif refers_to_fullname(expr.callee, 'typing.Any'):
            # Special form Any(...) no longer supported.
            self.fail('Any(...) is no longer supported. Use cast(Any, ...) instead', expr)
        elif refers_to_fullname(expr.callee, 'typing._promote'):
            # Special form _promote(...).
            if not self.check_fixed_args(expr, 1, '_promote'):
                return
            # Translate first argument to an unanalyzed type.
            try:
                target = expr_to_unanalyzed_type(expr.args[0])
            except TypeTranslationError:
                self.fail('Argument 1 to _promote is not a type', expr)
                return
            expr.analyzed = PromoteExpr(target)
            expr.analyzed.line = expr.line
            expr.analyzed.accept(self)
        elif refers_to_fullname(expr.callee, 'builtins.dict'):
            expr.analyzed = self.translate_dict_call(expr)
        elif refers_to_fullname(expr.callee, 'builtins.divmod'):
            if not self.check_fixed_args(expr, 2, 'divmod'):
                return
            expr.analyzed = OpExpr('divmod', expr.args[0], expr.args[1])
            expr.analyzed.line = expr.line
            expr.analyzed.accept(self)
        else:
            # Normal call expression.
            for a in expr.args:
                a.accept(self)

            if (isinstance(expr.callee, MemberExpr) and
                    isinstance(expr.callee.expr, NameExpr) and
                    expr.callee.expr.name == '__all__' and
                    expr.callee.expr.kind == GDEF and
                    expr.callee.name in ('append', 'extend')):
                if expr.callee.name == 'append' and expr.args:
                    self.add_exports(expr.args[0])
                elif (expr.callee.name == 'extend' and expr.args and
                        isinstance(expr.args[0], (ListExpr, TupleExpr))):
                    self.add_exports(expr.args[0].items)

    def translate_dict_call(self, call: CallExpr) -> Optional[DictExpr]:
        """Translate 'dict(x=y, ...)' to {'x': y, ...}.

        For other variants of dict(...), return None.
        """
        if not call.args:
            return None
        if not all(kind == ARG_NAMED for kind in call.arg_kinds):
            # Must still accept those args.
            for a in call.args:
                a.accept(self)
            return None
        expr = DictExpr([(StrExpr(cast(str, key)), value)  # since they are all ARG_NAMED
                         for key, value in zip(call.arg_names, call.args)])
        expr.set_line(call)
        expr.accept(self)
        return expr

    def check_fixed_args(self, expr: CallExpr, numargs: int,
                         name: str) -> bool:
        """Verify that expr has specified number of positional args.

        Return True if the arguments are valid.
        """
        s = 's'
        if numargs == 1:
            s = ''
        if len(expr.args) != numargs:
            self.fail("'%s' expects %d argument%s" % (name, numargs, s),
                      expr)
            return False
        if expr.arg_kinds != [ARG_POS] * numargs:
            self.fail("'%s' must be called with %s positional argument%s" %
                      (name, numargs, s), expr)
            return False
        return True

    def visit_member_expr(self, expr: MemberExpr) -> None:
        base = expr.expr
        base.accept(self)
        if isinstance(base, RefExpr) and isinstance(base.node, MypyFile):
            # Handle module attribute.
            sym = self.get_module_symbol(base.node, expr.name)
            if sym:
                if isinstance(sym.node, PlaceholderNode):
                    self.process_placeholder(expr.name, 'attribute', expr)
                    return
                expr.kind = sym.kind
                expr.fullname = sym.fullname
                expr.node = sym.node
        elif isinstance(base, RefExpr):
            # This branch handles the case C.bar (or cls.bar or self.bar inside
            # a classmethod/method), where C is a class and bar is a type
            # definition or a module resulting from `import bar` (or a module
            # assignment) inside class C. We look up bar in the class' TypeInfo
            # namespace.  This is done only when bar is a module or a type;
            # other things (e.g. methods) are handled by other code in
            # checkmember.
            type_info = None
            if isinstance(base.node, TypeInfo):
                # C.bar where C is a class
                type_info = base.node
            elif isinstance(base.node, Var) and self.type and self.function_stack:
                # check for self.bar or cls.bar in method/classmethod
                func_def = self.function_stack[-1]
                if not func_def.is_static and isinstance(func_def.type, CallableType):
                    formal_arg = func_def.type.argument_by_name(base.node.name)
                    if formal_arg and formal_arg.pos == 0:
                        type_info = self.type
            elif isinstance(base.node, TypeAlias) and base.node.no_args:
                assert isinstance(base.node.target, ProperType)
                if isinstance(base.node.target, Instance):
                    type_info = base.node.target.type
            if type_info:
                n = type_info.names.get(expr.name)
                if n is not None and isinstance(n.node, (MypyFile, TypeInfo, TypeAlias)):
                    # NOTE(review): `if not n` below is always false here (n was
                    # just checked to be not None), so this early return looks
                    # dead -- confirm before removing.
                    if not n:
                        return
                    expr.kind = n.kind
                    expr.fullname = n.fullname
                    expr.node = n.node

    def visit_op_expr(self, expr: OpExpr) -> None:
        expr.left.accept(self)

        if expr.op in ('and', 'or'):
            inferred = infer_condition_value(expr.left, self.options)
            if ((inferred in (ALWAYS_FALSE, MYPY_FALSE) and expr.op == 'and') or
                    (inferred in (ALWAYS_TRUE, MYPY_TRUE) and expr.op == 'or')):
                expr.right_unreachable = True
                return
            elif ((inferred in (ALWAYS_TRUE, MYPY_TRUE) and expr.op == 'and') or
                    (inferred in (ALWAYS_FALSE, MYPY_FALSE) and expr.op == 'or')):
                expr.right_always = True

        expr.right.accept(self)

    def visit_comparison_expr(self, expr: ComparisonExpr) -> None:
        for operand in expr.operands:
            operand.accept(self)

    def visit_unary_expr(self, expr: UnaryExpr) -> None:
        expr.expr.accept(self)

    def visit_index_expr(self, expr: IndexExpr) -> None:
        base = expr.base
        base.accept(self)
        if (isinstance(base, RefExpr)
                and isinstance(base.node, TypeInfo)
                and not base.node.is_generic()):
            expr.index.accept(self)
        elif ((isinstance(base, RefExpr) and isinstance(base.node, TypeAlias))
                or refers_to_class_or_function(base)):
            # We need to do full processing on every iteration, since some type
            # arguments may contain placeholder types.
            self.analyze_type_application(expr)
        else:
            expr.index.accept(self)

    def analyze_type_application(self, expr: IndexExpr) -> None:
        """Analyze special form -- type application (either direct or via type aliasing)."""
        types = self.analyze_type_application_args(expr)
        if types is None:
            return
        base = expr.base
        expr.analyzed = TypeApplication(base, types)
        expr.analyzed.line = expr.line
        expr.analyzed.column = expr.column
        # Types list, dict, set are not subscriptable, prohibit this if
        # subscripted either via type alias...
        if isinstance(base, RefExpr) and isinstance(base.node, TypeAlias):
            alias = base.node
            target = get_proper_type(alias.target)
            if isinstance(target, Instance):
                name = target.type.fullname
                if (alias.no_args and  # this avoids bogus errors for already reported aliases
                        name in nongen_builtins and not alias.normalized):
                    self.fail(no_subscript_builtin_alias(name, propose_alt=False), expr)
        # ...or directly.
        else:
            n = self.lookup_type_node(base)
            if n and n.fullname in nongen_builtins:
                self.fail(no_subscript_builtin_alias(n.fullname, propose_alt=False), expr)

    def analyze_type_application_args(self, expr: IndexExpr) -> Optional[List[Type]]:
        """Analyze type arguments (index) in a type application.

        Return None if anything was incomplete.
        """
        index = expr.index
        tag = self.track_incomplete_refs()
        self.analyze_type_expr(index)
        if self.found_incomplete_ref(tag):
            return None
        types = []  # type: List[Type]
        if isinstance(index, TupleExpr):
            items = index.items
        else:
            items = [index]
        for item in items:
            try:
                typearg = expr_to_unanalyzed_type(item)
            except TypeTranslationError:
                self.fail('Type expected within [...]', expr)
                return None
            # We always allow unbound type variables in IndexExpr, since we
            # may be analysing a type alias definition rvalue. The error will be
            # reported elsewhere if it is not the case.
            analyzed = self.anal_type(typearg, allow_unbound_tvars=True,
                                      allow_placeholder=True)
            if analyzed is None:
                return None
            types.append(analyzed)
        return types

    def visit_slice_expr(self, expr: SliceExpr) -> None:
        if expr.begin_index:
            expr.begin_index.accept(self)
        if expr.end_index:
            expr.end_index.accept(self)
        if expr.stride:
            expr.stride.accept(self)

    def visit_cast_expr(self, expr: CastExpr) -> None:
        expr.expr.accept(self)
        analyzed = self.anal_type(expr.type)
        if analyzed is not None:
            expr.type = analyzed

    def visit_reveal_expr(self, expr: RevealExpr) -> None:
        if expr.kind == REVEAL_TYPE:
            if expr.expr is not None:
                expr.expr.accept(self)
        else:
            # Reveal locals doesn't have an inner expression, there's no
            # need to traverse inside it
            pass

    def visit_type_application(self, expr: TypeApplication) -> None:
        expr.expr.accept(self)
        for i in range(len(expr.types)):
            analyzed = self.anal_type(expr.types[i])
            if analyzed is not None:
                expr.types[i] = analyzed

    def visit_list_comprehension(self, expr: ListComprehension) -> None:
        expr.generator.accept(self)

    def visit_set_comprehension(self, expr: SetComprehension) -> None:
        expr.generator.accept(self)

    def visit_dictionary_comprehension(self, expr: DictionaryComprehension) -> None:
        self.enter(expr)
        self.analyze_comp_for(expr)
        expr.key.accept(self)
        expr.value.accept(self)
        self.leave()
        self.analyze_comp_for_2(expr)

    def visit_generator_expr(self, expr: GeneratorExpr) -> None:
        self.enter(expr)
        self.analyze_comp_for(expr)
        expr.left_expr.accept(self)
        self.leave()
        self.analyze_comp_for_2(expr)

    def analyze_comp_for(self, expr: Union[GeneratorExpr,
                                           DictionaryComprehension]) -> None:
        """Analyses the 'comp_for' part of comprehensions (part 1).

        That is the part after 'for' in (x for x in l if p). This analyzes
        variables and conditions which are analyzed in a local scope.
        """
        for i, (index, sequence, conditions) in enumerate(zip(expr.indices,
                                                              expr.sequences,
                                                              expr.condlists)):
            if i > 0:
                sequence.accept(self)
            # Bind index variables.
            self.analyze_lvalue(index)
            for cond in conditions:
                cond.accept(self)

    def analyze_comp_for_2(self, expr: Union[GeneratorExpr,
                                             DictionaryComprehension]) -> None:
        """Analyses the 'comp_for' part of comprehensions (part 2).

        That is the part after 'for' in (x for x in l if p). This analyzes
        the 'l' part which is analyzed in the surrounding scope.
        """
        expr.sequences[0].accept(self)

    def visit_lambda_expr(self, expr: LambdaExpr) -> None:
        self.analyze_arg_initializers(expr)
        self.analyze_function_body(expr)

    def visit_conditional_expr(self, expr: ConditionalExpr) -> None:
        expr.if_expr.accept(self)
        expr.cond.accept(self)
        expr.else_expr.accept(self)

    def visit_backquote_expr(self, expr: BackquoteExpr) -> None:
        expr.expr.accept(self)

    def visit__promote_expr(self, expr: PromoteExpr) -> None:
        analyzed = self.anal_type(expr.type)
        if analyzed is not None:
            expr.type = analyzed

    def visit_yield_expr(self, expr: YieldExpr) -> None:
        if not self.is_func_scope():
            self.fail("'yield' outside function", expr, serious=True, blocker=True)
        else:
            if self.function_stack[-1].is_coroutine:
                if self.options.python_version < (3, 6):
                    self.fail("'yield' in async function", expr, serious=True, blocker=True)
                else:
                    self.function_stack[-1].is_generator = True
                    self.function_stack[-1].is_async_generator = True
            else:
                self.function_stack[-1].is_generator = True
        if expr.expr:
            expr.expr.accept(self)

    def visit_await_expr(self, expr: AwaitExpr) -> None:
        if not self.is_func_scope():
            self.fail("'await' outside function", expr)
        elif not self.function_stack[-1].is_coroutine:
            self.fail("'await' outside coroutine ('async def')", expr)
        expr.expr.accept(self)

    #
    # Lookup functions
    #

    def lookup(self, name: str, ctx: Context,
               suppress_errors: bool = False) -> Optional[SymbolTableNode]:
        """Look up an unqualified (no dots) name in all active namespaces.

        Note that the result may contain a PlaceholderNode. The caller may
        want to defer in that case.

        Generate an error if the name is not defined unless suppress_errors
        is true or the current namespace is incomplete. In the latter case
        defer.
        """
        implicit_name = False
        # 1a. Name declared using 'global x' takes precedence
        if name in self.global_decls[-1]:
            if name in self.globals:
                return self.globals[name]
            if not suppress_errors:
                self.name_not_defined(name, ctx)
            return None
        # 1b. Name declared using 'nonlocal x' takes precedence
        if name in self.nonlocal_decls[-1]:
            for table in reversed(self.locals[:-1]):
                if table is not None and name in table:
                    return table[name]
            else:
                if not suppress_errors:
                    self.name_not_defined(name, ctx)
                return None
        # 2. Class attributes (if within class definition)
        if self.type and not self.is_func_scope() and name in self.type.names:
            node = self.type.names[name]
            if not node.implicit:
                if self.is_active_symbol_in_class_body(node.node):
                    return node
            else:
                # Defined through self.x assignment
                implicit_name = True
                implicit_node = node
        # 3. Local (function) scopes
        for table in reversed(self.locals):
            if table is not None and name in table:
                return table[name]
        # 4. Current file global scope
        if name in self.globals:
            return self.globals[name]
        # 5. Builtins
        b = self.globals.get('__builtins__', None)
        if b:
            assert isinstance(b.node, MypyFile)
            table = b.node.names
            if name in table:
                # Names with a single leading underscore are builtins
                # internals, not accessible from user code.
                if name[0] == "_" and name[1] != "_":
                    if not suppress_errors:
                        self.name_not_defined(name, ctx)
                    return None
                node = table[name]
                return node
        # Give up.
        if not implicit_name and not suppress_errors:
            self.name_not_defined(name, ctx)
        else:
            if implicit_name:
                return implicit_node
        return None

    def is_active_symbol_in_class_body(self, node: Optional[SymbolNode]) -> bool:
        """Can a symbol defined in class body be accessed at current statement?

        Only allow access to class attributes textually after
        the definition, so that it's possible to fall back to the
        outer scope. Example:

            class X: ...

            class C:
                X = X  # Initializer refers to outer scope

        Nested classes are an exception, since we want to support
        arbitrary forward references in type annotations.
        """
        # TODO: Forward reference to name imported in class body is not
        #       caught.
        assert self.statement  # we are at class scope
        return (node is None
                or node.line < self.statement.line
                or not self.is_defined_in_current_module(node.fullname)
                or isinstance(node, TypeInfo)
                or (isinstance(node, PlaceholderNode) and node.becomes_typeinfo))

    def is_defined_in_current_module(self, fullname: Optional[str]) -> bool:
        if fullname is None:
            return False
        return module_prefix(self.modules, fullname) == self.cur_mod_id

    def lookup_qualified(self, name: str, ctx: Context,
                         suppress_errors: bool = False) -> Optional[SymbolTableNode]:
        """Lookup a qualified name in all active namespaces.

        Note that the result may contain a PlaceholderNode. The caller may
        want to defer in that case.

        Generate an error if the name is not defined unless suppress_errors
        is true or the current namespace is incomplete. In the latter case
        defer.
        """
        if '.' not in name:
            # Simple case: look up a short name.
            return self.lookup(name, ctx, suppress_errors=suppress_errors)
        parts = name.split('.')
        namespace = self.cur_mod_id
        sym = self.lookup(parts[0], ctx, suppress_errors=suppress_errors)
        if sym:
            for i in range(1, len(parts)):
                node = sym.node
                part = parts[i]
                if isinstance(node, TypeInfo):
                    nextsym = node.get(part)
                elif isinstance(node, MypyFile):
                    nextsym = self.get_module_symbol(node, part)
                    namespace = node.fullname
                elif isinstance(node, PlaceholderNode):
                    return sym
                elif isinstance(node, TypeAlias) and node.no_args:
                    # NOTE(review): if the alias target is not an Instance,
                    # `nextsym` is left unbound on the first iteration and the
                    # reference below would raise UnboundLocalError -- confirm
                    # whether this path is reachable.
                    assert isinstance(node.target, ProperType)
                    if isinstance(node.target, Instance):
                        nextsym = node.target.type.get(part)
                else:
                    if isinstance(node, Var):
                        typ = get_proper_type(node.type)
                        if isinstance(typ, AnyType):
                            # Allow access through Var with Any type without error.
                            return self.implicit_symbol(sym, name, parts[i:], typ)
                    # Lookup through invalid node, such as variable or function
                    nextsym = None
                if not nextsym or nextsym.module_hidden:
                    if not suppress_errors:
                        self.name_not_defined(name, ctx, namespace=namespace)
                    return None
                sym = nextsym
        return sym

    def lookup_type_node(self, expr: Expression) -> Optional[SymbolTableNode]:
        """Resolve an expression to the symbol of the type it names, if any."""
        try:
            t = expr_to_unanalyzed_type(expr)
        except TypeTranslationError:
            return None
        if isinstance(t, UnboundType):
            n = self.lookup_qualified(t.name, expr, suppress_errors=True)
            return n
        return None

    def get_module_symbol(self, node: MypyFile, name: str) -> Optional[SymbolTableNode]:
        """Look up a symbol from a module.

        Return None if no matching symbol could be bound.
        """
        module = node.fullname
        names = node.names
        sym = names.get(name)
        if not sym:
            fullname = module + '.' + name
            if fullname in self.modules:
                sym = SymbolTableNode(GDEF, self.modules[fullname])
            elif self.is_incomplete_namespace(module):
                self.record_incomplete_ref()
            elif ('__getattr__' in names
                    and (node.is_stub
                         or self.options.python_version >= (3, 7))):
                gvar = self.create_getattr_var(names['__getattr__'], name, fullname)
                if gvar:
                    sym = SymbolTableNode(GDEF, gvar)
            elif self.is_missing_module(fullname):
                # We use the fullname of the original definition so that we can
                # detect whether two names refer to the same thing.
                var_type = AnyType(TypeOfAny.from_unimported_type)
                v = Var(name, type=var_type)
                v._fullname = fullname
                sym = SymbolTableNode(GDEF, v)
        elif sym.module_hidden:
            sym = None
        return sym

    def is_missing_module(self, module: str) -> bool:
        return module in self.missing_modules

    # NOTE(review): implicit_symbol() below continues past the end of this
    # chunk; only its visible head is reproduced here.
    def implicit_symbol(self, sym: SymbolTableNode, name: str, parts: List[str],
                        source_type: AnyType) -> SymbolTableNode:
        """Create symbol for a qualified name reference through Any type."""
        if sym.node is None:
            basename = None
        else:
            basename = sym.node.fullname
        if basename is None:
            fullname = name
        else:
            fullname = basename + '.'
+ '.'.join(parts) var_type = AnyType(TypeOfAny.from_another_any, source_type) var = Var(parts[-1], var_type) var._fullname = fullname return SymbolTableNode(GDEF, var) def create_getattr_var(self, getattr_defn: SymbolTableNode, name: str, fullname: str) -> Optional[Var]: """Create a dummy variable using module-level __getattr__ return type. If not possible, return None. Note that multiple Var nodes can be created for a single name. We can use the from_module_getattr and the fullname attributes to check if two dummy Var nodes refer to the same thing. Reusing Var nodes would require non-local mutable state, which we prefer to avoid. """ if isinstance(getattr_defn.node, (FuncDef, Var)): node_type = get_proper_type(getattr_defn.node.type) if isinstance(node_type, CallableType): typ = node_type.ret_type else: typ = AnyType(TypeOfAny.from_error) v = Var(name, type=typ) v._fullname = fullname v.from_module_getattr = True return v return None def lookup_fully_qualified(self, name: str) -> SymbolTableNode: """Lookup a fully qualified name. Assume that the name is defined. This happens in the global namespace -- the local module namespace is ignored. Note that this doesn't support visibility, module-level __getattr__, or nested classes. """ parts = name.split('.') n = self.modules[parts[0]] for i in range(1, len(parts) - 1): next_sym = n.names[parts[i]] assert isinstance(next_sym.node, MypyFile) n = next_sym.node return n.names[parts[-1]] def lookup_fully_qualified_or_none(self, fullname: str) -> Optional[SymbolTableNode]: """Lookup a fully qualified name that refers to a module-level definition. Don't assume that the name is defined. This happens in the global namespace -- the local module namespace is ignored. This does not dereference indirect refs. Note that this can't be used for names nested in class namespaces. """ # TODO: unify/clean-up/simplify lookup methods, see #4157. 
# TODO: support nested classes (but consider performance impact, # we might keep the module level only lookup for thing like 'builtins.int'). assert '.' in fullname module, name = fullname.rsplit('.', maxsplit=1) if module not in self.modules: return None filenode = self.modules[module] result = filenode.names.get(name) if result is None and self.is_incomplete_namespace(module): # TODO: More explicit handling of incomplete refs? self.record_incomplete_ref() return result def builtin_type(self, fully_qualified_name: str) -> Instance: sym = self.lookup_fully_qualified(fully_qualified_name) node = sym.node assert isinstance(node, TypeInfo) return Instance(node, [AnyType(TypeOfAny.special_form)] * len(node.defn.type_vars)) def object_type(self) -> Instance: return self.named_type('__builtins__.object') def str_type(self) -> Instance: return self.named_type('__builtins__.str') def named_type(self, qualified_name: str, args: Optional[List[Type]] = None) -> Instance: sym = self.lookup_qualified(qualified_name, Context()) assert sym, "Internal error: attempted to construct unknown type" node = sym.node assert isinstance(node, TypeInfo) if args: # TODO: assert len(args) == len(node.defn.type_vars) return Instance(node, args) return Instance(node, [AnyType(TypeOfAny.special_form)] * len(node.defn.type_vars)) def named_type_or_none(self, qualified_name: str, args: Optional[List[Type]] = None) -> Optional[Instance]: sym = self.lookup_fully_qualified_or_none(qualified_name) if not sym or isinstance(sym.node, PlaceholderNode): return None node = sym.node if isinstance(node, TypeAlias): assert isinstance(node.target, Instance) # type: ignore node = node.target.type assert isinstance(node, TypeInfo), node if args is not None: # TODO: assert len(args) == len(node.defn.type_vars) return Instance(node, args) return Instance(node, [AnyType(TypeOfAny.unannotated)] * len(node.defn.type_vars)) def lookup_current_scope(self, name: str) -> Optional[SymbolTableNode]: if self.locals[-1] is 
not None: return self.locals[-1].get(name) elif self.type is not None: return self.type.names.get(name) else: return self.globals.get(name) # # Adding symbols # def add_symbol(self, name: str, node: SymbolNode, context: Context, module_public: bool = True, module_hidden: bool = False, can_defer: bool = True, escape_comprehensions: bool = False) -> bool: """Add symbol to the currently active symbol table. Generally additions to symbol table should go through this method or one of the methods below so that kinds, redefinitions, conditional definitions, and skipped names are handled consistently. Return True if we actually added the symbol, or False if we refused to do so (because something is not ready). If can_defer is True, defer current target if adding a placeholder. """ if self.is_func_scope(): kind = LDEF elif self.type is not None: kind = MDEF else: kind = GDEF symbol = SymbolTableNode(kind, node, module_public=module_public, module_hidden=module_hidden) return self.add_symbol_table_node(name, symbol, context, can_defer, escape_comprehensions) def add_symbol_skip_local(self, name: str, node: SymbolNode) -> None: """Same as above, but skipping the local namespace. This doesn't check for previous definition and is only used for serialization of method-level classes. Classes defined within methods can be exposed through an attribute type, but method-level symbol tables aren't serialized. This method can be used to add such classes to an enclosing, serialized symbol table. """ # TODO: currently this is only used by named tuples. Use this method # also by typed dicts and normal classes, see issue #6422. 
if self.type is not None: names = self.type.names kind = MDEF else: names = self.globals kind = GDEF symbol = SymbolTableNode(kind, node) names[name] = symbol def add_symbol_table_node(self, name: str, symbol: SymbolTableNode, context: Optional[Context] = None, can_defer: bool = True, escape_comprehensions: bool = False) -> bool: """Add symbol table node to the currently active symbol table. Return True if we actually added the symbol, or False if we refused to do so (because something is not ready or it was a no-op). Generate an error if there is an invalid redefinition. If context is None, unconditionally add node, since we can't report an error. Note that this is used by plugins to forcibly replace nodes! TODO: Prevent plugins from replacing nodes, as it could cause problems? Args: name: short name of symbol symbol: Node to add can_defer: if True, defer current target if adding a placeholder context: error context (see above about None value) """ names = self.current_symbol_table(escape_comprehensions=escape_comprehensions) existing = names.get(name) if isinstance(symbol.node, PlaceholderNode) and can_defer: self.defer(context) if (existing is not None and context is not None and not is_valid_replacement(existing, symbol)): # There is an existing node, so this may be a redefinition. # If the new node points to the same node as the old one, # or if both old and new nodes are placeholders, we don't # need to do anything. old = existing.node new = symbol.node if isinstance(new, PlaceholderNode): # We don't know whether this is okay. Let's wait until the next iteration. 
return False if not is_same_symbol(old, new): if isinstance(new, (FuncDef, Decorator, OverloadedFuncDef, TypeInfo)): self.add_redefinition(names, name, symbol) if not (isinstance(new, (FuncDef, Decorator)) and self.set_original_def(old, new)): self.name_already_defined(name, context, existing) elif name not in self.missing_names and '*' not in self.missing_names: names[name] = symbol self.progress = True return True return False def add_redefinition(self, names: SymbolTable, name: str, symbol: SymbolTableNode) -> None: """Add a symbol table node that reflects a redefinition as a function or a class. Redefinitions need to be added to the symbol table so that they can be found through AST traversal, but they have dummy names of form 'name-redefinition[N]', where N ranges over 2, 3, ... (omitted for the first redefinition). Note: we always store redefinitions independently of whether they are valid or not (so they will be semantically analyzed), the caller should give an error for invalid redefinitions (such as e.g. variable redefined as a class). """ i = 1 # Don't serialize redefined nodes. They are likely to have # busted internal references which can cause problems with # serialization and they can't have any external references to # them. 
symbol.no_serialize = True while True: if i == 1: new_name = '{}-redefinition'.format(name) else: new_name = '{}-redefinition{}'.format(name, i) existing = names.get(new_name) if existing is None: names[new_name] = symbol return elif existing.node is symbol.node: # Already there return i += 1 def add_module_symbol(self, id: str, as_id: str, module_public: bool, context: Context, module_hidden: bool = False) -> None: """Add symbol that is a reference to a module object.""" if id in self.modules: node = self.modules[id] self.add_symbol(as_id, node, context, module_public=module_public, module_hidden=module_hidden) else: self.add_unknown_imported_symbol(as_id, context, target_name=id) def add_local(self, node: Union[Var, FuncDef, OverloadedFuncDef], context: Context) -> None: """Add local variable or function.""" assert self.is_func_scope() name = node.name node._fullname = name self.add_symbol(name, node, context) def add_imported_symbol(self, name: str, node: SymbolTableNode, context: Context, module_public: bool = True, module_hidden: bool = False) -> None: """Add an alias to an existing symbol through import.""" symbol = SymbolTableNode(node.kind, node.node, module_public=module_public, module_hidden=module_hidden) self.add_symbol_table_node(name, symbol, context) def add_unknown_imported_symbol(self, name: str, context: Context, target_name: Optional[str] = None) -> None: """Add symbol that we don't know what it points to because resolving an import failed. This can happen if a module is missing, or it is present, but doesn't have the imported attribute. The `target_name` is the name of symbol in the namespace it is imported from. For example, for 'from mod import x as y' the target_name is 'mod.x'. This is currently used only to track logical dependencies. """ existing = self.current_symbol_table().get(name) if existing and isinstance(existing.node, Var) and existing.node.is_suppressed_import: # This missing import was already added -- nothing to do here. 
return var = Var(name) if self.options.logical_deps and target_name is not None: # This makes it possible to add logical fine-grained dependencies # from a missing module. We can't use this by default, since in a # few places we assume that the full name points to a real # definition, but this name may point to nothing. var._fullname = target_name elif self.type: var._fullname = self.type.fullname + "." + name var.info = self.type else: var._fullname = self.qualified_name(name) var.is_ready = True any_type = AnyType(TypeOfAny.from_unimported_type, missing_import_name=var._fullname) var.type = any_type var.is_suppressed_import = True self.add_symbol(name, var, context) # # Other helpers # @contextmanager def tvar_scope_frame(self, frame: TypeVarScope) -> Iterator[None]: old_scope = self.tvar_scope self.tvar_scope = frame yield self.tvar_scope = old_scope def defer(self, debug_context: Optional[Context] = None) -> None: """Defer current analysis target to be analyzed again. This must be called if something in the current target is incomplete or has a placeholder node. However, this must *not* be called during the final analysis iteration! Instead, an error should be generated. Often 'process_placeholder' is a good way to either defer or generate an error. NOTE: Some methods, such as 'anal_type', 'mark_incomplete' and 'record_incomplete_ref', call this implicitly, or when needed. They are usually preferable to a direct defer() call. """ assert not self.final_iteration, 'Must not defer during final iteration' self.deferred = True # Store debug info for this deferral. 
line = (debug_context.line if debug_context else self.statement.line if self.statement else -1) self.deferral_debug_context.append((self.cur_mod_id, line)) def track_incomplete_refs(self) -> Tag: """Return tag that can be used for tracking references to incomplete names.""" return self.num_incomplete_refs def found_incomplete_ref(self, tag: Tag) -> bool: """Have we encountered an incomplete reference since starting tracking?""" return self.num_incomplete_refs != tag def record_incomplete_ref(self) -> None: """Record the encounter of an incomplete reference and defer current analysis target.""" self.defer() self.num_incomplete_refs += 1 def mark_incomplete(self, name: str, node: Node, becomes_typeinfo: bool = False) -> None: """Mark a definition as incomplete (and defer current analysis target). Also potentially mark the current namespace as incomplete. Args: name: The name that we weren't able to define (or '*' if the name is unknown) node: The node that refers to the name (definition or lvalue) becomes_typeinfo: Pass this to PlaceholderNode (used by special forms like named tuples that will create TypeInfos). """ self.defer(node) if name == '*': self.incomplete = True elif not self.is_global_or_nonlocal(name): fullname = self.qualified_name(name) assert self.statement placeholder = PlaceholderNode(fullname, node, self.statement.line, becomes_typeinfo=becomes_typeinfo) self.add_symbol(name, placeholder, context=dummy_context()) self.missing_names.add(name) def is_incomplete_namespace(self, fullname: str) -> bool: """Is a module or class namespace potentially missing some definitions? If a name is missing from an incomplete namespace, we'll need to defer the current analysis target. """ return fullname in self.incomplete_namespaces def process_placeholder(self, name: str, kind: str, ctx: Context) -> None: """Process a reference targeting placeholder node. If this is not a final iteration, defer current node, otherwise report an error. 
The 'kind' argument indicates if this a name or attribute expression (used for better error message). """ if self.final_iteration: self.cannot_resolve_name(name, kind, ctx) else: self.defer(ctx) def cannot_resolve_name(self, name: str, kind: str, ctx: Context) -> None: self.fail('Cannot resolve {} "{}" (possible cyclic definition)'.format(kind, name), ctx) def qualified_name(self, name: str) -> str: if self.type is not None: return self.type._fullname + '.' + name elif self.is_func_scope(): return name else: return self.cur_mod_id + '.' + name def enter(self, function: Union[FuncItem, GeneratorExpr, DictionaryComprehension]) -> None: """Enter a function, generator or comprehension scope.""" names = self.saved_locals.setdefault(function, SymbolTable()) self.locals.append(names) is_comprehension = isinstance(function, (GeneratorExpr, DictionaryComprehension)) self.is_comprehension_stack.append(is_comprehension) self.global_decls.append(set()) self.nonlocal_decls.append(set()) # -1 since entering block will increment this to 0. 
self.block_depth.append(-1) def leave(self) -> None: self.locals.pop() self.is_comprehension_stack.pop() self.global_decls.pop() self.nonlocal_decls.pop() self.block_depth.pop() def is_func_scope(self) -> bool: return self.locals[-1] is not None def is_nested_within_func_scope(self) -> bool: """Are we underneath a function scope, even if we are in a nested class also?""" return any(l is not None for l in self.locals) def is_class_scope(self) -> bool: return self.type is not None and not self.is_func_scope() def is_module_scope(self) -> bool: return not (self.is_class_scope() or self.is_func_scope()) def current_symbol_kind(self) -> int: if self.is_class_scope(): kind = MDEF elif self.is_func_scope(): kind = LDEF else: kind = GDEF return kind def current_symbol_table(self, escape_comprehensions: bool = False) -> SymbolTable: if self.is_func_scope(): assert self.locals[-1] is not None if escape_comprehensions: for i, is_comprehension in enumerate(reversed(self.is_comprehension_stack)): if not is_comprehension: names = self.locals[-1 - i] break else: assert False, "Should have at least one non-comprehension scope" else: names = self.locals[-1] assert names is not None elif self.type is not None: names = self.type.names else: names = self.globals return names def is_global_or_nonlocal(self, name: str) -> bool: return (self.is_func_scope() and (name in self.global_decls[-1] or name in self.nonlocal_decls[-1])) def add_exports(self, exp_or_exps: Union[Iterable[Expression], Expression]) -> None: exps = [exp_or_exps] if isinstance(exp_or_exps, Expression) else exp_or_exps for exp in exps: if isinstance(exp, StrExpr): self.all_exports.append(exp.value) def check_no_global(self, name: str, ctx: Context, is_overloaded_func: bool = False) -> None: if name in self.globals: prev_is_overloaded = isinstance(self.globals[name], OverloadedFuncDef) if is_overloaded_func and prev_is_overloaded: self.fail("Nonconsecutive overload {} found".format(name), ctx) elif prev_is_overloaded: 
self.fail("Definition of '{}' missing 'overload'".format(name), ctx) else: self.name_already_defined(name, ctx, self.globals[name]) def name_not_defined(self, name: str, ctx: Context, namespace: Optional[str] = None) -> None: if self.is_incomplete_namespace(namespace or self.cur_mod_id): # Target namespace is incomplete, so it's possible that the name will be defined # later on. Defer current target. self.record_incomplete_ref() return message = "Name '{}' is not defined".format(name) self.fail(message, ctx, code=codes.NAME_DEFINED) if 'builtins.{}'.format(name) in SUGGESTED_TEST_FIXTURES: # The user probably has a missing definition in a test fixture. Let's verify. fullname = 'builtins.{}'.format(name) if self.lookup_fully_qualified_or_none(fullname) is None: # Yes. Generate a helpful note. self.add_fixture_note(fullname, ctx) modules_with_unimported_hints = { name.split('.', 1)[0] for name in TYPES_FOR_UNIMPORTED_HINTS } lowercased = { name.lower(): name for name in TYPES_FOR_UNIMPORTED_HINTS } for module in modules_with_unimported_hints: fullname = '{}.{}'.format(module, name).lower() if fullname not in lowercased: continue # User probably forgot to import these types. hint = ( 'Did you forget to import it from "{module}"?' ' (Suggestion: "from {module} import {name}")' ).format(module=module, name=lowercased[fullname].rsplit('.', 1)[-1]) self.note(hint, ctx, code=codes.NAME_DEFINED) def already_defined(self, name: str, ctx: Context, original_ctx: Optional[Union[SymbolTableNode, SymbolNode]], noun: str) -> None: if isinstance(original_ctx, SymbolTableNode): node = original_ctx.node # type: Optional[SymbolNode] elif isinstance(original_ctx, SymbolNode): node = original_ctx else: node = None if isinstance(original_ctx, SymbolTableNode) and isinstance(original_ctx.node, MypyFile): # Since this is an import, original_ctx.node points to the module definition. # Therefore its line number is always 1, which is not useful for this # error message. 
extra_msg = ' (by an import)' elif node and node.line != -1 and self.is_local_name(node.fullname): # TODO: Using previous symbol node may give wrong line. We should use # the line number where the binding was established instead. extra_msg = ' on line {}'.format(node.line) else: extra_msg = ' (possibly by an import)' self.fail("{} '{}' already defined{}".format(noun, unmangle(name), extra_msg), ctx, code=codes.NO_REDEF) def name_already_defined(self, name: str, ctx: Context, original_ctx: Optional[Union[SymbolTableNode, SymbolNode]] = None ) -> None: self.already_defined(name, ctx, original_ctx, noun='Name') def attribute_already_defined(self, name: str, ctx: Context, original_ctx: Optional[Union[SymbolTableNode, SymbolNode]] = None ) -> None: self.already_defined(name, ctx, original_ctx, noun='Attribute') def is_local_name(self, name: str) -> bool: """Does name look like reference to a definition in the current module?""" return self.is_defined_in_current_module(name) or '.' not in name def fail(self, msg: str, ctx: Context, serious: bool = False, *, code: Optional[ErrorCode] = None, blocker: bool = False) -> None: if (not serious and not self.options.check_untyped_defs and self.function_stack and self.function_stack[-1].is_dynamic()): return # In case it's a bug and we don't really have context assert ctx is not None, msg self.errors.report(ctx.get_line(), ctx.get_column(), msg, blocker=blocker, code=code) def fail_blocker(self, msg: str, ctx: Context) -> None: self.fail(msg, ctx, blocker=True) def note(self, msg: str, ctx: Context, code: Optional[ErrorCode] = None) -> None: if (not self.options.check_untyped_defs and self.function_stack and self.function_stack[-1].is_dynamic()): return self.errors.report(ctx.get_line(), ctx.get_column(), msg, severity='note', code=code) def accept(self, node: Node) -> None: try: node.accept(self) except Exception as err: report_internal_error(err, self.errors.file, node.line, self.errors, self.options) def 
expr_to_analyzed_type(self, expr: Expression, report_invalid_types: bool = True, allow_placeholder: bool = False) -> Optional[Type]: if isinstance(expr, CallExpr): expr.accept(self) is_named_tuple, info = self.named_tuple_analyzer.check_namedtuple(expr, None, self.is_func_scope()) if not is_named_tuple: # Some form of namedtuple is the only valid type that looks like a call # expression. This isn't a valid type. raise TypeTranslationError() elif not info: self.defer(expr) return None assert info.tuple_type, "NamedTuple without tuple type" fallback = Instance(info, []) return TupleType(info.tuple_type.items, fallback=fallback) typ = expr_to_unanalyzed_type(expr) return self.anal_type(typ, report_invalid_types=report_invalid_types, allow_placeholder=allow_placeholder) def analyze_type_expr(self, expr: Expression) -> None: # There are certain expressions that mypy does not need to semantically analyze, # since they analyzed solely as type. (For example, indexes in type alias definitions # and base classes in class defs). External consumers of the mypy AST may need # them semantically analyzed, however, if they need to treat it as an expression # and not a type. (Which is to say, mypyc needs to do this.) Do the analysis # in a fresh tvar scope in order to suppress any errors about using type variables. 
with self.tvar_scope_frame(TypeVarScope()): expr.accept(self) def type_analyzer(self, *, tvar_scope: Optional[TypeVarScope] = None, allow_tuple_literal: bool = False, allow_unbound_tvars: bool = False, allow_placeholder: bool = False, report_invalid_types: bool = True) -> TypeAnalyser: if tvar_scope is None: tvar_scope = self.tvar_scope tpan = TypeAnalyser(self, tvar_scope, self.plugin, self.options, self.is_typeshed_stub_file, allow_unbound_tvars=allow_unbound_tvars, allow_tuple_literal=allow_tuple_literal, report_invalid_types=report_invalid_types, allow_unnormalized=self.is_stub_file, allow_placeholder=allow_placeholder) tpan.in_dynamic_func = bool(self.function_stack and self.function_stack[-1].is_dynamic()) tpan.global_scope = not self.type and not self.function_stack return tpan def anal_type(self, typ: Type, *, tvar_scope: Optional[TypeVarScope] = None, allow_tuple_literal: bool = False, allow_unbound_tvars: bool = False, allow_placeholder: bool = False, report_invalid_types: bool = True, third_pass: bool = False) -> Optional[Type]: """Semantically analyze a type. Args: typ: Type to analyze (if already analyzed, this is a no-op) allow_placeholder: If True, may return PlaceholderType if encountering an incomplete definition third_pass: Unused; only for compatibility with old semantic analyzer Return None only if some part of the type couldn't be bound *and* it referred to an incomplete namespace or definition. In this case also defer as needed. During a final iteration this won't return None; instead report an error if the type can't be analyzed and return AnyType. In case of other errors, report an error message and return AnyType. NOTE: The caller shouldn't defer even if this returns None or a placeholder type. 
""" a = self.type_analyzer(tvar_scope=tvar_scope, allow_unbound_tvars=allow_unbound_tvars, allow_tuple_literal=allow_tuple_literal, allow_placeholder=allow_placeholder, report_invalid_types=report_invalid_types) tag = self.track_incomplete_refs() typ = typ.accept(a) if self.found_incomplete_ref(tag): # Something could not be bound yet. return None self.add_type_alias_deps(a.aliases_used) return typ def class_type(self, self_type: Type) -> Type: return TypeType.make_normalized(self_type) def schedule_patch(self, priority: int, patch: Callable[[], None]) -> None: self.patches.append((priority, patch)) def report_hang(self) -> None: print('Deferral trace:') for mod, line in self.deferral_debug_context: print(' {}:{}'.format(mod, line)) self.errors.report(-1, -1, 'INTERNAL ERROR: maximum semantic analysis iteration count reached', blocker=True) def add_plugin_dependency(self, trigger: str, target: Optional[str] = None) -> None: """Add dependency from trigger to a target. If the target is not given explicitly, use the current target. """ if target is None: target = self.scope.current_target() self.cur_mod_node.plugin_deps.setdefault(trigger, set()).add(target) def add_type_alias_deps(self, aliases_used: Iterable[str], target: Optional[str] = None) -> None: """Add full names of type aliases on which the current node depends. This is used by fine-grained incremental mode to re-check the corresponding nodes. If `target` is None, then the target node used will be the current scope. """ if not aliases_used: # A basic optimization to avoid adding targets with no dependencies to # the `alias_deps` dict. return if target is None: target = self.scope.current_target() self.cur_mod_node.alias_deps[target].update(aliases_used) def is_mangled_global(self, name: str) -> bool: # A global is mangled if there exists at least one renamed variant. 
return unmangle(name) + "'" in self.globals def is_initial_mangled_global(self, name: str) -> bool: # If there are renamed definitions for a global, the first one has exactly one prime. return name == unmangle(name) + "'" def parse_bool(self, expr: Expression) -> Optional[bool]: if isinstance(expr, NameExpr): if expr.fullname == 'builtins.True': return True if expr.fullname == 'builtins.False': return False return None class HasPlaceholders(TypeQuery[bool]): def __init__(self) -> None: super().__init__(any) def visit_placeholder_type(self, t: PlaceholderType) -> bool: return True def has_placeholder(typ: Type) -> bool: """Check if a type contains any placeholder types (recursively).""" return typ.accept(HasPlaceholders()) def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike: if isinstance(sig, CallableType): if len(sig.arg_types) == 0: return sig return sig.copy_modified(arg_types=[new] + sig.arg_types[1:]) elif isinstance(sig, Overloaded): return Overloaded([cast(CallableType, replace_implicit_first_type(i, new)) for i in sig.items()]) else: assert False def refers_to_fullname(node: Expression, fullname: str) -> bool: """Is node a name or member expression with the given full name?""" if not isinstance(node, RefExpr): return False if node.fullname == fullname: return True if isinstance(node.node, TypeAlias): target = get_proper_type(node.node.target) if isinstance(target, Instance) and target.type.fullname == fullname: return True return False def refers_to_class_or_function(node: Expression) -> bool: """Does semantically analyzed node refer to a class?""" return (isinstance(node, RefExpr) and isinstance(node.node, (TypeInfo, FuncDef, OverloadedFuncDef))) def find_duplicate(list: List[T]) -> Optional[T]: """If the list has duplicates, return one of the duplicates. Otherwise, return None. 
""" for i in range(1, len(list)): if list[i] in list[:i]: return list[i] return None def remove_imported_names_from_symtable(names: SymbolTable, module: str) -> None: """Remove all imported names from the symbol table of a module.""" removed = [] # type: List[str] for name, node in names.items(): if node.node is None: continue fullname = node.node.fullname prefix = fullname[:fullname.rfind('.')] if prefix != module: removed.append(name) for name in removed: del names[name] def make_any_non_explicit(t: Type) -> Type: """Replace all Any types within in with Any that has attribute 'explicit' set to False""" return t.accept(MakeAnyNonExplicit()) class MakeAnyNonExplicit(TypeTranslator): def visit_any(self, t: AnyType) -> Type: if t.type_of_any == TypeOfAny.explicit: return t.copy_modified(TypeOfAny.special_form) return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: return t.copy_modified(args=[a.accept(self) for a in t.args]) def apply_semantic_analyzer_patches(patches: List[Tuple[int, Callable[[], None]]]) -> None: """Call patch callbacks in the right order. This should happen after semantic analyzer pass 3. 
""" patches_by_priority = sorted(patches, key=lambda x: x[0]) for priority, patch_func in patches_by_priority: patch_func() def names_modified_by_assignment(s: AssignmentStmt) -> List[NameExpr]: """Return all unqualified (short) names assigned to in an assignment statement.""" result = [] # type: List[NameExpr] for lvalue in s.lvalues: result += names_modified_in_lvalue(lvalue) return result def names_modified_in_lvalue(lvalue: Lvalue) -> List[NameExpr]: """Return all NameExpr assignment targets in an Lvalue.""" if isinstance(lvalue, NameExpr): return [lvalue] elif isinstance(lvalue, StarExpr): return names_modified_in_lvalue(lvalue.expr) elif isinstance(lvalue, (ListExpr, TupleExpr)): result = [] # type: List[NameExpr] for item in lvalue.items: result += names_modified_in_lvalue(item) return result return [] def is_same_var_from_getattr(n1: Optional[SymbolNode], n2: Optional[SymbolNode]) -> bool: """Do n1 and n2 refer to the same Var derived from module-level __getattr__?""" return (isinstance(n1, Var) and n1.from_module_getattr and isinstance(n2, Var) and n2.from_module_getattr and n1.fullname == n2.fullname) def dummy_context() -> Context: return TempNode(AnyType(TypeOfAny.special_form)) def is_valid_replacement(old: SymbolTableNode, new: SymbolTableNode) -> bool: """Can symbol table node replace an existing one? These are the only valid cases: 1. Placeholder gets replaced with a non-placeholder 2. 
def is_same_symbol(a: Optional[SymbolNode], b: Optional[SymbolNode]) -> bool:
    """Do the two symbol nodes count as the same definition?

    Besides plain equality, two placeholders are treated as interchangeable,
    as are two Vars synthesized from the same module-level __getattr__.
    """
    if a == b:
        return True
    if isinstance(a, PlaceholderNode) and isinstance(b, PlaceholderNode):
        return True
    return is_same_var_from_getattr(a, b)
def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: Errors) -> None:
    """Calculate abstract status of a class.

    Set is_abstract of the type to True if the type has an unimplemented
    abstract attribute.  Also compute a list of abstract attributes.
    Report an error if a required ABCMeta metaclass is missing.
    """
    if typ.typeddict_type:
        return  # TypedDict can't be abstract
    concrete = set()  # type: Set[str]
    abstract = []  # type: List[str]
    abstract_in_this_class = []  # type: List[str]
    if typ.is_newtype:
        # Special case: NewTypes are considered as always non-abstract, so they can be used as:
        #     Config = NewType('Config', Mapping[str, str])
        #     default = Config({'cannot': 'modify'})  # OK
        typ.abstract_attributes = []
        return
    # Walk the MRO from most-derived to base; a name becomes "concrete" once
    # seen, so an abstract definition in a base is masked by any earlier
    # (more derived) definition of the same name.
    for base in typ.mro:
        for name, symnode in base.names.items():
            node = symnode.node
            if isinstance(node, OverloadedFuncDef):
                # Unwrap an overloaded function definition. We can just
                # check arbitrarily the first overload item. If the
                # different items have a different abstract status, there
                # should be an error reported elsewhere.
                if node.items:  # can be empty for invalid overloads
                    func = node.items[0]  # type: Optional[Node]
                else:
                    func = None
            else:
                func = node
            if isinstance(func, Decorator):
                # Abstract methods appear as Decorator nodes (e.g. @abstractmethod).
                fdef = func.func
                if fdef.is_abstract and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            elif isinstance(node, Var):
                if node.is_abstract_var and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            concrete.add(name)
    # In stubs, abstract classes need to be explicitly marked because it is too
    # easy to accidentally leave a concrete class abstract by forgetting to
    # implement some methods.
    typ.abstract_attributes = sorted(abstract)
    if is_stub_file:
        if typ.declared_metaclass and typ.declared_metaclass.type.fullname == 'abc.ABCMeta':
            return
        if typ.is_protocol:
            return
        # Only complain when every abstract attribute is inherited: defining a
        # new abstract attribute in this class is taken as intent to be abstract.
        if abstract and not abstract_in_this_class:
            def report(message: str, severity: str) -> None:
                errors.report(typ.line, typ.column, message, severity=severity)

            attrs = ", ".join('"{}"'.format(attr) for attr in sorted(abstract))
            report("Class {} has abstract attributes {}".format(typ.fullname, attrs), 'error')
            report("If it is meant to be abstract, add 'abc.ABCMeta' as an explicit metaclass",
                   'note')
""" defn = info.defn promote_target = None # type: Optional[Type] for decorator in defn.decorators: if isinstance(decorator, CallExpr): analyzed = decorator.analyzed if isinstance(analyzed, PromoteExpr): # _promote class decorator (undocumented feature). promote_target = analyzed.type if not promote_target: promotions = (TYPE_PROMOTIONS_PYTHON3 if options.python_version[0] >= 3 else TYPE_PROMOTIONS_PYTHON2) if defn.fullname in promotions: target_sym = module_names.get(promotions[defn.fullname]) # With test stubs, the target may not exist. if target_sym: target_info = target_sym.node assert isinstance(target_info, TypeInfo) promote_target = Instance(target_info, []) defn.info._promote = promote_target mypy-0.761/mypy/semanal_enum.py0000644€tŠÔÚ€2›s®0000001624713576752246022746 0ustar jukkaDROPBOX\Domain Users00000000000000"""Semantic analysis of call-based Enum definitions. This is conceptually part of mypy.semanal (semantic analyzer pass 2). """ from typing import List, Tuple, Optional, Union, cast from mypy.nodes import ( Expression, Context, TypeInfo, AssignmentStmt, NameExpr, CallExpr, RefExpr, StrExpr, UnicodeExpr, TupleExpr, ListExpr, DictExpr, Var, SymbolTableNode, MDEF, ARG_POS, EnumCallExpr, MemberExpr ) from mypy.semanal_shared import SemanticAnalyzerInterface from mypy.options import Options class EnumCallAnalyzer: def __init__(self, options: Options, api: SemanticAnalyzerInterface) -> None: self.options = options self.api = api def process_enum_call(self, s: AssignmentStmt, is_func_scope: bool) -> bool: """Check if s defines an Enum; if yes, store the definition in symbol table. Return True if this looks like an Enum definition (but maybe with errors), otherwise return False. 
""" if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], (NameExpr, MemberExpr)): return False lvalue = s.lvalues[0] name = lvalue.name enum_call = self.check_enum_call(s.rvalue, name, is_func_scope) if enum_call is None: return False if isinstance(lvalue, MemberExpr): self.fail("Enum type as attribute is not supported", lvalue) return False # Yes, it's a valid Enum definition. Add it to the symbol table. self.api.add_symbol(name, enum_call, s) return True def check_enum_call(self, node: Expression, var_name: str, is_func_scope: bool) -> Optional[TypeInfo]: """Check if a call defines an Enum. Example: A = enum.Enum('A', 'foo bar') is equivalent to: class A(enum.Enum): foo = 1 bar = 2 """ if not isinstance(node, CallExpr): return None call = node callee = call.callee if not isinstance(callee, RefExpr): return None fullname = callee.fullname if fullname not in ('enum.Enum', 'enum.IntEnum', 'enum.Flag', 'enum.IntFlag'): return None items, values, ok = self.parse_enum_call_args(call, fullname.split('.')[-1]) if not ok: # Error. Construct dummy return value. info = self.build_enum_call_typeinfo(var_name, [], fullname) else: name = cast(Union[StrExpr, UnicodeExpr], call.args[0]).value if name != var_name or is_func_scope: # Give it a unique name derived from the line number. name += '@' + str(call.line) info = self.build_enum_call_typeinfo(name, items, fullname) # Store generated TypeInfo under both names, see semanal_namedtuple for more details. 
if name != var_name or is_func_scope: self.api.add_symbol_skip_local(name, info) call.analyzed = EnumCallExpr(info, items, values) call.analyzed.set_line(call.line, call.column) info.line = node.line return info def build_enum_call_typeinfo(self, name: str, items: List[str], fullname: str) -> TypeInfo: base = self.api.named_type_or_none(fullname) assert base is not None info = self.api.basic_new_typeinfo(name, base) info.metaclass_type = info.calculate_metaclass_type() info.is_enum = True for item in items: var = Var(item) var.info = info var.is_property = True var._fullname = '{}.{}'.format(info.fullname, item) info.names[item] = SymbolTableNode(MDEF, var) return info def parse_enum_call_args(self, call: CallExpr, class_name: str) -> Tuple[List[str], List[Optional[Expression]], bool]: """Parse arguments of an Enum call. Return a tuple of fields, values, was there an error. """ args = call.args if len(args) < 2: return self.fail_enum_call_arg("Too few arguments for %s()" % class_name, call) if len(args) > 2: return self.fail_enum_call_arg("Too many arguments for %s()" % class_name, call) if call.arg_kinds != [ARG_POS, ARG_POS]: return self.fail_enum_call_arg("Unexpected arguments to %s()" % class_name, call) if not isinstance(args[0], (StrExpr, UnicodeExpr)): return self.fail_enum_call_arg( "%s() expects a string literal as the first argument" % class_name, call) items = [] values = [] # type: List[Optional[Expression]] if isinstance(args[1], (StrExpr, UnicodeExpr)): fields = args[1].value for field in fields.replace(',', ' ').split(): items.append(field) elif isinstance(args[1], (TupleExpr, ListExpr)): seq_items = args[1].items if all(isinstance(seq_item, (StrExpr, UnicodeExpr)) for seq_item in seq_items): items = [cast(Union[StrExpr, UnicodeExpr], seq_item).value for seq_item in seq_items] elif all(isinstance(seq_item, (TupleExpr, ListExpr)) and len(seq_item.items) == 2 and isinstance(seq_item.items[0], (StrExpr, UnicodeExpr)) for seq_item in seq_items): for 
seq_item in seq_items: assert isinstance(seq_item, (TupleExpr, ListExpr)) name, value = seq_item.items assert isinstance(name, (StrExpr, UnicodeExpr)) items.append(name.value) values.append(value) else: return self.fail_enum_call_arg( "%s() with tuple or list expects strings or (name, value) pairs" % class_name, call) elif isinstance(args[1], DictExpr): for key, value in args[1].items: if not isinstance(key, (StrExpr, UnicodeExpr)): return self.fail_enum_call_arg( "%s() with dict literal requires string literals" % class_name, call) items.append(key.value) values.append(value) else: # TODO: Allow dict(x=1, y=2) as a substitute for {'x': 1, 'y': 2}? return self.fail_enum_call_arg( "%s() expects a string, tuple, list or dict literal as the second argument" % class_name, call) if len(items) == 0: return self.fail_enum_call_arg("%s() needs at least one item" % class_name, call) if not values: values = [None] * len(items) assert len(items) == len(values) return items, values, True def fail_enum_call_arg(self, message: str, context: Context) -> Tuple[List[str], List[Optional[Expression]], bool]: self.fail(message, context) return [], [], False # Helpers def fail(self, msg: str, ctx: Context) -> None: self.api.fail(msg, ctx) mypy-0.761/mypy/semanal_infer.py0000644€tŠÔÚ€2›s®0000001210413576752246023071 0ustar jukkaDROPBOX\Domain Users00000000000000"""Simple type inference for decorated functions during semantic analysis.""" from typing import Optional from mypy.nodes import Expression, Decorator, CallExpr, FuncDef, RefExpr, Var, ARG_POS from mypy.types import ( Type, CallableType, AnyType, TypeOfAny, TypeVarType, ProperType, get_proper_type ) from mypy.typeops import function_type from mypy.typevars import has_no_typevars from mypy.semanal_shared import SemanticAnalyzerInterface def infer_decorator_signature_if_simple(dec: Decorator, analyzer: SemanticAnalyzerInterface) -> None: """Try to infer the type of the decorated function. 
This lets us resolve additional references to decorated functions during type checking. Otherwise the type might not be available when we need it, since module top levels can't be deferred. This basically uses a simple special-purpose type inference engine just for decorators. """ if dec.var.is_property: # Decorators are expected to have a callable type (it's a little odd). if dec.func.type is None: dec.var.type = CallableType( [AnyType(TypeOfAny.special_form)], [ARG_POS], [None], AnyType(TypeOfAny.special_form), analyzer.named_type('__builtins__.function'), name=dec.var.name) elif isinstance(dec.func.type, CallableType): dec.var.type = dec.func.type return decorator_preserves_type = True for expr in dec.decorators: preserve_type = False if isinstance(expr, RefExpr) and isinstance(expr.node, FuncDef): if expr.node.type and is_identity_signature(expr.node.type): preserve_type = True if not preserve_type: decorator_preserves_type = False break if decorator_preserves_type: # No non-identity decorators left. We can trivially infer the type # of the function here. dec.var.type = function_type(dec.func, analyzer.named_type('__builtins__.function')) if dec.decorators: return_type = calculate_return_type(dec.decorators[0]) if return_type and isinstance(return_type, AnyType): # The outermost decorator will return Any so we know the type of the # decorated function. dec.var.type = AnyType(TypeOfAny.from_another_any, source_any=return_type) sig = find_fixed_callable_return(dec.decorators[0]) if sig: # The outermost decorator always returns the same kind of function, # so we know that this is the type of the decorated function. 
def calculate_return_type(expr: Expression) -> Optional[ProperType]:
    """Return the return type if we can calculate it.

    This only uses information available during semantic analysis so this
    will sometimes return None because of insufficient information (as
    type inference hasn't run yet).
    """
    if isinstance(expr, CallExpr):
        # The return type of a call is the return type of whatever is called.
        return calculate_return_type(expr.callee)
    if not isinstance(expr, RefExpr):
        return None
    node = expr.node
    if isinstance(node, FuncDef):
        typ = node.type
        if typ is None:
            # No signature -> default to Any.
            return AnyType(TypeOfAny.unannotated)
        if isinstance(typ, CallableType):
            return get_proper_type(typ.ret_type)
        return None
    if isinstance(node, Var):
        return get_proper_type(node.type)
    return None
""" if isinstance(expr, RefExpr): if isinstance(expr.node, FuncDef): typ = expr.node.type if typ: if isinstance(typ, CallableType) and has_no_typevars(typ.ret_type): ret_type = get_proper_type(typ.ret_type) if isinstance(ret_type, CallableType): return ret_type elif isinstance(expr, CallExpr): t = find_fixed_callable_return(expr.callee) if t: ret_type = get_proper_type(t.ret_type) if isinstance(ret_type, CallableType): return ret_type return None mypy-0.761/mypy/semanal_main.py0000644€tŠÔÚ€2›s®0000004233213576752246022720 0ustar jukkaDROPBOX\Domain Users00000000000000"""Top-level logic for the semantic analyzer. The semantic analyzer binds names, resolves imports, detects various special constructs that don't have dedicated AST nodes after parse (such as 'cast' which looks like a call), populates symbol tables, and performs various simple consistency checks. Semantic analysis of each SCC (strongly connected component; import cycle) is performed in one unit. Each module is analyzed as multiple separate *targets*; the module top level is one target and each function is a target. Nested functions are not separate targets, however. This is mostly identical to targets used by mypy daemon (but classes aren't targets in semantic analysis). We first analyze each module top level in an SCC. If we encounter some names that we can't bind because the target of the name may not have been processed yet, we *defer* the current target for further processing. Deferred targets will be analyzed additional times until everything can be bound, or we reach a maximum number of iterations. We keep track of a set of incomplete namespaces, i.e. namespaces that we haven't finished populating yet. References to these namespaces cause a deferral if they can't be satisfied. Initially every module in the SCC will be incomplete. 
""" import contextlib from typing import List, Tuple, Optional, Union, Callable, Iterator from typing_extensions import TYPE_CHECKING from mypy.nodes import ( MypyFile, TypeInfo, FuncDef, Decorator, OverloadedFuncDef, Var ) from mypy.semanal_typeargs import TypeArgumentAnalyzer from mypy.state import strict_optional_set from mypy.semanal import ( SemanticAnalyzer, apply_semantic_analyzer_patches, remove_imported_names_from_symtable ) from mypy.semanal_classprop import ( calculate_class_abstract_status, calculate_class_vars, check_protocol_status, add_type_promotion ) from mypy.errors import Errors from mypy.semanal_infer import infer_decorator_signature_if_simple from mypy.checker import FineGrainedDeferredNode from mypy.server.aststrip import SavedAttributes import mypy.build if TYPE_CHECKING: from mypy.build import Graph, State Patches = List[Tuple[int, Callable[[], None]]] # If we perform this many iterations, raise an exception since we are likely stuck. MAX_ITERATIONS = 20 # Number of passes over core modules before going on to the rest of the builtin SCC. CORE_WARMUP = 2 core_modules = ['typing', 'builtins', 'abc', 'collections'] def semantic_analysis_for_scc(graph: 'Graph', scc: List[str], errors: Errors) -> None: """Perform semantic analysis for all modules in a SCC (import cycle). Assume that reachability analysis has already been performed. The scc will be processed roughly in the order the modules are included in the list. """ patches = [] # type: Patches # Note that functions can't define new module-level attributes # using 'global x', since module top levels are fully processed # before functions. This limitation is unlikely to go away soon. process_top_levels(graph, scc, patches) process_functions(graph, scc, patches) # We use patch callbacks to fix up things when we expect relatively few # callbacks to be required. apply_semantic_analyzer_patches(patches) # This pass might need fallbacks calculated above. 
check_type_arguments(graph, scc, errors) calculate_class_properties(graph, scc, errors) check_blockers(graph, scc) # Clean-up builtins, so that TypeVar etc. are not accessible without importing. if 'builtins' in scc: cleanup_builtin_scc(graph['builtins']) def cleanup_builtin_scc(state: 'State') -> None: """Remove imported names from builtins namespace. This way names imported from typing in builtins.pyi aren't available by default (without importing them). We can only do this after processing the whole SCC is finished, when the imported names aren't needed for processing builtins.pyi itself. """ assert state.tree is not None remove_imported_names_from_symtable(state.tree.names, 'builtins') def semantic_analysis_for_targets( state: 'State', nodes: List[FineGrainedDeferredNode], graph: 'Graph', saved_attrs: SavedAttributes) -> None: """Semantically analyze only selected nodes in a given module. This essentially mirrors the logic of semantic_analysis_for_scc() except that we process only some targets. This is used in fine grained incremental mode, when propagating an update. The saved_attrs are implicitly declared instance attributes (attributes defined on self) removed by AST stripper that may need to be reintroduced here. They must be added before any methods are analyzed. """ patches = [] # type: Patches if any(isinstance(n.node, MypyFile) for n in nodes): # Process module top level first (if needed). process_top_levels(graph, [state.id], patches) restore_saved_attrs(saved_attrs) analyzer = state.manager.semantic_analyzer for n in nodes: if isinstance(n.node, MypyFile): # Already done above. 
def restore_saved_attrs(saved_attrs: SavedAttributes) -> None:
    """Restore instance variables removed during AST strip that haven't been added yet."""
    for (cdef, name), sym in saved_attrs.items():
        info = cdef.info
        # NOTE(review): info.get() presumably searches the whole MRO while
        # info.names is this class body only — confirm against TypeInfo.
        existing = info.get(name)
        defined_in_this_class = name in info.names
        assert isinstance(sym.node, Var)
        # This needs to mimic the logic in SemanticAnalyzer.analyze_member_lvalue()
        # regarding the existing variable in class body or in a superclass:
        # If the attribute of self is not defined in superclasses, create a new Var.
        if (existing is None or
                # (An abstract Var is considered as not defined.)
                (isinstance(existing.node, Var) and existing.node.is_abstract_var) or
                # Also an explicit declaration on self creates a new Var unless
                # there is already one defined in the class body.
                sym.node.explicit_self_type and not defined_in_this_class):
            info.names[name] = sym
if all(m in worklist for m in core_modules): worklist += list(reversed(core_modules)) * CORE_WARMUP final_iteration = False iteration = 0 analyzer = state.manager.semantic_analyzer analyzer.deferral_debug_context.clear() while worklist: iteration += 1 if iteration > MAX_ITERATIONS: # Just pick some module inside the current SCC for error context. assert state.tree is not None with analyzer.file_context(state.tree, state.options): analyzer.report_hang() break if final_iteration: # Give up. It's impossible to bind all names. state.manager.incomplete_namespaces.clear() all_deferred = [] # type: List[str] any_progress = False while worklist: next_id = worklist.pop() state = graph[next_id] assert state.tree is not None deferred, incomplete, progress = semantic_analyze_target(next_id, state, state.tree, None, final_iteration, patches) all_deferred += deferred any_progress = any_progress or progress if not incomplete: state.manager.incomplete_namespaces.discard(next_id) if final_iteration: assert not all_deferred, 'Must not defer during final iteration' # Reverse to process the targets in the same order on every iteration. This avoids # processing the same target twice in a row, which is inefficient. worklist = list(reversed(all_deferred)) final_iteration = not any_progress def process_functions(graph: 'Graph', scc: List[str], patches: Patches) -> None: # Process functions. for module in scc: tree = graph[module].tree assert tree is not None analyzer = graph[module].manager.semantic_analyzer # In principle, functions can be processed in arbitrary order, # but _methods_ must be processed in the order they are defined, # because some features (most notably partial types) depend on # order of definitions on self. # # There can be multiple generated methods per line. Use target # name as the second sort key to get a repeatable sort order on # Python 3.5, which doesn't preserve dictionary order. 
targets = sorted(get_all_leaf_targets(tree), key=lambda x: (x[1].line, x[0])) for target, node, active_type in targets: assert isinstance(node, (FuncDef, OverloadedFuncDef, Decorator)) process_top_level_function(analyzer, graph[module], module, target, node, active_type, patches) def process_top_level_function(analyzer: 'SemanticAnalyzer', state: 'State', module: str, target: str, node: Union[FuncDef, OverloadedFuncDef, Decorator], active_type: Optional[TypeInfo], patches: Patches) -> None: """Analyze single top-level function or method. Process the body of the function (including nested functions) again and again, until all names have been resolved (ot iteration limit reached). """ # We need one more iteration after incomplete is False (e.g. to report errors, if any). final_iteration = False incomplete = True # Start in the incomplete state (no missing names will be reported on first pass). # Note that we use module name, since functions don't create qualified names. deferred = [module] analyzer.deferral_debug_context.clear() analyzer.incomplete_namespaces.add(module) iteration = 0 while deferred: iteration += 1 if iteration == MAX_ITERATIONS: # Just pick some module inside the current SCC for error context. assert state.tree is not None with analyzer.file_context(state.tree, state.options): analyzer.report_hang() break if not (deferred or incomplete) or final_iteration: # OK, this is one last pass, now missing names will be reported. analyzer.incomplete_namespaces.discard(module) deferred, incomplete, progress = semantic_analyze_target(target, state, node, active_type, final_iteration, patches) if final_iteration: assert not deferred, 'Must not defer during final iteration' if not progress: final_iteration = True analyzer.incomplete_namespaces.discard(module) # After semantic analysis is done, discard local namespaces # to avoid memory hoarding. 
def get_all_leaf_targets(file: MypyFile) -> List[TargetInfo]:
    """Return all leaf targets in a symbol table (module-level and methods)."""
    return [(fullname, sym.node, active_type)
            for fullname, sym, active_type in file.local_definitions()
            if isinstance(sym.node, (FuncDef, OverloadedFuncDef, Decorator))]
def check_type_arguments(graph: 'Graph', scc: List[str], errors: Errors) -> None:
    """Validate type arguments for every module in the SCC."""
    for module_id in scc:
        state = graph[module_id]
        assert state.tree
        is_typeshed = errors.is_typeshed_file(state.path or '')
        visitor = TypeArgumentAnalyzer(errors, state.options, is_typeshed)
        with state.wrap_context():
            with strict_optional_set(state.options.strict_optional):
                state.tree.accept(visitor)
""" analyzer = TypeArgumentAnalyzer(errors, state.options, errors.is_typeshed_file(state.path or '')) with state.wrap_context(): with strict_optional_set(state.options.strict_optional): for target in targets: func = None # type: Optional[Union[FuncDef, OverloadedFuncDef]] if isinstance(target.node, (FuncDef, OverloadedFuncDef)): func = target.node saved = (state.id, target.active_typeinfo, func) # module, class, function with errors.scope.saved_scope(saved) if errors.scope else nothing(): analyzer.recurse_into_functions = func is not None target.node.accept(analyzer) def calculate_class_properties(graph: 'Graph', scc: List[str], errors: Errors) -> None: for module in scc: tree = graph[module].tree assert tree for _, node, _ in tree.local_definitions(): if isinstance(node.node, TypeInfo): saved = (module, node.node, None) # module, class, function with errors.scope.saved_scope(saved) if errors.scope else nothing(): calculate_class_abstract_status(node.node, tree.is_stub, errors) check_protocol_status(node.node, errors) calculate_class_vars(node.node) add_type_promotion(node.node, tree.names, graph[module].options) def check_blockers(graph: 'Graph', scc: List[str]) -> None: for module in scc: graph[module].check_blockers() @contextlib.contextmanager def nothing() -> Iterator[None]: yield mypy-0.761/mypy/semanal_namedtuple.py0000644€tŠÔÚ€2›s®0000006223513576752246024136 0ustar jukkaDROPBOX\Domain Users00000000000000"""Semantic analysis of named tuple definitions. This is conceptually part of mypy.semanal. 
""" from contextlib import contextmanager from typing import Tuple, List, Dict, Mapping, Optional, Union, cast, Iterator from typing_extensions import Final from mypy.types import ( Type, TupleType, AnyType, TypeOfAny, TypeVarDef, CallableType, TypeType, TypeVarType ) from mypy.semanal_shared import ( SemanticAnalyzerInterface, set_callable_name, calculate_tuple_fallback, PRIORITY_FALLBACKS ) from mypy.nodes import ( Var, EllipsisExpr, Argument, StrExpr, BytesExpr, UnicodeExpr, ExpressionStmt, NameExpr, AssignmentStmt, PassStmt, Decorator, FuncBase, ClassDef, Expression, RefExpr, TypeInfo, NamedTupleExpr, CallExpr, Context, TupleExpr, ListExpr, SymbolTableNode, FuncDef, Block, TempNode, SymbolTable, TypeVarExpr, ARG_POS, ARG_NAMED_OPT, ARG_OPT, MDEF ) from mypy.options import Options from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError from mypy.util import get_unique_redefinition_name # Matches "_prohibited" in typing.py, but adds __annotations__, which works at runtime but can't # easily be supported in a static checker. NAMEDTUPLE_PROHIBITED_NAMES = ('__new__', '__init__', '__slots__', '__getnewargs__', '_fields', '_field_defaults', '_field_types', '_make', '_replace', '_asdict', '_source', '__annotations__') # type: Final NAMEDTUP_CLASS_ERROR = ('Invalid statement in NamedTuple definition; ' 'expected "field_name: field_type [= default]"') # type: Final SELF_TVAR_NAME = '_NT' # type: Final class NamedTupleAnalyzer: def __init__(self, options: Options, api: SemanticAnalyzerInterface) -> None: self.options = options self.api = api def analyze_namedtuple_classdef(self, defn: ClassDef, is_stub_file: bool ) -> Tuple[bool, Optional[TypeInfo]]: """Analyze if given class definition can be a named tuple definition. Return a tuple where first item indicates whether this can possibly be a named tuple, and the second item is the corresponding TypeInfo (may be None if not ready and should be deferred). 
""" for base_expr in defn.base_type_exprs: if isinstance(base_expr, RefExpr): self.api.accept(base_expr) if base_expr.fullname == 'typing.NamedTuple': result = self.check_namedtuple_classdef(defn, is_stub_file) if result is None: # This is a valid named tuple, but some types are incomplete. return True, None items, types, default_items = result info = self.build_namedtuple_typeinfo( defn.name, items, types, default_items, defn.line) defn.info = info defn.analyzed = NamedTupleExpr(info, is_typed=True) defn.analyzed.line = defn.line defn.analyzed.column = defn.column # All done: this is a valid named tuple with all types known. return True, info # This can't be a valid named tuple. return False, None def check_namedtuple_classdef(self, defn: ClassDef, is_stub_file: bool ) -> Optional[Tuple[List[str], List[Type], Dict[str, Expression]]]: """Parse and validate fields in named tuple class definition. Return a three tuple: * field names * field types * field default values or None, if any of the types are not ready. """ if self.options.python_version < (3, 6) and not is_stub_file: self.fail('NamedTuple class syntax is only supported in Python 3.6', defn) return [], [], {} if len(defn.base_type_exprs) > 1: self.fail('NamedTuple should be a single base', defn) items = [] # type: List[str] types = [] # type: List[Type] default_items = {} # type: Dict[str, Expression] for stmt in defn.defs.body: if not isinstance(stmt, AssignmentStmt): # Still allow pass or ... (for empty namedtuples). if (isinstance(stmt, PassStmt) or (isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr))): continue # Also allow methods, including decorated ones. if isinstance(stmt, (Decorator, FuncBase)): continue # And docstrings. if (isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr)): continue self.fail(NAMEDTUP_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. 
                self.fail(NAMEDTUP_CLASS_ERROR, stmt)
            else:
                # Append name and type in this case...
                name = stmt.lvalues[0].name
                items.append(name)
                if stmt.type is None:
                    types.append(AnyType(TypeOfAny.unannotated))
                else:
                    analyzed = self.api.anal_type(stmt.type)
                    if analyzed is None:
                        # Something is incomplete. We need to defer this named tuple.
                        return None
                    types.append(analyzed)
                # ...despite possible minor failures that allow further analysis.
                if name.startswith('_'):
                    self.fail('NamedTuple field name cannot start with an underscore: {}'
                              .format(name), stmt)
                if stmt.type is None or hasattr(stmt, 'new_syntax') and not stmt.new_syntax:
                    self.fail(NAMEDTUP_CLASS_ERROR, stmt)
                elif isinstance(stmt.rvalue, TempNode):
                    # x: int assigns rvalue to TempNode(AnyType())
                    if default_items:
                        self.fail('Non-default NamedTuple fields cannot follow default fields',
                                  stmt)
                else:
                    default_items[name] = stmt.rvalue
        return items, types, default_items

    def check_namedtuple(self,
                         node: Expression,
                         var_name: Optional[str],
                         is_func_scope: bool) -> Tuple[bool, Optional[TypeInfo]]:
        """Check if a call defines a namedtuple.

        The optional var_name argument is the name of the variable to
        which this is assigned, if any.

        Return a tuple of two items:
          * Can it be a valid named tuple?
          * Corresponding TypeInfo, or None if not ready.

        If the definition is invalid but looks like a namedtuple,
        report errors but return (some) TypeInfo.
        """
        if not isinstance(node, CallExpr):
            return False, None
        call = node
        callee = call.callee
        if not isinstance(callee, RefExpr):
            return False, None
        fullname = callee.fullname
        if fullname == 'collections.namedtuple':
            is_typed = False
        elif fullname == 'typing.NamedTuple':
            is_typed = True
        else:
            return False, None
        result = self.parse_namedtuple_args(call, fullname)
        if result:
            items, types, defaults, ok = result
        else:
            # This is a valid named tuple but some types are not ready.
            return True, None
        if not ok:
            # Error. Construct dummy return value.
            if var_name:
                name = var_name
            else:
                name = 'namedtuple@' + str(call.line)
            info = self.build_namedtuple_typeinfo(name, [], [], {}, node.line)
            self.store_namedtuple_info(info, name, call, is_typed)
            return True, info
        name = cast(Union[StrExpr, BytesExpr, UnicodeExpr], call.args[0]).value
        if name != var_name or is_func_scope:
            # There are three special cases where we need to give it a unique name derived
            # from the line number:
            #   * There is a name mismatch with l.h.s., therefore we need to disambiguate
            #     situations like:
            #         A = NamedTuple('Same', [('x', int)])
            #         B = NamedTuple('Same', [('y', str)])
            #   * This is a base class expression, since it often matches the class name:
            #         class NT(NamedTuple('NT', [...])):
            #             ...
            #   * This is a local (function or method level) named tuple, since
            #     two methods of a class can define a named tuple with the same name,
            #     and they will be stored in the same namespace (see below).
            name += '@' + str(call.line)
        if len(defaults) > 0:
            default_items = {
                arg_name: default
                for arg_name, default in zip(items[-len(defaults):], defaults)
            }
        else:
            default_items = {}
        info = self.build_namedtuple_typeinfo(name, items, types, default_items, node.line)
        # If var_name is not None (i.e. this is not a base class expression), we always
        # store the generated TypeInfo under var_name in the current scope, so that
        # other definitions can use it.
        if var_name:
            self.store_namedtuple_info(info, var_name, call, is_typed)
        # There are three cases where we need to store the generated TypeInfo
        # second time (for the purpose of serialization):
        #   * If there is a name mismatch like One = NamedTuple('Other', [...])
        #     we also store the info under name 'Other@lineno', this is needed
        #     because classes are (de)serialized using their actual fullname, not
        #     the name of l.h.s.
        #   * If this is a method level named tuple. It can leak from the method
        #     via assignment to self attribute and therefore needs to be serialized
        #     (local namespaces are not serialized).
        #   * If it is a base class expression. It was not stored above, since
        #     there is no var_name (but it still needs to be serialized
        #     since it is in MRO of some class).
        if name != var_name or is_func_scope:
            # NOTE: we skip local namespaces since they are not serialized.
            self.api.add_symbol_skip_local(name, info)
        return True, info

    def store_namedtuple_info(self, info: TypeInfo, name: str,
                              call: CallExpr, is_typed: bool) -> None:
        """Record a symbol for the named tuple and mark the call as analyzed."""
        self.api.add_symbol(name, info, call)
        call.analyzed = NamedTupleExpr(info, is_typed=is_typed)
        call.analyzed.set_line(call.line, call.column)

    def parse_namedtuple_args(self, call: CallExpr, fullname: str
                              ) -> Optional[Tuple[List[str], List[Type], List[Expression],
                                            bool]]:
        """Parse a namedtuple() call into data needed to construct a type.

        Returns a 4-tuple:
        - List of argument names
        - List of argument types
        - List of default values (for the trailing arguments that have one)
        - Whether the definition typechecked.

        Return None if at least one of the types is not ready.
        """
        # TODO: Share code with check_argument_count in checkexpr.py?
        args = call.args
        if len(args) < 2:
            return self.fail_namedtuple_arg("Too few arguments for namedtuple()", call)
        defaults = []  # type: List[Expression]
        if len(args) > 2:
            # Typed namedtuple doesn't support additional arguments.
            if fullname == 'typing.NamedTuple':
                return self.fail_namedtuple_arg("Too many arguments for NamedTuple()", call)
            for i, arg_name in enumerate(call.arg_names[2:], 2):
                if arg_name == 'defaults':
                    arg = args[i]
                    # We don't care what the values are, as long as the argument is an iterable
                    # and we can count how many defaults there are.
                    if isinstance(arg, (ListExpr, TupleExpr)):
                        defaults = list(arg.items)
                    else:
                        self.fail(
                            "List or tuple literal expected as the defaults argument to "
                            "namedtuple()",
                            arg
                        )
                    break
        if call.arg_kinds[:2] != [ARG_POS, ARG_POS]:
            return self.fail_namedtuple_arg("Unexpected arguments to namedtuple()", call)
        if not isinstance(args[0], (StrExpr, BytesExpr, UnicodeExpr)):
            return self.fail_namedtuple_arg(
                "namedtuple() expects a string literal as the first argument", call)
        types = []  # type: List[Type]
        ok = True
        if not isinstance(args[1], (ListExpr, TupleExpr)):
            if (fullname == 'collections.namedtuple'
                    and isinstance(args[1], (StrExpr, BytesExpr, UnicodeExpr))):
                # collections.namedtuple also accepts a space/comma separated string
                # of field names.
                str_expr = args[1]
                items = str_expr.value.replace(',', ' ').split()
            else:
                return self.fail_namedtuple_arg(
                    "List or tuple literal expected as the second argument to namedtuple()",
                    call)
        else:
            listexpr = args[1]
            if fullname == 'collections.namedtuple':
                # The fields argument contains just names, with implicit Any types.
                if any(not isinstance(item, (StrExpr, BytesExpr, UnicodeExpr))
                       for item in listexpr.items):
                    return self.fail_namedtuple_arg("String literal expected as namedtuple() item",
                                                    call)
                items = [cast(Union[StrExpr, BytesExpr, UnicodeExpr], item).value
                         for item in listexpr.items]
            else:
                # The fields argument contains (name, type) tuples.
                result = self.parse_namedtuple_fields_with_types(listexpr.items, call)
                if result:
                    items, types, _, ok = result
                else:
                    # One of the types is not ready, defer.
                    return None
        if not types:
            types = [AnyType(TypeOfAny.unannotated) for _ in items]
        underscore = [item for item in items if item.startswith('_')]
        if underscore:
            self.fail("namedtuple() field names cannot start with an underscore: "
                      + ', '.join(underscore), call)
        if len(defaults) > len(items):
            self.fail("Too many defaults given in call to namedtuple()", call)
            defaults = defaults[:len(items)]
        return items, types, defaults, ok

    def parse_namedtuple_fields_with_types(self, nodes: List[Expression], context: Context
                                           ) -> Optional[Tuple[List[str], List[Type],
                                                         List[Expression], bool]]:
        """Parse typed named tuple fields.

        Return (names, types, defaults, error occurred), or None if at least one of
        the types is not ready.
        """
        items = []  # type: List[str]
        types = []  # type: List[Type]
        for item in nodes:
            if isinstance(item, TupleExpr):
                if len(item.items) != 2:
                    return self.fail_namedtuple_arg("Invalid NamedTuple field definition",
                                                    item)
                name, type_node = item.items
                if isinstance(name, (StrExpr, BytesExpr, UnicodeExpr)):
                    items.append(name.value)
                else:
                    return self.fail_namedtuple_arg("Invalid NamedTuple() field name", item)
                try:
                    type = expr_to_unanalyzed_type(type_node)
                except TypeTranslationError:
                    return self.fail_namedtuple_arg('Invalid field type', type_node)
                analyzed = self.api.anal_type(type)
                # These should be all known, otherwise we would defer in visit_assignment_stmt().
                if analyzed is None:
                    return None
                types.append(analyzed)
            else:
                return self.fail_namedtuple_arg("Tuple expected as NamedTuple() field", item)
        return items, types, [], True

    def fail_namedtuple_arg(self, message: str, context: Context
                            ) -> Tuple[List[str], List[Type], List[Expression], bool]:
        """Report an error and return an empty, not-OK parse result."""
        self.fail(message, context)
        return [], [], [], False

    def build_namedtuple_typeinfo(self,
                                  name: str,
                                  items: List[str],
                                  types: List[Type],
                                  default_items: Mapping[str, Expression],
                                  line: int) -> TypeInfo:
        """Construct the TypeInfo for a named tuple, with synthesized fields and methods."""
        strtype = self.api.named_type('__builtins__.str')
        implicit_any = AnyType(TypeOfAny.special_form)
        basetuple_type = self.api.named_type('__builtins__.tuple', [implicit_any])
        dictype = (self.api.named_type_or_none('builtins.dict', [strtype, implicit_any])
                   or self.api.named_type('__builtins__.object'))
        # Actual signature should return OrderedDict[str, Union[types]]
        ordereddictype = (self.api.named_type_or_none('builtins.dict', [strtype, implicit_any])
                          or self.api.named_type('__builtins__.object'))
        fallback = self.api.named_type('__builtins__.tuple', [implicit_any])
        # Note: actual signature should accept an invariant version of Iterable[UnionType[types]].
        # but it can't be expressed. 'new' and 'len' should be callable types.
        iterable_type = self.api.named_type_or_none('typing.Iterable', [implicit_any])
        function_type = self.api.named_type('__builtins__.function')

        info = self.api.basic_new_typeinfo(name, fallback)
        info.is_named_tuple = True
        tuple_base = TupleType(types, fallback)
        info.tuple_type = tuple_base
        info.line = line
        # We can't calculate the complete fallback type until after semantic
        # analysis, since otherwise base classes might be incomplete. Postpone a
        # callback function that patches the fallback.
        self.api.schedule_patch(PRIORITY_FALLBACKS,
                                lambda: calculate_tuple_fallback(tuple_base))

        def add_field(var: Var, is_initialized_in_class: bool = False,
                      is_property: bool = False) -> None:
            # Install a Var as a class member of the synthesized TypeInfo.
            var.info = info
            var.is_initialized_in_class = is_initialized_in_class
            var.is_property = is_property
            var._fullname = '%s.%s' % (info.fullname, var.name)
            info.names[var.name] = SymbolTableNode(MDEF, var)

        fields = [Var(item, typ) for item, typ in zip(items, types)]
        for var in fields:
            add_field(var, is_property=True)
        # We can't share Vars between fields and method arguments, since they
        # have different full names (the latter are normally used as local variables
        # in functions, so their full names are set to short names when generated methods
        # are analyzed).
        vars = [Var(item, typ) for item, typ in zip(items, types)]

        tuple_of_strings = TupleType([strtype for _ in items], basetuple_type)
        add_field(Var('_fields', tuple_of_strings), is_initialized_in_class=True)
        add_field(Var('_field_types', dictype), is_initialized_in_class=True)
        add_field(Var('_field_defaults', dictype), is_initialized_in_class=True)
        add_field(Var('_source', strtype), is_initialized_in_class=True)
        add_field(Var('__annotations__', ordereddictype), is_initialized_in_class=True)
        add_field(Var('__doc__', strtype), is_initialized_in_class=True)

        # Type variable used as the self-type of the generated methods.
        tvd = TypeVarDef(SELF_TVAR_NAME, info.fullname + '.'
                         + SELF_TVAR_NAME, -1, [], info.tuple_type)
        selftype = TypeVarType(tvd)

        def add_method(funcname: str,
                       ret: Type,
                       args: List[Argument],
                       is_classmethod: bool = False,
                       is_new: bool = False,
                       ) -> None:
            # Synthesize a method (optionally a classmethod) on the named tuple class.
            if is_classmethod or is_new:
                first = [Argument(Var('_cls'), TypeType.make_normalized(selftype), None, ARG_POS)]
            else:
                first = [Argument(Var('_self'), selftype, None, ARG_POS)]
            args = first + args

            types = [arg.type_annotation for arg in args]
            items = [arg.variable.name for arg in args]
            arg_kinds = [arg.kind for arg in args]
            assert None not in types
            signature = CallableType(cast(List[Type], types), arg_kinds, items, ret,
                                     function_type)
            signature.variables = [tvd]
            func = FuncDef(funcname, args, Block([]))
            func.info = info
            func.is_class = is_classmethod
            func.type = set_callable_name(signature, func)
            func._fullname = info.fullname + '.' + funcname
            func.line = line
            if is_classmethod:
                v = Var(funcname, func.type)
                v.is_classmethod = True
                v.info = info
                v._fullname = func._fullname
                func.is_decorated = True
                dec = Decorator(func, [NameExpr('classmethod')], v)
                dec.line = line
                sym = SymbolTableNode(MDEF, dec)
            else:
                sym = SymbolTableNode(MDEF, func)
            sym.plugin_generated = True
            info.names[funcname] = sym

        add_method('_replace', ret=selftype,
                   args=[Argument(var, var.type, EllipsisExpr(), ARG_NAMED_OPT)
                         for var in vars])

        def make_init_arg(var: Var) -> Argument:
            # Fields with a default become optional arguments of __new__.
            default = default_items.get(var.name, None)
            kind = ARG_POS if default is None else ARG_OPT
            return Argument(var, var.type, default, kind)

        add_method('__new__', ret=selftype,
                   args=[make_init_arg(var) for var in vars],
                   is_new=True)
        add_method('_asdict', args=[], ret=ordereddictype)
        special_form_any = AnyType(TypeOfAny.special_form)
        add_method('_make', ret=selftype, is_classmethod=True,
                   args=[Argument(Var('iterable', iterable_type), iterable_type, None, ARG_POS),
                         Argument(Var('new'), special_form_any, EllipsisExpr(), ARG_NAMED_OPT),
                         Argument(Var('len'), special_form_any, EllipsisExpr(), ARG_NAMED_OPT)])

        self_tvar_expr = TypeVarExpr(SELF_TVAR_NAME, info.fullname + '.' + SELF_TVAR_NAME,
                                     [], info.tuple_type)
        info.names[SELF_TVAR_NAME] = SymbolTableNode(MDEF, self_tvar_expr)
        return info

    @contextmanager
    def save_namedtuple_body(self, named_tuple_info: TypeInfo) -> Iterator[None]:
        """Preserve the generated body of class-based named tuple and then restore it.

        Temporarily clear the names dict so we don't get errors about duplicate names
        that were already set in build_namedtuple_typeinfo (we already added the tuple
        field names while generating the TypeInfo, and actual duplicates are
        already reported).
        """
        nt_names = named_tuple_info.names
        named_tuple_info.names = SymbolTable()

        yield

        # Make sure we didn't use illegal names, then reset the names in the typeinfo.
        for prohibited in NAMEDTUPLE_PROHIBITED_NAMES:
            if prohibited in named_tuple_info.names:
                if nt_names.get(prohibited) is named_tuple_info.names[prohibited]:
                    continue
                ctx = named_tuple_info.names[prohibited].node
                assert ctx is not None
                self.fail('Cannot overwrite NamedTuple attribute "{}"'.format(prohibited),
                          ctx)

        # Restore the names in the original symbol table. This ensures that the symbol
        # table contains the field objects created by build_namedtuple_typeinfo. Exclude
        # __doc__, which can legally be overwritten by the class.
        for key, value in nt_names.items():
            if key in named_tuple_info.names:
                if key == '__doc__':
                    continue
                sym = named_tuple_info.names[key]
                if isinstance(sym.node, (FuncBase, Decorator)) and not sym.plugin_generated:
                    # Keep user-defined methods as is.
                    continue
                # Keep existing (user-provided) definitions under mangled names, so they
                # get semantically analyzed.
                r_key = get_unique_redefinition_name(key, named_tuple_info.names)
                named_tuple_info.names[r_key] = sym
            named_tuple_info.names[key] = value

    # Helpers

    def fail(self, msg: str, ctx: Context) -> None:
        # Forward error reporting to the semantic analyzer API.
        self.api.fail(msg, ctx)
# ----- tar archive member header (non-Python residue; marks next file) -----
mypy-0.761/mypy/semanal_newtype.py0000644€tŠÔÚ€2›s®0000002200713576752246023464 0ustar jukkaDROPBOX\Domain Users00000000000000
"""Semantic analysis of NewType definitions.

This is conceptually part of mypy.semanal (semantic analyzer pass 2).
"""

from typing import Tuple, Optional

from mypy.types import (
    Type, Instance, CallableType, NoneType, TupleType, AnyType, PlaceholderType,
    TypeOfAny, get_proper_type
)
from mypy.nodes import (
    AssignmentStmt, NewTypeExpr, CallExpr, NameExpr, RefExpr, Context, StrExpr, BytesExpr,
    UnicodeExpr, Block, FuncDef, Argument, TypeInfo, Var, SymbolTableNode, MDEF, ARG_POS,
    PlaceholderNode
)
from mypy.semanal_shared import SemanticAnalyzerInterface
from mypy.options import Options
from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
from mypy.typeanal import check_for_explicit_any, has_any_from_unimported_type
from mypy.messages import MessageBuilder, format_type
from mypy.errorcodes import ErrorCode
from mypy import errorcodes as codes


class NewTypeAnalyzer:
    def __init__(self,
                 options: Options,
                 api: SemanticAnalyzerInterface,
                 msg: MessageBuilder) -> None:
        self.options = options
        self.api = api
        self.msg = msg

    def process_newtype_declaration(self, s: AssignmentStmt) -> bool:
        """Check if s declares a NewType; if yes, store it in symbol table.

        Return True if it's a NewType declaration. The current target may be
        deferred as a side effect if the base type is not ready, even if the
        return value is True.

        The logic in this function mostly copies the logic for visit_class_def()
        with a single (non-Generic) base.
        """
        name, call = self.analyze_newtype_declaration(s)
        if name is None or call is None:
            return False
        # OK, now we know this is a NewType. But the base type may be not ready yet,
        # add placeholder as we do for ClassDef.

        fullname = self.api.qualified_name(name)
        if (not call.analyzed or
                isinstance(call.analyzed, NewTypeExpr) and not call.analyzed.info):
            # Start from labeling this as a future class, as we do for normal ClassDefs.
            placeholder = PlaceholderNode(fullname, s, s.line, becomes_typeinfo=True)
            self.api.add_symbol(name, placeholder, s, can_defer=False)

        old_type, should_defer = self.check_newtype_args(name, call, s)
        old_type = get_proper_type(old_type)
        if not call.analyzed:
            call.analyzed = NewTypeExpr(name, old_type, line=call.line, column=call.column)
        if old_type is None:
            if should_defer:
                # Base type is not ready.
                self.api.defer()
            return True

        # Create the corresponding class definition if the aliased type is subtypeable
        if isinstance(old_type, TupleType):
            newtype_class_info = self.build_newtype_typeinfo(name, old_type,
                                                             old_type.partial_fallback)
            newtype_class_info.tuple_type = old_type
        elif isinstance(old_type, Instance):
            if old_type.type.is_protocol:
                self.fail("NewType cannot be used with protocol classes", s)
            newtype_class_info = self.build_newtype_typeinfo(name, old_type, old_type)
        else:
            if old_type is not None:
                message = "Argument 2 to NewType(...) must be subclassable (got {})"
                self.fail(message.format(format_type(old_type)), s, code=codes.VALID_NEWTYPE)
            # Otherwise the error was already reported.
            old_type = AnyType(TypeOfAny.from_error)
            object_type = self.api.named_type('__builtins__.object')
            newtype_class_info = self.build_newtype_typeinfo(name, old_type, object_type)
            newtype_class_info.fallback_to_any = True

        check_for_explicit_any(old_type, self.options, self.api.is_typeshed_stub_file, self.msg,
                               context=s)
        if self.options.disallow_any_unimported and has_any_from_unimported_type(old_type):
            self.msg.unimported_type_becomes_any("Argument 2 to NewType(...)", old_type, s)

        # If so, add it to the symbol table.
        assert isinstance(call.analyzed, NewTypeExpr)
        # As we do for normal classes, create the TypeInfo only once, then just
        # update base classes on next iterations (to get rid of placeholders there).
        if not call.analyzed.info:
            call.analyzed.info = newtype_class_info
        else:
            call.analyzed.info.bases = newtype_class_info.bases
        self.api.add_symbol(name, call.analyzed.info, s)
        newtype_class_info.line = s.line
        return True

    def analyze_newtype_declaration(self,
                                    s: AssignmentStmt) -> Tuple[Optional[str],
                                                                Optional[CallExpr]]:
        """Return the NewType call expression if `s` is a newtype declaration or None otherwise."""
        name, call = None, None
        if (len(s.lvalues) == 1
                and isinstance(s.lvalues[0], NameExpr)
                and isinstance(s.rvalue, CallExpr)
                and isinstance(s.rvalue.callee, RefExpr)
                and s.rvalue.callee.fullname == 'typing.NewType'):
            name = s.lvalues[0].name

            if s.type:
                self.fail("Cannot declare the type of a NewType declaration", s)

            names = self.api.current_symbol_table()
            existing = names.get(name)
            # Give a better error message than generic "Name already defined".
            if (existing
                    and not isinstance(existing.node, PlaceholderNode)
                    and not s.rvalue.analyzed):
                self.fail("Cannot redefine '%s' as a NewType" % name, s)

            # This dummy NewTypeExpr marks the call as sufficiently analyzed; it will be
            # overwritten later with a fully complete NewTypeExpr if there are no other
            # errors with the NewType() call.
            call = s.rvalue
        return name, call

    def check_newtype_args(self, name: str, call: CallExpr,
                           context: Context) -> Tuple[Optional[Type], bool]:
        """Analyze base type in NewType call.

        Return a tuple (type, should defer).
        """
        has_failed = False
        args, arg_kinds = call.args, call.arg_kinds
        if len(args) != 2 or arg_kinds[0] != ARG_POS or arg_kinds[1] != ARG_POS:
            self.fail("NewType(...) expects exactly two positional arguments", context)
            return None, False

        # Check first argument
        if not isinstance(args[0], (StrExpr, BytesExpr, UnicodeExpr)):
            self.fail("Argument 1 to NewType(...) must be a string literal", context)
            has_failed = True
        elif args[0].value != name:
            msg = "String argument 1 '{}' to NewType(...) does not match variable name '{}'"
            self.fail(msg.format(args[0].value, name), context)
            has_failed = True

        # Check second argument
        msg = "Argument 2 to NewType(...) must be a valid type"
        try:
            unanalyzed_type = expr_to_unanalyzed_type(args[1])
        except TypeTranslationError:
            self.fail(msg, context)
            return None, False

        # We want to use our custom error message (see above), so we suppress
        # the default error message for invalid types here.
        old_type = get_proper_type(self.api.anal_type(unanalyzed_type,
                                                      report_invalid_types=False))
        should_defer = False
        if old_type is None or isinstance(old_type, PlaceholderType):
            should_defer = True

        # The caller of this function assumes that if we return a Type, it's always
        # a valid one. So, we translate AnyTypes created from errors into None.
        if isinstance(old_type, AnyType) and old_type.is_from_error:
            self.fail(msg, context)
            return None, False

        return None if has_failed else old_type, should_defer

    def build_newtype_typeinfo(self, name: str, old_type: Type, base_type: Instance) -> TypeInfo:
        """Construct a TypeInfo for the NewType, with a synthesized __init__(item)."""
        info = self.api.basic_new_typeinfo(name, base_type)
        info.is_newtype = True

        # Add __init__ method
        args = [Argument(Var('self'), NoneType(), None, ARG_POS),
                self.make_argument('item', old_type)]
        signature = CallableType(
            arg_types=[Instance(info, []), old_type],
            arg_kinds=[arg.kind for arg in args],
            arg_names=['self', 'item'],
            ret_type=NoneType(),
            fallback=self.api.named_type('__builtins__.function'),
            name=name)
        init_func = FuncDef('__init__', args, Block([]), typ=signature)
        init_func.info = info
        init_func._fullname = self.api.qualified_name(name) + '.__init__'
        info.names['__init__'] = SymbolTableNode(MDEF, init_func)

        return info

    # Helpers

    def make_argument(self, name: str, type: Type) -> Argument:
        return Argument(Var(name), type, None, ARG_POS)

    def fail(self, msg: str, ctx: Context, *, code: Optional[ErrorCode] = None) -> None:
        # Forward error reporting to the semantic analyzer API.
        self.api.fail(msg, ctx, code=code)
# ----- tar archive member header (non-Python residue; marks next file) -----
mypy-0.761/mypy/semanal_pass1.py0000644€tŠÔÚ€2›s®0000001052213576752246023017 0ustar jukkaDROPBOX\Domain Users00000000000000
"""Block/import reachability analysis."""

from mypy.nodes import (
    MypyFile, AssertStmt, IfStmt, Block, AssignmentStmt, ExpressionStmt, ReturnStmt, ForStmt,
    Import, ImportAll, ImportFrom, ClassDef, FuncDef
)
from mypy.traverser import TraverserVisitor
from mypy.options import Options
from mypy.reachability import infer_reachability_of_if_statement, assert_will_always_fail


class SemanticAnalyzerPreAnalysis(TraverserVisitor):
    """Analyze reachability of blocks and imports and other local things.

    This runs before semantic analysis, so names have not been bound. Imports are
    also not resolved yet, so we can only access the current module.

    This determines static reachability of blocks and imports due to version and
    platform checks, among others.

    The main entry point is 'visit_file'.

    Reachability of imports needs to be determined very early in the build since
    this affects which modules will ultimately be processed.

    Consider this example:

      import sys

      def do_stuff():
          # type: () -> None:
          if sys.python_version < (3,):
              import xyz  # Only available in Python 2
              xyz.whatever()
          ...

    The block containing 'import xyz' is unreachable in Python 3 mode. The import
    shouldn't be processed in Python 3 mode, even if the module happens to exist.
    """

    def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) -> None:
        self.pyversion = options.python_version
        self.platform = options.platform
        self.cur_mod_id = mod_id
        self.cur_mod_node = file
        self.options = options
        self.is_global_scope = True

        for i, defn in enumerate(file.defs):
            defn.accept(self)
            if isinstance(defn, AssertStmt) and assert_will_always_fail(defn, options):
                # We've encountered an assert that's always false,
                # e.g. assert sys.platform == 'lol'. Truncate the
                # list of statements. This mutates file.defs too.
                del file.defs[i + 1:]
                break

    def visit_func_def(self, node: FuncDef) -> None:
        old_global_scope = self.is_global_scope
        self.is_global_scope = False
        super().visit_func_def(node)
        self.is_global_scope = old_global_scope
        file_node = self.cur_mod_node
        if (self.is_global_scope
                and file_node.is_stub
                and node.name == '__getattr__'
                and file_node.is_package_init_file()):
            # __init__.pyi with __getattr__ means that any submodules are assumed
            # to exist, even if there is no stub. Note that we can't verify that the
            # return type is compatible, since we haven't bound types yet.
            file_node.is_partial_stub_package = True

    def visit_class_def(self, node: ClassDef) -> None:
        old_global_scope = self.is_global_scope
        self.is_global_scope = False
        super().visit_class_def(node)
        self.is_global_scope = old_global_scope

    def visit_import_from(self, node: ImportFrom) -> None:
        node.is_top_level = self.is_global_scope
        super().visit_import_from(node)

    def visit_import_all(self, node: ImportAll) -> None:
        node.is_top_level = self.is_global_scope
        super().visit_import_all(node)

    def visit_import(self, node: Import) -> None:
        node.is_top_level = self.is_global_scope
        super().visit_import(node)

    def visit_if_stmt(self, s: IfStmt) -> None:
        # Mark statically unreachable branches before visiting children.
        infer_reachability_of_if_statement(s, self.options)
        for expr in s.expr:
            expr.accept(self)
        for node in s.body:
            node.accept(self)
        if s.else_body:
            s.else_body.accept(self)

    def visit_block(self, b: Block) -> None:
        if b.is_unreachable:
            return
        super().visit_block(b)

    # The remaining methods are an optimization: don't visit nested expressions
    # of common statements, since they can have no effect.
def visit_assignment_stmt(self, s: AssignmentStmt) -> None: pass def visit_expression_stmt(self, s: ExpressionStmt) -> None: pass def visit_return_stmt(self, s: ReturnStmt) -> None: pass def visit_for_stmt(self, s: ForStmt) -> None: s.body.accept(self) if s.else_body is not None: s.else_body.accept(self) mypy-0.761/mypy/semanal_shared.py0000644€tŠÔÚ€2›s®0000001655113576752246023246 0ustar jukkaDROPBOX\Domain Users00000000000000"""Shared definitions used by different parts of semantic analysis.""" from abc import abstractmethod, abstractproperty from typing import Optional, List, Callable from typing_extensions import Final from mypy_extensions import trait from mypy.nodes import ( Context, SymbolTableNode, MypyFile, ImportedName, FuncDef, Node, TypeInfo, Expression, GDEF, SymbolNode, SymbolTable ) from mypy.util import correct_relative_import from mypy.types import ( Type, FunctionLike, Instance, TupleType, TPDICT_FB_NAMES, ProperType, get_proper_type ) from mypy.tvar_scope import TypeVarScope from mypy.errorcodes import ErrorCode from mypy import join # Priorities for ordering of patches within the "patch" phase of semantic analysis # (after the main pass): # Fix fallbacks (does joins) PRIORITY_FALLBACKS = 1 # type: Final @trait class SemanticAnalyzerCoreInterface: """A core abstract interface to generic semantic analyzer functionality. This is implemented by both semantic analyzer passes 2 and 3. 
@trait
class SemanticAnalyzerCoreInterface:
    """A core abstract interface to generic semantic analyzer functionality.

    This is implemented by both semantic analyzer passes 2 and 3.
    """

    @abstractmethod
    def lookup_qualified(self, name: str, ctx: Context,
                         suppress_errors: bool = False) -> Optional[SymbolTableNode]:
        raise NotImplementedError

    @abstractmethod
    def lookup_fully_qualified(self, name: str) -> SymbolTableNode:
        raise NotImplementedError

    @abstractmethod
    def lookup_fully_qualified_or_none(self, name: str) -> Optional[SymbolTableNode]:
        raise NotImplementedError

    @abstractmethod
    def fail(self, msg: str, ctx: Context, serious: bool = False, *,
             blocker: bool = False, code: Optional[ErrorCode] = None) -> None:
        raise NotImplementedError

    @abstractmethod
    def note(self, msg: str, ctx: Context, *, code: Optional[ErrorCode] = None) -> None:
        raise NotImplementedError

    @abstractmethod
    def record_incomplete_ref(self) -> None:
        raise NotImplementedError

    @abstractmethod
    def defer(self) -> None:
        raise NotImplementedError

    @abstractmethod
    def is_incomplete_namespace(self, fullname: str) -> bool:
        """Is a module or class namespace potentially missing some definitions?"""
        raise NotImplementedError

    @abstractproperty
    def final_iteration(self) -> bool:
        """Is this the final iteration of semantic analysis?"""
        raise NotImplementedError


@trait
class SemanticAnalyzerInterface(SemanticAnalyzerCoreInterface):
    """A limited abstract interface to some generic semantic analyzer pass 2 functionality.

    We use this interface for various reasons:

    * Looser coupling
    * Cleaner import graph
    * Less need to pass around callback functions
    """

    @abstractmethod
    def lookup(self, name: str, ctx: Context,
               suppress_errors: bool = False) -> Optional[SymbolTableNode]:
        raise NotImplementedError

    @abstractmethod
    def named_type(self, qualified_name: str, args: Optional[List[Type]] = None) -> Instance:
        raise NotImplementedError

    @abstractmethod
    def named_type_or_none(self, qualified_name: str,
                           args: Optional[List[Type]] = None) -> Optional[Instance]:
        raise NotImplementedError

    @abstractmethod
    def accept(self, node: Node) -> None:
        raise NotImplementedError

    @abstractmethod
    def anal_type(self, t: Type, *,
                  tvar_scope: Optional[TypeVarScope] = None,
                  allow_tuple_literal: bool = False,
                  allow_unbound_tvars: bool = False,
                  report_invalid_types: bool = True) -> Optional[Type]:
        raise NotImplementedError

    @abstractmethod
    def basic_new_typeinfo(self, name: str, basetype_or_fallback: Instance) -> TypeInfo:
        raise NotImplementedError

    @abstractmethod
    def schedule_patch(self, priority: int, fn: Callable[[], None]) -> None:
        raise NotImplementedError

    @abstractmethod
    def add_symbol_table_node(self, name: str, stnode: SymbolTableNode) -> bool:
        """Add node to the current symbol table."""
        raise NotImplementedError

    @abstractmethod
    def current_symbol_table(self) -> SymbolTable:
        """Get currently active symbol table.

        May be module, class, or local namespace.
        """
        raise NotImplementedError

    @abstractmethod
    def add_symbol(self, name: str, node: SymbolNode, context: Context,
                   module_public: bool = True,
                   module_hidden: bool = False,
                   can_defer: bool = True) -> bool:
        """Add symbol to the current symbol table."""
        raise NotImplementedError

    @abstractmethod
    def add_symbol_skip_local(self, name: str, node: SymbolNode) -> None:
        """Add symbol to the current symbol table, skipping locals.

        This is used to store symbol nodes in a symbol table that
        is going to be serialized (local namespaces are not serialized).
        See implementation docstring for more details.
        """
        raise NotImplementedError

    @abstractmethod
    def parse_bool(self, expr: Expression) -> Optional[bool]:
        raise NotImplementedError

    @abstractmethod
    def qualified_name(self, n: str) -> str:
        raise NotImplementedError

    @abstractproperty
    def is_typeshed_stub_file(self) -> bool:
        raise NotImplementedError


def create_indirect_imported_name(file_node: MypyFile,
                                  module: str,
                                  relative: int,
                                  imported_name: str) -> Optional[SymbolTableNode]:
    """Create symbol table entry for a name imported from another module.

    These entries act as indirect references.

    Return None if the relative import is invalid (e.g. goes above the
    top-level package).
    """
    target_module, ok = correct_relative_import(
        file_node.fullname,
        relative,
        module,
        file_node.is_package_init_file())
    if not ok:
        return None
    target_name = '%s.%s' % (target_module, imported_name)
    link = ImportedName(target_name)
    # Use GDEF since this refers to a module-level definition.
    return SymbolTableNode(GDEF, link)


def set_callable_name(sig: Type, fdef: FuncDef) -> ProperType:
    """Attach a human-readable name ("f of C") to a function-like type.

    Non-function types are returned unchanged.
    """
    sig = get_proper_type(sig)
    if isinstance(sig, FunctionLike):
        if fdef.info:
            if fdef.info.fullname in TPDICT_FB_NAMES:
                # Avoid exposing the internal _TypedDict name.
                class_name = 'TypedDict'
            else:
                class_name = fdef.info.name
            return sig.with_name(
                '{} of {}'.format(fdef.name, class_name))
        else:
            return sig.with_name(fdef.name)
    else:
        return sig


def calculate_tuple_fallback(typ: TupleType) -> None:
    """Calculate a precise item type for the fallback of a tuple type.

    This must be called only after the main semantic analysis pass, since joins
    aren't available before that.

    Note that there is an apparent chicken and egg problem with respect
    to verifying type arguments against bounds. Verifying bounds might
    require fallbacks, but we might use the bounds to calculate the
    fallbacks. In practice this is not a problem, since the worst that
    can happen is that we have invalid type argument values, and these
    can happen in later stages as well (they will generate errors, but
    we don't prevent their existence).
    """
    fallback = typ.partial_fallback
    assert fallback.type.fullname == 'builtins.tuple'
    # Mutate the fallback in place: replace the imprecise item type with
    # the join of the actual tuple item types.
    fallback.args[0] = join.join_type_list(list(typ.items))
"""Verify properties of type arguments, like 'int' in C[int] being valid.

This must happen after semantic analysis since there can be placeholder
types until the end of semantic analysis, and these break various type
operations, including subtype checks.
"""

from typing import List, Optional, Set

from mypy.nodes import TypeInfo, Context, MypyFile, FuncItem, ClassDef, Block
from mypy.types import (
    Type, Instance, TypeVarType, AnyType, get_proper_types, TypeAliasType, get_proper_type
)
from mypy.mixedtraverser import MixedTraverserVisitor
from mypy.subtypes import is_subtype
from mypy.sametypes import is_same_type
from mypy.errors import Errors
from mypy.scope import Scope
from mypy.options import Options
from mypy.errorcodes import ErrorCode
from mypy import message_registry, errorcodes as codes


class TypeArgumentAnalyzer(MixedTraverserVisitor):
    """Traverse a module and validate type arguments against TypeVar constraints."""

    def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None:
        self.errors = errors
        self.options = options
        self.is_typeshed_file = is_typeshed_file
        self.scope = Scope()
        # Should we also analyze function definitions, or only module top-levels?
        self.recurse_into_functions = True
        # Keep track of the type aliases already visited. This is needed to avoid
        # infinite recursion on types like A = Union[int, List[A]].
        self.seen_aliases = set()  # type: Set[TypeAliasType]

    def visit_mypy_file(self, o: MypyFile) -> None:
        # Point error reporting at this file before traversing its contents.
        self.errors.set_file(o.path, o.fullname, scope=self.scope)
        self.scope.enter_file(o.fullname)
        super().visit_mypy_file(o)
        self.scope.leave()

    def visit_func(self, defn: FuncItem) -> None:
        if not self.recurse_into_functions:
            return
        with self.scope.function_scope(defn):
            super().visit_func(defn)

    def visit_class_def(self, defn: ClassDef) -> None:
        with self.scope.class_scope(defn.info):
            super().visit_class_def(defn)

    def visit_block(self, o: Block) -> None:
        # Unreachable code was never semantically analyzed, so skip it.
        if not o.is_unreachable:
            super().visit_block(o)

    def visit_type_alias_type(self, t: TypeAliasType) -> None:
        super().visit_type_alias_type(t)
        if t in self.seen_aliases:
            # Avoid infinite recursion on recursive type aliases.
            # Note: it is fine to skip the aliases we have already seen in non-recursive
            # types, since errors there have already been reported.
            return
        self.seen_aliases.add(t)
        get_proper_type(t).accept(self)

    def visit_instance(self, t: Instance) -> None:
        """Check each type argument of an Instance against its TypeVar's
        value restrictions and upper bound.
        """
        # Type argument counts were checked in the main semantic analyzer pass. We assume
        # that the counts are correct here.
        info = t.type
        for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars):
            if tvar.values:
                if isinstance(arg, TypeVarType):
                    # A type variable used as an argument must itself be
                    # restricted to a subset of the target's allowed values.
                    arg_values = arg.values
                    if not arg_values:
                        self.fail('Type variable "{}" not valid as type '
                                  'argument value for "{}"'.format(
                                      arg.name, info.name), t, code=codes.TYPE_VAR)
                        continue
                else:
                    arg_values = [arg]
                self.check_type_var_values(info, arg_values, tvar.name, tvar.values, i + 1, t)
            if not is_subtype(arg, tvar.upper_bound):
                self.fail('Type argument "{}" of "{}" must be '
                          'a subtype of "{}"'.format(
                              arg, info.name, tvar.upper_bound), t, code=codes.TYPE_VAR)
        super().visit_instance(t)

    def check_type_var_values(self, type: TypeInfo, actuals: List[Type], arg_name: str,
                              valids: List[Type], arg_number: int, context: Context) -> None:
        # Report each actual type that is neither Any nor one of the allowed values.
        for actual in get_proper_types(actuals):
            if (not isinstance(actual, AnyType) and
                    not any(is_same_type(actual, value)
                            for value in valids)):
                if len(actuals) > 1 or not isinstance(actual, Instance):
                    self.fail('Invalid type argument value for "{}"'.format(
                        type.name), context, code=codes.TYPE_VAR)
                else:
                    class_name = '"{}"'.format(type.name)
                    actual_type_name = '"{}"'.format(actual.type.name)
                    self.fail(
                        message_registry.INCOMPATIBLE_TYPEVAR_VALUE.format(
                            arg_name, class_name, actual_type_name),
                        context,
                        code=codes.TYPE_VAR)

    def fail(self, msg: str, context: Context, *, code: Optional[ErrorCode] = None) -> None:
        # Report at the precise line/column of the offending type context.
        self.errors.report(context.get_line(), context.get_column(), msg, code=code)
"""Semantic analysis of TypedDict definitions."""

from collections import OrderedDict
from typing import Optional, List, Set, Tuple
from typing_extensions import Final

from mypy.types import Type, AnyType, TypeOfAny, TypedDictType, TPDICT_NAMES
from mypy.nodes import (
    CallExpr, TypedDictExpr, Expression, NameExpr, Context, StrExpr, BytesExpr, UnicodeExpr,
    ClassDef, RefExpr, TypeInfo, AssignmentStmt, PassStmt, ExpressionStmt, EllipsisExpr,
    TempNode, DictExpr, ARG_POS, ARG_NAMED
)
from mypy.semanal_shared import SemanticAnalyzerInterface
from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
from mypy.options import Options
from mypy.typeanal import check_for_explicit_any, has_any_from_unimported_type
from mypy.messages import MessageBuilder

TPDICT_CLASS_ERROR = ('Invalid statement in TypedDict definition; '
                      'expected "field_name: field_type"')  # type: Final


class TypedDictAnalyzer:
    """Analyze class-based and call-based TypedDict definitions."""

    def __init__(self,
                 options: Options,
                 api: SemanticAnalyzerInterface,
                 msg: MessageBuilder) -> None:
        self.options = options
        self.api = api
        self.msg = msg

    def analyze_typeddict_classdef(self, defn: ClassDef) -> Tuple[bool, Optional[TypeInfo]]:
        """Analyze a class that may define a TypedDict.

        Assume that base classes have been analyzed already.

        Note: Unlike normal classes, we won't create a TypeInfo until
        the whole definition of the TypedDict (including the body and all
        key names and types) is complete.  This is mostly because we
        store the corresponding TypedDictType in the TypeInfo.

        Return (is this a TypedDict, new TypeInfo). Specifics:
         * If we couldn't finish due to incomplete reference anywhere in
           the definition, return (True, None).
         * If this is not a TypedDict, return (False, None).
        """
        possible = False
        for base_expr in defn.base_type_exprs:
            if isinstance(base_expr, RefExpr):
                self.api.accept(base_expr)
                if base_expr.fullname in TPDICT_NAMES or self.is_typeddict(base_expr):
                    possible = True
        if possible:
            if (len(defn.base_type_exprs) == 1 and
                    isinstance(defn.base_type_exprs[0], RefExpr) and
                    defn.base_type_exprs[0].fullname in TPDICT_NAMES):
                # Building a new TypedDict
                fields, types, required_keys = self.analyze_typeddict_classdef_fields(defn)
                if fields is None:
                    return True, None  # Defer
                info = self.build_typeddict_typeinfo(defn.name, fields, types, required_keys)
                defn.analyzed = TypedDictExpr(info)
                defn.analyzed.line = defn.line
                defn.analyzed.column = defn.column
                return True, info
            # Extending/merging existing TypedDicts
            if any(not isinstance(expr, RefExpr) or
                   expr.fullname not in TPDICT_NAMES and
                   not self.is_typeddict(expr)
                   for expr in defn.base_type_exprs):
                self.fail("All bases of a new TypedDict must be TypedDict types", defn)
            typeddict_bases = list(filter(self.is_typeddict, defn.base_type_exprs))
            keys = []  # type: List[str]
            types = []
            required_keys = set()
            # Merge fields from all TypedDict bases; duplicates across
            # bases are an error and the later occurrence is dropped.
            for base in typeddict_bases:
                assert isinstance(base, RefExpr)
                assert isinstance(base.node, TypeInfo)
                assert isinstance(base.node.typeddict_type, TypedDictType)
                base_typed_dict = base.node.typeddict_type
                base_items = base_typed_dict.items
                valid_items = base_items.copy()
                for key in base_items:
                    if key in keys:
                        self.fail('Cannot overwrite TypedDict field "{}" while merging'
                                  .format(key), defn)
                        valid_items.pop(key)
                keys.extend(valid_items.keys())
                types.extend(valid_items.values())
                required_keys.update(base_typed_dict.required_keys)
            new_keys, new_types, new_required_keys = self.analyze_typeddict_classdef_fields(
                defn, keys)
            if new_keys is None:
                return True, None  # Defer
            keys.extend(new_keys)
            types.extend(new_types)
            required_keys.update(new_required_keys)
            info = self.build_typeddict_typeinfo(defn.name, keys, types, required_keys)
            defn.analyzed = TypedDictExpr(info)
            defn.analyzed.line = defn.line
            defn.analyzed.column = defn.column
            return True, info
        return False, None

    def analyze_typeddict_classdef_fields(
            self,
            defn: ClassDef,
            oldfields: Optional[List[str]] = None) -> Tuple[Optional[List[str]],
                                                            List[Type],
                                                            Set[str]]:
        """Analyze fields defined in a TypedDict class definition.

        This doesn't consider inherited fields (if any). Also consider totality,
        if given.

        Return tuple with these items:
         * List of keys (or None if found an incomplete reference --> deferral)
         * List of types for each key
         * Set of required keys
        """
        fields = []  # type: List[str]
        types = []  # type: List[Type]
        for stmt in defn.defs.body:
            if not isinstance(stmt, AssignmentStmt):
                # Still allow pass or ... (for empty TypedDict's).
                if (not isinstance(stmt, PassStmt) and
                        not (isinstance(stmt, ExpressionStmt) and
                             isinstance(stmt.expr, (EllipsisExpr, StrExpr)))):
                    self.fail(TPDICT_CLASS_ERROR, stmt)
            elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr):
                # An assignment, but an invalid one.
                self.fail(TPDICT_CLASS_ERROR, stmt)
            else:
                name = stmt.lvalues[0].name
                if name in (oldfields or []):
                    self.fail('Cannot overwrite TypedDict field "{}" while extending'
                              .format(name), stmt)
                    continue
                if name in fields:
                    self.fail('Duplicate TypedDict field "{}"'.format(name), stmt)
                    continue
                # Append name and type in this case...
                fields.append(name)
                if stmt.type is None:
                    types.append(AnyType(TypeOfAny.unannotated))
                else:
                    analyzed = self.api.anal_type(stmt.type)
                    if analyzed is None:
                        return None, [], set()  # Need to defer
                    types.append(analyzed)
                # ...despite possible minor failures that allow further analysis.
                if stmt.type is None or hasattr(stmt, 'new_syntax') and not stmt.new_syntax:
                    self.fail(TPDICT_CLASS_ERROR, stmt)
                elif not isinstance(stmt.rvalue, TempNode):
                    # x: int assigns rvalue to TempNode(AnyType())
                    self.fail('Right hand side values are not supported in TypedDict', stmt)
        total = True  # type: Optional[bool]
        if 'total' in defn.keywords:
            total = self.api.parse_bool(defn.keywords['total'])
            if total is None:
                self.fail('Value of "total" must be True or False', defn)
                total = True
        required_keys = set(fields) if total else set()
        return fields, types, required_keys

    def check_typeddict(self,
                        node: Expression,
                        var_name: Optional[str],
                        is_func_scope: bool) -> Tuple[bool, Optional[TypeInfo]]:
        """Check if a call defines a TypedDict.

        The optional var_name argument is the name of the variable to
        which this is assigned, if any.

        Return a pair (is it a typed dict, corresponding TypeInfo).

        If the definition is invalid but looks like a TypedDict,
        report errors but return (some) TypeInfo. If some type is not ready,
        return (True, None).
        """
        if not isinstance(node, CallExpr):
            return False, None
        call = node
        callee = call.callee
        if not isinstance(callee, RefExpr):
            return False, None
        fullname = callee.fullname
        if fullname not in TPDICT_NAMES:
            return False, None
        res = self.parse_typeddict_args(call)
        if res is None:
            # This is a valid typed dict, but some type is not ready.
            # The caller should defer this until next iteration.
            return True, None
        name, items, types, total, ok = res
        if not ok:
            # Error. Construct dummy return value.
            info = self.build_typeddict_typeinfo('TypedDict', [], [], set())
        else:
            if var_name is not None and name != var_name:
                self.fail(
                    "First argument '{}' to TypedDict() does not match variable name '{}'".format(
                        name, var_name), node)
            if name != var_name or is_func_scope:
                # Give it a unique name derived from the line number.
                name += '@' + str(call.line)
            required_keys = set(items) if total else set()
            info = self.build_typeddict_typeinfo(name, items, types, required_keys)
            info.line = node.line
            # Store generated TypeInfo under both names, see semanal_namedtuple for more details.
            if name != var_name or is_func_scope:
                self.api.add_symbol_skip_local(name, info)
        if var_name:
            self.api.add_symbol(var_name, info, node)
        call.analyzed = TypedDictExpr(info)
        call.analyzed.set_line(call.line, call.column)
        return True, info

    def parse_typeddict_args(
            self, call: CallExpr) -> Optional[Tuple[str, List[str], List[Type], bool, bool]]:
        """Parse a TypedDict() call expression.

        Return a tuple (name, field names, field types, totality, ok), where
        'ok' is False if there was an error during parsing.
        If some type is not ready, return None.
        """
        # TODO: Share code with check_argument_count in checkexpr.py?
        args = call.args
        if len(args) < 2:
            return self.fail_typeddict_arg("Too few arguments for TypedDict()", call)
        if len(args) > 3:
            return self.fail_typeddict_arg("Too many arguments for TypedDict()", call)
        # TODO: Support keyword arguments
        if call.arg_kinds not in ([ARG_POS, ARG_POS], [ARG_POS, ARG_POS, ARG_NAMED]):
            return self.fail_typeddict_arg("Unexpected arguments to TypedDict()", call)
        if len(args) == 3 and call.arg_names[2] != 'total':
            return self.fail_typeddict_arg(
                'Unexpected keyword argument "{}" for "TypedDict"'.format(call.arg_names[2]),
                call)
        if not isinstance(args[0], (StrExpr, BytesExpr, UnicodeExpr)):
            return self.fail_typeddict_arg(
                "TypedDict() expects a string literal as the first argument", call)
        if not isinstance(args[1], DictExpr):
            return self.fail_typeddict_arg(
                "TypedDict() expects a dictionary literal as the second argument", call)
        total = True  # type: Optional[bool]
        if len(args) == 3:
            total = self.api.parse_bool(call.args[2])
            if total is None:
                return self.fail_typeddict_arg(
                    'TypedDict() "total" argument must be True or False', call)
        dictexpr = args[1]
        res = self.parse_typeddict_fields_with_types(dictexpr.items, call)
        if res is None:
            # One of the types is not ready, defer.
            return None
        items, types, ok = res
        for t in types:
            check_for_explicit_any(t, self.options, self.api.is_typeshed_stub_file, self.msg,
                                   context=call)

        if self.options.disallow_any_unimported:
            for t in types:
                if has_any_from_unimported_type(t):
                    self.msg.unimported_type_becomes_any("Type of a TypedDict key", t, dictexpr)
        assert total is not None
        return args[0].value, items, types, total, ok

    def parse_typeddict_fields_with_types(
            self,
            dict_items: List[Tuple[Optional[Expression], Expression]],
            context: Context) -> Optional[Tuple[List[str], List[Type], bool]]:
        """Parse typed dict items passed as pairs (name expression, type expression).

        Return names, types, was there an error. If some type is not ready, return None.
        """
        items = []  # type: List[str]
        types = []  # type: List[Type]
        for (field_name_expr, field_type_expr) in dict_items:
            if isinstance(field_name_expr, (StrExpr, BytesExpr, UnicodeExpr)):
                items.append(field_name_expr.value)
            else:
                name_context = field_name_expr or field_type_expr
                self.fail_typeddict_arg("Invalid TypedDict() field name", name_context)
                return [], [], False
            try:
                type = expr_to_unanalyzed_type(field_type_expr)
            except TypeTranslationError:
                self.fail_typeddict_arg('Invalid field type', field_type_expr)
                return [], [], False
            analyzed = self.api.anal_type(type)
            if analyzed is None:
                return None
            types.append(analyzed)
        return items, types, True

    def fail_typeddict_arg(self, message: str,
                           context: Context) -> Tuple[str, List[str], List[Type], bool, bool]:
        # Report an error and return a dummy result with ok=False.
        self.fail(message, context)
        return '', [], [], True, False

    def build_typeddict_typeinfo(self, name: str, items: List[str],
                                 types: List[Type],
                                 required_keys: Set[str]) -> TypeInfo:
        """Create a TypeInfo carrying a TypedDictType for the given fields."""
        # Prefer typing then typing_extensions if available.
        fallback = (self.api.named_type_or_none('typing._TypedDict', []) or
                    self.api.named_type_or_none('typing_extensions._TypedDict', []) or
                    self.api.named_type_or_none('mypy_extensions._TypedDict', []))
        assert fallback is not None
        info = self.api.basic_new_typeinfo(name, fallback)
        info.typeddict_type = TypedDictType(OrderedDict(zip(items, types)), required_keys,
                                            fallback)
        return info

    # Helpers

    def is_typeddict(self, expr: Expression) -> bool:
        # True if 'expr' refers to a class whose TypeInfo carries a TypedDictType.
        return (isinstance(expr, RefExpr) and isinstance(expr.node, TypeInfo) and
                expr.node.typeddict_type is not None)

    def fail(self, msg: str, ctx: Context) -> None:
        self.api.fail(msg, ctx)
Summary of how this works for certain kinds of differences:

* If a symbol table node is deleted or added (only present in old/new version
  of the symbol table), it is considered different, of course.

* If a symbol table node refers to a different sort of thing in the new version,
  it is considered different (for example, if a class is replaced with a
  function).

* If the signature of a function has changed, it is considered different.

* If the type of a variable changes, it is considered different.

* If the MRO of a class changes, or a non-generic class is turned into a
  generic class, the class is considered different (there are other such "big"
  differences that cause a class to be considered changed). However, just
  changes to attributes or methods don't generally constitute a difference at
  the class level -- these are handled at attribute level (say, 'mod.Cls.method'
  is different rather than 'mod.Cls' being different).

* If an imported name targets a different name (say, 'from x import y' is
  replaced with 'from z import y'), the name in the module is considered
  different. If the target of an import continues to have the same name, but
  its specifics change, this doesn't mean that the imported name is treated
  as changed. Say, there is 'from x import y' in 'm', and the type of 'x.y'
  has changed. This doesn't mean that 'm.y' is considered changed. Instead,
  processing the difference in 'm' will be handled through fine-grained
  dependencies.
def compare_symbol_table_snapshots(
        name_prefix: str,
        snapshot1: Dict[str, SnapshotItem],
        snapshot2: Dict[str, SnapshotItem]) -> Set[str]:
    """Return names that are different in two snapshots of a symbol table.

    Only shallow (intra-module) differences are considered. References to
    things defined outside the module are compared based on the name of the
    target only.

    Recurse into class symbol tables (if the class is defined in the target
    module).

    Return a set of fully-qualified names (e.g., 'mod.func' or 'mod.Class.method').
    """
    # Names present in exactly one of the two snapshots are trivially changed.
    changed = {'%s.%s' % (name_prefix, short)
               for short in set(snapshot1) ^ set(snapshot2)}

    # Compare names present in both versions.
    for short in set(snapshot1) & set(snapshot2):
        old = snapshot1[short]
        new = snapshot2[short]
        qualified = '%s.%s' % (name_prefix, short)
        if old[0] != new[0]:
            # Different kind of node in two snapshots -> trivially different.
            changed.add(qualified)
        elif old[0] == 'TypeInfo':
            if old[:-1] != new[:-1]:
                # Record major difference (outside class symbol tables).
                changed.add(qualified)
            # Descend into the nested class symbol tables, which are stored
            # as the final snapshot item.
            assert isinstance(old[-1], dict)
            assert isinstance(new[-1], dict)
            changed |= compare_symbol_table_snapshots(qualified, old[-1], new[-1])
        elif old != new:
            # Shallow node (no interesting internal structure) -- plain equality.
            changed.add(qualified)

    return changed
def snapshot_definition(node: Optional[SymbolNode],
                        common: Tuple[object, ...]) -> Tuple[object, ...]:
    """Create a snapshot description of a symbol table node.

    The representation is nested tuples and dicts. Only externally
    visible attributes are included.
    """
    if isinstance(node, FuncBase):
        # TODO: info
        if node.type:
            signature = snapshot_type(node.type)
        else:
            # No declared type: fall back to a structural summary of the
            # untyped signature so changes are still detected.
            signature = snapshot_untyped_signature(node)
        return ('Func', common,
                node.is_property, node.is_final,
                node.is_class, node.is_static,
                signature)
    elif isinstance(node, Var):
        return ('Var', common,
                snapshot_optional_type(node.type),
                node.is_final)
    elif isinstance(node, Decorator):
        # Note that decorated methods are represented by Decorator instances in
        # a symbol table since we need to preserve information about the
        # decorated function (whether it's a class function, for
        # example). Top-level decorated functions, however, are represented by
        # the corresponding Var node, since that happens to provide enough
        # context.
        return ('Decorator',
                node.is_overload,
                snapshot_optional_type(node.var.type),
                snapshot_definition(node.func, common))
    elif isinstance(node, TypeInfo):
        attrs = (node.is_abstract,
                 node.is_enum,
                 node.is_protocol,
                 node.fallback_to_any,
                 node.is_named_tuple,
                 node.is_newtype,
                 # We need this to e.g. trigger metaclass calculation in subclasses.
                 snapshot_optional_type(node.metaclass_type),
                 snapshot_optional_type(node.tuple_type),
                 snapshot_optional_type(node.typeddict_type),
                 [base.fullname for base in node.mro],
                 # Note that the structure of type variables is a part of the external
                 # interface, since creating instances might fail, for example:
                 #     T = TypeVar('T', bound=int)
                 #     class C(Generic[T]):
                 #         ...
                 #     x: C[str] <- this is invalid, and needs to be re-checked if `T` changes.
                 # An alternative would be to create both deps: <...> -> C, and <...> -> <T>,
                 # but this currently seems a bit ad hoc.
                 tuple(snapshot_type(TypeVarType(tdef)) for tdef in node.defn.type_vars),
                 [snapshot_type(base) for base in node.bases],
                 snapshot_optional_type(node._promote))
        prefix = node.fullname
        # Recursively snapshot the class body's symbol table.
        symbol_table = snapshot_symbol_table(prefix, node.names)
        # Special dependency for abstract attribute handling.
        symbol_table['(abstract)'] = ('Abstract', tuple(sorted(node.abstract_attributes)))
        return ('TypeInfo', common, attrs, symbol_table)
    else:
        # Other node types are handled elsewhere.
        assert False, type(node)


def snapshot_type(typ: Type) -> SnapshotItem:
    """Create a snapshot representation of a type using nested tuples."""
    return typ.accept(SnapshotTypeVisitor())


def snapshot_optional_type(typ: Optional[Type]) -> Optional[SnapshotItem]:
    # Preserve None as-is so "no type" never collides with a real type snapshot.
    if typ:
        return snapshot_type(typ)
    else:
        return None


def snapshot_types(types: Sequence[Type]) -> SnapshotItem:
    return tuple(snapshot_type(item) for item in types)


def snapshot_simple_type(typ: Type) -> SnapshotItem:
    # For types with no internal structure, the class name alone identifies them.
    return (type(typ).__name__,)


def encode_optional_str(s: Optional[str]) -> str:
    # Snapshot tuples must be sortable, so encode None as '' rather than
    # mixing str and None values in the same tuple position.
    if s is None:
        return ''
    else:
        return s
- Two snapshots represent the same object if and only if they are equal. - Results must be sortable. It's important that tuples have consistent types and can't arbitrarily mix str and None values, for example, since they can't be compared. """ def visit_unbound_type(self, typ: UnboundType) -> SnapshotItem: return ('UnboundType', typ.name, typ.optional, typ.empty_tuple_index, snapshot_types(typ.args)) def visit_any(self, typ: AnyType) -> SnapshotItem: return snapshot_simple_type(typ) def visit_none_type(self, typ: NoneType) -> SnapshotItem: return snapshot_simple_type(typ) def visit_uninhabited_type(self, typ: UninhabitedType) -> SnapshotItem: return snapshot_simple_type(typ) def visit_erased_type(self, typ: ErasedType) -> SnapshotItem: return snapshot_simple_type(typ) def visit_deleted_type(self, typ: DeletedType) -> SnapshotItem: return snapshot_simple_type(typ) def visit_instance(self, typ: Instance) -> SnapshotItem: return ('Instance', encode_optional_str(typ.type.fullname), snapshot_types(typ.args), ('None',) if typ.last_known_value is None else snapshot_type(typ.last_known_value)) def visit_type_var(self, typ: TypeVarType) -> SnapshotItem: return ('TypeVar', typ.name, typ.fullname, typ.id.raw_id, typ.id.meta_level, snapshot_types(typ.values), snapshot_type(typ.upper_bound), typ.variance) def visit_callable_type(self, typ: CallableType) -> SnapshotItem: # FIX generics return ('CallableType', snapshot_types(typ.arg_types), snapshot_type(typ.ret_type), tuple([encode_optional_str(name) for name in typ.arg_names]), tuple(typ.arg_kinds), typ.is_type_obj(), typ.is_ellipsis_args) def visit_tuple_type(self, typ: TupleType) -> SnapshotItem: return ('TupleType', snapshot_types(typ.items)) def visit_typeddict_type(self, typ: TypedDictType) -> SnapshotItem: items = tuple((key, snapshot_type(item_type)) for key, item_type in typ.items.items()) required = tuple(sorted(typ.required_keys)) return ('TypedDictType', items, required) def visit_literal_type(self, typ: LiteralType) 
-> SnapshotItem: return ('LiteralType', snapshot_type(typ.fallback), typ.value) def visit_union_type(self, typ: UnionType) -> SnapshotItem: # Sort and remove duplicates so that we can use equality to test for # equivalent union type snapshots. items = {snapshot_type(item) for item in typ.items} normalized = tuple(sorted(items)) return ('UnionType', normalized) def visit_overloaded(self, typ: Overloaded) -> SnapshotItem: return ('Overloaded', snapshot_types(typ.items())) def visit_partial_type(self, typ: PartialType) -> SnapshotItem: # A partial type is not fully defined, so the result is indeterminate. We shouldn't # get here. raise RuntimeError def visit_type_type(self, typ: TypeType) -> SnapshotItem: return ('TypeType', snapshot_type(typ.item)) def visit_type_alias_type(self, typ: TypeAliasType) -> SnapshotItem: assert typ.alias is not None return ('TypeAliasType', typ.alias.fullname, snapshot_types(typ.args)) def snapshot_untyped_signature(func: Union[OverloadedFuncDef, FuncItem]) -> Tuple[object, ...]: """Create a snapshot of the signature of a function that has no explicit signature. If the arguments to a function without signature change, it must be considered as different. We have this special casing since we don't store the implicit signature anywhere, and we'd rather not construct new Callable objects in this module (the idea is to only read properties of the AST here). """ if isinstance(func, FuncItem): return (tuple(func.arg_names), tuple(func.arg_kinds)) else: result = [] for item in func.items: if isinstance(item, Decorator): if item.var.type: result.append(snapshot_type(item.var.type)) else: result.append(('DecoratorWithoutType',)) else: result.append(snapshot_untyped_signature(item)) return tuple(result) mypy-0.761/mypy/server/astmerge.py0000644€tŠÔÚ€2›s®0000004445613576752246023422 0ustar jukkaDROPBOX\Domain Users00000000000000"""Merge a new version of a module AST and symbol table to older versions of those. 
When the source code of a module has a change in fine-grained incremental mode, we build a new AST from the updated source. However, other parts of the program may have direct references to parts of the old AST (namely, those nodes exposed in the module symbol table). The merge operation changes the identities of new AST nodes that have a correspondence in the old AST to the old ones so that existing cross-references in other modules will continue to point to the correct nodes. Also internal cross-references within the new AST are replaced. AST nodes that aren't externally visible will get new, distinct object identities. This applies to most expression and statement nodes, for example. We perform this merge operation so that we don't have to update all external references (which would be slow and fragile) or always perform translation when looking up references (which would be hard to retrofit). The AST merge operation is performed after semantic analysis. Semantic analysis has to deal with potentially multiple aliases to certain AST nodes (in particular, MypyFile nodes). Type checking assumes that we don't have multiple variants of a single AST node visible to the type checker. Discussion of some notable special cases: * If a node is replaced with a different kind of node (say, a function is replaced with a class), we don't perform the merge. Fine-grained dependencies will be used to rebind all references to the node. * If a function is replaced with another function with an identical signature, call sites continue to point to the same object (by identity) and don't need to be reprocessed. Similarly, if a class is replaced with a class that is sufficiently similar (MRO preserved, etc.), class references don't need any processing. A typical incremental update to a file only changes a few externally visible things in a module, and this means that often only few external references need any processing, even if the modified module is large. 
* A no-op update of a module should not require any processing outside the module, since all relevant object identities are preserved. * The AST diff operation (mypy.server.astdiff) and the top-level fine-grained incremental logic (mypy.server.update) handle the cases where the new AST has differences from the old one that may need to be propagated to elsewhere in the program. See the main entry point merge_asts for more details. """ from typing import Dict, List, cast, TypeVar, Optional from mypy.nodes import ( MypyFile, SymbolTable, Block, AssignmentStmt, NameExpr, MemberExpr, RefExpr, TypeInfo, FuncDef, ClassDef, NamedTupleExpr, SymbolNode, Var, Statement, SuperExpr, NewTypeExpr, OverloadedFuncDef, LambdaExpr, TypedDictExpr, EnumCallExpr, FuncBase, TypeAliasExpr, CallExpr, CastExpr, MDEF ) from mypy.traverser import TraverserVisitor from mypy.types import ( Type, SyntheticTypeVisitor, Instance, AnyType, NoneType, CallableType, ErasedType, DeletedType, TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType, Overloaded, TypeVarDef, TypeList, CallableArgument, EllipsisType, StarType, LiteralType, RawExpressionType, PartialType, PlaceholderType, TypeAliasType ) from mypy.util import get_prefix, replace_object_state from mypy.typestate import TypeState def merge_asts(old: MypyFile, old_symbols: SymbolTable, new: MypyFile, new_symbols: SymbolTable) -> None: """Merge a new version of a module AST to a previous version. The main idea is to preserve the identities of externally visible nodes in the old AST (that have a corresponding node in the new AST). All old node state (outside identity) will come from the new AST. When this returns, 'old' will refer to the merged AST, but 'new_symbols' will be the new symbol table. 'new' and 'old_symbols' will no longer be valid. """ assert new.fullname == old.fullname # Find the mapping from new to old node identities for all nodes # whose identities should be preserved. 
replacement_map = replacement_map_from_symbol_table( old_symbols, new_symbols, prefix=old.fullname) # Also replace references to the new MypyFile node. replacement_map[new] = old # Perform replacements to everywhere within the new AST (not including symbol # tables). node = replace_nodes_in_ast(new, replacement_map) assert node is old # Also replace AST node references in the *new* symbol table (we'll # continue to use the new symbol table since it has all the new definitions # that have no correspondence in the old AST). replace_nodes_in_symbol_table(new_symbols, replacement_map) def replacement_map_from_symbol_table( old: SymbolTable, new: SymbolTable, prefix: str) -> Dict[SymbolNode, SymbolNode]: """Create a new-to-old object identity map by comparing two symbol table revisions. Both symbol tables must refer to revisions of the same module id. The symbol tables are compared recursively (recursing into nested class symbol tables), but only within the given module prefix. Don't recurse into other modules accessible through the symbol table. """ replacements = {} # type: Dict[SymbolNode, SymbolNode] for name, node in old.items(): if (name in new and (node.kind == MDEF or node.node and get_prefix(node.node.fullname) == prefix)): new_node = new[name] if (type(new_node.node) == type(node.node) # noqa and new_node.node and node.node and new_node.node.fullname == node.node.fullname and new_node.kind == node.kind): replacements[new_node.node] = node.node if isinstance(node.node, TypeInfo) and isinstance(new_node.node, TypeInfo): type_repl = replacement_map_from_symbol_table( node.node.names, new_node.node.names, prefix) replacements.update(type_repl) return replacements def replace_nodes_in_ast(node: SymbolNode, replacements: Dict[SymbolNode, SymbolNode]) -> SymbolNode: """Replace all references to replacement map keys within an AST node, recursively. Also replace the *identity* of any nodes that have replacements. 
Return the *replaced* version of the argument node (which may have a different identity, if it's included in the replacement map). """ visitor = NodeReplaceVisitor(replacements) node.accept(visitor) return replacements.get(node, node) SN = TypeVar('SN', bound=SymbolNode) class NodeReplaceVisitor(TraverserVisitor): """Transform some nodes to new identities in an AST. Only nodes that live in the symbol table may be replaced, which simplifies the implementation some. Also replace all references to the old identities. """ def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None: self.replacements = replacements def visit_mypy_file(self, node: MypyFile) -> None: node = self.fixup(node) node.defs = self.replace_statements(node.defs) super().visit_mypy_file(node) def visit_block(self, node: Block) -> None: super().visit_block(node) node.body = self.replace_statements(node.body) def visit_func_def(self, node: FuncDef) -> None: node = self.fixup(node) self.process_base_func(node) super().visit_func_def(node) def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> None: self.process_base_func(node) super().visit_overloaded_func_def(node) def visit_class_def(self, node: ClassDef) -> None: # TODO additional things? 
node.info = self.fixup_and_reset_typeinfo(node.info) node.defs.body = self.replace_statements(node.defs.body) info = node.info for tv in node.type_vars: self.process_type_var_def(tv) if info: if info.is_named_tuple: self.process_synthetic_type_info(info) else: self.process_type_info(info) super().visit_class_def(node) def process_base_func(self, node: FuncBase) -> None: self.fixup_type(node.type) node.info = self.fixup(node.info) if node.unanalyzed_type: # Unanalyzed types can have AST node references self.fixup_type(node.unanalyzed_type) def process_type_var_def(self, tv: TypeVarDef) -> None: for value in tv.values: self.fixup_type(value) self.fixup_type(tv.upper_bound) def visit_assignment_stmt(self, node: AssignmentStmt) -> None: self.fixup_type(node.type) super().visit_assignment_stmt(node) # Expressions def visit_name_expr(self, node: NameExpr) -> None: self.visit_ref_expr(node) def visit_member_expr(self, node: MemberExpr) -> None: if node.def_var: node.def_var = self.fixup(node.def_var) self.visit_ref_expr(node) super().visit_member_expr(node) def visit_ref_expr(self, node: RefExpr) -> None: if node.node is not None: node.node = self.fixup(node.node) if isinstance(node.node, Var): # The Var node may be an orphan and won't otherwise be processed. 
fixup_var(node.node, self.replacements) def visit_namedtuple_expr(self, node: NamedTupleExpr) -> None: super().visit_namedtuple_expr(node) node.info = self.fixup_and_reset_typeinfo(node.info) self.process_synthetic_type_info(node.info) def visit_cast_expr(self, node: CastExpr) -> None: super().visit_cast_expr(node) self.fixup_type(node.type) def visit_super_expr(self, node: SuperExpr) -> None: super().visit_super_expr(node) if node.info is not None: node.info = self.fixup(node.info) def visit_call_expr(self, node: CallExpr) -> None: super().visit_call_expr(node) if isinstance(node.analyzed, SymbolNode): node.analyzed = self.fixup(node.analyzed) def visit_newtype_expr(self, node: NewTypeExpr) -> None: if node.info: node.info = self.fixup_and_reset_typeinfo(node.info) self.process_synthetic_type_info(node.info) self.fixup_type(node.old_type) super().visit_newtype_expr(node) def visit_lambda_expr(self, node: LambdaExpr) -> None: node.info = self.fixup(node.info) super().visit_lambda_expr(node) def visit_typeddict_expr(self, node: TypedDictExpr) -> None: super().visit_typeddict_expr(node) node.info = self.fixup_and_reset_typeinfo(node.info) self.process_synthetic_type_info(node.info) def visit_enum_call_expr(self, node: EnumCallExpr) -> None: node.info = self.fixup_and_reset_typeinfo(node.info) self.process_synthetic_type_info(node.info) super().visit_enum_call_expr(node) def visit_type_alias_expr(self, node: TypeAliasExpr) -> None: self.fixup_type(node.type) super().visit_type_alias_expr(node) # Others def visit_var(self, node: Var) -> None: node.info = self.fixup(node.info) self.fixup_type(node.type) super().visit_var(node) # Helpers def fixup(self, node: SN) -> SN: if node in self.replacements: new = self.replacements[node] replace_object_state(new, node) return cast(SN, new) return node def fixup_and_reset_typeinfo(self, node: TypeInfo) -> TypeInfo: """Fix-up type info and reset subtype caches. 
This needs to be called at least once per each merged TypeInfo, as otherwise we may leak stale caches. """ if node in self.replacements: # The subclass relationships may change, so reset all caches relevant to the # old MRO. new = cast(TypeInfo, self.replacements[node]) TypeState.reset_all_subtype_caches_for(new) return self.fixup(node) def fixup_type(self, typ: Optional[Type]) -> None: if typ is not None: typ.accept(TypeReplaceVisitor(self.replacements)) def process_type_info(self, info: Optional[TypeInfo]) -> None: if info is None: return self.fixup_type(info.declared_metaclass) self.fixup_type(info.metaclass_type) self.fixup_type(info._promote) self.fixup_type(info.tuple_type) self.fixup_type(info.typeddict_type) info.defn.info = self.fixup(info) replace_nodes_in_symbol_table(info.names, self.replacements) for i, item in enumerate(info.mro): info.mro[i] = self.fixup(info.mro[i]) for i, base in enumerate(info.bases): self.fixup_type(info.bases[i]) def process_synthetic_type_info(self, info: TypeInfo) -> None: # Synthetic types (types not created using a class statement) don't # have bodies in the AST so we need to iterate over their symbol # tables separately, unlike normal classes. self.process_type_info(info) for name, node in info.names.items(): if node.node: node.node.accept(self) def replace_statements(self, nodes: List[Statement]) -> List[Statement]: result = [] for node in nodes: if isinstance(node, SymbolNode): node = self.fixup(node) result.append(node) return result class TypeReplaceVisitor(SyntheticTypeVisitor[None]): """Similar to NodeReplaceVisitor, but for type objects. Note: this visitor may sometimes visit unanalyzed types such as 'UnboundType' and 'RawExpressionType' For example, see NodeReplaceVisitor.process_base_func. 
""" def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None: self.replacements = replacements def visit_instance(self, typ: Instance) -> None: typ.type = self.fixup(typ.type) for arg in typ.args: arg.accept(self) if typ.last_known_value: typ.last_known_value.accept(self) def visit_type_alias_type(self, typ: TypeAliasType) -> None: assert typ.alias is not None typ.alias = self.fixup(typ.alias) for arg in typ.args: arg.accept(self) def visit_any(self, typ: AnyType) -> None: pass def visit_none_type(self, typ: NoneType) -> None: pass def visit_callable_type(self, typ: CallableType) -> None: for arg in typ.arg_types: arg.accept(self) typ.ret_type.accept(self) if typ.definition: # No need to fixup since this is just a cross-reference. typ.definition = self.replacements.get(typ.definition, typ.definition) # Fallback can be None for callable types that haven't been semantically analyzed. if typ.fallback is not None: typ.fallback.accept(self) for tv in typ.variables: tv.upper_bound.accept(self) for value in tv.values: value.accept(self) def visit_overloaded(self, t: Overloaded) -> None: for item in t.items(): item.accept(self) # Fallback can be None for overloaded types that haven't been semantically analyzed. if t.fallback is not None: t.fallback.accept(self) def visit_erased_type(self, t: ErasedType) -> None: # This type should exist only temporarily during type inference raise RuntimeError def visit_deleted_type(self, typ: DeletedType) -> None: pass def visit_partial_type(self, typ: PartialType) -> None: raise RuntimeError def visit_tuple_type(self, typ: TupleType) -> None: for item in typ.items: item.accept(self) # Fallback can be None for implicit tuple types that haven't been semantically analyzed. 
if typ.partial_fallback is not None: typ.partial_fallback.accept(self) def visit_type_type(self, typ: TypeType) -> None: typ.item.accept(self) def visit_type_var(self, typ: TypeVarType) -> None: typ.upper_bound.accept(self) for value in typ.values: value.accept(self) def visit_typeddict_type(self, typ: TypedDictType) -> None: for value_type in typ.items.values(): value_type.accept(self) typ.fallback.accept(self) def visit_raw_expression_type(self, t: RawExpressionType) -> None: pass def visit_literal_type(self, typ: LiteralType) -> None: typ.fallback.accept(self) def visit_unbound_type(self, typ: UnboundType) -> None: for arg in typ.args: arg.accept(self) def visit_type_list(self, typ: TypeList) -> None: for item in typ.items: item.accept(self) def visit_callable_argument(self, typ: CallableArgument) -> None: typ.typ.accept(self) def visit_ellipsis_type(self, typ: EllipsisType) -> None: pass def visit_star_type(self, typ: StarType) -> None: typ.type.accept(self) def visit_uninhabited_type(self, typ: UninhabitedType) -> None: pass def visit_union_type(self, typ: UnionType) -> None: for item in typ.items: item.accept(self) def visit_placeholder_type(self, t: PlaceholderType) -> None: for item in t.args: item.accept(self) # Helpers def fixup(self, node: SN) -> SN: if node in self.replacements: new = self.replacements[node] return cast(SN, new) return node def replace_nodes_in_symbol_table(symbols: SymbolTable, replacements: Dict[SymbolNode, SymbolNode]) -> None: for name, node in symbols.items(): if node.node: if node.node in replacements: new = replacements[node.node] old = node.node replace_object_state(new, old) node.node = new if isinstance(node.node, Var): # Handle them here just in case these aren't exposed through the AST. # TODO: Is this necessary? 
fixup_var(node.node, replacements) def fixup_var(node: Var, replacements: Dict[SymbolNode, SymbolNode]) -> None: if node.type: node.type.accept(TypeReplaceVisitor(replacements)) node.info = cast(TypeInfo, replacements.get(node.info, node.info)) mypy-0.761/mypy/server/aststrip.py0000644€tŠÔÚ€2›s®0000002527413576752246023461 0ustar jukkaDROPBOX\Domain Users00000000000000"""Strip/reset AST in-place to match state after semantic analyzer pre-analysis. Fine-grained incremental mode reruns semantic analysis main pass and type checking for *existing* AST nodes (targets) when changes are propagated using fine-grained dependencies. AST nodes attributes are sometimes changed during semantic analysis main pass, and running semantic analysis again on those nodes would produce incorrect results, since this pass isn't idempotent. This pass resets AST nodes to reflect the state after semantic pre-analysis, so that we can rerun semantic analysis. (The above is in contrast to behavior with modules that have source code changes, for which we re-parse the entire module and reconstruct a fresh AST. No stripping is required in this case. Both modes of operation should have the same outcome.) Notes: * This is currently pretty fragile, as we must carefully undo whatever changes can be made in semantic analysis main pass, including changes to symbol tables. * We reuse existing AST nodes because it makes it relatively straightforward to reprocess only a single target within a module efficiently. If there was a way to parse a single target within a file, in time proportional to the size of the target, we'd rather create fresh AST nodes than strip them. (This is possible only in Python 3.8+) * Currently we don't actually reset all changes, but only those known to affect non-idempotent semantic analysis behavior. TODO: It would be more principled and less fragile to reset everything changed in semantic analysis main pass and later. 
* Reprocessing may recreate AST nodes (such as Var nodes, and TypeInfo nodes created with assignment statements) that will get different identities from the original AST. Thus running an AST merge is necessary after stripping, even though some identities are preserved. """ import contextlib from typing import Union, Iterator, Optional, Dict, Tuple from mypy.nodes import ( FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, ClassDef, AssignmentStmt, ImportFrom, CallExpr, Decorator, OverloadedFuncDef, Node, TupleExpr, ListExpr, SuperExpr, IndexExpr, ImportAll, ForStmt, Block, CLASSDEF_NO_INFO, TypeInfo, StarExpr, Var, SymbolTableNode ) from mypy.traverser import TraverserVisitor from mypy.types import CallableType from mypy.typestate import TypeState SavedAttributes = Dict[Tuple[ClassDef, str], SymbolTableNode] def strip_target(node: Union[MypyFile, FuncDef, OverloadedFuncDef], saved_attrs: SavedAttributes) -> None: """Reset a fine-grained incremental target to state before semantic analysis. All TypeInfos are killed. Therefore we need to preserve the variables defined as attributes on self. This is done by patches (callbacks) returned from this function that re-add these variables when called. Args: node: node to strip saved_attrs: collect attributes here that may need to be re-added to classes afterwards if stripping a class body (this dict is mutated) """ visitor = NodeStripVisitor(saved_attrs) if isinstance(node, MypyFile): visitor.strip_file_top_level(node) else: node.accept(visitor) class NodeStripVisitor(TraverserVisitor): def __init__(self, saved_class_attrs: SavedAttributes) -> None: # The current active class. self.type = None # type: Optional[TypeInfo] # This is True at class scope, but not in methods. self.is_class_body = False # By default, process function definitions. If False, don't -- this is used for # processing module top levels. 
self.recurse_into_functions = True # These attributes were removed from top-level classes during strip and # will be added afterwards (if no existing definition is found). These # must be added back before semantically analyzing any methods. self.saved_class_attrs = saved_class_attrs def strip_file_top_level(self, file_node: MypyFile) -> None: """Strip a module top-level (don't recursive into functions).""" self.recurse_into_functions = False file_node.plugin_deps.clear() file_node.accept(self) for name in file_node.names.copy(): # TODO: this is a hot fix, we should delete all names, # see https://github.com/python/mypy/issues/6422. if '@' not in name: del file_node.names[name] def visit_block(self, b: Block) -> None: if b.is_unreachable: return super().visit_block(b) def visit_class_def(self, node: ClassDef) -> None: """Strip class body and type info, but don't strip methods.""" # We need to save the implicitly defined instance variables, # i.e. those defined as attributes on self. Otherwise, they would # be lost if we only reprocess top-levels (this kills TypeInfos) # but not the methods that defined those variables. if not self.recurse_into_functions: self.save_implicit_attributes(node) # We need to delete any entries that were generated by plugins, # since they will get regenerated. to_delete = {v.node for v in node.info.names.values() if v.plugin_generated} node.type_vars = [] node.base_type_exprs.extend(node.removed_base_type_exprs) node.removed_base_type_exprs = [] node.defs.body = [s for s in node.defs.body if s not in to_delete] # type: ignore[comparison-overlap] with self.enter_class(node.info): super().visit_class_def(node) TypeState.reset_subtype_caches_for(node.info) # Kill the TypeInfo, since there is none before semantic analysis. 
node.info = CLASSDEF_NO_INFO def save_implicit_attributes(self, node: ClassDef) -> None: """Produce callbacks that re-add attributes defined on self.""" for name, sym in node.info.names.items(): if isinstance(sym.node, Var) and sym.implicit: self.saved_class_attrs[node, name] = sym def visit_func_def(self, node: FuncDef) -> None: if not self.recurse_into_functions: return node.expanded = [] node.type = node.unanalyzed_type if node.type: # Type variable binder binds type variables before the type is analyzed, # this causes unanalyzed_type to be modified in place. We needed to revert this # in order to get the state exactly as it was before semantic analysis. # See also #4814. assert isinstance(node.type, CallableType) node.type.variables = [] with self.enter_method(node.info) if node.info else nothing(): super().visit_func_def(node) def visit_decorator(self, node: Decorator) -> None: node.var.type = None for expr in node.decorators: expr.accept(self) if self.recurse_into_functions: node.func.accept(self) else: # Only touch the final status if we re-process # the top level, since decorators are processed there. node.var.is_final = False node.func.is_final = False def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> None: if not self.recurse_into_functions: return # Revert change made during semantic analysis main pass. node.items = node.unanalyzed_items.copy() node.impl = None node.is_final = False super().visit_overloaded_func_def(node) def visit_assignment_stmt(self, node: AssignmentStmt) -> None: node.type = node.unanalyzed_type node.is_final_def = False node.is_alias_def = False if self.type and not self.is_class_body: for lvalue in node.lvalues: # Revert assignments made via self attributes. 
self.process_lvalue_in_method(lvalue) super().visit_assignment_stmt(node) def visit_import_from(self, node: ImportFrom) -> None: node.assignments = [] def visit_import_all(self, node: ImportAll) -> None: node.assignments = [] def visit_for_stmt(self, node: ForStmt) -> None: node.index_type = node.unanalyzed_index_type node.inferred_item_type = None node.inferred_iterator_type = None super().visit_for_stmt(node) def visit_name_expr(self, node: NameExpr) -> None: self.strip_ref_expr(node) def visit_member_expr(self, node: MemberExpr) -> None: self.strip_ref_expr(node) super().visit_member_expr(node) def visit_index_expr(self, node: IndexExpr) -> None: node.analyzed = None # May have been an alias or type application. super().visit_index_expr(node) def strip_ref_expr(self, node: RefExpr) -> None: node.kind = None node.node = None node.fullname = None node.is_new_def = False node.is_inferred_def = False def visit_call_expr(self, node: CallExpr) -> None: node.analyzed = None super().visit_call_expr(node) def visit_super_expr(self, node: SuperExpr) -> None: node.info = None super().visit_super_expr(node) def process_lvalue_in_method(self, lvalue: Node) -> None: if isinstance(lvalue, MemberExpr): if lvalue.is_new_def: # Remove defined attribute from the class symbol table. If is_new_def is # true for a MemberExpr, we know that it must be an assignment through # self, since only those can define new attributes. 
assert self.type is not None if lvalue.name in self.type.names: del self.type.names[lvalue.name] key = (self.type.defn, lvalue.name) if key in self.saved_class_attrs: del self.saved_class_attrs[key] elif isinstance(lvalue, (TupleExpr, ListExpr)): for item in lvalue.items: self.process_lvalue_in_method(item) elif isinstance(lvalue, StarExpr): self.process_lvalue_in_method(lvalue.expr) @contextlib.contextmanager def enter_class(self, info: TypeInfo) -> Iterator[None]: old_type = self.type old_is_class_body = self.is_class_body self.type = info self.is_class_body = True yield self.type = old_type self.is_class_body = old_is_class_body @contextlib.contextmanager def enter_method(self, info: TypeInfo) -> Iterator[None]: old_type = self.type old_is_class_body = self.is_class_body self.type = info self.is_class_body = False yield self.type = old_type self.is_class_body = old_is_class_body @contextlib.contextmanager def nothing() -> Iterator[None]: yield mypy-0.761/mypy/server/deps.py0000644€tŠÔÚ€2›s®0000013615113576752246022540 0ustar jukkaDROPBOX\Domain Users00000000000000"""Generate fine-grained dependencies for AST nodes, for use in the daemon mode. Dependencies are stored in a map from *triggers* to *sets of affected locations*. A trigger is a string that represents a program property that has changed, such as the signature of a specific function. Triggers are written as '<...>' (angle brackets). When a program property changes, we determine the relevant trigger(s) and all affected locations. The latter are stale and will have to be reprocessed. An affected location is a string than can refer to a *target* (a non-nested function or method, or a module top level), a class, or a trigger (for recursively triggering other triggers). 
Here's an example representation of a simple dependency map (in format " -> locations"): -> m.f -> , m.A, m.f Assuming 'A' is a class, this means that 1) if a property of 'm.A.g', such as the signature, is changed, we need to process target (function) 'm.f' 2) if the MRO or other significant property of class 'm.A' changes, we need to process target 'm.f', the entire class 'm.A', and locations triggered by trigger '' (this explanation is a bit simplified; see below for more details). The triggers to fire are determined using mypy.server.astdiff. Examples of triggers: * '' represents a module attribute/function/class. If any externally visible property of 'x' changes, this gets fired. For changes within classes, only "big" changes cause the class to be triggered (such as a change in MRO). Smaller changes, such as changes to some attributes, don't trigger the entire class. * '' represents the type and kind of attribute/method 'x' of class 'mod.Cls'. This can also refer to an attribute inherited from a base class (relevant if it's accessed through a value of type 'Cls' instead of the base class type). * '' represents the existence of module 'package.mod'. This gets triggered if 'package.mod' is created or deleted, or if it gets changed into something other than a module. Examples of locations: * 'mod' is the top level of module 'mod' (doesn't include any function bodies, but includes class bodies not nested within a function). * 'mod.f' is function 'f' in module 'mod' (module-level variables aren't separate locations but are included in the module top level). Functions also include any nested functions and classes -- such nested definitions aren't separate locations, for simplicity of implementation. * 'mod.Cls.f' is method 'f' of 'mod.Cls'. Non-method attributes aren't locations. * 'mod.Cls' represents each method in class 'mod.Cls' + the top-level of the module 'mod'. 
  (To simplify the implementation, there is no location that only includes
  the body of a class without the entire surrounding module top level.)

* Trigger '<...>' as a location is an indirect way of referring to all
  locations triggered by the trigger. These indirect locations keep the
  dependency map smaller and easier to manage.

Triggers can be triggered by program changes such as these:

* Addition or deletion of an attribute (or module).
* Change of the kind of thing a name represents (such as a change from a
  function to a class).
* Change of the static type of a name.

Changes in the body of a function that aren't reflected in the signature
don't cause the function to be triggered. More generally, we trigger only
on changes that may affect type checking results outside the module that
contains the change.

We don't generate dependencies from builtins and certain other stdlib
modules, since these change very rarely, and they would just increase the
size of the dependency map significantly without significant benefit.

Test cases for this module live in 'test-data/unit/deps*.test'.
""" from typing import Dict, List, Set, Optional, Tuple from typing_extensions import DefaultDict from mypy.checkmember import bind_self from mypy.nodes import ( Node, Expression, MypyFile, FuncDef, ClassDef, AssignmentStmt, NameExpr, MemberExpr, Import, ImportFrom, CallExpr, CastExpr, TypeVarExpr, TypeApplication, IndexExpr, UnaryExpr, OpExpr, ComparisonExpr, GeneratorExpr, DictionaryComprehension, StarExpr, PrintStmt, ForStmt, WithStmt, TupleExpr, OperatorAssignmentStmt, DelStmt, YieldFromExpr, Decorator, Block, TypeInfo, FuncBase, OverloadedFuncDef, RefExpr, SuperExpr, Var, NamedTupleExpr, TypedDictExpr, LDEF, MDEF, GDEF, TypeAliasExpr, NewTypeExpr, ImportAll, EnumCallExpr, AwaitExpr, op_methods, reverse_op_methods, ops_with_inplace_method, unary_op_methods ) from mypy.traverser import TraverserVisitor from mypy.types import ( Type, Instance, AnyType, NoneType, TypeVisitor, CallableType, DeletedType, PartialType, TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType, FunctionLike, Overloaded, TypeOfAny, LiteralType, ErasedType, get_proper_type, ProperType, TypeAliasType) from mypy.server.trigger import make_trigger, make_wildcard_trigger from mypy.util import correct_relative_import from mypy.scope import Scope from mypy.typestate import TypeState from mypy.options import Options def get_dependencies(target: MypyFile, type_map: Dict[Expression, Type], python_version: Tuple[int, int], options: Options) -> Dict[str, Set[str]]: """Get all dependencies of a node, recursively.""" visitor = DependencyVisitor(type_map, python_version, target.alias_deps, options) target.accept(visitor) return visitor.map def get_dependencies_of_target(module_id: str, module_tree: MypyFile, target: Node, type_map: Dict[Expression, Type], python_version: Tuple[int, int]) -> Dict[str, Set[str]]: """Get dependencies of a target -- don't recursive into nested targets.""" # TODO: Add tests for this function. 
visitor = DependencyVisitor(type_map, python_version, module_tree.alias_deps) visitor.scope.enter_file(module_id) if isinstance(target, MypyFile): # Only get dependencies of the top-level of the module. Don't recurse into # functions. for defn in target.defs: # TODO: Recurse into top-level statements and class bodies but skip functions. if not isinstance(defn, (ClassDef, Decorator, FuncDef, OverloadedFuncDef)): defn.accept(visitor) elif isinstance(target, FuncBase) and target.info: # It's a method. # TODO: Methods in nested classes. visitor.scope.enter_class(target.info) target.accept(visitor) visitor.scope.leave() else: target.accept(visitor) visitor.scope.leave() return visitor.map class DependencyVisitor(TraverserVisitor): def __init__(self, type_map: Dict[Expression, Type], python_version: Tuple[int, int], alias_deps: 'DefaultDict[str, Set[str]]', options: Optional[Options] = None) -> None: self.scope = Scope() self.type_map = type_map self.python2 = python_version[0] == 2 # This attribute holds a mapping from target to names of type aliases # it depends on. These need to be processed specially, since they are # only present in expanded form in symbol tables. For example, after: # A = List[int] # x: A # The module symbol table will just have a Var `x` with type `List[int]`, # and the dependency of `x` on `A` is lost. Therefore the alias dependencies # are preserved at alias expansion points in `semanal.py`, stored as an attribute # on MypyFile, and then passed here. 
self.alias_deps = alias_deps self.map = {} # type: Dict[str, Set[str]] self.is_class = False self.is_package_init_file = False self.options = options def visit_mypy_file(self, o: MypyFile) -> None: self.scope.enter_file(o.fullname) self.is_package_init_file = o.is_package_init_file() self.add_type_alias_deps(self.scope.current_target()) for trigger, targets in o.plugin_deps.items(): self.map.setdefault(trigger, set()).update(targets) super().visit_mypy_file(o) self.scope.leave() def visit_func_def(self, o: FuncDef) -> None: self.scope.enter_function(o) target = self.scope.current_target() if o.type: if self.is_class and isinstance(o.type, FunctionLike): signature = bind_self(o.type) # type: Type else: signature = o.type for trigger in self.get_type_triggers(signature): self.add_dependency(trigger) self.add_dependency(trigger, target=make_trigger(target)) if o.info: for base in non_trivial_bases(o.info): # Base class __init__/__new__ doesn't generate a logical # dependency since the override can be incompatible. if not self.use_logical_deps() or o.name not in ('__init__', '__new__'): self.add_dependency(make_trigger(base.fullname + '.' + o.name)) self.add_type_alias_deps(self.scope.current_target()) super().visit_func_def(o) variants = set(o.expanded) - {o} for ex in variants: if isinstance(ex, FuncDef): super().visit_func_def(ex) self.scope.leave() def visit_decorator(self, o: Decorator) -> None: if not self.use_logical_deps(): # We don't need to recheck outer scope for an overload, only overload itself. # Also if any decorator is nested, it is not externally visible, so we don't need to # generate dependency. if not o.func.is_overload and self.scope.current_function_name() is None: self.add_dependency(make_trigger(o.func.fullname)) else: # Add logical dependencies from decorators to the function. For example, # if we have # @dec # def func(): ... # then if `dec` is unannotated, then it will "spoil" `func` and consequently # all call sites, making them all `Any`. 
for d in o.decorators: tname = None # type: Optional[str] if isinstance(d, RefExpr) and d.fullname is not None: tname = d.fullname if (isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and d.callee.fullname is not None): tname = d.callee.fullname if tname is not None: self.add_dependency(make_trigger(tname), make_trigger(o.func.fullname)) super().visit_decorator(o) def visit_class_def(self, o: ClassDef) -> None: self.scope.enter_class(o.info) target = self.scope.current_full_target() self.add_dependency(make_trigger(target), target) old_is_class = self.is_class self.is_class = True # Add dependencies to type variables of a generic class. for tv in o.type_vars: self.add_dependency(make_trigger(tv.fullname), target) self.process_type_info(o.info) super().visit_class_def(o) self.is_class = old_is_class self.scope.leave() def visit_newtype_expr(self, o: NewTypeExpr) -> None: if o.info: self.scope.enter_class(o.info) self.process_type_info(o.info) self.scope.leave() def process_type_info(self, info: TypeInfo) -> None: target = self.scope.current_full_target() for base in info.bases: self.add_type_dependencies(base, target=target) if info.tuple_type: self.add_type_dependencies(info.tuple_type, target=make_trigger(target)) if info.typeddict_type: self.add_type_dependencies(info.typeddict_type, target=make_trigger(target)) if info.declared_metaclass: self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target)) if info.is_protocol: for base_info in info.mro[:-1]: # We add dependencies from whole MRO to cover explicit subprotocols. # For example: # # class Super(Protocol): # x: int # class Sub(Super, Protocol): # y: int # # In this example we add -> , to invalidate Sub if # a new member is added to Super. self.add_dependency(make_wildcard_trigger(base_info.fullname), target=make_trigger(target)) # More protocol dependencies are collected in TypeState._snapshot_protocol_deps # after a full run or update is finished. 
self.add_type_alias_deps(self.scope.current_target()) for name, node in info.names.items(): if isinstance(node.node, Var): # Recheck Liskov if needed, self definitions are checked in the defining method if node.node.is_initialized_in_class and has_user_bases(info): self.add_dependency(make_trigger(info.fullname + '.' + name)) for base_info in non_trivial_bases(info): # If the type of an attribute changes in a base class, we make references # to the attribute in the subclass stale. self.add_dependency(make_trigger(base_info.fullname + '.' + name), target=make_trigger(info.fullname + '.' + name)) for base_info in non_trivial_bases(info): for name, node in base_info.names.items(): if self.use_logical_deps(): # Skip logical dependency if an attribute is not overridden. For example, # in case of: # class Base: # x = 1 # y = 2 # class Sub(Base): # x = 3 # we skip -> , because even if `y` is unannotated it # doesn't affect precision of Liskov checking. if name not in info.names: continue # __init__ and __new__ can be overridden with different signatures, so no # logical depedency. if name in ('__init__', '__new__'): continue self.add_dependency(make_trigger(base_info.fullname + '.' + name), target=make_trigger(info.fullname + '.' + name)) if not self.use_logical_deps(): # These dependencies are only useful for propagating changes -- # they aren't logical dependencies since __init__ and __new__ can be # overridden with a different signature. self.add_dependency(make_trigger(base_info.fullname + '.__init__'), target=make_trigger(info.fullname + '.__init__')) self.add_dependency(make_trigger(base_info.fullname + '.__new__'), target=make_trigger(info.fullname + '.__new__')) # If the set of abstract attributes change, this may invalidate class # instantiation, or change the generated error message, since Python checks # class abstract status when creating an instance. 
self.add_dependency(make_trigger(base_info.fullname + '.(abstract)'), target=make_trigger(info.fullname + '.__init__')) # If the base class abstract attributes change, subclass abstract # attributes need to be recalculated. self.add_dependency(make_trigger(base_info.fullname + '.(abstract)')) def visit_import(self, o: Import) -> None: for id, as_id in o.ids: self.add_dependency(make_trigger(id), self.scope.current_target()) def visit_import_from(self, o: ImportFrom) -> None: if self.use_logical_deps(): # Just importing a name doesn't create a logical dependency. return module_id, _ = correct_relative_import(self.scope.current_module_id(), o.relative, o.id, self.is_package_init_file) self.add_dependency(make_trigger(module_id)) # needed if module is added/removed for name, as_name in o.names: self.add_dependency(make_trigger(module_id + '.' + name)) def visit_import_all(self, o: ImportAll) -> None: module_id, _ = correct_relative_import(self.scope.current_module_id(), o.relative, o.id, self.is_package_init_file) # The current target needs to be rechecked if anything "significant" changes in the # target module namespace (as the imported definitions will need to be updated). self.add_dependency(make_wildcard_trigger(module_id)) def visit_block(self, o: Block) -> None: if not o.is_unreachable: super().visit_block(o) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: rvalue = o.rvalue if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr): analyzed = rvalue.analyzed self.add_type_dependencies(analyzed.upper_bound, target=make_trigger(analyzed.fullname)) for val in analyzed.values: self.add_type_dependencies(val, target=make_trigger(analyzed.fullname)) # We need to re-analyze the definition if bound or value is deleted. super().visit_call_expr(rvalue) elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr): # Depend on types of named tuple items. 
info = rvalue.analyzed.info prefix = '%s.%s' % (self.scope.current_full_target(), info.name) for name, symnode in info.names.items(): if not name.startswith('_') and isinstance(symnode.node, Var): typ = symnode.node.type if typ: self.add_type_dependencies(typ) self.add_type_dependencies(typ, target=make_trigger(prefix)) attr_target = make_trigger('%s.%s' % (prefix, name)) self.add_type_dependencies(typ, target=attr_target) elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypedDictExpr): # Depend on the underlying typeddict type info = rvalue.analyzed.info assert info.typeddict_type is not None prefix = '%s.%s' % (self.scope.current_full_target(), info.name) self.add_type_dependencies(info.typeddict_type, target=make_trigger(prefix)) elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, EnumCallExpr): # Enum values are currently not checked, but for future we add the deps on them for name, symnode in rvalue.analyzed.info.names.items(): if isinstance(symnode.node, Var) and symnode.node.type: self.add_type_dependencies(symnode.node.type) elif o.is_alias_def: assert len(o.lvalues) == 1 lvalue = o.lvalues[0] assert isinstance(lvalue, NameExpr) typ = get_proper_type(self.type_map.get(lvalue)) if isinstance(typ, FunctionLike) and typ.is_type_obj(): class_name = typ.type_object().fullname self.add_dependency(make_trigger(class_name + '.__init__')) self.add_dependency(make_trigger(class_name + '.__new__')) if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr): self.add_type_dependencies(rvalue.analyzed.type) elif typ: self.add_type_dependencies(typ) else: # Normal assignment super().visit_assignment_stmt(o) for lvalue in o.lvalues: self.process_lvalue(lvalue) items = o.lvalues + [rvalue] for i in range(len(items) - 1): lvalue = items[i] rvalue = items[i + 1] if isinstance(lvalue, TupleExpr): self.add_attribute_dependency_for_expr(rvalue, '__iter__') if o.type: self.add_type_dependencies(o.type) if 
self.use_logical_deps() and o.unanalyzed_type is None: # Special case: for definitions without an explicit type like this: # x = func(...) # we add a logical dependency -> , because if `func` is not annotated, # then it will make all points of use of `x` unchecked. if (isinstance(rvalue, CallExpr) and isinstance(rvalue.callee, RefExpr) and rvalue.callee.fullname is not None): fname = None # type: Optional[str] if isinstance(rvalue.callee.node, TypeInfo): # use actual __init__ as a dependency source init = rvalue.callee.node.get('__init__') if init and isinstance(init.node, FuncBase): fname = init.node.fullname else: fname = rvalue.callee.fullname if fname is None: return for lv in o.lvalues: if isinstance(lv, RefExpr) and lv.fullname and lv.is_new_def: if lv.kind == LDEF: return # local definitions don't generate logical deps self.add_dependency(make_trigger(fname), make_trigger(lv.fullname)) def process_lvalue(self, lvalue: Expression) -> None: """Generate additional dependencies for an lvalue.""" if isinstance(lvalue, IndexExpr): self.add_operator_method_dependency(lvalue.base, '__setitem__') elif isinstance(lvalue, NameExpr): if lvalue.kind in (MDEF, GDEF): # Assignment to an attribute in the class body, or direct assignment to a # global variable. lvalue_type = self.get_non_partial_lvalue_type(lvalue) type_triggers = self.get_type_triggers(lvalue_type) attr_trigger = make_trigger('%s.%s' % (self.scope.current_full_target(), lvalue.name)) for type_trigger in type_triggers: self.add_dependency(type_trigger, attr_trigger) elif isinstance(lvalue, MemberExpr): if self.is_self_member_ref(lvalue) and lvalue.is_new_def: node = lvalue.node if isinstance(node, Var): info = node.info if info and has_user_bases(info): # Recheck Liskov for self definitions self.add_dependency(make_trigger(info.fullname + '.' 
+ lvalue.name)) if lvalue.kind is None: # Reference to a non-module attribute if lvalue.expr not in self.type_map: # Unreachable assignment -> not checked so no dependencies to generate. return object_type = self.type_map[lvalue.expr] lvalue_type = self.get_non_partial_lvalue_type(lvalue) type_triggers = self.get_type_triggers(lvalue_type) for attr_trigger in self.attribute_triggers(object_type, lvalue.name): for type_trigger in type_triggers: self.add_dependency(type_trigger, attr_trigger) elif isinstance(lvalue, TupleExpr): for item in lvalue.items: self.process_lvalue(item) elif isinstance(lvalue, StarExpr): self.process_lvalue(lvalue.expr) def is_self_member_ref(self, memberexpr: MemberExpr) -> bool: """Does memberexpr to refer to an attribute of self?""" if not isinstance(memberexpr.expr, NameExpr): return False node = memberexpr.expr.node return isinstance(node, Var) and node.is_self def get_non_partial_lvalue_type(self, lvalue: RefExpr) -> Type: if lvalue not in self.type_map: # Likely a block considered unreachable during type checking. return UninhabitedType() lvalue_type = get_proper_type(self.type_map[lvalue]) if isinstance(lvalue_type, PartialType): if isinstance(lvalue.node, Var) and lvalue.node.type: lvalue_type = get_proper_type(lvalue.node.type) else: # Probably a secondary, non-definition assignment that doesn't # result in a non-partial type. We won't be able to infer any # dependencies from this so just return something. (The first, # definition assignment with a partial type is handled # differently, in the semantic analyzer.) 
assert not lvalue.is_new_def return UninhabitedType() return lvalue_type def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None: super().visit_operator_assignment_stmt(o) self.process_lvalue(o.lvalue) method = op_methods[o.op] self.add_attribute_dependency_for_expr(o.lvalue, method) if o.op in ops_with_inplace_method: inplace_method = '__i' + method[2:] self.add_attribute_dependency_for_expr(o.lvalue, inplace_method) def visit_for_stmt(self, o: ForStmt) -> None: super().visit_for_stmt(o) if not o.is_async: # __getitem__ is only used if __iter__ is missing but for simplicity we # just always depend on both. self.add_attribute_dependency_for_expr(o.expr, '__iter__') self.add_attribute_dependency_for_expr(o.expr, '__getitem__') if o.inferred_iterator_type: if self.python2: method = 'next' else: method = '__next__' self.add_attribute_dependency(o.inferred_iterator_type, method) else: self.add_attribute_dependency_for_expr(o.expr, '__aiter__') if o.inferred_iterator_type: self.add_attribute_dependency(o.inferred_iterator_type, '__anext__') self.process_lvalue(o.index) if isinstance(o.index, TupleExpr): # Process multiple assignment to index variables. item_type = o.inferred_item_type if item_type: # This is similar to above. 
self.add_attribute_dependency(item_type, '__iter__') self.add_attribute_dependency(item_type, '__getitem__') if o.index_type: self.add_type_dependencies(o.index_type) def visit_with_stmt(self, o: WithStmt) -> None: super().visit_with_stmt(o) for e in o.expr: if not o.is_async: self.add_attribute_dependency_for_expr(e, '__enter__') self.add_attribute_dependency_for_expr(e, '__exit__') else: self.add_attribute_dependency_for_expr(e, '__aenter__') self.add_attribute_dependency_for_expr(e, '__aexit__') for typ in o.analyzed_types: self.add_type_dependencies(typ) def visit_print_stmt(self, o: PrintStmt) -> None: super().visit_print_stmt(o) if o.target: self.add_attribute_dependency_for_expr(o.target, 'write') def visit_del_stmt(self, o: DelStmt) -> None: super().visit_del_stmt(o) if isinstance(o.expr, IndexExpr): self.add_attribute_dependency_for_expr(o.expr.base, '__delitem__') # Expressions def process_global_ref_expr(self, o: RefExpr) -> None: if o.fullname is not None: self.add_dependency(make_trigger(o.fullname)) # If this is a reference to a type, generate a dependency to its # constructor. # IDEA: Avoid generating spurious dependencies for except statements, # class attribute references, etc., if performance is a problem. typ = get_proper_type(self.type_map.get(o)) if isinstance(typ, FunctionLike) and typ.is_type_obj(): class_name = typ.type_object().fullname self.add_dependency(make_trigger(class_name + '.__init__')) self.add_dependency(make_trigger(class_name + '.__new__')) def visit_name_expr(self, o: NameExpr) -> None: if o.kind == LDEF: # We don't track dependencies to local variables, since they # aren't externally visible. return if o.kind == MDEF: # Direct reference to member is only possible in the scope that # defined the name, so no dependency is required. 
return self.process_global_ref_expr(o) def visit_member_expr(self, e: MemberExpr) -> None: if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, TypeInfo): # Special case class attribute so that we don't depend on "__init__". self.add_dependency(make_trigger(e.expr.node.fullname)) else: super().visit_member_expr(e) if e.kind is not None: # Reference to a module attribute self.process_global_ref_expr(e) else: # Reference to a non-module (or missing) attribute if e.expr not in self.type_map: # No type available -- this happens for unreachable code. Since it's unreachable, # it wasn't type checked and we don't need to generate dependencies. return if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, MypyFile): # Special case: reference to a missing module attribute. self.add_dependency(make_trigger(e.expr.node.fullname + '.' + e.name)) return typ = get_proper_type(self.type_map[e.expr]) self.add_attribute_dependency(typ, e.name) if self.use_logical_deps() and isinstance(typ, AnyType): name = self.get_unimported_fullname(e, typ) if name is not None: # Generate a logical dependency from an unimported # definition (which comes from a missing module). # Example: # import missing # "missing" not in build # # def g() -> None: # missing.f() # Generate dependency from "missing.f" self.add_dependency(make_trigger(name)) def get_unimported_fullname(self, e: MemberExpr, typ: AnyType) -> Optional[str]: """If e refers to an unimported definition, infer the fullname of this. Return None if e doesn't refer to an unimported definition or if we can't determine the name. """ suffix = '' # Unwrap nested member expression to handle cases like "a.b.c.d" where # "a.b" is a known reference to an unimported module. Find the base # reference to an unimported module (such as "a.b") and the name suffix # (such as "c.d") needed to build a full name. while typ.type_of_any == TypeOfAny.from_another_any and isinstance(e.expr, MemberExpr): suffix = '.' 
+ e.name + suffix e = e.expr if e.expr not in self.type_map: return None obj_type = get_proper_type(self.type_map[e.expr]) if not isinstance(obj_type, AnyType): # Can't find the base reference to the unimported module. return None typ = obj_type if typ.type_of_any == TypeOfAny.from_unimported_type and typ.missing_import_name: # Infer the full name of the unimported definition. return typ.missing_import_name + '.' + e.name + suffix return None def visit_super_expr(self, e: SuperExpr) -> None: # Arguments in "super(C, self)" won't generate useful logical deps. if not self.use_logical_deps(): super().visit_super_expr(e) if e.info is not None: name = e.name for base in non_trivial_bases(e.info): self.add_dependency(make_trigger(base.fullname + '.' + name)) if name in base.names: # No need to depend on further base classes, since we found # the target. This is safe since if the target gets # deleted or modified, we'll trigger it. break def visit_call_expr(self, e: CallExpr) -> None: if isinstance(e.callee, RefExpr) and e.callee.fullname == 'builtins.isinstance': self.process_isinstance_call(e) else: super().visit_call_expr(e) def process_isinstance_call(self, e: CallExpr) -> None: """Process "isinstance(...)" in a way to avoid some extra dependencies.""" if len(e.args) == 2: arg = e.args[1] if (isinstance(arg, RefExpr) and arg.kind == GDEF and isinstance(arg.node, TypeInfo) and arg.fullname): # Special case to avoid redundant dependencies from "__init__". self.add_dependency(make_trigger(arg.fullname)) return # In uncommon cases generate normal dependencies. These will include # spurious dependencies, but the performance impact is small. 
super().visit_call_expr(e) def visit_cast_expr(self, e: CastExpr) -> None: super().visit_cast_expr(e) self.add_type_dependencies(e.type) def visit_type_application(self, e: TypeApplication) -> None: super().visit_type_application(e) for typ in e.types: self.add_type_dependencies(typ) def visit_index_expr(self, e: IndexExpr) -> None: super().visit_index_expr(e) self.add_operator_method_dependency(e.base, '__getitem__') def visit_unary_expr(self, e: UnaryExpr) -> None: super().visit_unary_expr(e) if e.op not in unary_op_methods: return method = unary_op_methods[e.op] self.add_operator_method_dependency(e.expr, method) def visit_op_expr(self, e: OpExpr) -> None: super().visit_op_expr(e) self.process_binary_op(e.op, e.left, e.right) def visit_comparison_expr(self, e: ComparisonExpr) -> None: super().visit_comparison_expr(e) for i, op in enumerate(e.operators): left = e.operands[i] right = e.operands[i + 1] self.process_binary_op(op, left, right) if self.python2 and op in ('==', '!=', '<', '<=', '>', '>='): self.add_operator_method_dependency(left, '__cmp__') self.add_operator_method_dependency(right, '__cmp__') def process_binary_op(self, op: str, left: Expression, right: Expression) -> None: method = op_methods.get(op) if method: if op == 'in': self.add_operator_method_dependency(right, method) else: self.add_operator_method_dependency(left, method) rev_method = reverse_op_methods.get(method) if rev_method: self.add_operator_method_dependency(right, rev_method) def add_operator_method_dependency(self, e: Expression, method: str) -> None: typ = get_proper_type(self.type_map.get(e)) if typ is not None: self.add_operator_method_dependency_for_type(typ, method) def add_operator_method_dependency_for_type(self, typ: ProperType, method: str) -> None: # Note that operator methods can't be (non-metaclass) methods of type objects # (that is, TypeType objects or Callables representing a type). 
if isinstance(typ, TypeVarType): typ = get_proper_type(typ.upper_bound) if isinstance(typ, TupleType): typ = typ.partial_fallback if isinstance(typ, Instance): trigger = make_trigger(typ.type.fullname + '.' + method) self.add_dependency(trigger) elif isinstance(typ, UnionType): for item in typ.items: self.add_operator_method_dependency_for_type(get_proper_type(item), method) elif isinstance(typ, FunctionLike) and typ.is_type_obj(): self.add_operator_method_dependency_for_type(typ.fallback, method) elif isinstance(typ, TypeType): if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None: self.add_operator_method_dependency_for_type(typ.item.type.metaclass_type, method) def visit_generator_expr(self, e: GeneratorExpr) -> None: super().visit_generator_expr(e) for seq in e.sequences: self.add_iter_dependency(seq) def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None: super().visit_dictionary_comprehension(e) for seq in e.sequences: self.add_iter_dependency(seq) def visit_star_expr(self, e: StarExpr) -> None: super().visit_star_expr(e) self.add_iter_dependency(e.expr) def visit_yield_from_expr(self, e: YieldFromExpr) -> None: super().visit_yield_from_expr(e) self.add_iter_dependency(e.expr) def visit_await_expr(self, e: AwaitExpr) -> None: super().visit_await_expr(e) self.add_attribute_dependency_for_expr(e.expr, '__await__') # Helpers def add_type_alias_deps(self, target: str) -> None: # Type aliases are special, because some of the dependencies are calculated # in semanal.py, before they are expanded. if target in self.alias_deps: for alias in self.alias_deps[target]: self.add_dependency(make_trigger(alias)) def add_dependency(self, trigger: str, target: Optional[str] = None) -> None: """Add dependency from trigger to a target. If the target is not given explicitly, use the current target. """ if trigger.startswith((' None: """Add dependencies to all components of a type. 
Args: target: If not None, override the default (current) target of the generated dependency. """ for trigger in self.get_type_triggers(typ): self.add_dependency(trigger, target) def add_attribute_dependency(self, typ: Type, name: str) -> None: """Add dependencies for accessing a named attribute of a type.""" targets = self.attribute_triggers(typ, name) for target in targets: self.add_dependency(target) def attribute_triggers(self, typ: Type, name: str) -> List[str]: """Return all triggers associated with the attribute of a type.""" typ = get_proper_type(typ) if isinstance(typ, TypeVarType): typ = get_proper_type(typ.upper_bound) if isinstance(typ, TupleType): typ = typ.partial_fallback if isinstance(typ, Instance): member = '%s.%s' % (typ.type.fullname, name) return [make_trigger(member)] elif isinstance(typ, FunctionLike) and typ.is_type_obj(): member = '%s.%s' % (typ.type_object().fullname, name) triggers = [make_trigger(member)] triggers.extend(self.attribute_triggers(typ.fallback, name)) return triggers elif isinstance(typ, UnionType): targets = [] for item in typ.items: targets.extend(self.attribute_triggers(item, name)) return targets elif isinstance(typ, TypeType): triggers = self.attribute_triggers(typ.item, name) if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None: triggers.append(make_trigger('%s.%s' % (typ.item.type.metaclass_type.type.fullname, name))) return triggers else: return [] def add_attribute_dependency_for_expr(self, e: Expression, name: str) -> None: typ = self.type_map.get(e) if typ is not None: self.add_attribute_dependency(typ, name) def add_iter_dependency(self, node: Expression) -> None: typ = self.type_map.get(node) if typ: self.add_attribute_dependency(typ, '__iter__') def use_logical_deps(self) -> bool: return self.options is not None and self.options.logical_deps def get_type_triggers(self, typ: Type) -> List[str]: return get_type_triggers(typ, self.use_logical_deps()) def get_type_triggers(typ: Type, 
use_logical_deps: bool) -> List[str]: """Return all triggers that correspond to a type becoming stale.""" return typ.accept(TypeTriggersVisitor(use_logical_deps)) class TypeTriggersVisitor(TypeVisitor[List[str]]): def __init__(self, use_logical_deps: bool) -> None: self.deps = [] # type: List[str] self.use_logical_deps = use_logical_deps def get_type_triggers(self, typ: Type) -> List[str]: return get_type_triggers(typ, self.use_logical_deps) def visit_instance(self, typ: Instance) -> List[str]: trigger = make_trigger(typ.type.fullname) triggers = [trigger] for arg in typ.args: triggers.extend(self.get_type_triggers(arg)) if typ.last_known_value: triggers.extend(self.get_type_triggers(typ.last_known_value)) return triggers def visit_type_alias_type(self, typ: TypeAliasType) -> List[str]: assert typ.alias is not None trigger = make_trigger(typ.alias.fullname) triggers = [trigger] for arg in typ.args: triggers.extend(self.get_type_triggers(arg)) # TODO: Add guard for infinite recursion here. Moreover, now that type aliases # are its own kind of types we can simplify the logic to rely on intermediate # dependencies (like for instance types). triggers.extend(self.get_type_triggers(typ.alias.target)) return triggers def visit_any(self, typ: AnyType) -> List[str]: if typ.missing_import_name is not None: return [make_trigger(typ.missing_import_name)] return [] def visit_none_type(self, typ: NoneType) -> List[str]: return [] def visit_callable_type(self, typ: CallableType) -> List[str]: triggers = [] for arg in typ.arg_types: triggers.extend(self.get_type_triggers(arg)) triggers.extend(self.get_type_triggers(typ.ret_type)) # fallback is a metaclass type for class objects, and is # processed separately. 
return triggers def visit_overloaded(self, typ: Overloaded) -> List[str]: triggers = [] for item in typ.items(): triggers.extend(self.get_type_triggers(item)) return triggers def visit_erased_type(self, t: ErasedType) -> List[str]: # This type should exist only temporarily during type inference assert False, "Should not see an erased type here" def visit_deleted_type(self, typ: DeletedType) -> List[str]: return [] def visit_partial_type(self, typ: PartialType) -> List[str]: assert False, "Should not see a partial type here" def visit_tuple_type(self, typ: TupleType) -> List[str]: triggers = [] for item in typ.items: triggers.extend(self.get_type_triggers(item)) triggers.extend(self.get_type_triggers(typ.partial_fallback)) return triggers def visit_type_type(self, typ: TypeType) -> List[str]: triggers = self.get_type_triggers(typ.item) if not self.use_logical_deps: old_triggers = triggers[:] for trigger in old_triggers: triggers.append(trigger.rstrip('>') + '.__init__>') triggers.append(trigger.rstrip('>') + '.__new__>') return triggers def visit_type_var(self, typ: TypeVarType) -> List[str]: triggers = [] if typ.fullname: triggers.append(make_trigger(typ.fullname)) if typ.upper_bound: triggers.extend(self.get_type_triggers(typ.upper_bound)) for val in typ.values: triggers.extend(self.get_type_triggers(val)) return triggers def visit_typeddict_type(self, typ: TypedDictType) -> List[str]: triggers = [] for item in typ.items.values(): triggers.extend(self.get_type_triggers(item)) triggers.extend(self.get_type_triggers(typ.fallback)) return triggers def visit_literal_type(self, typ: LiteralType) -> List[str]: return self.get_type_triggers(typ.fallback) def visit_unbound_type(self, typ: UnboundType) -> List[str]: return [] def visit_uninhabited_type(self, typ: UninhabitedType) -> List[str]: return [] def visit_union_type(self, typ: UnionType) -> List[str]: triggers = [] for item in typ.items: triggers.extend(self.get_type_triggers(item)) return triggers def 
merge_dependencies(new_deps: Dict[str, Set[str]], deps: Dict[str, Set[str]]) -> None: for trigger, targets in new_deps.items(): deps.setdefault(trigger, set()).update(targets) def non_trivial_bases(info: TypeInfo) -> List[TypeInfo]: return [base for base in info.mro[1:] if base.fullname != 'builtins.object'] def has_user_bases(info: TypeInfo) -> bool: return any(base.module_name not in ('builtins', 'typing', 'enum') for base in info.mro[1:]) def dump_all_dependencies(modules: Dict[str, MypyFile], type_map: Dict[Expression, Type], python_version: Tuple[int, int], options: Options) -> None: """Generate dependencies for all interesting modules and print them to stdout.""" all_deps = {} # type: Dict[str, Set[str]] for id, node in modules.items(): # Uncomment for debugging: # print('processing', id) if id in ('builtins', 'typing') or '/typeshed/' in node.path: continue assert id == node.fullname deps = get_dependencies(node, type_map, python_version, options) for trigger, targets in deps.items(): all_deps.setdefault(trigger, set()).update(targets) TypeState.add_all_protocol_deps(all_deps) for trigger, targets in sorted(all_deps.items(), key=lambda x: x[0]): print(trigger) for target in sorted(targets): print(' %s' % target) mypy-0.761/mypy/server/mergecheck.py0000644€tŠÔÚ€2›s®0000000526213576752246023700 0ustar jukkaDROPBOX\Domain Users00000000000000"""Check for duplicate AST nodes after merge.""" from typing import Dict, List, Tuple from typing_extensions import Final from mypy.nodes import SymbolNode, Var, Decorator, FuncDef from mypy.server.objgraph import get_reachable_graph, get_path # If True, print more verbose output on failure. DUMP_MISMATCH_NODES = False # type: Final def check_consistency(o: object) -> None: """Fail if there are two AST nodes with the same fullname reachable from 'o'. Raise AssertionError on failure and print some debugging output. 
""" seen, parents = get_reachable_graph(o) reachable = list(seen.values()) syms = [x for x in reachable if isinstance(x, SymbolNode)] m = {} # type: Dict[str, SymbolNode] for sym in syms: fn = sym.fullname # Skip None names, since they are ambiguous. # TODO: Everything should have a proper full name? if fn is None: continue # Skip stuff that should be expected to have duplicate names if isinstance(sym, (Var, Decorator)): continue if isinstance(sym, FuncDef) and sym.is_overload: continue if fn not in m: m[sym.fullname] = sym continue # We have trouble and need to decide what to do about it. sym1, sym2 = sym, m[fn] # If the type changed, then it shouldn't have been merged. if type(sym1) is not type(sym2): continue path1 = get_path(sym1, seen, parents) path2 = get_path(sym2, seen, parents) if fn in m: print('\nDuplicate %r nodes with fullname %r found:' % (type(sym).__name__, fn)) print('[1] %d: %s' % (id(sym1), path_to_str(path1))) print('[2] %d: %s' % (id(sym2), path_to_str(path2))) if DUMP_MISMATCH_NODES and fn in m: # Add verbose output with full AST node contents. print('---') print(id(sym1), sym1) print('---') print(id(sym2), sym2) assert sym.fullname not in m def path_to_str(path: List[Tuple[object, object]]) -> str: result = '' for attr, obj in path: t = type(obj).__name__ if t in ('dict', 'tuple', 'SymbolTable', 'list'): result += '[%s]' % repr(attr) else: if isinstance(obj, Var): result += '.%s(%s:%s)' % (attr, t, obj.name) elif t in ('BuildManager', 'FineGrainedBuildManager'): # Omit class name for some classes that aren't part of a class # hierarchy since there isn't much ambiguity. 
result += '.%s' % attr else: result += '.%s(%s)' % (attr, t) return result mypy-0.761/mypy/server/objgraph.py0000644€tŠÔÚ€2›s®0000000636513576752246023404 0ustar jukkaDROPBOX\Domain Users00000000000000"""Find all objects reachable from a root object.""" from collections.abc import Iterable import weakref import types from typing import List, Dict, Iterator, Tuple, Mapping from typing_extensions import Final method_descriptor_type = type(object.__dir__) # type: Final method_wrapper_type = type(object().__ne__) # type: Final wrapper_descriptor_type = type(object.__ne__) # type: Final FUNCTION_TYPES = (types.BuiltinFunctionType, types.FunctionType, types.MethodType, method_descriptor_type, wrapper_descriptor_type, method_wrapper_type) # type: Final ATTR_BLACKLIST = { '__doc__', '__name__', '__class__', '__dict__', } # type: Final # Instances of these types can't have references to other objects ATOMIC_TYPE_BLACKLIST = { bool, int, float, str, type(None), object, } # type: Final # Don't look at most attributes of these types COLLECTION_TYPE_BLACKLIST = { list, set, dict, tuple, } # type: Final # Don't return these objects TYPE_BLACKLIST = { weakref.ReferenceType, } # type: Final def isproperty(o: object, attr: str) -> bool: return isinstance(getattr(type(o), attr, None), property) def get_edge_candidates(o: object) -> Iterator[Tuple[object, object]]: if type(o) not in COLLECTION_TYPE_BLACKLIST: for attr in dir(o): if attr not in ATTR_BLACKLIST and hasattr(o, attr) and not isproperty(o, attr): e = getattr(o, attr) if not type(e) in ATOMIC_TYPE_BLACKLIST: yield attr, e if isinstance(o, Mapping): for k, v in o.items(): yield k, v elif isinstance(o, Iterable) and not isinstance(o, str): for i, e in enumerate(o): yield i, e def get_edges(o: object) -> Iterator[Tuple[object, object]]: for s, e in get_edge_candidates(o): if (isinstance(e, FUNCTION_TYPES)): # We don't want to collect methods, but do want to collect values # in closures and self pointers to other objects if 
hasattr(e, '__closure__'): yield (s, '__closure__'), e.__closure__ # type: ignore if hasattr(e, '__self__'): se = e.__self__ # type: ignore if se is not o and se is not type(o): yield s.__self__, se # type: ignore else: if not type(e) in TYPE_BLACKLIST: yield s, e def get_reachable_graph(root: object) -> Tuple[Dict[int, object], Dict[int, Tuple[int, object]]]: parents = {} seen = {id(root): root} worklist = [root] while worklist: o = worklist.pop() for s, e in get_edges(o): if id(e) in seen: continue parents[id(e)] = (id(o), s) seen[id(e)] = e worklist.append(e) return seen, parents def get_path(o: object, seen: Dict[int, object], parents: Dict[int, Tuple[int, object]]) -> List[Tuple[object, object]]: path = [] while id(o) in parents: pid, attr = parents[id(o)] o = seen[pid] path.append((attr, o)) path.reverse() return path mypy-0.761/mypy/server/subexpr.py0000644€tŠÔÚ€2›s®0000001175513576752246023277 0ustar jukkaDROPBOX\Domain Users00000000000000"""Find all subexpressions of an AST node.""" from typing import List from mypy.nodes import ( Expression, Node, MemberExpr, YieldFromExpr, YieldExpr, CallExpr, OpExpr, ComparisonExpr, SliceExpr, CastExpr, RevealExpr, UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr, IndexExpr, GeneratorExpr, ListComprehension, SetComprehension, DictionaryComprehension, ConditionalExpr, TypeApplication, LambdaExpr, StarExpr, BackquoteExpr, AwaitExpr, AssignmentExpr, ) from mypy.traverser import TraverserVisitor def get_subexpressions(node: Node) -> List[Expression]: visitor = SubexpressionFinder() node.accept(visitor) return visitor.expressions class SubexpressionFinder(TraverserVisitor): def __init__(self) -> None: self.expressions = [] # type: List[Expression] def visit_int_expr(self, o: Expression) -> None: self.add(o) def visit_name_expr(self, o: Expression) -> None: self.add(o) def visit_float_expr(self, o: Expression) -> None: self.add(o) def visit_str_expr(self, o: Expression) -> None: self.add(o) def visit_bytes_expr(self, o: 
Expression) -> None: self.add(o) def visit_unicode_expr(self, o: Expression) -> None: self.add(o) def visit_complex_expr(self, o: Expression) -> None: self.add(o) def visit_ellipsis(self, o: Expression) -> None: self.add(o) def visit_super_expr(self, o: Expression) -> None: self.add(o) def visit_type_var_expr(self, o: Expression) -> None: self.add(o) def visit_type_alias_expr(self, o: Expression) -> None: self.add(o) def visit_namedtuple_expr(self, o: Expression) -> None: self.add(o) def visit_typeddict_expr(self, o: Expression) -> None: self.add(o) def visit__promote_expr(self, o: Expression) -> None: self.add(o) def visit_newtype_expr(self, o: Expression) -> None: self.add(o) def visit_member_expr(self, e: MemberExpr) -> None: self.add(e) super().visit_member_expr(e) def visit_yield_from_expr(self, e: YieldFromExpr) -> None: self.add(e) super().visit_yield_from_expr(e) def visit_yield_expr(self, e: YieldExpr) -> None: self.add(e) super().visit_yield_expr(e) def visit_call_expr(self, e: CallExpr) -> None: self.add(e) super().visit_call_expr(e) def visit_op_expr(self, e: OpExpr) -> None: self.add(e) super().visit_op_expr(e) def visit_comparison_expr(self, e: ComparisonExpr) -> None: self.add(e) super().visit_comparison_expr(e) def visit_slice_expr(self, e: SliceExpr) -> None: self.add(e) super().visit_slice_expr(e) def visit_cast_expr(self, e: CastExpr) -> None: self.add(e) super().visit_cast_expr(e) def visit_reveal_expr(self, e: RevealExpr) -> None: self.add(e) super().visit_reveal_expr(e) def visit_assignment_expr(self, e: AssignmentExpr) -> None: self.add(e) super().visit_assignment_expr(e) def visit_unary_expr(self, e: UnaryExpr) -> None: self.add(e) super().visit_unary_expr(e) def visit_list_expr(self, e: ListExpr) -> None: self.add(e) super().visit_list_expr(e) def visit_tuple_expr(self, e: TupleExpr) -> None: self.add(e) super().visit_tuple_expr(e) def visit_dict_expr(self, e: DictExpr) -> None: self.add(e) super().visit_dict_expr(e) def 
visit_set_expr(self, e: SetExpr) -> None: self.add(e) super().visit_set_expr(e) def visit_index_expr(self, e: IndexExpr) -> None: self.add(e) super().visit_index_expr(e) def visit_generator_expr(self, e: GeneratorExpr) -> None: self.add(e) super().visit_generator_expr(e) def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None: self.add(e) super().visit_dictionary_comprehension(e) def visit_list_comprehension(self, e: ListComprehension) -> None: self.add(e) super().visit_list_comprehension(e) def visit_set_comprehension(self, e: SetComprehension) -> None: self.add(e) super().visit_set_comprehension(e) def visit_conditional_expr(self, e: ConditionalExpr) -> None: self.add(e) super().visit_conditional_expr(e) def visit_type_application(self, e: TypeApplication) -> None: self.add(e) super().visit_type_application(e) def visit_lambda_expr(self, e: LambdaExpr) -> None: self.add(e) super().visit_lambda_expr(e) def visit_star_expr(self, e: StarExpr) -> None: self.add(e) super().visit_star_expr(e) def visit_backquote_expr(self, e: BackquoteExpr) -> None: self.add(e) super().visit_backquote_expr(e) def visit_await_expr(self, e: AwaitExpr) -> None: self.add(e) super().visit_await_expr(e) def add(self, e: Expression) -> None: self.expressions.append(e) mypy-0.761/mypy/server/target.py0000644€tŠÔÚ€2›s®0000000035413576752246023066 0ustar jukkaDROPBOX\Domain Users00000000000000def trigger_to_target(s: str) -> str: assert s[0] == '<' # Strip off the angle brackets s = s[1:-1] # If there is a [wildcard] or similar, strip that off too if s[-1] == ']': s = s.split('[')[0] return s mypy-0.761/mypy/server/trigger.py0000644€tŠÔÚ€2›s®0000000142013576752246023236 0ustar jukkaDROPBOX\Domain Users00000000000000"""AST triggers that are used for fine-grained dependency handling.""" from typing_extensions import Final # Used as a suffix for triggers to handle "from m import *" dependencies (see also # make_wildcard_trigger) WILDCARD_TAG = '[wildcard]' # type: Final def 
make_trigger(name: str) -> str: return '<%s>' % name def make_wildcard_trigger(module: str) -> str: """Special trigger fired when any top-level name is changed in a module. Note that this is different from a module trigger, as module triggers are only fired if the module is created, deleted, or replaced with a non-module, whereas a wildcard trigger is triggered for namespace changes. This is used for "from m import *" dependencies. """ return '<%s%s>' % (module, WILDCARD_TAG) mypy-0.761/mypy/server/update.py0000644€tŠÔÚ€2›s®0000013522513576752246023070 0ustar jukkaDROPBOX\Domain Users00000000000000"""Update build by processing changes using fine-grained dependencies. Use fine-grained dependencies to update targets in other modules that may be affected by externally-visible changes in the changed modules. This forms the core of the fine-grained incremental daemon mode. This module is not used at all by the 'classic' (non-daemon) incremental mode. Here is some motivation for this mode: * By keeping program state in memory between incremental runs, we only have to process changed modules, not their dependencies. The classic incremental mode has to deserialize the symbol tables of all dependencies of changed modules, which can be slow for large programs. * Fine-grained dependencies allow processing only the relevant parts of modules indirectly affected by a change. Say, if only one function in a large module is affected by a change in another module, only this function is processed. The classic incremental mode always processes an entire file as a unit, which is typically much slower. * It's possible to independently process individual modules within an import cycle (SCC). Small incremental changes can be fast independent of the size of the related SCC. In classic incremental mode, any change within a SCC requires the entire SCC to be processed, which can slow things down considerably. 
Some terms:

* A *target* is a function/method definition or the top level of a module.
  We refer to targets using their fully qualified name (e.g.
  'mod.Cls.method'). Targets are the smallest units of processing during
  fine-grained incremental checking.

* A *trigger* represents the properties of a part of a program, and it gets
  triggered/fired when these properties change. For example, '<mod.func>'
  refers to a module-level function. It gets triggered if the signature of
  the function changes, or if the function is removed, for example.

Some program state is maintained across multiple build increments in
memory:

* The full ASTs of all modules are stored in memory all the time (this
  includes the type map).

* A fine-grained dependency map is maintained, which maps triggers to
  affected program locations (these can be targets, triggers, or classes).
  The latter determine what other parts of a program need to be processed
  again due to a fired trigger.

Here's a summary of how a fine-grained incremental program update happens:

* Determine which modules have changes in their source code since the
  previous update.

* Process changed modules one at a time. Perform a separate full update for
  each changed module, but only report the errors after all modules have
  been processed, since the intermediate states can generate bogus errors
  due to only seeing a partial set of changes.

* Each changed module is processed in full. We parse the module, and run
  semantic analysis to create a new AST and symbol table for the module.
  Reuse the existing ASTs and symbol tables of modules that have no changes
  in their source code. At the end of this stage, we have two ASTs and
  symbol tables for the changed module (the old and the new versions). The
  latter AST has not yet been type checked.

* Take a snapshot of the old symbol table. This is used later to determine
  which properties of the module have changed and which triggers to fire.
* Merge the old AST with the new AST, preserving the identities of externally visible AST nodes for which we can find a corresponding node in the new AST. (Look at mypy.server.astmerge for the details.) This way all external references to AST nodes in the changed module will continue to point to the right nodes (assuming they still have a valid target). * Type check the new module. * Take another snapshot of the symbol table of the changed module. Look at the differences between the old and new snapshots to determine which parts of the changed modules have changed. The result is a set of fired triggers. * Using the dependency map and the fired triggers, decide which other targets have become stale and need to be reprocessed. * Create new fine-grained dependencies for the changed module. We don't garbage collect old dependencies, since extra dependencies are relatively harmless (they take some memory and can theoretically slow things down a bit by causing redundant work). This is implemented in mypy.server.deps. * Strip the stale AST nodes that we found above. This returns them to a state resembling the end of semantic analysis pass 1. We'll run semantic analysis again on the existing AST nodes, and since semantic analysis is not idempotent, we need to revert some changes made during semantic analysis. This is implemented in mypy.server.aststrip. * Run semantic analyzer passes 2 and 3 on the stale AST nodes, and type check them. We also need to do the symbol table snapshot comparison dance to find any changes, and we need to merge ASTs to preserve AST node identities. * If some triggers haven been fired, continue processing and repeat the previous steps until no triggers are fired. This is module is tested using end-to-end fine-grained incremental mode test cases (test-data/unit/fine-grained*.test). 
""" import time from typing import ( Dict, List, Set, Tuple, Union, Optional, NamedTuple, Sequence ) from typing_extensions import Final from mypy.build import ( BuildManager, State, BuildResult, Graph, load_graph, process_fresh_modules, DEBUG_FINE_GRAINED, FAKE_ROOT_MODULE, ) from mypy.modulefinder import BuildSource from mypy.checker import FineGrainedDeferredNode from mypy.errors import CompileError from mypy.nodes import ( MypyFile, FuncDef, TypeInfo, SymbolNode, Decorator, OverloadedFuncDef, SymbolTable ) from mypy.options import Options from mypy.fscache import FileSystemCache from mypy.server.astdiff import ( snapshot_symbol_table, compare_symbol_table_snapshots, SnapshotItem ) from mypy.semanal_main import semantic_analysis_for_scc, semantic_analysis_for_targets from mypy.server.astmerge import merge_asts from mypy.server.aststrip import strip_target, SavedAttributes from mypy.server.deps import get_dependencies_of_target, merge_dependencies from mypy.server.target import trigger_to_target from mypy.server.trigger import make_trigger, WILDCARD_TAG from mypy.util import module_prefix, split_target from mypy.typestate import TypeState MAX_ITER = 1000 # type: Final class FineGrainedBuildManager: def __init__(self, result: BuildResult) -> None: """Initialize fine-grained build based on a batch build. Args: result: Result from the initialized build. The manager and graph will be taken over by this class. 
manager: State of the build (mutated by this class) graph: Additional state of the build (mutated by this class) """ manager = result.manager self.manager = manager self.graph = result.graph self.previous_modules = get_module_to_path_map(self.graph) self.deps = manager.fg_deps # Merge in any root dependencies that may not have been loaded merge_dependencies(manager.load_fine_grained_deps(FAKE_ROOT_MODULE), self.deps) self.previous_targets_with_errors = manager.errors.targets() self.previous_messages = result.errors[:] # Module, if any, that had blocking errors in the last run as (id, path) tuple. self.blocking_error = None # type: Optional[Tuple[str, str]] # Module that we haven't processed yet but that are known to be stale. self.stale = [] # type: List[Tuple[str, str]] # Disable the cache so that load_graph doesn't try going back to disk # for the cache. self.manager.cache_enabled = False # Some hints to the test suite about what is going on: # Active triggers during the last update self.triggered = [] # type: List[str] # Modules passed to update during the last update self.changed_modules = [] # type: List[Tuple[str, str]] # Modules processed during the last update self.updated_modules = [] # type: List[str] # Targets processed during last update (for testing only). self.processed_targets = [] # type: List[str] def update(self, changed_modules: List[Tuple[str, str]], removed_modules: List[Tuple[str, str]]) -> List[str]: """Update previous build result by processing changed modules. Also propagate changes to other modules as needed, but only process those parts of other modules that are affected by the changes. Retain the existing ASTs and symbol tables of unaffected modules. Reuses original BuildManager and Graph. Args: changed_modules: Modules changed since the previous update/build; each is a (module id, path) tuple. Includes modified and added modules. Assume this is correct; it's not validated here. 
removed_modules: Modules that have been deleted since the previous update or removed from the build. Returns: A list of errors. """ self.processed_targets.clear() changed_modules = changed_modules + removed_modules removed_set = {module for module, _ in removed_modules} self.changed_modules = changed_modules if not changed_modules: return self.previous_messages # Reset find_module's caches for the new build. self.manager.find_module_cache.clear() self.triggered = [] self.updated_modules = [] changed_modules = dedupe_modules(changed_modules + self.stale) initial_set = {id for id, _ in changed_modules} self.manager.log_fine_grained('==== update %s ====' % ', '.join( repr(id) for id, _ in changed_modules)) if self.previous_targets_with_errors and is_verbose(self.manager): self.manager.log_fine_grained('previous targets with errors: %s' % sorted(self.previous_targets_with_errors)) if self.blocking_error: # Handle blocking errors first. We'll exit as soon as we find a # module that still has blocking errors. self.manager.log_fine_grained('existing blocker: %s' % self.blocking_error[0]) changed_modules = dedupe_modules([self.blocking_error] + changed_modules) self.blocking_error = None while True: result = self.update_one(changed_modules, initial_set, removed_set) changed_modules, (next_id, next_path), blocker_messages = result if blocker_messages is not None: self.blocking_error = (next_id, next_path) self.stale = changed_modules messages = blocker_messages break # It looks like we are done processing everything, so now # reprocess all targets with errors. We are careful to # support the possibility that reprocessing an errored module # might trigger loading of a module, but I am not sure # if this can really happen. if not changed_modules: # N.B: We just checked next_id, so manager.errors contains # the errors from it. Thus we consider next_id up to date # when propagating changes from the errored targets, # which prevents us from reprocessing errors in it. 
changed_modules = propagate_changes_using_dependencies( self.manager, self.graph, self.deps, set(), {next_id}, self.previous_targets_with_errors, self.processed_targets) changed_modules = dedupe_modules(changed_modules) if not changed_modules: # Preserve state needed for the next update. self.previous_targets_with_errors = self.manager.errors.targets() messages = self.manager.errors.new_messages() break self.previous_messages = messages[:] return messages def trigger(self, target: str) -> List[str]: """Trigger a specific target explicitly. This is intended for use by the suggestions engine. """ self.manager.errors.reset() changed_modules = propagate_changes_using_dependencies( self.manager, self.graph, self.deps, set(), set(), self.previous_targets_with_errors | {target}, []) # Preserve state needed for the next update. self.previous_targets_with_errors = self.manager.errors.targets() self.previous_messages = self.manager.errors.new_messages()[:] return self.update(changed_modules, []) def update_one(self, changed_modules: List[Tuple[str, str]], initial_set: Set[str], removed_set: Set[str]) -> Tuple[List[Tuple[str, str]], Tuple[str, str], Optional[List[str]]]: """Process a module from the list of changed modules. 
Returns: Tuple with these items: - Updated list of pending changed modules as (module id, path) tuples - Module which was actually processed as (id, path) tuple - If there was a blocking error, the error messages from it """ t0 = time.time() next_id, next_path = changed_modules.pop(0) if next_id not in self.previous_modules and next_id not in initial_set: self.manager.log_fine_grained('skip %r (module not in import graph)' % next_id) return changed_modules, (next_id, next_path), None result = self.update_module(next_id, next_path, next_id in removed_set) remaining, (next_id, next_path), blocker_messages = result changed_modules = [(id, path) for id, path in changed_modules if id != next_id] changed_modules = dedupe_modules(remaining + changed_modules) t1 = time.time() self.manager.log_fine_grained( "update once: {} in {:.3f}s - {} left".format( next_id, t1 - t0, len(changed_modules))) return changed_modules, (next_id, next_path), blocker_messages def update_module(self, module: str, path: str, force_removed: bool) -> Tuple[List[Tuple[str, str]], Tuple[str, str], Optional[List[str]]]: """Update a single modified module. If the module contains imports of previously unseen modules, only process one of the new modules and return the remaining work to be done. 
Args: module: Id of the module path: File system path of the module force_removed: If True, consider module removed from the build even if path exists (used for removing an existing file from the build) Returns: Tuple with these items: - Remaining modules to process as (module id, path) tuples - Module which was actually processed as (id, path) tuple - If there was a blocking error, the error messages from it """ self.manager.log_fine_grained('--- update single %r ---' % module) self.updated_modules.append(module) manager = self.manager previous_modules = self.previous_modules graph = self.graph ensure_deps_loaded(module, self.deps, graph) # If this is an already existing module, make sure that we have # its tree loaded so that we can snapshot it for comparison. ensure_trees_loaded(manager, graph, [module]) t0 = time.time() # Record symbol table snapshot of old version the changed module. old_snapshots = {} # type: Dict[str, Dict[str, SnapshotItem]] if module in manager.modules: snapshot = snapshot_symbol_table(module, manager.modules[module].names) old_snapshots[module] = snapshot manager.errors.reset() self.processed_targets.append(module) result = update_module_isolated(module, path, manager, previous_modules, graph, force_removed) if isinstance(result, BlockedUpdate): # Blocking error -- just give up module, path, remaining, errors = result self.previous_modules = get_module_to_path_map(graph) return remaining, (module, path), errors assert isinstance(result, NormalUpdate) # Work around #4124 module, path, remaining, tree = result # TODO: What to do with stale dependencies? 
t1 = time.time() triggered = calculate_active_triggers(manager, old_snapshots, {module: tree}) if is_verbose(self.manager): filtered = [trigger for trigger in triggered if not trigger.endswith('__>')] self.manager.log_fine_grained('triggered: %r' % sorted(filtered)) self.triggered.extend(triggered | self.previous_targets_with_errors) if module in graph: graph[module].update_fine_grained_deps(self.deps) graph[module].free_state() remaining += propagate_changes_using_dependencies( manager, graph, self.deps, triggered, {module}, targets_with_errors=set(), processed_targets=self.processed_targets) t2 = time.time() manager.add_stats( update_isolated_time=t1 - t0, propagate_time=t2 - t1) # Preserve state needed for the next update. self.previous_targets_with_errors.update(manager.errors.targets()) self.previous_modules = get_module_to_path_map(graph) return remaining, (module, path), None def find_unloaded_deps(manager: BuildManager, graph: Dict[str, State], initial: Sequence[str]) -> List[str]: """Find all the deps of the nodes in initial that haven't had their tree loaded. The key invariant here is that if a module is loaded, so are all of their dependencies. This means that when we encounter a loaded module, we don't need to explore its dependencies. (This invariant is slightly violated when dependencies are added, which can be handled by calling find_unloaded_deps directly on the new dependencies.) """ worklist = list(initial) seen = set() # type: Set[str] unloaded = [] while worklist: node = worklist.pop() if node in seen or node not in graph: continue seen.add(node) if node not in manager.modules: ancestors = graph[node].ancestors or [] worklist.extend(graph[node].dependencies + ancestors) unloaded.append(node) return unloaded def ensure_deps_loaded(module: str, deps: Dict[str, Set[str]], graph: Dict[str, State]) -> None: """Ensure that the dependencies on a module are loaded. Dependencies are loaded into the 'deps' dictionary. 
This also requires loading dependencies from any parent modules, since dependencies will get stored with parent modules when a module doesn't exist. """ if module in graph and graph[module].fine_grained_deps_loaded: return parts = module.split('.') for i in range(len(parts)): base = '.'.join(parts[:i + 1]) if base in graph and not graph[base].fine_grained_deps_loaded: merge_dependencies(graph[base].load_fine_grained_deps(), deps) graph[base].fine_grained_deps_loaded = True def ensure_trees_loaded(manager: BuildManager, graph: Dict[str, State], initial: Sequence[str]) -> None: """Ensure that the modules in initial and their deps have loaded trees.""" to_process = find_unloaded_deps(manager, graph, initial) if to_process: if is_verbose(manager): manager.log_fine_grained("Calling process_fresh_modules on set of size {} ({})".format( len(to_process), sorted(to_process))) process_fresh_modules(graph, to_process, manager) def fix_fg_dependencies(manager: BuildManager, deps: Dict[str, Set[str]]) -> None: """Populate the dependencies with stuff that build may have missed""" # This means the root module and typestate merge_dependencies(manager.load_fine_grained_deps(FAKE_ROOT_MODULE), deps) # TypeState.add_all_protocol_deps(deps) # The result of update_module_isolated when no blockers, with these items: # # - Id of the changed module (can be different from the module argument) # - Path of the changed module # - New AST for the changed module (None if module was deleted) # - Remaining changed modules that are not processed yet as (module id, path) # tuples (non-empty if the original changed module imported other new # modules) NormalUpdate = NamedTuple('NormalUpdate', [('module', str), ('path', str), ('remaining', List[Tuple[str, str]]), ('tree', Optional[MypyFile])]) # The result of update_module_isolated when there is a blocking error. Items # are similar to NormalUpdate (but there are fewer). 
# Result when a blocking error was found: 'module'/'path' identify the module
# with the blocker, 'remaining' lists (module, path) pairs still to process,
# and 'messages' holds the blocking error messages.
BlockedUpdate = NamedTuple('BlockedUpdate', [('module', str),
                                             ('path', str),
                                             ('remaining', List[Tuple[str, str]]),
                                             ('messages', List[str])])

UpdateResult = Union[NormalUpdate, BlockedUpdate]


def update_module_isolated(module: str,
                           path: str,
                           manager: BuildManager,
                           previous_modules: Dict[str, str],
                           graph: Graph,
                           force_removed: bool) -> UpdateResult:
    """Build a new version of one changed module only.

    Don't propagate changes to elsewhere in the program. Raise CompileError on
    encountering a blocking error.

    Args:
        module: Changed module (modified, created or deleted)
        path: Path of the changed module
        manager: Build manager
        previous_modules: Module id to path mapping from before this update
            (used to look up sources of changed files)
        graph: Build graph
        force_removed: If True, consider the module removed from the build
          even if the file exists

    Returns a named tuple describing the result (see above for details).
    """
    if module not in graph:
        manager.log_fine_grained('new module %r' % module)

    if not manager.fscache.isfile(path) or force_removed:
        delete_module(module, path, graph, manager)
        return NormalUpdate(module, path, [], None)

    sources = get_sources(manager.fscache, previous_modules, [(module, path)])

    if module in manager.missing_modules:
        manager.missing_modules.remove(module)

    # Remember the original state so that it can be restored on a blocking error.
    orig_module = module
    orig_state = graph.get(module)
    orig_tree = manager.modules.get(module)

    def restore(ids: List[str]) -> None:
        # For each of the modules in ids, restore that id's old
        # manager.modules and graphs entries. (Except for the original
        # module, this means deleting them.)
        for id in ids:
            if id == orig_module and orig_tree:
                manager.modules[id] = orig_tree
            elif id in manager.modules:
                del manager.modules[id]
            if id == orig_module and orig_state:
                graph[id] = orig_state
            elif id in graph:
                del graph[id]

    new_modules = []  # type: List[State]
    try:
        if module in graph:
            del graph[module]
        load_graph(sources, manager, graph, new_modules)
    except CompileError as err:
        # Parse error somewhere in the program -- a blocker
        assert err.module_with_blocker
        restore([module] + [st.id for st in new_modules])
        return BlockedUpdate(err.module_with_blocker, path, [], err.messages)

    # Reparsing the file may have brought in dependencies that we
    # didn't have before. Make sure that they are loaded to restore
    # the invariant that a module having a loaded tree implies that
    # its dependencies do as well.
    ensure_trees_loaded(manager, graph, graph[module].dependencies)

    # Find any other modules brought in by imports.
    changed_modules = [(st.id, st.xpath) for st in new_modules]

    # If there are multiple modules to process, only process one of them and return
    # the remaining ones to the caller.
    if len(changed_modules) > 1:
        # As an optimization, look for a module that imports no other changed modules.
        module, path = find_relative_leaf_module(changed_modules, graph)
        changed_modules.remove((module, path))
        remaining_modules = changed_modules
        # The remaining modules haven't been processed yet so drop them.
        restore([id for id, _ in remaining_modules])
        manager.log_fine_grained('--> %r (newly imported)' % module)
    else:
        remaining_modules = []

    state = graph[module]

    # Process the changed file.
    state.parse_file()
    assert state.tree is not None, "file must be at least parsed"
    t0 = time.time()
    # TODO: state.fix_suppressed_dependencies()?
    try:
        semantic_analysis_for_scc(graph, [state.id], manager.errors)
    except CompileError as err:
        # There was a blocking error, so module AST is incomplete. Restore old modules.
        restore([module])
        return BlockedUpdate(module, path, remaining_modules, err.messages)

    # Merge old and new ASTs.
    new_modules_dict = {module: state.tree}  # type: Dict[str, Optional[MypyFile]]
    replace_modules_with_new_variants(manager, graph, {orig_module: orig_tree}, new_modules_dict)

    t1 = time.time()
    # Perform type checking.
    state.type_checker().reset()
    state.type_check_first_pass()
    state.type_check_second_pass()
    t2 = time.time()
    state.finish_passes()
    t3 = time.time()
    manager.add_stats(
        semanal_time=t1 - t0,
        typecheck_time=t2 - t1,
        finish_passes_time=t3 - t2)

    graph[module] = state

    return NormalUpdate(module, path, remaining_modules, state.tree)
return modules[0] def delete_module(module_id: str, path: str, graph: Graph, manager: BuildManager) -> None: manager.log_fine_grained('delete module %r' % module_id) # TODO: Remove deps for the module (this only affects memory use, not correctness) if module_id in graph: del graph[module_id] if module_id in manager.modules: del manager.modules[module_id] components = module_id.split('.') if len(components) > 1: # Delete reference to module in parent module. parent_id = '.'.join(components[:-1]) # If parent module is ignored, it won't be included in the modules dictionary. if parent_id in manager.modules: parent = manager.modules[parent_id] if components[-1] in parent.names: del parent.names[components[-1]] # If the module is removed from the build but still exists, then # we mark it as missing so that it will get picked up by import from still. if manager.fscache.isfile(path): manager.missing_modules.add(module_id) def dedupe_modules(modules: List[Tuple[str, str]]) -> List[Tuple[str, str]]: seen = set() # type: Set[str] result = [] for id, path in modules: if id not in seen: seen.add(id) result.append((id, path)) return result def get_module_to_path_map(graph: Graph) -> Dict[str, str]: return {module: node.xpath for module, node in graph.items()} def get_sources(fscache: FileSystemCache, modules: Dict[str, str], changed_modules: List[Tuple[str, str]]) -> List[BuildSource]: sources = [] for id, path in changed_modules: if fscache.isfile(path): sources.append(BuildSource(path, id, None)) return sources def calculate_active_triggers(manager: BuildManager, old_snapshots: Dict[str, Dict[str, SnapshotItem]], new_modules: Dict[str, Optional[MypyFile]]) -> Set[str]: """Determine activated triggers by comparing old and new symbol tables. For example, if only the signature of function m.f is different in the new symbol table, return {''}. 
""" names = set() # type: Set[str] for id in new_modules: snapshot1 = old_snapshots.get(id) if snapshot1 is None: names.add(id) snapshot1 = {} new = new_modules[id] if new is None: snapshot2 = snapshot_symbol_table(id, SymbolTable()) names.add(id) else: snapshot2 = snapshot_symbol_table(id, new.names) diff = compare_symbol_table_snapshots(id, snapshot1, snapshot2) package_nesting_level = id.count('.') for item in diff.copy(): if (item.count('.') <= package_nesting_level + 1 and item.split('.')[-1] not in ('__builtins__', '__file__', '__name__', '__package__', '__doc__')): # Activate catch-all wildcard trigger for top-level module changes (used for # "from m import *"). This also gets triggered by changes to module-private # entries, but as these unneeded dependencies only result in extra processing, # it's a minor problem. # # TODO: Some __* names cause mistriggers. Fix the underlying issue instead of # special casing them here. diff.add(id + WILDCARD_TAG) if item.count('.') > package_nesting_level + 1: # These are for changes within classes, used by protocols. diff.add(item.rsplit('.', 1)[0] + WILDCARD_TAG) names |= diff return {make_trigger(name) for name in names} def replace_modules_with_new_variants( manager: BuildManager, graph: Dict[str, State], old_modules: Dict[str, Optional[MypyFile]], new_modules: Dict[str, Optional[MypyFile]]) -> None: """Replace modules with newly builds versions. Retain the identities of externally visible AST nodes in the old ASTs so that references to the affected modules from other modules will still be valid (unless something was deleted or replaced with an incompatible definition, in which case there will be dangling references that will be handled by propagate_changes_using_dependencies). 
""" for id in new_modules: preserved_module = old_modules.get(id) new_module = new_modules[id] if preserved_module and new_module is not None: merge_asts(preserved_module, preserved_module.names, new_module, new_module.names) manager.modules[id] = preserved_module graph[id].tree = preserved_module def propagate_changes_using_dependencies( manager: BuildManager, graph: Dict[str, State], deps: Dict[str, Set[str]], triggered: Set[str], up_to_date_modules: Set[str], targets_with_errors: Set[str], processed_targets: List[str]) -> List[Tuple[str, str]]: """Transitively rechecks targets based on triggers and the dependency map. Returns a list (module id, path) tuples representing modules that contain a target that needs to be reprocessed but that has not been parsed yet. Processed targets should be appended to processed_targets (used in tests only, to test the order of processing targets). """ num_iter = 0 remaining_modules = [] # type: List[Tuple[str, str]] # Propagate changes until nothing visible has changed during the last # iteration. while triggered or targets_with_errors: num_iter += 1 if num_iter > MAX_ITER: raise RuntimeError('Max number of iterations (%d) reached (endless loop?)' % MAX_ITER) todo, unloaded, stale_protos = find_targets_recursive(manager, graph, triggered, deps, up_to_date_modules) # TODO: we sort to make it deterministic, but this is *incredibly* ad hoc remaining_modules.extend((id, graph[id].xpath) for id in sorted(unloaded)) # Also process targets that used to have errors, as otherwise some # errors might be lost. for target in targets_with_errors: id = module_prefix(graph, target) if id is not None and id not in up_to_date_modules: if id not in todo: todo[id] = set() manager.log_fine_grained('process target with error: %s' % target) more_nodes, _ = lookup_target(manager, target) todo[id].update(more_nodes) triggered = set() # First invalidate subtype caches in all stale protocols. 
# We need to do this to avoid false negatives if the protocol itself is # unchanged, but was marked stale because its sub- (or super-) type changed. for info in stale_protos: TypeState.reset_subtype_caches_for(info) # Then fully reprocess all targets. # TODO: Preserve order (set is not optimal) for id, nodes in sorted(todo.items(), key=lambda x: x[0]): assert id not in up_to_date_modules triggered |= reprocess_nodes(manager, graph, id, nodes, deps, processed_targets) # Changes elsewhere may require us to reprocess modules that were # previously considered up to date. For example, there may be a # dependency loop that loops back to an originally processed module. up_to_date_modules = set() targets_with_errors = set() if is_verbose(manager): manager.log_fine_grained('triggered: %r' % list(triggered)) return remaining_modules def find_targets_recursive( manager: BuildManager, graph: Graph, triggers: Set[str], deps: Dict[str, Set[str]], up_to_date_modules: Set[str]) -> Tuple[Dict[str, Set[FineGrainedDeferredNode]], Set[str], Set[TypeInfo]]: """Find names of all targets that need to reprocessed, given some triggers. Returns: A tuple containing a: * Dictionary from module id to a set of stale targets. * A set of module ids for unparsed modules with stale targets. """ result = {} # type: Dict[str, Set[FineGrainedDeferredNode]] worklist = triggers processed = set() # type: Set[str] stale_protos = set() # type: Set[TypeInfo] unloaded_files = set() # type: Set[str] # Find AST nodes corresponding to each target. # # TODO: Don't rely on a set, since the items are in an unpredictable order. while worklist: processed |= worklist current = worklist worklist = set() for target in current: if target.startswith('<'): module_id = module_prefix(graph, trigger_to_target(target)) if module_id: ensure_deps_loaded(module_id, deps, graph) worklist |= deps.get(target, set()) - processed else: module_id = module_prefix(graph, target) if module_id is None: # Deleted module. 
continue if module_id in up_to_date_modules: # Already processed. continue if (module_id not in manager.modules or manager.modules[module_id].is_cache_skeleton): # We haven't actually parsed and checked the module, so we don't have # access to the actual nodes. # Add it to the queue of files that need to be processed fully. unloaded_files.add(module_id) continue if module_id not in result: result[module_id] = set() manager.log_fine_grained('process: %s' % target) deferred, stale_proto = lookup_target(manager, target) if stale_proto: stale_protos.add(stale_proto) result[module_id].update(deferred) return result, unloaded_files, stale_protos def reprocess_nodes(manager: BuildManager, graph: Dict[str, State], module_id: str, nodeset: Set[FineGrainedDeferredNode], deps: Dict[str, Set[str]], processed_targets: List[str]) -> Set[str]: """Reprocess a set of nodes within a single module. Return fired triggers. """ if module_id not in graph: manager.log_fine_grained('%s not in graph (blocking errors or deleted?)' % module_id) return set() file_node = manager.modules[module_id] old_symbols = find_symbol_tables_recursive(file_node.fullname, file_node.names) old_symbols = {name: names.copy() for name, names in old_symbols.items()} old_symbols_snapshot = snapshot_symbol_table(file_node.fullname, file_node.names) def key(node: FineGrainedDeferredNode) -> int: # Unlike modules which are sorted by name within SCC, # nodes within the same module are sorted by line number, because # this is how they are processed in normal mode. 
def reprocess_nodes(manager: BuildManager,
                    graph: Dict[str, State],
                    module_id: str,
                    nodeset: Set[FineGrainedDeferredNode],
                    deps: Dict[str, Set[str]],
                    processed_targets: List[str]) -> Set[str]:
    """Reprocess a set of nodes within a single module.

    Return fired triggers.
    """
    if module_id not in graph:
        manager.log_fine_grained('%s not in graph (blocking errors or deleted?)' %
                                 module_id)
        return set()

    file_node = manager.modules[module_id]
    old_symbols = find_symbol_tables_recursive(file_node.fullname, file_node.names)
    old_symbols = {name: names.copy() for name, names in old_symbols.items()}
    old_symbols_snapshot = snapshot_symbol_table(file_node.fullname, file_node.names)

    def key(node: FineGrainedDeferredNode) -> int:
        # Unlike modules which are sorted by name within SCC,
        # nodes within the same module are sorted by line number, because
        # this is how they are processed in normal mode.
        return node.node.line

    nodes = sorted(nodeset, key=key)

    options = graph[module_id].options
    manager.errors.set_file_ignored_lines(
        file_node.path, file_node.ignored_lines, options.ignore_errors)

    targets = set()
    for node in nodes:
        target = target_from_node(module_id, node.node)
        if target is not None:
            targets.add(target)
    manager.errors.clear_errors_in_targets(file_node.path, targets)

    # If one of the nodes is the module itself, emit any errors that
    # happened before semantic analysis.
    for target in targets:
        if target == module_id:
            for info in graph[module_id].early_errors:
                manager.errors.add_error_info(info)

    # Strip semantic analysis information.
    saved_attrs = {}  # type: SavedAttributes
    for deferred in nodes:
        processed_targets.append(deferred.node.fullname)
        strip_target(deferred.node, saved_attrs)
    semantic_analysis_for_targets(graph[module_id], nodes, graph, saved_attrs)
    # Merge symbol tables to preserve identities of AST nodes. The file node will remain
    # the same, but other nodes may have been recreated with different identities, such as
    # NamedTuples defined using assignment statements.
    new_symbols = find_symbol_tables_recursive(file_node.fullname, file_node.names)
    for name in old_symbols:
        if name in new_symbols:
            merge_asts(file_node, old_symbols[name], file_node, new_symbols[name])

    # Type check.
    checker = graph[module_id].type_checker()
    checker.reset()
    # We seem to need additional passes in fine-grained incremental mode.
    checker.pass_num = 0
    checker.last_pass = 3
    more = checker.check_second_pass(nodes)
    while more:
        more = False
        if graph[module_id].type_checker().check_second_pass():
            more = True

    if manager.options.export_types:
        manager.all_types.update(graph[module_id].type_map())

    new_symbols_snapshot = snapshot_symbol_table(file_node.fullname, file_node.names)
    # Check if any attribute types were changed and need to be propagated further.
    changed = compare_symbol_table_snapshots(file_node.fullname,
                                             old_symbols_snapshot,
                                             new_symbols_snapshot)
    new_triggered = {make_trigger(name) for name in changed}

    # Dependencies may have changed.
    update_deps(module_id, nodes, graph, deps, options)

    # Report missing imports.
    graph[module_id].verify_dependencies()

    graph[module_id].free_state()

    return new_triggered


def find_symbol_tables_recursive(prefix: str, symbols: SymbolTable) -> Dict[str, SymbolTable]:
    """Find all nested symbol tables.

    Args:
        prefix: Full name prefix (used for return value keys and to filter result so that
            cross references to other modules aren't included)
        symbols: Root symbol table

    Returns a dictionary from full name to corresponding symbol table.
    """
    result = {}
    result[prefix] = symbols
    for name, node in symbols.items():
        if isinstance(node.node, TypeInfo) and node.node.fullname.startswith(prefix + '.'):
            more = find_symbol_tables_recursive(prefix + '.' + name, node.node.names)
            result.update(more)
    return result


def update_deps(module_id: str,
                nodes: List[FineGrainedDeferredNode],
                graph: Dict[str, State],
                deps: Dict[str, Set[str]],
                options: Options) -> None:
    """Recompute fine-grained dependencies of the reprocessed nodes and merge
    them into the global dependency map."""
    for deferred in nodes:
        node = deferred.node
        type_map = graph[module_id].type_map()
        tree = graph[module_id].tree
        assert tree is not None, "Tree must be processed at this stage"
        new_deps = get_dependencies_of_target(module_id, tree, node, type_map,
                                              options.python_version)
        for trigger, targets in new_deps.items():
            deps.setdefault(trigger, set()).update(targets)
    # Merge also the newly added protocol deps (if any).
    TypeState.update_protocol_deps(deps)
""" def not_found() -> None: manager.log_fine_grained( "Can't find matching target for %s (stale dependency?)" % target) modules = manager.modules items = split_target(modules, target) if items is None: not_found() # Stale dependency return [], None module, rest = items if rest: components = rest.split('.') else: components = [] node = modules[module] # type: Optional[SymbolNode] file = None # type: Optional[MypyFile] active_class = None for c in components: if isinstance(node, TypeInfo): active_class = node if isinstance(node, MypyFile): file = node if (not isinstance(node, (MypyFile, TypeInfo)) or c not in node.names): not_found() # Stale dependency return [], None # Don't reprocess plugin generated targets. They should get # stripped and regenerated when the containing target is # reprocessed. if node.names[c].plugin_generated: return [], None node = node.names[c].node if isinstance(node, TypeInfo): # A ClassDef target covers the body of the class and everything defined # within it. To get the body we include the entire surrounding target, # typically a module top-level, since we don't support processing class # bodies as separate entitites for simplicity. assert file is not None if node.fullname != target: # This is a reference to a different TypeInfo, likely due to a stale dependency. # Processing them would spell trouble -- for example, we could be refreshing # a deserialized TypeInfo with missing attributes. not_found() return [], None result = [FineGrainedDeferredNode(file, None)] stale_info = None # type: Optional[TypeInfo] if node.is_protocol: stale_info = node for name, symnode in node.names.items(): node = symnode.node if isinstance(node, FuncDef): method, _ = lookup_target(manager, target + '.' + name) result.extend(method) return result, stale_info if isinstance(node, Decorator): # Decorator targets actually refer to the function definition only. 
node = node.func if not isinstance(node, (FuncDef, MypyFile, OverloadedFuncDef)): # The target can't be refreshed. It's possible that the target was # changed to another type and we have a stale dependency pointing to it. not_found() return [], None if node.fullname != target: # Stale reference points to something unexpected. We shouldn't process since the # context will be wrong and it could be a partially initialized deserialized node. not_found() return [], None return [FineGrainedDeferredNode(node, active_class)], None def is_verbose(manager: BuildManager) -> bool: return manager.options.verbosity >= 1 or DEBUG_FINE_GRAINED def target_from_node(module: str, node: Union[FuncDef, MypyFile, OverloadedFuncDef] ) -> Optional[str]: """Return the target name corresponding to a deferred node. Args: module: Must be module id of the module that defines 'node' Returns the target name, or None if the node is not a valid target in the given module (for example, if it's actually defined in another module). """ if isinstance(node, MypyFile): if module != node.fullname: # Actually a reference to another module -- likely a stale dependency. 
return None return module else: # OverloadedFuncDef or FuncDef if node.info: return '%s.%s' % (node.info.fullname, node.name) else: return '%s.%s' % (module, node.name) mypy-0.761/mypy/sharedparse.py0000644€tŠÔÚ€2›s®0000000404213576752246022571 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from typing_extensions import Final """Shared logic between our three mypy parser files.""" _NON_BINARY_MAGIC_METHODS = { "__abs__", "__call__", "__complex__", "__contains__", "__del__", "__delattr__", "__delitem__", "__enter__", "__exit__", "__float__", "__getattr__", "__getattribute__", "__getitem__", "__hex__", "__init__", "__init_subclass__", "__int__", "__invert__", "__iter__", "__len__", "__long__", "__neg__", "__new__", "__nonzero__", "__oct__", "__pos__", "__repr__", "__reversed__", "__setattr__", "__setitem__", "__str__", "__unicode__", } # type: Final MAGIC_METHODS_ALLOWING_KWARGS = { "__init__", "__init_subclass__", "__new__", "__call__", } # type: Final BINARY_MAGIC_METHODS = { "__add__", "__and__", "__cmp__", "__divmod__", "__div__", "__eq__", "__floordiv__", "__ge__", "__gt__", "__iadd__", "__iand__", "__idiv__", "__ifloordiv__", "__ilshift__", "__imod__", "__imul__", "__ior__", "__ipow__", "__irshift__", "__isub__", "__ixor__", "__le__", "__lshift__", "__lt__", "__mod__", "__mul__", "__ne__", "__or__", "__pow__", "__radd__", "__rand__", "__rdiv__", "__rfloordiv__", "__rlshift__", "__rmod__", "__rmul__", "__ror__", "__rpow__", "__rrshift__", "__rshift__", "__rsub__", "__rxor__", "__sub__", "__xor__", } # type: Final assert not (_NON_BINARY_MAGIC_METHODS & BINARY_MAGIC_METHODS) MAGIC_METHODS = _NON_BINARY_MAGIC_METHODS | BINARY_MAGIC_METHODS # type: Final MAGIC_METHODS_POS_ARGS_ONLY = MAGIC_METHODS - MAGIC_METHODS_ALLOWING_KWARGS # type: Final def special_function_elide_names(name: str) -> bool: return name in MAGIC_METHODS_POS_ARGS_ONLY def argument_elide_name(name: Optional[str]) -> bool: return name is not None and name.startswith("__") 
and not name.endswith("__") mypy-0.761/mypy/sitepkgs.py0000644€tŠÔÚ€2›s®0000000173313576752246022125 0ustar jukkaDROPBOX\Domain Users00000000000000from __future__ import print_function """This file is used to find the site packages of a Python executable, which may be Python 2. This file MUST remain compatible with Python 2. Since we cannot make any assumptions about the Python being executed, this module should not use *any* dependencies outside of the standard library found in Python 2. This file is run each mypy run, so it should be kept as fast as possible. """ if __name__ == '__main__': import sys sys.path = sys.path[1:] # we don't want to pick up mypy.types from distutils.sysconfig import get_python_lib import site MYPY = False if MYPY: from typing import List def getsitepackages(): # type: () -> List[str] if hasattr(site, 'getusersitepackages') and hasattr(site, 'getsitepackages'): user_dir = site.getusersitepackages() return site.getsitepackages() + [user_dir] else: return [get_python_lib()] if __name__ == '__main__': print(repr(getsitepackages())) mypy-0.761/mypy/solve.py0000644€tŠÔÚ€2›s®0000000547413576752246021432 0ustar jukkaDROPBOX\Domain Users00000000000000"""Type inference constraint solving""" from typing import List, Dict, Optional from collections import defaultdict from mypy.types import Type, AnyType, UninhabitedType, TypeVarId, TypeOfAny, get_proper_type from mypy.constraints import Constraint, SUPERTYPE_OF from mypy.join import join_types from mypy.meet import meet_types from mypy.subtypes import is_subtype def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint], strict: bool = True) -> List[Optional[Type]]: """Solve type constraints. Return the best type(s) for type variables; each type can be None if the value of the variable could not be solved. If a variable has no constraints, if strict=True then arbitrarily pick NoneType as the value of the type variable. If strict=False, pick AnyType. 
""" # Collect a list of constraints for each type variable. cmap = defaultdict(list) # type: Dict[TypeVarId, List[Constraint]] for con in constraints: cmap[con.type_var].append(con) res = [] # type: List[Optional[Type]] # Solve each type variable separately. for tvar in vars: bottom = None # type: Optional[Type] top = None # type: Optional[Type] candidate = None # type: Optional[Type] # Process each constraint separately, and calculate the lower and upper # bounds based on constraints. Note that we assume that the constraint # targets do not have constraint references. for c in cmap.get(tvar, []): if c.op == SUPERTYPE_OF: if bottom is None: bottom = c.target else: bottom = join_types(bottom, c.target) else: if top is None: top = c.target else: top = meet_types(top, c.target) top = get_proper_type(top) bottom = get_proper_type(bottom) if isinstance(top, AnyType) or isinstance(bottom, AnyType): source_any = top if isinstance(top, AnyType) else bottom assert isinstance(source_any, AnyType) res.append(AnyType(TypeOfAny.from_another_any, source_any=source_any)) continue elif bottom is None: if top: candidate = top else: # No constraints for type variable -- 'UninhabitedType' is the most specific type. if strict: candidate = UninhabitedType() candidate.ambiguous = True else: candidate = AnyType(TypeOfAny.special_form) elif top is None: candidate = bottom elif is_subtype(bottom, top): candidate = bottom else: candidate = None res.append(candidate) return res mypy-0.761/mypy/split_namespace.py0000644€tŠÔÚ€2›s®0000000235313576752246023442 0ustar jukkaDROPBOX\Domain Users00000000000000"""Split namespace for argparse to allow separating options by prefix. We use this to direct some options to an Options object and some to a regular namespace. 
""" # In its own file largely because mypyc doesn't support its use of # __getattr__/__setattr__ and has some issues with __dict__ import argparse from typing import Tuple, Any class SplitNamespace(argparse.Namespace): def __init__(self, standard_namespace: object, alt_namespace: object, alt_prefix: str) -> None: self.__dict__['_standard_namespace'] = standard_namespace self.__dict__['_alt_namespace'] = alt_namespace self.__dict__['_alt_prefix'] = alt_prefix def _get(self) -> Tuple[Any, Any]: return (self._standard_namespace, self._alt_namespace) def __setattr__(self, name: str, value: Any) -> None: if name.startswith(self._alt_prefix): setattr(self._alt_namespace, name[len(self._alt_prefix):], value) else: setattr(self._standard_namespace, name, value) def __getattr__(self, name: str) -> Any: if name.startswith(self._alt_prefix): return getattr(self._alt_namespace, name[len(self._alt_prefix):]) else: return getattr(self._standard_namespace, name) mypy-0.761/mypy/state.py0000644€tŠÔÚ€2›s®0000000076113576752246021414 0ustar jukkaDROPBOX\Domain Users00000000000000from contextlib import contextmanager from typing import Optional, Tuple, Iterator # These are global mutable state. Don't add anything here unless there's a very # good reason. 
# Value varies by file being processed strict_optional = False find_occurrences = None # type: Optional[Tuple[str, str]] @contextmanager def strict_optional_set(value: bool) -> Iterator[None]: global strict_optional saved = strict_optional strict_optional = value yield strict_optional = saved mypy-0.761/mypy/stats.py0000644€tŠÔÚ€2›s®0000004145613576752246021440 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities for calculating and reporting statistics about types.""" import os from collections import Counter from contextlib import contextmanager import typing from typing import Dict, List, cast, Optional, Union, Iterator from typing_extensions import Final from mypy.traverser import TraverserVisitor from mypy.typeanal import collect_all_inner_types from mypy.types import ( Type, AnyType, Instance, FunctionLike, TupleType, TypeVarType, TypeQuery, CallableType, TypeOfAny, get_proper_type, get_proper_types ) from mypy import nodes from mypy.nodes import ( Expression, FuncDef, TypeApplication, AssignmentStmt, NameExpr, CallExpr, MypyFile, MemberExpr, OpExpr, ComparisonExpr, IndexExpr, UnaryExpr, YieldFromExpr, RefExpr, ClassDef, AssignmentExpr, ImportFrom, Import, ImportAll, PassStmt, BreakStmt, ContinueStmt, StrExpr, BytesExpr, UnicodeExpr, IntExpr, FloatExpr, ComplexExpr, EllipsisExpr, ExpressionStmt, Node ) from mypy.util import correct_relative_import from mypy.argmap import map_formals_to_actuals TYPE_EMPTY = 0 # type: Final TYPE_UNANALYZED = 1 # type: Final # type of non-typechecked code TYPE_PRECISE = 2 # type: Final TYPE_IMPRECISE = 3 # type: Final TYPE_ANY = 4 # type: Final precision_names = [ 'empty', 'unanalyzed', 'precise', 'imprecise', 'any', ] # type: Final class StatisticsVisitor(TraverserVisitor): def __init__(self, inferred: bool, filename: str, modules: Dict[str, MypyFile], typemap: Optional[Dict[Expression, Type]] = None, all_nodes: bool = False, visit_untyped_defs: bool = True) -> None: self.inferred = inferred self.filename = filename 
self.modules = modules self.typemap = typemap self.all_nodes = all_nodes self.visit_untyped_defs = visit_untyped_defs self.num_precise_exprs = 0 self.num_imprecise_exprs = 0 self.num_any_exprs = 0 self.num_simple_types = 0 self.num_generic_types = 0 self.num_tuple_types = 0 self.num_function_types = 0 self.num_typevar_types = 0 self.num_complex_types = 0 self.num_any_types = 0 self.line = -1 self.line_map = {} # type: Dict[int, int] self.type_of_any_counter = Counter() # type: typing.Counter[int] self.any_line_map = {} # type: Dict[int, List[AnyType]] # For each scope (top level/function), whether the scope was type checked # (annotated function). # # TODO: Handle --check-untyped-defs self.checked_scopes = [True] self.output = [] # type: List[str] TraverserVisitor.__init__(self) def visit_mypy_file(self, o: MypyFile) -> None: self.cur_mod_node = o self.cur_mod_id = o.fullname super().visit_mypy_file(o) def visit_import_from(self, imp: ImportFrom) -> None: self.process_import(imp) def visit_import_all(self, imp: ImportAll) -> None: self.process_import(imp) def process_import(self, imp: Union[ImportFrom, ImportAll]) -> None: import_id, ok = correct_relative_import(self.cur_mod_id, imp.relative, imp.id, self.cur_mod_node.is_package_init_file()) if ok and import_id in self.modules: kind = TYPE_PRECISE else: kind = TYPE_ANY self.record_line(imp.line, kind) def visit_import(self, imp: Import) -> None: if all(id in self.modules for id, _ in imp.ids): kind = TYPE_PRECISE else: kind = TYPE_ANY self.record_line(imp.line, kind) def visit_func_def(self, o: FuncDef) -> None: with self.enter_scope(o): self.line = o.line if len(o.expanded) > 1 and o.expanded != [o] * len(o.expanded): if o in o.expanded: print('{}:{}: ERROR: cycle in function expansion; skipping'.format( self.filename, o.get_line())) return for defn in o.expanded: self.visit_func_def(cast(FuncDef, defn)) else: if o.type: sig = cast(CallableType, o.type) arg_types = sig.arg_types if (sig.arg_names and 
sig.arg_names[0] == 'self' and not self.inferred): arg_types = arg_types[1:] for arg in arg_types: self.type(arg) self.type(sig.ret_type) elif self.all_nodes: self.record_line(self.line, TYPE_ANY) if not o.is_dynamic() or self.visit_untyped_defs: super().visit_func_def(o) @contextmanager def enter_scope(self, o: FuncDef) -> Iterator[None]: self.checked_scopes.append(o.type is not None and self.checked_scopes[-1]) yield None self.checked_scopes.pop() def is_checked_scope(self) -> bool: return self.checked_scopes[-1] def visit_class_def(self, o: ClassDef) -> None: self.record_line(o.line, TYPE_PRECISE) # TODO: Look at base classes # Override this method because we don't want to analyze base_type_exprs (base_type_exprs # are base classes in a class declaration). # While base_type_exprs are technically expressions, type analyzer does not visit them and # they are not in the typemap. for d in o.decorators: d.accept(self) o.defs.accept(self) def visit_type_application(self, o: TypeApplication) -> None: self.line = o.line for t in o.types: self.type(t) super().visit_type_application(o) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: self.line = o.line if (isinstance(o.rvalue, nodes.CallExpr) and isinstance(o.rvalue.analyzed, nodes.TypeVarExpr)): # Type variable definition -- not a real assignment. 
return if o.type: self.type(o.type) elif self.inferred and not self.all_nodes: # if self.all_nodes is set, lvalues will be visited later for lvalue in o.lvalues: if isinstance(lvalue, nodes.TupleExpr): items = lvalue.items else: items = [lvalue] for item in items: if isinstance(item, RefExpr) and item.is_inferred_def: if self.typemap is not None: self.type(self.typemap.get(item)) super().visit_assignment_stmt(o) def visit_expression_stmt(self, o: ExpressionStmt) -> None: if isinstance(o.expr, (StrExpr, UnicodeExpr, BytesExpr)): # Docstring self.record_line(o.line, TYPE_EMPTY) else: super().visit_expression_stmt(o) def visit_pass_stmt(self, o: PassStmt) -> None: self.record_precise_if_checked_scope(o) def visit_break_stmt(self, o: BreakStmt) -> None: self.record_precise_if_checked_scope(o) def visit_continue_stmt(self, o: ContinueStmt) -> None: self.record_precise_if_checked_scope(o) def visit_name_expr(self, o: NameExpr) -> None: if o.fullname in ('builtins.None', 'builtins.True', 'builtins.False', 'builtins.Ellipsis'): self.record_precise_if_checked_scope(o) else: self.process_node(o) super().visit_name_expr(o) def visit_yield_from_expr(self, o: YieldFromExpr) -> None: if o.expr: o.expr.accept(self) def visit_call_expr(self, o: CallExpr) -> None: self.process_node(o) if o.analyzed: o.analyzed.accept(self) else: o.callee.accept(self) for a in o.args: a.accept(self) self.record_call_target_precision(o) def record_call_target_precision(self, o: CallExpr) -> None: """Record precision of formal argument types used in a call.""" if not self.typemap or o.callee not in self.typemap: # Type not availabe. return callee_type = get_proper_type(self.typemap[o.callee]) if isinstance(callee_type, CallableType): self.record_callable_target_precision(o, callee_type) else: pass # TODO: Handle overloaded functions, etc. def record_callable_target_precision(self, o: CallExpr, callee: CallableType) -> None: """Record imprecision caused by callee argument types. 
This only considers arguments passed in a call expression. Arguments with default values that aren't provided in a call arguably don't contribute to typing imprecision at the *call site* (but they contribute at the function definition). """ assert self.typemap typemap = self.typemap actual_to_formal = map_formals_to_actuals( o.arg_kinds, o.arg_names, callee.arg_kinds, callee.arg_names, lambda n: typemap[o.args[n]]) for formals in actual_to_formal: for n in formals: formal = get_proper_type(callee.arg_types[n]) if isinstance(formal, AnyType): self.record_line(o.line, TYPE_ANY) elif is_imprecise(formal): self.record_line(o.line, TYPE_IMPRECISE) def visit_member_expr(self, o: MemberExpr) -> None: self.process_node(o) super().visit_member_expr(o) def visit_op_expr(self, o: OpExpr) -> None: self.process_node(o) super().visit_op_expr(o) def visit_comparison_expr(self, o: ComparisonExpr) -> None: self.process_node(o) super().visit_comparison_expr(o) def visit_index_expr(self, o: IndexExpr) -> None: self.process_node(o) super().visit_index_expr(o) def visit_assignment_expr(self, o: AssignmentExpr) -> None: self.process_node(o) super().visit_assignment_expr(o) def visit_unary_expr(self, o: UnaryExpr) -> None: self.process_node(o) super().visit_unary_expr(o) def visit_str_expr(self, o: StrExpr) -> None: self.record_precise_if_checked_scope(o) def visit_unicode_expr(self, o: UnicodeExpr) -> None: self.record_precise_if_checked_scope(o) def visit_bytes_expr(self, o: BytesExpr) -> None: self.record_precise_if_checked_scope(o) def visit_int_expr(self, o: IntExpr) -> None: self.record_precise_if_checked_scope(o) def visit_float_expr(self, o: FloatExpr) -> None: self.record_precise_if_checked_scope(o) def visit_complex_expr(self, o: ComplexExpr) -> None: self.record_precise_if_checked_scope(o) def visit_ellipsis(self, o: EllipsisExpr) -> None: self.record_precise_if_checked_scope(o) # Helpers def process_node(self, node: Expression) -> None: if self.all_nodes: if self.typemap is 
def type(self, t: Optional[Type]) -> None:
    """Record statistics for a single inferred type.

    Updates the per-line precision map and the per-kind counters for the
    type itself and every type nested inside it.
    """
    t = get_proper_type(t)
    if not t:
        # If an expression does not have a type, it is often due to dead code.
        # Don't count these because there can be an unanalyzed value on a line with other
        # analyzed expressions, which overwrite the TYPE_UNANALYZED.
        self.record_line(self.line, TYPE_UNANALYZED)
        return
    if isinstance(t, AnyType) and is_special_form_any(t):
        # An Any that comes from a special form is treated as precise.
        # TODO: What if there is an error in special form definition?
        self.record_line(self.line, TYPE_PRECISE)
        return
    if isinstance(t, AnyType):
        self.log(' !! Any type around line %d' % self.line)
        self.num_any_exprs += 1
        self.record_line(self.line, TYPE_ANY)
    elif ((not self.all_nodes and is_imprecise(t)) or
          (self.all_nodes and is_imprecise2(t))):
        # A different imprecision check is used when visiting all nodes.
        self.log(' !! Imprecise type around line %d' % self.line)
        self.num_imprecise_exprs += 1
        self.record_line(self.line, TYPE_IMPRECISE)
    else:
        self.num_precise_exprs += 1
        self.record_line(self.line, TYPE_PRECISE)
    # Classify this type and every type nested within it.
    for typ in get_proper_types(collect_all_inner_types(t)) + [t]:
        if isinstance(typ, AnyType):
            typ = get_original_any(typ)
            if is_special_form_any(typ):
                continue
            self.type_of_any_counter[typ.type_of_any] += 1
            self.num_any_types += 1
            # Remember the Any types seen on each line for later reporting.
            if self.line in self.any_line_map:
                self.any_line_map[self.line].append(typ)
            else:
                self.any_line_map[self.line] = [typ]
        elif isinstance(typ, Instance):
            if typ.args:
                # Generic instance: "complex" when any type argument is
                # itself complex per is_complex().
                if any(is_complex(arg) for arg in typ.args):
                    self.num_complex_types += 1
                else:
                    self.num_generic_types += 1
            else:
                self.num_simple_types += 1
        elif isinstance(typ, FunctionLike):
            self.num_function_types += 1
        elif isinstance(typ, TupleType):
            if any(is_complex(item) for item in typ.items):
                self.num_complex_types += 1
            else:
                self.num_tuple_types += 1
        elif isinstance(typ, TypeVarType):
            self.num_typevar_types += 1
def is_special_module(path: str) -> bool:
    """Return True for core stub files whose stats shouldn't be dumped."""
    return os.path.basename(path) in ('abc.pyi', 'typing.pyi', 'builtins.pyi')


def is_imprecise(t: 'Type') -> bool:
    """Return True if the type contains an Any (ignoring special-form Anys)."""
    return t.accept(HasAnyQuery())


def is_imprecise2(t: 'Type') -> bool:
    """Like is_imprecise, but don't look inside callable types."""
    return t.accept(HasAnyQuery2())


def is_generic(t: 'Type') -> bool:
    """Return True for an instance type that has type arguments."""
    t = get_proper_type(t)
    return isinstance(t, Instance) and bool(t.args)


def is_complex(t: 'Type') -> bool:
    """Return True for generic, function-like, tuple or type variable types."""
    t = get_proper_type(t)
    return is_generic(t) or isinstance(t, (FunctionLike, TupleType, TypeVarType))


def ensure_dir_exists(dir: str) -> None:
    """Create directory (including parents) if it doesn't exist yet.

    Uses exist_ok=True instead of a separate os.path.exists() check, so a
    concurrent creator can't race us between the check and the makedirs call.
    """
    os.makedirs(dir, exist_ok=True)


def is_special_form_any(t: 'AnyType') -> bool:
    """Return True if the Any originates from TypeOfAny.special_form."""
    return get_original_any(t).type_of_any == TypeOfAny.special_form


def get_original_any(t: 'AnyType') -> 'AnyType':
    """Follow a from_another_any link back to the originating Any type.

    The chain is at most one level deep (asserted below).
    """
    if t.type_of_any == TypeOfAny.from_another_any:
        assert t.source_any
        assert t.source_any.type_of_any != TypeOfAny.from_another_any
        t = t.source_any
    return t
def get_id(self, o: object) -> Optional[int]:
    """Return a stable numeric id for o, or None when ids are disabled."""
    mapper = self.id_mapper
    return mapper.id(o) if mapper else None

def format_id(self, o: object) -> str:
    """Render the id of o as '<N>', or '' when ids are disabled."""
    return '<{}>'.format(self.get_id(o)) if self.id_mapper else ''
""" args = [] # type: List[Union[mypy.nodes.Var, Tuple[str, List[mypy.nodes.Node]]]] extra = [] # type: List[Tuple[str, List[mypy.nodes.Var]]] for arg in o.arguments: kind = arg.kind # type: int if kind in (mypy.nodes.ARG_POS, mypy.nodes.ARG_NAMED): args.append(arg.variable) elif kind in (mypy.nodes.ARG_OPT, mypy.nodes.ARG_NAMED_OPT): assert arg.initializer is not None args.append(('default', [arg.variable, arg.initializer])) elif kind == mypy.nodes.ARG_STAR: extra.append(('VarArg', [arg.variable])) elif kind == mypy.nodes.ARG_STAR2: extra.append(('DictVarArg', [arg.variable])) a = [] # type: List[Any] if args: a.append(('Args', args)) if o.type: a.append(o.type) if o.is_generator: a.append('Generator') a.extend(extra) a.append(o.body) return a # Top-level structures def visit_mypy_file(self, o: 'mypy.nodes.MypyFile') -> str: # Skip implicit definitions. a = [o.defs] # type: List[Any] if o.is_bom: a.insert(0, 'BOM') # Omit path to special file with name "main". This is used to simplify # test case descriptions; the file "main" is used by default in many # test cases. if o.path != 'main': # Insert path. Normalize directory separators to / to unify test # case# output in all platforms. a.insert(0, o.path.replace(os.sep, '/')) if o.ignored_lines: a.append('IgnoredLines(%s)' % ', '.join(str(line) for line in sorted(o.ignored_lines))) return self.dump(a, o) def visit_import(self, o: 'mypy.nodes.Import') -> str: a = [] for id, as_id in o.ids: if as_id is not None: a.append('{} : {}'.format(id, as_id)) else: a.append(id) return 'Import:{}({})'.format(o.line, ', '.join(a)) def visit_import_from(self, o: 'mypy.nodes.ImportFrom') -> str: a = [] for name, as_name in o.names: if as_name is not None: a.append('{} : {}'.format(name, as_name)) else: a.append(name) return 'ImportFrom:{}({}, [{}])'.format(o.line, "." * o.relative + o.id, ', '.join(a)) def visit_import_all(self, o: 'mypy.nodes.ImportAll') -> str: return 'ImportAll:{}({})'.format(o.line, "." 
def visit_overloaded_func_def(self, o: 'mypy.nodes.OverloadedFuncDef') -> str:
    """Dump an overloaded function: its items, optional type/impl and tags."""
    entries = list(o.items)  # type: Any
    if o.type:
        entries.insert(0, o.type)
    if o.impl:
        entries.insert(0, o.impl)
    # Tags go just before the last entry, matching historical output.
    for flag, tag in ((o.is_static, 'Static'), (o.is_class, 'Class')):
        if flag:
            entries.insert(-1, tag)
    return self.dump(entries, o)
def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> str:
    """Dump if/elif branches as (If, Then) pairs, plus an optional Else."""
    branches = []  # type: List[Any]
    for i, cond in enumerate(o.expr):
        branches.append(('If', [cond]))
        branches.append(('Then', o.body[i].body))
    if o.else_body:
        return self.dump([branches, ('Else', o.else_body.body)], o)
    return self.dump(branches, o)
def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> str:
    """Dump an assert statement; include the message operand when present."""
    items = [o.expr] if o.msg is None else [o.expr, o.msg]
    return self.dump(items, o)
def visit_float_expr(self, o: 'mypy.nodes.FloatExpr') -> str:
    """Render a float literal node with its value."""
    return 'FloatExpr({})'.format(o.value)

def visit_complex_expr(self, o: 'mypy.nodes.ComplexExpr') -> str:
    """Render a complex literal node with its value."""
    return 'ComplexExpr({})'.format(o.value)

def visit_ellipsis(self, o: 'mypy.nodes.EllipsisExpr') -> str:
    """Render the ... literal; it carries no payload."""
    return 'Ellipsis'
def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> str:
    """Dump a yield expression with its (possibly absent) operand."""
    return self.dump([o.expr], o)

def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> str:
    """Dump 'yield from'; the operand is pre-rendered when present."""
    rendered = [o.expr.accept(self)] if o.expr else []
    return self.dump(rendered, o)

def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> str:
    """Dump a binary operator expression as (op, left, right)."""
    return self.dump([o.op, o.left, o.right], o)
def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> str:
    """Render a type alias by its target type."""
    return 'TypeAliasExpr({})'.format(o.type)

def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> str:
    """Render a named tuple definition with its name and tuple type."""
    return 'NamedTupleExpr:{}({}, {})'.format(o.line, o.info.name, o.info.tuple_type)

def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> str:
    """Render an Enum(...) call with its name and items."""
    return 'EnumCallExpr:{}({}, {})'.format(o.line, o.info.name, o.items)

def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> str:
    """Render a TypedDict definition by its name."""
    return 'TypedDictExpr:{}({})'.format(o.line, o.info.name)

def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> str:
    """Render a _promote special form with its target type."""
    return 'PromoteExpr:{}({})'.format(o.line, o.type)
def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> str:
    """Dump a slice; a missing begin/end index renders as an empty string."""
    parts = [o.begin_index, o.end_index, o.stride]  # type: List[Any]
    for i in (0, 1):
        if not parts[i]:
            parts[i] = ''
    return self.dump(parts, o)
def indent(s: str, n: int) -> str:
    """Indent every line of s (newline-separated) by n spaces."""
    pad = ' ' * n
    return pad + s.replace('\n', '\n' + pad)
# Type alias for signatures strings in format ('func_name', '(arg, opt_arg=False)').
Sig = Tuple[str, str]

# A (possibly dotted) identifier, optionally followed by [], commas and spaces.
_TYPE_RE = re.compile(r'^[a-zA-Z_][\w\[\], ]*(\.[a-zA-Z_][\w\[\], ]*)*$')  # type: Final
# An identifier, optionally prefixed by * or **.
_ARG_NAME_RE = re.compile(r'\**[A-Za-z_][A-Za-z0-9_]*$')  # type: Final


def is_valid_type(s: str) -> bool:
    """Try to determine whether a string might be a valid type annotation."""
    if s in ('True', 'False', 'retval'):
        # Common docstring noise that is never a real type.
        return False
    if ',' in s and '[' not in s:
        # A bare comma-separated list is a value tuple, not a type.
        return False
    return _TYPE_RE.match(s) is not None


class ArgSig:
    """Signature info for a single argument."""

    def __init__(self, name: str, type: Optional[str] = None, default: bool = False):
        self.name = name
        if type and not is_valid_type(type):
            raise ValueError("Invalid type: " + type)
        self.type = type
        # Does this argument have a default value?
        self.default = default

    def __repr__(self) -> str:
        return "ArgSig(name={}, type={}, default={})".format(
            repr(self.name), repr(self.type), repr(self.default))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, ArgSig):
            return False
        return (self.name == other.name
                and self.type == other.type
                and self.default == other.default)
def add_token(self, token: tokenize.TokenInfo) -> None:
    """Process next token from the token stream.

    Drives a small pushdown state machine (self.state is the stack): text
    accumulates in self.accumulator until a delimiter token flushes it into
    the current argument name/type/default or the return type.
    """
    if (token.type == tokenize.NAME and token.string == self.function_name and
            self.state[-1] == STATE_INIT):
        # Saw the target function name; expect '(' next.
        self.state.append(STATE_FUNCTION_NAME)
    elif (token.type == tokenize.OP and token.string == '(' and
            self.state[-1] == STATE_FUNCTION_NAME):
        # Start of the argument list of a candidate signature.
        self.state.pop()
        self.accumulator = ""
        self.found = True
        self.state.append(STATE_ARGUMENT_LIST)
    elif self.state[-1] == STATE_FUNCTION_NAME:
        # Reset state, function name not followed by '('.
        self.state.pop()
    elif (token.type == tokenize.OP and token.string in ('[', '(', '{') and
            self.state[-1] != STATE_INIT):
        # Nested bracket (e.g. a generic type); keep accumulating verbatim.
        self.accumulator += token.string
        self.state.append(STATE_OPEN_BRACKET)
    elif (token.type == tokenize.OP and token.string in (']', ')', '}') and
            self.state[-1] == STATE_OPEN_BRACKET):
        self.accumulator += token.string
        self.state.pop()
    elif (token.type == tokenize.OP and token.string == ':' and
            self.state[-1] == STATE_ARGUMENT_LIST):
        # "name:" -- what accumulated so far is the argument name.
        self.arg_name = self.accumulator
        self.accumulator = ""
        self.state.append(STATE_ARGUMENT_TYPE)
    elif (token.type == tokenize.OP and token.string == '=' and
            self.state[-1] in (STATE_ARGUMENT_LIST, STATE_ARGUMENT_TYPE)):
        # "=" ends either the type annotation or the bare argument name.
        if self.state[-1] == STATE_ARGUMENT_TYPE:
            self.arg_type = self.accumulator
            self.state.pop()
        else:
            self.arg_name = self.accumulator
        self.accumulator = ""
        self.state.append(STATE_ARGUMENT_DEFAULT)
    elif (token.type == tokenize.OP and token.string in (',', ')') and
            self.state[-1] in (STATE_ARGUMENT_LIST, STATE_ARGUMENT_DEFAULT,
                               STATE_ARGUMENT_TYPE)):
        # End of one argument; flush accumulator into whichever part was open.
        if self.state[-1] == STATE_ARGUMENT_DEFAULT:
            self.arg_default = self.accumulator
            self.state.pop()
        elif self.state[-1] == STATE_ARGUMENT_TYPE:
            self.arg_type = self.accumulator
            self.state.pop()
        elif self.state[-1] == STATE_ARGUMENT_LIST:
            self.arg_name = self.accumulator
            if not _ARG_NAME_RE.match(self.arg_name):
                # Invalid argument name.
                self.reset()
                return
        if token.string == ')':
            self.state.pop()
        try:
            self.args.append(ArgSig(name=self.arg_name, type=self.arg_type,
                                    default=bool(self.arg_default)))
        except ValueError:
            # wrong type, use Any
            self.args.append(ArgSig(name=self.arg_name, type=None,
                                    default=bool(self.arg_default)))
        self.arg_name = ""
        self.arg_type = None
        self.arg_default = None
        self.accumulator = ""
    elif token.type == tokenize.OP and token.string == '->' and self.state[-1] == STATE_INIT:
        # Return-type arrow after the closing ')'.
        self.accumulator = ""
        self.state.append(STATE_RETURN_VALUE)
    # ENDMAKER is necessary for python 3.4 and 3.5.
    elif (token.type in (tokenize.NEWLINE, tokenize.ENDMARKER) and
            self.state[-1] in (STATE_INIT, STATE_RETURN_VALUE)):
        # End of a line: finalize the return type (if any) and, when a
        # signature was being built, record it.
        if self.state[-1] == STATE_RETURN_VALUE:
            if not is_valid_type(self.accumulator):
                self.reset()
                return
            self.ret_type = self.accumulator
            self.accumulator = ""
            self.state.pop()
        if self.found:
            self.signatures.append(FunctionSig(name=self.function_name,
                                               args=self.args,
                                               ret_type=self.ret_type))
            self.found = False
        self.args = []
        self.ret_type = 'Any'
        # Leave state as INIT.
    else:
        # Any other token is part of the current name/type/default text.
        self.accumulator += token.string
def parse_signature(sig: str) -> Optional[Tuple[str, List[str], List[str]]]:
    """Split a signature like "func_name(arg, opt_arg=False)" into parts.

    Return (name, positional argument names, optional argument names), or
    None when the string doesn't look like a signature.
    """
    match = re.match(r'([.a-zA-Z0-9_]+)\(([^)]*)\)', sig)
    if match is None:
        return None
    # Keep only the last component of a dotted name.
    name = match.group(1).split('.')[-1]
    arg_string = match.group(2)
    if not arg_string.strip():
        # Simple case -- no arguments.
        return name, [], []
    args = [part.strip() for part in arg_string.split(',')]
    positional = []  # type: List[str]
    optional = []  # type: List[str]
    idx = 0
    while idx < len(args):
        # Optional arguments may be written either as x=None or as [x].
        cur = args[idx]
        if cur.startswith('[') or '=' in cur:
            break
        positional.append(cur.rstrip('['))
        idx += 1
        if cur.endswith('['):
            # An opening '[' glued to this argument starts the optional part.
            break
    for cur in args[idx:]:
        optional.append(cur.strip('[]').split('=')[0])
    return name, positional, optional


def build_signature(positional: Sequence[str], optional: Sequence[str]) -> str:
    """Render positional and optional argument names as "(a, b=...)"."""
    rendered = list(positional)
    for arg in optional:
        # Star args keep their form; plain optionals get an elided default.
        rendered.append(arg if arg.startswith('*') else '%s=...' % arg)
    sig = '({})'.format(', '.join(rendered))
    # Ad-hoc fix: drop a bare (self) entirely.
    return sig.replace('(self)', '')


def parse_all_signatures(lines: Sequence[str]) -> 'Tuple[List[Sig], List[Sig]]':
    """Scan reST directive lines for function/method/class signatures.

    Return (function signatures, class signatures), each sorted.
    """
    func_sigs = []  # type: List[Sig]
    class_sigs = []  # type: List[Sig]
    directive_re = re.compile(r'\.\. *(function|method|class) *:: *[a-zA-Z_]')
    for raw in lines:
        line = raw.strip()
        m = directive_re.match(line)
        if not m:
            continue
        parsed = parse_signature(line.split('::')[1].strip())
        if not parsed:
            continue
        name, fixed, optional = parsed
        target = class_sigs if m.group(1) == 'class' else func_sigs
        target.append((name, build_signature(fixed, optional)))
    return sorted(func_sigs), sorted(class_sigs)


def find_unique_signatures(sigs: 'Sequence[Sig]') -> 'List[Sig]':
    """Keep only names whose discovered signatures all agree.

    Names found with more than one distinct signature are dropped; the
    result is sorted by name.
    """
    by_name = {}  # type: Dict[str, List[str]]
    for name, sig in sigs:
        by_name.setdefault(name, []).append(sig)
    return sorted((name, sig_list[0])
                  for name, sig_list in by_name.items()
                  if len(set(sig_list)) == 1)
In the type string, we allow the following characters: * dot: because sometimes classes are annotated using full path * brackets: to allow type hints like List[int] * comma/space: things like Tuple[int, int] """ if not docstr: return None test_str = r'^([a-zA-Z0-9_, \.\[\]]*): ' m = re.match(test_str, docstr) return m.group(1) if m else None mypy-0.761/mypy/stubgen.py0000755€tŠÔÚ€2›s®0000017614113576752246021754 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Generator of dynamically typed draft stubs for arbitrary modules. The logic of this script can be split in three steps: * parsing options and finding sources: - use runtime imports be default (to find also C modules) - or use mypy's mechanisms, if importing is prohibited * (optionally) semantically analysing the sources using mypy (as a single set) * emitting the stubs text: - for Python modules: from ASTs using StubGenerator - for C modules using runtime introspection and (optionally) Sphinx docs During first and third steps some problematic files can be skipped, but any blocking error during second step will cause the whole program to stop. Basic usage: $ stubgen foo.py bar.py some_directory => Generate out/foo.pyi, out/bar.pyi, and stubs for some_directory (recursively). $ stubgen -m urllib.parse => Generate out/urllib/parse.pyi. $ stubgen -p urllib => Generate stubs for whole urlib package (recursively). For Python 2 mode, use --py2: $ stubgen --py2 -m textwrap For C modules, you can get more precise function signatures by parsing .rst (Sphinx) documentation for extra information. For this, use the --doc-dir option: $ stubgen --doc-dir /Python-3.4.2/Doc/library -m curses Note: The generated stubs should be verified manually. TODO: - support stubs for C modules in Python 2 mode - detect 'if PY2 / is_py2' etc. 
and either preserve those or only include Python 2 or 3 case - maybe use .rst docs also for Python modules - maybe export more imported names if there is no __all__ (this affects ssl.SSLError, for example) - a quick and dirty heuristic would be to turn this on if a module has something like 'from x import y as _y' - we don't seem to always detect properties ('closed' in 'io', for example) """ import glob import os import os.path import sys import traceback import argparse from collections import defaultdict from typing import ( List, Dict, Tuple, Iterable, Mapping, Optional, Set, cast, ) from typing_extensions import Final import mypy.build import mypy.parse import mypy.errors import mypy.traverser import mypy.mixedtraverser import mypy.util from mypy import defaults from mypy.modulefinder import FindModuleCache, SearchPaths, BuildSource, default_lib_path from mypy.nodes import ( Expression, IntExpr, UnaryExpr, StrExpr, BytesExpr, NameExpr, FloatExpr, MemberExpr, TupleExpr, ListExpr, ComparisonExpr, CallExpr, IndexExpr, EllipsisExpr, ClassDef, MypyFile, Decorator, AssignmentStmt, TypeInfo, IfStmt, ImportAll, ImportFrom, Import, FuncDef, FuncBase, TempNode, Block, Statement, OverloadedFuncDef, ARG_POS, ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT ) from mypy.stubgenc import generate_stub_for_c_module from mypy.stubutil import ( default_py2_interpreter, CantImport, generate_guarded, walk_packages, find_module_path_and_all_py2, find_module_path_and_all_py3, report_missing, fail_missing, remove_misplaced_type_comments, common_dir_prefix ) from mypy.stubdoc import parse_all_signatures, find_unique_signatures, Sig from mypy.options import Options as MypyOptions from mypy.types import ( Type, TypeStrVisitor, CallableType, UnboundType, NoneType, TupleType, TypeList, Instance, AnyType ) from mypy.visitor import NodeVisitor from mypy.find_sources import create_source_list, InvalidSourceList from mypy.build import build from mypy.errors import CompileError, Errors from 
mypy.traverser import has_return_statement from mypy.moduleinspect import ModuleInspect # Common ways of naming package containing vendored modules. VENDOR_PACKAGES = [ 'packages', 'vendor', 'vendored', '_vendor', '_vendored_packages', ] # type: Final # Avoid some file names that are unnecessary or likely to cause trouble (\n for end of path). BLACKLIST = [ '/six.py\n', # Likely vendored six; too dynamic for us to handle '/vendored/', # Vendored packages '/vendor/', # Vendored packages '/_vendor/', '/_vendored_packages/', ] # type: Final # Special-cased names that are implicitly exported from the stub (from m import y as y). EXTRA_EXPORTED = { 'pyasn1_modules.rfc2437.univ', 'pyasn1_modules.rfc2459.char', 'pyasn1_modules.rfc2459.univ', } # type: Final # These names should be omitted from generated stubs. IGNORED_DUNDERS = { '__all__', '__author__', '__version__', '__about__', '__copyright__', '__email__', '__license__', '__summary__', '__title__', '__uri__', '__str__', '__repr__', '__getstate__', '__setstate__', '__slots__', } # type: Final # These methods are expected to always return a non-trivial value. METHODS_WITH_RETURN_VALUE = { '__ne__', '__eq__', '__lt__', '__le__', '__gt__', '__ge__', '__hash__', '__iter__', } # type: Final class Options: """Represents stubgen options. This class is mutable to simplify testing. """ def __init__(self, pyversion: Tuple[int, int], no_import: bool, doc_dir: str, search_path: List[str], interpreter: str, parse_only: bool, ignore_errors: bool, include_private: bool, output_dir: str, modules: List[str], packages: List[str], files: List[str], verbose: bool, quiet: bool, export_less: bool) -> None: # See parse_options for descriptions of the flags. 
self.pyversion = pyversion self.no_import = no_import self.doc_dir = doc_dir self.search_path = search_path self.interpreter = interpreter self.decointerpreter = interpreter self.parse_only = parse_only self.ignore_errors = ignore_errors self.include_private = include_private self.output_dir = output_dir self.modules = modules self.packages = packages self.files = files self.verbose = verbose self.quiet = quiet self.export_less = export_less class StubSource(BuildSource): """A single source for stub: can be a Python or C module. A simple extension of BuildSource that also carries the AST and the value of __all__ detected at runtime. """ def __init__(self, module: str, path: Optional[str] = None, runtime_all: Optional[List[str]] = None) -> None: super().__init__(path, module, None) self.runtime_all = runtime_all self.ast = None # type: Optional[MypyFile] # What was generated previously in the stub file. We keep track of these to generate # nicely formatted output (add empty line between non-empty classes, for example). EMPTY = 'EMPTY' # type: Final FUNC = 'FUNC' # type: Final CLASS = 'CLASS' # type: Final EMPTY_CLASS = 'EMPTY_CLASS' # type: Final VAR = 'VAR' # type: Final NOT_IN_ALL = 'NOT_IN_ALL' # type: Final # Indicates that we failed to generate a reasonable output # for a given node. These should be manually replaced by a user. ERROR_MARKER = '' # type: Final class AnnotationPrinter(TypeStrVisitor): """Visitor used to print existing annotations in a file. The main difference from TypeStrVisitor is a better treatment of unbound types. Notes: * This visitor doesn't add imports necessary for annotations, this is done separately by ImportTracker. * It can print all kinds of types, but the generated strings may not be valid (notably callable types) since it prints the same string that reveal_type() does. * For Instance types it prints the fully qualified names. """ # TODO: Generate valid string representation for callable types. 
# TODO: Use short names for Instances. def __init__(self, stubgen: 'StubGenerator') -> None: super().__init__() self.stubgen = stubgen def visit_any(self, t: AnyType) -> str: s = super().visit_any(t) self.stubgen.import_tracker.require_name(s) return s def visit_unbound_type(self, t: UnboundType) -> str: s = t.name self.stubgen.import_tracker.require_name(s) if t.args: s += '[{}]'.format(self.list_str(t.args)) return s def visit_none_type(self, t: NoneType) -> str: return "None" def visit_type_list(self, t: TypeList) -> str: return '[{}]'.format(self.list_str(t.items)) class AliasPrinter(NodeVisitor[str]): """Visitor used to collect type aliases _and_ type variable definitions. Visit r.h.s of the definition to get the string representation of type alias. """ def __init__(self, stubgen: 'StubGenerator') -> None: self.stubgen = stubgen super().__init__() def visit_call_expr(self, node: CallExpr) -> str: # Call expressions are not usually types, but we also treat `X = TypeVar(...)` as a # type alias that has to be preserved (even if TypeVar is not the same as an alias) callee = node.callee.accept(self) args = [] for name, arg, kind in zip(node.arg_names, node.args, node.arg_kinds): if kind == ARG_POS: args.append(arg.accept(self)) elif kind == ARG_STAR: args.append('*' + arg.accept(self)) elif kind == ARG_STAR2: args.append('**' + arg.accept(self)) elif kind == ARG_NAMED: args.append('{}={}'.format(name, arg.accept(self))) else: raise ValueError("Unknown argument kind %d in call" % kind) return "{}({})".format(callee, ", ".join(args)) def visit_name_expr(self, node: NameExpr) -> str: self.stubgen.import_tracker.require_name(node.name) return node.name def visit_member_expr(self, o: MemberExpr) -> str: node = o # type: Expression trailer = '' while isinstance(node, MemberExpr): trailer = '.' 
+ node.name + trailer node = node.expr if not isinstance(node, NameExpr): return ERROR_MARKER self.stubgen.import_tracker.require_name(node.name) return node.name + trailer def visit_str_expr(self, node: StrExpr) -> str: return repr(node.value) def visit_index_expr(self, node: IndexExpr) -> str: base = node.base.accept(self) index = node.index.accept(self) return "{}[{}]".format(base, index) def visit_tuple_expr(self, node: TupleExpr) -> str: return ", ".join(n.accept(self) for n in node.items) def visit_list_expr(self, node: ListExpr) -> str: return "[{}]".format(", ".join(n.accept(self) for n in node.items)) def visit_ellipsis(self, node: EllipsisExpr) -> str: return "..." class ImportTracker: """Record necessary imports during stub generation.""" def __init__(self) -> None: # module_for['foo'] has the module name where 'foo' was imported from, or None if # 'foo' is a module imported directly; examples # 'from pkg.m import f as foo' ==> module_for['foo'] == 'pkg.m' # 'from m import f' ==> module_for['f'] == 'm' # 'import m' ==> module_for['m'] == None self.module_for = {} # type: Dict[str, Optional[str]] # direct_imports['foo'] is the module path used when the name 'foo' was added to the # namespace. 
# import foo.bar.baz ==> direct_imports['foo'] == 'foo.bar.baz' self.direct_imports = {} # type: Dict[str, str] # reverse_alias['foo'] is the name that 'foo' had originally when imported with an # alias; examples # 'import numpy as np' ==> reverse_alias['np'] == 'numpy' # 'from decimal import Decimal as D' ==> reverse_alias['D'] == 'Decimal' self.reverse_alias = {} # type: Dict[str, str] # required_names is the set of names that are actually used in a type annotation self.required_names = set() # type: Set[str] # Names that should be reexported if they come from another module self.reexports = set() # type: Set[str] def add_import_from(self, module: str, names: List[Tuple[str, Optional[str]]]) -> None: for name, alias in names: self.module_for[alias or name] = module if alias: self.reverse_alias[alias] = name def add_import(self, module: str, alias: Optional[str] = None) -> None: name = module.split('.')[0] self.module_for[alias or name] = None self.direct_imports[name] = module if alias: self.reverse_alias[alias] = name def require_name(self, name: str) -> None: self.required_names.add(name.split('.')[0]) def reexport(self, name: str) -> None: """Mark a given non qualified name as needed in __all__. This means that in case it comes from a module, it should be imported with an alias even is the alias is the same as the name. """ self.require_name(name) self.reexports.add(name) def import_lines(self) -> List[str]: """The list of required import lines (as strings with python code).""" result = [] # To summarize multiple names imported from a same module, we collect those # in the `module_map` dictionary, mapping a module path to the list of names that should # be imported from it. 
the names can also be alias in the form 'original as alias' module_map = defaultdict(list) # type: Mapping[str, List[str]] for name in sorted(self.required_names): # If we haven't seen this name in an import statement, ignore it if name not in self.module_for: continue m = self.module_for[name] if m is not None: # This name was found in a from ... import ... # Collect the name in the module_map if name in self.reverse_alias: name = '{} as {}'.format(self.reverse_alias[name], name) elif name in self.reexports: name = '{} as {}'.format(name, name) module_map[m].append(name) else: # This name was found in an import ... # We can already generate the import line if name in self.reverse_alias: name, alias = self.reverse_alias[name], name source = self.direct_imports.get(name, 'FIXME') result.append("import {} as {}\n".format(source, alias)) elif name in self.reexports: assert '.' not in name # Because reexports only has nonqualified names result.append("import {} as {}\n".format(name, name)) else: result.append("import {}\n".format(self.direct_imports[name])) # Now generate all the from ... import ... lines collected in module_map for module, names in sorted(module_map.items()): result.append("from {} import {}\n".format(module, ', '.join(sorted(names)))) return result def find_defined_names(file: MypyFile) -> Set[str]: finder = DefinitionFinder() file.accept(finder) return finder.names class DefinitionFinder(mypy.traverser.TraverserVisitor): """Find names of things defined at the top level of a module.""" # TODO: Assignment statements etc. def __init__(self) -> None: # Short names of things defined at the top level. self.names = set() # type: Set[str] def visit_class_def(self, o: ClassDef) -> None: # Don't recurse into classes, as we only keep track of top-level definitions. self.names.add(o.name) def visit_func_def(self, o: FuncDef) -> None: # Don't recurse, as we only keep track of top-level definitions. 
self.names.add(o.name) def find_referenced_names(file: MypyFile) -> Set[str]: finder = ReferenceFinder() file.accept(finder) return finder.refs class ReferenceFinder(mypy.mixedtraverser.MixedTraverserVisitor): """Find all name references (both local and global).""" # TODO: Filter out local variable and class attribute references def __init__(self) -> None: # Short names of things defined at the top level. self.refs = set() # type: Set[str] def visit_block(self, block: Block) -> None: if not block.is_unreachable: super().visit_block(block) def visit_name_expr(self, e: NameExpr) -> None: self.refs.add(e.name) def visit_instance(self, t: Instance) -> None: self.add_ref(t.type.fullname) super().visit_instance(t) def visit_unbound_type(self, t: UnboundType) -> None: if t.name: self.add_ref(t.name) def visit_tuple_type(self, t: TupleType) -> None: # Ignore fallback for item in t.items: item.accept(self) def visit_callable_type(self, t: CallableType) -> None: # Ignore fallback for arg in t.arg_types: arg.accept(self) t.ret_type.accept(self) def add_ref(self, fullname: str) -> None: self.refs.add(fullname.split('.')[-1]) class StubGenerator(mypy.traverser.TraverserVisitor): """Generate stub text from a mypy AST.""" def __init__(self, _all_: Optional[List[str]], pyversion: Tuple[int, int], include_private: bool = False, analyzed: bool = False, export_less: bool = False) -> None: # Best known value of __all__. self._all_ = _all_ self._output = [] # type: List[str] self._decorators = [] # type: List[str] self._import_lines = [] # type: List[str] # Current indent level (indent is hardcoded to 4 spaces). self._indent = '' # Stack of defined variables (per scope). self._vars = [[]] # type: List[List[str]] # What was generated previously in the stub file. self._state = EMPTY self._toplevel_names = [] # type: List[str] self._pyversion = pyversion self._include_private = include_private self.import_tracker = ImportTracker() # Was the tree semantically analysed before? 
self.analyzed = analyzed # Disable implicit exports of package-internal imports? self.export_less = export_less # Add imports that could be implicitly generated self.import_tracker.add_import_from("collections", [("namedtuple", None)]) # Names in __all__ are required for name in _all_ or (): if name not in IGNORED_DUNDERS: self.import_tracker.reexport(name) self.defined_names = set() # type: Set[str] # Short names of methods defined in the body of the current class self.method_names = set() # type: Set[str] def visit_mypy_file(self, o: MypyFile) -> None: self.module = o.fullname # Current module being processed self.path = o.path self.defined_names = find_defined_names(o) self.referenced_names = find_referenced_names(o) typing_imports = ["Any", "Optional", "TypeVar"] for t in typing_imports: if t not in self.defined_names: alias = None else: alias = '_' + t self.import_tracker.add_import_from("typing", [(t, alias)]) super().visit_mypy_file(o) undefined_names = [name for name in self._all_ or [] if name not in self._toplevel_names] if undefined_names: if self._state != EMPTY: self.add('\n') self.add('# Names in __all__ with no definition:\n') for name in sorted(undefined_names): self.add('# %s\n' % name) def visit_func_def(self, o: FuncDef, is_abstract: bool = False) -> None: if (self.is_private_name(o.name, o.fullname) or self.is_not_in_all(o.name) or self.is_recorded_name(o.name)): self.clear_decorators() return if not self._indent and self._state not in (EMPTY, FUNC) and not o.is_awaitable_coroutine: self.add('\n') if not self.is_top_level(): self_inits = find_self_initializers(o) for init, value in self_inits: if init in self.method_names: # Can't have both an attribute and a method/property with the same name. continue init_code = self.get_init(init, value) if init_code: self.add(init_code) # dump decorators, just before "def ..." 
for s in self._decorators: self.add(s) self.clear_decorators() self.add("%s%sdef %s(" % (self._indent, 'async ' if o.is_coroutine else '', o.name)) self.record_name(o.name) args = [] # type: List[str] for i, arg_ in enumerate(o.arguments): var = arg_.variable kind = arg_.kind name = var.name annotated_type = (o.unanalyzed_type.arg_types[i] if isinstance(o.unanalyzed_type, CallableType) else None) # I think the name check is incorrect: there are libraries which # name their 0th argument other than self/cls is_self_arg = i == 0 and name == 'self' is_cls_arg = i == 0 and name == 'cls' if (annotated_type is None and not arg_.initializer and not is_self_arg and not is_cls_arg): self.add_typing_import("Any") annotation = ": {}".format(self.typing_name("Any")) elif annotated_type and not is_self_arg: annotation = ": {}".format(self.print_annotation(annotated_type)) else: annotation = "" if arg_.initializer: initializer = '...' if kind in (ARG_NAMED, ARG_NAMED_OPT) and not any(arg.startswith('*') for arg in args): args.append('*') if not annotation: typename = self.get_str_type_of_node(arg_.initializer, True) annotation = ': {} = ...'.format(typename) else: annotation += '={}'.format(initializer) arg = name + annotation elif kind == ARG_STAR: arg = '*%s%s' % (name, annotation) elif kind == ARG_STAR2: arg = '**%s%s' % (name, annotation) else: arg = name + annotation args.append(arg) retname = None if isinstance(o.unanalyzed_type, CallableType): retname = self.print_annotation(o.unanalyzed_type.ret_type) elif isinstance(o, FuncDef) and (o.is_abstract or o.name in METHODS_WITH_RETURN_VALUE): # Always assume abstract methods return Any unless explicitly annotated. Also # some dunder methods should not have a None return type. 
retname = self.typing_name('Any') self.add_typing_import("Any") elif o.name == '__init__' or not has_return_statement(o) and not is_abstract: retname = 'None' retfield = '' if retname is not None: retfield = ' -> ' + retname self.add(', '.join(args)) self.add("){}: ...\n".format(retfield)) self._state = FUNC def visit_decorator(self, o: Decorator) -> None: if self.is_private_name(o.func.name, o.func.fullname): return is_abstract = False for decorator in o.original_decorators: if isinstance(decorator, NameExpr): if self.process_name_expr_decorator(decorator, o): is_abstract = True elif isinstance(decorator, MemberExpr): if self.process_member_expr_decorator(decorator, o): is_abstract = True self.visit_func_def(o.func, is_abstract=is_abstract) def process_name_expr_decorator(self, expr: NameExpr, context: Decorator) -> bool: """Process a function decorator of form @foo. Only preserve certain special decorators such as @abstractmethod. Return True if the decorator makes a method abstract. """ is_abstract = False name = expr.name if name in ('property', 'staticmethod', 'classmethod'): self.add_decorator(name) elif self.import_tracker.module_for.get(name) in ('asyncio', 'asyncio.coroutines', 'types'): self.add_coroutine_decorator(context.func, name, name) elif self.refers_to_fullname(name, 'abc.abstractmethod'): self.add_decorator(name) self.import_tracker.require_name(name) is_abstract = True elif self.refers_to_fullname(name, 'abc.abstractproperty'): self.add_decorator('property') self.add_decorator('abc.abstractmethod') is_abstract = True return is_abstract def refers_to_fullname(self, name: str, fullname: str) -> bool: module, short = fullname.rsplit('.', 1) return (self.import_tracker.module_for.get(name) == module and (name == short or self.import_tracker.reverse_alias.get(name) == short)) def process_member_expr_decorator(self, expr: MemberExpr, context: Decorator) -> bool: """Process a function decorator of form @foo.bar. 
Only preserve certain special decorators such as @abstractmethod. Return True if the decorator makes a method abstract. """ is_abstract = False if expr.name == 'setter' and isinstance(expr.expr, NameExpr): self.add_decorator('%s.setter' % expr.expr.name) elif (isinstance(expr.expr, NameExpr) and (expr.expr.name == 'abc' or self.import_tracker.reverse_alias.get('abc')) and expr.name in ('abstractmethod', 'abstractproperty')): if expr.name == 'abstractproperty': self.import_tracker.require_name(expr.expr.name) self.add_decorator('%s' % ('property')) self.add_decorator('%s.%s' % (expr.expr.name, 'abstractmethod')) else: self.import_tracker.require_name(expr.expr.name) self.add_decorator('%s.%s' % (expr.expr.name, expr.name)) is_abstract = True elif expr.name == 'coroutine': if (isinstance(expr.expr, MemberExpr) and expr.expr.name == 'coroutines' and isinstance(expr.expr.expr, NameExpr) and (expr.expr.expr.name == 'asyncio' or self.import_tracker.reverse_alias.get(expr.expr.expr.name) == 'asyncio')): self.add_coroutine_decorator(context.func, '%s.coroutines.coroutine' % (expr.expr.expr.name,), expr.expr.expr.name) elif (isinstance(expr.expr, NameExpr) and (expr.expr.name in ('asyncio', 'types') or self.import_tracker.reverse_alias.get(expr.expr.name) in ('asyncio', 'asyncio.coroutines', 'types'))): self.add_coroutine_decorator(context.func, expr.expr.name + '.coroutine', expr.expr.name) return is_abstract def visit_class_def(self, o: ClassDef) -> None: self.method_names = find_method_names(o.defs.body) sep = None # type: Optional[int] if not self._indent and self._state != EMPTY: sep = len(self._output) self.add('\n') self.add('%sclass %s' % (self._indent, o.name)) self.record_name(o.name) base_types = self.get_base_types(o) if base_types: for base in base_types: self.import_tracker.require_name(base) if isinstance(o.metaclass, (NameExpr, MemberExpr)): meta = o.metaclass.accept(AliasPrinter(self)) base_types.append('metaclass=' + meta) elif self.analyzed and 
o.info.is_abstract: base_types.append('metaclass=abc.ABCMeta') self.import_tracker.add_import('abc') self.import_tracker.require_name('abc') if base_types: self.add('(%s)' % ', '.join(base_types)) self.add(':\n') n = len(self._output) self._indent += ' ' self._vars.append([]) super().visit_class_def(o) self._indent = self._indent[:-4] self._vars.pop() self._vars[-1].append(o.name) if len(self._output) == n: if self._state == EMPTY_CLASS and sep is not None: self._output[sep] = '' self._output[-1] = self._output[-1][:-1] + ' ...\n' self._state = EMPTY_CLASS else: self._state = CLASS self.method_names = set() def get_base_types(self, cdef: ClassDef) -> List[str]: """Get list of base classes for a class.""" base_types = [] # type: List[str] for base in cdef.base_type_exprs: if isinstance(base, NameExpr): if base.name != 'object': base_types.append(base.name) elif isinstance(base, MemberExpr): modname = get_qualified_name(base.expr) base_types.append('%s.%s' % (modname, base.name)) elif isinstance(base, IndexExpr): p = AliasPrinter(self) base_types.append(base.accept(p)) return base_types def visit_block(self, o: Block) -> None: # Unreachable statements may be partially uninitialized and that may # cause trouble. 
if not o.is_unreachable: super().visit_block(o) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: foundl = [] for lvalue in o.lvalues: if isinstance(lvalue, NameExpr) and self.is_namedtuple(o.rvalue): assert isinstance(o.rvalue, CallExpr) self.process_namedtuple(lvalue, o.rvalue) continue if (self.is_top_level() and isinstance(lvalue, NameExpr) and not self.is_private_name(lvalue.name) and # it is never an alias with explicit annotation not o.unanalyzed_type and self.is_alias_expression(o.rvalue)): self.process_typealias(lvalue, o.rvalue) continue if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): items = lvalue.items if isinstance(o.unanalyzed_type, TupleType): # type: ignore annotations = o.unanalyzed_type.items # type: Iterable[Optional[Type]] else: annotations = [None] * len(items) else: items = [lvalue] annotations = [o.unanalyzed_type] sep = False found = False for item, annotation in zip(items, annotations): if isinstance(item, NameExpr): init = self.get_init(item.name, o.rvalue, annotation) if init: found = True if not sep and not self._indent and \ self._state not in (EMPTY, VAR): init = '\n' + init sep = True self.add(init) self.record_name(item.name) foundl.append(found) if all(foundl): self._state = VAR def is_namedtuple(self, expr: Expression) -> bool: if not isinstance(expr, CallExpr): return False callee = expr.callee return ((isinstance(callee, NameExpr) and callee.name.endswith('namedtuple')) or (isinstance(callee, MemberExpr) and callee.name == 'namedtuple')) def process_namedtuple(self, lvalue: NameExpr, rvalue: CallExpr) -> None: if self._state != EMPTY: self.add('\n') name = repr(getattr(rvalue.args[0], 'value', ERROR_MARKER)) if isinstance(rvalue.args[1], StrExpr): items = repr(rvalue.args[1].value) elif isinstance(rvalue.args[1], (ListExpr, TupleExpr)): list_items = cast(List[StrExpr], rvalue.args[1].items) items = '[%s]' % ', '.join(repr(item.value) for item in list_items) else: self.add('%s%s: Any' % (self._indent, 
lvalue.name)) self.import_tracker.require_name('Any') return self.import_tracker.require_name('namedtuple') self.add('%s%s = namedtuple(%s, %s)\n' % (self._indent, lvalue.name, name, items)) self._state = CLASS def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool: """Return True for things that look like target for an alias. Used to know if assignments look like type aliases, function alias, or module alias. """ # Assignment of TypeVar(...) are passed through if (isinstance(expr, CallExpr) and isinstance(expr.callee, NameExpr) and expr.callee.name == 'TypeVar'): return True elif isinstance(expr, EllipsisExpr): return not top_level elif isinstance(expr, NameExpr): if expr.name in ('True', 'False'): return False elif expr.name == 'None': return not top_level else: return not self.is_private_name(expr.name) elif isinstance(expr, MemberExpr) and self.analyzed: # Also add function and module aliases. return ((top_level and isinstance(expr.node, (FuncDef, Decorator, MypyFile)) or isinstance(expr.node, TypeInfo)) and not self.is_private_member(expr.node.fullname)) elif (isinstance(expr, IndexExpr) and isinstance(expr.base, NameExpr) and not self.is_private_name(expr.base.name)): if isinstance(expr.index, TupleExpr): indices = expr.index.items else: indices = [expr.index] if expr.base.name == 'Callable' and len(indices) == 2: args, ret = indices if isinstance(args, EllipsisExpr): indices = [ret] elif isinstance(args, ListExpr): indices = args.items + [ret] else: return False return all(self.is_alias_expression(i, top_level=False) for i in indices) else: return False def process_typealias(self, lvalue: NameExpr, rvalue: Expression) -> None: p = AliasPrinter(self) self.add("{} = {}\n".format(lvalue.name, rvalue.accept(p))) self.record_name(lvalue.name) self._vars[-1].append(lvalue.name) def visit_if_stmt(self, o: IfStmt) -> None: # Ignore if __name__ == '__main__'. 
expr = o.expr[0] if (isinstance(expr, ComparisonExpr) and isinstance(expr.operands[0], NameExpr) and isinstance(expr.operands[1], StrExpr) and expr.operands[0].name == '__name__' and '__main__' in expr.operands[1].value): return super().visit_if_stmt(o) def visit_import_all(self, o: ImportAll) -> None: self.add_import_line('from %s%s import *\n' % ('.' * o.relative, o.id)) def visit_import_from(self, o: ImportFrom) -> None: exported_names = set() # type: Set[str] import_names = [] module, relative = translate_module_name(o.id, o.relative) if self.module: full_module, ok = mypy.util.correct_relative_import( self.module, relative, module, self.path.endswith('.__init__.py') ) if not ok: full_module = module else: full_module = module if module == '__future__': return # Not preserved for name, as_name in o.names: if name == 'six': # Vendored six -- translate into plain 'import six'. self.visit_import(Import([('six', None)])) continue exported = False if as_name is None and self.module and (self.module + '.' + name) in EXTRA_EXPORTED: # Special case certain names that should be exported, against our general rules. exported = True is_private = self.is_private_name(name, full_module + '.' + name) if (as_name is None and name not in self.referenced_names and (not self._all_ or name in IGNORED_DUNDERS) and not is_private and module not in ('abc', 'typing', 'asyncio')): # An imported name that is never referenced in the module is assumed to be # exported, unless there is an explicit __all__. Note that we need to special # case 'abc' since some references are deleted during semantic analysis. exported = True top_level = full_module.split('.')[0] if (as_name is None and not self.export_less and (not self._all_ or name in IGNORED_DUNDERS) and self.module and not is_private and top_level in (self.module.split('.')[0], '_' + self.module.split('.')[0])): # Export imports from the same package, since we can't reliably tell whether they # are part of the public API. 
exported = True if exported: self.import_tracker.reexport(name) as_name = name import_names.append((name, as_name)) self.import_tracker.add_import_from('.' * relative + module, import_names) self._vars[-1].extend(alias or name for name, alias in import_names) for name, alias in import_names: self.record_name(alias or name) if self._all_: # Include import froms that import names defined in __all__. names = [name for name, alias in o.names if name in self._all_ and alias is None and name not in IGNORED_DUNDERS] exported_names.update(names) def visit_import(self, o: Import) -> None: for id, as_id in o.ids: self.import_tracker.add_import(id, as_id) if as_id is None: target_name = id.split('.')[0] else: target_name = as_id self._vars[-1].append(target_name) self.record_name(target_name) def get_init(self, lvalue: str, rvalue: Expression, annotation: Optional[Type] = None) -> Optional[str]: """Return initializer for a variable. Return None if we've generated one already or if the variable is internal. """ if lvalue in self._vars[-1]: # We've generated an initializer already for this variable. return None # TODO: Only do this at module top level. if self.is_private_name(lvalue) or self.is_not_in_all(lvalue): return None self._vars[-1].append(lvalue) if annotation is not None: typename = self.print_annotation(annotation) if (isinstance(annotation, UnboundType) and not annotation.args and annotation.name == 'Final' and self.import_tracker.module_for.get('Final') in ('typing', 'typing_extensions')): # Final without type argument is invalid in stubs. final_arg = self.get_str_type_of_node(rvalue) typename += '[{}]'.format(final_arg) else: typename = self.get_str_type_of_node(rvalue) has_rhs = not (isinstance(rvalue, TempNode) and rvalue.no_rhs) initializer = " = ..." 
if has_rhs and not self.is_top_level() else "" return '%s%s: %s%s\n' % (self._indent, lvalue, typename, initializer) def add(self, string: str) -> None: """Add text to generated stub.""" self._output.append(string) def add_decorator(self, name: str) -> None: if not self._indent and self._state not in (EMPTY, FUNC): self._decorators.append('\n') self._decorators.append('%s@%s\n' % (self._indent, name)) def clear_decorators(self) -> None: self._decorators.clear() def typing_name(self, name: str) -> str: if name in self.defined_names: # Avoid name clash between name from typing and a name defined in stub. return '_' + name else: return name def add_typing_import(self, name: str) -> None: """Add a name to be imported from typing, unless it's imported already. The import will be internal to the stub. """ name = self.typing_name(name) self.import_tracker.require_name(name) def add_import_line(self, line: str) -> None: """Add a line of text to the import section, unless it's already there.""" if line not in self._import_lines: self._import_lines.append(line) def add_coroutine_decorator(self, func: FuncDef, name: str, require_name: str) -> None: func.is_awaitable_coroutine = True self.add_decorator(name) self.import_tracker.require_name(require_name) def output(self) -> str: """Return the text for the stub.""" imports = '' if self._import_lines: imports += ''.join(self._import_lines) imports += ''.join(self.import_tracker.import_lines()) if imports and self._output: imports += '\n' return imports + ''.join(self._output) def is_not_in_all(self, name: str) -> bool: if self.is_private_name(name): return False if self._all_: return self.is_top_level() and name not in self._all_ return False def is_private_name(self, name: str, fullname: Optional[str] = None) -> bool: if self._include_private: return False if fullname in EXTRA_EXPORTED: return False return name.startswith('_') and (not name.endswith('__') or name in IGNORED_DUNDERS) def is_private_member(self, fullname: str) -> 
            bool:
        # True if any dotted component of the fully qualified name is private.
        parts = fullname.split('.')
        for part in parts:
            if self.is_private_name(part):
                return True
        return False

    def get_str_type_of_node(self, rvalue: Expression,
                             can_infer_optional: bool = False) -> str:
        """Return a stub type for the r.h.s. of a simple assignment.

        Only trivially inferable literal types are recognized; everything
        else falls back to 'Any'.
        """
        if isinstance(rvalue, IntExpr):
            return 'int'
        if isinstance(rvalue, StrExpr):
            return 'str'
        if isinstance(rvalue, BytesExpr):
            return 'bytes'
        if isinstance(rvalue, FloatExpr):
            return 'float'
        if isinstance(rvalue, UnaryExpr) and isinstance(rvalue.expr, IntExpr):
            # E.g. a negative integer literal.
            return 'int'
        if isinstance(rvalue, NameExpr) and rvalue.name in ('True', 'False'):
            return 'bool'
        if can_infer_optional and \
                isinstance(rvalue, NameExpr) and rvalue.name == 'None':
            # 'x = None' is rendered as Optional[Any].
            self.add_typing_import('Optional')
            self.add_typing_import('Any')
            return '{}[{}]'.format(self.typing_name('Optional'),
                                   self.typing_name('Any'))
        self.add_typing_import('Any')
        return self.typing_name('Any')

    def print_annotation(self, t: Type) -> str:
        # Render a mypy Type as stub-friendly source text.
        printer = AnnotationPrinter(self)
        return t.accept(printer)

    def is_top_level(self) -> bool:
        """Are we processing the top level of a file?"""
        return self._indent == ''

    def record_name(self, name: str) -> None:
        """Mark a name as defined.

        This only does anything if at the top level of a module.
""" if self.is_top_level(): self._toplevel_names.append(name) def is_recorded_name(self, name: str) -> bool: """Has this name been recorded previously?""" return self.is_top_level() and name in self._toplevel_names def find_method_names(defs: List[Statement]) -> Set[str]: # TODO: Traverse into nested definitions result = set() for defn in defs: if isinstance(defn, FuncDef): result.add(defn.name) elif isinstance(defn, Decorator): result.add(defn.func.name) elif isinstance(defn, OverloadedFuncDef): for item in defn.items: result.update(find_method_names([item])) return result class SelfTraverser(mypy.traverser.TraverserVisitor): def __init__(self) -> None: self.results = [] # type: List[Tuple[str, Expression]] def visit_assignment_stmt(self, o: AssignmentStmt) -> None: lvalue = o.lvalues[0] if (isinstance(lvalue, MemberExpr) and isinstance(lvalue.expr, NameExpr) and lvalue.expr.name == 'self'): self.results.append((lvalue.name, o.rvalue)) def find_self_initializers(fdef: FuncBase) -> List[Tuple[str, Expression]]: """Find attribute initializers in a method. Return a list of pairs (attribute name, r.h.s. expression). 
""" traverser = SelfTraverser() fdef.accept(traverser) return traverser.results def get_qualified_name(o: Expression) -> str: if isinstance(o, NameExpr): return o.name elif isinstance(o, MemberExpr): return '%s.%s' % (get_qualified_name(o.expr), o.name) else: return ERROR_MARKER def remove_blacklisted_modules(modules: List[StubSource]) -> List[StubSource]: return [module for module in modules if module.path is None or not is_blacklisted_path(module.path)] def is_blacklisted_path(path: str) -> bool: return any(substr in (normalize_path_separators(path) + '\n') for substr in BLACKLIST) def normalize_path_separators(path: str) -> str: if sys.platform == 'win32': return path.replace('\\', '/') return path def collect_build_targets(options: Options, mypy_opts: MypyOptions) -> Tuple[List[StubSource], List[StubSource]]: """Collect files for which we need to generate stubs. Return list of Python modules and C modules. """ if options.packages or options.modules: if options.no_import: py_modules = find_module_paths_using_search(options.modules, options.packages, options.search_path, options.pyversion) c_modules = [] # type: List[StubSource] else: # Using imports is the default, since we can also find C modules. py_modules, c_modules = find_module_paths_using_imports(options.modules, options.packages, options.interpreter, options.pyversion, options.verbose, options.quiet) else: # Use mypy native source collection for files and directories. 
        try:
            source_list = create_source_list(options.files, mypy_opts)
        except InvalidSourceList as e:
            raise SystemExit(str(e))
        py_modules = [StubSource(m.module, m.path) for m in source_list]
        c_modules = []

    py_modules = remove_blacklisted_modules(py_modules)

    return py_modules, c_modules


def find_module_paths_using_imports(modules: List[str],
                                    packages: List[str],
                                    interpreter: str,
                                    pyversion: Tuple[int, int],
                                    verbose: bool,
                                    quiet: bool) -> Tuple[List[StubSource], List[StubSource]]:
    """Find path and runtime value of __all__ (if possible) for modules and packages.

    This function uses runtime Python imports to get the information.
    """
    with ModuleInspect() as inspect:
        py_modules = []  # type: List[StubSource]
        c_modules = []  # type: List[StubSource]
        found = list(walk_packages(inspect, packages, verbose))
        modules = modules + found
        # We don't want to run any tests or scripts.
        modules = [mod for mod in modules if not is_non_library_module(mod)]
        for mod in modules:
            try:
                if pyversion[0] == 2:
                    result = find_module_path_and_all_py2(mod, interpreter)
                else:
                    result = find_module_path_and_all_py3(inspect, mod, verbose)
            except CantImport as e:
                # Report (unless quiet) and keep going with the other modules.
                tb = traceback.format_exc()
                if verbose:
                    sys.stdout.write(tb)
                if not quiet:
                    report_missing(mod, e.message, tb)
                continue
            if not result:
                # No source path: treat as a C extension module.
                c_modules.append(StubSource(mod))
            else:
                path, runtime_all = result
                py_modules.append(StubSource(mod, path, runtime_all))
        return py_modules, c_modules


def is_non_library_module(module: str) -> bool:
    """Does module look like a test module or a script?"""
    if module.endswith((
            '.tests',
            '.test',
            '.testing',
            '_tests',
            '_test_suite',
            'test_util',
            'test_utils',
            'test_base',
            '.__main__',
            '.conftest',  # Used by pytest
            '.setup',  # Typically an install script
    )):
        return True
    if module.split('.')[-1].startswith('test_'):
        return True
    if ('.tests.' in module
            or '.test.' in module
            or '.testing.' in module
            or '.SelfTest.'
            in module):
        return True
    return False


def translate_module_name(module: str, relative: int) -> Tuple[str, int]:
    # If the module is a copy of 'six'/'six.moves' vendored under one of the
    # VENDOR_PACKAGES prefixes, return the canonical name (and absolute import).
    for pkg in VENDOR_PACKAGES:
        for alt in 'six.moves', 'six':
            substr = '{}.{}'.format(pkg, alt)
            if (module.endswith('.' + substr)
                    or (module == substr and relative)):
                return alt, 0
            if '.' + substr + '.' in module:
                return alt + '.' + module.partition('.' + substr + '.')[2], 0
    return module, relative


def find_module_paths_using_search(modules: List[str], packages: List[str],
                                   search_path: List[str],
                                   pyversion: Tuple[int, int]) -> List[StubSource]:
    """Find sources for modules and packages requested.

    This function just looks for source files at the file system level.
    This is used if user passes --no-import, and will not find C modules.
    Exit if some of the modules or packages can't be found.
    """
    result = []  # type: List[StubSource]
    typeshed_path = default_lib_path(mypy.build.default_data_dir(), pyversion, None)
    # Current directory is always searched first.
    search_paths = SearchPaths(('.',) + tuple(search_path), (), (), tuple(typeshed_path))
    cache = FindModuleCache(search_paths)
    for module in modules:
        module_path = cache.find_module(module)
        if not module_path:
            fail_missing(module)
        result.append(StubSource(module, module_path))
    for package in packages:
        p_result = cache.find_modules_recursive(package)
        if not p_result:
            fail_missing(package)
        sources = [StubSource(m.module, m.path) for m in p_result]
        result.extend(sources)
    result = [m for m in result if not is_non_library_module(m.module)]
    return result


def mypy_options(stubgen_options: Options) -> MypyOptions:
    """Generate mypy options using the flag passed by user."""
    options = MypyOptions()
    options.follow_imports = 'skip'
    options.incremental = False
    options.ignore_errors = True
    options.semantic_analysis_only = True
    options.python_version = stubgen_options.pyversion
    options.show_traceback = True
    # Strip comments that merely look like type comments before parsing.
    options.transform_source = remove_misplaced_type_comments
    return options


def parse_source_file(mod: StubSource, mypy_options: MypyOptions) -> None:
    """Parse a source file.
    On success, store AST in the corresponding attribute of the stub source.
    If there are syntax errors, print them and exit.
    """
    assert mod.path is not None, "Not found module was not skipped"
    with open(mod.path, 'rb') as f:
        data = f.read()
    source = mypy.util.decode_python_encoding(data, mypy_options.python_version)
    errors = Errors()
    mod.ast = mypy.parse.parse(source, fnam=mod.path, module=mod.module,
                               errors=errors, options=mypy_options)
    mod.ast._fullname = mod.module
    if errors.is_blockers():
        # Syntax error!
        for m in errors.new_messages():
            sys.stderr.write('%s\n' % m)
        sys.exit(1)


def generate_asts_for_modules(py_modules: List[StubSource],
                              parse_only: bool,
                              mypy_options: MypyOptions,
                              verbose: bool) -> None:
    """Use mypy to parse (and optionally analyze) source files."""
    if not py_modules:
        return  # Nothing to do here, but there may be C modules
    if verbose:
        print('Processing %d files...' % len(py_modules))
    if parse_only:
        for mod in py_modules:
            parse_source_file(mod, mypy_options)
        return
    # Perform full semantic analysis of the source set.
    try:
        res = build(list(py_modules), mypy_options)
    except CompileError as e:
        raise SystemExit("Critical error during semantic analysis: {}".format(e))

    for mod in py_modules:
        mod.ast = res.graph[mod.module].tree
        # Use statically inferred __all__ if there is no runtime one.
        if mod.runtime_all is None:
            mod.runtime_all = res.manager.semantic_analyzer.export_map[mod.module]


def generate_stub_from_ast(mod: StubSource,
                           target: str,
                           parse_only: bool = False,
                           pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
                           include_private: bool = False,
                           export_less: bool = False) -> None:
    """Use analysed (or just parsed) AST to generate type stub for single file.

    If directory for target doesn't exist it will created. Existing stub
    will be overwritten.
""" gen = StubGenerator(mod.runtime_all, pyversion=pyversion, include_private=include_private, analyzed=not parse_only, export_less=export_less) assert mod.ast is not None, "This function must be used only with analyzed modules" mod.ast.accept(gen) # Write output to file. subdir = os.path.dirname(target) if subdir and not os.path.isdir(subdir): os.makedirs(subdir) with open(target, 'w') as file: file.write(''.join(gen.output())) def collect_docs_signatures(doc_dir: str) -> Tuple[Dict[str, str], Dict[str, str]]: """Gather all function and class signatures in the docs. Return a tuple (function signatures, class signatures). Currently only used for C modules. """ all_sigs = [] # type: List[Sig] all_class_sigs = [] # type: List[Sig] for path in glob.glob('%s/*.rst' % doc_dir): with open(path) as f: loc_sigs, loc_class_sigs = parse_all_signatures(f.readlines()) all_sigs += loc_sigs all_class_sigs += loc_class_sigs sigs = dict(find_unique_signatures(all_sigs)) class_sigs = dict(find_unique_signatures(all_class_sigs)) return sigs, class_sigs def generate_stubs(options: Options) -> None: """Main entry point for the program.""" mypy_opts = mypy_options(options) py_modules, c_modules = collect_build_targets(options, mypy_opts) # Collect info from docs (if given): sigs = class_sigs = None # type: Optional[Dict[str, str]] if options.doc_dir: sigs, class_sigs = collect_docs_signatures(options.doc_dir) # Use parsed sources to generate stubs for Python modules. 
    generate_asts_for_modules(py_modules, options.parse_only, mypy_opts, options.verbose)
    files = []
    for mod in py_modules:
        assert mod.path is not None, "Not found module was not skipped"
        # Packages become <pkg>/__init__.pyi, plain modules <mod>.pyi.
        target = mod.module.replace('.', '/')
        if os.path.basename(mod.path) == '__init__.py':
            target += '/__init__.pyi'
        else:
            target += '.pyi'
        target = os.path.join(options.output_dir, target)
        files.append(target)
        with generate_guarded(mod.module, target, options.ignore_errors, options.verbose):
            generate_stub_from_ast(mod, target,
                                   options.parse_only, options.pyversion,
                                   options.include_private,
                                   options.export_less)

    # Separately analyse C modules using different logic.
    for mod in c_modules:
        # A C module that has submodules must be emitted as a package __init__.
        if any(py_mod.module.startswith(mod.module + '.')
               for py_mod in py_modules + c_modules):
            target = mod.module.replace('.', '/') + '/__init__.pyi'
        else:
            target = mod.module.replace('.', '/') + '.pyi'
        target = os.path.join(options.output_dir, target)
        files.append(target)
        with generate_guarded(mod.module, target, options.ignore_errors, options.verbose):
            generate_stub_for_c_module(mod.module, target, sigs=sigs, class_sigs=class_sigs)
    num_modules = len(py_modules) + len(c_modules)
    if not options.quiet and num_modules > 0:
        print('Processed %d modules' % num_modules)
        if len(files) == 1:
            print('Generated %s' % files[0])
        else:
            print('Generated files under %s' % common_dir_prefix(files) + os.sep)


HEADER = """%(prog)s [-h] [--py2] [more options, see -h] [-m MODULE] [-p PACKAGE] [files ...]"""

DESCRIPTION = """ Generate draft stubs for modules. Stubs are generated in directory ./out, to avoid overriding files with manual changes. This directory is assumed to exist. 
""" def parse_options(args: List[str]) -> Options: parser = argparse.ArgumentParser(prog='stubgen', usage=HEADER, description=DESCRIPTION) parser.add_argument('--py2', action='store_true', help="run in Python 2 mode (default: Python 3 mode)") parser.add_argument('--ignore-errors', action='store_true', help="ignore errors when trying to generate stubs for modules") parser.add_argument('--no-import', action='store_true', help="don't import the modules, just parse and analyze them " "(doesn't work with C extension modules and might not " "respect __all__)") parser.add_argument('--parse-only', action='store_true', help="don't perform semantic analysis of sources, just parse them " "(only applies to Python modules, might affect quality of stubs)") parser.add_argument('--include-private', action='store_true', help="generate stubs for objects and members considered private " "(single leading underscore and no trailing underscores)") parser.add_argument('--export-less', action='store_true', help=("don't implicitly export all names imported from other modules " "in the same package")) parser.add_argument('-v', '--verbose', action='store_true', help="show more verbose messages") parser.add_argument('-q', '--quiet', action='store_true', help="show fewer messages") parser.add_argument('--doc-dir', metavar='PATH', default='', help="use .rst documentation in PATH (this may result in " "better stubs in some cases; consider setting this to " "DIR/Python-X.Y.Z/Doc/library)") parser.add_argument('--search-path', metavar='PATH', default='', help="specify module search directories, separated by ':' " "(currently only used if --no-import is given)") parser.add_argument('--python-executable', metavar='PATH', dest='interpreter', default='', help="use Python interpreter at PATH (only works for " "Python 2 right now)") parser.add_argument('-o', '--output', metavar='PATH', dest='output_dir', default='out', help="change the output directory [default: %(default)s]") parser.add_argument('-m', 
                        '--module', action='append', metavar='MODULE',
                        dest='modules', default=[],
                        help="generate stub for module; can repeat for more modules")
    parser.add_argument('-p', '--package', action='append', metavar='PACKAGE',
                        dest='packages', default=[],
                        help="generate stubs for package recursively; can be repeated")
    parser.add_argument(metavar='files', nargs='*', dest='files',
                        help="generate stubs for given files or directories")

    ns = parser.parse_args(args)

    pyversion = defaults.PYTHON2_VERSION if ns.py2 else defaults.PYTHON3_VERSION
    if not ns.interpreter:
        # Default interpreter: the running one for py3; discover one for py2.
        ns.interpreter = sys.executable if pyversion[0] == 3 else default_py2_interpreter()
    if ns.modules + ns.packages and ns.files:
        parser.error("May only specify one of: modules/packages or files.")
    if ns.quiet and ns.verbose:
        parser.error('Cannot specify both quiet and verbose messages')

    # Create the output folder if it doesn't already exist.
    if not os.path.exists(ns.output_dir):
        os.makedirs(ns.output_dir)

    return Options(pyversion=pyversion,
                   no_import=ns.no_import,
                   doc_dir=ns.doc_dir,
                   search_path=ns.search_path.split(':'),
                   interpreter=ns.interpreter,
                   ignore_errors=ns.ignore_errors,
                   parse_only=ns.parse_only,
                   include_private=ns.include_private,
                   output_dir=ns.output_dir,
                   modules=ns.modules,
                   packages=ns.packages,
                   files=ns.files,
                   verbose=ns.verbose,
                   quiet=ns.quiet,
                   export_less=ns.export_less)


def main() -> None:
    mypy.util.check_python_version('stubgen')
    # Make sure that the current directory is in sys.path so that
    # stubgen can be run on packages in the current directory.
    if not ('' in sys.path or '.' in sys.path):
        sys.path.insert(0, '')

    options = parse_options(sys.argv[1:])
    generate_stubs(options)


if __name__ == '__main__':
    main()
mypy-0.761/mypy/stubgenc.py0000755€tŠÔÚ€2›s®0000003560413576752246022105 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3
"""Stub generator for C modules.

The public interface is via the mypy.stubgen module.
""" import importlib import inspect import os.path import re from typing import List, Dict, Tuple, Optional, Mapping, Any, Set from types import ModuleType from mypy.moduleinspect import is_c_module from mypy.stubdoc import ( infer_sig_from_docstring, infer_prop_type_from_docstring, ArgSig, infer_arg_sig_from_docstring, FunctionSig ) def generate_stub_for_c_module(module_name: str, target: str, sigs: Optional[Dict[str, str]] = None, class_sigs: Optional[Dict[str, str]] = None) -> None: """Generate stub for C module. This combines simple runtime introspection (looking for docstrings and attributes with simple builtin types) and signatures inferred from .rst documentation (if given). If directory for target doesn't exist it will be created. Existing stub will be overwritten. """ module = importlib.import_module(module_name) assert is_c_module(module), '%s is not a C module' % module_name subdir = os.path.dirname(target) if subdir and not os.path.isdir(subdir): os.makedirs(subdir) imports = [] # type: List[str] functions = [] # type: List[str] done = set() items = sorted(module.__dict__.items(), key=lambda x: x[0]) for name, obj in items: if is_c_function(obj): generate_c_function_stub(module, name, obj, functions, imports=imports, sigs=sigs) done.add(name) types = [] # type: List[str] for name, obj in items: if name.startswith('__') and name.endswith('__'): continue if is_c_type(obj): generate_c_type_stub(module, name, obj, types, imports=imports, sigs=sigs, class_sigs=class_sigs) done.add(name) variables = [] for name, obj in items: if name.startswith('__') and name.endswith('__'): continue if name not in done and not inspect.ismodule(obj): type_str = type(obj).__name__ if type_str not in ('int', 'str', 'bytes', 'float', 'bool'): type_str = 'Any' variables.append('%s: %s' % (name, type_str)) output = [] for line in sorted(set(imports)): output.append(line) for line in variables: output.append(line) if output and functions: output.append('') for line in functions: 
        output.append(line)
    for line in types:
        # Blank line before each class definition.
        if line.startswith('class') and output and output[-1]:
            output.append('')
        output.append(line)
    output = add_typing_import(output)
    with open(target, 'w') as file:
        for line in output:
            file.write('%s\n' % line)


def add_typing_import(output: List[str]) -> List[str]:
    """Add typing imports for collections/types that occur in the generated stub."""
    names = []
    for name in ['Any', 'Union', 'Tuple', 'Optional', 'List', 'Dict']:
        if any(re.search(r'\b%s\b' % name, line) for line in output):
            names.append(name)
    if names:
        return ['from typing import %s' % ', '.join(names), ''] + output
    else:
        return output[:]


def is_c_function(obj: object) -> bool:
    # type(ord) is the builtin-function type.
    return inspect.isbuiltin(obj) or type(obj) is type(ord)


def is_c_method(obj: object) -> bool:
    return inspect.ismethoddescriptor(obj) or type(obj) in (type(str.index),
                                                            type(str.__add__),
                                                            type(str.__new__))


def is_c_classmethod(obj: object) -> bool:
    return inspect.isbuiltin(obj) or type(obj).__name__ in ('classmethod',
                                                            'classmethod_descriptor')


def is_c_property(obj: object) -> bool:
    return inspect.isdatadescriptor(obj) and hasattr(obj, 'fget')


def is_c_property_readonly(prop: Any) -> bool:
    return prop.fset is None


def is_c_type(obj: object) -> bool:
    # type(int) is 'type' itself.
    return inspect.isclass(obj) or type(obj) is type(int)


def generate_c_function_stub(module: ModuleType,
                             name: str,
                             obj: object,
                             output: List[str],
                             imports: List[str],
                             self_var: Optional[str] = None,
                             sigs: Optional[Dict[str, str]] = None,
                             class_name: Optional[str] = None,
                             class_sigs: Optional[Dict[str, str]] = None) -> None:
    """Generate stub for a single function or method.

    The result (always a single line) will be appended to 'output'.
    If necessary, any required names will be added to 'imports'.
    The 'class_name' is used to find signature of __init__ or __new__ in
    'class_sigs'.
""" if sigs is None: sigs = {} if class_sigs is None: class_sigs = {} ret_type = 'None' if name == '__init__' and class_name else 'Any' if (name in ('__new__', '__init__') and name not in sigs and class_name and class_name in class_sigs): inferred = [FunctionSig(name=name, args=infer_arg_sig_from_docstring(class_sigs[class_name]), ret_type=ret_type)] # type: Optional[List[FunctionSig]] else: docstr = getattr(obj, '__doc__', None) inferred = infer_sig_from_docstring(docstr, name) if not inferred: if class_name and name not in sigs: inferred = [FunctionSig(name, args=infer_method_sig(name), ret_type=ret_type)] else: inferred = [FunctionSig(name=name, args=infer_arg_sig_from_docstring( sigs.get(name, '(*args, **kwargs)')), ret_type=ret_type)] is_overloaded = len(inferred) > 1 if inferred else False if is_overloaded: imports.append('from typing import overload') if inferred: for signature in inferred: sig = [] for arg in signature.args: if arg.name == self_var: arg_def = self_var else: arg_def = arg.name if arg_def == 'None': arg_def = '_none' # None is not a valid argument name if arg.type: arg_def += ": " + strip_or_import(arg.type, module, imports) if arg.default: arg_def += " = ..." sig.append(arg_def) if is_overloaded: output.append('@overload') output.append('def {function}({args}) -> {ret}: ...'.format( function=name, args=", ".join(sig), ret=strip_or_import(signature.ret_type, module, imports) )) def strip_or_import(typ: str, module: ModuleType, imports: List[str]) -> str: """Strips unnecessary module names from typ. If typ represents a type that is inside module or is a type coming from builtins, remove module declaration from it. Return stripped name of the type. Arguments: typ: name of the type module: in which this type is used imports: list of import statements (may be modified during the call) """ stripped_type = typ if module and typ.startswith(module.__name__ + '.'): stripped_type = typ[len(module.__name__) + 1:] elif '.' 
            in typ:
        arg_module = typ[:typ.rindex('.')]
        if arg_module == 'builtins':
            stripped_type = typ[len('builtins') + 1:]
        else:
            # Non-builtin foreign module: keep the qualified name and import it.
            imports.append('import %s' % (arg_module,))
    return stripped_type


def generate_c_property_stub(name: str, obj: object, output: List[str], readonly: bool) -> None:
    """Generate property stub using introspection of 'obj'.

    Try to infer type from docstring, append resulting lines to 'output'.
    """
    docstr = getattr(obj, '__doc__', None)
    inferred = infer_prop_type_from_docstring(docstr)
    if not inferred:
        inferred = 'Any'

    output.append('@property')
    output.append('def {}(self) -> {}: ...'.format(name, inferred))
    if not readonly:
        output.append('@{}.setter'.format(name))
        output.append('def {}(self, val: {}) -> None: ...'.format(name, inferred))


def generate_c_type_stub(module: ModuleType,
                         class_name: str,
                         obj: type,
                         output: List[str],
                         imports: List[str],
                         sigs: Optional[Dict[str, str]] = None,
                         class_sigs: Optional[Dict[str, str]] = None) -> None:
    """Generate stub for a single class using runtime introspection.

    The result lines will be appended to 'output'.
    If necessary, any required names will be added to 'imports'.
    """
    # typeshed gives obj.__dict__ the not quite correct type Dict[str, Any]
    # (it could be a mappingproxy!), which makes mypyc mad, so obfuscate it.
    obj_dict = getattr(obj, '__dict__')  # type: Mapping[str, Any]  # noqa
    items = sorted(obj_dict.items(), key=lambda x: method_name_sort_key(x[0]))
    methods = []  # type: List[str]
    properties = []  # type: List[str]
    done = set()  # type: Set[str]
    for attr, value in items:
        if is_c_method(value) or is_c_classmethod(value):
            done.add(attr)
            if not is_skipped_attribute(attr):
                if attr == '__new__':
                    # TODO: We should support __new__.
                    if '__init__' in obj_dict:
                        # Avoid duplicate functions if both are present.
                        # But is there any case where .__new__() has a
                        # better signature than __init__() ?
                        continue
                    attr = '__init__'
                if is_c_classmethod(value):
                    methods.append('@classmethod')
                    self_var = 'cls'
                else:
                    self_var = 'self'
                generate_c_function_stub(module, attr, value, methods, imports=imports,
                                         self_var=self_var, sigs=sigs, class_name=class_name,
                                         class_sigs=class_sigs)
        elif is_c_property(value):
            done.add(attr)
            generate_c_property_stub(attr, value, properties, is_c_property_readonly(value))

    # Everything else that wasn't handled becomes an 'Any' class variable.
    variables = []
    for attr, value in items:
        if is_skipped_attribute(attr):
            continue
        if attr not in done:
            variables.append('%s: Any = ...' % attr)
    all_bases = obj.mro()
    if all_bases[-1] is object:
        # TODO: Is this always object?
        del all_bases[-1]
    # remove pybind11_object. All classes generated by pybind11 have pybind11_object in their MRO,
    # which only overrides a few functions in object type
    if all_bases and all_bases[-1].__name__ == 'pybind11_object':
        del all_bases[-1]
    # remove the class itself
    all_bases = all_bases[1:]
    # Remove base classes of other bases as redundant.
    bases = []  # type: List[type]
    for base in all_bases:
        if not any(issubclass(b, base) for b in bases):
            bases.append(base)
    if bases:
        bases_str = '(%s)' % ', '.join(
            strip_or_import(
                get_type_fullname(base),
                module,
                imports
            ) for base in bases
        )
    else:
        bases_str = ''
    if not methods and not variables and not properties:
        output.append('class %s%s: ...' % (class_name, bases_str))
    else:
        output.append('class %s%s:' % (class_name, bases_str))
        for variable in variables:
            output.append(' %s' % variable)
        for method in methods:
            output.append(' %s' % method)
        for prop in properties:
            output.append(' %s' % prop)


def get_type_fullname(typ: type) -> str:
    # Fully qualified 'module.Name' for a runtime type object.
    return '%s.%s' % (typ.__module__, typ.__name__)


def method_name_sort_key(name: str) -> Tuple[int, str]:
    """Sort methods in classes in a typical order.

    I.e.: constructor, normal methods, special methods.
""" if name in ('__new__', '__init__'): return 0, name if name.startswith('__') and name.endswith('__'): return 2, name return 1, name def is_skipped_attribute(attr: str) -> bool: return attr in ('__getattribute__', '__str__', '__repr__', '__doc__', '__dict__', '__module__', '__weakref__') # For pickling def infer_method_sig(name: str) -> List[ArgSig]: args = None # type: Optional[List[ArgSig]] if name.startswith('__') and name.endswith('__'): name = name[2:-2] if name in ('hash', 'iter', 'next', 'sizeof', 'copy', 'deepcopy', 'reduce', 'getinitargs', 'int', 'float', 'trunc', 'complex', 'bool', 'abs', 'bytes', 'dir', 'len', 'reversed', 'round', 'index', 'enter'): args = [] elif name == 'getitem': args = [ArgSig(name='index')] elif name == 'setitem': args = [ArgSig(name='index'), ArgSig(name='object')] elif name in ('delattr', 'getattr'): args = [ArgSig(name='name')] elif name == 'setattr': args = [ArgSig(name='name'), ArgSig(name='value')] elif name == 'getstate': args = [] elif name == 'setstate': args = [ArgSig(name='state')] elif name in ('eq', 'ne', 'lt', 'le', 'gt', 'ge', 'add', 'radd', 'sub', 'rsub', 'mul', 'rmul', 'mod', 'rmod', 'floordiv', 'rfloordiv', 'truediv', 'rtruediv', 'divmod', 'rdivmod', 'pow', 'rpow', 'xor', 'rxor', 'or', 'ror', 'and', 'rand', 'lshift', 'rlshift', 'rshift', 'rrshift', 'contains', 'delitem', 'iadd', 'iand', 'ifloordiv', 'ilshift', 'imod', 'imul', 'ior', 'ipow', 'irshift', 'isub', 'itruediv', 'ixor'): args = [ArgSig(name='other')] elif name in ('neg', 'pos', 'invert'): args = [] elif name == 'get': args = [ArgSig(name='instance'), ArgSig(name='owner')] elif name == 'set': args = [ArgSig(name='instance'), ArgSig(name='value')] elif name == 'reduce_ex': args = [ArgSig(name='protocol')] elif name == 'exit': args = [ArgSig(name='type'), ArgSig(name='value'), ArgSig(name='traceback')] if args is None: args = [ArgSig(name='*args'), ArgSig(name='**kwargs')] return [ArgSig(name='self')] + args 
mypy-0.761/mypy/stubutil.py0000644€tŠÔÚ€2›s®0000002203713576752246022147 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities for mypy.stubgen, mypy.stubgenc, and mypy.stubdoc modules."""

import sys
import os.path
import json
import subprocess
import re
from contextlib import contextmanager

from typing import Optional, Tuple, List, Iterator, Union
from typing_extensions import overload

from mypy.moduleinspect import ModuleInspect, InspectError


# Modules that may fail when imported, or that may have side effects (fully qualified).
NOT_IMPORTABLE_MODULES = ()


class CantImport(Exception):
    """Raised when a module cannot be imported for runtime introspection."""

    def __init__(self, module: str, message: str):
        self.module = module
        self.message = message


def default_py2_interpreter() -> str:
    """Find a system Python 2 interpreter.

    Return full path or exit if failed.
    """
    # TODO: Make this do something reasonable in Windows.
    for candidate in ('/usr/bin/python2', '/usr/bin/python'):
        if not os.path.exists(candidate):
            continue
        output = subprocess.check_output([candidate, '--version'],
                                         stderr=subprocess.STDOUT).strip()
        if b'Python 2' in output:
            return candidate
    raise SystemExit("Can't find a Python 2 interpreter -- "
                     "please use the --python-executable option")


def walk_packages(inspect: ModuleInspect,
                  packages: List[str],
                  verbose: bool = False) -> Iterator[str]:
    """Iterates through all packages and sub-packages in the given list.

    This uses runtime imports (in another process) to find both Python and
    C modules. For Python packages we simply pass the __path__ attribute to
    pkgutil.walk_packages() to get the content of the package (all subpackages
    and modules). However, packages in C extensions do not have this attribute,
    so we have to roll out our own logic: recursively find all modules imported
    in the package that have matching names.
""" for package_name in packages: if package_name in NOT_IMPORTABLE_MODULES: print('%s: Skipped (blacklisted)' % package_name) continue if verbose: print('Trying to import %r for runtime introspection' % package_name) try: prop = inspect.get_package_properties(package_name) except InspectError: report_missing(package_name) continue yield prop.name if prop.is_c_module: # Recursively iterate through the subpackages for submodule in walk_packages(inspect, prop.subpackages, verbose): yield submodule else: for submodule in prop.subpackages: yield submodule def find_module_path_and_all_py2(module: str, interpreter: str) -> Optional[Tuple[Optional[str], Optional[List[str]]]]: """Return tuple (module path, module __all__) for a Python 2 module. The path refers to the .py/.py[co] file. The second tuple item is None if the module doesn't define __all__. Raise CantImport if the module can't be imported, or exit if it's a C extension module. """ cmd_template = '{interpreter} -c "%s"'.format(interpreter=interpreter) code = ("import importlib, json; mod = importlib.import_module('%s'); " "print(mod.__file__); print(json.dumps(getattr(mod, '__all__', None)))") % module try: output_bytes = subprocess.check_output(cmd_template % code, shell=True) except subprocess.CalledProcessError as e: path = find_module_path_using_py2_sys_path(module, interpreter) if path is None: raise CantImport(module, str(e)) return path, None output = output_bytes.decode('ascii').strip().splitlines() module_path = output[0] if not module_path.endswith(('.py', '.pyc', '.pyo')): raise SystemExit('%s looks like a C module; they are not supported for Python 2' % module) if module_path.endswith(('.pyc', '.pyo')): module_path = module_path[:-1] module_all = json.loads(output[1]) return module_path, module_all def find_module_path_using_py2_sys_path(module: str, interpreter: str) -> Optional[str]: """Try to find the path of a .py file for a module using Python 2 sys.path. Return None if no match was found. 
""" out = subprocess.run( [interpreter, '-c', 'import sys; import json; print(json.dumps(sys.path))'], check=True, stdout=subprocess.PIPE ).stdout sys_path = json.loads(out.decode('utf-8')) return find_module_path_using_sys_path(module, sys_path) def find_module_path_using_sys_path(module: str, sys_path: List[str]) -> Optional[str]: relative_candidates = ( module.replace('.', '/') + '.py', os.path.join(module.replace('.', '/'), '__init__.py') ) for base in sys_path: for relative_path in relative_candidates: path = os.path.join(base, relative_path) if os.path.isfile(path): return path return None def find_module_path_and_all_py3(inspect: ModuleInspect, module: str, verbose: bool) -> Optional[Tuple[Optional[str], Optional[List[str]]]]: """Find module and determine __all__ for a Python 3 module. Return None if the module is a C module. Return (module_path, __all__) if it is a Python module. Raise CantImport if import failed. """ if module in NOT_IMPORTABLE_MODULES: raise CantImport(module, '') # TODO: Support custom interpreters. if verbose: print('Trying to import %r for runtime introspection' % module) try: mod = inspect.get_package_properties(module) except InspectError as e: # Fall back to finding the module using sys.path. path = find_module_path_using_sys_path(module, sys.path) if path is None: raise CantImport(module, str(e)) return path, None if mod.is_c_module: return None return mod.file, mod.all @contextmanager def generate_guarded(mod: str, target: str, ignore_errors: bool = True, verbose: bool = False) -> Iterator[None]: """Ignore or report errors during stub generation. Optionally report success. 
""" if verbose: print('Processing %s' % mod) try: yield except Exception as e: if not ignore_errors: raise e else: # --ignore-errors was passed print("Stub generation failed for", mod, file=sys.stderr) else: if verbose: print('Created %s' % target) PY2_MODULES = {'cStringIO', 'urlparse', 'collections.UserDict'} def report_missing(mod: str, message: Optional[str] = '', traceback: str = '') -> None: if message: message = ' with error: ' + message print('{}: Failed to import, skipping{}'.format(mod, message)) m = re.search(r"ModuleNotFoundError: No module named '([^']*)'", traceback) if m: missing_module = m.group(1) if missing_module in PY2_MODULES: print('note: Try --py2 for Python 2 mode') def fail_missing(mod: str) -> None: raise SystemExit("Can't find module '{}' (consider using --search-path)".format(mod)) @overload def remove_misplaced_type_comments(source: bytes) -> bytes: ... @overload def remove_misplaced_type_comments(source: str) -> str: ... def remove_misplaced_type_comments(source: Union[str, bytes]) -> Union[str, bytes]: """Remove comments from source that could be understood as misplaced type comments. Normal comments may look like misplaced type comments, and since they cause blocking parse errors, we want to avoid them. """ if isinstance(source, bytes): # This gives us a 1-1 character code mapping, so it's roundtrippable. text = source.decode('latin1') else: text = source # Remove something that looks like a variable type comment but that's by itself # on a line, as it will often generate a parse error (unless it's # type: ignore). text = re.sub(r'^[ \t]*# +type: +["\'a-zA-Z_].*$', '', text, flags=re.MULTILINE) # Remove something that looks like a function type comment after docstring, # which will result in a parse error. 
text = re.sub(r'""" *\n[ \t\n]*# +type: +\(.*$', '"""\n', text, flags=re.MULTILINE) text = re.sub(r"''' *\n[ \t\n]*# +type: +\(.*$", "'''\n", text, flags=re.MULTILINE) # Remove something that looks like a badly formed function type comment. text = re.sub(r'^[ \t]*# +type: +\([^()]+(\)[ \t]*)?$', '', text, flags=re.MULTILINE) if isinstance(source, bytes): return text.encode('latin1') else: return text def common_dir_prefix(paths: List[str]) -> str: if not paths: return '.' cur = os.path.dirname(paths[0]) for path in paths[1:]: while True: path = os.path.dirname(path) if (cur + '/').startswith(path + '/'): cur = path break return cur or '.' mypy-0.761/mypy/subtypes.py0000644€tŠÔÚ€2›s®0000017363313576752246022163 0ustar jukkaDROPBOX\Domain Users00000000000000from contextlib import contextmanager from typing import Any, List, Optional, Callable, Tuple, Iterator, Set, Union, cast, TypeVar from typing_extensions import Final from mypy.types import ( Type, AnyType, UnboundType, TypeVisitor, FormalArgument, NoneType, Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded, ErasedType, PartialType, DeletedType, UninhabitedType, TypeType, is_named_instance, FunctionLike, TypeOfAny, LiteralType, get_proper_type, TypeAliasType ) import mypy.applytype import mypy.constraints import mypy.typeops import mypy.sametypes from mypy.erasetype import erase_type # Circular import; done in the function instead. 
# import mypy.solve from mypy.nodes import ( FuncBase, Var, Decorator, OverloadedFuncDef, TypeInfo, CONTRAVARIANT, COVARIANT, ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2 ) from mypy.maptype import map_instance_to_supertype from mypy.expandtype import expand_type_by_instance from mypy.typestate import TypeState, SubtypeKind from mypy import state # Flags for detected protocol members IS_SETTABLE = 1 # type: Final IS_CLASSVAR = 2 # type: Final IS_CLASS_OR_STATIC = 3 # type: Final TypeParameterChecker = Callable[[Type, Type, int], bool] def check_type_parameter(lefta: Type, righta: Type, variance: int) -> bool: if variance == COVARIANT: return is_subtype(lefta, righta) elif variance == CONTRAVARIANT: return is_subtype(righta, lefta) else: return is_equivalent(lefta, righta) def ignore_type_parameter(s: Type, t: Type, v: int) -> bool: return True def is_subtype(left: Type, right: Type, *, ignore_type_params: bool = False, ignore_pos_arg_names: bool = False, ignore_declared_variance: bool = False, ignore_promotions: bool = False) -> bool: """Is 'left' subtype of 'right'? Also consider Any to be a subtype of any type, and vice versa. This recursively applies to components of composite types (List[int] is subtype of List[Any], for example). type_parameter_checker is used to check the type parameters (for example, A with B in is_subtype(C[A], C[B]). The default checks for subtype relation between the type arguments (e.g., A and B), taking the variance of the type var into account. """ if TypeState.is_assumed_subtype(left, right): return True if (isinstance(left, TypeAliasType) and isinstance(right, TypeAliasType) and left.is_recursive and right.is_recursive): # This case requires special care because it may cause infinite recursion. # Our view on recursive types is known under a fancy name of equirecursive mu-types. 
# Roughly this means that a recursive type is defined as an alias where right hand side # can refer to the type as a whole, for example: # A = Union[int, Tuple[A, ...]] # and an alias unrolled once represents the *same type*, in our case all these represent # the same type: # A # Union[int, Tuple[A, ...]] # Union[int, Tuple[Union[int, Tuple[A, ...]], ...]] # The algorithm for subtyping is then essentially under the assumption that left <: right, # check that get_proper_type(left) <: get_proper_type(right). On the example above, # If we start with: # A = Union[int, Tuple[A, ...]] # B = Union[int, Tuple[B, ...]] # When checking if A <: B we push pair (A, B) onto 'assuming' stack, then when after few # steps we come back to initial call is_subtype(A, B) and immediately return True. with pop_on_exit(TypeState._assuming, left, right): return _is_subtype(left, right, ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names, ignore_declared_variance=ignore_declared_variance, ignore_promotions=ignore_promotions) return _is_subtype(left, right, ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names, ignore_declared_variance=ignore_declared_variance, ignore_promotions=ignore_promotions) def _is_subtype(left: Type, right: Type, *, ignore_type_params: bool = False, ignore_pos_arg_names: bool = False, ignore_declared_variance: bool = False, ignore_promotions: bool = False) -> bool: orig_right = right orig_left = left left = get_proper_type(left) right = get_proper_type(right) if (isinstance(right, AnyType) or isinstance(right, UnboundType) or isinstance(right, ErasedType)): return True elif isinstance(right, UnionType) and not isinstance(left, UnionType): # Normally, when 'left' is not itself a union, the only way # 'left' can be a subtype of the union 'right' is if it is a # subtype of one of the items making up the union. 
is_subtype_of_item = any(is_subtype(orig_left, item, ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names, ignore_declared_variance=ignore_declared_variance, ignore_promotions=ignore_promotions) for item in right.items) # However, if 'left' is a type variable T, T might also have # an upper bound which is itself a union. This case will be # handled below by the SubtypeVisitor. We have to check both # possibilities, to handle both cases like T <: Union[T, U] # and cases like T <: B where B is the upper bound of T and is # a union. (See #2314.) if not isinstance(left, TypeVarType): return is_subtype_of_item elif is_subtype_of_item: return True # otherwise, fall through return left.accept(SubtypeVisitor(orig_right, ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names, ignore_declared_variance=ignore_declared_variance, ignore_promotions=ignore_promotions)) def is_subtype_ignoring_tvars(left: Type, right: Type) -> bool: return is_subtype(left, right, ignore_type_params=True) def is_equivalent(a: Type, b: Type, *, ignore_type_params: bool = False, ignore_pos_arg_names: bool = False ) -> bool: return ( is_subtype(a, b, ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names) and is_subtype(b, a, ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names)) class SubtypeVisitor(TypeVisitor[bool]): def __init__(self, right: Type, *, ignore_type_params: bool, ignore_pos_arg_names: bool = False, ignore_declared_variance: bool = False, ignore_promotions: bool = False) -> None: self.right = get_proper_type(right) self.orig_right = right self.ignore_type_params = ignore_type_params self.ignore_pos_arg_names = ignore_pos_arg_names self.ignore_declared_variance = ignore_declared_variance self.ignore_promotions = ignore_promotions self.check_type_parameter = (ignore_type_parameter if ignore_type_params else check_type_parameter) self._subtype_kind = 
SubtypeVisitor.build_subtype_kind( ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names, ignore_declared_variance=ignore_declared_variance, ignore_promotions=ignore_promotions) @staticmethod def build_subtype_kind(*, ignore_type_params: bool = False, ignore_pos_arg_names: bool = False, ignore_declared_variance: bool = False, ignore_promotions: bool = False) -> SubtypeKind: return (False, # is proper subtype? ignore_type_params, ignore_pos_arg_names, ignore_declared_variance, ignore_promotions) def _is_subtype(self, left: Type, right: Type) -> bool: return is_subtype(left, right, ignore_type_params=self.ignore_type_params, ignore_pos_arg_names=self.ignore_pos_arg_names, ignore_declared_variance=self.ignore_declared_variance, ignore_promotions=self.ignore_promotions) # visit_x(left) means: is left (which is an instance of X) a subtype of # right? def visit_unbound_type(self, left: UnboundType) -> bool: return True def visit_any(self, left: AnyType) -> bool: return True def visit_none_type(self, left: NoneType) -> bool: if state.strict_optional: return (isinstance(self.right, NoneType) or is_named_instance(self.right, 'builtins.object') or isinstance(self.right, Instance) and self.right.type.is_protocol and not self.right.type.protocol_members) else: return True def visit_uninhabited_type(self, left: UninhabitedType) -> bool: return True def visit_erased_type(self, left: ErasedType) -> bool: return True def visit_deleted_type(self, left: DeletedType) -> bool: return True def visit_instance(self, left: Instance) -> bool: if left.type.fallback_to_any: if isinstance(self.right, NoneType): # NOTE: `None` is a *non-subclassable* singleton, therefore no class # can by a subtype of it, even with an `Any` fallback. # This special case is needed to treat descriptors in classes with # dynamic base classes correctly, see #5456. 
return False return True right = self.right if isinstance(right, TupleType) and mypy.typeops.tuple_fallback(right).type.is_enum: return self._is_subtype(left, mypy.typeops.tuple_fallback(right)) if isinstance(right, Instance): if TypeState.is_cached_subtype_check(self._subtype_kind, left, right): return True if not self.ignore_promotions: for base in left.type.mro: if base._promote and self._is_subtype(base._promote, self.right): TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) return True rname = right.type.fullname # Always try a nominal check if possible, # there might be errors that a user wants to silence *once*. if ((left.type.has_base(rname) or rname == 'builtins.object') and not self.ignore_declared_variance): # Map left type to corresponding right instances. t = map_instance_to_supertype(left, right.type) nominal = all(self.check_type_parameter(lefta, righta, tvar.variance) for lefta, righta, tvar in zip(t.args, right.args, right.type.defn.type_vars)) if nominal: TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) return nominal if right.type.is_protocol and is_protocol_implementation(left, right): return True return False if isinstance(right, TypeType): item = right.item if isinstance(item, TupleType): item = mypy.typeops.tuple_fallback(item) if is_named_instance(left, 'builtins.type'): return self._is_subtype(TypeType(AnyType(TypeOfAny.special_form)), right) if left.type.is_metaclass(): if isinstance(item, AnyType): return True if isinstance(item, Instance): return is_named_instance(item, 'builtins.object') if isinstance(right, CallableType): # Special case: Instance can be a subtype of Callable. 
call = find_member('__call__', left, left, is_operator=True) if call: return self._is_subtype(call, right) return False else: return False def visit_type_var(self, left: TypeVarType) -> bool: right = self.right if isinstance(right, TypeVarType) and left.id == right.id: return True if left.values and self._is_subtype( mypy.typeops.make_simplified_union(left.values), right): return True return self._is_subtype(left.upper_bound, self.right) def visit_callable_type(self, left: CallableType) -> bool: right = self.right if isinstance(right, CallableType): return is_callable_compatible( left, right, is_compat=self._is_subtype, ignore_pos_arg_names=self.ignore_pos_arg_names) elif isinstance(right, Overloaded): return all(self._is_subtype(left, item) for item in right.items()) elif isinstance(right, Instance): if right.type.is_protocol and right.type.protocol_members == ['__call__']: # OK, a callable can implement a protocol with a single `__call__` member. # TODO: we should probably explicitly exclude self-types in this case. call = find_member('__call__', right, left, is_operator=True) assert call is not None if self._is_subtype(left, call): return True return self._is_subtype(left.fallback, right) elif isinstance(right, TypeType): # This is unsound, we don't check the __init__ signature. 
return left.is_type_obj() and self._is_subtype(left.ret_type, right.item) else: return False def visit_tuple_type(self, left: TupleType) -> bool: right = self.right if isinstance(right, Instance): if is_named_instance(right, 'typing.Sized'): return True elif (is_named_instance(right, 'builtins.tuple') or is_named_instance(right, 'typing.Iterable') or is_named_instance(right, 'typing.Container') or is_named_instance(right, 'typing.Sequence') or is_named_instance(right, 'typing.Reversible')): if right.args: iter_type = right.args[0] else: iter_type = AnyType(TypeOfAny.special_form) return all(self._is_subtype(li, iter_type) for li in left.items) elif self._is_subtype(mypy.typeops.tuple_fallback(left), right): return True return False elif isinstance(right, TupleType): if len(left.items) != len(right.items): return False for l, r in zip(left.items, right.items): if not self._is_subtype(l, r): return False rfallback = mypy.typeops.tuple_fallback(right) if is_named_instance(rfallback, 'builtins.tuple'): # No need to verify fallback. This is useful since the calculated fallback # may be inconsistent due to how we calculate joins between unions vs. # non-unions. For example, join(int, str) == object, whereas # join(Union[int, C], Union[str, C]) == Union[int, str, C]. return True lfallback = mypy.typeops.tuple_fallback(left) if not self._is_subtype(lfallback, rfallback): return False return True else: return False def visit_typeddict_type(self, left: TypedDictType) -> bool: right = self.right if isinstance(right, Instance): return self._is_subtype(left.fallback, right) elif isinstance(right, TypedDictType): if not left.names_are_wider_than(right): return False for name, l, r in left.zip(right): if not is_equivalent(l, r, ignore_type_params=self.ignore_type_params): return False # Non-required key is not compatible with a required key since # indexing may fail unexpectedly if a required key is missing. 
# Required key is not compatible with a non-required key since # the prior doesn't support 'del' but the latter should support # it. # # NOTE: 'del' support is currently not implemented (#3550). We # don't want to have to change subtyping after 'del' support # lands so here we are anticipating that change. if (name in left.required_keys) != (name in right.required_keys): return False # (NOTE: Fallbacks don't matter.) return True else: return False def visit_literal_type(self, left: LiteralType) -> bool: if isinstance(self.right, LiteralType): return left == self.right else: return self._is_subtype(left.fallback, self.right) def visit_overloaded(self, left: Overloaded) -> bool: right = self.right if isinstance(right, Instance): if right.type.is_protocol and right.type.protocol_members == ['__call__']: # same as for CallableType call = find_member('__call__', right, left, is_operator=True) assert call is not None if self._is_subtype(left, call): return True return self._is_subtype(left.fallback, right) elif isinstance(right, CallableType): for item in left.items(): if self._is_subtype(item, right): return True return False elif isinstance(right, Overloaded): # Ensure each overload in the right side (the supertype) is accounted for. previous_match_left_index = -1 matched_overloads = set() possible_invalid_overloads = set() for right_index, right_item in enumerate(right.items()): found_match = False for left_index, left_item in enumerate(left.items()): subtype_match = self._is_subtype(left_item, right_item)\ # Order matters: we need to make sure that the index of # this item is at least the index of the previous one. if subtype_match and previous_match_left_index <= left_index: if not found_match: # Update the index of the previous match. 
previous_match_left_index = left_index found_match = True matched_overloads.add(left_item) possible_invalid_overloads.discard(left_item) else: # If this one overlaps with the supertype in any way, but it wasn't # an exact match, then it's a potential error. if (is_callable_compatible(left_item, right_item, is_compat=self._is_subtype, ignore_return=True, ignore_pos_arg_names=self.ignore_pos_arg_names) or is_callable_compatible(right_item, left_item, is_compat=self._is_subtype, ignore_return=True, ignore_pos_arg_names=self.ignore_pos_arg_names)): # If this is an overload that's already been matched, there's no # problem. if left_item not in matched_overloads: possible_invalid_overloads.add(left_item) if not found_match: return False if possible_invalid_overloads: # There were potentially invalid overloads that were never matched to the # supertype. return False return True elif isinstance(right, UnboundType): return True elif isinstance(right, TypeType): # All the items must have the same type object status, so # it's sufficient to query only (any) one of them. # This is unsound, we don't check all the __init__ signatures. return left.is_type_obj() and self._is_subtype(left.items()[0], right) else: return False def visit_union_type(self, left: UnionType) -> bool: return all(self._is_subtype(item, self.orig_right) for item in left.items) def visit_partial_type(self, left: PartialType) -> bool: # This is indeterminate as we don't really know the complete type yet. raise RuntimeError def visit_type_type(self, left: TypeType) -> bool: right = self.right if isinstance(right, TypeType): return self._is_subtype(left.item, right.item) if isinstance(right, CallableType): # This is unsound, we don't check the __init__ signature. 
return self._is_subtype(left.item, right.ret_type) if isinstance(right, Instance): if right.type.fullname in ['builtins.object', 'builtins.type']: return True item = left.item if isinstance(item, TypeVarType): item = get_proper_type(item.upper_bound) if isinstance(item, Instance): metaclass = item.type.metaclass_type return metaclass is not None and self._is_subtype(metaclass, right) return False def visit_type_alias_type(self, left: TypeAliasType) -> bool: assert False, "This should be never called, got {}".format(left) T = TypeVar('T', Instance, TypeAliasType) @contextmanager def pop_on_exit(stack: List[Tuple[T, T]], left: T, right: T) -> Iterator[None]: stack.append((left, right)) yield stack.pop() def is_protocol_implementation(left: Instance, right: Instance, proper_subtype: bool = False) -> bool: """Check whether 'left' implements the protocol 'right'. If 'proper_subtype' is True, then check for a proper subtype. Treat recursive protocols by using the 'assuming' structural subtype matrix (in sparse representation, i.e. as a list of pairs (subtype, supertype)), see also comment in nodes.TypeInfo. When we enter a check for classes (A, P), defined as following:: class P(Protocol): def f(self) -> P: ... class A: def f(self) -> A: ... this results in A being a subtype of P without infinite recursion. On every false result, we pop the assumption, thus avoiding an infinite recursion as well. """ assert right.type.is_protocol # We need to record this check to generate protocol fine-grained dependencies. 
TypeState.record_protocol_subtype_check(left.type, right.type) assuming = right.type.assuming_proper if proper_subtype else right.type.assuming for (l, r) in reversed(assuming): if (mypy.sametypes.is_same_type(l, left) and mypy.sametypes.is_same_type(r, right)): return True with pop_on_exit(assuming, left, right): for member in right.type.protocol_members: # nominal subtyping currently ignores '__init__' and '__new__' signatures if member in ('__init__', '__new__'): continue ignore_names = member != '__call__' # __call__ can be passed kwargs # The third argument below indicates to what self type is bound. # We always bind self to the subtype. (Similarly to nominal types). supertype = get_proper_type(find_member(member, right, left)) assert supertype is not None subtype = get_proper_type(find_member(member, left, left)) # Useful for debugging: # print(member, 'of', left, 'has type', subtype) # print(member, 'of', right, 'has type', supertype) if not subtype: return False if not proper_subtype: # Nominal check currently ignores arg names # NOTE: If we ever change this, be sure to also change the call to # SubtypeVisitor.build_subtype_kind(...) down below. is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=ignore_names) else: is_compat = is_proper_subtype(subtype, supertype) if not is_compat: return False if isinstance(subtype, NoneType) and isinstance(supertype, CallableType): # We want __hash__ = None idiom to work even without --strict-optional return False subflags = get_member_flags(member, left.type) superflags = get_member_flags(member, right.type) if IS_SETTABLE in superflags: # Check opposite direction for settable attributes. 
if not is_subtype(supertype, subtype): return False if (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): return False if IS_SETTABLE in superflags and IS_SETTABLE not in subflags: return False # This rule is copied from nominal check in checker.py if IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags: return False if not proper_subtype: # Nominal check currently ignores arg names, but __call__ is special for protocols ignore_names = right.type.protocol_members != ['__call__'] subtype_kind = SubtypeVisitor.build_subtype_kind(ignore_pos_arg_names=ignore_names) else: subtype_kind = ProperSubtypeVisitor.build_subtype_kind() TypeState.record_subtype_cache_entry(subtype_kind, left, right) return True def find_member(name: str, itype: Instance, subtype: Type, is_operator: bool = False) -> Optional[Type]: """Find the type of member by 'name' in 'itype's TypeInfo. Fin the member type after applying type arguments from 'itype', and binding 'self' to 'subtype'. Return None if member was not found. """ # TODO: this code shares some logic with checkmember.analyze_member_access, # consider refactoring. info = itype.type method = info.get_method(name) if method: if method.is_property: assert isinstance(method, OverloadedFuncDef) dec = method.items[0] assert isinstance(dec, Decorator) return find_node_type(dec.var, itype, subtype) return find_node_type(method, itype, subtype) else: # don't have such method, maybe variable or decorator? node = info.get(name) if not node: v = None else: v = node.node if isinstance(v, Decorator): v = v.var if isinstance(v, Var): return find_node_type(v, itype, subtype) if (not v and name not in ['__getattr__', '__setattr__', '__getattribute__'] and not is_operator): for method_name in ('__getattribute__', '__getattr__'): # Normally, mypy assumes that instances that define __getattr__ have all # attributes with the corresponding return type. 
If this will produce # many false negatives, then this could be prohibited for # structural subtyping. method = info.get_method(method_name) if method and method.info.fullname != 'builtins.object': getattr_type = get_proper_type(find_node_type(method, itype, subtype)) if isinstance(getattr_type, CallableType): return getattr_type.ret_type if itype.type.fallback_to_any: return AnyType(TypeOfAny.special_form) return None def get_member_flags(name: str, info: TypeInfo) -> Set[int]: """Detect whether a member 'name' is settable, whether it is an instance or class variable, and whether it is class or static method. The flags are defined as following: * IS_SETTABLE: whether this attribute can be set, not set for methods and non-settable properties; * IS_CLASSVAR: set if the variable is annotated as 'x: ClassVar[t]'; * IS_CLASS_OR_STATIC: set for methods decorated with @classmethod or with @staticmethod. """ method = info.get_method(name) setattr_meth = info.get_method('__setattr__') if method: # this could be settable property if method.is_property: assert isinstance(method, OverloadedFuncDef) dec = method.items[0] assert isinstance(dec, Decorator) if dec.var.is_settable_property or setattr_meth: return {IS_SETTABLE} return set() node = info.get(name) if not node: if setattr_meth: return {IS_SETTABLE} return set() v = node.node if isinstance(v, Decorator): if v.var.is_staticmethod or v.var.is_classmethod: return {IS_CLASS_OR_STATIC} # just a variable if isinstance(v, Var) and not v.is_property: flags = {IS_SETTABLE} if v.is_classvar: flags.add(IS_CLASSVAR) return flags return set() def find_node_type(node: Union[Var, FuncBase], itype: Instance, subtype: Type) -> Type: """Find type of a variable or method 'node' (maybe also a decorated method). Apply type arguments from 'itype', and bind 'self' to 'subtype'. 
""" from mypy.typeops import bind_self if isinstance(node, FuncBase): typ = mypy.typeops.function_type( node, fallback=Instance(itype.type.mro[-1], [])) # type: Optional[Type] else: typ = node.type typ = get_proper_type(typ) if typ is None: return AnyType(TypeOfAny.from_error) # We don't need to bind 'self' for static methods, since there is no 'self'. if (isinstance(node, FuncBase) or (isinstance(typ, FunctionLike) and node.is_initialized_in_class and not node.is_staticmethod)): assert isinstance(typ, FunctionLike) signature = bind_self(typ, subtype) if node.is_property: assert isinstance(signature, CallableType) typ = signature.ret_type else: typ = signature itype = map_instance_to_supertype(itype, node.info) typ = expand_type_by_instance(typ, itype) return typ def non_method_protocol_members(tp: TypeInfo) -> List[str]: """Find all non-callable members of a protocol.""" assert tp.is_protocol result = [] # type: List[str] anytype = AnyType(TypeOfAny.special_form) instance = Instance(tp, [anytype] * len(tp.defn.type_vars)) for member in tp.protocol_members: typ = get_proper_type(find_member(member, instance, instance)) if not isinstance(typ, CallableType): result.append(member) return result def is_callable_compatible(left: CallableType, right: CallableType, *, is_compat: Callable[[Type, Type], bool], is_compat_return: Optional[Callable[[Type, Type], bool]] = None, ignore_return: bool = False, ignore_pos_arg_names: bool = False, check_args_covariantly: bool = False, allow_partial_overlap: bool = False) -> bool: """Is the left compatible with the right, using the provided compatibility check? is_compat: The check we want to run against the parameters. is_compat_return: The check we want to run against the return type. If None, use the 'is_compat' check. check_args_covariantly: If true, check if the left's args is compatible with the right's instead of the other way around (contravariantly). 
This function is mostly used to check if the left is a subtype of the right which is why the default is to check the args contravariantly. However, it's occasionally useful to check the args using some other check, so we leave the variance configurable. For example, when checking the validity of overloads, it's useful to see if the first overload alternative has more precise arguments then the second. We would want to check the arguments covariantly in that case. Note! The following two function calls are NOT equivalent: is_callable_compatible(f, g, is_compat=is_subtype, check_args_covariantly=False) is_callable_compatible(g, f, is_compat=is_subtype, check_args_covariantly=True) The two calls are similar in that they both check the function arguments in the same direction: they both run `is_subtype(argument_from_g, argument_from_f)`. However, the two calls differ in which direction they check things like keyword arguments. For example, suppose f and g are defined like so: def f(x: int, *y: int) -> int: ... def g(x: int) -> int: ... In this case, the first call will succeed and the second will fail: f is a valid stand-in for g but not vice-versa. allow_partial_overlap: By default this function returns True if and only if *all* calls to left are also calls to right (with respect to the provided 'is_compat' function). If this parameter is set to 'True', we return True if *there exists at least one* call to left that's also a call to right. In other words, we perform an existential check instead of a universal one; we require left to only overlap with right instead of being a subset. For example, suppose we set 'is_compat' to some subtype check and compare following: f(x: float, y: str = "...", *args: bool) -> str g(*args: int) -> str This function would normally return 'False': f is not a subtype of g. However, we would return True if this parameter is set to 'True': the two calls are compatible if the user runs "f_or_g(3)". 
In the context of that specific call, the two functions effectively have signatures of: f2(float) -> str g2(int) -> str Here, f2 is a valid subtype of g2 so we return True. Specifically, if this parameter is set this function will: - Ignore optional arguments on either the left or right that have no corresponding match. - No longer mandate optional arguments on either side are also optional on the other. - No longer mandate that if right has a *arg or **kwarg that left must also have the same. Note: when this argument is set to True, this function becomes "symmetric" -- the following calls are equivalent: is_callable_compatible(f, g, is_compat=some_check, check_args_covariantly=False, allow_partial_overlap=True) is_callable_compatible(g, f, is_compat=some_check, check_args_covariantly=True, allow_partial_overlap=True) If the 'some_check' function is also symmetric, the two calls would be equivalent whether or not we check the args covariantly. """ if is_compat_return is None: is_compat_return = is_compat # If either function is implicitly typed, ignore positional arg names too if left.implicit or right.implicit: ignore_pos_arg_names = True # Non-type cannot be a subtype of type. if right.is_type_obj() and not left.is_type_obj(): return False # A callable L is a subtype of a generic callable R if L is a # subtype of every type obtained from R by substituting types for # the variables of R. We can check this by simply leaving the # generic variables of R as type variables, effectively varying # over all possible values. # It's okay even if these variables share ids with generic # type variables of L, because generating and solving # constraints for the variables of L to make L a subtype of R # (below) treats type variables on the two sides as independent. if left.variables: # Apply generic type variables away in left via type inference. 
unified = unify_generic_callable(left, right, ignore_return=ignore_return) if unified is None: return False else: left = unified # If we allow partial overlaps, we don't need to leave R generic: # if we can find even just a single typevar assignment which # would make these callables compatible, we should return True. # So, we repeat the above checks in the opposite direction. This also # lets us preserve the 'symmetry' property of allow_partial_overlap. if allow_partial_overlap and right.variables: unified = unify_generic_callable(right, left, ignore_return=ignore_return) if unified is not None: right = unified # Check return types. if not ignore_return and not is_compat_return(left.ret_type, right.ret_type): return False if check_args_covariantly: is_compat = flip_compat_check(is_compat) if right.is_ellipsis_args: return True left_star = left.var_arg() left_star2 = left.kw_arg() right_star = right.var_arg() right_star2 = right.kw_arg() # Match up corresponding arguments and check them for compatibility. In # every pair (argL, argR) of corresponding arguments from L and R, argL must # be "more general" than argR if L is to be a subtype of R. # Arguments are corresponding if they either share a name, share a position, # or both. If L's corresponding argument is ambiguous, L is not a subtype of R. # If left has one corresponding argument by name and another by position, # consider them to be one "merged" argument (and not ambiguous) if they're # both optional, they're name-only and position-only respectively, and they # have the same type. This rule allows functions with (*args, **kwargs) to # properly stand in for the full domain of formal arguments that they're # used for in practice. # Every argument in R must have a corresponding argument in L, and every # required argument in L must have a corresponding argument in R. # Phase 1: Confirm every argument in R has a corresponding argument in L. 
# Phase 1a: If left and right can both accept an infinite number of args, # their types must be compatible. # # Furthermore, if we're checking for compatibility in all cases, # we confirm that if R accepts an infinite number of arguments, # L must accept the same. def _incompatible(left_arg: Optional[FormalArgument], right_arg: Optional[FormalArgument]) -> bool: if right_arg is None: return False if left_arg is None: return not allow_partial_overlap return not is_compat(right_arg.typ, left_arg.typ) if _incompatible(left_star, right_star) or _incompatible(left_star2, right_star2): return False # Phase 1b: Check non-star args: for every arg right can accept, left must # also accept. The only exception is if we are allowing partial # partial overlaps: in that case, we ignore optional args on the right. for right_arg in right.formal_arguments(): left_arg = mypy.typeops.callable_corresponding_argument(left, right_arg) if left_arg is None: if allow_partial_overlap and not right_arg.required: continue return False if not are_args_compatible(left_arg, right_arg, ignore_pos_arg_names, allow_partial_overlap, is_compat): return False # Phase 1c: Check var args. Right has an infinite series of optional positional # arguments. Get all further positional args of left, and make sure # they're more general then the corresponding member in right. if right_star is not None: # Synthesize an anonymous formal argument for the right right_by_position = right.try_synthesizing_arg_from_vararg(None) assert right_by_position is not None i = right_star.pos assert i is not None while i < len(left.arg_kinds) and left.arg_kinds[i] in (ARG_POS, ARG_OPT): if allow_partial_overlap and left.arg_kinds[i] == ARG_OPT: break left_by_position = left.argument_by_position(i) assert left_by_position is not None if not are_args_compatible(left_by_position, right_by_position, ignore_pos_arg_names, allow_partial_overlap, is_compat): return False i += 1 # Phase 1d: Check kw args. 
Right has an infinite series of optional named # arguments. Get all further named args of left, and make sure # they're more general then the corresponding member in right. if right_star2 is not None: right_names = {name for name in right.arg_names if name is not None} left_only_names = set() for name, kind in zip(left.arg_names, left.arg_kinds): if name is None or kind in (ARG_STAR, ARG_STAR2) or name in right_names: continue left_only_names.add(name) # Synthesize an anonymous formal argument for the right right_by_name = right.try_synthesizing_arg_from_kwarg(None) assert right_by_name is not None for name in left_only_names: left_by_name = left.argument_by_name(name) assert left_by_name is not None if allow_partial_overlap and not left_by_name.required: continue if not are_args_compatible(left_by_name, right_by_name, ignore_pos_arg_names, allow_partial_overlap, is_compat): return False # Phase 2: Left must not impose additional restrictions. # (Every required argument in L must have a corresponding argument in R) # Note: we already checked the *arg and **kwarg arguments in phase 1a. for left_arg in left.formal_arguments(): right_by_name = (right.argument_by_name(left_arg.name) if left_arg.name is not None else None) right_by_pos = (right.argument_by_position(left_arg.pos) if left_arg.pos is not None else None) # If the left hand argument corresponds to two right-hand arguments, # neither of them can be required. if (right_by_name is not None and right_by_pos is not None and right_by_name != right_by_pos and (right_by_pos.required or right_by_name.required)): return False # All *required* left-hand arguments must have a corresponding # right-hand argument. Optional args do not matter. 
        # All *required* left-hand arguments must have a corresponding
        # right-hand argument.  Optional args do not matter.
        if left_arg.required and right_by_pos is None and right_by_name is None:
            return False

    return True


def are_args_compatible(
        left: FormalArgument,
        right: FormalArgument,
        ignore_pos_arg_names: bool,
        allow_partial_overlap: bool,
        is_compat: Callable[[Type, Type], bool]) -> bool:
    """Check if a single pair of corresponding formal arguments is compatible.

    'left' must be "more general" than 'right' (contravariantly): its name and
    position must match (subject to the flags below) and its type must be a
    supertype of right's type as judged by 'is_compat'.
    """
    def is_different(left_item: Optional[object], right_item: Optional[object]) -> bool:
        """Checks if the left and right items are different.

        If the right item is unspecified (e.g. if the right callable doesn't care
        about what name or position its arg has), we default to returning False.

        If we're allowing partial overlap, we also default to returning False
        if the left callable also doesn't care."""
        if right_item is None:
            return False
        if allow_partial_overlap and left_item is None:
            return False
        return left_item != right_item

    # If right has a specific name it wants this argument to be, left must
    # have the same.
    if is_different(left.name, right.name):
        # But pay attention to whether we're ignoring positional arg names
        if not ignore_pos_arg_names or right.pos is None:
            return False

    # If right is at a specific position, left must have the same:
    if is_different(left.pos, right.pos):
        return False

    # If right's argument is optional, left's must also be
    # (unless we're relaxing the checks to allow potential
    # rather than definite compatibility).
    if not allow_partial_overlap and not right.required and left.required:
        return False

    # If we're allowing partial overlaps and neither arg is required,
    # the types don't actually need to be the same
    if allow_partial_overlap and not left.required and not right.required:
        return True

    # Left must have a more general type: note the contravariant argument order.
    return is_compat(right.typ, left.typ)


def flip_compat_check(is_compat: Callable[[Type, Type], bool]) -> Callable[[Type, Type], bool]:
    """Return a compatibility check identical to 'is_compat' but with the
    argument order swapped (used to check arguments covariantly)."""
    def new_is_compat(left: Type, right: Type) -> bool:
        return is_compat(right, left)
    return new_is_compat


def unify_generic_callable(type: CallableType, target: CallableType,
                           ignore_return: bool,
                           return_constraint_direction: Optional[int] = None,
                           ) -> Optional[CallableType]:
    """Try to unify a generic callable type with another callable type.

    Return unified CallableType if successful; otherwise, return None.
    """
    # Imported here to avoid a module-level import cycle.
    import mypy.solve
    if return_constraint_direction is None:
        return_constraint_direction = mypy.constraints.SUBTYPE_OF
    constraints = []  # type: List[mypy.constraints.Constraint]
    # Arguments are checked contravariantly, the return type covariantly
    # (or per 'return_constraint_direction').
    for arg_type, target_arg_type in zip(type.arg_types, target.arg_types):
        c = mypy.constraints.infer_constraints(
            arg_type, target_arg_type, mypy.constraints.SUPERTYPE_OF)
        constraints.extend(c)
    if not ignore_return:
        c = mypy.constraints.infer_constraints(
            type.ret_type, target.ret_type, return_constraint_direction)
        constraints.extend(c)
    type_var_ids = [tvar.id for tvar in type.variables]
    inferred_vars = mypy.solve.solve_constraints(type_var_ids, constraints)
    if None in inferred_vars:
        # At least one type variable could not be solved.
        return None
    non_none_inferred_vars = cast(List[Type], inferred_vars)
    had_errors = False

    def report(*args: Any) -> None:
        # Error callback: just record that application failed.
        nonlocal had_errors
        had_errors = True

    applied = mypy.applytype.apply_generic_arguments(type, non_none_inferred_vars, report,
                                                     context=target)
    if had_errors:
        return None
    return applied


def restrict_subtype_away(t: Type, s: Type, *, ignore_promotions: bool = False) -> Type:
    """Return t minus s for runtime type assertions.
    If we can't determine a precise result, return a supertype of the
    ideal result (just t is a valid result).

    This is used for type inference of runtime type checks such as
    isinstance().

    Currently this just removes elements of a union type.
    """
    t = get_proper_type(t)
    s = get_proper_type(s)

    if isinstance(t, UnionType):
        # Keep Any items and items that isinstance() can't be relied on to rule out.
        new_items = [restrict_subtype_away(item, s, ignore_promotions=ignore_promotions)
                     for item in t.relevant_items()
                     if (isinstance(get_proper_type(item), AnyType) or
                         not covers_at_runtime(item, s, ignore_promotions))]
        return UnionType.make_union(new_items)
    else:
        return t


def covers_at_runtime(item: Type, supertype: Type, ignore_promotions: bool) -> bool:
    """Will isinstance(item, supertype) always return True at runtime?"""
    item = get_proper_type(item)

    # Since runtime type checks will ignore type arguments, erase the types.
    supertype = erase_type(supertype)
    if is_proper_subtype(erase_type(item), supertype, ignore_promotions=ignore_promotions,
                         erase_instances=True):
        return True
    if isinstance(supertype, Instance) and supertype.type.is_protocol:
        # TODO: Implement more robust support for runtime isinstance() checks, see issue #3827.
        if is_proper_subtype(item, supertype, ignore_promotions=ignore_promotions):
            return True
    if isinstance(item, TypedDictType) and isinstance(supertype, Instance):
        # Special case useful for selecting TypedDicts from unions using isinstance(x, dict).
        if supertype.type.fullname == 'builtins.dict':
            return True
    # TODO: Add more special cases.
    return False


def is_proper_subtype(left: Type, right: Type, *,
                      ignore_promotions: bool = False,
                      erase_instances: bool = False,
                      keep_erased_types: bool = False) -> bool:
    """Is left a proper subtype of right?

    For proper subtypes, there's no need to rely on compatibility due to
    Any types. Every usable type is a proper subtype of itself.

    If erase_instances is True, erase left instance *after* mapping it to supertype
    (this is useful for runtime isinstance() checks).
    If keep_erased_types is True, do not consider ErasedType a subtype of all types
    (used by type inference against unions).
    """
    if TypeState.is_assumed_proper_subtype(left, right):
        return True
    if (isinstance(left, TypeAliasType) and isinstance(right, TypeAliasType) and
            left.is_recursive and right.is_recursive):
        # This case requires special care because it may cause infinite recursion.
        # See is_subtype() for more info.
        with pop_on_exit(TypeState._assuming_proper, left, right):
            return _is_proper_subtype(left, right,
                                      ignore_promotions=ignore_promotions,
                                      erase_instances=erase_instances,
                                      keep_erased_types=keep_erased_types)
    return _is_proper_subtype(left, right,
                              ignore_promotions=ignore_promotions,
                              erase_instances=erase_instances,
                              keep_erased_types=keep_erased_types)


def _is_proper_subtype(left: Type, right: Type, *,
                       ignore_promotions: bool = False,
                       erase_instances: bool = False,
                       keep_erased_types: bool = False) -> bool:
    # Keep the original (possibly alias) types around: union handling and the
    # visitor need them to preserve recursive-alias assumptions.
    orig_left = left
    orig_right = right
    left = get_proper_type(left)
    right = get_proper_type(right)
    if isinstance(right, UnionType) and not isinstance(left, UnionType):
        # A non-union is a proper subtype of a union if it is a proper
        # subtype of some union item.
        return any([is_proper_subtype(orig_left, item,
                                      ignore_promotions=ignore_promotions,
                                      erase_instances=erase_instances,
                                      keep_erased_types=keep_erased_types)
                    for item in right.items])
    return left.accept(ProperSubtypeVisitor(orig_right,
                                            ignore_promotions=ignore_promotions,
                                            erase_instances=erase_instances,
                                            keep_erased_types=keep_erased_types))


class ProperSubtypeVisitor(TypeVisitor[bool]):
    """Visitor that checks whether the visited type is a proper subtype of 'right'."""

    def __init__(self, right: Type, *,
                 ignore_promotions: bool = False,
                 erase_instances: bool = False,
                 keep_erased_types: bool = False) -> None:
        self.right = get_proper_type(right)
        self.orig_right = right
        self.ignore_promotions = ignore_promotions
        self.erase_instances = erase_instances
        self.keep_erased_types = keep_erased_types
        # Cache key for this flavor of subtype check (see TypeState cache calls below).
        self._subtype_kind = ProperSubtypeVisitor.build_subtype_kind(
            ignore_promotions=ignore_promotions,
            erase_instances=erase_instances,
            keep_erased_types=keep_erased_types
        )

    @staticmethod
    def build_subtype_kind(*,
                           ignore_promotions: bool = False,
                           erase_instances: bool = False,
                           keep_erased_types: bool = False) -> SubtypeKind:
        # The leading True marks this as a *proper* subtype check in the shared cache.
        return True, ignore_promotions, erase_instances, keep_erased_types

    def _is_proper_subtype(self, left: Type, right: Type) -> bool:
        # Recursive check that propagates this visitor's flags.
        return is_proper_subtype(left, right,
                                 ignore_promotions=self.ignore_promotions,
                                 erase_instances=self.erase_instances,
                                 keep_erased_types=self.keep_erased_types)

    def visit_unbound_type(self, left: UnboundType) -> bool:
        # This can be called if there is a bad type annotation. The result probably
        # doesn't matter much but by returning True we simplify these bad types away
        # from unions, which could filter out some bogus messages.
        return True

    def visit_any(self, left: AnyType) -> bool:
        return isinstance(self.right, AnyType)

    def visit_none_type(self, left: NoneType) -> bool:
        if state.strict_optional:
            return (isinstance(self.right, NoneType) or
                    is_named_instance(self.right, 'builtins.object'))
        # Without strict optional, None is compatible with everything.
        return True

    def visit_uninhabited_type(self, left: UninhabitedType) -> bool:
        # The bottom type is a subtype of everything.
        return True

    def visit_erased_type(self, left: ErasedType) -> bool:
        # This may be encountered during type inference. The result probably doesn't
        # matter much.
        # TODO: it actually does matter, figure out more principled logic about this.
        if self.keep_erased_types:
            return False
        return True

    def visit_deleted_type(self, left: DeletedType) -> bool:
        return True

    def visit_instance(self, left: Instance) -> bool:
        right = self.right
        if isinstance(right, Instance):
            if TypeState.is_cached_subtype_check(self._subtype_kind, left, right):
                return True
            if not self.ignore_promotions:
                # Check implicit promotions (e.g. int -> float) via the MRO.
                for base in left.type.mro:
                    if base._promote and self._is_proper_subtype(base._promote, right):
                        TypeState.record_subtype_cache_entry(self._subtype_kind, left, right)
                        return True

            if left.type.has_base(right.type.fullname):
                def check_argument(leftarg: Type, rightarg: Type, variance: int) -> bool:
                    # Compare one type argument according to the type variable's variance.
                    if variance == COVARIANT:
                        return self._is_proper_subtype(leftarg, rightarg)
                    elif variance == CONTRAVARIANT:
                        return self._is_proper_subtype(rightarg, leftarg)
                    else:
                        return mypy.sametypes.is_same_type(leftarg, rightarg)
                # Map left type to corresponding right instances.
                left = map_instance_to_supertype(left, right.type)
                if self.erase_instances:
                    erased = erase_type(left)
                    assert isinstance(erased, Instance)
                    left = erased

                nominal = all(check_argument(ta, ra, tvar.variance) for ta, ra, tvar in
                              zip(left.args, right.args, right.type.defn.type_vars))
                if nominal:
                    TypeState.record_subtype_cache_entry(self._subtype_kind, left, right)
                return nominal
            if (right.type.is_protocol and
                    is_protocol_implementation(left, right, proper_subtype=True)):
                return True
            return False
        if isinstance(right, CallableType):
            # An instance with a __call__ method may be a subtype of a callable.
            call = find_member('__call__', left, left, is_operator=True)
            if call:
                return self._is_proper_subtype(call, right)
            return False
        return False

    def visit_type_var(self, left: TypeVarType) -> bool:
        if isinstance(self.right, TypeVarType) and left.id == self.right.id:
            return True
        if left.values and self._is_proper_subtype(
                mypy.typeops.make_simplified_union(left.values), self.right):
            return True
        # Fall back to the type variable's upper bound.
        return self._is_proper_subtype(left.upper_bound, self.right)

    def visit_callable_type(self, left: CallableType) -> bool:
        right = self.right
        if isinstance(right, CallableType):
            return is_callable_compatible(left, right, is_compat=self._is_proper_subtype)
        elif isinstance(right, Overloaded):
            # A callable is a subtype of an overload if it matches every item.
            return all(self._is_proper_subtype(left, item)
                       for item in right.items())
        elif isinstance(right, Instance):
            return self._is_proper_subtype(left.fallback, right)
        elif isinstance(right, TypeType):
            # This is unsound, we don't check the __init__ signature.
            return left.is_type_obj() and self._is_proper_subtype(left.ret_type, right.item)
        return False

    def visit_tuple_type(self, left: TupleType) -> bool:
        right = self.right
        if isinstance(right, Instance):
            if (is_named_instance(right, 'builtins.tuple') or
                    is_named_instance(right, 'typing.Iterable') or
                    is_named_instance(right, 'typing.Container') or
                    is_named_instance(right, 'typing.Sequence') or
                    is_named_instance(right, 'typing.Reversible')):
                if not right.args:
                    return False
                iter_type = get_proper_type(right.args[0])
                if is_named_instance(right, 'builtins.tuple') and isinstance(iter_type, AnyType):
                    # TODO: We shouldn't need this special case. This is currently needed
                    #       for isinstance(x, tuple), though it's unclear why.
                    return True
                # Every tuple item must be a proper subtype of the element type.
                return all(self._is_proper_subtype(li, iter_type) for li in left.items)
            return self._is_proper_subtype(mypy.typeops.tuple_fallback(left), right)
        elif isinstance(right, TupleType):
            if len(left.items) != len(right.items):
                return False
            # Tuple items are compared pairwise (covariantly).
            for l, r in zip(left.items, right.items):
                if not self._is_proper_subtype(l, r):
                    return False
            return self._is_proper_subtype(mypy.typeops.tuple_fallback(left),
                                           mypy.typeops.tuple_fallback(right))
        return False

    def visit_typeddict_type(self, left: TypedDictType) -> bool:
        right = self.right
        if isinstance(right, TypedDictType):
            # Shared items must have identical types (TypedDict items are invariant).
            for name, typ in left.items.items():
                if (name in right.items
                        and not mypy.sametypes.is_same_type(typ, right.items[name])):
                    return False
            # Left must have all the items that right has.
            for name, typ in right.items.items():
                if name not in left.items:
                    return False
            return True
        return self._is_proper_subtype(left.fallback, right)

    def visit_literal_type(self, left: LiteralType) -> bool:
        if isinstance(self.right, LiteralType):
            return left == self.right
        else:
            return self._is_proper_subtype(left.fallback, self.right)

    def visit_overloaded(self, left: Overloaded) -> bool:
        # TODO: What's the right thing to do here?
        return False

    def visit_union_type(self, left: UnionType) -> bool:
        # A union is a subtype of right if every item is.
        return all([self._is_proper_subtype(item, self.orig_right) for item in left.items])

    def visit_partial_type(self, left: PartialType) -> bool:
        # TODO: What's the right thing to do here?
        return False

    def visit_type_type(self, left: TypeType) -> bool:
        right = self.right
        if isinstance(right, TypeType):
            # This is unsound, we don't check the __init__ signature.
            return self._is_proper_subtype(left.item, right.item)
        if isinstance(right, CallableType):
            # This is also unsound because of __init__.
            return right.is_type_obj() and self._is_proper_subtype(left.item, right.ret_type)
        if isinstance(right, Instance):
            if right.type.fullname == 'builtins.type':
                # TODO: Strictly speaking, the type builtins.type is considered equivalent to
                #       Type[Any].
However, this would break the is_proper_subtype check in # conditional_type_map for cases like isinstance(x, type) when the type # of x is Type[int]. It's unclear what's the right way to address this. return True if right.type.fullname == 'builtins.object': return True item = left.item if isinstance(item, TypeVarType): item = get_proper_type(item.upper_bound) if isinstance(item, Instance): metaclass = item.type.metaclass_type return metaclass is not None and self._is_proper_subtype(metaclass, right) return False def visit_type_alias_type(self, left: TypeAliasType) -> bool: assert False, "This should be never called, got {}".format(left) def is_more_precise(left: Type, right: Type, *, ignore_promotions: bool = False) -> bool: """Check if left is a more precise type than right. A left is a proper subtype of right, left is also more precise than right. Also, if right is Any, left is more precise than right, for any left. """ # TODO Should List[int] be more precise than List[Any]? right = get_proper_type(right) if isinstance(right, AnyType): return True return is_proper_subtype(left, right, ignore_promotions=ignore_promotions) mypy-0.761/mypy/suggestions.py0000644€tŠÔÚ€2›s®0000011505213576752246022646 0ustar jukkaDROPBOX\Domain Users00000000000000"""Mechanisms for inferring function types based on callsites. Currently works by collecting all argument types at callsites, synthesizing a list of possible function types from that, trying them all, and picking the one with the fewest errors that we think is the "best". Can return JSON that pyannotate can use to apply the annotations to code. There are a bunch of TODOs here: * Maybe want a way to surface the choices not selected?? * We can generate an exponential number of type suggestions, and probably want a way to not always need to check them all. * Our heuristics for what types to try are primitive and not yet supported by real practice. * More! Other things: * This is super brute force. 
Could we integrate with the typechecker more to understand more about what is
going on?
 * Like something with tracking constraints/unification variables?
 * No understanding of type variables at *all*
"""

from typing import (
    List, Optional, Tuple, Dict, Callable, Union, NamedTuple, TypeVar, Iterator, cast,
)
from typing_extensions import TypedDict

from mypy.state import strict_optional_set
from mypy.types import (
    Type, AnyType, TypeOfAny, CallableType, UnionType, NoneType, Instance, TupleType,
    TypeVarType, FunctionLike, UninhabitedType,
    TypeStrVisitor, TypeTranslator,
    is_optional, remove_optional, ProperType, get_proper_type,
    TypedDictType, TypeAliasType
)
from mypy.build import State, Graph
from mypy.nodes import (
    ARG_STAR, ARG_NAMED, ARG_STAR2, ARG_NAMED_OPT, FuncDef, MypyFile, SymbolTable,
    Decorator, RefExpr,
    SymbolNode, TypeInfo, Expression, ReturnStmt, CallExpr,
    reverse_builtin_aliases,
)
from mypy.server.update import FineGrainedBuildManager
from mypy.util import split_target
from mypy.find_sources import SourceFinder, InvalidSourceList
from mypy.modulefinder import PYTHON_EXTENSIONS
from mypy.plugin import Plugin, FunctionContext, MethodContext
from mypy.traverser import TraverserVisitor
from mypy.checkexpr import has_any_type, map_actuals_to_formals
from mypy.join import join_type_list
from mypy.meet import meet_type_list
from mypy.sametypes import is_same_type
from mypy.typeops import make_simplified_union

from contextlib import contextmanager

import itertools
import json
import os

# Shape of the JSON suggestion consumed by pyannotate.
PyAnnotateSignature = TypedDict('PyAnnotateSignature',
                                {'return_type': str, 'arg_types': List[str]})

# One observed call site of the target function.
Callsite = NamedTuple(
    'Callsite',
    [('path', str),
     ('line', int),
     ('arg_kinds', List[List[int]]),
     ('callee_arg_names', List[Optional[str]]),
     ('arg_names', List[List[Optional[str]]]),
     ('arg_types', List[List[Type]])])


class SuggestionPlugin(Plugin):
    """Plugin that records all calls to a given target."""

    def __init__(self, target: str) -> None:
        # Constructor calls are attributed to the class itself.
        if target.endswith(('.__new__',
                            '.__init__')):
            target = target.rsplit('.', 1)[0]

        self.target = target
        # List of call sites found by dmypy suggest:
        # (path, line, arg kinds, callee arg names, arg names, arg types)
        self.mystery_hits = []  # type: List[Callsite]

    def get_function_hook(self, fullname: str
                          ) -> Optional[Callable[[FunctionContext], Type]]:
        if fullname == self.target:
            return self.log
        else:
            return None

    def get_method_hook(self, fullname: str
                        ) -> Optional[Callable[[MethodContext], Type]]:
        if fullname == self.target:
            return self.log
        else:
            return None

    def log(self, ctx: Union[FunctionContext, MethodContext]) -> Type:
        # Record the call site; don't affect type checking.
        self.mystery_hits.append(Callsite(
            ctx.api.path,
            ctx.context.line,
            ctx.arg_kinds,
            ctx.callee_arg_names,
            ctx.arg_names,
            ctx.arg_types))
        return ctx.default_return_type


# NOTE: We could make this a bunch faster by implementing a StatementVisitor that skips
# traversing into expressions
class ReturnFinder(TraverserVisitor):
    """Visitor for finding all types returned from a function."""
    def __init__(self, typemap: Dict[Expression, Type]) -> None:
        self.typemap = typemap
        self.return_types = []  # type: List[Type]

    def visit_return_stmt(self, o: ReturnStmt) -> None:
        if o.expr is not None and o.expr in self.typemap:
            self.return_types.append(self.typemap[o.expr])

    def visit_func_def(self, o: FuncDef) -> None:
        # Skip nested functions
        pass


def get_return_types(typemap: Dict[Expression, Type], func: FuncDef) -> List[Type]:
    """Find all the types returned by return statements in func."""
    finder = ReturnFinder(typemap)
    func.body.accept(finder)
    return finder.return_types


class ArgUseFinder(TraverserVisitor):
    """Visitor for finding all the types of arguments that each arg is passed to.

    This is extremely simple minded but might be effective anyways.
""" def __init__(self, func: FuncDef, typemap: Dict[Expression, Type]) -> None: self.typemap = typemap self.arg_types = { arg.variable: [] for arg in func.arguments } # type: Dict[SymbolNode, List[Type]] def visit_call_expr(self, o: CallExpr) -> None: if not any(isinstance(e, RefExpr) and e.node in self.arg_types for e in o.args): return typ = get_proper_type(self.typemap.get(o.callee)) if not isinstance(typ, CallableType): return formal_to_actual = map_actuals_to_formals( o.arg_kinds, o.arg_names, typ.arg_kinds, typ.arg_names, lambda n: AnyType(TypeOfAny.special_form)) for i, args in enumerate(formal_to_actual): for arg_idx in args: arg = o.args[arg_idx] if isinstance(arg, RefExpr) and arg.node in self.arg_types: self.arg_types[arg.node].append(typ.arg_types[i]) def get_arg_uses(typemap: Dict[Expression, Type], func: FuncDef) -> List[List[Type]]: """Find all the types of arguments that each arg is passed to. For example, given def foo(x: int) -> None: ... def bar(x: str) -> None: ... def test(x, y): foo(x) bar(y) this will return [[int], [str]]. """ finder = ArgUseFinder(func, typemap) func.body.accept(finder) return [finder.arg_types[arg.variable] for arg in func.arguments] class SuggestionFailure(Exception): pass def is_explicit_any(typ: AnyType) -> bool: # Originally I wanted to count as explicit anything derived from an explicit any, but that # seemed too strict in some testing. # return (typ.type_of_any == TypeOfAny.explicit # or (typ.source_any is not None and typ.source_any.type_of_any == TypeOfAny.explicit)) # Important question: what should we do with source_any stuff? Does that count? # And actually should explicit anys count at all?? Maybe not! 
    return typ.type_of_any == TypeOfAny.explicit


def is_implicit_any(typ: Type) -> bool:
    typ = get_proper_type(typ)
    return isinstance(typ, AnyType) and not is_explicit_any(typ)


class SuggestionEngine:
    """Engine for finding call sites and suggesting signatures."""

    def __init__(self, fgmanager: FineGrainedBuildManager,
                 *,
                 json: bool,
                 no_errors: bool = False,
                 no_any: bool = False,
                 try_text: bool = False,
                 flex_any: Optional[float] = None,
                 use_fixme: Optional[str] = None,
                 max_guesses: Optional[int] = None
                 ) -> None:
        self.fgmanager = fgmanager
        self.manager = fgmanager.manager
        self.plugin = self.manager.plugin
        self.graph = fgmanager.graph
        self.finder = SourceFinder(self.manager.fscache)

        self.give_json = json
        self.no_errors = no_errors
        self.try_text = try_text
        self.flex_any = flex_any
        if no_any:
            # no_any is shorthand for requiring a fully Any-free signature.
            self.flex_any = 1.0

        self.max_guesses = max_guesses or 64
        self.use_fixme = use_fixme

    def suggest(self, function: str) -> str:
        """Suggest an inferred type for function."""
        mod, func_name, node = self.find_node(function)

        with self.restore_after(mod):
            with self.with_export_types():
                suggestion = self.get_suggestion(mod, node)

        if self.give_json:
            return self.json_suggestion(mod, func_name, node, suggestion)
        else:
            return self.format_signature(suggestion)

    def suggest_callsites(self, function: str) -> str:
        """Find a list of call sites of function."""
        mod, _, node = self.find_node(function)
        with self.restore_after(mod):
            callsites, _ = self.get_callsites(node)

        return '\n'.join(dedup(
            ["%s:%s: %s" % (path, line, self.format_args(arg_kinds, arg_names, arg_types))
             for path, line, arg_kinds, _, arg_names, arg_types in callsites]
        ))

    @contextmanager
    def restore_after(self, module: str) -> Iterator[None]:
        """Context manager that reloads a module after executing the body.

        This should undo any damage done to the module state while mucking around.
""" try: yield finally: self.reload(self.graph[module]) @contextmanager def with_export_types(self) -> Iterator[None]: """Context manager that enables the export_types flag in the body. This causes type information to be exported into the manager's all_types variable. """ old = self.manager.options.export_types self.manager.options.export_types = True try: yield finally: self.manager.options.export_types = old def get_trivial_type(self, fdef: FuncDef) -> CallableType: """Generate a trivial callable type from a func def, with all Anys""" # The Anys are marked as being from the suggestion engine # since they need some special treatment (specifically, # constraint generation ignores them.) return CallableType( [AnyType(TypeOfAny.suggestion_engine) for a in fdef.arg_kinds], fdef.arg_kinds, fdef.arg_names, AnyType(TypeOfAny.suggestion_engine), self.builtin_type('builtins.function')) def get_starting_type(self, fdef: FuncDef) -> CallableType: if isinstance(fdef.type, CallableType): return make_suggestion_anys(fdef.type) else: return self.get_trivial_type(fdef) def get_args(self, is_method: bool, base: CallableType, defaults: List[Optional[Type]], callsites: List[Callsite], uses: List[List[Type]]) -> List[List[Type]]: """Produce a list of type suggestions for each argument type.""" types = [] # type: List[List[Type]] for i in range(len(base.arg_kinds)): # Make self args Any but this will get overriden somewhere in the checker if i == 0 and is_method: types.append([AnyType(TypeOfAny.suggestion_engine)]) continue all_arg_types = [] for call in callsites: for typ in call.arg_types[i - is_method]: # Collect all the types except for implicit anys if not is_implicit_any(typ): all_arg_types.append(typ) all_use_types = [] for typ in uses[i]: # Collect all the types except for implicit anys if not is_implicit_any(typ): all_use_types.append(typ) # Add in any default argument types default = defaults[i] if default: all_arg_types.append(default) if all_use_types: 
                    all_use_types.append(default)

            arg_types = []

            if (all_arg_types
                    and all(isinstance(get_proper_type(tp), NoneType) for tp in all_arg_types)):
                # If we only saw None, guess Optional[Any] rather than None alone.
                arg_types.append(
                    UnionType.make_union([all_arg_types[0], AnyType(TypeOfAny.explicit)]))
            elif all_arg_types:
                arg_types.extend(generate_type_combinations(all_arg_types))
            else:
                arg_types.append(AnyType(TypeOfAny.explicit))

            if all_use_types:
                # This is a meet because the type needs to be compatible with all the uses
                arg_types.append(meet_type_list(all_use_types))

            types.append(arg_types)
        return types

    def get_default_arg_types(self, state: State, fdef: FuncDef) -> List[Optional[Type]]:
        # Type of each argument's default value, or None when there is no default.
        return [self.manager.all_types[arg.initializer] if arg.initializer else None
                for arg in fdef.arguments]

    def add_adjustments(self, typs: List[Type]) -> List[Type]:
        # On Python 2, with --try-text, also try the Text variants of str types.
        if not self.try_text or self.manager.options.python_version[0] != 2:
            return typs
        translator = StrToText(self.builtin_type)
        return dedup(typs + [tp.accept(translator) for tp in typs])

    def get_guesses(self, is_method: bool, base: CallableType, defaults: List[Optional[Type]],
                    callsites: List[Callsite],
                    uses: List[List[Type]]) -> List[CallableType]:
        """Compute a list of guesses for a function's type.

        This focuses just on the argument types, and doesn't change the
        provided return type.
        """
        options = self.get_args(is_method, base, defaults, callsites, uses)
        options = [self.add_adjustments(tps) for tps in options]

        # Take the first `max_guesses` guesses.
        # The cartesian product of the per-argument options can be huge, so cap it.
        product = itertools.islice(itertools.product(*options), 0, self.max_guesses)
        return [refine_callable(base, base.copy_modified(arg_types=list(x))) for x in product]

    def get_callsites(self, func: FuncDef) -> Tuple[List[Callsite], List[str]]:
        """Find all call sites of a function."""
        new_type = self.get_starting_type(func)

        collector_plugin = SuggestionPlugin(func.fullname)

        # Temporarily install the collector plugin while rechecking the function.
        self.plugin._plugins.insert(0, collector_plugin)
        try:
            errors = self.try_type(func, new_type)
        finally:
            self.plugin._plugins.pop(0)

        return collector_plugin.mystery_hits, errors

    def filter_options(
        self, guesses: List[CallableType], is_method: bool, ignore_return: bool
    ) -> List[CallableType]:
        """Apply any configured filters to the possible guesses.

        Currently the only option is filtering based on Any prevalance."""
        return [
            t for t in guesses
            if self.flex_any is None
            or any_score_callable(t, is_method, ignore_return) >= self.flex_any
        ]

    def find_best(self, func: FuncDef,
                  guesses: List[CallableType]) -> Tuple[CallableType, int]:
        """From a list of possible function types, find the best one.

        For best, we want the fewest errors, then the best "score" from score_callable.
""" if not guesses: raise SuggestionFailure("No guesses that match criteria!") errors = {guess: self.try_type(func, guess) for guess in guesses} best = min(guesses, key=lambda s: (count_errors(errors[s]), self.score_callable(s))) return best, count_errors(errors[best]) def get_guesses_from_parent(self, node: FuncDef) -> List[CallableType]: """Try to get a guess of a method type from a parent class.""" if not node.info: return [] for parent in node.info.mro[1:]: pnode = parent.names.get(node.name) if pnode and isinstance(pnode.node, (FuncDef, Decorator)): typ = get_proper_type(pnode.node.type) # FIXME: Doesn't work right with generic tyeps if isinstance(typ, CallableType) and len(typ.arg_types) == len(node.arguments): # Return the first thing we find, since it probably doesn't make sense # to grab things further up in the chain if an earlier parent has it. return [typ] return [] def get_suggestion(self, mod: str, node: FuncDef) -> PyAnnotateSignature: """Compute a suggestion for a function. Return the type and whether the first argument should be ignored. """ graph = self.graph callsites, orig_errors = self.get_callsites(node) uses = get_arg_uses(self.manager.all_types, node) if self.no_errors and orig_errors: raise SuggestionFailure("Function does not typecheck.") is_method = bool(node.info) and not node.is_static with strict_optional_set(graph[mod].options.strict_optional): guesses = self.get_guesses( is_method, self.get_starting_type(node), self.get_default_arg_types(graph[mod], node), callsites, uses, ) guesses += self.get_guesses_from_parent(node) guesses = self.filter_options(guesses, is_method, ignore_return=True) best, _ = self.find_best(node, guesses) # Now try to find the return type! 
self.try_type(node, best) returns = get_return_types(self.manager.all_types, node) with strict_optional_set(graph[mod].options.strict_optional): if returns: ret_types = generate_type_combinations(returns) else: ret_types = [NoneType()] guesses = [best.copy_modified(ret_type=refine_type(best.ret_type, t)) for t in ret_types] guesses = self.filter_options(guesses, is_method, ignore_return=False) best, errors = self.find_best(node, guesses) if self.no_errors and errors: raise SuggestionFailure("No annotation without errors") return self.pyannotate_signature(mod, is_method, best) def format_args(self, arg_kinds: List[List[int]], arg_names: List[List[Optional[str]]], arg_types: List[List[Type]]) -> str: args = [] # type: List[str] for i in range(len(arg_types)): for kind, name, typ in zip(arg_kinds[i], arg_names[i], arg_types[i]): arg = self.format_type(None, typ) if kind == ARG_STAR: arg = '*' + arg elif kind == ARG_STAR2: arg = '**' + arg elif kind in (ARG_NAMED, ARG_NAMED_OPT): if name: arg = "%s=%s" % (name, arg) args.append(arg) return "(%s)" % (", ".join(args)) def find_node(self, key: str) -> Tuple[str, str, FuncDef]: """From a target name, return module/target names and the func def. The 'key' argument can be in one of two formats: * As the function full name, e.g., package.module.Cls.method * As the function location as file and line separated by column, e.g., path/to/file.py:42 """ # TODO: Also return OverloadedFuncDef -- currently these are ignored. node = None # type: Optional[SymbolNode] if ':' in key: if key.count(':') > 1: raise SuggestionFailure( 'Malformed location for function: {}. Must be either' ' package.module.Class.method or path/to/file.py:line'.format(key)) file, line = key.split(':') if not line.isdigit(): raise SuggestionFailure('Line number must be a number. Got {}'.format(line)) line_number = int(line) modname, node = self.find_node_by_file_and_line(file, line_number) tail = node.fullname[len(modname) + 1:] # add one to account for '.' 
    def find_node_by_module_and_name(self, modname: str, tail: str) -> Optional[SymbolNode]:
        """Find symbol node by module id and qualified name.

        'tail' is the dotted path within the module (possibly through
        nested classes), e.g. 'Cls.Inner.method'.

        Raise SuggestionFailure if can't find one.
        """
        tree = self.ensure_loaded(self.fgmanager.graph[modname])

        # N.B. This is reimplemented from update's lookup_target
        # basically just to produce better error messages.

        names = tree.names  # type: SymbolTable

        # Look through any classes: every component except the last must
        # resolve to a class whose symbol table we descend into.
        components = tail.split('.')
        for i, component in enumerate(components[:-1]):
            if component not in names:
                raise SuggestionFailure("Unknown class %s.%s" %
                                        (modname, '.'.join(components[:i + 1])))
            node = names[component].node  # type: Optional[SymbolNode]
            if not isinstance(node, TypeInfo):
                raise SuggestionFailure("Object %s.%s is not a class" %
                                        (modname, '.'.join(components[:i + 1])))
            names = node.names

        # Look for the actual function/method as the final component.
        funcname = components[-1]
        if funcname not in names:
            key = modname + '.' + tail
            raise SuggestionFailure("Unknown %s %s" %
                                    ("method" if len(components) > 1 else "function", key))
        return names[funcname].node
""" if not any(file.endswith(ext) for ext in PYTHON_EXTENSIONS): raise SuggestionFailure('Source file is not a Python file') try: modname, _ = self.finder.crawl_up(os.path.normpath(file)) except InvalidSourceList: raise SuggestionFailure('Invalid source file name: ' + file) if modname not in self.graph: raise SuggestionFailure('Unknown module: ' + modname) # We must be sure about any edits in this file as this might affect the line numbers. tree = self.ensure_loaded(self.fgmanager.graph[modname], force=True) node = None # type: Optional[SymbolNode] closest_line = None # type: Optional[int] # TODO: Handle nested functions. for _, sym, _ in tree.local_definitions(): if isinstance(sym.node, (FuncDef, Decorator)): sym_line = sym.node.line # TODO: add support for OverloadedFuncDef. else: continue # We want the closest function above the specified line if sym_line <= line and (closest_line is None or sym_line > closest_line): closest_line = sym_line node = sym.node if not node: raise SuggestionFailure('Cannot find a function at line {}'.format(line)) return modname, node def extract_from_decorator(self, node: Decorator) -> Optional[FuncDef]: for dec in node.decorators: typ = None if (isinstance(dec, RefExpr) and isinstance(dec.node, FuncDef)): typ = dec.node.type elif (isinstance(dec, CallExpr) and isinstance(dec.callee, RefExpr) and isinstance(dec.callee.node, FuncDef) and isinstance(dec.callee.node.type, CallableType)): typ = get_proper_type(dec.callee.node.type.ret_type) if not isinstance(typ, FunctionLike): return None for ct in typ.items(): if not (len(ct.arg_types) == 1 and isinstance(ct.arg_types[0], TypeVarType) and ct.arg_types[0] == ct.ret_type): return None return node.func def try_type(self, func: FuncDef, typ: ProperType) -> List[str]: """Recheck a function while assuming it has type typ. Return all error messages. """ old = func.unanalyzed_type # During reprocessing, unanalyzed_type gets copied to type (by aststrip). 
# We set type to None to ensure that the type always changes during # reprocessing. func.type = None func.unanalyzed_type = typ try: res = self.fgmanager.trigger(func.fullname) # if res: # print('===', typ) # print('\n'.join(res)) return res finally: func.unanalyzed_type = old def reload(self, state: State, check_errors: bool = False) -> List[str]: """Recheck the module given by state. If check_errors is true, raise an exception if there are errors. """ assert state.path is not None return self.fgmanager.update([(state.id, state.path)], []) def ensure_loaded(self, state: State, force: bool = False) -> MypyFile: """Make sure that the module represented by state is fully loaded.""" if not state.tree or state.tree.is_cache_skeleton or force: self.reload(state) assert state.tree is not None return state.tree def builtin_type(self, s: str) -> Instance: return self.manager.semantic_analyzer.builtin_type(s) def json_suggestion(self, mod: str, func_name: str, node: FuncDef, suggestion: PyAnnotateSignature) -> str: """Produce a json blob for a suggestion suitable for application by pyannotate.""" # pyannotate irritatingly drops class names for class and static methods if node.is_class or node.is_static: func_name = func_name.split('.', 1)[-1] # pyannotate works with either paths relative to where the # module is rooted or with absolute paths. We produce absolute # paths because it is simpler. 
path = os.path.abspath(self.graph[mod].xpath) obj = { 'signature': suggestion, 'line': node.line, 'path': path, 'func_name': func_name, 'samples': 0 } return json.dumps([obj], sort_keys=True) def pyannotate_signature( self, cur_module: Optional[str], is_method: bool, typ: CallableType ) -> PyAnnotateSignature: """Format a callable type as a pyannotate dict""" start = int(is_method) return { 'arg_types': [self.format_type(cur_module, t) for t in typ.arg_types[start:]], 'return_type': self.format_type(cur_module, typ.ret_type), } def format_signature(self, sig: PyAnnotateSignature) -> str: """Format a callable type in a way suitable as an annotation... kind of""" return "({}) -> {}".format( ", ".join(sig['arg_types']), sig['return_type'] ) def format_type(self, cur_module: Optional[str], typ: Type) -> str: if self.use_fixme and isinstance(get_proper_type(typ), AnyType): return self.use_fixme return typ.accept(TypeFormatter(cur_module, self.graph)) def score_type(self, t: Type, arg_pos: bool) -> int: """Generate a score for a type that we use to pick which type to use. Lower is better, prefer non-union/non-any types. Don't penalize optionals. """ t = get_proper_type(t) if isinstance(t, AnyType): return 20 if arg_pos and isinstance(t, NoneType): return 20 if isinstance(t, UnionType): if any(isinstance(get_proper_type(x), AnyType) for x in t.items): return 20 if any(has_any_type(x) for x in t.items): return 15 if not is_optional(t): return 10 if isinstance(t, CallableType) and (has_any_type(t) or is_tricky_callable(t)): return 10 if self.try_text and isinstance(t, Instance) and t.type.fullname == 'builtins.str': return 1 return 0 def score_callable(self, t: CallableType) -> int: return (sum([self.score_type(x, arg_pos=True) for x in t.arg_types]) + self.score_type(t.ret_type, arg_pos=False)) def any_score_type(ut: Type, arg_pos: bool) -> float: """Generate a very made up number representing the Anyness of a type. 
def any_score_callable(t: CallableType, is_method: bool, ignore_return: bool) -> float:
    """Average the per-type Anyness scores of a callable's signature.

    Higher is better, 1.0 is max (see any_score_type).  If ignore_return
    is true the return type is treated as fully annotated.
    """
    # Ignore the first argument of methods
    scores = [any_score_type(x, arg_pos=True) for x in t.arg_types[int(is_method):]]
    # Return type counts twice (since it spreads type information), unless it is
    # None in which case it does not count at all. (Though it *does* still count
    # if there are no arguments.)
    if not isinstance(get_proper_type(t.ret_type), NoneType) or not scores:
        ret = 1.0 if ignore_return else any_score_type(t.ret_type, arg_pos=False)
        scores += [ret, ret]
    # 'scores' cannot be empty here: with no arguments the branch above always
    # appends the return-type scores, so the division is safe.
    return sum(scores) / len(scores)
This helps the annotation tool avoid # inserting redundant imports when a type has been reexported. if self.module: parts = obj.split('.') # need to split the object part if it is a nested class tree = self.graph[self.module].tree if tree and parts[0] in tree.names: mod = self.module if (mod, obj) == ('builtins', 'tuple'): mod, obj = 'typing', 'Tuple[' + t.args[0].accept(self) + ', ...]' elif t.args != []: obj += '[{}]'.format(self.list_str(t.args)) if mod_obj == ('builtins', 'unicode'): return 'Text' elif mod == 'builtins': return obj else: delim = '.' if '.' not in obj else ':' return mod + delim + obj def visit_tuple_type(self, t: TupleType) -> str: if t.partial_fallback and t.partial_fallback.type: fallback_name = t.partial_fallback.type.fullname if fallback_name != 'builtins.tuple': return t.partial_fallback.accept(self) s = self.list_str(t.items) return 'Tuple[{}]'.format(s) def visit_uninhabited_type(self, t: UninhabitedType) -> str: return "Any" def visit_typeddict_type(self, t: TypedDictType) -> str: return t.fallback.accept(self) def visit_union_type(self, t: UnionType) -> str: if len(t.items) == 2 and is_optional(t): return "Optional[{}]".format(remove_optional(t).accept(self)) else: return super().visit_union_type(t) def visit_callable_type(self, t: CallableType) -> str: # TODO: use extended callables? if is_tricky_callable(t): arg_str = "..." else: # Note: for default arguments, we just assume that they # are required. 
class StrToText(TypeTranslator):
    """Type translator that rewrites 'builtins.str' into the Text type.

    Presumably used for the suggester's try-text mode (Python 2
    compatibility, where Text is 'builtins.unicode') -- confirm with callers.
    """

    def __init__(self, builtin_type: Callable[[str], Instance]) -> None:
        # Cache the Text instance once; every str occurrence maps to it.
        self.text_type = builtin_type('builtins.unicode')

    def visit_type_alias_type(self, t: TypeAliasType) -> Type:
        # Expand the alias to check whether it is really just str.
        exp_t = get_proper_type(t)
        if isinstance(exp_t, Instance) and exp_t.type.fullname == 'builtins.str':
            return self.text_type
        # Otherwise keep the alias but translate its type arguments.
        return t.copy_modified(args=[a.accept(self) for a in t.args])

    def visit_instance(self, t: Instance) -> Type:
        if t.type.fullname == 'builtins.str':
            return self.text_type
        else:
            return super().visit_instance(t)
""" joined_type = join_type_list(types) union_type = make_simplified_union(types) if is_same_type(joined_type, union_type): return [joined_type] else: return [joined_type, union_type] def count_errors(msgs: List[str]) -> int: return len([x for x in msgs if ' error: ' in x]) def refine_type(ti: Type, si: Type) -> Type: """Refine `ti` by replacing Anys in it with information taken from `si` This basically works by, when the types have the same structure, traversing both of them in parallel and replacing Any on the left with whatever the type on the right is. If the types don't have the same structure (or aren't supported), the left type is chosen. For example: refine(Any, T) = T, for all T refine(float, int) = float refine(List[Any], List[int]) = List[int] refine(Dict[int, Any], Dict[Any, int]) = Dict[int, int] refine(Tuple[int, Any], Tuple[Any, int]) = Tuple[int, int] refine(Callable[[Any], Any], Callable[[int], int]) = Callable[[int], int] refine(Callable[..., int], Callable[[int, float], Any]) = Callable[[int, float], int] refine(Optional[Any], int) = Optional[int] refine(Optional[Any], Optional[int]) = Optional[int] refine(Optional[Any], Union[int, str]) = Optional[Union[int, str]] refine(Optional[List[Any]], List[int]) = List[int] """ t = get_proper_type(ti) s = get_proper_type(si) if isinstance(t, AnyType): # If s is also an Any, we return if it is a missing_import Any return t if isinstance(s, AnyType) and t.missing_import_name else s if isinstance(t, Instance) and isinstance(s, Instance) and t.type == s.type: return t.copy_modified(args=[refine_type(ta, sa) for ta, sa in zip(t.args, s.args)]) if ( isinstance(t, TupleType) and isinstance(s, TupleType) and t.partial_fallback == s.partial_fallback and len(t.items) == len(s.items) ): return t.copy_modified(items=[refine_type(ta, sa) for ta, sa in zip(t.items, s.items)]) if isinstance(t, CallableType) and isinstance(s, CallableType): return refine_callable(t, s) if isinstance(t, UnionType): return refine_union(t, 
def refine_union(t: UnionType, s: ProperType) -> Type:
    """Refine a union type based on another type.

    This is done by refining every component of the union against the
    right hand side type (or every component of its union if it is one).
    If an element of the union is successfully refined, we drop it
    from the union in favor of the refined versions.
    """
    # Don't try to do any union refining if the types are already the
    # same.  This prevents things like refining Optional[Any] against
    # itself and producing None.
    if t == s:
        return t

    rhs_items = s.items if isinstance(s, UnionType) else [s]
    new_items = []  # type: List[Type]
    for lhs in t.items:
        refined = False
        for rhs in rhs_items:
            new = refine_type(lhs, rhs)
            # A changed result counts as a refinement; note that one lhs item
            # may contribute several refined versions (one per matching rhs).
            if new != lhs:
                new_items.append(new)
                refined = True
        if not refined:
            # Nothing on the right matched this item; keep it unchanged.
            new_items.append(lhs)

    # Turn strict optional on when simplifying the union since we
    # don't want to drop Nones.
    with strict_optional_set(True):
        return make_simplified_union(new_items)
""" if t.fallback != s.fallback: return t if t.is_ellipsis_args and not is_tricky_callable(s): return s.copy_modified(ret_type=refine_type(t.ret_type, s.ret_type)) if is_tricky_callable(t) or t.arg_kinds != s.arg_kinds: return t return t.copy_modified( arg_types=[refine_type(ta, sa) for ta, sa in zip(t.arg_types, s.arg_types)], ret_type=refine_type(t.ret_type, s.ret_type), ) T = TypeVar('T') def dedup(old: List[T]) -> List[T]: new = [] # type: List[T] for x in old: if x not in new: new.append(x) return new mypy-0.761/mypy/test/0000755€tŠÔÚ€2›s®0000000000013576752266020677 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/test/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246022774 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/test/collect.py0000644€tŠÔÚ€2›s®0000000000013576752246022662 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/test/config.py0000644€tŠÔÚ€2›s®0000000143113576752246022513 0ustar jukkaDROPBOX\Domain Users00000000000000import os.path provided_prefix = os.getenv('MYPY_TEST_PREFIX', None) if provided_prefix: PREFIX = provided_prefix else: this_file_dir = os.path.dirname(os.path.realpath(__file__)) PREFIX = os.path.dirname(os.path.dirname(this_file_dir)) # Location of test data files such as test case descriptions. test_data_prefix = os.path.join(PREFIX, 'test-data', 'unit') package_path = os.path.join(PREFIX, 'test-data', 'packages') assert os.path.isdir(test_data_prefix), \ 'Test data prefix ({}) not set correctly'.format(test_data_prefix) # Temp directory used for the temp files created when running test cases. # This is *within* the tempfile.TemporaryDirectory that is chroot'ed per testcase. # It is also hard-coded in numerous places, so don't change it. 
test_temp_dir = 'tmp' mypy-0.761/mypy/test/data.py0000644€tŠÔÚ€2›s®0000005511213576752246022164 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities for processing .test files containing test case descriptions.""" import os.path import os import tempfile import posixpath import re import shutil from abc import abstractmethod import sys import pytest # type: ignore # no pytest in typeshed from typing import List, Tuple, Set, Optional, Iterator, Any, Dict, NamedTuple, Union from mypy.test.config import test_data_prefix, test_temp_dir, PREFIX root_dir = os.path.normpath(PREFIX) # File modify/create operation: copy module contents from source_path. UpdateFile = NamedTuple('UpdateFile', [('module', str), ('source_path', str), ('target_path', str)]) # File delete operation: delete module file. DeleteFile = NamedTuple('DeleteFile', [('module', str), ('path', str)]) FileOperation = Union[UpdateFile, DeleteFile] def parse_test_case(case: 'DataDrivenTestCase') -> None: """Parse and prepare a single case from suite with test case descriptions. This method is part of the setup phase, just before the test case is run. 
""" test_items = parse_test_data(case.data, case.name) base_path = case.suite.base_path if case.suite.native_sep: join = os.path.join else: join = posixpath.join out_section_missing = case.suite.required_out_section normalize_output = True files = [] # type: List[Tuple[str, str]] # path and contents output_files = [] # type: List[Tuple[str, str]] # path and contents for output files output = [] # type: List[str] # Regular output errors output2 = {} # type: Dict[int, List[str]] # Output errors for incremental, runs 2+ deleted_paths = {} # type: Dict[int, Set[str]] # from run number of paths stale_modules = {} # type: Dict[int, Set[str]] # from run number to module names rechecked_modules = {} # type: Dict[ int, Set[str]] # from run number module names triggered = [] # type: List[str] # Active triggers (one line per incremental step) targets = {} # type: Dict[int, List[str]] # Fine-grained targets (per fine-grained update) # Process the parsed items. Each item has a header of form [id args], # optionally followed by lines of text. item = first_item = test_items[0] for item in test_items[1:]: if item.id == 'file' or item.id == 'outfile': # Record an extra file needed for the test case. assert item.arg is not None contents = expand_variables('\n'.join(item.data)) file_entry = (join(base_path, item.arg), contents) if item.id == 'file': files.append(file_entry) else: output_files.append(file_entry) elif item.id in ('builtins', 'builtins_py2'): # Use an alternative stub file for the builtins module. assert item.arg is not None mpath = join(os.path.dirname(case.file), item.arg) fnam = 'builtins.pyi' if item.id == 'builtins' else '__builtin__.pyi' with open(mpath, encoding='utf8') as f: files.append((join(base_path, fnam), f.read())) elif item.id == 'typing': # Use an alternative stub file for the typing module. 
assert item.arg is not None src_path = join(os.path.dirname(case.file), item.arg) with open(src_path, encoding='utf8') as f: files.append((join(base_path, 'typing.pyi'), f.read())) elif re.match(r'stale[0-9]*$', item.id): passnum = 1 if item.id == 'stale' else int(item.id[len('stale'):]) assert passnum > 0 modules = (set() if item.arg is None else {t.strip() for t in item.arg.split(',')}) stale_modules[passnum] = modules elif re.match(r'rechecked[0-9]*$', item.id): passnum = 1 if item.id == 'rechecked' else int(item.id[len('rechecked'):]) assert passnum > 0 modules = (set() if item.arg is None else {t.strip() for t in item.arg.split(',')}) rechecked_modules[passnum] = modules elif re.match(r'targets[0-9]*$', item.id): passnum = 1 if item.id == 'targets' else int(item.id[len('targets'):]) assert passnum > 0 reprocessed = [] if item.arg is None else [t.strip() for t in item.arg.split(',')] targets[passnum] = reprocessed elif item.id == 'delete': # File to delete during a multi-step test case assert item.arg is not None m = re.match(r'(.*)\.([0-9]+)$', item.arg) assert m, 'Invalid delete section: {}'.format(item.arg) num = int(m.group(2)) assert num >= 2, "Can't delete during step {}".format(num) full = join(base_path, m.group(1)) deleted_paths.setdefault(num, set()).add(full) elif re.match(r'out[0-9]*$', item.id): if item.arg == 'skip-path-normalization': normalize_output = False tmp_output = [expand_variables(line) for line in item.data] if os.path.sep == '\\' and normalize_output: tmp_output = [fix_win_path(line) for line in tmp_output] if item.id == 'out' or item.id == 'out1': output = tmp_output else: passnum = int(item.id[len('out'):]) assert passnum > 1 output2[passnum] = tmp_output out_section_missing = False elif item.id == 'triggered' and item.arg is None: triggered = item.data else: raise ValueError( 'Invalid section header {} in {} at line {}'.format( item.id, case.file, item.line)) if out_section_missing: raise ValueError( '{}, line {}: Required output 
section not found'.format( case.file, first_item.line)) for passnum in stale_modules.keys(): if passnum not in rechecked_modules: # If the set of rechecked modules isn't specified, make it the same as the set # of modules with a stale public interface. rechecked_modules[passnum] = stale_modules[passnum] if (passnum in stale_modules and passnum in rechecked_modules and not stale_modules[passnum].issubset(rechecked_modules[passnum])): raise ValueError( ('Stale modules after pass {} must be a subset of rechecked ' 'modules ({}:{})').format(passnum, case.file, first_item.line)) input = first_item.data expand_errors(input, output, 'main') for file_path, contents in files: expand_errors(contents.split('\n'), output, file_path) case.input = input case.output = output case.output2 = output2 case.lastline = item.line case.files = files case.output_files = output_files case.expected_stale_modules = stale_modules case.expected_rechecked_modules = rechecked_modules case.deleted_paths = deleted_paths case.triggered = triggered or [] case.normalize_output = normalize_output case.expected_fine_grained_targets = targets class DataDrivenTestCase(pytest.Item): # type: ignore # inheriting from Any """Holds parsed data-driven test cases, and handles directory setup and teardown.""" input = None # type: List[str] output = None # type: List[str] # Output for the first pass output2 = None # type: Dict[int, List[str]] # Output for runs 2+, indexed by run number # full path of test suite file = '' line = 0 # (file path, file content) tuples files = None # type: List[Tuple[str, str]] expected_stale_modules = None # type: Dict[int, Set[str]] expected_rechecked_modules = None # type: Dict[int, Set[str]] expected_fine_grained_targets = None # type: Dict[int, List[str]] # Whether or not we should normalize the output to standardize things like # forward vs backward slashes in file paths for Windows vs Linux. 
normalize_output = True def __init__(self, parent: 'DataSuiteCollector', suite: 'DataSuite', file: str, name: str, writescache: bool, only_when: str, platform: Optional[str], skip: bool, data: str, line: int) -> None: super().__init__(name, parent) self.suite = suite self.file = file self.writescache = writescache self.only_when = only_when if ((platform == 'windows' and sys.platform != 'win32') or (platform == 'posix' and sys.platform == 'win32')): skip = True self.skip = skip self.data = data self.line = line self.old_cwd = None # type: Optional[str] self.tmpdir = None # type: Optional[tempfile.TemporaryDirectory[str]] def runtest(self) -> None: if self.skip: pytest.skip() suite = self.parent.obj() suite.setup() try: suite.run_case(self) except Exception: # As a debugging aid, support copying the contents of the tmp directory somewhere save_dir = self.config.getoption('--save-failures-to', None) # type: Optional[str] if save_dir: assert self.tmpdir is not None target_dir = os.path.join(save_dir, os.path.basename(self.tmpdir.name)) print("Copying data from test {} to {}".format(self.name, target_dir)) if not os.path.isabs(target_dir): assert self.old_cwd target_dir = os.path.join(self.old_cwd, target_dir) shutil.copytree(self.tmpdir.name, target_dir) raise def setup(self) -> None: parse_test_case(case=self) self.old_cwd = os.getcwd() self.tmpdir = tempfile.TemporaryDirectory(prefix='mypy-test-') os.chdir(self.tmpdir.name) os.mkdir(test_temp_dir) for path, content in self.files: dir = os.path.dirname(path) os.makedirs(dir, exist_ok=True) with open(path, 'w', encoding='utf8') as f: f.write(content) def teardown(self) -> None: assert self.old_cwd is not None and self.tmpdir is not None, \ "test was not properly set up" os.chdir(self.old_cwd) try: self.tmpdir.cleanup() except OSError: pass self.old_cwd = None self.tmpdir = None def reportinfo(self) -> Tuple[str, int, str]: return self.file, self.line, self.name def repr_failure(self, excinfo: Any) -> str: if 
def module_from_path(path: str) -> str:
    """Derive a dotted module name from a file path under the test temp dir."""
    stem = re.sub(r'\.pyi?$', '', path)
    # We can have a mix of Unix-style and Windows-style separators.
    components = re.split(r'[/\\]', stem)
    assert components[0] == test_temp_dir
    dotted = '.'.join(components[1:])
    # A package is named by its directory, not its __init__ file.
    return re.sub(r'\.__init__$', '', dotted)
""" id = '' arg = '' # type: Optional[str] # Text data, array of 8-bit strings data = None # type: List[str] file = '' line = 0 # Line number in file def __init__(self, id: str, arg: Optional[str], data: List[str], line: int) -> None: self.id = id self.arg = arg self.data = data self.line = line def parse_test_data(raw_data: str, name: str) -> List[TestItem]: """Parse a list of lines that represent a sequence of test items.""" lines = ['', '[case ' + name + ']'] + raw_data.split('\n') ret = [] # type: List[TestItem] data = [] # type: List[str] id = None # type: Optional[str] arg = None # type: Optional[str] i = 0 i0 = 0 while i < len(lines): s = lines[i].strip() if lines[i].startswith('[') and s.endswith(']'): if id: data = collapse_line_continuation(data) data = strip_list(data) ret.append(TestItem(id, arg, strip_list(data), i0 + 1)) i0 = i id = s[1:-1] arg = None if ' ' in id: arg = id[id.index(' ') + 1:] id = id[:id.index(' ')] data = [] elif lines[i].startswith('\\['): data.append(lines[i][1:]) elif not lines[i].startswith('--'): data.append(lines[i]) elif lines[i].startswith('----'): data.append(lines[i][2:]) i += 1 # Process the last item. if id: data = collapse_line_continuation(data) data = strip_list(data) ret.append(TestItem(id, arg, data, i0 + 1)) return ret def strip_list(l: List[str]) -> List[str]: """Return a stripped copy of l. Strip whitespace at the end of all lines, and strip all empty lines from the end of the array. 
""" r = [] # type: List[str] for s in l: # Strip spaces at end of line r.append(re.sub(r'\s+$', '', s)) while len(r) > 0 and r[-1] == '': r.pop() return r def collapse_line_continuation(l: List[str]) -> List[str]: r = [] # type: List[str] cont = False for s in l: ss = re.sub(r'\\$', '', s) if cont: r[-1] += re.sub('^ +', '', ss) else: r.append(ss) cont = s.endswith('\\') return r def expand_variables(s: str) -> str: return s.replace('', root_dir) def expand_errors(input: List[str], output: List[str], fnam: str) -> None: """Transform comments such as '# E: message' or '# E:3: message' in input. The result is lines like 'fnam:line: error: message'. """ for i in range(len(input)): # The first in the split things isn't a comment for possible_err_comment in input[i].split(' # ')[1:]: m = re.search( r'^([ENW]):((?P\d+):)? (?P.*)$', possible_err_comment.strip()) if m: if m.group(1) == 'E': severity = 'error' elif m.group(1) == 'N': severity = 'note' elif m.group(1) == 'W': severity = 'warning' col = m.group('col') message = m.group('message') message = message.replace('\\#', '#') # adds back escaped # character if col is None: output.append( '{}:{}: {}: {}'.format(fnam, i + 1, severity, message)) else: output.append('{}:{}:{}: {}: {}'.format( fnam, i + 1, col, severity, message)) def fix_win_path(line: str) -> str: r"""Changes Windows paths to Linux paths in error messages. E.g. foo\bar.py -> foo/bar.py. """ line = line.replace(root_dir, root_dir.replace('\\', '/')) m = re.match(r'^([\S/]+):(\d+:)?(\s+.*)', line) if not m: return line else: filename, lineno, message = m.groups() return '{}:{}{}'.format(filename.replace('\\', '/'), lineno or '', message) def fix_cobertura_filename(line: str) -> str: r"""Changes filename paths to Linux paths in Cobertura output files. E.g. filename="pkg\subpkg\a.py" -> filename="pkg/subpkg/a.py". 
""" m = re.search(r' None: group = parser.getgroup('mypy') group.addoption('--update-data', action='store_true', default=False, help='Update test data to reflect actual output' ' (supported only for certain tests)') group.addoption('--save-failures-to', default=None, help='Copy the temp directories from failing tests to a target directory') group.addoption('--mypy-verbose', action='count', help='Set the verbose flag when creating mypy Options') group.addoption('--mypyc-showc', action='store_true', default=False, help='Display C code on mypyc test failures') # This function name is special to pytest. See # http://doc.pytest.org/en/latest/writing_plugins.html#collection-hooks def pytest_pycollect_makeitem(collector: Any, name: str, obj: object) -> 'Optional[Any]': """Called by pytest on each object in modules configured in conftest.py files. collector is pytest.Collector, returns Optional[pytest.Class] """ if isinstance(obj, type): # Only classes derived from DataSuite contain test cases, not the DataSuite class itself if issubclass(obj, DataSuite) and obj is not DataSuite: # Non-None result means this obj is a test case. # The collect method of the returned DataSuiteCollector instance will be called later, # with self.obj being obj. return DataSuiteCollector(name, parent=collector) return None def split_test_cases(parent: 'DataSuiteCollector', suite: 'DataSuite', file: str) -> Iterator['DataDrivenTestCase']: """Iterate over raw test cases in file, at collection time, ignoring sub items. The collection phase is slow, so any heavy processing should be deferred to after uninteresting tests are filtered (when using -k PATTERN switch). """ with open(file, encoding='utf-8') as f: data = f.read() cases = re.split(r'^\[case ([a-zA-Z_0-9]+)' r'(-writescache)?' r'(-only_when_cache|-only_when_nocache)?' r'(-posix|-windows)?' r'(-skip)?' 
r'\][ \t]*$\n', data, flags=re.DOTALL | re.MULTILINE) line_no = cases[0].count('\n') + 1 for i in range(1, len(cases), 6): name, writescache, only_when, platform_flag, skip, data = cases[i:i + 6] platform = platform_flag[1:] if platform_flag else None yield DataDrivenTestCase(parent, suite, file, name=add_test_name_suffix(name, suite.test_name_suffix), writescache=bool(writescache), only_when=only_when, platform=platform, skip=bool(skip), data=data, line=line_no) line_no += data.count('\n') + 1 class DataSuiteCollector(pytest.Class): # type: ignore # inheriting from Any def collect(self) -> Iterator[pytest.Item]: # type: ignore """Called by pytest on each of the object returned from pytest_pycollect_makeitem""" # obj is the object for which pytest_pycollect_makeitem returned self. suite = self.obj # type: DataSuite for f in suite.files: yield from split_test_cases(self, suite, os.path.join(suite.data_prefix, f)) def add_test_name_suffix(name: str, suffix: str) -> str: # Find magic suffix of form "-foobar" (used for things like "-skip"). m = re.search(r'-[-A-Za-z0-9]+$', name) if m: # Insert suite-specific test name suffix before the magic suffix # which must be the last thing in the test case name since we # are using endswith() checks. 
magic_suffix = m.group(0) return name[:-len(magic_suffix)] + suffix + magic_suffix else: return name + suffix def is_incremental(testcase: DataDrivenTestCase) -> bool: return 'incremental' in testcase.name.lower() or 'incremental' in testcase.file def has_stable_flags(testcase: DataDrivenTestCase) -> bool: if any(re.match(r'# flags[2-9]:', line) for line in testcase.input): return False for filename, contents in testcase.files: if os.path.basename(filename).startswith('mypy.ini.'): return False return True class DataSuite: # option fields - class variables files = None # type: List[str] base_path = test_temp_dir # Allow external users of the test code to override the data prefix data_prefix = test_data_prefix required_out_section = False native_sep = False # Name suffix automatically added to each test case in the suite (can be # used to distinguish test cases in suites that share data files) test_name_suffix = '' def setup(self) -> None: """Setup fixtures (ad-hoc)""" pass @abstractmethod def run_case(self, testcase: DataDrivenTestCase) -> None: raise NotImplementedError mypy-0.761/mypy/test/helpers.py0000644€tŠÔÚ€2›s®0000004020613576752246022713 0ustar jukkaDROPBOX\Domain Users00000000000000import os import re import sys import time import shutil import contextlib from typing import List, Iterable, Dict, Tuple, Callable, Any, Optional, Iterator from mypy import defaults import mypy.api as api import pytest # type: ignore # no pytest in typeshed # Exporting Suite as alias to TestCase for backwards compatibility # TODO: avoid aliasing - import and subclass TestCase directly from unittest import TestCase as Suite # noqa: F401 (re-exporting) from mypy.main import process_options from mypy.options import Options from mypy.test.data import DataDrivenTestCase, fix_cobertura_filename from mypy.test.config import test_temp_dir import mypy.version skip = pytest.mark.skip # AssertStringArraysEqual displays special line alignment helper messages if # the first different line 
has at least this many characters, MIN_LINE_LENGTH_FOR_ALIGNMENT = 5 def run_mypy(args: List[str]) -> None: __tracebackhide__ = True outval, errval, status = api.run(args + ['--show-traceback', '--no-site-packages', '--no-silence-site-packages']) if status != 0: sys.stdout.write(outval) sys.stderr.write(errval) pytest.fail(msg="Sample check failed", pytrace=False) def assert_string_arrays_equal(expected: List[str], actual: List[str], msg: str) -> None: """Assert that two string arrays are equal. We consider "can't" and "cannot" equivalent, by replacing the former with the latter before comparing. Display any differences in a human-readable form. """ actual = clean_up(actual) actual = [line.replace("can't", "cannot") for line in actual] expected = [line.replace("can't", "cannot") for line in expected] if actual != expected: num_skip_start = num_skipped_prefix_lines(expected, actual) num_skip_end = num_skipped_suffix_lines(expected, actual) sys.stderr.write('Expected:\n') # If omit some lines at the beginning, indicate it by displaying a line # with '...'. if num_skip_start > 0: sys.stderr.write(' ...\n') # Keep track of the first different line. first_diff = -1 # Display only this many first characters of identical lines. 
width = 75 for i in range(num_skip_start, len(expected) - num_skip_end): if i >= len(actual) or expected[i] != actual[i]: if first_diff < 0: first_diff = i sys.stderr.write(' {:<45} (diff)'.format(expected[i])) else: e = expected[i] sys.stderr.write(' ' + e[:width]) if len(e) > width: sys.stderr.write('...') sys.stderr.write('\n') if num_skip_end > 0: sys.stderr.write(' ...\n') sys.stderr.write('Actual:\n') if num_skip_start > 0: sys.stderr.write(' ...\n') for j in range(num_skip_start, len(actual) - num_skip_end): if j >= len(expected) or expected[j] != actual[j]: sys.stderr.write(' {:<45} (diff)'.format(actual[j])) else: a = actual[j] sys.stderr.write(' ' + a[:width]) if len(a) > width: sys.stderr.write('...') sys.stderr.write('\n') if not actual: sys.stderr.write(' (empty)\n') if num_skip_end > 0: sys.stderr.write(' ...\n') sys.stderr.write('\n') if 0 <= first_diff < len(actual) and ( len(expected[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT or len(actual[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT): # Display message that helps visualize the differences between two # long lines. 
show_align_message(expected[first_diff], actual[first_diff]) raise AssertionError(msg) def assert_module_equivalence(name: str, expected: Iterable[str], actual: Iterable[str]) -> None: expected_normalized = sorted(expected) actual_normalized = sorted(set(actual).difference({"__main__"})) assert_string_arrays_equal( expected_normalized, actual_normalized, ('Actual modules ({}) do not match expected modules ({}) ' 'for "[{} ...]"').format( ', '.join(actual_normalized), ', '.join(expected_normalized), name)) def assert_target_equivalence(name: str, expected: List[str], actual: List[str]) -> None: """Compare actual and expected targets (order sensitive).""" assert_string_arrays_equal( expected, actual, ('Actual targets ({}) do not match expected targets ({}) ' 'for "[{} ...]"').format( ', '.join(actual), ', '.join(expected), name)) def update_testcase_output(testcase: DataDrivenTestCase, output: List[str]) -> None: assert testcase.old_cwd is not None, "test was not properly set up" testcase_path = os.path.join(testcase.old_cwd, testcase.file) with open(testcase_path, encoding='utf8') as f: data_lines = f.read().splitlines() test = '\n'.join(data_lines[testcase.line:testcase.lastline]) mapping = {} # type: Dict[str, List[str]] for old, new in zip(testcase.output, output): PREFIX = 'error:' ind = old.find(PREFIX) if ind != -1 and old[:ind] == new[:ind]: old, new = old[ind + len(PREFIX):], new[ind + len(PREFIX):] mapping.setdefault(old, []).append(new) for old in mapping: if test.count(old) == len(mapping[old]): betweens = test.split(old) # Interleave betweens and mapping[old] from itertools import chain interleaved = [betweens[0]] + \ list(chain.from_iterable(zip(mapping[old], betweens[1:]))) test = ''.join(interleaved) data_lines[testcase.line:testcase.lastline] = [test] data = '\n'.join(data_lines) with open(testcase_path, 'w', encoding='utf8') as f: print(data, file=f) def show_align_message(s1: str, s2: str) -> None: """Align s1 and s2 so that the their first 
difference is highlighted. For example, if s1 is 'foobar' and s2 is 'fobar', display the following lines: E: foobar A: fobar ^ If s1 and s2 are long, only display a fragment of the strings around the first difference. If s1 is very short, do nothing. """ # Seeing what went wrong is trivial even without alignment if the expected # string is very short. In this case do nothing to simplify output. if len(s1) < 4: return maxw = 72 # Maximum number of characters shown sys.stderr.write('Alignment of first line difference:\n') trunc = False while s1[:30] == s2[:30]: s1 = s1[10:] s2 = s2[10:] trunc = True if trunc: s1 = '...' + s1 s2 = '...' + s2 max_len = max(len(s1), len(s2)) extra = '' if max_len > maxw: extra = '...' # Write a chunk of both lines, aligned. sys.stderr.write(' E: {}{}\n'.format(s1[:maxw], extra)) sys.stderr.write(' A: {}{}\n'.format(s2[:maxw], extra)) # Write an indicator character under the different columns. sys.stderr.write(' ') for j in range(min(maxw, max(len(s1), len(s2)))): if s1[j:j + 1] != s2[j:j + 1]: sys.stderr.write('^') # Difference break else: sys.stderr.write(' ') # Equal sys.stderr.write('\n') def clean_up(a: List[str]) -> List[str]: """Remove common directory prefix from all strings in a. This uses a naive string replace; it seems to work well enough. Also remove trailing carriage returns. """ res = [] for s in a: prefix = os.sep ss = s for p in prefix, prefix.replace(os.sep, '/'): if p != '/' and p != '//' and p != '\\' and p != '\\\\': ss = ss.replace(p, '') # Ignore spaces at end of line. ss = re.sub(' +$', '', ss) res.append(re.sub('\\r$', '', ss)) return res @contextlib.contextmanager def local_sys_path_set() -> Iterator[None]: """Temporary insert current directory into sys.path. This can be used by test cases that do runtime imports, for example by the stubgen tests. """ old_sys_path = sys.path[:] if not ('' in sys.path or '.' 
in sys.path): sys.path.insert(0, '') try: yield finally: sys.path = old_sys_path def num_skipped_prefix_lines(a1: List[str], a2: List[str]) -> int: num_eq = 0 while num_eq < min(len(a1), len(a2)) and a1[num_eq] == a2[num_eq]: num_eq += 1 return max(0, num_eq - 4) def num_skipped_suffix_lines(a1: List[str], a2: List[str]) -> int: num_eq = 0 while (num_eq < min(len(a1), len(a2)) and a1[-num_eq - 1] == a2[-num_eq - 1]): num_eq += 1 return max(0, num_eq - 4) def testfile_pyversion(path: str) -> Tuple[int, int]: if path.endswith('python2.test'): return defaults.PYTHON2_VERSION else: return defaults.PYTHON3_VERSION def testcase_pyversion(path: str, testcase_name: str) -> Tuple[int, int]: if testcase_name.endswith('python2'): return defaults.PYTHON2_VERSION else: return testfile_pyversion(path) def normalize_error_messages(messages: List[str]) -> List[str]: """Translate an array of error messages to use / as path separator.""" a = [] for m in messages: a.append(m.replace(os.sep, '/')) return a def retry_on_error(func: Callable[[], Any], max_wait: float = 1.0) -> None: """Retry callback with exponential backoff when it raises OSError. If the function still generates an error after max_wait seconds, propagate the exception. This can be effective against random file system operation failures on Windows. """ t0 = time.time() wait_time = 0.01 while True: try: func() return except OSError: wait_time = min(wait_time * 2, t0 + max_wait - time.time()) if wait_time <= 0.01: # Done enough waiting, the error seems persistent. 
raise time.sleep(wait_time) # TODO: assert_true and assert_false are redundant - use plain assert def assert_true(b: bool, msg: Optional[str] = None) -> None: if not b: raise AssertionError(msg) def assert_false(b: bool, msg: Optional[str] = None) -> None: if b: raise AssertionError(msg) def good_repr(obj: object) -> str: if isinstance(obj, str): if obj.count('\n') > 1: bits = ["'''\\"] for line in obj.split('\n'): # force repr to use ' not ", then cut it off bits.append(repr('"' + line)[2:-1]) bits[-1] += "'''" return '\n'.join(bits) return repr(obj) def assert_equal(a: object, b: object, fmt: str = '{} != {}') -> None: if a != b: raise AssertionError(fmt.format(good_repr(a), good_repr(b))) def typename(t: type) -> str: if '.' in str(t): return str(t).split('.')[-1].rstrip("'>") else: return str(t)[8:-2] def assert_type(typ: type, value: object) -> None: if type(value) != typ: raise AssertionError('Invalid type {}, expected {}'.format( typename(type(value)), typename(typ))) def parse_options(program_text: str, testcase: DataDrivenTestCase, incremental_step: int) -> Options: """Parse comments like '# flags: --foo' in a test case.""" options = Options() flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE) if incremental_step > 1: flags2 = re.search('# flags{}: (.*)$'.format(incremental_step), program_text, flags=re.MULTILINE) if flags2: flags = flags2 if flags: flag_list = flags.group(1).split() flag_list.append('--no-site-packages') # the tests shouldn't need an installed Python targets, options = process_options(flag_list, require_targets=False) if targets: # TODO: support specifying targets via the flags pragma raise RuntimeError('Specifying targets via the flags pragma is not supported.') else: flag_list = [] options = Options() # TODO: Enable strict optional in test cases by default (requires *many* test case changes) options.strict_optional = False options.error_summary = False # Allow custom python version to override testcase_pyversion. 
if all(flag.split('=')[0] not in ['--python-version', '-2', '--py2'] for flag in flag_list): options.python_version = testcase_pyversion(testcase.file, testcase.name) if testcase.config.getoption('--mypy-verbose'): options.verbosity = testcase.config.getoption('--mypy-verbose') return options def split_lines(*streams: bytes) -> List[str]: """Returns a single list of string lines from the byte streams in args.""" return [ s for stream in streams for s in stream.decode('utf8').splitlines() ] def copy_and_fudge_mtime(source_path: str, target_path: str) -> None: # In some systems, mtime has a resolution of 1 second which can # cause annoying-to-debug issues when a file has the same size # after a change. We manually set the mtime to circumvent this. # Note that we increment the old file's mtime, which guarentees a # different value, rather than incrementing the mtime after the # copy, which could leave the mtime unchanged if the old file had # a similarly fudged mtime. new_time = None if os.path.isfile(target_path): new_time = os.stat(target_path).st_mtime + 1 # Use retries to work around potential flakiness on Windows (AppVeyor). retry_on_error(lambda: shutil.copy(source_path, target_path)) if new_time: os.utime(target_path, times=(new_time, new_time)) def check_test_output_files(testcase: DataDrivenTestCase, step: int, strip_prefix: str = '') -> None: for path, expected_content in testcase.output_files: if path.startswith(strip_prefix): path = path[len(strip_prefix):] if not os.path.exists(path): raise AssertionError( 'Expected file {} was not produced by test case{}'.format( path, ' on step %d' % step if testcase.output2 else '')) with open(path, 'r', encoding='utf8') as output_file: actual_output_content = output_file.read().splitlines() normalized_output = normalize_file_output(actual_output_content, os.path.abspath(test_temp_dir)) # We always normalize things like timestamp, but only handle operating-system # specific things if requested. 
if testcase.normalize_output: if testcase.suite.native_sep and os.path.sep == '\\': normalized_output = [fix_cobertura_filename(line) for line in normalized_output] normalized_output = normalize_error_messages(normalized_output) assert_string_arrays_equal(expected_content.splitlines(), normalized_output, 'Output file {} did not match its expected output{}'.format( path, ' on step %d' % step if testcase.output2 else '')) def normalize_file_output(content: List[str], current_abs_path: str) -> List[str]: """Normalize file output for comparison.""" timestamp_regex = re.compile(r'\d{10}') result = [x.replace(current_abs_path, '$PWD') for x in content] version = mypy.version.__version__ result = [re.sub(r'\b' + re.escape(version) + r'\b', '$VERSION', x) for x in result] # We generate a new mypy.version when building mypy wheels that # lacks base_version, so handle that case. base_version = getattr(mypy.version, 'base_version', version) result = [re.sub(r'\b' + re.escape(base_version) + r'\b', '$VERSION', x) for x in result] result = [timestamp_regex.sub('$TIMESTAMP', x) for x in result] return result mypy-0.761/mypy/test/testapi.py0000644€tŠÔÚ€2›s®0000000260513576752246022723 0ustar jukkaDROPBOX\Domain Users00000000000000from io import StringIO import sys import mypy.api from mypy.test.helpers import Suite class APISuite(Suite): def setUp(self) -> None: self.sys_stdout = sys.stdout self.sys_stderr = sys.stderr sys.stdout = self.stdout = StringIO() sys.stderr = self.stderr = StringIO() def tearDown(self) -> None: sys.stdout = self.sys_stdout sys.stderr = self.sys_stderr assert self.stdout.getvalue() == '' assert self.stderr.getvalue() == '' def test_capture_bad_opt(self) -> None: """stderr should be captured when a bad option is passed.""" _, stderr, _ = mypy.api.run(['--some-bad-option']) assert isinstance(stderr, str) assert stderr != '' def test_capture_empty(self) -> None: """stderr should be captured when a bad option is passed.""" _, stderr, _ = mypy.api.run([]) 
assert isinstance(stderr, str) assert stderr != '' def test_capture_help(self) -> None: """stdout should be captured when --help is passed.""" stdout, _, _ = mypy.api.run(['--help']) assert isinstance(stdout, str) assert stdout != '' def test_capture_version(self) -> None: """stdout should be captured when --version is passed.""" stdout, _, _ = mypy.api.run(['--version']) assert isinstance(stdout, str) assert stdout != '' mypy-0.761/mypy/test/testargs.py0000644€tŠÔÚ€2›s®0000000626313576752246023112 0ustar jukkaDROPBOX\Domain Users00000000000000"""Ensure the argparse parser and Options class are in sync. In particular, verify that the argparse defaults are the same as the Options defaults, and that argparse doesn't assign any new members to the Options object it creates. """ import argparse import sys from mypy.test.helpers import Suite, assert_equal from mypy.options import Options from mypy.main import process_options, infer_python_executable class ArgSuite(Suite): def test_coherence(self) -> None: options = Options() _, parsed_options = process_options([], require_targets=False) # FIX: test this too. 
Requires changing working dir to avoid finding 'setup.cfg' options.config_file = parsed_options.config_file assert_equal(options.snapshot(), parsed_options.snapshot()) def test_executable_inference(self) -> None: """Test the --python-executable flag with --python-version""" sys_ver_str = '{ver.major}.{ver.minor}'.format(ver=sys.version_info) base = ['file.py'] # dummy file # test inference given one (infer the other) matching_version = base + ['--python-version={}'.format(sys_ver_str)] _, options = process_options(matching_version) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable matching_version = base + ['--python-executable={}'.format(sys.executable)] _, options = process_options(matching_version) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable # test inference given both matching_version = base + ['--python-version={}'.format(sys_ver_str), '--python-executable={}'.format(sys.executable)] _, options = process_options(matching_version) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable # test that --no-site-packages will disable executable inference matching_version = base + ['--python-version={}'.format(sys_ver_str), '--no-site-packages'] _, options = process_options(matching_version) assert options.python_version == sys.version_info[:2] assert options.python_executable is None # Test setting python_version/executable from config file special_opts = argparse.Namespace() special_opts.python_executable = None special_opts.python_version = None special_opts.no_executable = None # first test inferring executable from version options = Options() options.python_executable = None options.python_version = sys.version_info[:2] infer_python_executable(options, special_opts) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable # then test inferring version 
from executable options = Options() options.python_executable = sys.executable infer_python_executable(options, special_opts) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable mypy-0.761/mypy/test/testcheck.py0000644€tŠÔÚ€2›s®0000003377313576752246023241 0ustar jukkaDROPBOX\Domain Users00000000000000"""Type checker test cases""" import os import re import sys from typing import Dict, List, Set, Tuple from mypy import build from mypy.build import Graph from mypy.modulefinder import BuildSource, SearchPaths, FindModuleCache from mypy.test.config import test_temp_dir, test_data_prefix from mypy.test.data import ( DataDrivenTestCase, DataSuite, FileOperation, UpdateFile, module_from_path ) from mypy.test.helpers import ( assert_string_arrays_equal, normalize_error_messages, assert_module_equivalence, retry_on_error, update_testcase_output, parse_options, copy_and_fudge_mtime, assert_target_equivalence, check_test_output_files ) from mypy.errors import CompileError from mypy.semanal_main import core_modules # List of files that contain test case descriptions. 
typecheck_files = [ 'check-basic.test', 'check-callable.test', 'check-classes.test', 'check-statements.test', 'check-generics.test', 'check-dynamic-typing.test', 'check-inference.test', 'check-inference-context.test', 'check-kwargs.test', 'check-overloading.test', 'check-type-checks.test', 'check-abstract.test', 'check-multiple-inheritance.test', 'check-super.test', 'check-modules.test', 'check-typevar-values.test', 'check-unsupported.test', 'check-unreachable-code.test', 'check-unions.test', 'check-isinstance.test', 'check-lists.test', 'check-namedtuple.test', 'check-narrowing.test', 'check-typeddict.test', 'check-type-aliases.test', 'check-ignore.test', 'check-type-promotion.test', 'check-semanal-error.test', 'check-flags.test', 'check-incremental.test', 'check-serialize.test', 'check-bound.test', 'check-optional.test', 'check-fastparse.test', 'check-warnings.test', 'check-async-await.test', 'check-newtype.test', 'check-class-namedtuple.test', 'check-selftype.test', 'check-python2.test', 'check-columns.test', 'check-functions.test', 'check-tuples.test', 'check-expressions.test', 'check-generic-subtyping.test', 'check-varargs.test', 'check-newsyntax.test', 'check-protocols.test', 'check-underscores.test', 'check-classvar.test', 'check-enum.test', 'check-incomplete-fixture.test', 'check-custom-plugin.test', 'check-default-plugin.test', 'check-attr.test', 'check-ctypes.test', 'check-dataclasses.test', 'check-final.test', 'check-redefine.test', 'check-literal.test', 'check-newsemanal.test', 'check-inline-config.test', 'check-reports.test', 'check-errorcodes.test', 'check-annotated.test', ] # Tests that use Python 3.8-only AST features (like expression-scoped ignores): if sys.version_info >= (3, 8): typecheck_files.append('check-python38.test') # Special tests for platforms with case-insensitive filesystems. 
if sys.platform in ('darwin', 'win32'): typecheck_files.append('check-modules-case.test') class TypeCheckSuite(DataSuite): files = typecheck_files def run_case(self, testcase: DataDrivenTestCase) -> None: incremental = ('incremental' in testcase.name.lower() or 'incremental' in testcase.file or 'serialize' in testcase.file) if incremental: # Incremental tests are run once with a cold cache, once with a warm cache. # Expect success on first run, errors from testcase.output (if any) on second run. num_steps = max([2] + list(testcase.output2.keys())) # Check that there are no file changes beyond the last run (they would be ignored). for dn, dirs, files in os.walk(os.curdir): for file in files: m = re.search(r'\.([2-9])$', file) if m and int(m.group(1)) > num_steps: raise ValueError( 'Output file {} exists though test case only has {} runs'.format( file, num_steps)) steps = testcase.find_steps() for step in range(1, num_steps + 1): idx = step - 2 ops = steps[idx] if idx < len(steps) and idx >= 0 else [] self.run_case_once(testcase, ops, step) else: self.run_case_once(testcase) def run_case_once(self, testcase: DataDrivenTestCase, operations: List[FileOperation] = [], incremental_step: int = 0) -> None: original_program_text = '\n'.join(testcase.input) module_data = self.parse_module(original_program_text, incremental_step) # Unload already loaded plugins, they may be updated. for file, _ in testcase.files: module = module_from_path(file) if module.endswith('_plugin') and module in sys.modules: del sys.modules[module] if incremental_step == 0 or incremental_step == 1: # In run 1, copy program text to program file. for module_name, program_path, program_text in module_data: if module_name == '__main__': with open(program_path, 'w', encoding='utf8') as f: f.write(program_text) break elif incremental_step > 1: # In runs 2+, copy *.[num] files to * files. 
for op in operations: if isinstance(op, UpdateFile): # Modify/create file copy_and_fudge_mtime(op.source_path, op.target_path) else: # Delete file # Use retries to work around potential flakiness on Windows (AppVeyor). path = op.path retry_on_error(lambda: os.remove(path)) # Parse options after moving files (in case mypy.ini is being moved). options = parse_options(original_program_text, testcase, incremental_step) options.use_builtins_fixtures = True options.show_traceback = True # Enable some options automatically based on test file name. if 'optional' in testcase.file: options.strict_optional = True if 'columns' in testcase.file: options.show_column_numbers = True if 'errorcodes' in testcase.file: options.show_error_codes = True if incremental_step and options.incremental: # Don't overwrite # flags: --no-incremental in incremental test cases options.incremental = True else: options.incremental = False # Don't waste time writing cache unless we are specifically looking for it if not testcase.writescache: options.cache_dir = os.devnull sources = [] for module_name, program_path, program_text in module_data: # Always set to none so we're forced to reread the module in incremental mode sources.append(BuildSource(program_path, module_name, None if incremental_step else program_text)) plugin_dir = os.path.join(test_data_prefix, 'plugins') sys.path.insert(0, plugin_dir) res = None try: res = build.build(sources=sources, options=options, alt_lib_path=test_temp_dir) a = res.errors except CompileError as e: a = e.messages finally: assert sys.path[0] == plugin_dir del sys.path[0] if testcase.normalize_output: a = normalize_error_messages(a) # Make sure error messages match if incremental_step == 0: # Not incremental msg = 'Unexpected type checker output ({}, line {})' output = testcase.output elif incremental_step == 1: msg = 'Unexpected type checker output in incremental, run 1 ({}, line {})' output = testcase.output elif incremental_step > 1: msg = ('Unexpected type 
checker output in incremental, run {}'.format( incremental_step) + ' ({}, line {})') output = testcase.output2.get(incremental_step, []) else: raise AssertionError() if output != a and testcase.config.getoption('--update-data', False): update_testcase_output(testcase, a) assert_string_arrays_equal(output, a, msg.format(testcase.file, testcase.line)) if res: if options.cache_dir != os.devnull: self.verify_cache(module_data, res.errors, res.manager, res.graph) name = 'targets' if incremental_step: name += str(incremental_step + 1) expected = testcase.expected_fine_grained_targets.get(incremental_step + 1) actual = res.manager.processed_targets # Skip the initial builtin cycle. actual = [t for t in actual if not any(t.startswith(mod) for mod in core_modules + ['mypy_extensions'])] if expected is not None: assert_target_equivalence(name, expected, actual) if incremental_step > 1: suffix = '' if incremental_step == 2 else str(incremental_step - 1) expected_rechecked = testcase.expected_rechecked_modules.get(incremental_step - 1) if expected_rechecked is not None: assert_module_equivalence( 'rechecked' + suffix, expected_rechecked, res.manager.rechecked_modules) expected_stale = testcase.expected_stale_modules.get(incremental_step - 1) if expected_stale is not None: assert_module_equivalence( 'stale' + suffix, expected_stale, res.manager.stale_modules) if testcase.output_files: check_test_output_files(testcase, incremental_step, strip_prefix='tmp/') def verify_cache(self, module_data: List[Tuple[str, str, str]], a: List[str], manager: build.BuildManager, graph: Graph) -> None: # There should be valid cache metadata for each module except # for those that had an error in themselves or one of their # dependencies. 
error_paths = self.find_error_message_paths(a) busted_paths = {m.path for id, m in manager.modules.items() if graph[id].transitive_error} modules = self.find_module_files(manager) modules.update({module_name: path for module_name, path, text in module_data}) missing_paths = self.find_missing_cache_files(modules, manager) # We would like to assert error_paths.issubset(busted_paths) # but this runs into trouble because while some 'notes' are # really errors that cause an error to be marked, many are # just notes attached to other errors. assert error_paths or not busted_paths, "Some modules reported error despite no errors" if not missing_paths == busted_paths: raise AssertionError("cache data discrepancy %s != %s" % (missing_paths, busted_paths)) def find_error_message_paths(self, a: List[str]) -> Set[str]: hits = set() for line in a: m = re.match(r'([^\s:]+):(\d+:)?(\d+:)? (error|warning|note):', line) if m: p = m.group(1) hits.add(p) return hits def find_module_files(self, manager: build.BuildManager) -> Dict[str, str]: modules = {} for id, module in manager.modules.items(): modules[id] = module.path return modules def find_missing_cache_files(self, modules: Dict[str, str], manager: build.BuildManager) -> Set[str]: ignore_errors = True missing = {} for id, path in modules.items(): meta = build.find_cache_meta(id, path, manager) if not build.validate_meta(meta, id, path, ignore_errors, manager): missing[id] = path return set(missing.values()) def parse_module(self, program_text: str, incremental_step: int = 0) -> List[Tuple[str, str, str]]: """Return the module and program names for a test case. Normally, the unit tests will parse the default ('__main__') module and follow all the imports listed there. You can override this behavior and instruct the tests to check multiple modules by using a comment like this in the test case input: # cmd: mypy -m foo.bar foo.baz You can also use `# cmdN:` to have a different cmd for incremental step N (2, 3, ...). 
Return a list of tuples (module name, file name, program text). """ m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE) if incremental_step > 1: alt_regex = '# cmd{}: mypy -m ([a-zA-Z0-9_. ]+)$'.format(incremental_step) alt_m = re.search(alt_regex, program_text, flags=re.MULTILINE) if alt_m is not None: # Optionally return a different command if in a later step # of incremental mode, otherwise default to reusing the # original cmd. m = alt_m if m: # The test case wants to use a non-default main # module. Look up the module and give it as the thing to # analyze. module_names = m.group(1) out = [] search_paths = SearchPaths((test_temp_dir,), (), (), ()) cache = FindModuleCache(search_paths) for module_name in module_names.split(' '): path = cache.find_module(module_name) assert path is not None, "Can't find ad hoc case file" with open(path, encoding='utf8') as f: program_text = f.read() out.append((module_name, path, program_text)) return out else: return [('__main__', 'main', program_text)] mypy-0.761/mypy/test/testcmdline.py0000644€tŠÔÚ€2›s®0000000754413576752246023574 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for the command line. To begin we test that "mypy [/]" always recurses down the whole tree. """ import os import re import subprocess import sys from typing import List from mypy.test.config import test_temp_dir, PREFIX from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import ( assert_string_arrays_equal, normalize_error_messages, check_test_output_files ) # Path to Python 3 interpreter python3_path = sys.executable # Files containing test case descriptions. 
cmdline_files = [ 'cmdline.test', 'reports.test', ] class PythonCmdlineSuite(DataSuite): files = cmdline_files native_sep = True def run_case(self, testcase: DataDrivenTestCase) -> None: for step in [1] + sorted(testcase.output2): test_python_cmdline(testcase, step) def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: assert testcase.old_cwd is not None, "test was not properly set up" # Write the program to a file. program = '_program.py' program_path = os.path.join(test_temp_dir, program) with open(program_path, 'w', encoding='utf8') as file: for s in testcase.input: file.write('{}\n'.format(s)) args = parse_args(testcase.input[0]) args.append('--show-traceback') args.append('--no-site-packages') if '--error-summary' not in args: args.append('--no-error-summary') # Type check the program. fixed = [python3_path, '-m', 'mypy'] env = os.environ.copy() env['PYTHONPATH'] = PREFIX process = subprocess.Popen(fixed + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=test_temp_dir, env=env) outb, errb = process.communicate() result = process.returncode # Split output into lines. out = [s.rstrip('\n\r') for s in str(outb, 'utf8').splitlines()] err = [s.rstrip('\n\r') for s in str(errb, 'utf8').splitlines()] if "PYCHARM_HOSTED" in os.environ: for pos, line in enumerate(err): if line.startswith('pydev debugger: '): # Delete the attaching debugger message itself, plus the extra newline added. del err[pos:pos + 2] break # Remove temp file. os.remove(program_path) # Compare actual output to expected. if testcase.output_files: # Ignore stdout, but we insist on empty stderr and zero status. 
if err or result: raise AssertionError( 'Expected zero status and empty stderr%s, got %d and\n%s' % (' on step %d' % step if testcase.output2 else '', result, '\n'.join(err + out))) check_test_output_files(testcase, step) else: if testcase.normalize_output: out = normalize_error_messages(err + out) obvious_result = 1 if out else 0 if obvious_result != result: out.append('== Return code: {}'.format(result)) expected_out = testcase.output if step == 1 else testcase.output2[step] # Strip "tmp/" out of the test so that # E: works... expected_out = [s.replace("tmp" + os.sep, "") for s in expected_out] assert_string_arrays_equal(expected_out, out, 'Invalid output ({}, line {}){}'.format( testcase.file, testcase.line, ' on step %d' % step if testcase.output2 else '')) def parse_args(line: str) -> List[str]: """Parse the first line of the program for the command line. This should have the form # cmd: mypy For example: # cmd: mypy pkg/ """ m = re.match('# cmd: mypy (.*)$', line) if not m: return [] # No args; mypy will spit out an error. return m.group(1).split() mypy-0.761/mypy/test/testdaemon.py0000644€tŠÔÚ€2›s®0000000545613576752246023424 0ustar jukkaDROPBOX\Domain Users00000000000000"""End-to-end test cases for the daemon (dmypy). These are special because they run multiple shell commands. """ import os import subprocess import sys from typing import List, Tuple from mypy.test.config import test_temp_dir, PREFIX from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages # Files containing test cases descriptions. daemon_files = [ 'daemon.test', ] class DaemonSuite(DataSuite): files = daemon_files def run_case(self, testcase: DataDrivenTestCase) -> None: try: test_daemon(testcase) finally: # Kill the daemon if it's still running. 
run_cmd('dmypy kill') def test_daemon(testcase: DataDrivenTestCase) -> None: assert testcase.old_cwd is not None, "test was not properly set up" for i, step in enumerate(parse_script(testcase.input)): cmd = step[0] expected_lines = step[1:] assert cmd.startswith('$') cmd = cmd[1:].strip() cmd = cmd.replace('{python}', sys.executable) sts, output = run_cmd(cmd) output_lines = output.splitlines() output_lines = normalize_error_messages(output_lines) if sts: output_lines.append('== Return code: %d' % sts) assert_string_arrays_equal(expected_lines, output_lines, "Command %d (%s) did not give expected output" % (i + 1, cmd)) def parse_script(input: List[str]) -> List[List[str]]: """Parse testcase.input into steps. Each command starts with a line starting with '$'. The first line (less '$') is sent to the shell. The remaining lines are expected output. """ steps = [] step = [] # type: List[str] for line in input: if line.startswith('$'): if step: assert step[0].startswith('$') steps.append(step) step = [] step.append(line) if step: steps.append(step) return steps def run_cmd(input: str) -> Tuple[int, str]: if input.startswith('dmypy '): input = sys.executable + ' -m mypy.' 
+ input if input.startswith('mypy '): input = sys.executable + ' -m' + input env = os.environ.copy() env['PYTHONPATH'] = PREFIX try: output = subprocess.check_output(input, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, cwd=test_temp_dir, env=env) return 0, output except subprocess.CalledProcessError as err: return err.returncode, err.output mypy-0.761/mypy/test/testdeps.py0000644€tŠÔÚ€2›s®0000000755713576752246023120 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for generating node-level dependencies (for fine-grained incremental checking)""" import os from collections import defaultdict from typing import List, Tuple, Dict, Optional, Set from typing_extensions import DefaultDict from mypy import build, defaults from mypy.modulefinder import BuildSource from mypy.errors import CompileError from mypy.nodes import MypyFile, Expression from mypy.options import Options from mypy.server.deps import get_dependencies from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, parse_options from mypy.types import Type from mypy.typestate import TypeState # Only dependencies in these modules are dumped dumped_modules = ['__main__', 'pkg', 'pkg.mod'] class GetDependenciesSuite(DataSuite): files = [ 'deps.test', 'deps-types.test', 'deps-generics.test', 'deps-expressions.test', 'deps-statements.test', 'deps-classes.test', ] def run_case(self, testcase: DataDrivenTestCase) -> None: src = '\n'.join(testcase.input) dump_all = '# __dump_all__' in src if testcase.name.endswith('python2'): python_version = defaults.PYTHON2_VERSION else: python_version = defaults.PYTHON3_VERSION options = parse_options(src, testcase, incremental_step=1) options.use_builtins_fixtures = True options.show_traceback = True options.cache_dir = os.devnull options.python_version = python_version options.export_types = True options.preserve_asts = True messages, files, type_map = 
self.build(src, options) a = messages if files is None or type_map is None: if not a: a = ['Unknown compile error (likely syntax error in test case or fixture)'] else: deps = defaultdict(set) # type: DefaultDict[str, Set[str]] for module in files: if module in dumped_modules or dump_all and module not in ('abc', 'typing', 'mypy_extensions', 'typing_extensions', 'enum'): new_deps = get_dependencies(files[module], type_map, python_version, options) for source in new_deps: deps[source].update(new_deps[source]) TypeState.add_all_protocol_deps(deps) for source, targets in sorted(deps.items()): if source.startswith((' Tuple[List[str], Optional[Dict[str, MypyFile]], Optional[Dict[Expression, Type]]]: try: result = build.build(sources=[BuildSource('main', None, source)], options=options, alt_lib_path=test_temp_dir) except CompileError as e: # TODO: Should perhaps not return None here. return e.messages, None, None return result.errors, result.files, result.types mypy-0.761/mypy/test/testdiff.py0000644€tŠÔÚ€2›s®0000000476013576752246023066 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for AST diff (used for fine-grained incremental checking)""" import os from typing import List, Tuple, Dict, Optional from mypy import build from mypy.modulefinder import BuildSource from mypy.defaults import PYTHON3_VERSION from mypy.errors import CompileError from mypy.nodes import MypyFile from mypy.options import Options from mypy.server.astdiff import snapshot_symbol_table, compare_symbol_table_snapshots from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, parse_options class ASTDiffSuite(DataSuite): files = ['diff.test'] def run_case(self, testcase: DataDrivenTestCase) -> None: first_src = '\n'.join(testcase.input) files_dict = dict(testcase.files) second_src = files_dict['tmp/next.py'] options = parse_options(first_src, testcase, 1) messages1, files1 = 
self.build(first_src, options) messages2, files2 = self.build(second_src, options) a = [] if messages1: a.extend(messages1) if messages2: a.append('== next ==') a.extend(messages2) assert files1 is not None and files2 is not None, ('cases where CompileError' ' occurred should not be run') prefix = '__main__' snapshot1 = snapshot_symbol_table(prefix, files1['__main__'].names) snapshot2 = snapshot_symbol_table(prefix, files2['__main__'].names) diff = compare_symbol_table_snapshots(prefix, snapshot1, snapshot2) for trigger in sorted(diff): a.append(trigger) assert_string_arrays_equal( testcase.output, a, 'Invalid output ({}, line {})'.format(testcase.file, testcase.line)) def build(self, source: str, options: Options) -> Tuple[List[str], Optional[Dict[str, MypyFile]]]: options.use_builtins_fixtures = True options.show_traceback = True options.cache_dir = os.devnull options.python_version = PYTHON3_VERSION try: result = build.build(sources=[BuildSource('main', None, source)], options=options, alt_lib_path=test_temp_dir) except CompileError as e: # TODO: Is it okay to return None? return e.messages, None return result.errors, result.files mypy-0.761/mypy/test/testerrorstream.py0000644€tŠÔÚ€2›s®0000000271213576752246024516 0ustar jukkaDROPBOX\Domain Users00000000000000"""Tests for mypy incremental error output.""" from typing import List from mypy import build from mypy.test.helpers import assert_string_arrays_equal from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.modulefinder import BuildSource from mypy.errors import CompileError from mypy.options import Options class ErrorStreamSuite(DataSuite): required_out_section = True base_path = '.' files = ['errorstream.test'] def run_case(self, testcase: DataDrivenTestCase) -> None: test_error_stream(testcase) def test_error_stream(testcase: DataDrivenTestCase) -> None: """Perform a single error streaming test case. The argument contains the description of the test case. 
""" options = Options() options.show_traceback = True logged_messages = [] # type: List[str] def flush_errors(msgs: List[str], serious: bool) -> None: if msgs: logged_messages.append('==== Errors flushed ====') logged_messages.extend(msgs) sources = [BuildSource('main', '__main__', '\n'.join(testcase.input))] try: build.build(sources=sources, options=options, flush_errors=flush_errors) except CompileError as e: assert e.messages == [] assert_string_arrays_equal(testcase.output, logged_messages, 'Invalid output ({}, line {})'.format( testcase.file, testcase.line)) mypy-0.761/mypy/test/testfinegrained.py0000644€tŠÔÚ€2›s®0000003057113576752246024430 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for fine-grained incremental checking. Each test cases runs a batch build followed by one or more fine-grained incremental steps. We verify that each step produces the expected output. See the comment at the top of test-data/unit/fine-grained.test for more information. N.B.: Unlike most of the other test suites, testfinegrained does not rely on an alt_lib_path for finding source files. This means that they can test interactions with the lib_path that is built implicitly based on specified sources. 
""" import os import re from typing import List, Dict, Any, Tuple, cast from mypy import build from mypy.modulefinder import BuildSource from mypy.errors import CompileError from mypy.options import Options from mypy.test.config import test_temp_dir from mypy.test.data import ( DataDrivenTestCase, DataSuite, UpdateFile ) from mypy.test.helpers import ( assert_string_arrays_equal, parse_options, copy_and_fudge_mtime, assert_module_equivalence, assert_target_equivalence ) from mypy.server.mergecheck import check_consistency from mypy.dmypy_util import DEFAULT_STATUS_FILE from mypy.dmypy_server import Server from mypy.config_parser import parse_config_file from mypy.find_sources import create_source_list import pytest # type: ignore # no pytest in typeshed # Set to True to perform (somewhat expensive) checks for duplicate AST nodes after merge CHECK_CONSISTENCY = False class FineGrainedSuite(DataSuite): files = [ 'fine-grained.test', 'fine-grained-cycles.test', 'fine-grained-blockers.test', 'fine-grained-modules.test', 'fine-grained-suggest.test', ] # Whether to use the fine-grained cache in the testing. This is overridden # by a trivial subclass to produce a suite that uses the cache. use_cache = False # Decide whether to skip the test. This could have been structured # as a filter() classmethod also, but we want the tests reported # as skipped, not just elided. def should_skip(self, testcase: DataDrivenTestCase) -> bool: if self.use_cache: if testcase.only_when == '-only_when_nocache': return True # TODO: In caching mode we currently don't well support # starting from cached states with errors in them. 
if testcase.output and testcase.output[0] != '==': return True else: if testcase.only_when == '-only_when_cache': return True return False def run_case(self, testcase: DataDrivenTestCase) -> None: if self.should_skip(testcase): pytest.skip() return main_src = '\n'.join(testcase.input) main_path = os.path.join(test_temp_dir, 'main') with open(main_path, 'w', encoding='utf8') as f: f.write(main_src) options = self.get_options(main_src, testcase, build_cache=False) build_options = self.get_options(main_src, testcase, build_cache=True) server = Server(options, DEFAULT_STATUS_FILE) num_regular_incremental_steps = self.get_build_steps(main_src) step = 1 sources = self.parse_sources(main_src, step, options) if step <= num_regular_incremental_steps: messages = self.build(build_options, sources) else: messages = self.run_check(server, sources) a = [] if messages: a.extend(normalize_messages(messages)) assert testcase.tmpdir a.extend(self.maybe_suggest(step, server, main_src, testcase.tmpdir.name)) if server.fine_grained_manager: if CHECK_CONSISTENCY: check_consistency(server.fine_grained_manager) steps = testcase.find_steps() all_triggered = [] for operations in steps: step += 1 for op in operations: if isinstance(op, UpdateFile): # Modify/create file copy_and_fudge_mtime(op.source_path, op.target_path) else: # Delete file os.remove(op.path) sources = self.parse_sources(main_src, step, options) if step <= num_regular_incremental_steps: new_messages = self.build(build_options, sources) else: new_messages = self.run_check(server, sources) updated = [] # type: List[str] changed = [] # type: List[str] targets = [] # type: List[str] if server.fine_grained_manager: if CHECK_CONSISTENCY: check_consistency(server.fine_grained_manager) all_triggered.append(server.fine_grained_manager.triggered) updated = server.fine_grained_manager.updated_modules changed = [mod for mod, file in server.fine_grained_manager.changed_modules] targets = server.fine_grained_manager.processed_targets 
expected_stale = testcase.expected_stale_modules.get(step - 1) if expected_stale is not None: assert_module_equivalence( 'stale' + str(step - 1), expected_stale, changed) expected_rechecked = testcase.expected_rechecked_modules.get(step - 1) if expected_rechecked is not None: assert_module_equivalence( 'rechecked' + str(step - 1), expected_rechecked, updated) expected = testcase.expected_fine_grained_targets.get(step) if expected: assert_target_equivalence( 'targets' + str(step), expected, targets) new_messages = normalize_messages(new_messages) a.append('==') a.extend(new_messages) assert testcase.tmpdir a.extend(self.maybe_suggest(step, server, main_src, testcase.tmpdir.name)) # Normalize paths in test output (for Windows). a = [line.replace('\\', '/') for line in a] assert_string_arrays_equal( testcase.output, a, 'Invalid output ({}, line {})'.format( testcase.file, testcase.line)) if testcase.triggered: assert_string_arrays_equal( testcase.triggered, self.format_triggered(all_triggered), 'Invalid active triggers ({}, line {})'.format(testcase.file, testcase.line)) def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bool) -> Options: # This handles things like '# flags: --foo'. 
options = parse_options(source, testcase, incremental_step=1) options.incremental = True options.use_builtins_fixtures = True options.show_traceback = True options.error_summary = False options.fine_grained_incremental = not build_cache options.use_fine_grained_cache = self.use_cache and not build_cache options.cache_fine_grained = self.use_cache options.local_partial_types = True if options.follow_imports == 'normal': options.follow_imports = 'error' for name, _ in testcase.files: if 'mypy.ini' in name: parse_config_file(options, name) break return options def run_check(self, server: Server, sources: List[BuildSource]) -> List[str]: response = server.check(sources, is_tty=False, terminal_width=-1) out = cast(str, response['out'] or response['err']) return out.splitlines() def build(self, options: Options, sources: List[BuildSource]) -> List[str]: try: result = build.build(sources=sources, options=options) except CompileError as e: return e.messages return result.errors def format_triggered(self, triggered: List[List[str]]) -> List[str]: result = [] for n, triggers in enumerate(triggered): filtered = [trigger for trigger in triggers if not trigger.endswith('__>')] filtered = sorted(filtered) result.append(('%d: %s' % (n + 2, ', '.join(filtered))).strip()) return result def get_build_steps(self, program_text: str) -> int: """Get the number of regular incremental steps to run, from the test source""" if not self.use_cache: return 0 m = re.search('# num_build_steps: ([0-9]+)$', program_text, flags=re.MULTILINE) if m is not None: return int(m.group(1)) return 1 def parse_sources(self, program_text: str, incremental_step: int, options: Options) -> List[BuildSource]: """Return target BuildSources for a test case. Normally, the unit tests will check all files included in the test case. This differs from how testcheck works by default, as dmypy doesn't currently support following imports. 
You can override this behavior and instruct the tests to check multiple modules by using a comment like this in the test case input: # cmd: main a.py You can also use `# cmdN:` to have a different cmd for incremental step N (2, 3, ...). """ m = re.search('# cmd: mypy ([a-zA-Z0-9_./ ]+)$', program_text, flags=re.MULTILINE) regex = '# cmd{}: mypy ([a-zA-Z0-9_./ ]+)$'.format(incremental_step) alt_m = re.search(regex, program_text, flags=re.MULTILINE) if alt_m is not None: # Optionally return a different command if in a later step # of incremental mode, otherwise default to reusing the # original cmd. m = alt_m if m: # The test case wants to use a non-default set of files. paths = [os.path.join(test_temp_dir, path) for path in m.group(1).strip().split()] return create_source_list(paths, options) else: base = BuildSource(os.path.join(test_temp_dir, 'main'), '__main__', None) # Use expand_dir instead of create_source_list to avoid complaints # when there aren't any .py files in an increment return [base] + create_source_list([test_temp_dir], options, allow_empty_dir=True) def maybe_suggest(self, step: int, server: Server, src: str, tmp_dir: str) -> List[str]: output = [] # type: List[str] targets = self.get_suggest(src, step) for flags, target in targets: json = '--json' in flags callsites = '--callsites' in flags no_any = '--no-any' in flags no_errors = '--no-errors' in flags try_text = '--try-text' in flags m = re.match('--flex-any=([0-9.]+)', flags) flex_any = float(m.group(1)) if m else None m = re.match(r'--use-fixme=(\w+)', flags) use_fixme = m.group(1) if m else None m = re.match('--max-guesses=([0-9]+)', flags) max_guesses = int(m.group(1)) if m else None res = cast(Dict[str, Any], server.cmd_suggest( target.strip(), json=json, no_any=no_any, no_errors=no_errors, try_text=try_text, flex_any=flex_any, use_fixme=use_fixme, callsites=callsites, max_guesses=max_guesses)) val = res['error'] if 'error' in res else res['out'] + res['err'] if json: # JSON contains 
already escaped \ on Windows, so requires a bit of care. val = val.replace('\\\\', '\\') val = val.replace(os.path.realpath(tmp_dir) + os.path.sep, '') output.extend(val.strip().split('\n')) return normalize_messages(output) def get_suggest(self, program_text: str, incremental_step: int) -> List[Tuple[str, str]]: step_bit = '1?' if incremental_step == 1 else str(incremental_step) regex = '# suggest{}: (--[a-zA-Z0-9_\\-./=?^ ]+ )*([a-zA-Z0-9_.:/?^ ]+)$'.format(step_bit) m = re.findall(regex, program_text, flags=re.MULTILINE) return m def normalize_messages(messages: List[str]) -> List[str]: return [re.sub('^tmp' + re.escape(os.sep), '', message) for message in messages] mypy-0.761/mypy/test/testfinegrainedcache.py0000644€tŠÔÚ€2›s®0000000103513576752246025405 0ustar jukkaDROPBOX\Domain Users00000000000000"""Tests for fine-grained incremental checking using the cache. All of the real code for this lives in testfinegrained.py. """ # We can't "import FineGrainedSuite from ..." because that will cause pytest # to collect the non-caching tests when running this file. import mypy.test.testfinegrained class FineGrainedCacheSuite(mypy.test.testfinegrained.FineGrainedSuite): use_cache = True test_name_suffix = '_cached' files = ( mypy.test.testfinegrained.FineGrainedSuite.files + ['fine-grained-cache-incremental.test']) mypy-0.761/mypy/test/testformatter.py0000644€tŠÔÚ€2›s®0000000510313576752246024151 0ustar jukkaDROPBOX\Domain Users00000000000000from unittest import TestCase, main from mypy.util import trim_source_line, split_words class FancyErrorFormattingTestCases(TestCase): def test_trim_source(self) -> None: assert trim_source_line('0123456789abcdef', max_len=16, col=5, min_width=2) == ('0123456789abcdef', 0) # Locations near start. assert trim_source_line('0123456789abcdef', max_len=7, col=0, min_width=2) == ('0123456...', 0) assert trim_source_line('0123456789abcdef', max_len=7, col=4, min_width=2) == ('0123456...', 0) # Middle locations. 
assert trim_source_line('0123456789abcdef', max_len=7, col=5, min_width=2) == ('...1234567...', -2) assert trim_source_line('0123456789abcdef', max_len=7, col=6, min_width=2) == ('...2345678...', -1) assert trim_source_line('0123456789abcdef', max_len=7, col=8, min_width=2) == ('...456789a...', 1) # Locations near the end. assert trim_source_line('0123456789abcdef', max_len=7, col=11, min_width=2) == ('...789abcd...', 4) assert trim_source_line('0123456789abcdef', max_len=7, col=13, min_width=2) == ('...9abcdef', 6) assert trim_source_line('0123456789abcdef', max_len=7, col=15, min_width=2) == ('...9abcdef', 6) def test_split_words(self) -> None: assert split_words('Simple message') == ['Simple', 'message'] assert split_words('Message with "Some[Long, Types]"' ' in it') == ['Message', 'with', '"Some[Long, Types]"', 'in', 'it'] assert split_words('Message with "Some[Long, Types]"' ' and [error-code]') == ['Message', 'with', '"Some[Long, Types]"', 'and', '[error-code]'] assert split_words('"Type[Stands, First]" then words') == ['"Type[Stands, First]"', 'then', 'words'] assert split_words('First words "Then[Stands, Type]"') == ['First', 'words', '"Then[Stands, Type]"'] assert split_words('"Type[Only, Here]"') == ['"Type[Only, Here]"'] assert split_words('OneWord') == ['OneWord'] assert split_words(' ') == ['', ''] if __name__ == '__main__': main() mypy-0.761/mypy/test/testgraph.py0000644€tŠÔÚ€2›s®0000000613513576752246023255 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for graph processing code in build.py.""" import sys from typing import AbstractSet, Dict, Set, List from mypy.test.helpers import assert_equal, Suite from mypy.build import BuildManager, State, BuildSourceSet from mypy.modulefinder import SearchPaths from mypy.build import topsort, strongly_connected_components, sorted_components, order_ascc from mypy.version import __version__ from mypy.options import Options from mypy.report import Reports from mypy.plugin import Plugin from 
mypy.errors import Errors from mypy.fscache import FileSystemCache class GraphSuite(Suite): def test_topsort(self) -> None: a = frozenset({'A'}) b = frozenset({'B'}) c = frozenset({'C'}) d = frozenset({'D'}) data = {a: {b, c}, b: {d}, c: {d}} # type: Dict[AbstractSet[str], Set[AbstractSet[str]]] res = list(topsort(data)) assert_equal(res, [{d}, {b, c}, {a}]) def test_scc(self) -> None: vertices = {'A', 'B', 'C', 'D'} edges = {'A': ['B', 'C'], 'B': ['C'], 'C': ['B', 'D'], 'D': []} # type: Dict[str, List[str]] sccs = set(frozenset(x) for x in strongly_connected_components(vertices, edges)) assert_equal(sccs, {frozenset({'A'}), frozenset({'B', 'C'}), frozenset({'D'})}) def _make_manager(self) -> BuildManager: errors = Errors() options = Options() fscache = FileSystemCache() search_paths = SearchPaths((), (), (), ()) manager = BuildManager( data_dir='', search_paths=search_paths, ignore_prefix='', source_set=BuildSourceSet([]), reports=Reports('', {}), options=options, version_id=__version__, plugin=Plugin(options), plugins_snapshot={}, errors=errors, flush_errors=lambda msgs, serious: None, fscache=fscache, stdout=sys.stdout, stderr=sys.stderr, ) return manager def test_sorted_components(self) -> None: manager = self._make_manager() graph = {'a': State('a', None, 'import b, c', manager), 'd': State('d', None, 'pass', manager), 'b': State('b', None, 'import c', manager), 'c': State('c', None, 'import b, d', manager), } res = sorted_components(graph) assert_equal(res, [frozenset({'d'}), frozenset({'c', 'b'}), frozenset({'a'})]) def test_order_ascc(self) -> None: manager = self._make_manager() graph = {'a': State('a', None, 'import b, c', manager), 'd': State('d', None, 'def f(): import a', manager), 'b': State('b', None, 'import c', manager), 'c': State('c', None, 'import b, d', manager), } res = sorted_components(graph) assert_equal(res, [frozenset({'a', 'd', 'c', 'b'})]) ascc = res[0] scc = order_ascc(graph, ascc) assert_equal(scc, ['d', 'c', 'b', 'a']) 
mypy-0.761/mypy/test/testinfer.py0000644€tŠÔÚ€2›s®0000001571413576752246023262 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for type inference helper functions.""" from typing import List, Optional, Tuple, Union from mypy.test.helpers import Suite, assert_equal from mypy.argmap import map_actuals_to_formals from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED from mypy.types import AnyType, TupleType, Type, TypeOfAny from mypy.test.typefixture import TypeFixture class MapActualsToFormalsSuite(Suite): """Test cases for checkexpr.map_actuals_to_formals.""" def test_basic(self) -> None: self.assert_map([], [], []) def test_positional_only(self) -> None: self.assert_map([ARG_POS], [ARG_POS], [[0]]) self.assert_map([ARG_POS, ARG_POS], [ARG_POS, ARG_POS], [[0], [1]]) def test_optional(self) -> None: self.assert_map([], [ARG_OPT], [[]]) self.assert_map([ARG_POS], [ARG_OPT], [[0]]) self.assert_map([ARG_POS], [ARG_OPT, ARG_OPT], [[0], []]) def test_callee_star(self) -> None: self.assert_map([], [ARG_STAR], [[]]) self.assert_map([ARG_POS], [ARG_STAR], [[0]]) self.assert_map([ARG_POS, ARG_POS], [ARG_STAR], [[0, 1]]) def test_caller_star(self) -> None: self.assert_map([ARG_STAR], [ARG_STAR], [[0]]) self.assert_map([ARG_POS, ARG_STAR], [ARG_STAR], [[0, 1]]) self.assert_map([ARG_STAR], [ARG_POS, ARG_STAR], [[0], [0]]) self.assert_map([ARG_STAR], [ARG_OPT, ARG_STAR], [[0], [0]]) def test_too_many_caller_args(self) -> None: self.assert_map([ARG_POS], [], []) self.assert_map([ARG_STAR], [], []) self.assert_map([ARG_STAR], [ARG_POS], [[0]]) def test_tuple_star(self) -> None: any_type = AnyType(TypeOfAny.special_form) self.assert_vararg_map( [ARG_STAR], [ARG_POS], [[0]], self.tuple(any_type)) self.assert_vararg_map( [ARG_STAR], [ARG_POS, ARG_POS], [[0], [0]], self.tuple(any_type, any_type)) self.assert_vararg_map( [ARG_STAR], [ARG_POS, ARG_OPT, ARG_OPT], [[0], [0], []], self.tuple(any_type, any_type)) def tuple(self, *args: Type) -> TupleType: return 
TupleType(list(args), TypeFixture().std_tuple) def test_named_args(self) -> None: self.assert_map( ['x'], [(ARG_POS, 'x')], [[0]]) self.assert_map( ['y', 'x'], [(ARG_POS, 'x'), (ARG_POS, 'y')], [[1], [0]]) def test_some_named_args(self) -> None: self.assert_map( ['y'], [(ARG_OPT, 'x'), (ARG_OPT, 'y'), (ARG_OPT, 'z')], [[], [0], []]) def test_missing_named_arg(self) -> None: self.assert_map( ['y'], [(ARG_OPT, 'x')], [[]]) def test_duplicate_named_arg(self) -> None: self.assert_map( ['x', 'x'], [(ARG_OPT, 'x')], [[0, 1]]) def test_varargs_and_bare_asterisk(self) -> None: self.assert_map( [ARG_STAR], [ARG_STAR, (ARG_NAMED, 'x')], [[0], []]) self.assert_map( [ARG_STAR, 'x'], [ARG_STAR, (ARG_NAMED, 'x')], [[0], [1]]) def test_keyword_varargs(self) -> None: self.assert_map( ['x'], [ARG_STAR2], [[0]]) self.assert_map( ['x', ARG_STAR2], [ARG_STAR2], [[0, 1]]) self.assert_map( ['x', ARG_STAR2], [(ARG_POS, 'x'), ARG_STAR2], [[0], [1]]) self.assert_map( [ARG_POS, ARG_STAR2], [(ARG_POS, 'x'), ARG_STAR2], [[0], [1]]) def test_both_kinds_of_varargs(self) -> None: self.assert_map( [ARG_STAR, ARG_STAR2], [(ARG_POS, 'x'), (ARG_POS, 'y')], [[0, 1], [0, 1]]) def test_special_cases(self) -> None: self.assert_map([ARG_STAR], [ARG_STAR, ARG_STAR2], [[0], []]) self.assert_map([ARG_STAR, ARG_STAR2], [ARG_STAR, ARG_STAR2], [[0], [1]]) self.assert_map([ARG_STAR2], [(ARG_POS, 'x'), ARG_STAR2], [[0], [0]]) self.assert_map([ARG_STAR2], [ARG_STAR2], [[0]]) def assert_map(self, caller_kinds_: List[Union[int, str]], callee_kinds_: List[Union[int, Tuple[int, str]]], expected: List[List[int]], ) -> None: caller_kinds, caller_names = expand_caller_kinds(caller_kinds_) callee_kinds, callee_names = expand_callee_kinds(callee_kinds_) result = map_actuals_to_formals( caller_kinds, caller_names, callee_kinds, callee_names, lambda i: AnyType(TypeOfAny.special_form)) assert_equal(result, expected) def assert_vararg_map(self, caller_kinds: List[int], callee_kinds: List[int], expected: List[List[int]], 
def expand_callee_kinds(kinds_and_names: List[Union[int, Tuple[int, str]]]
                        ) -> Tuple[List[int], List[Optional[str]]]:
    """Split a mixed list of kinds and (kind, name) pairs into parallel lists.

    Bare ints are unnamed formals (name None); tuples carry an explicit name.
    """
    kinds = []  # type: List[int]
    names = []  # type: List[Optional[str]]
    for entry in kinds_and_names:
        if isinstance(entry, tuple):
            kind, name = entry
        else:
            kind, name = entry, None
        kinds.append(kind)
        names.append(name)
    return kinds, names
    # Run test_connect_twice a lot, in the hopes of finding issues.
    # This is really slow, so it is skipped, but can be enabled if
    # needed to debug IPC issues.
    @pytest.mark.skip
    def test_connect_alot(self) -> None:
        """Stress test: run test_connect_twice 1000 times, timing each round."""
        t0 = time.time()
        for i in range(1000):
            try:
                print(i, 'start')
                self.test_connect_twice()
            finally:
                # Print per-iteration timing even when a round fails, so the
                # failing iteration is identifiable in the output.
                t1 = time.time()
                print(i, t1 - t0)
                sys.stdout.flush()  # make progress visible immediately
                t0 = t1
    def setup(self) -> None:
        """Initialize the AST/type stringifiers shared by the dump methods."""
        super().setup()
        # show_ids=True makes StrConv carry an IdMapper, so AST nodes get
        # stable numeric ids in the dumps (needed to check node identity
        # across the merge).
        self.str_conv = StrConv(show_ids=True)
        assert self.str_conv.id_mapper is not None
        self.id_mapper = self.str_conv.id_mapper  # type: IdMapper
        # Types are rendered through the same id mapper so ids in type dumps
        # match ids in AST dumps.
        self.type_str_conv = TypeStrVisitor(self.id_mapper)
    def build(self, source: str, testcase: DataDrivenTestCase) -> Optional[BuildResult]:
        """Type check `source` as module 'main' in fine-grained mode.

        Returns the BuildResult, or None if a blocking CompileError occurred.
        """
        options = parse_options(source, testcase, incremental_step=1)
        # Fine-grained incremental mode with exported types is what the
        # merge machinery under test operates on.
        options.incremental = True
        options.fine_grained_incremental = True
        options.use_builtins_fixtures = True
        options.export_types = True
        options.show_traceback = True
        options.python_version = PYTHON3_VERSION
        main_path = os.path.join(test_temp_dir, 'main')
        with open(main_path, 'w', encoding='utf8') as f:
            f.write(source)
        try:
            result = build.build(sources=[BuildSource(main_path, None, None)],
                                 options=options,
                                 alt_lib_path=test_temp_dir)
        except CompileError:
            # TODO: Is it okay to return None?
            return None
        return result
continue s = modules[m].accept(self.str_conv) a.extend(s.splitlines()) return a def dump_symbol_tables(self, modules: Dict[str, MypyFile]) -> List[str]: a = [] for id in sorted(modules): if not is_dumped_module(id): # We don't support incremental checking of changes to builtins, etc. continue a.extend(self.dump_symbol_table(id, modules[id].names)) return a def dump_symbol_table(self, module_id: str, symtable: SymbolTable) -> List[str]: a = ['{}:'.format(module_id)] for name in sorted(symtable): if name.startswith('__'): continue a.append(' {}: {}'.format(name, self.format_symbol_table_node(symtable[name]))) return a def format_symbol_table_node(self, node: SymbolTableNode) -> str: if node.node is None: if node.kind == UNBOUND_IMPORTED: return 'UNBOUND_IMPORTED' return 'None' if isinstance(node.node, Node): s = '{}<{}>'.format(str(type(node.node).__name__), self.id_mapper.id(node.node)) else: s = '? ({})'.format(type(node.node)) if (isinstance(node.node, Var) and node.node.type and not node.node.fullname.startswith('typing.')): typestr = self.format_type(node.node.type) s += '({})'.format(typestr) return s def dump_typeinfos(self, modules: Dict[str, MypyFile]) -> List[str]: a = [] for id in sorted(modules): if not is_dumped_module(id): continue a.extend(self.dump_typeinfos_recursive(modules[id].names)) return a def dump_typeinfos_recursive(self, names: SymbolTable) -> List[str]: a = [] for name, node in sorted(names.items(), key=lambda x: x[0]): if isinstance(node.node, TypeInfo): a.extend(self.dump_typeinfo(node.node)) a.extend(self.dump_typeinfos_recursive(node.node.names)) return a def dump_typeinfo(self, info: TypeInfo) -> List[str]: if info.fullname == 'enum.Enum': # Avoid noise return [] s = info.dump(str_conv=self.str_conv, type_str_conv=self.type_str_conv) return s.splitlines() def dump_types(self, manager: FineGrainedBuildManager) -> List[str]: a = [] # To make the results repeatable, we try to generate unique and # deterministic sort keys. 
def is_dumped_module(id: str) -> bool:
    """Return True if dumps should include module `id`.

    Excluded: the NOT_DUMPED_MODULES set, and underscore-prefixed modules
    other than '__main__'.
    """
    if id in NOT_DUMPED_MODULES:
        return False
    return id == '__main__' or not id.startswith('_')
self.fmc_nons.find_module("nsx") self.assertIsNone(found_module) def test__no_namespace_packages__nsx_a(self) -> None: """ If namespace_packages is False, we shouldn't find nsx.a. """ found_module = self.fmc_nons.find_module("nsx.a") self.assertIsNone(found_module) def test__no_namespace_packages__find_a_in_pkg1(self) -> None: """ Find find pkg1/a.py for "a" with namespace_packages False. """ found_module = self.fmc_nons.find_module("a") expected = os.path.join(data_path, "pkg1", "a.py") assert_equal(expected, found_module) def test__no_namespace_packages__find_b_in_pkg2(self) -> None: found_module = self.fmc_ns.find_module("b") expected = os.path.join(data_path, "pkg2", "b", "__init__.py") assert_equal(expected, found_module) def test__find_nsx_as_namespace_pkg_in_pkg1(self) -> None: """ There's no __init__.py in any of the nsx dirs, return the path to the first one found in mypypath. """ found_module = self.fmc_ns.find_module("nsx") expected = os.path.join(data_path, "nsx-pkg1", "nsx") assert_equal(expected, found_module) def test__find_nsx_a_init_in_pkg1(self) -> None: """ Find nsx-pkg1/nsx/a/__init__.py for "nsx.a" in namespace mode. """ found_module = self.fmc_ns.find_module("nsx.a") expected = os.path.join(data_path, "nsx-pkg1", "nsx", "a", "__init__.py") assert_equal(expected, found_module) def test__find_nsx_b_init_in_pkg2(self) -> None: """ Find nsx-pkg2/nsx/b/__init__.py for "nsx.b" in namespace mode. """ found_module = self.fmc_ns.find_module("nsx.b") expected = os.path.join(data_path, "nsx-pkg2", "nsx", "b", "__init__.py") assert_equal(expected, found_module) def test__find_nsx_c_c_in_pkg3(self) -> None: """ Find nsx-pkg3/nsx/c/c.py for "nsx.c.c" in namespace mode. """ found_module = self.fmc_ns.find_module("nsx.c.c") expected = os.path.join(data_path, "nsx-pkg3", "nsx", "c", "c.py") assert_equal(expected, found_module) def test__find_nsy_a__init_pyi(self) -> None: """ Prefer nsy-pkg1/a/__init__.pyi file over __init__.py. 
""" found_module = self.fmc_ns.find_module("nsy.a") expected = os.path.join(data_path, "nsy-pkg1", "nsy", "a", "__init__.pyi") assert_equal(expected, found_module) def test__find_nsy_b__init_py(self) -> None: """ There is a nsy-pkg2/nsy/b.pyi, but also a nsy-pkg2/nsy/b/__init__.py. We expect to find the latter when looking up "nsy.b" as a package is preferred over a module. """ found_module = self.fmc_ns.find_module("nsy.b") expected = os.path.join(data_path, "nsy-pkg2", "nsy", "b", "__init__.py") assert_equal(expected, found_module) def test__find_nsy_c_pyi(self) -> None: """ There is a nsy-pkg2/nsy/c.pyi and nsy-pkg2/nsy/c.py We expect to find the former when looking up "nsy.b" as .pyi is preferred over .py. """ found_module = self.fmc_ns.find_module("nsy.c") expected = os.path.join(data_path, "nsy-pkg2", "nsy", "c.pyi") assert_equal(expected, found_module) def test__find_a_in_pkg1(self) -> None: found_module = self.fmc_ns.find_module("a") expected = os.path.join(data_path, "pkg1", "a.py") assert_equal(expected, found_module) def test__find_b_init_in_pkg2(self) -> None: found_module = self.fmc_ns.find_module("b") expected = os.path.join(data_path, "pkg2", "b", "__init__.py") assert_equal(expected, found_module) def test__find_d_nowhere(self) -> None: found_module = self.fmc_ns.find_module("d") self.assertIsNone(found_module) mypy-0.761/mypy/test/testmoduleinfo.py0000644€tŠÔÚ€2›s®0000000117413576752246024313 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy import moduleinfo from mypy.test.helpers import assert_true, assert_false, Suite class ModuleInfoSuite(Suite): def test_is_in_module_collection(self) -> None: assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo')) assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo.bar')) assert_false(moduleinfo.is_in_module_collection({'foo'}, 'fo')) assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar')) assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar.zar')) 
assert_false(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo')) mypy-0.761/mypy/test/testmypyc.py0000644€tŠÔÚ€2›s®0000000055113576752246023311 0ustar jukkaDROPBOX\Domain Users00000000000000"""A basic check to make sure that we are using a mypyc-compiled version when expected.""" import mypy from unittest import TestCase import os class MypycTest(TestCase): def test_using_mypyc(self) -> None: if os.getenv('TEST_MYPYC', None) == '1': assert not mypy.__file__.endswith('.py'), "Expected to find a mypyc-compiled version" mypy-0.761/mypy/test/testparse.py0000644€tŠÔÚ€2›s®0000000527713576752246023274 0ustar jukkaDROPBOX\Domain Users00000000000000"""Tests for the mypy parser.""" import sys from pytest import skip # type: ignore[import] from mypy import defaults from mypy.test.helpers import assert_string_arrays_equal, parse_options from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.parse import parse from mypy.errors import CompileError from mypy.options import Options class ParserSuite(DataSuite): required_out_section = True base_path = '.' files = ['parse.test', 'parse-python2.test'] def run_case(self, testcase: DataDrivenTestCase) -> None: test_parser(testcase) def test_parser(testcase: DataDrivenTestCase) -> None: """Perform a single parser test case. The argument contains the description of the test case. """ options = Options() if testcase.file.endswith('python2.test'): options.python_version = defaults.PYTHON2_VERSION else: options.python_version = defaults.PYTHON3_VERSION try: n = parse(bytes('\n'.join(testcase.input), 'ascii'), fnam='main', module='__main__', errors=None, options=options) a = str(n).split('\n') except CompileError as e: a = e.messages assert_string_arrays_equal(testcase.output, a, 'Invalid parser output ({}, line {})'.format( testcase.file, testcase.line)) # The file name shown in test case output. This is displayed in error # messages, and must match the file name in the test case descriptions. 
def test_parse_error(testcase: DataDrivenTestCase) -> None:
    """Parse a program that must fail, and compare errors to expected output."""
    try:
        options = parse_options('\n'.join(testcase.input), testcase, 0)
        if options.python_version != sys.version_info[:2]:
            skip()
        # Compile temporary file. The test file contains non-ASCII characters.
        parse(bytes('\n'.join(testcase.input), 'utf-8'), INPUT_FILE_NAME, '__main__', None,
              options)
        # Reaching here means parsing succeeded; AssertionError is not a
        # CompileError, so it propagates out of the except clause below.
        raise AssertionError('No errors reported')
    except CompileError as e:
        if e.module_with_blocker is not None:
            assert e.module_with_blocker == '__main__'
        # Verify that there was a compile error and that the error messages
        # are equivalent.
        assert_string_arrays_equal(
            testcase.output, e.messages,
            'Invalid compiler output ({}, line {})'.format(testcase.file,
                                                           testcase.line))
def create_ns_program_src(import_style: NSImportStyle) -> str:
    """Render the namespace-package test program for the given import style."""
    style_lines = import_style.value
    return _NAMESPACE_PROGRAM.format(import_style=style_lines)
= '' def create(self) -> None: self._temp_dir = tempfile.TemporaryDirectory() self._full_fname = os.path.join(self._temp_dir.name, self._fname) with open(self._full_fname, 'w+', encoding='utf8') as f: f.write(self._source_code) def cleanup(self) -> None: if self._temp_dir: self._temp_dir.cleanup() def build_msg(self, *msgs: Enum) -> str: return '\n'.join( msg.value.format(tempfile=self._full_fname) for msg in msgs ) + '\n' def check_mypy_run(self, python_executable: str, expected_out: List[Enum], expected_err: str = '', expected_returncode: int = 1, venv_dir: Optional[str] = None) -> None: """Helper to run mypy and check the output.""" cmd_line = [self._full_fname] if venv_dir is not None: old_dir = os.getcwd() os.chdir(venv_dir) try: cmd_line.append('--no-error-summary') if python_executable != sys.executable: cmd_line.append('--python-executable={}'.format(python_executable)) out, err, returncode = mypy.api.run(cmd_line) assert out == self.build_msg(*expected_out), err assert err == expected_err, out assert returncode == expected_returncode, returncode finally: if venv_dir is not None: os.chdir(old_dir) class TestPEP561(TestCase): @contextmanager def virtualenv(self, python_executable: str = sys.executable ) -> Generator[Tuple[str, str], None, None]: """Context manager that creates a virtualenv in a temporary directory returns the path to the created Python executable""" # Sadly, we need virtualenv, as the Python 3 venv module does not support creating a venv # for Python 2, and Python 2 does not have its own venv. with tempfile.TemporaryDirectory() as venv_dir: proc = subprocess.run([sys.executable, '-m', 'virtualenv', '-p{}'.format(python_executable), venv_dir], cwd=os.getcwd(), stdout=PIPE, stderr=PIPE) if proc.returncode != 0: err = proc.stdout.decode('utf-8') + proc.stderr.decode('utf-8') self.fail("Failed to create venv. 
Do you have virtualenv installed?\n" + err) if sys.platform == 'win32': yield venv_dir, os.path.abspath(os.path.join(venv_dir, 'Scripts', 'python')) else: yield venv_dir, os.path.abspath(os.path.join(venv_dir, 'bin', 'python')) def install_package(self, pkg: str, python_executable: str = sys.executable, use_pip: bool = True, editable: bool = False) -> None: """Context manager to temporarily install a package from test-data/packages/pkg/""" working_dir = os.path.join(package_path, pkg) if use_pip: install_cmd = [python_executable, '-m', 'pip', 'install'] if editable: install_cmd.append('-e') install_cmd.append('.') else: install_cmd = [python_executable, 'setup.py'] if editable: install_cmd.append('develop') else: install_cmd.append('install') proc = subprocess.run(install_cmd, cwd=working_dir, stdout=PIPE, stderr=PIPE) if proc.returncode != 0: self.fail(proc.stdout.decode('utf-8') + proc.stderr.decode('utf-8')) def setUp(self) -> None: self.simple_prog = ExampleProg(SIMPLE_PROGRAM) self.from_ns_prog = ExampleProg(create_ns_program_src(NSImportStyle.from_import)) self.import_as_ns_prog = ExampleProg(create_ns_program_src(NSImportStyle.import_as)) self.regular_import_ns_prog = ExampleProg(create_ns_program_src(NSImportStyle.reg_import)) def tearDown(self) -> None: self.simple_prog.cleanup() self.from_ns_prog.cleanup() self.import_as_ns_prog.cleanup() self.regular_import_ns_prog.cleanup() def test_get_pkg_dirs(self) -> None: """Check that get_package_dirs works.""" dirs = get_site_packages_dirs(sys.executable) assert dirs @pytest.mark.skipif(sys.platform == 'darwin' and hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix, reason="Temporarily skip to avoid having a virtualenv within a venv.") def test_typedpkg_stub_package(self) -> None: self.simple_prog.create() with self.virtualenv() as venv: venv_dir, python_executable = venv self.install_package('typedpkg-stubs', python_executable) self.simple_prog.check_mypy_run( python_executable, [SimpleMsg.msg_dne, 
    def test_mypy_path_is_respected(self) -> None:
        """mypy_path from a config file is honored when locating packages."""
        packages = 'packages'
        pkg_name = 'a'
        with tempfile.TemporaryDirectory() as temp_dir:
            old_dir = os.getcwd()
            os.chdir(temp_dir)
            try:
                # Create the pkg for files to go into
                full_pkg_name = os.path.join(temp_dir, packages, pkg_name)
                os.makedirs(full_pkg_name)
                # Create the empty __init__ file to declare a package
                pkg_init_name = os.path.join(temp_dir, packages, pkg_name, '__init__.py')
                open(pkg_init_name, 'w', encoding='utf8').close()
                # Write a config whose mypy_path points at the generated
                # packages directory.
                mypy_config_path = os.path.join(temp_dir, 'mypy.ini')
                with open(mypy_config_path, 'w') as mypy_file:
                    mypy_file.write('[mypy]\n')
                    mypy_file.write('mypy_path = ./{}\n'.format(packages))
                with self.virtualenv() as venv:
                    venv_dir, python_executable = venv
                    cmd_line_args = []
                    if python_executable != sys.executable:
                        cmd_line_args.append('--python-executable={}'.format(python_executable))
                    cmd_line_args.extend(['--config-file', mypy_config_path,
                                          '--package', pkg_name])
                    out, err, returncode = mypy.api.run(cmd_line_args)
                    # The empty package should type-check cleanly.
                    assert returncode == 0
            finally:
                # Always restore the CWD so later tests are unaffected.
                os.chdir(old_dir)
reason="Temporarily skip to avoid having a virtualenv within a venv.") def test_typedpkg_stubs_python2(self) -> None: self.simple_prog.create() python2 = try_find_python2_interpreter() if python2: with self.virtualenv(python2) as venv: venv_dir, py2 = venv self.install_package('typedpkg-stubs', py2) self.simple_prog.check_mypy_run( py2, [SimpleMsg.msg_dne, SimpleMsg.msg_list], venv_dir=venv_dir, ) def test_typedpkg_python2(self) -> None: self.simple_prog.create() python2 = try_find_python2_interpreter() if python2: with self.virtualenv(python2) as venv: venv_dir, py2 = venv self.install_package('typedpkg', py2) self.simple_prog.check_mypy_run( py2, [SimpleMsg.msg_tuple], venv_dir=venv_dir, ) @pytest.mark.skipif(sys.platform == 'darwin' and hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix, reason="Temporarily skip to avoid having a virtualenv within a venv.") def test_typedpkg_egg(self) -> None: self.simple_prog.create() with self.virtualenv() as venv: venv_dir, python_executable = venv self.install_package('typedpkg', python_executable, use_pip=False) self.simple_prog.check_mypy_run( python_executable, [SimpleMsg.msg_tuple], venv_dir=venv_dir, ) @pytest.mark.skipif(sys.platform == 'darwin' and hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix, reason="Temporarily skip to avoid having a virtualenv within a venv.") def test_typedpkg_editable(self) -> None: self.simple_prog.create() with self.virtualenv() as venv: venv_dir, python_executable = venv self.install_package('typedpkg', python_executable, editable=True) self.simple_prog.check_mypy_run( python_executable, [SimpleMsg.msg_tuple], venv_dir=venv_dir, ) @pytest.mark.skipif(sys.platform == 'darwin' and hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix, reason="Temporarily skip to avoid having a virtualenv within a venv.") def test_typedpkg_egg_editable(self) -> None: self.simple_prog.create() with self.virtualenv() as venv: venv_dir, python_executable = venv 
self.install_package('typedpkg', python_executable, use_pip=False, editable=True) self.simple_prog.check_mypy_run( python_executable, [SimpleMsg.msg_tuple], venv_dir=venv_dir, ) def test_nested_and_namespace_from_import(self) -> None: self.from_ns_prog.create() with self.virtualenv() as venv: venv_dir, python_executable = venv self.install_package('typedpkg', python_executable) self.install_package('typedpkg_ns', python_executable) self.from_ns_prog.check_mypy_run( python_executable, [NamespaceMsg.cfm_beta, NamespaceMsg.help_note, NamespaceMsg.to_bool_str, NamespaceMsg.to_int_bool], venv_dir=venv_dir, ) def test_nested_and_namespace_import_as(self) -> None: self.import_as_ns_prog.create() with self.virtualenv() as venv: venv_dir, python_executable = venv self.install_package('typedpkg', python_executable) self.install_package('typedpkg_ns', python_executable) self.import_as_ns_prog.check_mypy_run( python_executable, [NamespaceMsg.cfm_beta, NamespaceMsg.help_note, NamespaceMsg.bool_str, NamespaceMsg.int_bool], venv_dir=venv_dir, ) def test_nested_and_namespace_regular_import(self) -> None: self.regular_import_ns_prog.create() with self.virtualenv() as venv: venv_dir, python_executable = venv self.install_package('typedpkg', python_executable) self.install_package('typedpkg_ns', python_executable) self.regular_import_ns_prog.check_mypy_run( python_executable, [NamespaceMsg.cfm_beta, NamespaceMsg.help_note, NamespaceMsg.bool_str, NamespaceMsg.int_bool], venv_dir=venv_dir, ) if __name__ == '__main__': main() mypy-0.761/mypy/test/testpythoneval.py0000644€tŠÔÚ€2›s®0000000772513576752246024353 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for running mypy programs using a Python interpreter. Each test case type checks a program then runs it using Python. The output (stdout) of the program is compared to expected output. Type checking uses full builtins and other stubs. Note: Currently Python interpreter paths are hard coded. 
class PythonEvaluationSuite(DataSuite):
    """Data-driven suite: type check each program, then execute it (see module
    docstring)."""

    files = ['pythoneval.test',
             'python2eval.test',
             'pythoneval-asyncio.test']
    # Class-level, so a single cache directory is shared by every test case in
    # the suite rather than being recreated per case.
    cache_dir = TemporaryDirectory()

    def run_case(self, testcase: DataDrivenTestCase) -> None:
        test_python_evaluation(testcase, os.path.join(self.cache_dir.name, '.mypy_cache'))
def adapt_output(testcase: DataDrivenTestCase) -> List[str]:
    """Translates the generic _program.py into the actual filename."""
    real_name = '_' + testcase.name + '.py'
    adapted = []  # type: List[str]
    for line in testcase.output:
        adapted.append(program_re.sub(real_name, line))
    return adapted
class TypeshedSuite(Suite):
    """Self check of the bundled typeshed stubs under several Python versions."""

    def check_stubs(self, version: str, *directories: str) -> None:
        """Run mypy over every stub module found in the given typeshed dirs."""
        if not directories:
            directories = (version,)
        for stub_type in ['stdlib', 'third_party']:
            for dir in directories:
                seen = {'__builtin__'}  # we don't want to check __builtin__, as it causes problems
                modules = []
                stubdir = os.path.join('typeshed', stub_type, dir)
                prefix_len = len(stubdir) + 1
                for stub_file in find_files(stubdir, suffix='.pyi'):
                    module = file_to_module(stub_file[prefix_len:])
                    if module not in seen:
                        seen.add(module)
                        modules.extend(['-m', module])
                if modules:
                    run_mypy(['--python-version={}'.format(version)] + modules)

    def test_2(self) -> None:
        self.check_stubs("2.7", "2", "2and3")

    def test_3(self) -> None:
        sys_ver_str = '.'.join(map(str, sys.version_info[:2]))
        self.check_stubs(sys_ver_str, "3", "2and3")

    def test_34(self) -> None:
        self.check_stubs("3.4")

    def test_35(self) -> None:
        self.check_stubs("3.5")

    def test_36(self) -> None:
        self.check_stubs("3.6")

    def test_37(self) -> None:
        self.check_stubs("3.7")
def file_to_module(file: str) -> str:
    """Convert a relative stub/source path into a dotted module name.

    A trailing '.__init__' (from a package's __init__ file) is stripped so
    the package itself is named.
    """
    stem = os.path.splitext(file)[0]
    module = stem.replace(os.sep, '.')
    init_suffix = '.__init__'
    if module.endswith(init_suffix):
        module = module[:-len(init_suffix)]
    return module
def get_semanal_options(program_text: str, testcase: DataDrivenTestCase) -> Options:
    """Return Options configured to stop after semantic analysis.

    Uses fixture builtins and a fixed Python 3 version so test output is
    stable across environments.
    """
    opts = parse_options(program_text, testcase, 1)
    opts.use_builtins_fixtures = True
    opts.semantic_analysis_only = True
    opts.show_traceback = True
    opts.python_version = PYTHON3_VERSION
    return opts
def test_semanal_error(testcase: DataDrivenTestCase) -> None:
    """Perform a test case.

    The case is expected to produce semantic analysis errors; the reported
    messages are compared against the expected output.
    """
    source = '\n'.join(testcase.input)
    try:
        res = build.build(sources=[BuildSource('main', None, source)],
                          options=get_semanal_options(source, testcase),
                          alt_lib_path=test_temp_dir)
        actual = res.errors
        assert actual, 'No errors reported in {}, line {}'.format(testcase.file, testcase.line)
    except CompileError as e:
        # Verify that there was a compile error and that the error messages
        # are equivalent.
        actual = e.messages
        if testcase.normalize_output:
            actual = normalize_error_messages(actual)
    assert_string_arrays_equal(
        testcase.output, actual,
        'Invalid compiler output ({}, line {})'.format(testcase.file, testcase.line))
a = result.errors if a: raise CompileError(a) for f in sorted(result.files.keys()): if f not in ('builtins', 'typing', 'abc'): a.append('{}:'.format(f)) for s in str(result.files[f].names).split('\n'): a.append(' ' + s) except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, 'Invalid semantic analyzer output ({}, line {})'.format( testcase.file, testcase.line)) # Type info export test cases class SemAnalTypeInfoSuite(DataSuite): required_out_section = True files = ['semanal-typeinfo.test'] def run_case(self, testcase: DataDrivenTestCase) -> None: """Perform a test case.""" try: # Build test case input. src = '\n'.join(testcase.input) result = build.build(sources=[BuildSource('main', None, src)], options=get_semanal_options(src, testcase), alt_lib_path=test_temp_dir) a = result.errors if a: raise CompileError(a) # Collect all TypeInfos in top-level modules. typeinfos = TypeInfoMap() for f in result.files.values(): for n in f.names.values(): if isinstance(n.node, TypeInfo): assert n.fullname is not None typeinfos[n.fullname] = n.node # The output is the symbol table converted into a string. 
class TypeInfoMap(Dict[str, TypeInfo]):
    """Map from type fullname to TypeInfo, with a readable multi-line dump."""

    def __str__(self) -> str:
        lines = ['TypeInfoMap(']  # type: List[str]
        for fullname, info in sorted(self.items()):
            # Only dump test-defined types; library definitions are noise.
            if isinstance(fullname, str) and not fullname.startswith(
                    ('builtins.', 'typing.', 'abc.')):
                dump = ('\n' + ' ').join(str(info).split('\n'))
                lines.append(' {} : {}'.format(fullname, dump))
        lines[-1] += ')'
        return '\n'.join(lines)
def test_no_constraints_for_var(self) -> None:
    """A type variable with no constraints solves to the uninhabited type."""
    fx = self.fx
    self.assert_solve([fx.t.id], [], [fx.uninhabited])
    self.assert_solve([fx.t.id, fx.s.id],
                      [],
                      [fx.uninhabited, fx.uninhabited])
    # A constraint on one variable must not affect the unconstrained one.
    self.assert_solve([fx.t.id, fx.s.id],
                      [self.supc(fx.s, fx.a)],
                      [fx.uninhabited, (fx.a, fx.o)])
def assert_solve(self,
                 vars: List[TypeVarId],
                 constraints: List[Constraint],
                 results: List[Union[None, Type, Tuple[Type, Type]]],
                 ) -> None:
    """Solve the constraints and compare against the expected results.

    A tuple entry in results denotes a (lower, upper) range; only the
    lower bound is compared against the solver output.
    """
    expected = [r[0] if isinstance(r, tuple) else r
                for r in results]  # type: List[Optional[Type]]
    actual = solve_constraints(vars, constraints)
    assert_equal(str(actual), str(expected))
"""Test cases for processing command-line options and finding files.""" @unittest.skipIf(sys.platform == 'win32', "clean up fails on Windows") def test_files_found(self) -> None: current = os.getcwd() with tempfile.TemporaryDirectory() as tmp: try: os.chdir(tmp) os.mkdir('subdir') self.make_file('subdir', 'a.py') self.make_file('subdir', 'b.py') os.mkdir(os.path.join('subdir', 'pack')) self.make_file('subdir', 'pack', '__init__.py') opts = parse_options(['subdir']) py_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) assert_equal(c_mods, []) files = {mod.path for mod in py_mods} assert_equal(files, {os.path.join('subdir', 'pack', '__init__.py'), os.path.join('subdir', 'a.py'), os.path.join('subdir', 'b.py')}) finally: os.chdir(current) @unittest.skipIf(sys.platform == 'win32', "clean up fails on Windows") def test_packages_found(self) -> None: current = os.getcwd() with tempfile.TemporaryDirectory() as tmp: try: os.chdir(tmp) os.mkdir('pack') self.make_file('pack', '__init__.py', content='from . 
def make_file(self, *path: str, content: str = '') -> None:
    """Create a file at the joined path components with the given content."""
    target = os.path.join(*path)
    with open(target, 'w') as handle:
        handle.write(content)
def test_build_signature(self) -> None:
    """build_signature renders required args plus '=...' optional args."""
    cases = [
        (([], []), '()'),
        ((['arg'], []), '(arg)'),
        ((['arg', 'arg2'], []), '(arg, arg2)'),
        ((['arg'], ['arg2']), '(arg, arg2=...)'),
        ((['arg'], ['arg2', '**x']), '(arg, arg2=..., **x)'),
    ]
    for (required, optional), expected in cases:
        assert_equal(build_signature(required, optional), expected)
def test_find_unique_signatures(self) -> None:
    """Only names that always appear with one signature are kept."""
    observed = [('func', '()'),
                ('func', '()'),
                ('func2', '()'),
                ('func2', '(arg)'),
                ('func3', '(arg, arg2)')]
    # func2 has two distinct signatures, so it must be dropped.
    expected = [('func', '()'), ('func3', '(arg, arg2)')]
    assert_equal(find_unique_signatures(observed), expected)
assert_equal(infer_sig_from_docstring('\nfunc(x: int)', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='int')], ret_type='Any')]) assert_equal(infer_sig_from_docstring('\nfunc(x: int=3)', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='int', default=True)], ret_type='Any')]) assert_equal(infer_sig_from_docstring('\nfunc(x: int=3) -> int', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='int', default=True)], ret_type='int')]) assert_equal(infer_sig_from_docstring('\nfunc(x: int=3) -> int \n', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='int', default=True)], ret_type='int')]) assert_equal(infer_sig_from_docstring('\nfunc(x: Tuple[int, str]) -> str', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='Tuple[int,str]')], ret_type='str')]) assert_equal( infer_sig_from_docstring('\nfunc(x: Tuple[int, Tuple[str, int], str], y: int) -> str', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='Tuple[int,Tuple[str,int],str]'), ArgSig(name='y', type='int')], ret_type='str')]) assert_equal(infer_sig_from_docstring('\nfunc(x: foo.bar)', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='foo.bar')], ret_type='Any')]) assert_equal(infer_sig_from_docstring('\nfunc(x: list=[1,2,[3,4]])', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='list', default=True)], ret_type='Any')]) assert_equal(infer_sig_from_docstring('\nfunc(x: str="nasty[")', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='str', default=True)], ret_type='Any')]) assert_equal(infer_sig_from_docstring('\nfunc[(x: foo.bar, invalid]', 'func'), []) assert_equal(infer_sig_from_docstring('\nfunc(x: invalid::type)', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type=None)], ret_type='Any')]) assert_equal(infer_sig_from_docstring('\nfunc(x: str="")', 'func'), [FunctionSig(name='func', args=[ArgSig(name='x', type='str', default=True)], ret_type='Any')]) def 
def test_infer_prop_type_from_docstring(self) -> None:
    """The property type is the prefix before ':' on the docstring's first line."""
    cases = [
        ('str: A string.', 'str'),
        ('Optional[int]: An int.', 'Optional[int]'),
        ('Tuple[int, int]: A tuple.', 'Tuple[int, int]'),
        # A leading newline means the type is not on the first line.
        ('\nstr: A string.', None),
    ]
    for docstring, expected in cases:
        assert_equal(infer_prop_type_from_docstring(docstring), expected)
''' def f(x): """docstring""" pass def g(x): """docstring """ pass ''' assert_equal(remove_misplaced_type_comments(bad), bad_fixed) def test_remove_misplaced_type_comments_4(self) -> None: bad = """ def f(x): '''docstring''' # type: (int) -> int pass def g(x): '''docstring ''' # type: (int) -> int pass """ bad_fixed = """ def f(x): '''docstring''' pass def g(x): '''docstring ''' pass """ assert_equal(remove_misplaced_type_comments(bad), bad_fixed) def test_remove_misplaced_type_comments_5(self) -> None: bad = """ def f(x): # type: (int, List[Any], # float, bool) -> int pass def g(x): # type: (int, List[Any]) pass """ bad_fixed = """ def f(x): # float, bool) -> int pass def g(x): pass """ assert_equal(remove_misplaced_type_comments(bad), bad_fixed) def test_remove_misplaced_type_comments_bytes(self) -> None: original = b""" \xbf def f(x): # type: (int) -> int def g(x): # type: (int) -> int pass def h(): # type: int pass x = 1 # type: int """ dest = b""" \xbf def f(x): # type: (int) -> int def g(x): # type: (int) -> int pass def h(): pass x = 1 # type: int """ assert_equal(remove_misplaced_type_comments(original), dest) def test_common_dir_prefix(self) -> None: assert common_dir_prefix([]) == '.' assert common_dir_prefix(['x.pyi']) == '.' assert common_dir_prefix(['./x.pyi']) == '.' 
def test_is_blacklisted_path(self) -> None:
    """Vendored directories and known vendored modules are blacklisted."""
    allowed = ('foo/bar.py',
               'foo.py',
               'foo/xvendor/bar.py',
               'foo/vendorx/bar.py')
    for path in allowed:
        assert not is_blacklisted_path(path)
    blacklisted = ('foo/vendor/bar.py',
                   'foo/vendored/bar.py',
                   'foo/vendored/bar/thing.py',
                   'foo/six.py')
    for path in blacklisted:
        assert is_blacklisted_path(path)
assert not is_non_library_module('foo.bartest') assert not is_non_library_module('foo.bartests') assert not is_non_library_module('foo.testbar') assert is_non_library_module('foo.test') assert is_non_library_module('foo.test.foo') assert is_non_library_module('foo.tests') assert is_non_library_module('foo.tests.foo') assert is_non_library_module('foo.testing.foo') assert is_non_library_module('foo.SelfTest.foo') assert is_non_library_module('foo.test_bar') assert is_non_library_module('foo.bar_tests') assert is_non_library_module('foo.testing') assert is_non_library_module('foo.conftest') assert is_non_library_module('foo.bar_test_util') assert is_non_library_module('foo.bar_test_utils') assert is_non_library_module('foo.bar_test_base') assert is_non_library_module('foo.setup') assert is_non_library_module('foo.__main__') class StubgenPythonSuite(DataSuite): """Data-driven end-to-end test cases that generate stub files. You can use these magic test case name suffixes: *_semanal Run semantic analysis (slow as this uses real stubs -- only use when necessary) *_import Import module and perform runtime introspection (in the current process!) You can use these magic comments: # flags: --some-stubgen-option ... Specify custom stubgen options # modules: module1 module2 ... Specify which modules to output (by default only 'main') """ required_out_section = True base_path = '.' files = ['stubgen.test'] def run_case(self, testcase: DataDrivenTestCase) -> None: with local_sys_path_set(): self.run_case_inner(testcase) def run_case_inner(self, testcase: DataDrivenTestCase) -> None: extra = [] # Extra command-line args mods = [] # Module names to process source = '\n'.join(testcase.input) for file, content in testcase.files + [('./main.py', source)]: # Strip ./ prefix and .py suffix. 
def parse_modules(self, program_text: str) -> List[str]:
    """Return module names from a '# modules:' magic comment, or ['main']."""
    match = re.search('# modules: (.*)$', program_text, flags=re.MULTILINE)
    if match is None:
        return ['main']
    return match.group(1).split()
""" def test_infer_hash_sig(self) -> None: assert_equal(infer_method_sig('__hash__'), [self_arg]) def test_infer_getitem_sig(self) -> None: assert_equal(infer_method_sig('__getitem__'), [self_arg, ArgSig(name='index')]) def test_infer_setitem_sig(self) -> None: assert_equal(infer_method_sig('__setitem__'), [self_arg, ArgSig(name='index'), ArgSig(name='object')]) def test_infer_binary_op_sig(self) -> None: for op in ('eq', 'ne', 'lt', 'le', 'gt', 'ge', 'add', 'radd', 'sub', 'rsub', 'mul', 'rmul'): assert_equal(infer_method_sig('__%s__' % op), [self_arg, ArgSig(name='other')]) def test_infer_unary_op_sig(self) -> None: for op in ('neg', 'pos'): assert_equal(infer_method_sig('__%s__' % op), [self_arg]) def test_generate_c_type_stub_no_crash_for_object(self) -> None: output = [] # type: List[str] mod = ModuleType('module', '') # any module is fine imports = [] # type: List[str] generate_c_type_stub(mod, 'alias', object, output, imports) assert_equal(imports, []) assert_equal(output[0], 'class alias:') def test_generate_c_type_stub_variable_type_annotation(self) -> None: # This class mimics the stubgen unit test 'testClassVariable' class TestClassVariableCls: x = 1 output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType('module', '') # any module is fine generate_c_type_stub(mod, 'C', TestClassVariableCls, output, imports) assert_equal(imports, []) assert_equal(output, ['class C:', ' x: Any = ...']) def test_generate_c_type_inheritance(self) -> None: class TestClass(KeyError): pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType('module, ') generate_c_type_stub(mod, 'C', TestClass, output, imports) assert_equal(output, ['class C(KeyError): ...', ]) assert_equal(imports, []) def test_generate_c_type_inheritance_same_module(self) -> None: class TestBaseClass: pass class TestClass(TestBaseClass): pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType(TestBaseClass.__module__, '') 
generate_c_type_stub(mod, 'C', TestClass, output, imports) assert_equal(output, ['class C(TestBaseClass): ...', ]) assert_equal(imports, []) def test_generate_c_type_inheritance_other_module(self) -> None: import argparse class TestClass(argparse.Action): pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType('module', '') generate_c_type_stub(mod, 'C', TestClass, output, imports) assert_equal(output, ['class C(argparse.Action): ...', ]) assert_equal(imports, ['import argparse']) def test_generate_c_type_with_docstring(self) -> None: class TestClass: def test(self, arg0: str) -> None: """ test(self: TestClass, arg0: int) """ pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, 'test', TestClass.test, output, imports, self_var='self', class_name='TestClass') assert_equal(output, ['def test(self, arg0: int) -> Any: ...']) assert_equal(imports, []) def test_generate_c_type_with_docstring_empty_default(self) -> None: class TestClass: def test(self, arg0: str = "") -> None: """ test(self: TestClass, arg0: str = "") """ pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, 'test', TestClass.test, output, imports, self_var='self', class_name='TestClass') assert_equal(output, ['def test(self, arg0: str = ...) -> Any: ...']) assert_equal(imports, []) def test_generate_c_function_other_module_arg(self) -> None: """Test that if argument references type from other module, module will be imported.""" # Provide different type in python spec than in docstring to make sure, that docstring # information is used. 
def test(arg0: str) -> None: """ test(arg0: argparse.Action) """ pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType(self.__module__, '') generate_c_function_stub(mod, 'test', test, output, imports) assert_equal(output, ['def test(arg0: argparse.Action) -> Any: ...']) assert_equal(imports, ['import argparse']) def test_generate_c_function_same_module_arg(self) -> None: """Test that if argument references type from same module but using full path, no module will be imported, and type specification will be striped to local reference. """ # Provide different type in python spec than in docstring to make sure, that docstring # information is used. def test(arg0: str) -> None: """ test(arg0: argparse.Action) """ pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType('argparse', '') generate_c_function_stub(mod, 'test', test, output, imports) assert_equal(output, ['def test(arg0: Action) -> Any: ...']) assert_equal(imports, []) def test_generate_c_function_other_module_ret(self) -> None: """Test that if return type references type from other module, module will be imported.""" def test(arg0: str) -> None: """ test(arg0: str) -> argparse.Action """ pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType(self.__module__, '') generate_c_function_stub(mod, 'test', test, output, imports) assert_equal(output, ['def test(arg0: str) -> argparse.Action: ...']) assert_equal(imports, ['import argparse']) def test_generate_c_function_same_module_ret(self) -> None: """Test that if return type references type from same module but using full path, no module will be imported, and type specification will be striped to local reference. 
""" def test(arg0: str) -> None: """ test(arg0: str) -> argparse.Action """ pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType('argparse', '') generate_c_function_stub(mod, 'test', test, output, imports) assert_equal(output, ['def test(arg0: str) -> Action: ...']) assert_equal(imports, []) def test_generate_c_type_with_overload_pybind11(self) -> None: class TestClass: def __init__(self, arg0: str) -> None: """ __init__(*args, **kwargs) Overloaded function. 1. __init__(self: TestClass, arg0: str) -> None 2. __init__(self: TestClass, arg0: str, arg1: str) -> None """ pass output = [] # type: List[str] imports = [] # type: List[str] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, '__init__', TestClass.__init__, output, imports, self_var='self', class_name='TestClass') assert_equal(output, [ '@overload', 'def __init__(self, arg0: str) -> None: ...', '@overload', 'def __init__(self, arg0: str, arg1: str) -> None: ...', '@overload', 'def __init__(*args, **kwargs) -> Any: ...']) assert_equal(set(imports), {'from typing import overload'}) class ArgSigSuite(unittest.TestCase): def test_repr(self) -> None: assert_equal(repr(ArgSig(name='asd"dsa')), "ArgSig(name='asd\"dsa', type=None, default=False)") assert_equal(repr(ArgSig(name="asd'dsa")), 'ArgSig(name="asd\'dsa", type=None, default=False)') assert_equal(repr(ArgSig("func", 'str')), "ArgSig(name='func', type='str', default=False)") assert_equal(repr(ArgSig("func", 'str', default=True)), "ArgSig(name='func', type='str', default=True)") class IsValidTypeSuite(unittest.TestCase): def test_is_valid_type(self) -> None: assert is_valid_type('int') assert is_valid_type('str') assert is_valid_type('Foo_Bar234') assert is_valid_type('foo.bar') assert is_valid_type('List[int]') assert is_valid_type('Dict[str, int]') assert is_valid_type('None') assert not is_valid_type('foo-bar') assert not is_valid_type('x->y') assert not is_valid_type('True') assert not 
is_valid_type('False') assert not is_valid_type('x,y') assert not is_valid_type('x, y') class ModuleInspectSuite(unittest.TestCase): def test_python_module(self) -> None: with ModuleInspect() as m: p = m.get_package_properties('inspect') assert p is not None assert p.name == 'inspect' assert p.file assert p.path is None assert p.is_c_module is False assert p.subpackages == [] def test_python_package(self) -> None: with ModuleInspect() as m: p = m.get_package_properties('unittest') assert p is not None assert p.name == 'unittest' assert p.file assert p.path assert p.is_c_module is False assert p.subpackages assert all(sub.startswith('unittest.') for sub in p.subpackages) def test_c_module(self) -> None: with ModuleInspect() as m: p = m.get_package_properties('_socket') assert p is not None assert p.name == '_socket' assert p.path is None assert p.is_c_module is True assert p.subpackages == [] def test_non_existent(self) -> None: with ModuleInspect() as m: with self.assertRaises(InspectError) as e: m.get_package_properties('foobar-non-existent') assert str(e.exception) == "No module named 'foobar-non-existent'" def module_to_path(out_dir: str, module: str) -> str: fnam = os.path.join(out_dir, '{}.pyi'.format(module.replace('.', '/'))) if not os.path.exists(fnam): alt_fnam = fnam.replace('.pyi', '/__init__.pyi') if os.path.exists(alt_fnam): return alt_fnam return fnam mypy-0.761/mypy/test/testsubtypes.py0000644€tŠÔÚ€2›s®0000002121213576752246024023 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.test.helpers import Suite, assert_true, skip from mypy.nodes import CONTRAVARIANT, INVARIANT, COVARIANT from mypy.subtypes import is_subtype from mypy.test.typefixture import TypeFixture, InterfaceTypeFixture from mypy.types import Type class SubtypingSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture(INVARIANT) self.fx_contra = TypeFixture(CONTRAVARIANT) self.fx_co = TypeFixture(COVARIANT) def test_trivial_cases(self) -> None: for simple in self.fx_co.a, 
self.fx_co.o, self.fx_co.b:
            self.assert_subtype(simple, simple)

    # Nominal (non-generic) instance subtyping.
    def test_instance_subtyping(self) -> None:
        self.assert_strict_subtype(self.fx.a, self.fx.o)
        self.assert_strict_subtype(self.fx.b, self.fx.o)
        self.assert_strict_subtype(self.fx.b, self.fx.a)
        self.assert_not_subtype(self.fx.a, self.fx.d)
        self.assert_not_subtype(self.fx.b, self.fx.c)

    # Generic instances: same generic class, differing type arguments,
    # under each variance setting of the fixture.
    def test_simple_generic_instance_subtyping_invariant(self) -> None:
        self.assert_subtype(self.fx.ga, self.fx.ga)
        self.assert_subtype(self.fx.hab, self.fx.hab)
        self.assert_not_subtype(self.fx.ga, self.fx.g2a)
        self.assert_not_subtype(self.fx.ga, self.fx.gb)
        self.assert_not_subtype(self.fx.gb, self.fx.ga)

    def test_simple_generic_instance_subtyping_covariant(self) -> None:
        self.assert_subtype(self.fx_co.ga, self.fx_co.ga)
        self.assert_subtype(self.fx_co.hab, self.fx_co.hab)
        self.assert_not_subtype(self.fx_co.ga, self.fx_co.g2a)
        self.assert_not_subtype(self.fx_co.ga, self.fx_co.gb)
        self.assert_subtype(self.fx_co.gb, self.fx_co.ga)

    def test_simple_generic_instance_subtyping_contravariant(self) -> None:
        self.assert_subtype(self.fx_contra.ga, self.fx_contra.ga)
        self.assert_subtype(self.fx_contra.hab, self.fx_contra.hab)
        self.assert_not_subtype(self.fx_contra.ga, self.fx_contra.g2a)
        self.assert_subtype(self.fx_contra.ga, self.fx_contra.gb)
        self.assert_not_subtype(self.fx_contra.gb, self.fx_contra.ga)

    # Generic instances combined with inheritance between generic classes.
    def test_generic_subtyping_with_inheritance_invariant(self) -> None:
        self.assert_subtype(self.fx.gsab, self.fx.gb)
        self.assert_not_subtype(self.fx.gsab, self.fx.ga)
        self.assert_not_subtype(self.fx.gsaa, self.fx.gb)

    def test_generic_subtyping_with_inheritance_covariant(self) -> None:
        self.assert_subtype(self.fx_co.gsab, self.fx_co.gb)
        self.assert_subtype(self.fx_co.gsab, self.fx_co.ga)
        self.assert_not_subtype(self.fx_co.gsaa, self.fx_co.gb)

    def test_generic_subtyping_with_inheritance_contravariant(self) -> None:
        self.assert_subtype(self.fx_contra.gsab, self.fx_contra.gb)
        self.assert_not_subtype(self.fx_contra.gsab, self.fx_contra.ga)
        self.assert_subtype(self.fx_contra.gsaa, self.fx_contra.gb)

    def test_interface_subtyping(self) -> None:
        self.assert_subtype(self.fx.e, self.fx.f)
        self.assert_equivalent(self.fx.f, self.fx.f)
        self.assert_not_subtype(self.fx.a, self.fx.f)

    @skip
    def test_generic_interface_subtyping(self) -> None:
        # TODO make this work
        fx2 = InterfaceTypeFixture()
        self.assert_subtype(fx2.m1, fx2.gfa)
        self.assert_not_subtype(fx2.m1, fx2.gfb)
        self.assert_equivalent(fx2.gfa, fx2.gfa)

    # Callable subtyping: contravariant in arguments, covariant in return.
    def test_basic_callable_subtyping(self) -> None:
        self.assert_strict_subtype(self.fx.callable(self.fx.o, self.fx.d),
                                   self.fx.callable(self.fx.a, self.fx.d))
        self.assert_strict_subtype(self.fx.callable(self.fx.d, self.fx.b),
                                   self.fx.callable(self.fx.d, self.fx.a))
        self.assert_strict_subtype(self.fx.callable(self.fx.a, self.fx.nonet),
                                   self.fx.callable(self.fx.a, self.fx.a))
        self.assert_unrelated(
            self.fx.callable(self.fx.a, self.fx.a, self.fx.a),
            self.fx.callable(self.fx.a, self.fx.a))

    def test_default_arg_callable_subtyping(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.a, self.fx.d, self.fx.a))
        self.assert_strict_subtype(
            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.a, self.fx.a))
        self.assert_strict_subtype(
            self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a))
        self.assert_unrelated(
            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.d, self.fx.d, self.fx.a))
        self.assert_unrelated(
            self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable_default(1, self.fx.a, self.fx.a, self.fx.a))
        self.assert_unrelated(
            self.fx.callable_default(1, self.fx.a, self.fx.a),
            self.fx.callable(self.fx.a, self.fx.a, self.fx.a))

    # Callables with *args (callable_var_arg(min_args, arg..., ret)).
    def test_var_arg_callable_subtyping_1(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
            self.fx.callable_var_arg(0, self.fx.b, self.fx.a))

    def test_var_arg_callable_subtyping_2(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
            self.fx.callable(self.fx.b, self.fx.a))

    def test_var_arg_callable_subtyping_3(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
            self.fx.callable(self.fx.a))

    def test_var_arg_callable_subtyping_4(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(1, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.b, self.fx.a))

    def test_var_arg_callable_subtyping_5(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.b, self.fx.a))

    def test_var_arg_callable_subtyping_6(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.f, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.b, self.fx.e, self.fx.d))

    def test_var_arg_callable_subtyping_7(self) -> None:
        self.assert_not_subtype(
            self.fx.callable_var_arg(0, self.fx.b, self.fx.d),
            self.fx.callable(self.fx.a, self.fx.d))

    def test_var_arg_callable_subtyping_8(self) -> None:
        self.assert_not_subtype(
            self.fx.callable_var_arg(0, self.fx.b, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d))
        self.assert_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d))

    def test_var_arg_callable_subtyping_9(self) -> None:
        self.assert_not_subtype(
            self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.a, self.fx.d))
        self.assert_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.b, self.fx.d))

    def test_type_callable_subtyping(self) -> None:
        self.assert_subtype(
            self.fx.callable_type(self.fx.d, self.fx.a), self.fx.type_type)
        self.assert_strict_subtype(
            self.fx.callable_type(self.fx.d, self.fx.b),
            self.fx.callable(self.fx.d, self.fx.a))
        self.assert_strict_subtype(self.fx.callable_type(self.fx.a, self.fx.b),
                                   self.fx.callable(self.fx.a, self.fx.b))

    # IDEA: Maybe add these test cases (they are tested pretty well in type
    #       checker tests already):
    #  * more interface subtyping test cases
    #  * more generic interface subtyping test cases
    #  * type variables
    #  * tuple types
    #  * None type
    #  * any type
    #  * generic function types

    # Shared assertion helpers used by all test cases above.
    def assert_subtype(self, s: Type, t: Type) -> None:
        assert_true(is_subtype(s, t), '{} not subtype of {}'.format(s, t))

    def assert_not_subtype(self, s: Type, t: Type) -> None:
        assert_true(not is_subtype(s, t), '{} subtype of {}'.format(s, t))

    def assert_strict_subtype(self, s: Type, t: Type) -> None:
        self.assert_subtype(s, t)
        self.assert_not_subtype(t, s)

    def assert_equivalent(self, s: Type, t: Type) -> None:
        self.assert_subtype(s, t)
        self.assert_subtype(t, s)

    def assert_unrelated(self, s: Type, t: Type) -> None:
        self.assert_not_subtype(s, t)
        self.assert_not_subtype(t, s)
mypy-0.761/mypy/test/testtransform.py0000644€tŠÔÚ€2›s®0000000517013576752246024165 0ustar jukkaDROPBOX\Domain Users00000000000000"""Identity AST transform test cases"""

import os.path

from mypy import build
from mypy.modulefinder import BuildSource
from mypy.test.helpers import (
    assert_string_arrays_equal, normalize_error_messages, parse_options
)
from mypy.test.data import DataDrivenTestCase, DataSuite
from mypy.test.config import test_temp_dir
from mypy.test.visitors import TypeAssertTransformVisitor
from mypy.errors import CompileError


class TransformSuite(DataSuite):
    required_out_section = True
    # Reuse semantic analysis test cases.
files = ['semanal-basic.test', 'semanal-expressions.test', 'semanal-classes.test', 'semanal-types.test', 'semanal-modules.test', 'semanal-statements.test', 'semanal-abstractclasses.test', 'semanal-python2.test'] native_sep = True def run_case(self, testcase: DataDrivenTestCase) -> None: test_transform(testcase) def test_transform(testcase: DataDrivenTestCase) -> None: """Perform an identity transform test case.""" try: src = '\n'.join(testcase.input) options = parse_options(src, testcase, 1) options.use_builtins_fixtures = True options.semantic_analysis_only = True options.show_traceback = True result = build.build(sources=[BuildSource('main', None, src)], options=options, alt_lib_path=test_temp_dir) a = result.errors if a: raise CompileError(a) # Include string representations of the source files in the actual # output. for fnam in sorted(result.files.keys()): f = result.files[fnam] # Omit the builtins module and files with a special marker in the # path. # TODO the test is not reliable if (not f.path.endswith((os.sep + 'builtins.pyi', 'typing.pyi', 'abc.pyi')) and not os.path.basename(f.path).startswith('_') and not os.path.splitext( os.path.basename(f.path))[0].endswith('_')): t = TypeAssertTransformVisitor() f = t.mypyfile(f) a += str(f).split('\n') except CompileError as e: a = e.messages if testcase.normalize_output: a = normalize_error_messages(a) assert_string_arrays_equal( testcase.output, a, 'Invalid semantic analyzer output ({}, line {})'.format(testcase.file, testcase.line)) mypy-0.761/mypy/test/testtypegen.py0000644€tŠÔÚ€2›s®0000000560113576752246023624 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for the type checker: exporting inferred types""" import re from mypy import build from mypy.modulefinder import BuildSource from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal from mypy.test.visitors import SkippedNodeSearcher, ignore_node 
from mypy.util import short_type from mypy.nodes import NameExpr from mypy.errors import CompileError from mypy.options import Options class TypeExportSuite(DataSuite): required_out_section = True files = ['typexport-basic.test'] def run_case(self, testcase: DataDrivenTestCase) -> None: try: line = testcase.input[0] mask = '' if line.startswith('##'): mask = '(' + line[2:].strip() + ')$' src = '\n'.join(testcase.input) options = Options() options.strict_optional = False # TODO: Enable strict optional checking options.use_builtins_fixtures = True options.show_traceback = True options.export_types = True options.preserve_asts = True result = build.build(sources=[BuildSource('main', None, src)], options=options, alt_lib_path=test_temp_dir) a = result.errors map = result.types nodes = map.keys() # Ignore NameExpr nodes of variables with explicit (trivial) types # to simplify output. searcher = SkippedNodeSearcher() for file in result.files.values(): file.accept(searcher) ignored = searcher.nodes # Filter nodes that should be included in the output. keys = [] for node in nodes: if node.line is not None and node.line != -1 and map[node]: if ignore_node(node) or node in ignored: continue if (re.match(mask, short_type(node)) or (isinstance(node, NameExpr) and re.match(mask, node.name))): # Include node in output. 
keys.append(node) for key in sorted(keys, key=lambda n: (n.line, short_type(n), str(n) + str(map[n]))): ts = str(map[key]).replace('*', '') # Remove erased tags ts = ts.replace('__main__.', '') a.append('{}({}) : {}'.format(short_type(key), key.line, ts)) except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, 'Invalid type checker output ({}, line {})'.format(testcase.file, testcase.line)) mypy-0.761/mypy/test/testtypes.py0000644€tŠÔÚ€2›s®0000012531713576752246023324 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for mypy types and type operations.""" from typing import List, Tuple from mypy.test.helpers import Suite, assert_equal, assert_true, assert_false, assert_type, skip from mypy.erasetype import erase_type from mypy.expandtype import expand_type from mypy.join import join_types, join_simple from mypy.meet import meet_types, narrow_declared_type from mypy.sametypes import is_same_type from mypy.indirection import TypeIndirectionVisitor from mypy.types import ( UnboundType, AnyType, CallableType, TupleType, TypeVarDef, Type, Instance, NoneType, Overloaded, TypeType, UnionType, UninhabitedType, TypeVarId, TypeOfAny, LiteralType, get_proper_type ) from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, CONTRAVARIANT, INVARIANT, COVARIANT from mypy.subtypes import is_subtype, is_more_precise, is_proper_subtype from mypy.test.typefixture import TypeFixture, InterfaceTypeFixture from mypy.state import strict_optional_set from mypy.typeops import true_only, false_only class TypesSuite(Suite): def setUp(self) -> None: self.x = UnboundType('X') # Helpers self.y = UnboundType('Y') self.fx = TypeFixture() self.function = self.fx.function def test_any(self) -> None: assert_equal(str(AnyType(TypeOfAny.special_form)), 'Any') def test_simple_unbound_type(self) -> None: u = UnboundType('Foo') assert_equal(str(u), 'Foo?') def test_generic_unbound_type(self) -> None: u = UnboundType('Foo', [UnboundType('T'), 
AnyType(TypeOfAny.special_form)]) assert_equal(str(u), 'Foo?[T?, Any]') def test_callable_type(self) -> None: c = CallableType([self.x, self.y], [ARG_POS, ARG_POS], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c), 'def (X?, Y?) -> Any') c2 = CallableType([], [], [], NoneType(), self.fx.function) assert_equal(str(c2), 'def ()') def test_callable_type_with_default_args(self) -> None: c = CallableType([self.x, self.y], [ARG_POS, ARG_OPT], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c), 'def (X?, Y? =) -> Any') c2 = CallableType([self.x, self.y], [ARG_OPT, ARG_OPT], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c2), 'def (X? =, Y? =) -> Any') def test_callable_type_with_var_args(self) -> None: c = CallableType([self.x], [ARG_STAR], [None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c), 'def (*X?) -> Any') c2 = CallableType([self.x, self.y], [ARG_POS, ARG_STAR], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c2), 'def (X?, *Y?) -> Any') c3 = CallableType([self.x, self.y], [ARG_OPT, ARG_STAR], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c3), 'def (X? =, *Y?) -> Any') def test_tuple_type(self) -> None: assert_equal(str(TupleType([], self.fx.std_tuple)), 'Tuple[]') assert_equal(str(TupleType([self.x], self.fx.std_tuple)), 'Tuple[X?]') assert_equal(str(TupleType([self.x, AnyType(TypeOfAny.special_form)], self.fx.std_tuple)), 'Tuple[X?, Any]') def test_type_variable_binding(self) -> None: assert_equal(str(TypeVarDef('X', 'X', 1, [], self.fx.o)), 'X') assert_equal(str(TypeVarDef('X', 'X', 1, [self.x, self.y], self.fx.o)), 'X in (X?, Y?)') def test_generic_function_type(self) -> None: c = CallableType([self.x, self.y], [ARG_POS, ARG_POS], [None, None], self.y, self.function, name=None, variables=[TypeVarDef('X', 'X', -1, [], self.fx.o)]) assert_equal(str(c), 'def [X] (X?, Y?) 
-> Y?') v = [TypeVarDef('Y', 'Y', -1, [], self.fx.o), TypeVarDef('X', 'X', -2, [], self.fx.o)] c2 = CallableType([], [], [], NoneType(), self.function, name=None, variables=v) assert_equal(str(c2), 'def [Y, X] ()') def test_type_alias_expand_once(self) -> None: A, target = self.fx.def_alias_1(self.fx.a) assert get_proper_type(A) == target assert get_proper_type(target) == target A, target = self.fx.def_alias_2(self.fx.a) assert get_proper_type(A) == target assert get_proper_type(target) == target def test_type_alias_expand_all(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) assert A.expand_all_if_possible() is None A, _ = self.fx.def_alias_2(self.fx.a) assert A.expand_all_if_possible() is None B = self.fx.non_rec_alias(self.fx.a) C = self.fx.non_rec_alias(TupleType([B, B], Instance(self.fx.std_tuplei, [B]))) assert C.expand_all_if_possible() == TupleType([self.fx.a, self.fx.a], Instance(self.fx.std_tuplei, [self.fx.a])) def test_indirection_no_infinite_recursion(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) visitor = TypeIndirectionVisitor() modules = A.accept(visitor) assert modules == {'__main__', 'builtins'} A, _ = self.fx.def_alias_2(self.fx.a) visitor = TypeIndirectionVisitor() modules = A.accept(visitor) assert modules == {'__main__', 'builtins'} class TypeOpsSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture(INVARIANT) self.fx_co = TypeFixture(COVARIANT) self.fx_contra = TypeFixture(CONTRAVARIANT) # expand_type def test_trivial_expand(self) -> None: for t in (self.fx.a, self.fx.o, self.fx.t, self.fx.nonet, self.tuple(self.fx.a), self.callable([], self.fx.a, self.fx.a), self.fx.anyt): self.assert_expand(t, [], t) self.assert_expand(t, [], t) self.assert_expand(t, [], t) def test_trivial_expand_recursive(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) self.assert_expand(A, [], A) A, _ = self.fx.def_alias_2(self.fx.a) self.assert_expand(A, [], A) def test_expand_naked_type_var(self) -> None: self.assert_expand(self.fx.t, 
[(self.fx.t.id, self.fx.a)], self.fx.a) self.assert_expand(self.fx.t, [(self.fx.s.id, self.fx.a)], self.fx.t) def test_expand_basic_generic_types(self) -> None: self.assert_expand(self.fx.gt, [(self.fx.t.id, self.fx.a)], self.fx.ga) # IDEA: Add test cases for # tuple types # callable types # multiple arguments def assert_expand(self, orig: Type, map_items: List[Tuple[TypeVarId, Type]], result: Type, ) -> None: lower_bounds = {} for id, t in map_items: lower_bounds[id] = t exp = expand_type(orig, lower_bounds) # Remove erased tags (asterisks). assert_equal(str(exp).replace('*', ''), str(result)) # erase_type def test_trivial_erase(self) -> None: for t in (self.fx.a, self.fx.o, self.fx.nonet, self.fx.anyt): self.assert_erase(t, t) def test_erase_with_type_variable(self) -> None: self.assert_erase(self.fx.t, self.fx.anyt) def test_erase_with_generic_type(self) -> None: self.assert_erase(self.fx.ga, self.fx.gdyn) self.assert_erase(self.fx.hab, Instance(self.fx.hi, [self.fx.anyt, self.fx.anyt])) def test_erase_with_generic_type_recursive(self) -> None: tuple_any = Instance(self.fx.std_tuplei, [AnyType(TypeOfAny.explicit)]) A, _ = self.fx.def_alias_1(self.fx.a) self.assert_erase(A, tuple_any) A, _ = self.fx.def_alias_2(self.fx.a) self.assert_erase(A, UnionType([self.fx.a, tuple_any])) def test_erase_with_tuple_type(self) -> None: self.assert_erase(self.tuple(self.fx.a), self.fx.std_tuple) def test_erase_with_function_type(self) -> None: self.assert_erase(self.fx.callable(self.fx.a, self.fx.b), CallableType(arg_types=[self.fx.anyt, self.fx.anyt], arg_kinds=[ARG_STAR, ARG_STAR2], arg_names=[None, None], ret_type=self.fx.anyt, fallback=self.fx.function)) def test_erase_with_type_object(self) -> None: self.assert_erase(self.fx.callable_type(self.fx.a, self.fx.b), CallableType(arg_types=[self.fx.anyt, self.fx.anyt], arg_kinds=[ARG_STAR, ARG_STAR2], arg_names=[None, None], ret_type=self.fx.anyt, fallback=self.fx.type_type)) def test_erase_with_type_type(self) -> None: 
self.assert_erase(self.fx.type_a, self.fx.type_a) self.assert_erase(self.fx.type_t, self.fx.type_any) def assert_erase(self, orig: Type, result: Type) -> None: assert_equal(str(erase_type(orig)), str(result)) # is_more_precise def test_is_more_precise(self) -> None: fx = self.fx assert_true(is_more_precise(fx.b, fx.a)) assert_true(is_more_precise(fx.b, fx.b)) assert_true(is_more_precise(fx.b, fx.b)) assert_true(is_more_precise(fx.b, fx.anyt)) assert_true(is_more_precise(self.tuple(fx.b, fx.a), self.tuple(fx.b, fx.a))) assert_true(is_more_precise(self.tuple(fx.b, fx.b), self.tuple(fx.b, fx.a))) assert_false(is_more_precise(fx.a, fx.b)) assert_false(is_more_precise(fx.anyt, fx.b)) # is_proper_subtype def test_is_proper_subtype(self) -> None: fx = self.fx assert_true(is_proper_subtype(fx.a, fx.a)) assert_true(is_proper_subtype(fx.b, fx.a)) assert_true(is_proper_subtype(fx.b, fx.o)) assert_true(is_proper_subtype(fx.b, fx.o)) assert_false(is_proper_subtype(fx.a, fx.b)) assert_false(is_proper_subtype(fx.o, fx.b)) assert_true(is_proper_subtype(fx.anyt, fx.anyt)) assert_false(is_proper_subtype(fx.a, fx.anyt)) assert_false(is_proper_subtype(fx.anyt, fx.a)) assert_true(is_proper_subtype(fx.ga, fx.ga)) assert_true(is_proper_subtype(fx.gdyn, fx.gdyn)) assert_false(is_proper_subtype(fx.ga, fx.gdyn)) assert_false(is_proper_subtype(fx.gdyn, fx.ga)) assert_true(is_proper_subtype(fx.t, fx.t)) assert_false(is_proper_subtype(fx.t, fx.s)) assert_true(is_proper_subtype(fx.a, UnionType([fx.a, fx.b]))) assert_true(is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.a, fx.b, fx.c]))) assert_false(is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.b, fx.c]))) def test_is_proper_subtype_covariance(self) -> None: fx_co = self.fx_co assert_true(is_proper_subtype(fx_co.gsab, fx_co.gb)) assert_true(is_proper_subtype(fx_co.gsab, fx_co.ga)) assert_false(is_proper_subtype(fx_co.gsaa, fx_co.gb)) assert_true(is_proper_subtype(fx_co.gb, fx_co.ga)) assert_false(is_proper_subtype(fx_co.ga, 
fx_co.gb)) def test_is_proper_subtype_contravariance(self) -> None: fx_contra = self.fx_contra assert_true(is_proper_subtype(fx_contra.gsab, fx_contra.gb)) assert_false(is_proper_subtype(fx_contra.gsab, fx_contra.ga)) assert_true(is_proper_subtype(fx_contra.gsaa, fx_contra.gb)) assert_false(is_proper_subtype(fx_contra.gb, fx_contra.ga)) assert_true(is_proper_subtype(fx_contra.ga, fx_contra.gb)) def test_is_proper_subtype_invariance(self) -> None: fx = self.fx assert_true(is_proper_subtype(fx.gsab, fx.gb)) assert_false(is_proper_subtype(fx.gsab, fx.ga)) assert_false(is_proper_subtype(fx.gsaa, fx.gb)) assert_false(is_proper_subtype(fx.gb, fx.ga)) assert_false(is_proper_subtype(fx.ga, fx.gb)) def test_is_proper_subtype_and_subtype_literal_types(self) -> None: fx = self.fx lit1 = LiteralType(1, fx.a) lit2 = LiteralType("foo", fx.d) lit3 = LiteralType("bar", fx.d) assert_true(is_proper_subtype(lit1, fx.a)) assert_false(is_proper_subtype(lit1, fx.d)) assert_false(is_proper_subtype(fx.a, lit1)) assert_true(is_proper_subtype(fx.uninhabited, lit1)) assert_false(is_proper_subtype(lit1, fx.uninhabited)) assert_true(is_proper_subtype(lit1, lit1)) assert_false(is_proper_subtype(lit1, lit2)) assert_false(is_proper_subtype(lit2, lit3)) assert_true(is_subtype(lit1, fx.a)) assert_false(is_subtype(lit1, fx.d)) assert_false(is_subtype(fx.a, lit1)) assert_true(is_subtype(fx.uninhabited, lit1)) assert_false(is_subtype(lit1, fx.uninhabited)) assert_true(is_subtype(lit1, lit1)) assert_false(is_subtype(lit1, lit2)) assert_false(is_subtype(lit2, lit3)) assert_false(is_proper_subtype(lit1, fx.anyt)) assert_false(is_proper_subtype(fx.anyt, lit1)) assert_true(is_subtype(lit1, fx.anyt)) assert_true(is_subtype(fx.anyt, lit1)) def test_subtype_aliases(self) -> None: A1, _ = self.fx.def_alias_1(self.fx.a) AA1, _ = self.fx.def_alias_1(self.fx.a) assert_true(is_subtype(A1, AA1)) assert_true(is_subtype(AA1, A1)) A2, _ = self.fx.def_alias_2(self.fx.a) AA2, _ = self.fx.def_alias_2(self.fx.a) 
assert_true(is_subtype(A2, AA2)) assert_true(is_subtype(AA2, A2)) B1, _ = self.fx.def_alias_1(self.fx.b) B2, _ = self.fx.def_alias_2(self.fx.b) assert_true(is_subtype(B1, A1)) assert_true(is_subtype(B2, A2)) assert_false(is_subtype(A1, B1)) assert_false(is_subtype(A2, B2)) assert_false(is_subtype(A2, A1)) assert_true(is_subtype(A1, A2)) # can_be_true / can_be_false def test_empty_tuple_always_false(self) -> None: tuple_type = self.tuple() assert_true(tuple_type.can_be_false) assert_false(tuple_type.can_be_true) def test_nonempty_tuple_always_true(self) -> None: tuple_type = self.tuple(AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)) assert_true(tuple_type.can_be_true) assert_false(tuple_type.can_be_false) def test_union_can_be_true_if_any_true(self) -> None: union_type = UnionType([self.fx.a, self.tuple()]) assert_true(union_type.can_be_true) def test_union_can_not_be_true_if_none_true(self) -> None: union_type = UnionType([self.tuple(), self.tuple()]) assert_false(union_type.can_be_true) def test_union_can_be_false_if_any_false(self) -> None: union_type = UnionType([self.fx.a, self.tuple()]) assert_true(union_type.can_be_false) def test_union_can_not_be_false_if_none_false(self) -> None: union_type = UnionType([self.tuple(self.fx.a), self.tuple(self.fx.d)]) assert_false(union_type.can_be_false) # true_only / false_only def test_true_only_of_false_type_is_uninhabited(self) -> None: to = true_only(NoneType()) assert_type(UninhabitedType, to) def test_true_only_of_true_type_is_idempotent(self) -> None: always_true = self.tuple(AnyType(TypeOfAny.special_form)) to = true_only(always_true) assert_true(always_true is to) def test_true_only_of_instance(self) -> None: to = true_only(self.fx.a) assert_equal(str(to), "A") assert_true(to.can_be_true) assert_false(to.can_be_false) assert_type(Instance, to) # The original class still can be false assert_true(self.fx.a.can_be_false) def test_true_only_of_union(self) -> None: tup_type = 
self.tuple(AnyType(TypeOfAny.special_form)) # Union of something that is unknown, something that is always true, something # that is always false union_type = UnionType([self.fx.a, tup_type, self.tuple()]) to = true_only(union_type) assert isinstance(to, UnionType) assert_equal(len(to.items), 2) assert_true(to.items[0].can_be_true) assert_false(to.items[0].can_be_false) assert_true(to.items[1] is tup_type) def test_false_only_of_true_type_is_uninhabited(self) -> None: with strict_optional_set(True): fo = false_only(self.tuple(AnyType(TypeOfAny.special_form))) assert_type(UninhabitedType, fo) def test_false_only_tuple(self) -> None: with strict_optional_set(False): fo = false_only(self.tuple(self.fx.a)) assert_equal(fo, NoneType()) with strict_optional_set(True): fo = false_only(self.tuple(self.fx.a)) assert_equal(fo, UninhabitedType()) def test_false_only_of_false_type_is_idempotent(self) -> None: always_false = NoneType() fo = false_only(always_false) assert_true(always_false is fo) def test_false_only_of_instance(self) -> None: fo = false_only(self.fx.a) assert_equal(str(fo), "A") assert_false(fo.can_be_true) assert_true(fo.can_be_false) assert_type(Instance, fo) # The original class still can be true assert_true(self.fx.a.can_be_true) def test_false_only_of_union(self) -> None: with strict_optional_set(True): tup_type = self.tuple() # Union of something that is unknown, something that is always true, something # that is always false union_type = UnionType([self.fx.a, self.tuple(AnyType(TypeOfAny.special_form)), tup_type]) assert_equal(len(union_type.items), 3) fo = false_only(union_type) assert isinstance(fo, UnionType) assert_equal(len(fo.items), 2) assert_false(fo.items[0].can_be_true) assert_true(fo.items[0].can_be_false) assert_true(fo.items[1] is tup_type) # Helpers def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) def callable(self, vars: List[str], *a: Type) -> CallableType: """callable(args, a1, ..., an, r) constructs a 
callable with argument types a1, ... an and return type r and type arguments vars. """ tv = [] # type: List[TypeVarDef] n = -1 for v in vars: tv.append(TypeVarDef(v, v, n, [], self.fx.o)) n -= 1 return CallableType(list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.fx.function, name=None, variables=tv) class JoinSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture() def test_trivial_cases(self) -> None: for simple in self.fx.a, self.fx.o, self.fx.b: self.assert_join(simple, simple, simple) def test_class_subtyping(self) -> None: self.assert_join(self.fx.a, self.fx.o, self.fx.o) self.assert_join(self.fx.b, self.fx.o, self.fx.o) self.assert_join(self.fx.a, self.fx.d, self.fx.o) self.assert_join(self.fx.b, self.fx.c, self.fx.a) self.assert_join(self.fx.b, self.fx.d, self.fx.o) def test_tuples(self) -> None: self.assert_join(self.tuple(), self.tuple(), self.tuple()) self.assert_join(self.tuple(self.fx.a), self.tuple(self.fx.a), self.tuple(self.fx.a)) self.assert_join(self.tuple(self.fx.b, self.fx.c), self.tuple(self.fx.a, self.fx.d), self.tuple(self.fx.a, self.fx.o)) self.assert_join(self.tuple(self.fx.a, self.fx.a), self.fx.std_tuple, self.fx.o) self.assert_join(self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), self.fx.o) def test_function_types(self) -> None: self.assert_join(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b)) self.assert_join(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.b, self.fx.b), self.callable(self.fx.b, self.fx.b)) self.assert_join(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.a), self.callable(self.fx.a, self.fx.a)) self.assert_join(self.callable(self.fx.a, self.fx.b), self.fx.function, self.fx.function) self.assert_join(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.d, self.fx.b), self.fx.function) def test_type_vars(self) -> None: self.assert_join(self.fx.t, self.fx.t, self.fx.t) 
self.assert_join(self.fx.s, self.fx.s, self.fx.s) self.assert_join(self.fx.t, self.fx.s, self.fx.o) def test_none(self) -> None: # Any type t joined with None results in t. for t in [NoneType(), self.fx.a, self.fx.o, UnboundType('x'), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b), self.fx.anyt]: self.assert_join(t, NoneType(), t) def test_unbound_type(self) -> None: self.assert_join(UnboundType('x'), UnboundType('x'), self.fx.anyt) self.assert_join(UnboundType('x'), UnboundType('y'), self.fx.anyt) # Any type t joined with an unbound type results in dynamic. Unbound # type means that there is an error somewhere in the program, so this # does not affect type safety (whatever the result). for t in [self.fx.a, self.fx.o, self.fx.ga, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_join(t, UnboundType('X'), self.fx.anyt) def test_any_type(self) -> None: # Join against 'Any' type always results in 'Any'. for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneType(), UnboundType('x'), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_join(t, self.fx.anyt, self.fx.anyt) def test_mixed_truth_restricted_type_simple(self) -> None: # join_simple against differently restricted truthiness types drops restrictions. true_a = true_only(self.fx.a) false_o = false_only(self.fx.o) j = join_simple(self.fx.o, true_a, false_o) assert_true(j.can_be_true) assert_true(j.can_be_false) def test_mixed_truth_restricted_type(self) -> None: # join_types against differently restricted truthiness types drops restrictions. true_any = true_only(AnyType(TypeOfAny.special_form)) false_o = false_only(self.fx.o) j = join_types(true_any, false_o) assert_true(j.can_be_true) assert_true(j.can_be_false) def test_other_mixed_types(self) -> None: # In general, joining unrelated types produces object. 
for t1 in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: for t2 in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: if str(t1) != str(t2): self.assert_join(t1, t2, self.fx.o) def test_simple_generics(self) -> None: self.assert_join(self.fx.ga, self.fx.ga, self.fx.ga) self.assert_join(self.fx.ga, self.fx.gb, self.fx.ga) self.assert_join(self.fx.ga, self.fx.gd, self.fx.o) self.assert_join(self.fx.ga, self.fx.g2a, self.fx.o) self.assert_join(self.fx.ga, self.fx.nonet, self.fx.ga) self.assert_join(self.fx.ga, self.fx.anyt, self.fx.anyt) for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_join(t, self.fx.ga, self.fx.o) def test_generics_with_multiple_args(self) -> None: self.assert_join(self.fx.hab, self.fx.hab, self.fx.hab) self.assert_join(self.fx.hab, self.fx.hbb, self.fx.hab) self.assert_join(self.fx.had, self.fx.haa, self.fx.o) def test_generics_with_inheritance(self) -> None: self.assert_join(self.fx.gsab, self.fx.gb, self.fx.gb) self.assert_join(self.fx.gsba, self.fx.gb, self.fx.ga) self.assert_join(self.fx.gsab, self.fx.gd, self.fx.o) def test_generics_with_inheritance_and_shared_supertype(self) -> None: self.assert_join(self.fx.gsba, self.fx.gs2a, self.fx.ga) self.assert_join(self.fx.gsab, self.fx.gs2a, self.fx.ga) self.assert_join(self.fx.gsab, self.fx.gs2d, self.fx.o) def test_generic_types_and_any(self) -> None: self.assert_join(self.fx.gdyn, self.fx.ga, self.fx.gdyn) def test_callables_with_any(self) -> None: self.assert_join(self.callable(self.fx.a, self.fx.a, self.fx.anyt, self.fx.a), self.callable(self.fx.a, self.fx.anyt, self.fx.a, self.fx.anyt), self.callable(self.fx.a, self.fx.anyt, self.fx.anyt, self.fx.anyt)) def test_overloaded(self) -> None: c = self.callable def ov(*items: CallableType) -> Overloaded: return Overloaded(list(items)) fx = self.fx func = fx.function c1 = c(fx.a, fx.a) c2 = c(fx.b, fx.b) c3 = c(fx.c, fx.c) 
self.assert_join(ov(c1, c2), c1, c1) self.assert_join(ov(c1, c2), c2, c2) self.assert_join(ov(c1, c2), ov(c1, c2), ov(c1, c2)) self.assert_join(ov(c1, c2), ov(c1, c3), c1) self.assert_join(ov(c2, c1), ov(c3, c1), c1) self.assert_join(ov(c1, c2), c3, func) def test_overloaded_with_any(self) -> None: c = self.callable def ov(*items: CallableType) -> Overloaded: return Overloaded(list(items)) fx = self.fx any = fx.anyt self.assert_join(ov(c(fx.a, fx.a), c(fx.b, fx.b)), c(any, fx.b), c(any, fx.b)) self.assert_join(ov(c(fx.a, fx.a), c(any, fx.b)), c(fx.b, fx.b), c(any, fx.b)) @skip def test_join_interface_types(self) -> None: self.assert_join(self.fx.f, self.fx.f, self.fx.f) self.assert_join(self.fx.f, self.fx.f2, self.fx.o) self.assert_join(self.fx.f, self.fx.f3, self.fx.f) @skip def test_join_interface_and_class_types(self) -> None: self.assert_join(self.fx.o, self.fx.f, self.fx.o) self.assert_join(self.fx.a, self.fx.f, self.fx.o) self.assert_join(self.fx.e, self.fx.f, self.fx.f) @skip def test_join_class_types_with_interface_result(self) -> None: # Unique result self.assert_join(self.fx.e, self.fx.e2, self.fx.f) # Ambiguous result self.assert_join(self.fx.e2, self.fx.e3, self.fx.anyt) @skip def test_generic_interfaces(self) -> None: fx = InterfaceTypeFixture() self.assert_join(fx.gfa, fx.gfa, fx.gfa) self.assert_join(fx.gfa, fx.gfb, fx.o) self.assert_join(fx.m1, fx.gfa, fx.gfa) self.assert_join(fx.m1, fx.gfb, fx.o) def test_simple_type_objects(self) -> None: t1 = self.type_callable(self.fx.a, self.fx.a) t2 = self.type_callable(self.fx.b, self.fx.b) tr = self.type_callable(self.fx.b, self.fx.a) self.assert_join(t1, t1, t1) j = join_types(t1, t1) assert isinstance(j, CallableType) assert_true(j.is_type_obj()) self.assert_join(t1, t2, tr) self.assert_join(t1, self.fx.type_type, self.fx.type_type) self.assert_join(self.fx.type_type, self.fx.type_type, self.fx.type_type) def test_type_type(self) -> None: self.assert_join(self.fx.type_a, self.fx.type_b, self.fx.type_a) 
self.assert_join(self.fx.type_b, self.fx.type_any, self.fx.type_any) self.assert_join(self.fx.type_b, self.fx.type_type, self.fx.type_type) self.assert_join(self.fx.type_b, self.fx.type_c, self.fx.type_a) self.assert_join(self.fx.type_c, self.fx.type_d, TypeType.make_normalized(self.fx.o)) self.assert_join(self.fx.type_type, self.fx.type_any, self.fx.type_type) self.assert_join(self.fx.type_b, self.fx.anyt, self.fx.anyt) def test_literal_type(self) -> None: a = self.fx.a d = self.fx.d lit1 = LiteralType(1, a) lit2 = LiteralType(2, a) lit3 = LiteralType("foo", d) self.assert_join(lit1, lit1, lit1) self.assert_join(lit1, a, a) self.assert_join(lit1, d, self.fx.o) self.assert_join(lit1, lit2, a) self.assert_join(lit1, lit3, self.fx.o) self.assert_join(lit1, self.fx.anyt, self.fx.anyt) self.assert_join(UnionType([lit1, lit2]), lit2, UnionType([lit1, lit2])) self.assert_join(UnionType([lit1, lit2]), a, a) self.assert_join(UnionType([lit1, lit3]), a, UnionType([a, lit3])) self.assert_join(UnionType([d, lit3]), lit3, UnionType([d, lit3])) self.assert_join(UnionType([d, lit3]), d, UnionType([d, lit3])) self.assert_join(UnionType([a, lit1]), lit1, UnionType([a, lit1])) self.assert_join(UnionType([a, lit1]), lit2, UnionType([a, lit1])) self.assert_join(UnionType([lit1, lit2]), UnionType([lit1, lit2]), UnionType([lit1, lit2])) # The order in which we try joining two unions influences the # ordering of the items in the final produced unions. So, we # manually call 'assert_simple_join' and tune the output # after swapping the arguments here. self.assert_simple_join(UnionType([lit1, lit2]), UnionType([lit2, lit3]), UnionType([lit1, lit2, lit3])) self.assert_simple_join(UnionType([lit2, lit3]), UnionType([lit1, lit2]), UnionType([lit2, lit3, lit1])) # There are additional test cases in check-inference.test. # TODO: Function types + varargs and default args. 
def assert_join(self, s: Type, t: Type, join: Type) -> None: self.assert_simple_join(s, t, join) self.assert_simple_join(t, s, join) def assert_simple_join(self, s: Type, t: Type, join: Type) -> None: result = join_types(s, t) actual = str(result) expected = str(join) assert_equal(actual, expected, 'join({}, {}) == {{}} ({{}} expected)'.format(s, t)) assert_true(is_subtype(s, result), '{} not subtype of {}'.format(s, result)) assert_true(is_subtype(t, result), '{} not subtype of {}'.format(t, result)) def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. """ n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.function) def type_callable(self, *a: Type) -> CallableType: """type_callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r, and which represents a type. 
""" n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.type_type) class MeetSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture() def test_trivial_cases(self) -> None: for simple in self.fx.a, self.fx.o, self.fx.b: self.assert_meet(simple, simple, simple) def test_class_subtyping(self) -> None: self.assert_meet(self.fx.a, self.fx.o, self.fx.a) self.assert_meet(self.fx.a, self.fx.b, self.fx.b) self.assert_meet(self.fx.b, self.fx.o, self.fx.b) self.assert_meet(self.fx.a, self.fx.d, NoneType()) self.assert_meet(self.fx.b, self.fx.c, NoneType()) def test_tuples(self) -> None: self.assert_meet(self.tuple(), self.tuple(), self.tuple()) self.assert_meet(self.tuple(self.fx.a), self.tuple(self.fx.a), self.tuple(self.fx.a)) self.assert_meet(self.tuple(self.fx.b, self.fx.c), self.tuple(self.fx.a, self.fx.d), self.tuple(self.fx.b, NoneType())) self.assert_meet(self.tuple(self.fx.a, self.fx.a), self.fx.std_tuple, self.tuple(self.fx.a, self.fx.a)) self.assert_meet(self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), NoneType()) def test_function_types(self) -> None: self.assert_meet(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b)) self.assert_meet(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.b, self.fx.b), self.callable(self.fx.a, self.fx.b)) self.assert_meet(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.a), self.callable(self.fx.a, self.fx.b)) def test_type_vars(self) -> None: self.assert_meet(self.fx.t, self.fx.t, self.fx.t) self.assert_meet(self.fx.s, self.fx.s, self.fx.s) self.assert_meet(self.fx.t, self.fx.s, NoneType()) def test_none(self) -> None: self.assert_meet(NoneType(), NoneType(), NoneType()) self.assert_meet(NoneType(), self.fx.anyt, NoneType()) # Any type t joined with None results in None, unless t is Any. 
for t in [self.fx.a, self.fx.o, UnboundType('x'), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_meet(t, NoneType(), NoneType()) def test_unbound_type(self) -> None: self.assert_meet(UnboundType('x'), UnboundType('x'), self.fx.anyt) self.assert_meet(UnboundType('x'), UnboundType('y'), self.fx.anyt) self.assert_meet(UnboundType('x'), self.fx.anyt, UnboundType('x')) # The meet of any type t with an unbound type results in dynamic. # Unbound type means that there is an error somewhere in the program, # so this does not affect type safety. for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_meet(t, UnboundType('X'), self.fx.anyt) def test_dynamic_type(self) -> None: # Meet against dynamic type always results in dynamic. for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneType(), UnboundType('x'), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_meet(t, self.fx.anyt, t) def test_simple_generics(self) -> None: self.assert_meet(self.fx.ga, self.fx.ga, self.fx.ga) self.assert_meet(self.fx.ga, self.fx.o, self.fx.ga) self.assert_meet(self.fx.ga, self.fx.gb, self.fx.gb) self.assert_meet(self.fx.ga, self.fx.gd, self.fx.nonet) self.assert_meet(self.fx.ga, self.fx.g2a, self.fx.nonet) self.assert_meet(self.fx.ga, self.fx.nonet, self.fx.nonet) self.assert_meet(self.fx.ga, self.fx.anyt, self.fx.ga) for t in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_meet(t, self.fx.ga, self.fx.nonet) def test_generics_with_multiple_args(self) -> None: self.assert_meet(self.fx.hab, self.fx.hab, self.fx.hab) self.assert_meet(self.fx.hab, self.fx.haa, self.fx.hab) self.assert_meet(self.fx.hab, self.fx.had, self.fx.nonet) self.assert_meet(self.fx.hab, self.fx.hbb, self.fx.hbb) def test_generics_with_inheritance(self) -> None: self.assert_meet(self.fx.gsab, self.fx.gb, self.fx.gsab) self.assert_meet(self.fx.gsba, self.fx.gb, self.fx.nonet) def 
test_generics_with_inheritance_and_shared_supertype(self) -> None: self.assert_meet(self.fx.gsba, self.fx.gs2a, self.fx.nonet) self.assert_meet(self.fx.gsab, self.fx.gs2a, self.fx.nonet) def test_generic_types_and_dynamic(self) -> None: self.assert_meet(self.fx.gdyn, self.fx.ga, self.fx.ga) def test_callables_with_dynamic(self) -> None: self.assert_meet(self.callable(self.fx.a, self.fx.a, self.fx.anyt, self.fx.a), self.callable(self.fx.a, self.fx.anyt, self.fx.a, self.fx.anyt), self.callable(self.fx.a, self.fx.anyt, self.fx.anyt, self.fx.anyt)) def test_meet_interface_types(self) -> None: self.assert_meet(self.fx.f, self.fx.f, self.fx.f) self.assert_meet(self.fx.f, self.fx.f2, self.fx.nonet) self.assert_meet(self.fx.f, self.fx.f3, self.fx.f3) def test_meet_interface_and_class_types(self) -> None: self.assert_meet(self.fx.o, self.fx.f, self.fx.f) self.assert_meet(self.fx.a, self.fx.f, self.fx.nonet) self.assert_meet(self.fx.e, self.fx.f, self.fx.e) def test_meet_class_types_with_shared_interfaces(self) -> None: # These have nothing special with respect to meets, unlike joins. These # are for completeness only. 
self.assert_meet(self.fx.e, self.fx.e2, self.fx.nonet) self.assert_meet(self.fx.e2, self.fx.e3, self.fx.nonet) @skip def test_meet_with_generic_interfaces(self) -> None: fx = InterfaceTypeFixture() self.assert_meet(fx.gfa, fx.m1, fx.m1) self.assert_meet(fx.gfa, fx.gfa, fx.gfa) self.assert_meet(fx.gfb, fx.m1, fx.nonet) def test_type_type(self) -> None: self.assert_meet(self.fx.type_a, self.fx.type_b, self.fx.type_b) self.assert_meet(self.fx.type_b, self.fx.type_any, self.fx.type_b) self.assert_meet(self.fx.type_b, self.fx.type_type, self.fx.type_b) self.assert_meet(self.fx.type_b, self.fx.type_c, self.fx.nonet) self.assert_meet(self.fx.type_c, self.fx.type_d, self.fx.nonet) self.assert_meet(self.fx.type_type, self.fx.type_any, self.fx.type_any) self.assert_meet(self.fx.type_b, self.fx.anyt, self.fx.type_b) def test_literal_type(self) -> None: a = self.fx.a d = self.fx.d lit1 = LiteralType(1, a) lit2 = LiteralType(2, a) lit3 = LiteralType("foo", d) self.assert_meet(lit1, lit1, lit1) self.assert_meet(lit1, a, lit1) self.assert_meet_uninhabited(lit1, lit3) self.assert_meet_uninhabited(lit1, lit2) self.assert_meet(UnionType([lit1, lit2]), lit1, lit1) self.assert_meet(UnionType([lit1, lit2]), UnionType([lit2, lit3]), lit2) self.assert_meet(UnionType([lit1, lit2]), UnionType([lit1, lit2]), UnionType([lit1, lit2])) self.assert_meet(lit1, self.fx.anyt, lit1) self.assert_meet(lit1, self.fx.o, lit1) assert_true(is_same_type(lit1, narrow_declared_type(lit1, a))) assert_true(is_same_type(lit2, narrow_declared_type(lit2, a))) # FIX generic interfaces + ranges def assert_meet_uninhabited(self, s: Type, t: Type) -> None: with strict_optional_set(False): self.assert_meet(s, t, self.fx.nonet) with strict_optional_set(True): self.assert_meet(s, t, self.fx.uninhabited) def assert_meet(self, s: Type, t: Type, meet: Type) -> None: self.assert_simple_meet(s, t, meet) self.assert_simple_meet(t, s, meet) def assert_simple_meet(self, s: Type, t: Type, meet: Type) -> None: result = 
meet_types(s, t) actual = str(result) expected = str(meet) assert_equal(actual, expected, 'meet({}, {}) == {{}} ({{}} expected)'.format(s, t)) assert_true(is_subtype(result, s), '{} not subtype of {}'.format(result, s)) assert_true(is_subtype(result, t), '{} not subtype of {}'.format(result, t)) def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. """ n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.function) class SameTypeSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture() def test_literal_type(self) -> None: b = self.fx.b # Reminder: b is a subclass of a d = self.fx.d lit1 = LiteralType(1, b) lit2 = LiteralType(2, b) lit3 = LiteralType("foo", d) self.assert_same(lit1, lit1) self.assert_same(UnionType([lit1, lit2]), UnionType([lit1, lit2])) self.assert_same(UnionType([lit1, lit2]), UnionType([lit2, lit1])) self.assert_not_same(lit1, b) self.assert_not_same(lit1, lit2) self.assert_not_same(lit1, lit3) self.assert_not_same(lit1, self.fx.anyt) self.assert_not_same(lit1, self.fx.nonet) def assert_same(self, s: Type, t: Type, strict: bool = True) -> None: self.assert_simple_is_same(s, t, expected=True, strict=strict) self.assert_simple_is_same(t, s, expected=True, strict=strict) def assert_not_same(self, s: Type, t: Type, strict: bool = True) -> None: self.assert_simple_is_same(s, t, False, strict=strict) self.assert_simple_is_same(t, s, False, strict=strict) def assert_simple_is_same(self, s: Type, t: Type, expected: bool, strict: bool) -> None: actual = is_same_type(s, t) assert_equal(actual, expected, 'is_same_type({}, {}) is {{}} ({{}} expected)'.format(s, t)) if strict: actual2 = (s == t) assert_equal(actual2, expected, '({} == {}) is {{}} ({{}} expected)'.format(s, t)) assert_equal(hash(s) == hash(t), expected, '(hash({}) == hash({}) is 
{{}} ({{}} expected)'.format(s, t)) mypy-0.761/mypy/test/typefixture.py0000644€tŠÔÚ€2›s®0000002765713576752246023660 0ustar jukkaDROPBOX\Domain Users00000000000000"""Fixture used in type-related test cases. It contains class TypeInfos and Type objects. """ from typing import List, Optional, Tuple from mypy.types import ( Type, TypeVarType, AnyType, NoneType, Instance, CallableType, TypeVarDef, TypeType, UninhabitedType, TypeOfAny, TypeAliasType, UnionType ) from mypy.nodes import ( TypeInfo, ClassDef, Block, ARG_POS, ARG_OPT, ARG_STAR, SymbolTable, COVARIANT, TypeAlias ) class TypeFixture: """Helper class that is used as a fixture in type-related unit tests. The members are initialized to contain various type-related values. """ def __init__(self, variance: int = COVARIANT) -> None: # The 'object' class self.oi = self.make_type_info('builtins.object') # class object self.o = Instance(self.oi, []) # object # Type variables (these are effectively global) def make_type_var(name: str, id: int, values: List[Type], upper_bound: Type, variance: int) -> TypeVarType: return TypeVarType(TypeVarDef(name, name, id, values, upper_bound, variance)) self.t = make_type_var('T', 1, [], self.o, variance) # T`1 (type variable) self.tf = make_type_var('T', -1, [], self.o, variance) # T`-1 (type variable) self.tf2 = make_type_var('T', -2, [], self.o, variance) # T`-2 (type variable) self.s = make_type_var('S', 2, [], self.o, variance) # S`2 (type variable) self.s1 = make_type_var('S', 1, [], self.o, variance) # S`1 (type variable) self.sf = make_type_var('S', -2, [], self.o, variance) # S`-2 (type variable) self.sf1 = make_type_var('S', -1, [], self.o, variance) # S`-1 (type variable) # Simple types self.anyt = AnyType(TypeOfAny.special_form) self.nonet = NoneType() self.uninhabited = UninhabitedType() # Abstract class TypeInfos # class F self.fi = self.make_type_info('F', is_abstract=True) # class F2 self.f2i = self.make_type_info('F2', is_abstract=True) # class F3(F) self.f3i = 
self.make_type_info('F3', is_abstract=True, mro=[self.fi]) # Class TypeInfos self.std_tuplei = self.make_type_info('builtins.tuple', mro=[self.oi], typevars=['T'], variances=[COVARIANT]) # class tuple self.type_typei = self.make_type_info('builtins.type') # class type self.functioni = self.make_type_info('builtins.function') # function TODO self.ai = self.make_type_info('A', mro=[self.oi]) # class A self.bi = self.make_type_info('B', mro=[self.ai, self.oi]) # class B(A) self.ci = self.make_type_info('C', mro=[self.ai, self.oi]) # class C(A) self.di = self.make_type_info('D', mro=[self.oi]) # class D # class E(F) self.ei = self.make_type_info('E', mro=[self.fi, self.oi]) # class E2(F2, F) self.e2i = self.make_type_info('E2', mro=[self.f2i, self.fi, self.oi]) # class E3(F, F2) self.e3i = self.make_type_info('E3', mro=[self.fi, self.f2i, self.oi]) # Generic class TypeInfos # G[T] self.gi = self.make_type_info('G', mro=[self.oi], typevars=['T'], variances=[variance]) # G2[T] self.g2i = self.make_type_info('G2', mro=[self.oi], typevars=['T'], variances=[variance]) # H[S, T] self.hi = self.make_type_info('H', mro=[self.oi], typevars=['S', 'T'], variances=[variance, variance]) # GS[T, S] <: G[S] self.gsi = self.make_type_info('GS', mro=[self.gi, self.oi], typevars=['T', 'S'], variances=[variance, variance], bases=[Instance(self.gi, [self.s])]) # GS2[S] <: G[S] self.gs2i = self.make_type_info('GS2', mro=[self.gi, self.oi], typevars=['S'], variances=[variance], bases=[Instance(self.gi, [self.s1])]) # list[T] self.std_listi = self.make_type_info('builtins.list', mro=[self.oi], typevars=['T'], variances=[variance]) # Instance types self.std_tuple = Instance(self.std_tuplei, [self.anyt]) # tuple self.type_type = Instance(self.type_typei, []) # type self.function = Instance(self.functioni, []) # function TODO self.a = Instance(self.ai, []) # A self.b = Instance(self.bi, []) # B self.c = Instance(self.ci, []) # C self.d = Instance(self.di, []) # D self.e = Instance(self.ei, []) 
# E self.e2 = Instance(self.e2i, []) # E2 self.e3 = Instance(self.e3i, []) # E3 self.f = Instance(self.fi, []) # F self.f2 = Instance(self.f2i, []) # F2 self.f3 = Instance(self.f3i, []) # F3 # Generic instance types self.ga = Instance(self.gi, [self.a]) # G[A] self.gb = Instance(self.gi, [self.b]) # G[B] self.gd = Instance(self.gi, [self.d]) # G[D] self.go = Instance(self.gi, [self.o]) # G[object] self.gt = Instance(self.gi, [self.t]) # G[T`1] self.gtf = Instance(self.gi, [self.tf]) # G[T`-1] self.gtf2 = Instance(self.gi, [self.tf2]) # G[T`-2] self.gs = Instance(self.gi, [self.s]) # G[S] self.gdyn = Instance(self.gi, [self.anyt]) # G[Any] self.g2a = Instance(self.g2i, [self.a]) # G2[A] self.gsaa = Instance(self.gsi, [self.a, self.a]) # GS[A, A] self.gsab = Instance(self.gsi, [self.a, self.b]) # GS[A, B] self.gsba = Instance(self.gsi, [self.b, self.a]) # GS[B, A] self.gs2a = Instance(self.gs2i, [self.a]) # GS2[A] self.gs2b = Instance(self.gs2i, [self.b]) # GS2[B] self.gs2d = Instance(self.gs2i, [self.d]) # GS2[D] self.hab = Instance(self.hi, [self.a, self.b]) # H[A, B] self.haa = Instance(self.hi, [self.a, self.a]) # H[A, A] self.hbb = Instance(self.hi, [self.b, self.b]) # H[B, B] self.hts = Instance(self.hi, [self.t, self.s]) # H[T, S] self.had = Instance(self.hi, [self.a, self.d]) # H[A, D] self.lsta = Instance(self.std_listi, [self.a]) # List[A] self.lstb = Instance(self.std_listi, [self.b]) # List[B] self.type_a = TypeType.make_normalized(self.a) self.type_b = TypeType.make_normalized(self.b) self.type_c = TypeType.make_normalized(self.c) self.type_d = TypeType.make_normalized(self.d) self.type_t = TypeType.make_normalized(self.t) self.type_any = TypeType.make_normalized(self.anyt) # Helper methods def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. 
""" return CallableType(list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.function) def callable_type(self, *a: Type) -> CallableType: """callable_type(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r, and which represents a type. """ return CallableType(list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.type_type) def callable_default(self, min_args: int, *a: Type) -> CallableType: """callable_default(min_args, a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r, with min_args mandatory fixed arguments. """ n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * min_args + [ARG_OPT] * (n - min_args), [None] * n, a[-1], self.function) def callable_var_arg(self, min_args: int, *a: Type) -> CallableType: """callable_var_arg(min_args, a1, ..., an, r) constructs a callable with argument types a1, ... *an and return type r. """ n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * min_args + [ARG_OPT] * (n - 1 - min_args) + [ARG_STAR], [None] * n, a[-1], self.function) def make_type_info(self, name: str, module_name: Optional[str] = None, is_abstract: bool = False, mro: Optional[List[TypeInfo]] = None, bases: Optional[List[Instance]] = None, typevars: Optional[List[str]] = None, variances: Optional[List[int]] = None) -> TypeInfo: """Make a TypeInfo suitable for use in unit tests.""" class_def = ClassDef(name, Block([]), None, []) class_def.fullname = name if module_name is None: if '.' 
in name: module_name = name.rsplit('.', 1)[0] else: module_name = '__main__' if typevars: v = [] # type: List[TypeVarDef] for id, n in enumerate(typevars, 1): if variances: variance = variances[id - 1] else: variance = COVARIANT v.append(TypeVarDef(n, n, id, [], self.o, variance=variance)) class_def.type_vars = v info = TypeInfo(SymbolTable(), class_def, module_name) if mro is None: mro = [] if name != 'builtins.object': mro.append(self.oi) info.mro = [info] + mro if bases is None: if mro: # By default, assume that there is a single non-generic base. bases = [Instance(mro[0], [])] else: bases = [] info.bases = bases return info def def_alias_1(self, base: Instance) -> Tuple[TypeAliasType, Type]: A = TypeAliasType(None, []) target = Instance(self.std_tuplei, [UnionType([base, A])]) # A = Tuple[Union[base, A], ...] AN = TypeAlias(target, '__main__.A', -1, -1) A.alias = AN return A, target def def_alias_2(self, base: Instance) -> Tuple[TypeAliasType, Type]: A = TypeAliasType(None, []) target = UnionType([base, Instance(self.std_tuplei, [A])]) # A = Union[base, Tuple[A, ...]] AN = TypeAlias(target, '__main__.A', -1, -1) A.alias = AN return A, target def non_rec_alias(self, target: Type) -> TypeAliasType: AN = TypeAlias(target, '__main__.A', -1, -1) return TypeAliasType(AN, []) class InterfaceTypeFixture(TypeFixture): """Extension of TypeFixture that contains additional generic interface types.""" def __init__(self) -> None: super().__init__() # GF[T] self.gfi = self.make_type_info('GF', typevars=['T'], is_abstract=True) # M1 <: GF[A] self.m1i = self.make_type_info('M1', is_abstract=True, mro=[self.gfi, self.oi], bases=[Instance(self.gfi, [self.a])]) self.gfa = Instance(self.gfi, [self.a]) # GF[A] self.gfb = Instance(self.gfi, [self.b]) # GF[B] self.m1 = Instance(self.m1i, []) # M1 mypy-0.761/mypy/test/update.py0000644€tŠÔÚ€2›s®0000000000013576752246022517 0ustar jukkaDROPBOX\Domain 
# from testtypegen
class SkippedNodeSearcher(TraverserVisitor):
    """Collect the expressions whose types the type-output tests omit.

    Lvalues of annotated or ignorable assignments are always skipped;
    additionally, every name and integer literal inside the 'typing'
    and 'builtins' stubs is skipped.
    """

    def __init__(self) -> None:
        self.nodes = set()  # type: Set[Expression]
        self.is_typing = False

    def visit_mypy_file(self, f: MypyFile) -> None:
        self.is_typing = f.fullname in ('typing', 'builtins')
        super().visit_mypy_file(f)

    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
        if s.type or ignore_node(s.rvalue):
            self.nodes.update(lvalue for lvalue in s.lvalues
                              if isinstance(lvalue, NameExpr))
        super().visit_assignment_stmt(s)

    def visit_name_expr(self, n: NameExpr) -> None:
        self.skip_if_typing(n)

    def visit_int_expr(self, n: IntExpr) -> None:
        self.skip_if_typing(n)

    def skip_if_typing(self, n: Expression) -> None:
        # Only nodes encountered inside the typing/builtins stubs count.
        if self.is_typing:
            self.nodes.add(n)
# from testtransform
class TypeAssertTransformVisitor(TransformVisitor):
    """Identity transform that additionally asserts every type is present.

    Used by the transform tests to verify that no node in a semantically
    analyzed tree is missing its type.
    """

    def type(self, type: Type) -> Type:
        result = type
        assert result is not None
        return result
""" def __init__(self) -> None: pass # Visit methods def visit_mypy_file(self, o: MypyFile) -> None: for d in o.defs: d.accept(self) def visit_block(self, block: Block) -> None: for s in block.body: s.accept(self) def visit_func(self, o: FuncItem) -> None: if o.arguments is not None: for arg in o.arguments: init = arg.initializer if init is not None: init.accept(self) for arg in o.arguments: self.visit_var(arg.variable) o.body.accept(self) def visit_func_def(self, o: FuncDef) -> None: self.visit_func(o) def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: for item in o.items: item.accept(self) if o.impl: o.impl.accept(self) def visit_class_def(self, o: ClassDef) -> None: for d in o.decorators: d.accept(self) for base in o.base_type_exprs: base.accept(self) o.defs.accept(self) if o.analyzed: o.analyzed.accept(self) def visit_decorator(self, o: Decorator) -> None: o.func.accept(self) o.var.accept(self) for decorator in o.decorators: decorator.accept(self) def visit_expression_stmt(self, o: ExpressionStmt) -> None: o.expr.accept(self) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: o.rvalue.accept(self) for l in o.lvalues: l.accept(self) def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None: o.rvalue.accept(self) o.lvalue.accept(self) def visit_while_stmt(self, o: WhileStmt) -> None: o.expr.accept(self) o.body.accept(self) if o.else_body: o.else_body.accept(self) def visit_for_stmt(self, o: ForStmt) -> None: o.index.accept(self) o.expr.accept(self) o.body.accept(self) if o.else_body: o.else_body.accept(self) def visit_return_stmt(self, o: ReturnStmt) -> None: if o.expr is not None: o.expr.accept(self) def visit_assert_stmt(self, o: AssertStmt) -> None: if o.expr is not None: o.expr.accept(self) if o.msg is not None: o.msg.accept(self) def visit_del_stmt(self, o: DelStmt) -> None: if o.expr is not None: o.expr.accept(self) def visit_if_stmt(self, o: IfStmt) -> None: for e in o.expr: e.accept(self) for b in o.body: 
b.accept(self) if o.else_body: o.else_body.accept(self) def visit_raise_stmt(self, o: RaiseStmt) -> None: if o.expr is not None: o.expr.accept(self) if o.from_expr is not None: o.from_expr.accept(self) def visit_try_stmt(self, o: TryStmt) -> None: o.body.accept(self) for i in range(len(o.types)): tp = o.types[i] if tp is not None: tp.accept(self) o.handlers[i].accept(self) for v in o.vars: if v is not None: v.accept(self) if o.else_body is not None: o.else_body.accept(self) if o.finally_body is not None: o.finally_body.accept(self) def visit_with_stmt(self, o: WithStmt) -> None: for i in range(len(o.expr)): o.expr[i].accept(self) targ = o.target[i] if targ is not None: targ.accept(self) o.body.accept(self) def visit_member_expr(self, o: MemberExpr) -> None: o.expr.accept(self) def visit_yield_from_expr(self, o: YieldFromExpr) -> None: o.expr.accept(self) def visit_yield_expr(self, o: YieldExpr) -> None: if o.expr: o.expr.accept(self) def visit_call_expr(self, o: CallExpr) -> None: for a in o.args: a.accept(self) o.callee.accept(self) if o.analyzed: o.analyzed.accept(self) def visit_op_expr(self, o: OpExpr) -> None: o.left.accept(self) o.right.accept(self) def visit_comparison_expr(self, o: ComparisonExpr) -> None: for operand in o.operands: operand.accept(self) def visit_slice_expr(self, o: SliceExpr) -> None: if o.begin_index is not None: o.begin_index.accept(self) if o.end_index is not None: o.end_index.accept(self) if o.stride is not None: o.stride.accept(self) def visit_cast_expr(self, o: CastExpr) -> None: o.expr.accept(self) def visit_reveal_expr(self, o: RevealExpr) -> None: if o.kind == REVEAL_TYPE: assert o.expr is not None o.expr.accept(self) else: # RevealLocalsExpr doesn't have an inner expression pass def visit_assignment_expr(self, o: AssignmentExpr) -> None: o.target.accept(self) o.value.accept(self) def visit_unary_expr(self, o: UnaryExpr) -> None: o.expr.accept(self) def visit_list_expr(self, o: ListExpr) -> None: for item in o.items: 
item.accept(self) def visit_tuple_expr(self, o: TupleExpr) -> None: for item in o.items: item.accept(self) def visit_dict_expr(self, o: DictExpr) -> None: for k, v in o.items: if k is not None: k.accept(self) v.accept(self) def visit_set_expr(self, o: SetExpr) -> None: for item in o.items: item.accept(self) def visit_index_expr(self, o: IndexExpr) -> None: o.base.accept(self) o.index.accept(self) if o.analyzed: o.analyzed.accept(self) def visit_generator_expr(self, o: GeneratorExpr) -> None: for index, sequence, conditions in zip(o.indices, o.sequences, o.condlists): sequence.accept(self) index.accept(self) for cond in conditions: cond.accept(self) o.left_expr.accept(self) def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> None: for index, sequence, conditions in zip(o.indices, o.sequences, o.condlists): sequence.accept(self) index.accept(self) for cond in conditions: cond.accept(self) o.key.accept(self) o.value.accept(self) def visit_list_comprehension(self, o: ListComprehension) -> None: o.generator.accept(self) def visit_set_comprehension(self, o: SetComprehension) -> None: o.generator.accept(self) def visit_conditional_expr(self, o: ConditionalExpr) -> None: o.cond.accept(self) o.if_expr.accept(self) o.else_expr.accept(self) def visit_type_application(self, o: TypeApplication) -> None: o.expr.accept(self) def visit_lambda_expr(self, o: LambdaExpr) -> None: self.visit_func(o) def visit_star_expr(self, o: StarExpr) -> None: o.expr.accept(self) def visit_backquote_expr(self, o: BackquoteExpr) -> None: o.expr.accept(self) def visit_await_expr(self, o: AwaitExpr) -> None: o.expr.accept(self) def visit_super_expr(self, o: SuperExpr) -> None: o.call.accept(self) def visit_import(self, o: Import) -> None: for a in o.assignments: a.accept(self) def visit_import_from(self, o: ImportFrom) -> None: for a in o.assignments: a.accept(self) def visit_print_stmt(self, o: PrintStmt) -> None: for arg in o.args: arg.accept(self) def visit_exec_stmt(self, o: 
class ReturnSeeker(TraverserVisitor):
    """Traverser that records whether a non-trivial return was seen."""

    def __init__(self) -> None:
        self.found = False

    def visit_return_stmt(self, o: ReturnStmt) -> None:
        # 'return' and 'return None' are considered trivial.
        trivial = (o.expr is None
                   or isinstance(o.expr, NameExpr) and o.expr.name == 'None')
        if not trivial:
            self.found = True


def has_return_statement(fdef: FuncBase) -> bool:
    """Find if a function has a non-trivial return statement.

    Plain 'return' and 'return None' don't count.
    """
    seeker = ReturnSeeker()
    fdef.accept(seeker)
    return seeker.found


class ReturnCollector(TraverserVisitor):
    """Traverser that gathers return statements of a single function.

    Nested function definitions are not entered, so only returns that
    belong to the outermost visited function are collected.
    """

    def __init__(self) -> None:
        self.return_statements = []  # type: List[ReturnStmt]
        self.inside_func = False

    def visit_func_def(self, defn: FuncDef) -> None:
        if self.inside_func:
            # Skip nested functions entirely.
            return
        self.inside_func = True
        super().visit_func_def(defn)
        self.inside_func = False

    def visit_return_stmt(self, stmt: ReturnStmt) -> None:
        self.return_statements.append(stmt)


def all_return_statements(node: Node) -> List[ReturnStmt]:
    """Return every return statement of the outermost function under node."""
    collector = ReturnCollector()
    node.accept(collector)
    return collector.return_statements
""" from typing import List, Dict, cast, Optional, Iterable from mypy.nodes import ( MypyFile, Import, Node, ImportAll, ImportFrom, FuncItem, FuncDef, OverloadedFuncDef, ClassDef, Decorator, Block, Var, OperatorAssignmentStmt, ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt, DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl, WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt, CastExpr, RevealExpr, TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr, DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr, UnaryExpr, LambdaExpr, TypeApplication, PrintStmt, SymbolTable, RefExpr, TypeVarExpr, NewTypeExpr, PromoteExpr, ComparisonExpr, TempNode, StarExpr, Statement, Expression, YieldFromExpr, NamedTupleExpr, TypedDictExpr, NonlocalDecl, SetComprehension, DictionaryComprehension, ComplexExpr, TypeAliasExpr, EllipsisExpr, YieldExpr, ExecStmt, Argument, BackquoteExpr, AwaitExpr, AssignmentExpr, OverloadPart, EnumCallExpr, REVEAL_TYPE ) from mypy.types import Type, FunctionLike, ProperType from mypy.traverser import TraverserVisitor from mypy.visitor import NodeVisitor from mypy.util import replace_object_state class TransformVisitor(NodeVisitor[Node]): """Transform a semantically analyzed AST (or subtree) to an identical copy. Use the node() method to transform an AST node. Subclass to perform a non-identity transform. Notes: * Do not duplicate TypeInfo nodes. This would generally not be desirable. * Only update some name binding cross-references, but only those that refer to Var, Decorator or FuncDef nodes, not those targeting ClassDef or TypeInfo nodes. * Types are not transformed, but you can override type() to also perform type transformation. TODO nested classes and functions have not been tested well enough """ def __init__(self) -> None: # There may be multiple references to a Var node. Keep track of # Var translations using a dictionary. 
self.var_map = {} # type: Dict[Var, Var] # These are uninitialized placeholder nodes used temporarily for nested # functions while we are transforming a top-level function. This maps an # untransformed node to a placeholder (which will later become the # transformed node). self.func_placeholder_map = {} # type: Dict[FuncDef, FuncDef] def visit_mypy_file(self, node: MypyFile) -> MypyFile: # NOTE: The 'names' and 'imports' instance variables will be empty! ignored_lines = {line: codes[:] for line, codes in node.ignored_lines.items()} new = MypyFile(self.statements(node.defs), [], node.is_bom, ignored_lines=ignored_lines) new._fullname = node._fullname new.path = node.path new.names = SymbolTable() return new def visit_import(self, node: Import) -> Import: return Import(node.ids[:]) def visit_import_from(self, node: ImportFrom) -> ImportFrom: return ImportFrom(node.id, node.relative, node.names[:]) def visit_import_all(self, node: ImportAll) -> ImportAll: return ImportAll(node.id, node.relative) def copy_argument(self, argument: Argument) -> Argument: arg = Argument( self.visit_var(argument.variable), argument.type_annotation, argument.initializer, argument.kind, ) # Refresh lines of the inner things arg.set_line(argument.line) return arg def visit_func_def(self, node: FuncDef) -> FuncDef: # Note that a FuncDef must be transformed to a FuncDef. # These contortions are needed to handle the case of recursive # references inside the function being transformed. # Set up placeholder nodes for references within this function # to other functions defined inside it. # Don't create an entry for this function itself though, # since we want self-references to point to the original # function if this is the top-level node we are transforming. 
init = FuncMapInitializer(self) for stmt in node.body.body: stmt.accept(init) new = FuncDef(node.name, [self.copy_argument(arg) for arg in node.arguments], self.block(node.body), cast(Optional[FunctionLike], self.optional_type(node.type))) self.copy_function_attributes(new, node) new._fullname = node._fullname new.is_decorated = node.is_decorated new.is_conditional = node.is_conditional new.is_abstract = node.is_abstract new.is_static = node.is_static new.is_class = node.is_class new.is_property = node.is_property new.is_final = node.is_final new.original_def = node.original_def if node in self.func_placeholder_map: # There is a placeholder definition for this function. Replace # the attributes of the placeholder with those form the transformed # function. We know that the classes will be identical (otherwise # this wouldn't work). result = self.func_placeholder_map[node] replace_object_state(result, new) return result else: return new def visit_lambda_expr(self, node: LambdaExpr) -> LambdaExpr: new = LambdaExpr([self.copy_argument(arg) for arg in node.arguments], self.block(node.body), cast(Optional[FunctionLike], self.optional_type(node.type))) self.copy_function_attributes(new, node) return new def copy_function_attributes(self, new: FuncItem, original: FuncItem) -> None: new.info = original.info new.min_args = original.min_args new.max_pos = original.max_pos new.is_overload = original.is_overload new.is_generator = original.is_generator new.line = original.line def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> OverloadedFuncDef: items = [cast(OverloadPart, item.accept(self)) for item in node.items] for newitem, olditem in zip(items, node.items): newitem.line = olditem.line new = OverloadedFuncDef(items) new._fullname = node._fullname new_type = self.optional_type(node.type) assert isinstance(new_type, ProperType) new.type = new_type new.info = node.info new.is_static = node.is_static new.is_class = node.is_class new.is_property = node.is_property 
new.is_final = node.is_final if node.impl: new.impl = cast(OverloadPart, node.impl.accept(self)) return new def visit_class_def(self, node: ClassDef) -> ClassDef: new = ClassDef(node.name, self.block(node.defs), node.type_vars, self.expressions(node.base_type_exprs), self.optional_expr(node.metaclass)) new.fullname = node.fullname new.info = node.info new.decorators = [self.expr(decorator) for decorator in node.decorators] return new def visit_global_decl(self, node: GlobalDecl) -> GlobalDecl: return GlobalDecl(node.names[:]) def visit_nonlocal_decl(self, node: NonlocalDecl) -> NonlocalDecl: return NonlocalDecl(node.names[:]) def visit_block(self, node: Block) -> Block: return Block(self.statements(node.body)) def visit_decorator(self, node: Decorator) -> Decorator: # Note that a Decorator must be transformed to a Decorator. func = self.visit_func_def(node.func) func.line = node.func.line new = Decorator(func, self.expressions(node.decorators), self.visit_var(node.var)) new.is_overload = node.is_overload return new def visit_var(self, node: Var) -> Var: # Note that a Var must be transformed to a Var. 
if node in self.var_map: return self.var_map[node] new = Var(node.name, self.optional_type(node.type)) new.line = node.line new._fullname = node._fullname new.info = node.info new.is_self = node.is_self new.is_ready = node.is_ready new.is_initialized_in_class = node.is_initialized_in_class new.is_staticmethod = node.is_staticmethod new.is_classmethod = node.is_classmethod new.is_property = node.is_property new.is_final = node.is_final new.final_value = node.final_value new.final_unset_in_class = node.final_unset_in_class new.final_set_in_init = node.final_set_in_init new.set_line(node.line) self.var_map[node] = new return new def visit_expression_stmt(self, node: ExpressionStmt) -> ExpressionStmt: return ExpressionStmt(self.expr(node.expr)) def visit_assignment_stmt(self, node: AssignmentStmt) -> AssignmentStmt: return self.duplicate_assignment(node) def duplicate_assignment(self, node: AssignmentStmt) -> AssignmentStmt: new = AssignmentStmt(self.expressions(node.lvalues), self.expr(node.rvalue), self.optional_type(node.unanalyzed_type)) new.line = node.line new.is_final_def = node.is_final_def new.type = self.optional_type(node.type) return new def visit_operator_assignment_stmt(self, node: OperatorAssignmentStmt) -> OperatorAssignmentStmt: return OperatorAssignmentStmt(node.op, self.expr(node.lvalue), self.expr(node.rvalue)) def visit_while_stmt(self, node: WhileStmt) -> WhileStmt: return WhileStmt(self.expr(node.expr), self.block(node.body), self.optional_block(node.else_body)) def visit_for_stmt(self, node: ForStmt) -> ForStmt: new = ForStmt(self.expr(node.index), self.expr(node.expr), self.block(node.body), self.optional_block(node.else_body), self.optional_type(node.unanalyzed_index_type)) new.index_type = self.optional_type(node.index_type) return new def visit_return_stmt(self, node: ReturnStmt) -> ReturnStmt: return ReturnStmt(self.optional_expr(node.expr)) def visit_assert_stmt(self, node: AssertStmt) -> AssertStmt: return AssertStmt(self.expr(node.expr), 
self.optional_expr(node.msg)) def visit_del_stmt(self, node: DelStmt) -> DelStmt: return DelStmt(self.expr(node.expr)) def visit_if_stmt(self, node: IfStmt) -> IfStmt: return IfStmt(self.expressions(node.expr), self.blocks(node.body), self.optional_block(node.else_body)) def visit_break_stmt(self, node: BreakStmt) -> BreakStmt: return BreakStmt() def visit_continue_stmt(self, node: ContinueStmt) -> ContinueStmt: return ContinueStmt() def visit_pass_stmt(self, node: PassStmt) -> PassStmt: return PassStmt() def visit_raise_stmt(self, node: RaiseStmt) -> RaiseStmt: return RaiseStmt(self.optional_expr(node.expr), self.optional_expr(node.from_expr)) def visit_try_stmt(self, node: TryStmt) -> TryStmt: return TryStmt(self.block(node.body), self.optional_names(node.vars), self.optional_expressions(node.types), self.blocks(node.handlers), self.optional_block(node.else_body), self.optional_block(node.finally_body)) def visit_with_stmt(self, node: WithStmt) -> WithStmt: new = WithStmt(self.expressions(node.expr), self.optional_expressions(node.target), self.block(node.body), self.optional_type(node.unanalyzed_type)) new.analyzed_types = [self.type(typ) for typ in node.analyzed_types] return new def visit_print_stmt(self, node: PrintStmt) -> PrintStmt: return PrintStmt(self.expressions(node.args), node.newline, self.optional_expr(node.target)) def visit_exec_stmt(self, node: ExecStmt) -> ExecStmt: return ExecStmt(self.expr(node.expr), self.optional_expr(node.globals), self.optional_expr(node.locals)) def visit_star_expr(self, node: StarExpr) -> StarExpr: return StarExpr(node.expr) def visit_int_expr(self, node: IntExpr) -> IntExpr: return IntExpr(node.value) def visit_str_expr(self, node: StrExpr) -> StrExpr: return StrExpr(node.value, node.from_python_3) def visit_bytes_expr(self, node: BytesExpr) -> BytesExpr: return BytesExpr(node.value) def visit_unicode_expr(self, node: UnicodeExpr) -> UnicodeExpr: return UnicodeExpr(node.value) def visit_float_expr(self, node: FloatExpr) 
-> FloatExpr: return FloatExpr(node.value) def visit_complex_expr(self, node: ComplexExpr) -> ComplexExpr: return ComplexExpr(node.value) def visit_ellipsis(self, node: EllipsisExpr) -> EllipsisExpr: return EllipsisExpr() def visit_name_expr(self, node: NameExpr) -> NameExpr: return self.duplicate_name(node) def duplicate_name(self, node: NameExpr) -> NameExpr: # This method is used when the transform result must be a NameExpr. # visit_name_expr() is used when there is no such restriction. new = NameExpr(node.name) self.copy_ref(new, node) new.is_special_form = node.is_special_form return new def visit_member_expr(self, node: MemberExpr) -> MemberExpr: member = MemberExpr(self.expr(node.expr), node.name) if node.def_var: # This refers to an attribute and we don't transform attributes by default, # just normal variables. member.def_var = node.def_var self.copy_ref(member, node) return member def copy_ref(self, new: RefExpr, original: RefExpr) -> None: new.kind = original.kind new.fullname = original.fullname target = original.node if isinstance(target, Var): target = self.visit_var(target) elif isinstance(target, Decorator): target = self.visit_var(target.var) elif isinstance(target, FuncDef): # Use a placeholder node for the function if it exists. 
target = self.func_placeholder_map.get(target, target) new.node = target new.is_new_def = original.is_new_def new.is_inferred_def = original.is_inferred_def def visit_yield_from_expr(self, node: YieldFromExpr) -> YieldFromExpr: return YieldFromExpr(self.expr(node.expr)) def visit_yield_expr(self, node: YieldExpr) -> YieldExpr: return YieldExpr(self.optional_expr(node.expr)) def visit_await_expr(self, node: AwaitExpr) -> AwaitExpr: return AwaitExpr(self.expr(node.expr)) def visit_call_expr(self, node: CallExpr) -> CallExpr: return CallExpr(self.expr(node.callee), self.expressions(node.args), node.arg_kinds[:], node.arg_names[:], self.optional_expr(node.analyzed)) def visit_op_expr(self, node: OpExpr) -> OpExpr: new = OpExpr(node.op, self.expr(node.left), self.expr(node.right)) new.method_type = self.optional_type(node.method_type) return new def visit_comparison_expr(self, node: ComparisonExpr) -> ComparisonExpr: new = ComparisonExpr(node.operators, self.expressions(node.operands)) new.method_types = [self.optional_type(t) for t in node.method_types] return new def visit_cast_expr(self, node: CastExpr) -> CastExpr: return CastExpr(self.expr(node.expr), self.type(node.type)) def visit_reveal_expr(self, node: RevealExpr) -> RevealExpr: if node.kind == REVEAL_TYPE: assert node.expr is not None return RevealExpr(kind=REVEAL_TYPE, expr=self.expr(node.expr)) else: # Reveal locals expressions don't have any sub expressions return node def visit_super_expr(self, node: SuperExpr) -> SuperExpr: call = self.expr(node.call) assert isinstance(call, CallExpr) new = SuperExpr(node.name, call) new.info = node.info return new def visit_assignment_expr(self, node: AssignmentExpr) -> AssignmentExpr: return AssignmentExpr(node.target, node.value) def visit_unary_expr(self, node: UnaryExpr) -> UnaryExpr: new = UnaryExpr(node.op, self.expr(node.expr)) new.method_type = self.optional_type(node.method_type) return new def visit_list_expr(self, node: ListExpr) -> ListExpr: return 
ListExpr(self.expressions(node.items)) def visit_dict_expr(self, node: DictExpr) -> DictExpr: return DictExpr([(self.expr(key) if key else None, self.expr(value)) for key, value in node.items]) def visit_tuple_expr(self, node: TupleExpr) -> TupleExpr: return TupleExpr(self.expressions(node.items)) def visit_set_expr(self, node: SetExpr) -> SetExpr: return SetExpr(self.expressions(node.items)) def visit_index_expr(self, node: IndexExpr) -> IndexExpr: new = IndexExpr(self.expr(node.base), self.expr(node.index)) if node.method_type: new.method_type = self.type(node.method_type) if node.analyzed: if isinstance(node.analyzed, TypeApplication): new.analyzed = self.visit_type_application(node.analyzed) else: new.analyzed = self.visit_type_alias_expr(node.analyzed) new.analyzed.set_line(node.analyzed.line) return new def visit_type_application(self, node: TypeApplication) -> TypeApplication: return TypeApplication(self.expr(node.expr), self.types(node.types)) def visit_list_comprehension(self, node: ListComprehension) -> ListComprehension: generator = self.duplicate_generator(node.generator) generator.set_line(node.generator.line, node.generator.column) return ListComprehension(generator) def visit_set_comprehension(self, node: SetComprehension) -> SetComprehension: generator = self.duplicate_generator(node.generator) generator.set_line(node.generator.line, node.generator.column) return SetComprehension(generator) def visit_dictionary_comprehension(self, node: DictionaryComprehension ) -> DictionaryComprehension: return DictionaryComprehension(self.expr(node.key), self.expr(node.value), [self.expr(index) for index in node.indices], [self.expr(s) for s in node.sequences], [[self.expr(cond) for cond in conditions] for conditions in node.condlists], node.is_async) def visit_generator_expr(self, node: GeneratorExpr) -> GeneratorExpr: return self.duplicate_generator(node) def duplicate_generator(self, node: GeneratorExpr) -> GeneratorExpr: return 
GeneratorExpr(self.expr(node.left_expr), [self.expr(index) for index in node.indices], [self.expr(s) for s in node.sequences], [[self.expr(cond) for cond in conditions] for conditions in node.condlists], node.is_async) def visit_slice_expr(self, node: SliceExpr) -> SliceExpr: return SliceExpr(self.optional_expr(node.begin_index), self.optional_expr(node.end_index), self.optional_expr(node.stride)) def visit_conditional_expr(self, node: ConditionalExpr) -> ConditionalExpr: return ConditionalExpr(self.expr(node.cond), self.expr(node.if_expr), self.expr(node.else_expr)) def visit_backquote_expr(self, node: BackquoteExpr) -> BackquoteExpr: return BackquoteExpr(self.expr(node.expr)) def visit_type_var_expr(self, node: TypeVarExpr) -> TypeVarExpr: return TypeVarExpr(node.name, node.fullname, self.types(node.values), self.type(node.upper_bound), variance=node.variance) def visit_type_alias_expr(self, node: TypeAliasExpr) -> TypeAliasExpr: return TypeAliasExpr(node.node) def visit_newtype_expr(self, node: NewTypeExpr) -> NewTypeExpr: res = NewTypeExpr(node.name, node.old_type, line=node.line, column=node.column) res.info = node.info return res def visit_namedtuple_expr(self, node: NamedTupleExpr) -> NamedTupleExpr: return NamedTupleExpr(node.info) def visit_enum_call_expr(self, node: EnumCallExpr) -> EnumCallExpr: return EnumCallExpr(node.info, node.items, node.values) def visit_typeddict_expr(self, node: TypedDictExpr) -> Node: return TypedDictExpr(node.info) def visit__promote_expr(self, node: PromoteExpr) -> PromoteExpr: return PromoteExpr(node.type) def visit_temp_node(self, node: TempNode) -> TempNode: return TempNode(self.type(node.type)) def node(self, node: Node) -> Node: new = node.accept(self) new.set_line(node.line) return new def mypyfile(self, node: MypyFile) -> MypyFile: new = node.accept(self) assert isinstance(new, MypyFile) new.set_line(node.line) return new def expr(self, expr: Expression) -> Expression: new = expr.accept(self) assert isinstance(new, 
Expression) new.set_line(expr.line, expr.column) return new def stmt(self, stmt: Statement) -> Statement: new = stmt.accept(self) assert isinstance(new, Statement) new.set_line(stmt.line, stmt.column) return new # Helpers # # All the node helpers also propagate line numbers. def optional_expr(self, expr: Optional[Expression]) -> Optional[Expression]: if expr: return self.expr(expr) else: return None def block(self, block: Block) -> Block: new = self.visit_block(block) new.line = block.line return new def optional_block(self, block: Optional[Block]) -> Optional[Block]: if block: return self.block(block) else: return None def statements(self, statements: List[Statement]) -> List[Statement]: return [self.stmt(stmt) for stmt in statements] def expressions(self, expressions: List[Expression]) -> List[Expression]: return [self.expr(expr) for expr in expressions] def optional_expressions(self, expressions: Iterable[Optional[Expression]] ) -> List[Optional[Expression]]: return [self.optional_expr(expr) for expr in expressions] def blocks(self, blocks: List[Block]) -> List[Block]: return [self.block(block) for block in blocks] def names(self, names: List[NameExpr]) -> List[NameExpr]: return [self.duplicate_name(name) for name in names] def optional_names(self, names: Iterable[Optional[NameExpr]]) -> List[Optional[NameExpr]]: result = [] # type: List[Optional[NameExpr]] for name in names: if name: result.append(self.duplicate_name(name)) else: result.append(None) return result def type(self, type: Type) -> Type: # Override this method to transform types. return type def optional_type(self, type: Optional[Type]) -> Optional[Type]: if type: return self.type(type) else: return None def types(self, types: List[Type]) -> List[Type]: return [self.type(type) for type in types] class FuncMapInitializer(TraverserVisitor): """This traverser creates mappings from nested FuncDefs to placeholder FuncDefs. The placeholders will later be replaced with transformed nodes. 
""" def __init__(self, transformer: TransformVisitor) -> None: self.transformer = transformer def visit_func_def(self, node: FuncDef) -> None: if node not in self.transformer.func_placeholder_map: # Haven't seen this FuncDef before, so create a placeholder node. self.transformer.func_placeholder_map[node] = FuncDef( node.name, node.arguments, node.body, None) super().visit_func_def(node) mypy-0.761/mypy/tvar_scope.py0000644€tŠÔÚ€2›s®0000000673713576752246022452 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Dict, Union from mypy.types import TypeVarDef from mypy.nodes import TypeVarExpr, SymbolTableNode class TypeVarScope: """Scope that holds bindings for type variables. Node fullname -> TypeVarDef.""" def __init__(self, parent: 'Optional[TypeVarScope]' = None, is_class_scope: bool = False, prohibited: 'Optional[TypeVarScope]' = None) -> None: """Initializer for TypeVarScope Parameters: parent: the outer scope for this scope is_class_scope: True if this represents a generic class prohibited: Type variables that aren't strictly in scope exactly, but can't be bound because they're part of an outer class's scope. 
""" self.scope = {} # type: Dict[str, TypeVarDef] self.parent = parent self.func_id = 0 self.class_id = 0 self.is_class_scope = is_class_scope self.prohibited = prohibited if parent is not None: self.func_id = parent.func_id self.class_id = parent.class_id def get_function_scope(self) -> 'Optional[TypeVarScope]': """Get the nearest parent that's a function scope, not a class scope""" it = self # type: Optional[TypeVarScope] while it is not None and it.is_class_scope: it = it.parent return it def allow_binding(self, fullname: str) -> bool: if fullname in self.scope: return False elif self.parent and not self.parent.allow_binding(fullname): return False elif self.prohibited and not self.prohibited.allow_binding(fullname): return False return True def method_frame(self) -> 'TypeVarScope': """A new scope frame for binding a method""" return TypeVarScope(self, False, None) def class_frame(self) -> 'TypeVarScope': """A new scope frame for binding a class. Prohibits *this* class's tvars""" return TypeVarScope(self.get_function_scope(), True, self) def bind_new(self, name: str, tvar_expr: TypeVarExpr) -> TypeVarDef: if self.is_class_scope: self.class_id += 1 i = self.class_id else: self.func_id -= 1 i = self.func_id tvar_def = TypeVarDef(name, tvar_expr.fullname, i, values=tvar_expr.values, upper_bound=tvar_expr.upper_bound, variance=tvar_expr.variance, line=tvar_expr.line, column=tvar_expr.column) self.scope[tvar_expr.fullname] = tvar_def return tvar_def def bind_existing(self, tvar_def: TypeVarDef) -> None: self.scope[tvar_def.fullname] = tvar_def def get_binding(self, item: Union[str, SymbolTableNode]) -> Optional[TypeVarDef]: fullname = item.fullname if isinstance(item, SymbolTableNode) else item assert fullname is not None if fullname in self.scope: return self.scope[fullname] elif self.parent is not None: return self.parent.get_binding(fullname) else: return None def __str__(self) -> str: me = ", ".join('{}: {}`{}'.format(k, v.name, v.id) for k, v in 
self.scope.items()) if self.parent is None: return me return "{} <- {}".format(str(self.parent), me) mypy-0.761/mypy/type_visitor.py0000644€tŠÔÚ€2›s®0000002563013576752246023036 0ustar jukkaDROPBOX\Domain Users00000000000000"""Type visitor classes. This module defines the type visitors that are intended to be subclassed by other code. They have been separated out into their own module to ease converting mypy to run under mypyc, since currently mypyc-extension classes can extend interpreted classes but not the other way around. Separating them out, then, allows us to compile types before we can compile everything that uses a TypeVisitor. The visitors are all re-exported from mypy.types and that is how other modules refer to them. """ from abc import abstractmethod from collections import OrderedDict from typing import Generic, TypeVar, cast, Any, List, Callable, Iterable, Optional, Set from mypy_extensions import trait T = TypeVar('T') from mypy.types import ( Type, AnyType, CallableType, Overloaded, TupleType, TypedDictType, LiteralType, RawExpressionType, Instance, NoneType, TypeType, UnionType, TypeVarType, PartialType, DeletedType, UninhabitedType, TypeVarDef, UnboundType, ErasedType, StarType, EllipsisType, TypeList, CallableArgument, PlaceholderType, TypeAliasType, get_proper_type ) @trait class TypeVisitor(Generic[T]): """Visitor class for types (Type subclasses). The parameter T is the return type of the visit methods. 
""" @abstractmethod def visit_unbound_type(self, t: UnboundType) -> T: pass @abstractmethod def visit_any(self, t: AnyType) -> T: pass @abstractmethod def visit_none_type(self, t: NoneType) -> T: pass @abstractmethod def visit_uninhabited_type(self, t: UninhabitedType) -> T: pass @abstractmethod def visit_erased_type(self, t: ErasedType) -> T: pass @abstractmethod def visit_deleted_type(self, t: DeletedType) -> T: pass @abstractmethod def visit_type_var(self, t: TypeVarType) -> T: pass @abstractmethod def visit_instance(self, t: Instance) -> T: pass @abstractmethod def visit_callable_type(self, t: CallableType) -> T: pass @abstractmethod def visit_overloaded(self, t: Overloaded) -> T: pass @abstractmethod def visit_tuple_type(self, t: TupleType) -> T: pass @abstractmethod def visit_typeddict_type(self, t: TypedDictType) -> T: pass @abstractmethod def visit_literal_type(self, t: LiteralType) -> T: pass @abstractmethod def visit_union_type(self, t: UnionType) -> T: pass @abstractmethod def visit_partial_type(self, t: PartialType) -> T: pass @abstractmethod def visit_type_type(self, t: TypeType) -> T: pass @abstractmethod def visit_type_alias_type(self, t: TypeAliasType) -> T: pass @trait class SyntheticTypeVisitor(TypeVisitor[T]): """A TypeVisitor that also knows how to visit synthetic AST constructs. Not just real types.""" @abstractmethod def visit_star_type(self, t: StarType) -> T: pass @abstractmethod def visit_type_list(self, t: TypeList) -> T: pass @abstractmethod def visit_callable_argument(self, t: CallableArgument) -> T: pass @abstractmethod def visit_ellipsis_type(self, t: EllipsisType) -> T: pass @abstractmethod def visit_raw_expression_type(self, t: RawExpressionType) -> T: pass @abstractmethod def visit_placeholder_type(self, t: PlaceholderType) -> T: pass @trait class TypeTranslator(TypeVisitor[Type]): """Identity type transformation. Subclass this and override some methods to implement a non-trivial transformation. 
""" def visit_unbound_type(self, t: UnboundType) -> Type: return t def visit_any(self, t: AnyType) -> Type: return t def visit_none_type(self, t: NoneType) -> Type: return t def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: return t def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_instance(self, t: Instance) -> Type: last_known_value = None # type: Optional[LiteralType] if t.last_known_value is not None: raw_last_known_value = t.last_known_value.accept(self) assert isinstance(raw_last_known_value, LiteralType) # type: ignore last_known_value = raw_last_known_value return Instance( typ=t.type, args=self.translate_types(t.args), line=t.line, column=t.column, last_known_value=last_known_value, ) def visit_type_var(self, t: TypeVarType) -> Type: return t def visit_partial_type(self, t: PartialType) -> Type: return t def visit_callable_type(self, t: CallableType) -> Type: return t.copy_modified(arg_types=self.translate_types(t.arg_types), ret_type=t.ret_type.accept(self), variables=self.translate_variables(t.variables)) def visit_tuple_type(self, t: TupleType) -> Type: return TupleType(self.translate_types(t.items), # TODO: This appears to be unsafe. cast(Any, t.partial_fallback.accept(self)), t.line, t.column) def visit_typeddict_type(self, t: TypedDictType) -> Type: items = OrderedDict([ (item_name, item_type.accept(self)) for (item_name, item_type) in t.items.items() ]) return TypedDictType(items, t.required_keys, # TODO: This appears to be unsafe. 
cast(Any, t.fallback.accept(self)), t.line, t.column) def visit_literal_type(self, t: LiteralType) -> Type: fallback = t.fallback.accept(self) assert isinstance(fallback, Instance) # type: ignore return LiteralType( value=t.value, fallback=fallback, line=t.line, column=t.column, ) def visit_union_type(self, t: UnionType) -> Type: return UnionType(self.translate_types(t.items), t.line, t.column) def translate_types(self, types: Iterable[Type]) -> List[Type]: return [t.accept(self) for t in types] def translate_variables(self, variables: List[TypeVarDef]) -> List[TypeVarDef]: return variables def visit_overloaded(self, t: Overloaded) -> Type: items = [] # type: List[CallableType] for item in t.items(): new = item.accept(self) assert isinstance(new, CallableType) # type: ignore items.append(new) return Overloaded(items=items) def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(t.item.accept(self), line=t.line, column=t.column) @abstractmethod def visit_type_alias_type(self, t: TypeAliasType) -> Type: # This method doesn't have a default implementation for type translators, # because type aliases are special: some information is contained in the # TypeAlias node, and we normally don't generate new nodes. Every subclass # must implement this depending on its semantics. pass @trait class TypeQuery(SyntheticTypeVisitor[T]): """Visitor for performing queries of types. strategy is used to combine results for a series of types, common use cases involve a boolean query using `any` or `all`. Note: this visitor keeps an internal state (tracks type aliases to avoid recursion), so it should *never* be re-used for querying different types, create a new visitor instance instead. # TODO: check that we don't have existing violations of this rule. """ def __init__(self, strategy: Callable[[Iterable[T]], T]) -> None: self.strategy = strategy # Keep track of the type aliases already visited. 
This is needed to avoid # infinite recursion on types like A = Union[int, List[A]]. self.seen_aliases = set() # type: Set[TypeAliasType] def visit_unbound_type(self, t: UnboundType) -> T: return self.query_types(t.args) def visit_type_list(self, t: TypeList) -> T: return self.query_types(t.items) def visit_callable_argument(self, t: CallableArgument) -> T: return t.typ.accept(self) def visit_any(self, t: AnyType) -> T: return self.strategy([]) def visit_uninhabited_type(self, t: UninhabitedType) -> T: return self.strategy([]) def visit_none_type(self, t: NoneType) -> T: return self.strategy([]) def visit_erased_type(self, t: ErasedType) -> T: return self.strategy([]) def visit_deleted_type(self, t: DeletedType) -> T: return self.strategy([]) def visit_type_var(self, t: TypeVarType) -> T: return self.query_types([t.upper_bound] + t.values) def visit_partial_type(self, t: PartialType) -> T: return self.strategy([]) def visit_instance(self, t: Instance) -> T: return self.query_types(t.args) def visit_callable_type(self, t: CallableType) -> T: # FIX generics return self.query_types(t.arg_types + [t.ret_type]) def visit_tuple_type(self, t: TupleType) -> T: return self.query_types(t.items) def visit_typeddict_type(self, t: TypedDictType) -> T: return self.query_types(t.items.values()) def visit_raw_expression_type(self, t: RawExpressionType) -> T: return self.strategy([]) def visit_literal_type(self, t: LiteralType) -> T: return self.strategy([]) def visit_star_type(self, t: StarType) -> T: return t.type.accept(self) def visit_union_type(self, t: UnionType) -> T: return self.query_types(t.items) def visit_overloaded(self, t: Overloaded) -> T: return self.query_types(t.items()) def visit_type_type(self, t: TypeType) -> T: return t.item.accept(self) def visit_ellipsis_type(self, t: EllipsisType) -> T: return self.strategy([]) def visit_placeholder_type(self, t: PlaceholderType) -> T: return self.query_types(t.args) def visit_type_alias_type(self, t: TypeAliasType) -> T: 
return get_proper_type(t).accept(self) def query_types(self, types: Iterable[Type]) -> T: """Perform a query for a list of types. Use the strategy to combine the results. Skip type aliases already visited types to avoid infinite recursion. """ res = [] # type: List[T] for t in types: if isinstance(t, TypeAliasType): # Avoid infinite recursion for recursive type aliases. # TODO: Ideally we should fire subvisitors here (or use caching) if we care # about duplicates. if t in self.seen_aliases: continue self.seen_aliases.add(t) res.append(t.accept(self)) return self.strategy(res) mypy-0.761/mypy/typeanal.py0000644€tŠÔÚ€2›s®0000015523213576752246022115 0ustar jukkaDROPBOX\Domain Users00000000000000"""Semantic analysis of types""" import itertools from itertools import chain from contextlib import contextmanager from collections import OrderedDict from typing import Callable, List, Optional, Set, Tuple, Iterator, TypeVar, Iterable from typing_extensions import Final from mypy_extensions import DefaultNamedArg from mypy.messages import MessageBuilder, quote_type_string, format_type_bare from mypy.options import Options from mypy.types import ( Type, UnboundType, TypeVarType, TupleType, TypedDictType, UnionType, Instance, AnyType, CallableType, NoneType, ErasedType, DeletedType, TypeList, TypeVarDef, SyntheticTypeVisitor, StarType, PartialType, EllipsisType, UninhabitedType, TypeType, CallableArgument, TypeQuery, union_items, TypeOfAny, LiteralType, RawExpressionType, PlaceholderType, Overloaded, get_proper_type, TypeAliasType ) from mypy.nodes import ( TypeInfo, Context, SymbolTableNode, Var, Expression, nongen_builtins, check_arg_names, check_arg_kinds, ARG_POS, ARG_NAMED, ARG_OPT, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, TypeVarExpr, TypeAlias, PlaceholderNode, SYMBOL_FUNCBASE_TYPES, Decorator, MypyFile ) from mypy.typetraverser import TypeTraverserVisitor from mypy.tvar_scope import TypeVarScope from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError from 
mypy.plugin import Plugin, TypeAnalyzerPluginInterface, AnalyzeTypeContext from mypy.semanal_shared import SemanticAnalyzerCoreInterface from mypy.errorcodes import ErrorCode from mypy import nodes, message_registry, errorcodes as codes T = TypeVar('T') type_constructors = { 'typing.Callable', 'typing.Optional', 'typing.Tuple', 'typing.Type', 'typing.Union', 'typing.Literal', 'typing_extensions.Literal', 'typing_extensions.Annotated', } # type: Final ARG_KINDS_BY_CONSTRUCTOR = { 'mypy_extensions.Arg': ARG_POS, 'mypy_extensions.DefaultArg': ARG_OPT, 'mypy_extensions.NamedArg': ARG_NAMED, 'mypy_extensions.DefaultNamedArg': ARG_NAMED_OPT, 'mypy_extensions.VarArg': ARG_STAR, 'mypy_extensions.KwArg': ARG_STAR2, } # type: Final GENERIC_STUB_NOT_AT_RUNTIME_TYPES = { 'queue.Queue', 'builtins._PathLike', } # type: Final def analyze_type_alias(node: Expression, api: SemanticAnalyzerCoreInterface, tvar_scope: TypeVarScope, plugin: Plugin, options: Options, is_typeshed_stub: bool, allow_unnormalized: bool = False, allow_placeholder: bool = False, in_dynamic_func: bool = False, global_scope: bool = True) -> Optional[Tuple[Type, Set[str]]]: """Analyze r.h.s. of a (potential) type alias definition. If `node` is valid as a type alias rvalue, return the resulting type and a set of full names of type aliases it depends on (directly or indirectly). Return None otherwise. 'node' must have been semantically analyzed. 
""" try: type = expr_to_unanalyzed_type(node) except TypeTranslationError: api.fail('Invalid type alias: expression is not a valid type', node) return None analyzer = TypeAnalyser(api, tvar_scope, plugin, options, is_typeshed_stub, allow_unnormalized=allow_unnormalized, defining_alias=True, allow_placeholder=allow_placeholder) analyzer.in_dynamic_func = in_dynamic_func analyzer.global_scope = global_scope res = type.accept(analyzer) return res, analyzer.aliases_used def no_subscript_builtin_alias(name: str, propose_alt: bool = True) -> str: msg = '"{}" is not subscriptable'.format(name.split('.')[-1]) replacement = nongen_builtins[name] if replacement and propose_alt: msg += ', use "{}" instead'.format(replacement) return msg class TypeAnalyser(SyntheticTypeVisitor[Type], TypeAnalyzerPluginInterface): """Semantic analyzer for types. Converts unbound types into bound types. This is a no-op for already bound types. If an incomplete reference is encountered, this does a defer. The caller never needs to defer. """ # Is this called from an untyped function definition? in_dynamic_func = False # type: bool # Is this called from global scope? global_scope = True # type: bool def __init__(self, api: SemanticAnalyzerCoreInterface, tvar_scope: TypeVarScope, plugin: Plugin, options: Options, is_typeshed_stub: bool, *, defining_alias: bool = False, allow_tuple_literal: bool = False, allow_unnormalized: bool = False, allow_unbound_tvars: bool = False, allow_placeholder: bool = False, report_invalid_types: bool = True) -> None: self.api = api self.lookup_qualified = api.lookup_qualified self.lookup_fqn_func = api.lookup_fully_qualified self.fail_func = api.fail self.note_func = api.note self.tvar_scope = tvar_scope # Are we analysing a type alias definition rvalue? 
self.defining_alias = defining_alias self.allow_tuple_literal = allow_tuple_literal # Positive if we are analyzing arguments of another (outer) type self.nesting_level = 0 # Should we allow unnormalized types like `list[int]` # (currently allowed in stubs)? self.allow_unnormalized = allow_unnormalized # Should we accept unbound type variables (always OK in aliases)? self.allow_unbound_tvars = allow_unbound_tvars or defining_alias # If false, record incomplete ref if we generate PlaceholderType. self.allow_placeholder = allow_placeholder # Should we report an error whenever we encounter a RawExpressionType outside # of a Literal context: e.g. whenever we encounter an invalid type? Normally, # we want to report an error, but the caller may want to do more specialized # error handling. self.report_invalid_types = report_invalid_types self.plugin = plugin self.options = options self.is_typeshed_stub = is_typeshed_stub # Names of type aliases encountered while analysing a type will be collected here. self.aliases_used = set() # type: Set[str] def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type: typ = self.visit_unbound_type_nonoptional(t, defining_literal) if t.optional: # We don't need to worry about double-wrapping Optionals or # wrapping Anys: Union simplification will take care of that. return make_optional_type(typ) return typ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) -> Type: sym = self.lookup_qualified(t.name, t) if sym is not None: node = sym.node if isinstance(node, PlaceholderNode): if node.becomes_typeinfo: # Reference to placeholder type. 
if self.api.final_iteration: self.cannot_resolve_type(t) return AnyType(TypeOfAny.from_error) elif self.allow_placeholder: self.api.defer() else: self.api.record_incomplete_ref() return PlaceholderType(node.fullname, self.anal_array(t.args), t.line) else: if self.api.final_iteration: self.cannot_resolve_type(t) return AnyType(TypeOfAny.from_error) else: # Reference to an unknown placeholder node. self.api.record_incomplete_ref() return AnyType(TypeOfAny.special_form) if node is None: self.fail('Internal error (node is None, kind={})'.format(sym.kind), t) return AnyType(TypeOfAny.special_form) fullname = node.fullname hook = self.plugin.get_type_analyze_hook(fullname) if hook is not None: return hook(AnalyzeTypeContext(t, t, self)) if (fullname in nongen_builtins and t.args and not self.allow_unnormalized): self.fail(no_subscript_builtin_alias(fullname, propose_alt=not self.defining_alias), t) tvar_def = self.tvar_scope.get_binding(sym) if isinstance(sym.node, TypeVarExpr) and tvar_def is not None and self.defining_alias: self.fail('Can\'t use bound type variable "{}"' ' to define generic alias'.format(t.name), t) return AnyType(TypeOfAny.from_error) if isinstance(sym.node, TypeVarExpr) and tvar_def is not None: if len(t.args) > 0: self.fail('Type variable "{}" used with arguments'.format(t.name), t) return TypeVarType(tvar_def, t.line) special = self.try_analyze_special_unbound_type(t, fullname) if special is not None: return special if isinstance(node, TypeAlias): self.aliases_used.add(fullname) an_args = self.anal_array(t.args) disallow_any = self.options.disallow_any_generics and not self.is_typeshed_stub res = expand_type_alias(node, an_args, self.fail, node.no_args, t, unexpanded_type=t, disallow_any=disallow_any) # The only case where expand_type_alias() can return an incorrect instance is # when it is top-level instance, so no need to recurse. 
if (isinstance(res, Instance) and # type: ignore[misc] len(res.args) != len(res.type.type_vars) and not self.defining_alias): fix_instance( res, self.fail, self.note, disallow_any=disallow_any, use_generic_error=True, unexpanded_type=t) return res elif isinstance(node, TypeInfo): return self.analyze_type_with_type_info(node, t.args, t) else: return self.analyze_unbound_type_without_type_info(t, sym, defining_literal) else: # sym is None return AnyType(TypeOfAny.special_form) def cannot_resolve_type(self, t: UnboundType) -> None: # TODO: Move error message generation to messages.py. We'd first # need access to MessageBuilder here. Also move the similar # message generation logic in semanal.py. self.api.fail( 'Cannot resolve name "{}" (possible cyclic definition)'.format(t.name), t) def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Optional[Type]: """Bind special type that is recognized through magic name such as 'typing.Any'. Return the bound type if successful, and return None if the type is a normal type. """ if fullname == 'builtins.None': return NoneType() elif fullname == 'typing.Any' or fullname == 'builtins.Any': return AnyType(TypeOfAny.explicit) elif fullname in ('typing.Final', 'typing_extensions.Final'): self.fail("Final can be only used as an outermost qualifier" " in a variable annotation", t) return AnyType(TypeOfAny.from_error) elif fullname == 'typing.Tuple': # Tuple is special because it is involved in builtin import cycle # and may be not ready when used. 
sym = self.api.lookup_fully_qualified_or_none('builtins.tuple') if not sym or isinstance(sym.node, PlaceholderNode): if self.api.is_incomplete_namespace('builtins'): self.api.record_incomplete_ref() else: self.fail("Name 'tuple' is not defined", t) return AnyType(TypeOfAny.special_form) if len(t.args) == 0 and not t.empty_tuple_index: # Bare 'Tuple' is same as 'tuple' any_type = self.get_omitted_any(t) return self.named_type('builtins.tuple', [any_type], line=t.line, column=t.column) if len(t.args) == 2 and isinstance(t.args[1], EllipsisType): # Tuple[T, ...] (uniform, variable-length tuple) instance = self.named_type('builtins.tuple', [self.anal_type(t.args[0])]) instance.line = t.line return instance return self.tuple_type(self.anal_array(t.args)) elif fullname == 'typing.Union': items = self.anal_array(t.args) return UnionType.make_union(items) elif fullname == 'typing.Optional': if len(t.args) != 1: self.fail('Optional[...] must have exactly one type argument', t) return AnyType(TypeOfAny.from_error) item = self.anal_type(t.args[0]) return make_optional_type(item) elif fullname == 'typing.Callable': return self.analyze_callable_type(t) elif fullname == 'typing.Type': if len(t.args) == 0: any_type = self.get_omitted_any(t) return TypeType(any_type, line=t.line, column=t.column) if len(t.args) != 1: self.fail('Type[...] must have exactly one type argument', t) item = self.anal_type(t.args[0]) return TypeType.make_normalized(item, line=t.line) elif fullname == 'typing.ClassVar': if self.nesting_level > 0: self.fail('Invalid type: ClassVar nested inside other type', t) if len(t.args) == 0: return AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column) if len(t.args) != 1: self.fail('ClassVar[...] 
must have at most one type argument', t) return AnyType(TypeOfAny.from_error) return self.anal_type(t.args[0]) elif fullname in ('mypy_extensions.NoReturn', 'typing.NoReturn'): return UninhabitedType(is_noreturn=True) elif fullname in ('typing_extensions.Literal', 'typing.Literal'): return self.analyze_literal_type(t) elif fullname == 'typing_extensions.Annotated': if len(t.args) < 2: self.fail("Annotated[...] must have exactly one type argument" " and at least one annotation", t) return AnyType(TypeOfAny.from_error) return self.anal_type(t.args[0]) return None def get_omitted_any(self, typ: Type, fullname: Optional[str] = None) -> AnyType: disallow_any = not self.is_typeshed_stub and self.options.disallow_any_generics return get_omitted_any(disallow_any, self.fail, self.note, typ, fullname) def analyze_type_with_type_info(self, info: TypeInfo, args: List[Type], ctx: Context) -> Type: """Bind unbound type when were able to find target TypeInfo. This handles simple cases like 'int', 'modname.UserClass[str]', etc. """ if len(args) > 0 and info.fullname == 'builtins.tuple': fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line) return TupleType(self.anal_array(args), fallback, ctx.line) # Analyze arguments and (usually) construct Instance type. The # number of type arguments and their values are # checked only later, since we do not always know the # valid count at this point. Thus we may construct an # Instance with an invalid number of type arguments. instance = Instance(info, self.anal_array(args), ctx.line, ctx.column) # Check type argument count. if len(instance.args) != len(info.type_vars) and not self.defining_alias: fix_instance(instance, self.fail, self.note, disallow_any=self.options.disallow_any_generics and not self.is_typeshed_stub) tup = info.tuple_type if tup is not None: # The class has a Tuple[...] base class so it will be # represented as a tuple type. 
if args: self.fail('Generic tuple types not supported', ctx) return AnyType(TypeOfAny.from_error) return tup.copy_modified(items=self.anal_array(tup.items), fallback=instance) td = info.typeddict_type if td is not None: # The class has a TypedDict[...] base class so it will be # represented as a typeddict type. if args: self.fail('Generic TypedDict types not supported', ctx) return AnyType(TypeOfAny.from_error) # Create a named TypedDictType return td.copy_modified(item_types=self.anal_array(list(td.items.values())), fallback=instance) return instance def analyze_unbound_type_without_type_info(self, t: UnboundType, sym: SymbolTableNode, defining_literal: bool) -> Type: """Figure out what an unbound type that doesn't refer to a TypeInfo node means. This is something unusual. We try our best to find out what it is. """ name = sym.fullname if name is None: assert sym.node is not None name = sym.node.name # Option 1: # Something with an Any type -- make it an alias for Any in a type # context. This is slightly problematic as it allows using the type 'Any' # as a base class -- however, this will fail soon at runtime so the problem # is pretty minor. if isinstance(sym.node, Var): typ = get_proper_type(sym.node.type) if isinstance(typ, AnyType): return AnyType(TypeOfAny.from_unimported_type, missing_import_name=typ.missing_import_name) # Option 2: # Unbound type variable. Currently these may be still valid, # for example when defining a generic type alias. unbound_tvar = (isinstance(sym.node, TypeVarExpr) and self.tvar_scope.get_binding(sym) is None) if self.allow_unbound_tvars and unbound_tvar: return t # Option 3: # Enum value. Note: we only want to return a LiteralType when # we're using this enum value specifically within context of # a "Literal[...]" type. So, if `defining_literal` is not set, # we bail out early with an error. # # If, in the distant future, we decide to permit things like # `def foo(x: Color.RED) -> None: ...`, we can remove that # check entirely. 
if isinstance(sym.node, Var) and sym.node.info and sym.node.info.is_enum: value = sym.node.name base_enum_short_name = sym.node.info.name if not defining_literal: msg = message_registry.INVALID_TYPE_RAW_ENUM_VALUE.format( base_enum_short_name, value) self.fail(msg, t) return AnyType(TypeOfAny.from_error) return LiteralType( value=value, fallback=Instance(sym.node.info, [], line=t.line, column=t.column), line=t.line, column=t.column, ) # None of the above options worked. We parse the args (if there are any) # to make sure there are no remaining semanal-only types, then give up. t = t.copy_modified(args=self.anal_array(t.args)) # TODO: Move this message building logic to messages.py. notes = [] # type: List[str] if isinstance(sym.node, Var): # TODO: add a link to alias docs, see #3494. message = 'Variable "{}" is not valid as a type' elif isinstance(sym.node, (SYMBOL_FUNCBASE_TYPES, Decorator)): message = 'Function "{}" is not valid as a type' notes.append('Perhaps you need "Callable[...]" or a callback protocol?') elif isinstance(sym.node, MypyFile): # TODO: suggest a protocol when supported. message = 'Module "{}" is not valid as a type' elif unbound_tvar: message = 'Type variable "{}" is unbound' short = name.split('.')[-1] notes.append(('(Hint: Use "Generic[{}]" or "Protocol[{}]" base class' ' to bind "{}" inside a class)').format(short, short, short)) notes.append('(Hint: Use "{}" in function signature to bind "{}"' ' inside a function)'.format(short, short)) else: message = 'Cannot interpret reference "{}" as a type' self.fail(message.format(name), t, code=codes.VALID_TYPE) for note in notes: self.note(note, t, code=codes.VALID_TYPE) # TODO: Would it be better to always return Any instead of UnboundType # in case of an error? On one hand, UnboundType has a name so error messages # are more detailed, on the other hand, some of them may be bogus, # see https://github.com/python/mypy/issues/4987. 
return t def visit_any(self, t: AnyType) -> Type: return t def visit_none_type(self, t: NoneType) -> Type: return t def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: # This type should exist only temporarily during type inference assert False, "Internal error: Unexpected erased type" def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_type_list(self, t: TypeList) -> Type: self.fail('Bracketed expression "[...]" is not valid as a type', t) self.note('Did you mean "List[...]"?', t) return AnyType(TypeOfAny.from_error) def visit_callable_argument(self, t: CallableArgument) -> Type: self.fail('Invalid type', t) return AnyType(TypeOfAny.from_error) def visit_instance(self, t: Instance) -> Type: return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: # TODO: should we do something here? return t def visit_type_var(self, t: TypeVarType) -> Type: return t def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: # Every Callable can bind its own type variables, if they're not in the outer scope with self.tvar_scope_frame(): if self.defining_alias: variables = t.variables else: variables = self.bind_function_type_variables(t, t) ret = t.copy_modified(arg_types=self.anal_array(t.arg_types, nested=nested), ret_type=self.anal_type(t.ret_type, nested=nested), # If the fallback isn't filled in yet, # its type will be the falsey FakeInfo fallback=(t.fallback if t.fallback.type else self.named_type('builtins.function')), variables=self.anal_var_defs(variables)) return ret def visit_overloaded(self, t: Overloaded) -> Type: # Overloaded types are manually constructed in semanal.py by analyzing the # AST and combining together the Callable types this visitor converts. # # So if we're ever asked to reanalyze an Overloaded type, we know it's # fine to just return it as-is. return t def visit_tuple_type(self, t: TupleType) -> Type: # Types such as (t1, t2, ...) 
only allowed in assignment statements. They'll # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead. if t.implicit and not self.allow_tuple_literal: self.fail('Syntax error in type annotation', t, code=codes.SYNTAX) if len(t.items) == 1: self.note('Suggestion: Is there a spurious trailing comma?', t, code=codes.SYNTAX) else: self.note('Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn)', t, code=codes.SYNTAX) return AnyType(TypeOfAny.from_error) star_count = sum(1 for item in t.items if isinstance(item, StarType)) if star_count > 1: self.fail('At most one star type allowed in a tuple', t) if t.implicit: return TupleType([AnyType(TypeOfAny.from_error) for _ in t.items], self.named_type('builtins.tuple'), t.line) else: return AnyType(TypeOfAny.from_error) any_type = AnyType(TypeOfAny.special_form) # If the fallback isn't filled in yet, its type will be the falsey FakeInfo fallback = (t.partial_fallback if t.partial_fallback.type else self.named_type('builtins.tuple', [any_type])) return TupleType(self.anal_array(t.items), fallback, t.line) def visit_typeddict_type(self, t: TypedDictType) -> Type: items = OrderedDict([ (item_name, self.anal_type(item_type)) for (item_name, item_type) in t.items.items() ]) return TypedDictType(items, set(t.required_keys), t.fallback) def visit_raw_expression_type(self, t: RawExpressionType) -> Type: # We should never see a bare Literal. We synthesize these raw literals # in the earlier stages of semantic analysis, but those # "fake literals" should always be wrapped in an UnboundType # corresponding to 'Literal'. # # Note: if at some point in the distant future, we decide to # make signatures like "foo(x: 20) -> None" legal, we can change # this method so it generates and returns an actual LiteralType # instead. if self.report_invalid_types: if t.base_type_name in ('builtins.int', 'builtins.bool'): # The only time it makes sense to use an int or bool is inside of # a literal type. 
msg = "Invalid type: try using Literal[{}] instead?".format(repr(t.literal_value)) elif t.base_type_name in ('builtins.float', 'builtins.complex'): # We special-case warnings for floats and complex numbers. msg = "Invalid type: {} literals cannot be used as a type".format(t.simple_name()) else: # And in all other cases, we default to a generic error message. # Note: the reason why we use a generic error message for strings # but not ints or bools is because whenever we see an out-of-place # string, it's unclear if the user meant to construct a literal type # or just misspelled a regular type. So we avoid guessing. msg = 'Invalid type comment or annotation' self.fail(msg, t, code=codes.VALID_TYPE) if t.note is not None: self.note(t.note, t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error, line=t.line, column=t.column) def visit_literal_type(self, t: LiteralType) -> Type: return t def visit_star_type(self, t: StarType) -> Type: return StarType(self.anal_type(t.type), t.line) def visit_union_type(self, t: UnionType) -> Type: return UnionType(self.anal_array(t.items), t.line) def visit_partial_type(self, t: PartialType) -> Type: assert False, "Internal error: Unexpected partial type" def visit_ellipsis_type(self, t: EllipsisType) -> Type: self.fail("Unexpected '...'", t) return AnyType(TypeOfAny.from_error) def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(self.anal_type(t.item), line=t.line) def visit_placeholder_type(self, t: PlaceholderType) -> Type: n = None if t.fullname is None else self.api.lookup_fully_qualified(t.fullname) if not n or isinstance(n.node, PlaceholderNode): self.api.defer() # Still incomplete return t else: # TODO: Handle non-TypeInfo assert isinstance(n.node, TypeInfo) return self.analyze_type_with_type_info(n.node, t.args, t) def analyze_callable_type(self, t: UnboundType) -> Type: fallback = self.named_type('builtins.function') if len(t.args) == 0: # Callable (bare). Treat as Callable[..., Any]. 
            any_type = self.get_omitted_any(t)
            ret = CallableType([any_type, any_type],
                               [nodes.ARG_STAR, nodes.ARG_STAR2],
                               [None, None],
                               ret_type=any_type,
                               fallback=fallback,
                               is_ellipsis_args=True)
        elif len(t.args) == 2:
            ret_type = t.args[1]
            if isinstance(t.args[0], TypeList):
                # Callable[[ARG, ...], RET] (ordinary callable type)
                analyzed_args = self.analyze_callable_args(t.args[0])
                if analyzed_args is None:
                    return AnyType(TypeOfAny.from_error)
                args, kinds, names = analyzed_args
                ret = CallableType(args,
                                   kinds,
                                   names,
                                   ret_type=ret_type,
                                   fallback=fallback)
            elif isinstance(t.args[0], EllipsisType):
                # Callable[..., RET] (with literal ellipsis; accept arbitrary arguments)
                ret = CallableType([AnyType(TypeOfAny.explicit),
                                    AnyType(TypeOfAny.explicit)],
                                   [nodes.ARG_STAR, nodes.ARG_STAR2],
                                   [None, None],
                                   ret_type=ret_type,
                                   fallback=fallback,
                                   is_ellipsis_args=True)
            else:
                self.fail('The first argument to Callable must be a list of types or "..."', t)
                return AnyType(TypeOfAny.from_error)
        else:
            # NOTE(review): this message looks truncated — upstream reads
            # 'Please use "Callable[[<parameters>], <return type>]" or "Callable"';
            # the angle-bracket placeholders were likely lost in extraction. Verify
            # against the original source before relying on the exact wording.
            self.fail('Please use "Callable[[], ]" or "Callable"', t)
            return AnyType(TypeOfAny.from_error)
        assert isinstance(ret, CallableType)
        # Recurse into the freshly built CallableType so its argument and
        # return types get analyzed too.
        return ret.accept(self)

    def analyze_callable_args(self, arglist: TypeList) -> Optional[Tuple[List[Type],
                                                                         List[int],
                                                                         List[Optional[str]]]]:
        """Analyze the argument list of a Callable type.

        Returns a (types, kinds, names) triple, or None if an error was
        already reported for one of the arguments.
        """
        args = []   # type: List[Type]
        kinds = []  # type: List[int]
        names = []  # type: List[Optional[str]]
        for arg in arglist.items:
            if isinstance(arg, CallableArgument):
                args.append(arg.typ)
                names.append(arg.name)
                if arg.constructor is None:
                    return None
                found = self.lookup_qualified(arg.constructor, arg)
                if found is None:
                    # Looking it up already put an error message in
                    return None
                elif found.fullname not in ARG_KINDS_BY_CONSTRUCTOR:
                    self.fail('Invalid argument constructor "{}"'.format(
                        found.fullname), arg)
                    return None
                else:
                    assert found.fullname is not None
                    kind = ARG_KINDS_BY_CONSTRUCTOR[found.fullname]
                    kinds.append(kind)
                    if arg.name is not None and kind in {ARG_STAR, ARG_STAR2}:
                        self.fail("{} arguments should not have names".format(
                            arg.constructor), arg)
                        return None
            else:
                # A plain type: treated as an ordinary positional argument.
                args.append(arg)
                kinds.append(ARG_POS)
                names.append(None)
        # Note that arglist below is only used for error context.
        check_arg_names(names, [arglist] * len(args), self.fail, "Callable")
        check_arg_kinds(kinds, [arglist] * len(args), self.fail)
        return args, kinds, names

    def analyze_literal_type(self, t: UnboundType) -> Type:
        """Analyze Literal[...] into a (union of) LiteralType(s), or Any on error."""
        if len(t.args) == 0:
            self.fail('Literal[...] must have at least one parameter', t)
            return AnyType(TypeOfAny.from_error)

        output = []  # type: List[Type]
        for i, arg in enumerate(t.args):
            analyzed_types = self.analyze_literal_param(i + 1, arg, t)
            if analyzed_types is None:
                return AnyType(TypeOfAny.from_error)
            else:
                output.extend(analyzed_types)
        return UnionType.make_union(output, line=t.line)

    def analyze_literal_param(self, idx: int, arg: Type, ctx: Context) -> Optional[List[Type]]:
        """Analyze one parameter of Literal[...]; returns None after reporting an error."""
        # This UnboundType was originally defined as a string.
        if isinstance(arg, UnboundType) and arg.original_str_expr is not None:
            assert arg.original_str_fallback is not None
            return [LiteralType(
                value=arg.original_str_expr,
                fallback=self.named_type_with_normalized_str(arg.original_str_fallback),
                line=arg.line,
                column=arg.column,
            )]

        # If arg is an UnboundType that was *not* originally defined as
        # a string, try expanding it in case it's a type alias or something.
        if isinstance(arg, UnboundType):
            self.nesting_level += 1
            try:
                arg = self.visit_unbound_type(arg, defining_literal=True)
            finally:
                self.nesting_level -= 1

        # Literal[...] cannot contain Any. Give up and add an error message
        # (if we haven't already).
        arg = get_proper_type(arg)
        if isinstance(arg, AnyType):
            # Note: We can encounter Literals containing 'Any' under three circumstances:
            #
            # 1. If the user attempts use an explicit Any as a parameter
            # 2. If the user is trying to use an enum value imported from a module with
            #    no type hints, giving it an an implicit type of 'Any'
            # 3. If there's some other underlying problem with the parameter.
            #
            # We report an error in only the first two cases. In the third case, we assume
            # some other region of the code has already reported a more relevant error.
            #
            # TODO: Once we start adding support for enums, make sure we report a custom
            # error for case 2 as well.
            if arg.type_of_any not in (TypeOfAny.from_error, TypeOfAny.special_form):
                self.fail('Parameter {} of Literal[...] cannot be of type "Any"'.format(idx),
                          ctx)
            return None
        elif isinstance(arg, RawExpressionType):
            # A raw literal. Convert it directly into a literal if we can.
            if arg.literal_value is None:
                name = arg.simple_name()
                if name in ('float', 'complex'):
                    msg = 'Parameter {} of Literal[...] cannot be of type "{}"'.format(
                        idx, name)
                else:
                    msg = 'Invalid type: Literal[...] cannot contain arbitrary expressions'
                self.fail(msg, ctx)
                # Note: we deliberately ignore arg.note here: the extra info might normally be
                # helpful, but it generally won't make sense in the context of a Literal[...].
                return None

            # Remap bytes and unicode into the appropriate type for the correct Python version
            fallback = self.named_type_with_normalized_str(arg.base_type_name)
            assert isinstance(fallback, Instance)
            return [LiteralType(arg.literal_value, fallback, line=arg.line, column=arg.column)]
        elif isinstance(arg, (NoneType, LiteralType)):
            # Types that we can just add directly to the literal/potential union of literals.
            return [arg]
        elif isinstance(arg, Instance) and arg.last_known_value is not None:
            # Types generated from declarations like "var: Final = 4".
            return [arg.last_known_value]
        elif isinstance(arg, UnionType):
            # Recurse into each union member; fail the whole parameter if any member fails.
            out = []
            for union_arg in arg.items:
                union_result = self.analyze_literal_param(idx, union_arg, ctx)
                if union_result is None:
                    return None
                out.extend(union_result)
            return out
        else:
            self.fail('Parameter {} of Literal[...] is invalid'.format(idx), ctx)
            return None

    def analyze_type(self, t: Type) -> Type:
        # Entry point: dispatch to the appropriate visit_* method.
        return t.accept(self)

    def fail(self, msg: str, ctx: Context, *, code: Optional[ErrorCode] = None) -> None:
        # Thin wrapper around the error callback supplied at construction time.
        self.fail_func(msg, ctx, code=code)

    def note(self, msg: str, ctx: Context, *, code: Optional[ErrorCode] = None) -> None:
        # Thin wrapper around the note callback supplied at construction time.
        self.note_func(msg, ctx, code=code)

    @contextmanager
    def tvar_scope_frame(self) -> Iterator[None]:
        """Push a fresh method frame onto the type variable scope for the
        duration of the with-block, restoring the previous scope on exit."""
        old_scope = self.tvar_scope
        self.tvar_scope = self.tvar_scope.method_frame()
        yield
        self.tvar_scope = old_scope

    def infer_type_variables(self,
                             type: CallableType) -> List[Tuple[str, TypeVarExpr]]:
        """Return list of unique type variables referred to in a callable."""
        names = []  # type: List[str]
        tvars = []  # type: List[TypeVarExpr]
        for arg in type.arg_types:
            for name, tvar_expr in arg.accept(TypeVariableQuery(self.lookup_qualified,
                                                                self.tvar_scope)):
                if name not in names:
                    names.append(name)
                    tvars.append(tvar_expr)
        # When finding type variables in the return type of a function, don't
        # look inside Callable types. Type variables only appearing in
        # functions in the return type belong to those functions, not the
        # function we're currently analyzing.
        for name, tvar_expr in type.ret_type.accept(
                TypeVariableQuery(self.lookup_qualified, self.tvar_scope,
                                  include_callables=False)):
            if name not in names:
                names.append(name)
                tvars.append(tvar_expr)
        return list(zip(names, tvars))

    def bind_function_type_variables(self,
                                     fun_type: CallableType,
                                     defn: Context) -> List[TypeVarDef]:
        """Find the type variables of the function type and bind them in our tvar_scope"""
        if fun_type.variables:
            # Explicitly declared variables: just re-bind them in the current scope.
            for var in fun_type.variables:
                var_node = self.lookup_qualified(var.name, defn)
                assert var_node, "Binding for function type variable not found within function"
                var_expr = var_node.node
                assert isinstance(var_expr, TypeVarExpr)
                self.tvar_scope.bind_new(var.name, var_expr)
            return fun_type.variables
        typevars = self.infer_type_variables(fun_type)
        # Do not define a new type variable if already defined in scope.
        typevars = [(name, tvar) for name, tvar in typevars
                    if not self.is_defined_type_var(name, defn)]
        defs = []  # type: List[TypeVarDef]
        for name, tvar in typevars:
            if not self.tvar_scope.allow_binding(tvar.fullname):
                self.fail("Type variable '{}' is bound by an outer class".format(name), defn)
            self.tvar_scope.bind_new(name, tvar)
            binding = self.tvar_scope.get_binding(tvar.fullname)
            assert binding is not None
            defs.append(binding)
        return defs

    def is_defined_type_var(self, tvar: str, context: Context) -> bool:
        """Check whether a type variable name already has a binding in the current scope."""
        tvar_node = self.lookup_qualified(tvar, context)
        if not tvar_node:
            return False
        return self.tvar_scope.get_binding(tvar_node) is not None

    def anal_array(self, a: Iterable[Type], nested: bool = True) -> List[Type]:
        """Analyze each type in an iterable; see anal_type for the 'nested' flag."""
        res = []  # type: List[Type]
        for t in a:
            res.append(self.anal_type(t, nested))
        return res

    def anal_type(self, t: Type, nested: bool = True) -> Type:
        """Analyze a single type, tracking nesting depth while inside nested types."""
        if nested:
            self.nesting_level += 1
        try:
            return t.accept(self)
        finally:
            if nested:
                self.nesting_level -= 1

    def anal_var_defs(self, var_defs: List[TypeVarDef]) -> List[TypeVarDef]:
        """Analyze the values and upper bound of each TypeVarDef."""
        a = []  # type: List[TypeVarDef]
        for vd in var_defs:
            a.append(TypeVarDef(vd.name,
                                vd.fullname,
                                vd.id.raw_id,
                                self.anal_array(vd.values),
                                vd.upper_bound.accept(self),
                                vd.variance,
                                vd.line))
        return a

    def named_type_with_normalized_str(self, fully_qualified_name: str) -> Instance:
        """Does almost the same thing as `named_type`, except that we immediately
        unalias `builtins.bytes` and `builtins.unicode` to `builtins.str` as appropriate.
        """
        python_version = self.options.python_version
        if python_version[0] == 2 and fully_qualified_name == 'builtins.bytes':
            fully_qualified_name = 'builtins.str'
        if python_version[0] >= 3 and fully_qualified_name == 'builtins.unicode':
            fully_qualified_name = 'builtins.str'
        return self.named_type(fully_qualified_name)

    def named_type(self, fully_qualified_name: str,
                   args: Optional[List[Type]] = None,
                   line: int = -1,
                   column: int = -1) -> Instance:
        """Construct an Instance for a fully qualified builtin/library type.

        Missing type arguments are filled with Any (special form).
        """
        node = self.lookup_fqn_func(fully_qualified_name)
        assert isinstance(node.node, TypeInfo)
        any_type = AnyType(TypeOfAny.special_form)
        return Instance(node.node, args or [any_type] * len(node.node.defn.type_vars),
                        line=line, column=column)

    def tuple_type(self, items: List[Type]) -> TupleType:
        """Construct a TupleType with the standard builtins.tuple fallback."""
        any_type = AnyType(TypeOfAny.special_form)
        return TupleType(items, fallback=self.named_type('builtins.tuple', [any_type]))


TypeVarList = List[Tuple[str, TypeVarExpr]]

# Mypyc doesn't support callback protocols yet.
MsgCallback = Callable[[str, Context, DefaultNamedArg(Optional[ErrorCode], 'code')], None]


def get_omitted_any(disallow_any: bool, fail: MsgCallback, note: MsgCallback,
                    typ: Type, fullname: Optional[str] = None,
                    unexpanded_type: Optional[Type] = None) -> AnyType:
    """Return the Any used for omitted generic arguments, reporting an error
    when implicit Any is disallowed."""
    if disallow_any:
        if fullname in nongen_builtins:
            # We use a dedicated error message for builtin generics (as the most common case).
            alternative = nongen_builtins[fullname]
            fail(message_registry.IMPLICIT_GENERIC_ANY_BUILTIN.format(alternative), typ,
                 code=codes.TYPE_ARG)
        else:
            typ = unexpanded_type or typ
            type_str = typ.name if isinstance(typ, UnboundType) else format_type_bare(typ)

            fail(
                message_registry.BARE_GENERIC.format(
                    quote_type_string(type_str)),
                typ,
                code=codes.TYPE_ARG)

            if fullname in GENERIC_STUB_NOT_AT_RUNTIME_TYPES:
                # Recommend `from __future__ import annotations` or to put type in quotes
                # (string literal escaping) for classes not generic at runtime
                note(
                    "Subscripting classes that are not generic at runtime may require "
                    "escaping, see https://mypy.readthedocs.io/"
                    "en/latest/common_issues.html#not-generic-runtime",
                    typ,
                    code=codes.TYPE_ARG)

        any_type = AnyType(TypeOfAny.from_error, line=typ.line, column=typ.column)
    else:
        any_type = AnyType(TypeOfAny.from_omitted_generics,
                           line=typ.line, column=typ.column)
    return any_type


def fix_instance(t: Instance, fail: MsgCallback, note: MsgCallback,
                 disallow_any: bool, use_generic_error: bool = False,
                 unexpanded_type: Optional[Type] = None,) -> None:
    """Fix a malformed instance by replacing all type arguments with Any.

    Also emit a suitable error if this is not due to implicit Any's.
    """
    if len(t.args) == 0:
        # No type arguments given at all: fill with Any (possibly reporting an error).
        if use_generic_error:
            fullname = None  # type: Optional[str]
        else:
            fullname = t.type.fullname
        any_type = get_omitted_any(disallow_any, fail, note, t, fullname, unexpanded_type)
        t.args = [any_type] * len(t.type.type_vars)
        return
    # Invalid number of type parameters.
    n = len(t.type.type_vars)
    s = '{} type arguments'.format(n)
    if n == 0:
        s = 'no type arguments'
    elif n == 1:
        s = '1 type argument'
    act = str(len(t.args))
    if act == '0':
        act = 'none'
    fail('"{}" expects {}, but {} given'.format(
        t.type.name, s, act), t, code=codes.TYPE_ARG)
    # Construct the correct number of type arguments, as
    # otherwise the type checker may crash as it expects
    # things to be right.
    t.args = [AnyType(TypeOfAny.from_error) for _ in t.type.type_vars]
    t.invalid = True


def expand_type_alias(node: TypeAlias, args: List[Type],
                      fail: MsgCallback, no_args: bool, ctx: Context, *,
                      unexpanded_type: Optional[Type] = None,
                      disallow_any: bool = False) -> Type:
    """Expand a (generic) type alias target following the rules outlined in TypeAlias docstring.

    Here:
        target: original target type (contains unbound type variables)
        alias_tvars: type variable names
        args: types to be substituted in place of type variables
        fail: error reporter callback
        no_args: whether original definition used a bare generic `A = List`
        ctx: context where expansion happens
    """
    exp_len = len(node.alias_tvars)
    act_len = len(args)
    if exp_len > 0 and act_len == 0:
        # Interpret bare Alias same as normal generic, i.e., Alias[Any, Any, ...]
        return set_any_tvars(node, ctx.line, ctx.column,
                             disallow_any=disallow_any, fail=fail,
                             unexpanded_type=unexpanded_type)
    if exp_len == 0 and act_len == 0:
        if no_args:
            assert isinstance(node.target, Instance)  # type: ignore[misc]
            # Note: this is the only case where we use an eager expansion. See more info about
            # no_args aliases like L = List in the docstring for TypeAlias class.
            return Instance(node.target.type, [], line=ctx.line, column=ctx.column)
        return TypeAliasType(node, [], line=ctx.line, column=ctx.column)
    if (exp_len == 0 and act_len > 0
            and isinstance(node.target, Instance)  # type: ignore[misc]
            and no_args):
        # Subscripting a no-args alias like L = List: apply the args directly.
        tp = Instance(node.target.type, args)
        tp.line = ctx.line
        tp.column = ctx.column
        return tp
    if act_len != exp_len:
        fail('Bad number of arguments for type alias, expected: %s, given: %s'
             % (exp_len, act_len), ctx)
        return set_any_tvars(node, ctx.line, ctx.column, from_error=True)
    typ = TypeAliasType(node, args, ctx.line, ctx.column)
    assert typ.alias is not None
    # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here.
    if (isinstance(typ.alias.target, Instance)  # type: ignore
            and typ.alias.target.type.fullname == 'mypy_extensions.FlexibleAlias'):
        exp = get_proper_type(typ)
        assert isinstance(exp, Instance)
        return exp.args[-1]
    return typ


def set_any_tvars(node: TypeAlias,
                  newline: int, newcolumn: int, *,
                  from_error: bool = False,
                  disallow_any: bool = False,
                  fail: Optional[MsgCallback] = None,
                  unexpanded_type: Optional[Type] = None) -> Type:
    """Instantiate a type alias with Any for every alias type variable.

    Reports a bare-generic error first when disallow_any is set (fail must
    then be provided).
    """
    if from_error or disallow_any:
        type_of_any = TypeOfAny.from_error
    else:
        type_of_any = TypeOfAny.from_omitted_generics
    if disallow_any:
        assert fail is not None
        otype = unexpanded_type or node.target
        type_str = otype.name if isinstance(otype, UnboundType) else format_type_bare(otype)

        fail(message_registry.BARE_GENERIC.format(quote_type_string(type_str)),
             Context(newline, newcolumn), code=codes.TYPE_ARG)
    any_type = AnyType(type_of_any, line=newline, column=newcolumn)
    return TypeAliasType(node, [any_type] * len(node.alias_tvars), newline, newcolumn)


def remove_dups(tvars: Iterable[T]) -> List[T]:
    # Get unique elements in order of appearance
    all_tvars = set()  # type: Set[T]
    new_tvars = []  # type: List[T]
    for t in tvars:
        if t not in all_tvars:
            new_tvars.append(t)
            all_tvars.add(t)
    return new_tvars


def flatten_tvars(ll: Iterable[List[T]]) -> List[T]:
    # Concatenate the lists and drop duplicates, preserving first-seen order.
    return remove_dups(chain.from_iterable(ll))


class TypeVariableQuery(TypeQuery[TypeVarList]):
    """Collect the (name, TypeVarExpr) pairs referenced in an unanalyzed type."""

    def __init__(self,
                 lookup: Callable[[str, Context], Optional[SymbolTableNode]],
                 scope: 'TypeVarScope',
                 *,
                 include_callables: bool = True,
                 include_bound_tvars: bool = False) -> None:
        self.include_callables = include_callables
        self.lookup = lookup
        self.scope = scope
        self.include_bound_tvars = include_bound_tvars
        super().__init__(flatten_tvars)

    def _seems_like_callable(self, type: UnboundType) -> bool:
        # Heuristic: the first argument of Callable[...] is either an
        # ellipsis or a list of argument types.
        if not type.args:
            return False
        if isinstance(type.args[0], (EllipsisType, TypeList)):
            return True
        return False

    def visit_unbound_type(self, t: UnboundType) -> TypeVarList:
        name = t.name
        node = self.lookup(name, t)
        if node and isinstance(node.node, TypeVarExpr) and (
                self.include_bound_tvars or self.scope.get_binding(node) is None):
            assert isinstance(node.node, TypeVarExpr)
            return [(name, node.node)]
        elif not self.include_callables and self._seems_like_callable(t):
            return []
        elif node and node.fullname in ('typing_extensions.Literal', 'typing.Literal'):
            # Names inside Literal[...] are values, not type variable references.
            return []
        else:
            return super().visit_unbound_type(t)

    def visit_callable_type(self, t: CallableType) -> TypeVarList:
        if self.include_callables:
            return super().visit_callable_type(t)
        else:
            return []


def check_for_explicit_any(typ: Optional[Type],
                           options: Options,
                           is_typeshed_stub: bool,
                           msg: MessageBuilder,
                           context: Context) -> None:
    """Report an error when --disallow-any-explicit is set and typ contains an
    explicit Any (typeshed stubs are exempt)."""
    if (options.disallow_any_explicit
            and not is_typeshed_stub
            and typ
            and has_explicit_any(typ)):
        msg.explicit_any(context)


def has_explicit_any(t: Type) -> bool:
    """
    Whether this type is or type it contains is an Any coming from explicit type annotation
    """
    return t.accept(HasExplicitAny())


class HasExplicitAny(TypeQuery[bool]):
    def __init__(self) -> None:
        super().__init__(any)

    def visit_any(self, t: AnyType) -> bool:
        return t.type_of_any == TypeOfAny.explicit

    def visit_typeddict_type(self, t: TypedDictType) -> bool:
        # typeddict is checked during TypedDict declaration, so don't typecheck it here.
        return False


def has_any_from_unimported_type(t: Type) -> bool:
    """Return true if this type is Any because an import was not followed.

    If type t is such Any type or has type arguments that contain such Any
    type this function will return true.
    """
    return t.accept(HasAnyFromUnimportedType())


class HasAnyFromUnimportedType(TypeQuery[bool]):
    def __init__(self) -> None:
        super().__init__(any)

    def visit_any(self, t: AnyType) -> bool:
        return t.type_of_any == TypeOfAny.from_unimported_type

    def visit_typeddict_type(self, t: TypedDictType) -> bool:
        # typeddict is checked during TypedDict declaration, so don't typecheck it here
        return False


def collect_any_types(t: Type) -> List[AnyType]:
    """Return all inner `AnyType`s of type t"""
    return t.accept(CollectAnyTypesQuery())


class CollectAnyTypesQuery(TypeQuery[List[AnyType]]):
    def __init__(self) -> None:
        super().__init__(self.combine_lists_strategy)

    def visit_any(self, t: AnyType) -> List[AnyType]:
        return [t]

    @classmethod
    def combine_lists_strategy(cls, it: Iterable[List[AnyType]]) -> List[AnyType]:
        # Flatten the per-subtype result lists into one list.
        result = []  # type: List[AnyType]
        for l in it:
            result.extend(l)
        return result


def collect_all_inner_types(t: Type) -> List[Type]:
    """
    Return all types that `t` contains
    """
    return t.accept(CollectAllInnerTypesQuery())


class CollectAllInnerTypesQuery(TypeQuery[List[Type]]):
    def __init__(self) -> None:
        super().__init__(self.combine_lists_strategy)

    def query_types(self, types: Iterable[Type]) -> List[Type]:
        # Include both the directly contained types and everything they contain.
        return self.strategy([t.accept(self) for t in types]) + list(types)

    @classmethod
    def combine_lists_strategy(cls, it: Iterable[List[Type]]) -> List[Type]:
        return list(itertools.chain.from_iterable(it))


def make_optional_type(t: Type) -> Type:
    """Return the type corresponding to Optional[t].

    Note that we can't use normal union simplification, since this function
    is called during semantic analysis and simplification only works during
    type checking.
    """
    t = get_proper_type(t)
    if isinstance(t, NoneType):
        return t
    elif isinstance(t, UnionType):
        # Move any existing None to the end so it appears exactly once.
        items = [item for item in union_items(t)
                 if not isinstance(item, NoneType)]
        return UnionType(items + [NoneType()], t.line, t.column)
    else:
        return UnionType([t, NoneType()], t.line, t.column)


def fix_instance_types(t: Type, fail: MsgCallback, note: MsgCallback) -> None:
    """Recursively fix all instance types (type argument count) in a given type.

    For example 'Union[Dict, List[str, int]]' will be transformed into
    'Union[Dict[Any, Any], List[Any]]' in place.
    """
    t.accept(InstanceFixer(fail, note))


class InstanceFixer(TypeTraverserVisitor):
    """Traverser that repairs every Instance with a wrong number of type args."""

    def __init__(self, fail: MsgCallback, note: MsgCallback) -> None:
        self.fail = fail
        self.note = note

    def visit_instance(self, typ: Instance) -> None:
        super().visit_instance(typ)
        if len(typ.args) != len(typ.type.type_vars):
            fix_instance(typ, self.fail, self.note,
                         disallow_any=False, use_generic_error=True)
mypy-0.761/mypy/typeops.py0000644€tŠÔÚ€2›s®0000006414713576752246022007 0ustar jukkaDROPBOX\Domain Users00000000000000"""Miscellaneous type operations and helpers for use during type checking.

NOTE: These must not be accessed from mypy.nodes or mypy.types to avoid import
      cycles. These must not be called from the semantic analysis main pass
      since these may assume that MROs are ready.
"""

from typing import cast, Optional, List, Sequence, Set, Iterable, TypeVar
from typing_extensions import Type as TypingType
import sys

from mypy.types import (
    TupleType, Instance, FunctionLike, Type, CallableType, TypeVarDef, Overloaded,
    TypeVarType, UninhabitedType, FormalArgument, UnionType, NoneType, TypedDictType,
    AnyType, TypeOfAny, TypeType, ProperType, LiteralType, get_proper_type,
    get_proper_types, copy_type, TypeAliasType, TypeQuery
)
from mypy.nodes import (
    FuncBase, FuncItem, OverloadedFuncDef, TypeInfo, ARG_STAR, ARG_STAR2, ARG_POS,
    Expression, StrExpr, Var
)
from mypy.maptype import map_instance_to_supertype
from mypy.expandtype import expand_type_by_instance, expand_type
from mypy.sharedparse import argument_elide_name

from mypy.typevars import fill_typevars

from mypy import state


def is_recursive_pair(s: Type, t: Type) -> bool:
    """Is this a pair of recursive type aliases?"""
    return (isinstance(s, TypeAliasType) and isinstance(t, TypeAliasType) and
            s.is_recursive and t.is_recursive)


def tuple_fallback(typ: TupleType) -> Instance:
    """Return fallback type for a tuple."""
    from mypy.join import join_type_list

    info = typ.partial_fallback.type
    if info.fullname != 'builtins.tuple':
        return typ.partial_fallback
    # Join the item types to get the single type argument of builtins.tuple.
    return Instance(info, [join_type_list(typ.items)])


def try_getting_instance_fallback(typ: ProperType) -> Optional[Instance]:
    """Returns the Instance fallback for this type if one exists.

    Otherwise, returns None.
    """
    if isinstance(typ, Instance):
        return typ
    elif isinstance(typ, TupleType):
        return tuple_fallback(typ)
    elif isinstance(typ, TypedDictType):
        return typ.fallback
    elif isinstance(typ, FunctionLike):
        return typ.fallback
    elif isinstance(typ, LiteralType):
        return typ.fallback
    else:
        return None


def type_object_type_from_function(signature: FunctionLike,
                                   info: TypeInfo,
                                   def_info: TypeInfo,
                                   fallback: Instance,
                                   is_new: bool) -> FunctionLike:
    """Build the callable type of a class object from its __init__/__new__ signature."""
    # We first need to record all non-trivial (explicit) self types in __init__,
    # since they will not be available after we bind them. Note, we use explicit
    # self-types only in the defining class, similar to __new__ (but not exactly the same,
    # see comment in class_callable below). This is mostly useful for annotating library
    # classes such as subprocess.Popen.
    default_self = fill_typevars(info)
    if not is_new and not info.is_newtype:
        orig_self_types = [(it.arg_types[0] if it.arg_types and it.arg_types[0] != default_self
                            and it.arg_kinds[0] == ARG_POS else None)
                           for it in signature.items()]
    else:
        orig_self_types = [None] * len(signature.items())

    # The __init__ method might come from a generic superclass 'def_info'
    # with type variables that do not map identically to the type variables of
    # the class 'info' being constructed. For example:
    #
    #   class A(Generic[T]):
    #       def __init__(self, x: T) -> None: ...
    #   class B(A[List[T]]):
    #       ...
    #
    # We need to map B's __init__ to the type (List[T]) -> None.
    signature = bind_self(signature, original_type=default_self, is_classmethod=is_new)
    signature = cast(FunctionLike,
                     map_type_from_supertype(signature, info, def_info))

    special_sig = None  # type: Optional[str]
    if def_info.fullname == 'builtins.dict':
        # Special signature!
        special_sig = 'dict'

    if isinstance(signature, CallableType):
        return class_callable(signature, info, fallback, special_sig, is_new,
                              orig_self_types[0])
    else:
        # Overloaded __init__/__new__.
        assert isinstance(signature, Overloaded)
        items = []  # type: List[CallableType]
        for item, orig_self in zip(signature.items(), orig_self_types):
            items.append(class_callable(item, info, fallback, special_sig, is_new, orig_self))
        return Overloaded(items)


def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance,
                   special_sig: Optional[str],
                   is_new: bool, orig_self_type: Optional[Type] = None) -> CallableType:
    """Create a type object type based on the signature of __init__."""
    variables = []  # type: List[TypeVarDef]
    variables.extend(info.defn.type_vars)
    variables.extend(init_type.variables)

    from mypy.subtypes import is_subtype

    init_ret_type = get_proper_type(init_type.ret_type)
    orig_self_type = get_proper_type(orig_self_type)
    default_ret_type = fill_typevars(info)
    explicit_type = init_ret_type if is_new else orig_self_type
    if (
        isinstance(explicit_type, (Instance, TupleType))
        # Only use the declared return type from __new__ or declared self in __init__
        # if it is actually returning a subtype of what we would return otherwise.
        and is_subtype(explicit_type, default_ret_type, ignore_type_params=True)
    ):
        ret_type = explicit_type  # type: Type
    else:
        ret_type = default_ret_type

    callable_type = init_type.copy_modified(
        ret_type=ret_type, fallback=type_type, name=None, variables=variables,
        special_sig=special_sig)
    c = callable_type.with_name(info.name)
    return c


def map_type_from_supertype(typ: Type,
                            sub_info: TypeInfo,
                            super_info: TypeInfo) -> Type:
    """Map type variables in a type defined in a supertype context to be valid
    in the subtype context. Assume that the result is unique; if more than
    one type is possible, return one of the alternatives.

    For example, assume

      class D(Generic[S]): ...
      class C(D[E[T]], Generic[T]): ...

    Now S in the context of D would be mapped to E[T] in the context of C.
    """
    # Create the type of self in subtype, of form t[a1, ...].
    inst_type = fill_typevars(sub_info)
    if isinstance(inst_type, TupleType):
        inst_type = tuple_fallback(inst_type)
    # Map the type of self to supertype. This gets us a description of the
    # supertype type variables in terms of subtype variables, i.e. t[t1, ...]
    # so that any type variables in tN are to be interpreted in subtype
    # context.
    inst_type = map_instance_to_supertype(inst_type, super_info)
    # Finally expand the type variables in type with those in the previously
    # constructed type. Note that both type and inst_type may have type
    # variables, but in type they are interpreted in supertype context while
    # in inst_type they are interpreted in subtype context. This works even if
    # the names of type variables in supertype and subtype overlap.
    return expand_type_by_instance(typ, inst_type)


def supported_self_type(typ: ProperType) -> bool:
    """Is this a supported kind of explicit self-type?

    Currently, this means an X or Type[X], where X is an instance or
    a type variable with an instance upper bound.
    """
    if isinstance(typ, TypeType):
        return supported_self_type(typ.item)
    return (isinstance(typ, TypeVarType) or
            (isinstance(typ, Instance) and typ != fill_typevars(typ.type)))


F = TypeVar('F', bound=FunctionLike)


def bind_self(method: F, original_type: Optional[Type] = None,
              is_classmethod: bool = False) -> F:
    """Return a copy of `method`, with the type of its first parameter (usually
    self or cls) bound to original_type.

    If the type of `self` is a generic type (T, or Type[T] for classmethods),
    instantiate every occurrence of type with original_type in the rest of the
    signature and in the return type.

    original_type is the type of E in the expression E.copy(). It is None in
    compatibility checks. In this case we treat it as the erasure of the declared
    type of self.

    This way we can express "the type of self". For example:

    T = TypeVar('T', bound='A')
    class A:
        def copy(self: T) -> T: ...
    class B(A): pass

    b = B().copy()  # type: B
    """
    from mypy.infer import infer_type_arguments

    if isinstance(method, Overloaded):
        # Bind each overload item independently.
        return cast(F, Overloaded([bind_self(c, original_type, is_classmethod)
                                   for c in method.items()]))
    assert isinstance(method, CallableType)
    func = method
    if not func.arg_types:
        # Invalid method, return something.
        return cast(F, func)
    if func.arg_kinds[0] == ARG_STAR:
        # The signature is of the form 'def foo(*args, ...)'.
        # In this case we shouldn't drop the first arg,
        # since func will be absorbed by the *args.
        # TODO: infer bounds on the type of *args?
        return cast(F, func)
    self_param_type = get_proper_type(func.arg_types[0])
    if func.variables and supported_self_type(self_param_type):
        if original_type is None:
            # TODO: type check method override (see #7861).
            original_type = erase_to_bound(self_param_type)
        original_type = get_proper_type(original_type)

        all_ids = [x.id for x in func.variables]
        typeargs = infer_type_arguments(all_ids, self_param_type, original_type,
                                        is_supertype=True)
        if (is_classmethod
                # TODO: why do we need the extra guards here?
                and any(isinstance(get_proper_type(t), UninhabitedType) for t in typeargs)
                and isinstance(original_type, (Instance, TypeVarType, TupleType))):
            # In case we call a classmethod through an instance x, fallback to type(x)
            typeargs = infer_type_arguments(all_ids, self_param_type, TypeType(original_type),
                                            is_supertype=True)

        ids = [tid for tid in all_ids
               if any(tid == t.id for t in get_type_vars(self_param_type))]

        # Technically, some constraints might be unsolvable; make them
        # uninhabited (the substitution below uses UninhabitedType for
        # unsolved type arguments).
        to_apply = [t if t is not None else UninhabitedType() for t in typeargs]

        def expand(target: Type) -> Type:
            # Substitute the inferred arguments for the self-type variables.
            return expand_type(target, {id: to_apply[all_ids.index(id)] for id in ids})

        arg_types = [expand(x) for x in func.arg_types[1:]]
        ret_type = expand(func.ret_type)
        variables = [v for v in func.variables if v.id not in ids]
    else:
        arg_types = func.arg_types[1:]
        ret_type = func.ret_type
        variables = func.variables
    original_type = get_proper_type(original_type)
    if isinstance(original_type, CallableType) and original_type.is_type_obj():
        original_type = TypeType.make_normalized(original_type.ret_type)
    res = func.copy_modified(arg_types=arg_types,
                             arg_kinds=func.arg_kinds[1:],
                             arg_names=func.arg_names[1:],
                             variables=variables,
                             ret_type=ret_type,
                             bound_args=[original_type])
    return cast(F, res)


def erase_to_bound(t: Type) -> Type:
    """Replace a type variable (or Type[...] of one) with its upper bound."""
    # TODO: use value restrictions to produce a union?
    t = get_proper_type(t)
    if isinstance(t, TypeVarType):
        return t.upper_bound
    if isinstance(t, TypeType):
        if isinstance(t.item, TypeVarType):
            return TypeType.make_normalized(t.item.upper_bound)
    return t


def callable_corresponding_argument(typ: CallableType,
                                    model: FormalArgument) -> Optional[FormalArgument]:
    """Return the argument a function that corresponds to `model`"""

    by_name = typ.argument_by_name(model.name)
    by_pos = typ.argument_by_position(model.pos)
    if by_name is None and by_pos is None:
        return None
    if by_name is not None and by_pos is not None:
        if by_name == by_pos:
            return by_name
        # If we're dealing with an optional pos-only and an optional
        # name-only arg, merge them.  This is the case for all functions
        # taking both *args and **args, or a pair of functions like so:

        # def right(a: int = ...) -> None: ...
        # def left(__a: int = ..., *, a: int = ...) -> None: ...
from mypy.subtypes import is_equivalent if (not (by_name.required or by_pos.required) and by_pos.name is None and by_name.pos is None and is_equivalent(by_name.typ, by_pos.typ)): return FormalArgument(by_name.name, by_pos.pos, by_name.typ, False) return by_name if by_name is not None else by_pos def make_simplified_union(items: Sequence[Type], line: int = -1, column: int = -1, *, keep_erased: bool = False) -> ProperType: """Build union type with redundant union items removed. If only a single item remains, this may return a non-union type. Examples: * [int, str] -> Union[int, str] * [int, object] -> object * [int, int] -> int * [int, Any] -> Union[int, Any] (Any types are not simplified away!) * [Any, Any] -> Any Note: This must NOT be used during semantic analysis, since TypeInfos may not be fully initialized. The keep_erased flag is used for type inference against union types containing type variables. If set to True, keep all ErasedType items. """ items = get_proper_types(items) while any(isinstance(typ, UnionType) for typ in items): all_items = [] # type: List[ProperType] for typ in items: if isinstance(typ, UnionType): all_items.extend(get_proper_types(typ.items)) else: all_items.append(typ) items = all_items from mypy.subtypes import is_proper_subtype removed = set() # type: Set[int] for i, ti in enumerate(items): if i in removed: continue # Keep track of the truishness info for deleted subtypes which can be relevant cbt = cbf = False for j, tj in enumerate(items): if i != j and is_proper_subtype(tj, ti, keep_erased_types=keep_erased): # We found a redundant item in the union. 
removed.add(j) cbt = cbt or tj.can_be_true cbf = cbf or tj.can_be_false # if deleted subtypes had more general truthiness, use that if not ti.can_be_true and cbt: items[i] = true_or_false(ti) elif not ti.can_be_false and cbf: items[i] = true_or_false(ti) simplified_set = [items[i] for i in range(len(items)) if i not in removed] return UnionType.make_union(simplified_set, line, column) def true_only(t: Type) -> ProperType: """ Restricted version of t with only True-ish values """ t = get_proper_type(t) if not t.can_be_true: # All values of t are False-ish, so there are no true values in it return UninhabitedType(line=t.line, column=t.column) elif not t.can_be_false: # All values of t are already True-ish, so true_only is idempotent in this case return t elif isinstance(t, UnionType): # The true version of a union type is the union of the true versions of its components new_items = [true_only(item) for item in t.items] return make_simplified_union(new_items, line=t.line, column=t.column) else: new_t = copy_type(t) new_t.can_be_false = False return new_t def false_only(t: Type) -> ProperType: """ Restricted version of t with only False-ish values """ t = get_proper_type(t) if not t.can_be_false: if state.strict_optional: # All values of t are True-ish, so there are no false values in it return UninhabitedType(line=t.line) else: # When strict optional checking is disabled, everything can be # False-ish since anything can be None return NoneType(line=t.line) elif not t.can_be_true: # All values of t are already False-ish, so false_only is idempotent in this case return t elif isinstance(t, UnionType): # The false version of a union type is the union of the false versions of its components new_items = [false_only(item) for item in t.items] return make_simplified_union(new_items, line=t.line, column=t.column) else: new_t = copy_type(t) new_t.can_be_true = False return new_t def true_or_false(t: Type) -> ProperType: """ Unrestricted version of t with both True-ish and 
    False-ish values
    """
    t = get_proper_type(t)

    if isinstance(t, UnionType):
        new_items = [true_or_false(item) for item in t.items]
        return make_simplified_union(new_items, line=t.line, column=t.column)

    new_t = copy_type(t)
    new_t.can_be_true = new_t.can_be_true_default()
    new_t.can_be_false = new_t.can_be_false_default()
    return new_t


def erase_def_to_union_or_bound(tdef: TypeVarDef) -> Type:
    """Replace a TypeVarDef with the union of its values, or its upper bound."""
    if tdef.values:
        return make_simplified_union(tdef.values)
    else:
        return tdef.upper_bound


def erase_to_union_or_bound(typ: TypeVarType) -> ProperType:
    """Replace a TypeVarType with the union of its values, or its upper bound."""
    if typ.values:
        return make_simplified_union(typ.values)
    else:
        return get_proper_type(typ.upper_bound)


def function_type(func: FuncBase, fallback: Instance) -> FunctionLike:
    """Return the type of a function definition, synthesizing one if it has none."""
    if func.type:
        assert isinstance(func.type, FunctionLike)
        return func.type
    else:
        # Implicit type signature with dynamic types.
        if isinstance(func, FuncItem):
            return callable_type(func, fallback)
        else:
            # Broken overloads can have self.type set to None.
            # TODO: should we instead always set the type in semantic analyzer?
            assert isinstance(func, OverloadedFuncDef)
            any_type = AnyType(TypeOfAny.from_error)
            dummy = CallableType([any_type, any_type],
                                 [ARG_STAR, ARG_STAR2],
                                 [None, None], any_type,
                                 fallback,
                                 line=func.line, is_ellipsis_args=True)
            # Return an Overloaded, because some callers may expect that
            # an OverloadedFuncDef has an Overloaded type.
            return Overloaded([dummy])


def callable_type(fdef: FuncItem, fallback: Instance,
                  ret_type: Optional[Type] = None) -> CallableType:
    """Synthesize a CallableType for an unannotated function item."""
    # TODO: somewhat unfortunate duplication with prepare_method_signature in semanal
    if fdef.info and not fdef.is_static and fdef.arg_names:
        # Methods get a precise self (or cls, for class methods and __new__) type;
        # the remaining arguments are implicitly Any.
        self_type = fill_typevars(fdef.info)  # type: Type
        if fdef.is_class or fdef.name == '__new__':
            self_type = TypeType.make_normalized(self_type)
        args = [self_type] + [AnyType(TypeOfAny.unannotated)] * (len(fdef.arg_names)-1)
    else:
        args = [AnyType(TypeOfAny.unannotated)] * len(fdef.arg_names)

    return CallableType(
        args,
        fdef.arg_kinds,
        [None if argument_elide_name(n) else n for n in fdef.arg_names],
        ret_type or AnyType(TypeOfAny.unannotated),
        fallback,
        name=fdef.name,
        line=fdef.line,
        column=fdef.column,
        implicit=True,
    )


def try_getting_str_literals(expr: Expression, typ: Type) -> Optional[List[str]]:
    """If the given expression or type corresponds to a string literal
    or a union of string literals, returns a list of the underlying strings.
    Otherwise, returns None.

    Specifically, this function is guaranteed to return a list with
    one or more strings if one of the following is true:

    1. 'expr' is a StrExpr
    2. 'typ' is a LiteralType containing a string
    3. 'typ' is a UnionType containing only LiteralType of strings
    """
    if isinstance(expr, StrExpr):
        return [expr.value]

    # TODO: See if we can eliminate this function and call the below one directly
    return try_getting_str_literals_from_type(typ)


def try_getting_str_literals_from_type(typ: Type) -> Optional[List[str]]:
    """If the given expression or type corresponds to a string Literal
    or a union of string Literals, returns a list of the underlying strings.
    Otherwise, returns None.

    For example, if we had the type 'Literal["foo", "bar"]' as input, this function
    would return a list of strings ["foo", "bar"].
""" return try_getting_literals_from_type(typ, str, "builtins.str") def try_getting_int_literals_from_type(typ: Type) -> Optional[List[int]]: """If the given expression or type corresponds to an int Literal or a union of int Literals, returns a list of the underlying ints. Otherwise, returns None. For example, if we had the type 'Literal[1, 2, 3]' as input, this function would return a list of ints [1, 2, 3]. """ return try_getting_literals_from_type(typ, int, "builtins.int") T = TypeVar('T') def try_getting_literals_from_type(typ: Type, target_literal_type: TypingType[T], target_fullname: str) -> Optional[List[T]]: """If the given expression or type corresponds to a Literal or union of Literals where the underlying values corresponds to the given target type, returns a list of those underlying values. Otherwise, returns None. """ typ = get_proper_type(typ) if isinstance(typ, Instance) and typ.last_known_value is not None: possible_literals = [typ.last_known_value] # type: List[Type] elif isinstance(typ, UnionType): possible_literals = list(typ.items) else: possible_literals = [typ] literals = [] # type: List[T] for lit in get_proper_types(possible_literals): if isinstance(lit, LiteralType) and lit.fallback.type.fullname == target_fullname: val = lit.value if isinstance(val, target_literal_type): literals.append(val) else: return None else: return None return literals def get_enum_values(typ: Instance) -> List[str]: """Return the list of values for an Enum.""" return [name for name, sym in typ.type.names.items() if isinstance(sym.node, Var)] def is_singleton_type(typ: Type) -> bool: """Returns 'true' if this type is a "singleton type" -- if there exists exactly only one runtime value associated with this type. That is, given two values 'a' and 'b' that have the same type 't', 'is_singleton_type(t)' returns True if and only if the expression 'a is b' is always true. 
Currently, this returns True when given NoneTypes, enum LiteralTypes and enum types with a single value. Note that other kinds of LiteralTypes cannot count as singleton types. For example, suppose we do 'a = 100000 + 1' and 'b = 100001'. It is not guaranteed that 'a is b' will always be true -- some implementations of Python will end up constructing two distinct instances of 100001. """ typ = get_proper_type(typ) # TODO: Also make this return True if the type is a bool LiteralType. # Also make this return True if the type corresponds to ... (ellipsis) or NotImplemented? return ( isinstance(typ, NoneType) or (isinstance(typ, LiteralType) and typ.is_enum_literal()) or (isinstance(typ, Instance) and typ.type.is_enum and len(get_enum_values(typ)) == 1) ) def try_expanding_enum_to_union(typ: Type, target_fullname: str) -> ProperType: """Attempts to recursively expand any enum Instances with the given target_fullname into a Union of all of its component LiteralTypes. For example, if we have: class Color(Enum): RED = 1 BLUE = 2 YELLOW = 3 class Status(Enum): SUCCESS = 1 FAILURE = 2 UNKNOWN = 3 ...and if we call `try_expanding_enum_to_union(Union[Color, Status], 'module.Color')`, this function will return Literal[Color.RED, Color.BLUE, Color.YELLOW, Status]. """ typ = get_proper_type(typ) if isinstance(typ, UnionType): items = [try_expanding_enum_to_union(item, target_fullname) for item in typ.items] return make_simplified_union(items) elif isinstance(typ, Instance) and typ.type.is_enum and typ.type.fullname == target_fullname: new_items = [] for name, symbol in typ.type.names.items(): if not isinstance(symbol.node, Var): continue new_items.append(LiteralType(name, typ)) # SymbolTables are really just dicts, and dicts are guaranteed to preserve # insertion order only starting with Python 3.7. So, we sort these for older # versions of Python to help make tests deterministic. 
# # We could probably skip the sort for Python 3.6 since people probably run mypy # only using CPython, but we might as well for the sake of full correctness. if sys.version_info < (3, 7): new_items.sort(key=lambda lit: lit.value) return make_simplified_union(new_items) else: return typ def coerce_to_literal(typ: Type) -> ProperType: """Recursively converts any Instances that have a last_known_value or are instances of enum types with a single value into the corresponding LiteralType. """ typ = get_proper_type(typ) if isinstance(typ, UnionType): new_items = [coerce_to_literal(item) for item in typ.items] return make_simplified_union(new_items) elif isinstance(typ, Instance): if typ.last_known_value: return typ.last_known_value elif typ.type.is_enum: enum_values = get_enum_values(typ) if len(enum_values) == 1: return LiteralType(value=enum_values[0], fallback=typ) return typ def get_type_vars(tp: Type) -> List[TypeVarType]: return tp.accept(TypeVarExtractor()) class TypeVarExtractor(TypeQuery[List[TypeVarType]]): def __init__(self) -> None: super().__init__(self._merge) def _merge(self, iter: Iterable[List[TypeVarType]]) -> List[TypeVarType]: out = [] for item in iter: out.extend(item) return out def visit_type_var(self, t: TypeVarType) -> List[TypeVarType]: return [t] mypy-0.761/mypy/types.py0000644€tŠÔÚ€2›s®0000025530613576752246021447 0ustar jukkaDROPBOX\Domain Users00000000000000"""Classes for representing mypy types.""" import copy import sys from abc import abstractmethod from collections import OrderedDict from typing import ( Any, TypeVar, Dict, List, Tuple, cast, Set, Optional, Union, Iterable, NamedTuple, Sequence, Iterator, overload ) from typing_extensions import ClassVar, Final, TYPE_CHECKING, overload import mypy.nodes from mypy import state from mypy.nodes import ( INVARIANT, SymbolNode, ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT, FuncDef, ) from mypy.util import IdMapper from mypy.bogus_type import Bogus T = TypeVar('T') JsonDict 
= Dict[str, Any] # The set of all valid expressions that can currently be contained # inside of a Literal[...]. # # Literals can contain bytes and enum-values: we special-case both of these # and store the value as a string. We rely on the fallback type that's also # stored with the Literal to determine how a string is being used. # # TODO: confirm that we're happy with representing enums (and the # other types) in the manner described above. # # Note: if we change the set of types included below, we must also # make sure to audit the following methods: # # 1. types.LiteralType's serialize and deserialize methods: this method # needs to make sure it can convert the below types into JSON and back. # # 2. types.LiteralType's 'alue_repr` method: this method is ultimately used # by TypeStrVisitor's visit_literal_type to generate a reasonable # repr-able output. # # 3. server.astdiff.SnapshotTypeVisitor's visit_literal_type_method: this # method assumes that the following types supports equality checks and # hashability. # # Note: Although "Literal[None]" is a valid type, we internally always convert # such a type directly into "None". So, "None" is not a valid parameter of # LiteralType and is omitted from this list. LiteralValue = Union[int, str, bool] # If we only import type_visitor in the middle of the file, mypy # breaks, and if we do it at the top, it breaks at runtime because of # import cycle issues, so we do it at the top while typechecking and # then again in the middle at runtime. # We should be able to remove this once we are switched to the new # semantic analyzer! if TYPE_CHECKING: from mypy.type_visitor import ( TypeVisitor as TypeVisitor, SyntheticTypeVisitor as SyntheticTypeVisitor, ) # Supported names of TypedDict type constructors. TPDICT_NAMES = ('typing.TypedDict', 'typing_extensions.TypedDict', 'mypy_extensions.TypedDict') # type: Final # Supported fallback instance type names for TypedDict types. 
TPDICT_FB_NAMES = ('typing._TypedDict', 'typing_extensions._TypedDict', 'mypy_extensions._TypedDict') # type: Final # A placeholder used for Bogus[...] parameters _dummy = object() # type: Final[Any] class TypeOfAny: """ This class describes different types of Any. Each 'Any' can be of only one type at a time. """ # Was this Any type inferred without a type annotation? unannotated = 1 # type: Final # Does this Any come from an explicit type annotation? explicit = 2 # type: Final # Does this come from an unfollowed import? See --disallow-any-unimported option from_unimported_type = 3 # type: Final # Does this Any type come from omitted generics? from_omitted_generics = 4 # type: Final # Does this Any come from an error? from_error = 5 # type: Final # Is this a type that can't be represented in mypy's type system? For instance, type of # call to NewType...). Even though these types aren't real Anys, we treat them as such. # Also used for variables named '_'. special_form = 6 # type: Final # Does this Any come from interaction with another Any? from_another_any = 7 # type: Final # Does this Any come from an implementation limitation/bug? implementation_artifact = 8 # type: Final # Does this Any come from use in the suggestion engine? This is # used to ignore Anys inserted by the suggestion engine when # generating constraints. 
    suggestion_engine = 9  # type: Final


def deserialize_type(data: Union[JsonDict, str]) -> 'Type':
    """Reconstruct a Type from its serialized JSON form (or a type-ref string)."""
    if isinstance(data, str):
        # A bare string is a reference to an Instance.
        return Instance.deserialize(data)
    classname = data['.class']
    method = deserialize_map.get(classname)
    if method is not None:
        return method(data)
    raise NotImplementedError('unexpected .class {}'.format(classname))


class Type(mypy.nodes.Context):
    """Abstract base class for all types."""

    __slots__ = ('can_be_true', 'can_be_false')

    def __init__(self, line: int = -1, column: int = -1) -> None:
        super().__init__(line, column)
        self.can_be_true = self.can_be_true_default()
        self.can_be_false = self.can_be_false_default()

    def can_be_true_default(self) -> bool:
        return True

    def can_be_false_default(self) -> bool:
        return True

    def accept(self, visitor: 'TypeVisitor[T]') -> T:
        raise RuntimeError('Not implemented')

    def __repr__(self) -> str:
        return self.accept(TypeStrVisitor())

    def serialize(self) -> Union[JsonDict, str]:
        raise NotImplementedError('Cannot serialize {} instance'.format(self.__class__.__name__))

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'Type':
        raise NotImplementedError('Cannot deserialize {} instance'.format(cls.__name__))


class TypeAliasType(Type):
    """A type alias to another type.

    NOTE: this is not being used yet, and the implementation is still incomplete.

    To support recursive type aliases we don't immediately expand a type alias
    during semantic analysis, but create an instance of this type that records the target alias
    definition node (mypy.nodes.TypeAlias) and type arguments (for generic aliases).

    This is very similar to how TypeInfo vs Instance interact, where a recursive class-based
    structure like
        class Node:
            value: int
            children: List[Node]
    can be represented in a tree-like manner.
    """

    __slots__ = ('alias', 'args', 'line', 'column', 'type_ref')

    def __init__(self, alias: Optional[mypy.nodes.TypeAlias], args: List[Type],
                 line: int = -1, column: int = -1) -> None:
        self.alias = alias
        self.args = args
        # Set only after deserialization; resolved to `alias` during fixup.
        self.type_ref = None  # type: Optional[str]
        super().__init__(line, column)

    def _expand_once(self) -> Type:
        """Expand to the target type exactly once.

        This doesn't do full expansion, i.e. the result can contain another
        (or even this same) type alias. Use this internal helper only when really needed,
        its public wrapper mypy.types.get_proper_type() is preferred.
        """
        assert self.alias is not None
        if self.alias.no_args:
            # We know that no_args=True aliases like L = List must have an instance
            # as their target.
            assert isinstance(self.alias.target, Instance)  # type: ignore[misc]
            return self.alias.target.copy_modified(args=self.args)
        return replace_alias_tvars(self.alias.target, self.alias.alias_tvars, self.args,
                                   self.line, self.column)

    def _partial_expansion(self) -> Tuple['ProperType', bool]:
        # Private method mostly for debugging and testing.
        unroller = UnrollAliasVisitor(set())
        unrolled = self.accept(unroller)
        assert isinstance(unrolled, ProperType)
        return unrolled, unroller.recursed

    def expand_all_if_possible(self) -> Optional['ProperType']:
        """Attempt a full expansion of the type alias (including nested aliases).

        If the expansion is not possible, i.e. the alias is (mutually-)recursive,
        return None.
        """
        unrolled, recursed = self._partial_expansion()
        if recursed:
            return None
        return unrolled

    @property
    def is_recursive(self) -> bool:
        assert self.alias is not None, 'Unfixed type alias'
        is_recursive = self.alias._is_recursive
        if is_recursive is None:
            is_recursive = self.expand_all_if_possible() is None
            # We cache the value on the underlying TypeAlias node as an optimization,
            # since the value is the same for all instances of the same alias.
            self.alias._is_recursive = is_recursive
        return is_recursive

    def can_be_true_default(self) -> bool:
        if self.alias is not None:
            return self.alias.target.can_be_true
        return super().can_be_true_default()

    def can_be_false_default(self) -> bool:
        if self.alias is not None:
            return self.alias.target.can_be_false
        return super().can_be_false_default()

    def accept(self, visitor: 'TypeVisitor[T]') -> T:
        return visitor.visit_type_alias_type(self)

    def __hash__(self) -> int:
        return hash((self.alias, tuple(self.args)))

    def __eq__(self, other: object) -> bool:
        # Note: never use this to determine subtype relationships, use is_subtype().
        if not isinstance(other, TypeAliasType):
            return NotImplemented
        return (self.alias == other.alias
                and self.args == other.args)

    def serialize(self) -> JsonDict:
        assert self.alias is not None
        data = {'.class': 'TypeAliasType',
                'type_ref': self.alias.fullname,
                'args': [arg.serialize() for arg in self.args]}  # type: JsonDict
        return data

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'TypeAliasType':
        assert data['.class'] == 'TypeAliasType'
        args = []  # type: List[Type]
        if 'args' in data:
            args_list = data['args']
            assert isinstance(args_list, list)
            args = [deserialize_type(arg) for arg in args_list]
        alias = TypeAliasType(None, args)
        alias.type_ref = data['type_ref']
        return alias

    def copy_modified(self, *,
                      args: Optional[List[Type]] = None) -> 'TypeAliasType':
        return TypeAliasType(
            self.alias, args if args is not None else self.args.copy(),
            self.line, self.column)


class ProperType(Type):
    """Not a type alias.

    Every type except TypeAliasType must inherit from this type.
    """


class TypeVarId:
    # A type variable is uniquely identified by its raw id and meta level.

    # For plain variables (type parameters of generic classes and
    # functions) raw ids are allocated by semantic analysis, using
    # positive ids 1, 2, ... for generic class parameters and negative
    # ids -1, ... for generic function type arguments.
    # This convention is only used to keep type variable ids distinct when
    # allocating them; the type checker makes no distinction between class
    # and function type variables.

    # Metavariables are allocated unique ids starting from 1.
    raw_id = 0  # type: int

    # Level of the variable in type inference. Currently either 0 for
    # declared types, or 1 for type inference metavariables.
    meta_level = 0  # type: int

    # Class variable used for allocating fresh ids for metavariables.
    next_raw_id = 1  # type: ClassVar[int]

    def __init__(self, raw_id: int, meta_level: int = 0) -> None:
        self.raw_id = raw_id
        self.meta_level = meta_level

    @staticmethod
    def new(meta_level: int) -> 'TypeVarId':
        # Allocate a fresh, globally unique id at the given meta level.
        raw_id = TypeVarId.next_raw_id
        TypeVarId.next_raw_id += 1
        return TypeVarId(raw_id, meta_level)

    def __repr__(self) -> str:
        return self.raw_id.__repr__()

    def __eq__(self, other: object) -> bool:
        if isinstance(other, TypeVarId):
            return (self.raw_id == other.raw_id and
                    self.meta_level == other.meta_level)
        else:
            return False

    def __ne__(self, other: object) -> bool:
        return not (self == other)

    def __hash__(self) -> int:
        return hash((self.raw_id, self.meta_level))

    def is_meta_var(self) -> bool:
        return self.meta_level > 0


class TypeVarDef(mypy.nodes.Context):
    """Definition of a single type variable."""

    name = ''  # Name (may be qualified)
    fullname = ''  # Fully qualified name
    id = None  # type: TypeVarId
    values = None  # type: List[Type]  # Value restriction, empty list if no restriction
    upper_bound = None  # type: Type
    variance = INVARIANT  # type: int

    def __init__(self, name: str, fullname: str, id: Union[TypeVarId, int], values: List[Type],
                 upper_bound: Type, variance: int = INVARIANT, line: int = -1,
                 column: int = -1) -> None:
        super().__init__(line, column)
        assert values is not None, "No restrictions must be represented by empty list"
        self.name = name
        self.fullname = fullname
        if isinstance(id, int):
            # Accept a plain int id for convenience and wrap it.
            id = TypeVarId(id)
        self.id = id
        self.values = values
        self.upper_bound = upper_bound
        self.variance = variance

    @staticmethod
    def new_unification_variable(old: 'TypeVarDef') -> 'TypeVarDef':
        # Copy the definition with a fresh metavariable id (meta level 1).
        new_id = TypeVarId.new(meta_level=1)
        return TypeVarDef(old.name, old.fullname, new_id, old.values,
                          old.upper_bound, old.variance, old.line, old.column)

    def __repr__(self) -> str:
        if self.values:
            return '{} in {}'.format(self.name, tuple(self.values))
        elif not is_named_instance(self.upper_bound, 'builtins.object'):
            return '{} <: {}'.format(self.name, self.upper_bound)
        else:
            return self.name

    def serialize(self) -> JsonDict:
        assert not self.id.is_meta_var()
        return {'.class': 'TypeVarDef',
                'name': self.name,
                'fullname': self.fullname,
                'id': self.id.raw_id,
                'values': [v.serialize() for v in self.values],
                'upper_bound': self.upper_bound.serialize(),
                'variance': self.variance,
                }

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'TypeVarDef':
        assert data['.class'] == 'TypeVarDef'
        return TypeVarDef(data['name'],
                          data['fullname'],
                          data['id'],
                          [deserialize_type(v) for v in data['values']],
                          deserialize_type(data['upper_bound']),
                          data['variance'],
                          )


class UnboundType(ProperType):
    """Instance type that has not been bound during semantic analysis."""

    __slots__ = ('name', 'args', 'optional', 'empty_tuple_index',
                 'original_str_expr', 'original_str_fallback')

    def __init__(self,
                 name: Optional[str],
                 args: Optional[List[Type]] = None,
                 line: int = -1,
                 column: int = -1,
                 optional: bool = False,
                 empty_tuple_index: bool = False,
                 original_str_expr: Optional[str] = None,
                 original_str_fallback: Optional[str] = None,
                 ) -> None:
        super().__init__(line, column)
        if not args:
            args = []
        assert name is not None
        self.name = name
        self.args = args
        # Should this type be wrapped in an Optional?
        self.optional = optional
        # Special case for X[()]
        self.empty_tuple_index = empty_tuple_index
        # If this UnboundType was originally defined as a str or bytes, keep track of
        # the original contents of that string-like thing.
        # This way, if this UnboundExpr
        # ever shows up inside of a LiteralType, we can determine whether that
        # Literal[...] is valid or not. E.g. Literal[foo] is most likely invalid
        # (unless 'foo' is an alias for another literal or something) and
        # Literal["foo"] most likely is.
        #
        # We keep track of the entire string instead of just using a boolean flag
        # so we can distinguish between things like Literal["foo"] vs
        # Literal[" foo "].
        #
        # We also keep track of what the original base fallback type was supposed to be
        # so we don't have to try and recompute it later
        self.original_str_expr = original_str_expr
        self.original_str_fallback = original_str_fallback

    def copy_modified(self,
                      args: Bogus[Optional[List[Type]]] = _dummy,
                      ) -> 'UnboundType':
        if args is _dummy:
            args = self.args
        return UnboundType(
            name=self.name,
            args=args,
            line=self.line,
            column=self.column,
            optional=self.optional,
            empty_tuple_index=self.empty_tuple_index,
            original_str_expr=self.original_str_expr,
            original_str_fallback=self.original_str_fallback,
        )

    def accept(self, visitor: 'TypeVisitor[T]') -> T:
        return visitor.visit_unbound_type(self)

    def __hash__(self) -> int:
        return hash((self.name, self.optional, tuple(self.args), self.original_str_expr))

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, UnboundType):
            return NotImplemented
        return (self.name == other.name and self.optional == other.optional and
                self.args == other.args and self.original_str_expr == other.original_str_expr and
                self.original_str_fallback == other.original_str_fallback)

    def serialize(self) -> JsonDict:
        return {'.class': 'UnboundType',
                'name': self.name,
                'args': [a.serialize() for a in self.args],
                'expr': self.original_str_expr,
                'expr_fallback': self.original_str_fallback,
                }

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'UnboundType':
        assert data['.class'] == 'UnboundType'
        return UnboundType(data['name'],
                           [deserialize_type(a) for a in data['args']],
                           original_str_expr=data['expr'],
                           original_str_fallback=data['expr_fallback'],
                           )


class CallableArgument(ProperType):
    """Represents an Arg(type, 'name') inside a Callable's type list.

    Note that this is a synthetic type for helping parse ASTs, not a real type.
    """
    typ = None  # type: Type
    name = None  # type: Optional[str]
    constructor = None  # type: Optional[str]

    def __init__(self, typ: Type, name: Optional[str], constructor: Optional[str],
                 line: int = -1, column: int = -1) -> None:
        super().__init__(line, column)
        self.typ = typ
        self.name = name
        self.constructor = constructor

    def accept(self, visitor: 'TypeVisitor[T]') -> T:
        # Only synthetic-type-aware visitors can handle this node.
        assert isinstance(visitor, SyntheticTypeVisitor)
        return visitor.visit_callable_argument(self)

    def serialize(self) -> JsonDict:
        assert False, "Synthetic types don't serialize"


class TypeList(ProperType):
    """Information about argument types and names [...].

    This is used for the arguments of a Callable type, i.e. for
    [arg, ...] in Callable[[arg, ...], ret]. This is not a real type
    but a syntactic AST construct. UnboundTypes can also have TypeList
    types before they are processed into Callable types.
    """

    items = None  # type: List[Type]

    def __init__(self, items: List[Type], line: int = -1, column: int = -1) -> None:
        super().__init__(line, column)
        self.items = items

    def accept(self, visitor: 'TypeVisitor[T]') -> T:
        # Only synthetic-type-aware visitors can handle this node.
        assert isinstance(visitor, SyntheticTypeVisitor)
        return visitor.visit_type_list(self)

    def serialize(self) -> JsonDict:
        assert False, "Synthetic types don't serialize"


class AnyType(ProperType):
    """The type 'Any'."""

    __slots__ = ('type_of_any', 'source_any', 'missing_import_name')

    def __init__(self,
                 type_of_any: int,
                 source_any: Optional['AnyType'] = None,
                 missing_import_name: Optional[str] = None,
                 line: int = -1,
                 column: int = -1) -> None:
        super().__init__(line, column)
        self.type_of_any = type_of_any
        # If this Any was created as a result of interacting with another 'Any', record the source
        # and use it in reports.
        self.source_any = source_any
        if source_any and source_any.source_any:
            # Collapse chains: always point at the original source Any.
            self.source_any = source_any.source_any

        if source_any is None:
            self.missing_import_name = missing_import_name
        else:
            self.missing_import_name = source_any.missing_import_name

        # Only unimported type anys and anys from other anys should have an import name
        assert (missing_import_name is None or
                type_of_any in (TypeOfAny.from_unimported_type, TypeOfAny.from_another_any))
        # Only Anys that come from another Any can have source_any.
        assert type_of_any != TypeOfAny.from_another_any or source_any is not None
        # We should not have chains of Anys.
        assert not self.source_any or self.source_any.type_of_any != TypeOfAny.from_another_any

    @property
    def is_from_error(self) -> bool:
        return self.type_of_any == TypeOfAny.from_error

    def accept(self, visitor: 'TypeVisitor[T]') -> T:
        return visitor.visit_any(self)

    def copy_modified(self,
                      # Mark with Bogus because _dummy is just an object (with type Any)
                      type_of_any: Bogus[int] = _dummy,
                      original_any: Bogus[Optional['AnyType']] = _dummy,
                      ) -> 'AnyType':
        if type_of_any is _dummy:
            type_of_any = self.type_of_any
        if original_any is _dummy:
            original_any = self.source_any
        return AnyType(type_of_any=type_of_any, source_any=original_any,
                       missing_import_name=self.missing_import_name,
                       line=self.line, column=self.column)

    def __hash__(self) -> int:
        return hash(AnyType)

    def __eq__(self, other: object) -> bool:
        # All AnyTypes compare equal regardless of provenance.
        return isinstance(other, AnyType)

    def serialize(self) -> JsonDict:
        return {'.class': 'AnyType', 'type_of_any': self.type_of_any,
                'source_any': self.source_any.serialize() if self.source_any is not None else None,
                'missing_import_name': self.missing_import_name}

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'AnyType':
        assert data['.class'] == 'AnyType'
        source = data['source_any']
        return AnyType(data['type_of_any'],
                       AnyType.deserialize(source) if source is not None else None,
                       data['missing_import_name'])


class UninhabitedType(ProperType):
    """This type has no members.

    This type is the bottom type.
    With strict Optional checking, it is the only common subtype between all
    other types, which allows `meet` to be well defined.  Without strict
    Optional checking, NoneType fills this role.

    In general, for any type T:
        join(UninhabitedType, T) = T
        meet(UninhabitedType, T) = UninhabitedType
        is_subtype(UninhabitedType, T) = True
    """

    is_noreturn = False  # Does this come from a NoReturn?  Purely for error messages.
    # It is important to track whether this is an actual NoReturn type, or just a result
    # of ambiguous type inference, in the latter case we don't want to mark a branch as
    # unreachable in binder.
    ambiguous = False  # Is this a result of inference for a variable without constraints?

    def __init__(self, is_noreturn: bool = False, line: int = -1, column: int = -1) -> None:
        super().__init__(line, column)
        self.is_noreturn = is_noreturn

    def can_be_true_default(self) -> bool:
        return False

    def can_be_false_default(self) -> bool:
        return False

    def accept(self, visitor: 'TypeVisitor[T]') -> T:
        return visitor.visit_uninhabited_type(self)

    def __hash__(self) -> int:
        return hash(UninhabitedType)

    def __eq__(self, other: object) -> bool:
        return isinstance(other, UninhabitedType)

    def serialize(self) -> JsonDict:
        return {'.class': 'UninhabitedType',
                'is_noreturn': self.is_noreturn}

    @classmethod
    def deserialize(cls, data: JsonDict) -> 'UninhabitedType':
        assert data['.class'] == 'UninhabitedType'
        return UninhabitedType(is_noreturn=data['is_noreturn'])


class NoneType(ProperType):
    """The type of 'None'.

    This type can be written by users as 'None'.
""" __slots__ = () def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) def can_be_true_default(self) -> bool: return False def __hash__(self) -> int: return hash(NoneType) def __eq__(self, other: object) -> bool: return isinstance(other, NoneType) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_none_type(self) def serialize(self) -> JsonDict: return {'.class': 'NoneType'} @classmethod def deserialize(cls, data: JsonDict) -> 'NoneType': assert data['.class'] == 'NoneType' return NoneType() # NoneType used to be called NoneTyp so to avoid needlessly breaking # external plugins we keep that alias here. NoneTyp = NoneType class ErasedType(ProperType): """Placeholder for an erased type. This is used during type inference. This has the special property that it is ignored during type inference. """ def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_erased_type(self) class DeletedType(ProperType): """Type of deleted variables. These can be used as lvalues but not rvalues. """ source = '' # type: Optional[str] # May be None; name that generated this value def __init__(self, source: Optional[str] = None, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.source = source def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_deleted_type(self) def serialize(self) -> JsonDict: return {'.class': 'DeletedType', 'source': self.source} @classmethod def deserialize(cls, data: JsonDict) -> 'DeletedType': assert data['.class'] == 'DeletedType' return DeletedType(data['source']) # Fake TypeInfo to be used as a placeholder during Instance de-serialization. NOT_READY = mypy.nodes.FakeInfo('De-serialization failure: TypeInfo not fixed') # type: Final class Instance(ProperType): """An instance type of form C[T1, ..., Tn]. The list of type variables may be empty. 
""" __slots__ = ('type', 'args', 'erased', 'invalid', 'type_ref', 'last_known_value') def __init__(self, typ: mypy.nodes.TypeInfo, args: List[Type], line: int = -1, column: int = -1, erased: bool = False, last_known_value: Optional['LiteralType'] = None) -> None: super().__init__(line, column) self.type = typ self.args = args self.type_ref = None # type: Optional[str] # True if result of type variable substitution self.erased = erased # True if recovered after incorrect number of type arguments error self.invalid = False # This field keeps track of the underlying Literal[...] value associated with # this instance, if one is known. # # This field is set whenever possible within expressions, but is erased upon # variable assignment (see erasetype.remove_instance_last_known_values) unless # the variable is declared to be final. # # For example, consider the following program: # # a = 1 # b: Final[int] = 2 # c: Final = 3 # print(a + b + c + 4) # # The 'Instance' objects associated with the expressions '1', '2', '3', and '4' will # have last_known_values of type Literal[1], Literal[2], Literal[3], and Literal[4] # respectively. However, the Instance object assigned to 'a' and 'b' will have their # last_known_value erased: variable 'a' is mutable; variable 'b' was declared to be # specifically an int. # # Or more broadly, this field lets this Instance "remember" its original declaration # when applicable. We want this behavior because we want implicit Final declarations # to act pretty much identically with constants: we should be able to replace any # places where we use some Final variable with the original value and get the same # type-checking behavior. 
For example, we want this program: # # def expects_literal(x: Literal[3]) -> None: pass # var: Final = 3 # expects_literal(var) # # ...to type-check in the exact same way as if we had written the program like this: # # def expects_literal(x: Literal[3]) -> None: pass # expects_literal(3) # # In order to make this work (especially with literal types), we need var's type # (an Instance) to remember the "original" value. # # Preserving this value within expressions is useful for similar reasons. # # Currently most of mypy will ignore this field and will continue to treat this type like # a regular Instance. We end up using this field only when we are explicitly within a # Literal context. self.last_known_value = last_known_value def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_instance(self) def __hash__(self) -> int: return hash((self.type, tuple(self.args), self.last_known_value)) def __eq__(self, other: object) -> bool: if not isinstance(other, Instance): return NotImplemented return (self.type == other.type and self.args == other.args and self.last_known_value == other.last_known_value) def serialize(self) -> Union[JsonDict, str]: assert self.type is not None type_ref = self.type.fullname if not self.args and not self.last_known_value: return type_ref data = {'.class': 'Instance', } # type: JsonDict data['type_ref'] = type_ref data['args'] = [arg.serialize() for arg in self.args] if self.last_known_value is not None: data['last_known_value'] = self.last_known_value.serialize() return data @classmethod def deserialize(cls, data: Union[JsonDict, str]) -> 'Instance': if isinstance(data, str): inst = Instance(NOT_READY, []) inst.type_ref = data return inst assert data['.class'] == 'Instance' args = [] # type: List[Type] if 'args' in data: args_list = data['args'] assert isinstance(args_list, list) args = [deserialize_type(arg) for arg in args_list] inst = Instance(NOT_READY, args) inst.type_ref = data['type_ref'] # Will be fixed up by fixup.py 
later. if 'last_known_value' in data: inst.last_known_value = LiteralType.deserialize(data['last_known_value']) return inst def copy_modified(self, *, args: Bogus[List[Type]] = _dummy, erased: Bogus[bool] = _dummy, last_known_value: Bogus[Optional['LiteralType']] = _dummy) -> 'Instance': return Instance( self.type, args if args is not _dummy else self.args, self.line, self.column, erased if erased is not _dummy else self.erased, last_known_value if last_known_value is not _dummy else self.last_known_value, ) def has_readable_member(self, name: str) -> bool: return self.type.has_readable_member(name) class TypeVarType(ProperType): """A type variable type. This refers to either a class type variable (id > 0) or a function type variable (id < 0). """ __slots__ = ('name', 'fullname', 'id', 'values', 'upper_bound', 'variance') def __init__(self, binder: TypeVarDef, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.name = binder.name # Name of the type variable (for messages and debugging) self.fullname = binder.fullname # type: str self.id = binder.id # type: TypeVarId # Value restriction, empty list if no restriction self.values = binder.values # type: List[Type] # Upper bound for values self.upper_bound = binder.upper_bound # type: Type # See comments in TypeVarDef for more about variance. 
self.variance = binder.variance # type: int def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_type_var(self) def __hash__(self) -> int: return hash(self.id) def __eq__(self, other: object) -> bool: if not isinstance(other, TypeVarType): return NotImplemented return self.id == other.id def serialize(self) -> JsonDict: assert not self.id.is_meta_var() return {'.class': 'TypeVarType', 'name': self.name, 'fullname': self.fullname, 'id': self.id.raw_id, 'values': [v.serialize() for v in self.values], 'upper_bound': self.upper_bound.serialize(), 'variance': self.variance, } @classmethod def deserialize(cls, data: JsonDict) -> 'TypeVarType': assert data['.class'] == 'TypeVarType' tvdef = TypeVarDef(data['name'], data['fullname'], data['id'], [deserialize_type(v) for v in data['values']], deserialize_type(data['upper_bound']), data['variance']) return TypeVarType(tvdef) class FunctionLike(ProperType): """Abstract base class for function types.""" __slots__ = ('fallback',) def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.can_be_false = False if TYPE_CHECKING: # we don't want a runtime None value # Corresponding instance type (e.g. 
builtins.type) self.fallback = cast(Instance, None) @abstractmethod def is_type_obj(self) -> bool: pass @abstractmethod def type_object(self) -> mypy.nodes.TypeInfo: pass @abstractmethod def items(self) -> List['CallableType']: pass @abstractmethod def with_name(self, name: str) -> 'FunctionLike': pass @abstractmethod def get_name(self) -> Optional[str]: pass FormalArgument = NamedTuple('FormalArgument', [ ('name', Optional[str]), ('pos', Optional[int]), ('typ', Type), ('required', bool)]) class CallableType(FunctionLike): """Type of a non-overloaded callable object (such as function).""" __slots__ = ('arg_types', # Types of function arguments 'arg_kinds', # ARG_ constants 'arg_names', # Argument names; None if not a keyword argument 'min_args', # Minimum number of arguments; derived from arg_kinds 'ret_type', # Return value type 'name', # Name (may be None; for error messages and plugins) 'definition', # For error messages. May be None. 'variables', # Type variables for a generic function 'is_ellipsis_args', # Is this Callable[..., t] (with literal '...')? 'is_classmethod_class', # Is this callable constructed for the benefit # of a classmethod's 'cls' argument? 'implicit', # Was this type implicitly generated instead of explicitly # specified by the user? 'special_sig', # Non-None for signatures that require special handling # (currently only value is 'dict' for a signature similar to # 'dict') 'from_type_type', # Was this callable generated by analyzing Type[...] # instantiation? 'bound_args', # Bound type args, mostly unused but may be useful for # tools that consume mypy ASTs 'def_extras', # Information about original definition we want to serialize. # This is used for more detailed error messages. 
) def __init__(self, arg_types: Sequence[Type], arg_kinds: List[int], arg_names: Sequence[Optional[str]], ret_type: Type, fallback: Instance, name: Optional[str] = None, definition: Optional[SymbolNode] = None, variables: Optional[List[TypeVarDef]] = None, line: int = -1, column: int = -1, is_ellipsis_args: bool = False, implicit: bool = False, special_sig: Optional[str] = None, from_type_type: bool = False, bound_args: Sequence[Optional[Type]] = (), def_extras: Optional[Dict[str, Any]] = None, ) -> None: super().__init__(line, column) assert len(arg_types) == len(arg_kinds) == len(arg_names) if variables is None: variables = [] self.arg_types = list(arg_types) self.arg_kinds = arg_kinds self.arg_names = list(arg_names) self.min_args = arg_kinds.count(ARG_POS) self.ret_type = ret_type self.fallback = fallback assert not name or ' 'CallableType': return CallableType( arg_types=arg_types if arg_types is not _dummy else self.arg_types, arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds, arg_names=arg_names if arg_names is not _dummy else self.arg_names, ret_type=ret_type if ret_type is not _dummy else self.ret_type, fallback=fallback if fallback is not _dummy else self.fallback, name=name if name is not _dummy else self.name, definition=definition if definition is not _dummy else self.definition, variables=variables if variables is not _dummy else self.variables, line=line if line is not _dummy else self.line, column=column if column is not _dummy else self.column, is_ellipsis_args=( is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args), implicit=implicit if implicit is not _dummy else self.implicit, special_sig=special_sig if special_sig is not _dummy else self.special_sig, from_type_type=from_type_type if from_type_type is not _dummy else self.from_type_type, bound_args=bound_args if bound_args is not _dummy else self.bound_args, def_extras=def_extras if def_extras is not _dummy else dict(self.def_extras), ) def var_arg(self) 
-> Optional[FormalArgument]: """The formal argument for *args.""" for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)): if kind == ARG_STAR: return FormalArgument(None, position, type, False) return None def kw_arg(self) -> Optional[FormalArgument]: """The formal argument for **kwargs.""" for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)): if kind == ARG_STAR2: return FormalArgument(None, position, type, False) return None @property def is_var_arg(self) -> bool: """Does this callable have a *args argument?""" return ARG_STAR in self.arg_kinds @property def is_kw_arg(self) -> bool: """Does this callable have a **kwargs argument?""" return ARG_STAR2 in self.arg_kinds def is_type_obj(self) -> bool: return self.fallback.type.is_metaclass() def type_object(self) -> mypy.nodes.TypeInfo: assert self.is_type_obj() ret = get_proper_type(self.ret_type) if isinstance(ret, TypeVarType): ret = get_proper_type(ret.upper_bound) if isinstance(ret, TupleType): ret = ret.partial_fallback assert isinstance(ret, Instance) return ret.type def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_callable_type(self) def with_name(self, name: str) -> 'CallableType': """Return a copy of this type with the specified name.""" return self.copy_modified(ret_type=self.ret_type, name=name) def get_name(self) -> Optional[str]: return self.name def max_possible_positional_args(self) -> int: """Returns maximum number of positional arguments this method could possibly accept. This takes into account *arg and **kwargs but excludes keyword-only args.""" if self.is_var_arg or self.is_kw_arg: return sys.maxsize blacklist = (ARG_NAMED, ARG_NAMED_OPT) return len([kind not in blacklist for kind in self.arg_kinds]) def formal_arguments(self, include_star_args: bool = False) -> Iterator[FormalArgument]: """Yields the formal arguments corresponding to this callable, ignoring *arg and **kwargs. 
To handle *args and **kwargs, use the 'callable.var_args' and 'callable.kw_args' fields, if they are not None. If you really want to include star args in the yielded output, set the 'include_star_args' parameter to 'True'.""" done_with_positional = False for i in range(len(self.arg_types)): kind = self.arg_kinds[i] if kind in (ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT): done_with_positional = True if not include_star_args and kind in (ARG_STAR, ARG_STAR2): continue required = kind in (ARG_POS, ARG_NAMED) pos = None if done_with_positional else i yield FormalArgument( self.arg_names[i], pos, self.arg_types[i], required) def argument_by_name(self, name: Optional[str]) -> Optional[FormalArgument]: if name is None: return None seen_star = False for i, (arg_name, kind, typ) in enumerate( zip(self.arg_names, self.arg_kinds, self.arg_types)): # No more positional arguments after these. if kind in (ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT): seen_star = True if kind == ARG_STAR or kind == ARG_STAR2: continue if arg_name == name: position = None if seen_star else i return FormalArgument(name, position, typ, kind in (ARG_POS, ARG_NAMED)) return self.try_synthesizing_arg_from_kwarg(name) def argument_by_position(self, position: Optional[int]) -> Optional[FormalArgument]: if position is None: return None if position >= len(self.arg_names): return self.try_synthesizing_arg_from_vararg(position) name, kind, typ = ( self.arg_names[position], self.arg_kinds[position], self.arg_types[position], ) if kind in (ARG_POS, ARG_OPT): return FormalArgument(name, position, typ, kind == ARG_POS) else: return self.try_synthesizing_arg_from_vararg(position) def try_synthesizing_arg_from_kwarg(self, name: Optional[str]) -> Optional[FormalArgument]: kw_arg = self.kw_arg() if kw_arg is not None: return FormalArgument(name, None, kw_arg.typ, False) else: return None def try_synthesizing_arg_from_vararg(self, position: Optional[int]) -> Optional[FormalArgument]: var_arg = self.var_arg() if 
var_arg is not None: return FormalArgument(None, position, var_arg.typ, False) else: return None def items(self) -> List['CallableType']: return [self] def is_generic(self) -> bool: return bool(self.variables) def type_var_ids(self) -> List[TypeVarId]: a = [] # type: List[TypeVarId] for tv in self.variables: a.append(tv.id) return a def __hash__(self) -> int: return hash((self.ret_type, self.is_type_obj(), self.is_ellipsis_args, self.name, tuple(self.arg_types), tuple(self.arg_names), tuple(self.arg_kinds))) def __eq__(self, other: object) -> bool: if isinstance(other, CallableType): return (self.ret_type == other.ret_type and self.arg_types == other.arg_types and self.arg_names == other.arg_names and self.arg_kinds == other.arg_kinds and self.name == other.name and self.is_type_obj() == other.is_type_obj() and self.is_ellipsis_args == other.is_ellipsis_args) else: return NotImplemented def serialize(self) -> JsonDict: # TODO: As an optimization, leave out everything related to # generic functions for non-generic functions. return {'.class': 'CallableType', 'arg_types': [t.serialize() for t in self.arg_types], 'arg_kinds': self.arg_kinds, 'arg_names': self.arg_names, 'ret_type': self.ret_type.serialize(), 'fallback': self.fallback.serialize(), 'name': self.name, # We don't serialize the definition (only used for error messages). 'variables': [v.serialize() for v in self.variables], 'is_ellipsis_args': self.is_ellipsis_args, 'implicit': self.implicit, 'bound_args': [(None if t is None else t.serialize()) for t in self.bound_args], 'def_extras': dict(self.def_extras), } @classmethod def deserialize(cls, data: JsonDict) -> 'CallableType': assert data['.class'] == 'CallableType' # TODO: Set definition to the containing SymbolNode? 
return CallableType([deserialize_type(t) for t in data['arg_types']], data['arg_kinds'], data['arg_names'], deserialize_type(data['ret_type']), Instance.deserialize(data['fallback']), name=data['name'], variables=[TypeVarDef.deserialize(v) for v in data['variables']], is_ellipsis_args=data['is_ellipsis_args'], implicit=data['implicit'], bound_args=[(None if t is None else deserialize_type(t)) for t in data['bound_args']], def_extras=data['def_extras'] ) class Overloaded(FunctionLike): """Overloaded function type T1, ... Tn, where each Ti is CallableType. The variant to call is chosen based on static argument types. Overloaded function types can only be defined in stub files, and thus there is no explicit runtime dispatch implementation. """ _items = None # type: List[CallableType] # Must not be empty def __init__(self, items: List[CallableType]) -> None: super().__init__(items[0].line, items[0].column) self._items = items self.fallback = items[0].fallback def items(self) -> List[CallableType]: return self._items def name(self) -> Optional[str]: return self.get_name() def is_type_obj(self) -> bool: # All the items must have the same type object status, so it's # sufficient to query only (any) one of them. return self._items[0].is_type_obj() def type_object(self) -> mypy.nodes.TypeInfo: # All the items must have the same type object, so it's sufficient to # query only (any) one of them. 
return self._items[0].type_object() def with_name(self, name: str) -> 'Overloaded': ni = [] # type: List[CallableType] for it in self._items: ni.append(it.with_name(name)) return Overloaded(ni) def get_name(self) -> Optional[str]: return self._items[0].name def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_overloaded(self) def __hash__(self) -> int: return hash(tuple(self.items())) def __eq__(self, other: object) -> bool: if not isinstance(other, Overloaded): return NotImplemented return self.items() == other.items() def serialize(self) -> JsonDict: return {'.class': 'Overloaded', 'items': [t.serialize() for t in self.items()], } @classmethod def deserialize(cls, data: JsonDict) -> 'Overloaded': assert data['.class'] == 'Overloaded' return Overloaded([CallableType.deserialize(t) for t in data['items']]) class TupleType(ProperType): """The tuple type Tuple[T1, ..., Tn] (at least one type argument). Instance variables: items: Tuple item types partial_fallback: The (imprecise) underlying instance type that is used for non-tuple methods. This is generally builtins.tuple[Any] for regular tuples, but it's different for named tuples and classes with a tuple base class. Use mypy.typeops.tuple_fallback to calculate the precise fallback type derived from item types. implicit: If True, derived from a tuple expression (t,....) instead of Tuple[t, ...] 
""" items = None # type: List[Type] partial_fallback = None # type: Instance implicit = False def __init__(self, items: List[Type], fallback: Instance, line: int = -1, column: int = -1, implicit: bool = False) -> None: super().__init__(line, column) self.items = items self.partial_fallback = fallback self.implicit = implicit self.can_be_true = len(self.items) > 0 self.can_be_false = len(self.items) == 0 def length(self) -> int: return len(self.items) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_tuple_type(self) def __hash__(self) -> int: return hash((tuple(self.items), self.partial_fallback)) def __eq__(self, other: object) -> bool: if not isinstance(other, TupleType): return NotImplemented return self.items == other.items and self.partial_fallback == other.partial_fallback def serialize(self) -> JsonDict: return {'.class': 'TupleType', 'items': [t.serialize() for t in self.items], 'partial_fallback': self.partial_fallback.serialize(), 'implicit': self.implicit, } @classmethod def deserialize(cls, data: JsonDict) -> 'TupleType': assert data['.class'] == 'TupleType' return TupleType([deserialize_type(t) for t in data['items']], Instance.deserialize(data['partial_fallback']), implicit=data['implicit']) def copy_modified(self, *, fallback: Optional[Instance] = None, items: Optional[List[Type]] = None) -> 'TupleType': if fallback is None: fallback = self.partial_fallback if items is None: items = self.items return TupleType(items, fallback, self.line, self.column) def slice(self, begin: Optional[int], end: Optional[int], stride: Optional[int]) -> 'TupleType': return TupleType(self.items[begin:end:stride], self.partial_fallback, self.line, self.column, self.implicit) class TypedDictType(ProperType): """Type of TypedDict object {'k1': v1, ..., 'kn': vn}. A TypedDict object is a dictionary with specific string (literal) keys. Each key has a value with a distinct type that depends on the key. TypedDict objects are normal dict objects at runtime. 
A TypedDictType can be either named or anonymous. If it's anonymous, its fallback will be typing_extensions._TypedDict (Instance). _TypedDict is a subclass of Mapping[str, object] and defines all non-mapping dict methods that TypedDict supports. Some dict methods are unsafe and not supported. _TypedDict isn't defined at runtime. If a TypedDict is named, its fallback will be an Instance of the named type (ex: "Point") whose TypeInfo has a typeddict_type that is anonymous. This is similar to how named tuples work. TODO: The fallback structure is perhaps overly complicated. """ items = None # type: OrderedDict[str, Type] # item_name -> item_type required_keys = None # type: Set[str] fallback = None # type: Instance def __init__(self, items: 'OrderedDict[str, Type]', required_keys: Set[str], fallback: Instance, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.items = items self.required_keys = required_keys self.fallback = fallback self.can_be_true = len(self.items) > 0 self.can_be_false = len(self.required_keys) == 0 def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_typeddict_type(self) def __hash__(self) -> int: return hash((frozenset(self.items.items()), self.fallback, frozenset(self.required_keys))) def __eq__(self, other: object) -> bool: if isinstance(other, TypedDictType): if frozenset(self.items.keys()) != frozenset(other.items.keys()): return False for (_, left_item_type, right_item_type) in self.zip(other): if not left_item_type == right_item_type: return False return self.fallback == other.fallback and self.required_keys == other.required_keys else: return NotImplemented def serialize(self) -> JsonDict: return {'.class': 'TypedDictType', 'items': [[n, t.serialize()] for (n, t) in self.items.items()], 'required_keys': sorted(self.required_keys), 'fallback': self.fallback.serialize(), } @classmethod def deserialize(cls, data: JsonDict) -> 'TypedDictType': assert data['.class'] == 'TypedDictType' return 
TypedDictType(OrderedDict([(n, deserialize_type(t)) for (n, t) in data['items']]), set(data['required_keys']), Instance.deserialize(data['fallback'])) def is_anonymous(self) -> bool: return self.fallback.type.fullname in TPDICT_FB_NAMES def as_anonymous(self) -> 'TypedDictType': if self.is_anonymous(): return self assert self.fallback.type.typeddict_type is not None return self.fallback.type.typeddict_type.as_anonymous() def copy_modified(self, *, fallback: Optional[Instance] = None, item_types: Optional[List[Type]] = None, required_keys: Optional[Set[str]] = None) -> 'TypedDictType': if fallback is None: fallback = self.fallback if item_types is None: items = self.items else: items = OrderedDict(zip(self.items, item_types)) if required_keys is None: required_keys = self.required_keys return TypedDictType(items, required_keys, fallback, self.line, self.column) def create_anonymous_fallback(self, *, value_type: Type) -> Instance: anonymous = self.as_anonymous() return anonymous.fallback def names_are_wider_than(self, other: 'TypedDictType') -> bool: return len(other.items.keys() - self.items.keys()) == 0 def zip(self, right: 'TypedDictType') -> Iterable[Tuple[str, Type, Type]]: left = self for (item_name, left_item_type) in left.items.items(): right_item_type = right.items.get(item_name) if right_item_type is not None: yield (item_name, left_item_type, right_item_type) def zipall(self, right: 'TypedDictType') \ -> Iterable[Tuple[str, Optional[Type], Optional[Type]]]: left = self for (item_name, left_item_type) in left.items.items(): right_item_type = right.items.get(item_name) yield (item_name, left_item_type, right_item_type) for (item_name, right_item_type) in right.items.items(): if item_name in left.items: continue yield (item_name, None, right_item_type) class RawExpressionType(ProperType): """A synthetic type representing some arbitrary expression that does not cleanly translate into a type. 
This synthetic type is only used at the beginning stages of semantic analysis and should be completely removing during the process for mapping UnboundTypes to actual types: we either turn it into a LiteralType or an AnyType. For example, suppose `Foo[1]` is initially represented as the following: UnboundType( name='Foo', args=[ RawExpressionType(value=1, base_type_name='builtins.int'), ], ) As we perform semantic analysis, this type will transform into one of two possible forms. If 'Foo' was an alias for 'Literal' all along, this type is transformed into: LiteralType(value=1, fallback=int_instance_here) Alternatively, if 'Foo' is an unrelated class, we report an error and instead produce something like this: Instance(type=typeinfo_for_foo, args=[AnyType(TypeOfAny.from_error)) If the "note" field is not None, the provided note will be reported alongside the error at this point. Note: if "literal_value" is None, that means this object is representing some expression that cannot possibly be a parameter of Literal[...]. 
For example, "Foo[3j]" would be represented as: UnboundType( name='Foo', args=[ RawExpressionType(value=None, base_type_name='builtins.complex'), ], ) """ def __init__(self, literal_value: Optional[LiteralValue], base_type_name: str, line: int = -1, column: int = -1, note: Optional[str] = None, ) -> None: super().__init__(line, column) self.literal_value = literal_value self.base_type_name = base_type_name self.note = note def simple_name(self) -> str: return self.base_type_name.replace("builtins.", "") def accept(self, visitor: 'TypeVisitor[T]') -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_raw_expression_type(self) def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" def __hash__(self) -> int: return hash((self.literal_value, self.base_type_name)) def __eq__(self, other: object) -> bool: if isinstance(other, RawExpressionType): return (self.base_type_name == other.base_type_name and self.literal_value == other.literal_value) else: return NotImplemented class LiteralType(ProperType): """The type of a Literal instance. Literal[Value] A Literal always consists of: 1. A native Python object corresponding to the contained inner value 2. A fallback for this Literal. The fallback also corresponds to the parent type this Literal subtypes. For example, 'Literal[42]' is represented as 'LiteralType(value=42, fallback=instance_of_int)' As another example, `Literal[Color.RED]` (where Color is an enum) is represented as `LiteralType(value="RED", fallback=instance_of_color)'. 
""" __slots__ = ('value', 'fallback') def __init__(self, value: LiteralValue, fallback: Instance, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.value = value self.fallback = fallback def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_literal_type(self) def __hash__(self) -> int: return hash((self.value, self.fallback)) def __eq__(self, other: object) -> bool: if isinstance(other, LiteralType): return self.fallback == other.fallback and self.value == other.value else: return NotImplemented def is_enum_literal(self) -> bool: return self.fallback.type.is_enum def value_repr(self) -> str: """Returns the string representation of the underlying type. This function is almost equivalent to running `repr(self.value)`, except it includes some additional logic to correctly handle cases where the value is a string, byte string, a unicode string, or an enum. """ raw = repr(self.value) fallback_name = self.fallback.type.fullname # If this is backed by an enum, if self.is_enum_literal(): return '{}.{}'.format(fallback_name, self.value) if fallback_name == 'builtins.bytes': # Note: 'builtins.bytes' only appears in Python 3, so we want to # explicitly prefix with a "b" return 'b' + raw elif fallback_name == 'builtins.unicode': # Similarly, 'builtins.unicode' only appears in Python 2, where we also # want to explicitly prefix return 'u' + raw else: # 'builtins.str' could mean either depending on context, but either way # we don't prefix: it's the "native" string. And of course, if value is # some other type, we just return that string repr directly. 
return raw def serialize(self) -> Union[JsonDict, str]: return { '.class': 'LiteralType', 'value': self.value, 'fallback': self.fallback.serialize(), } @classmethod def deserialize(cls, data: JsonDict) -> 'LiteralType': assert data['.class'] == 'LiteralType' return LiteralType( value=data['value'], fallback=Instance.deserialize(data['fallback']), ) class StarType(ProperType): """The star type *type_parameter. This is not a real type but a syntactic AST construct. """ type = None # type: Type def __init__(self, type: Type, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.type = type def accept(self, visitor: 'TypeVisitor[T]') -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_star_type(self) def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" class UnionType(ProperType): """The union type Union[T1, ..., Tn] (at least one type argument).""" __slots__ = ('items',) def __init__(self, items: Sequence[Type], line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.items = flatten_nested_unions(items) self.can_be_true = any(item.can_be_true for item in items) self.can_be_false = any(item.can_be_false for item in items) def __hash__(self) -> int: return hash(frozenset(self.items)) def __eq__(self, other: object) -> bool: if not isinstance(other, UnionType): return NotImplemented return frozenset(self.items) == frozenset(other.items) @overload @staticmethod def make_union(items: Sequence[ProperType], line: int = -1, column: int = -1) -> ProperType: ... @overload @staticmethod def make_union(items: Sequence[Type], line: int = -1, column: int = -1) -> Type: ... 
@staticmethod def make_union(items: Sequence[Type], line: int = -1, column: int = -1) -> Type: if len(items) > 1: return UnionType(items, line, column) elif len(items) == 1: return items[0] else: return UninhabitedType() def length(self) -> int: return len(self.items) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_union_type(self) def has_readable_member(self, name: str) -> bool: """For a tree of unions of instances, check whether all instances have a given member. TODO: Deal with attributes of TupleType etc. TODO: This should probably be refactored to go elsewhere. """ return all((isinstance(x, UnionType) and x.has_readable_member(name)) or (isinstance(x, Instance) and x.type.has_readable_member(name)) for x in get_proper_types(self.relevant_items())) def relevant_items(self) -> List[Type]: """Removes NoneTypes from Unions when strict Optional checking is off.""" if state.strict_optional: return self.items else: return [i for i in get_proper_types(self.items) if not isinstance(i, NoneType)] def serialize(self) -> JsonDict: return {'.class': 'UnionType', 'items': [t.serialize() for t in self.items], } @classmethod def deserialize(cls, data: JsonDict) -> 'UnionType': assert data['.class'] == 'UnionType' return UnionType([deserialize_type(t) for t in data['items']]) class PartialType(ProperType): """Type such as List[?] where type arguments are unknown, or partial None type. 
These are used for inferring types in multiphase initialization such as this: x = [] # x gets a partial type List[?], as item type is unknown x.append(1) # partial type gets replaced with normal type List[int] Or with None: x = None # x gets a partial type None if c: x = 1 # Infer actual type int for x """ # None for the 'None' partial type; otherwise a generic class type = None # type: Optional[mypy.nodes.TypeInfo] var = None # type: mypy.nodes.Var def __init__(self, type: 'Optional[mypy.nodes.TypeInfo]', var: 'mypy.nodes.Var') -> None: super().__init__() self.type = type self.var = var def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_partial_type(self) class EllipsisType(ProperType): """The type ... (ellipsis). This is not a real type but a syntactic AST construct, used in Callable[..., T], for example. A semantically analyzed type will never have ellipsis types. """ def accept(self, visitor: 'TypeVisitor[T]') -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_ellipsis_type(self) def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" class TypeType(ProperType): """For types like Type[User]. This annotates variables that are class objects, constrained by the type argument. See PEP 484 for more details. We may encounter expressions whose values are specific classes; those are represented as callables (possibly overloaded) corresponding to the class's constructor's signature and returning an instance of that class. The difference with Type[C] is that those callables always represent the exact class given as the return type; Type[C] represents any class that's a subclass of C, and C may also be a type variable or a union (or Any). Many questions around subtype relationships between Type[C1] and def(...) -> C2 are answered by looking at the subtype relationships between C1 and C2, since Type[] is considered covariant. 
There's an unsolved problem with constructor signatures (also unsolved in PEP 484): calling a variable whose type is Type[C] assumes the constructor signature for C, even though a subclass of C might completely change the constructor signature. For now we just assume that users of Type[C] are careful not to do that (in the future we might detect when they are violating that assumption). """ # This can't be everything, but it can be a class reference, # a generic class instance, a union, Any, a type variable... item = None # type: ProperType def __init__(self, item: Bogus[Union[Instance, AnyType, TypeVarType, TupleType, NoneType, CallableType]], *, line: int = -1, column: int = -1) -> None: """To ensure Type[Union[A, B]] is always represented as Union[Type[A], Type[B]], item of type UnionType must be handled through make_normalized static method. """ super().__init__(line, column) self.item = item @staticmethod def make_normalized(item: Type, *, line: int = -1, column: int = -1) -> ProperType: item = get_proper_type(item) if isinstance(item, UnionType): return UnionType.make_union( [TypeType.make_normalized(union_item) for union_item in item.items], line=line, column=column ) return TypeType(item, line=line, column=column) # type: ignore[arg-type] def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_type_type(self) def __hash__(self) -> int: return hash(self.item) def __eq__(self, other: object) -> bool: if not isinstance(other, TypeType): return NotImplemented return self.item == other.item def serialize(self) -> JsonDict: return {'.class': 'TypeType', 'item': self.item.serialize()} @classmethod def deserialize(cls, data: JsonDict) -> Type: assert data['.class'] == 'TypeType' return TypeType.make_normalized(deserialize_type(data['item'])) class PlaceholderType(ProperType): """Temporary, yet-unknown type during semantic analysis. 
This is needed when there's a reference to a type before the real symbol table entry of the target type is available (specifically, we use a temporary PlaceholderNode symbol node). Consider this example: class str(Sequence[str]): ... We use a PlaceholderType for the 'str' in 'Sequence[str]' since we can't create a TypeInfo for 'str' until all base classes have been resolved. We'll soon perform another analysis iteration which replaces the base class with a complete type without any placeholders. After semantic analysis, no placeholder types must exist. """ def __init__(self, fullname: Optional[str], args: List[Type], line: int) -> None: super().__init__(line) self.fullname = fullname # Must be a valid full name of an actual node (or None). self.args = args def accept(self, visitor: 'TypeVisitor[T]') -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_placeholder_type(self) def serialize(self) -> str: # We should never get here since all placeholders should be replaced # during semantic analysis. assert False, "Internal error: unresolved placeholder type {}".format(self.fullname) @overload def get_proper_type(typ: None) -> None: ... @overload def get_proper_type(typ: Type) -> ProperType: ... def get_proper_type(typ: Optional[Type]) -> Optional[ProperType]: """Get the expansion of a type alias type. If the type is already a proper type, this is a no-op. Use this function wherever a decision is made on a call like e.g. 'if isinstance(typ, UnionType): ...', because 'typ' in this case may be an alias to union. Note: if after making the decision on the isinstance() call you pass on the original type (and not one of its components) it is recommended to *always* pass on the unexpanded alias. """ if typ is None: return None while isinstance(typ, TypeAliasType): typ = typ._expand_once() assert isinstance(typ, ProperType), typ # TODO: store the name of original type alias on this type, so we can show it in errors. 
return typ @overload def get_proper_types(it: Iterable[Type]) -> List[ProperType]: ... # type: ignore[misc] @overload def get_proper_types(it: Iterable[Optional[Type]]) -> List[Optional[ProperType]]: ... def get_proper_types(it: Iterable[Optional[Type]] ) -> Union[List[ProperType], List[Optional[ProperType]]]: return [get_proper_type(t) for t in it] # We split off the type visitor base classes to another module # to make it easier to gradually get modules working with mypyc. # Import them here, after the types are defined. # This is intended as a re-export also. from mypy.type_visitor import ( # noqa TypeVisitor as TypeVisitor, SyntheticTypeVisitor as SyntheticTypeVisitor, TypeTranslator as TypeTranslator, TypeQuery as TypeQuery, ) class TypeStrVisitor(SyntheticTypeVisitor[str]): """Visitor for pretty-printing types into strings. This is mostly for debugging/testing. Do not preserve original formatting. Notes: - Represent unbound types as Foo? or Foo?[...]. - Represent the NoneType type as None. """ def __init__(self, id_mapper: Optional[IdMapper] = None) -> None: self.id_mapper = id_mapper self.any_as_dots = False def visit_unbound_type(self, t: UnboundType) -> str: s = t.name + '?' if t.args: s += '[{}]'.format(self.list_str(t.args)) return s def visit_type_list(self, t: TypeList) -> str: return ''.format(self.list_str(t.items)) def visit_callable_argument(self, t: CallableArgument) -> str: typ = t.typ.accept(self) if t.name is None: return "{}({})".format(t.constructor, typ) else: return "{}({}, {})".format(t.constructor, typ, t.name) def visit_any(self, t: AnyType) -> str: if self.any_as_dots and t.type_of_any == TypeOfAny.special_form: return '...' 
return 'Any' def visit_none_type(self, t: NoneType) -> str: return "None" def visit_uninhabited_type(self, t: UninhabitedType) -> str: return "" def visit_erased_type(self, t: ErasedType) -> str: return "" def visit_deleted_type(self, t: DeletedType) -> str: if t.source is None: return "" else: return "".format(t.source) def visit_instance(self, t: Instance) -> str: if t.last_known_value and not t.args: # Instances with a literal fallback should never be generic. If they are, # something went wrong so we fall back to showing the full Instance repr. s = '{}?'.format(t.last_known_value) else: s = t.type.fullname or t.type.name or '' if t.erased: s += '*' if t.args != []: s += '[{}]'.format(self.list_str(t.args)) if self.id_mapper: s += '<{}>'.format(self.id_mapper.id(t.type)) return s def visit_type_var(self, t: TypeVarType) -> str: if t.name is None: # Anonymous type variable type (only numeric id). s = '`{}'.format(t.id) else: # Named type variable type. s = '{}`{}'.format(t.name, t.id) if self.id_mapper and t.upper_bound: s += '(upper_bound={})'.format(t.upper_bound.accept(self)) return s def visit_callable_type(self, t: CallableType) -> str: s = '' bare_asterisk = False for i in range(len(t.arg_types)): if s != '': s += ', ' if t.arg_kinds[i] in (ARG_NAMED, ARG_NAMED_OPT) and not bare_asterisk: s += '*, ' bare_asterisk = True if t.arg_kinds[i] == ARG_STAR: s += '*' if t.arg_kinds[i] == ARG_STAR2: s += '**' name = t.arg_names[i] if name: s += name + ': ' s += t.arg_types[i].accept(self) if t.arg_kinds[i] in (ARG_OPT, ARG_NAMED_OPT): s += ' =' s = '({})'.format(s) if not isinstance(get_proper_type(t.ret_type), NoneType): s += ' -> {}'.format(t.ret_type.accept(self)) if t.variables: vs = [] # We reimplement TypeVarDef.__repr__ here in order to support id_mapper. 
for var in t.variables: if var.values: vals = '({})'.format(', '.join(val.accept(self) for val in var.values)) vs.append('{} in {}'.format(var.name, vals)) elif not is_named_instance(var.upper_bound, 'builtins.object'): vs.append('{} <: {}'.format(var.name, var.upper_bound.accept(self))) else: vs.append(var.name) s = '{} {}'.format('[{}]'.format(', '.join(vs)), s) return 'def {}'.format(s) def visit_overloaded(self, t: Overloaded) -> str: a = [] for i in t.items(): a.append(i.accept(self)) return 'Overload({})'.format(', '.join(a)) def visit_tuple_type(self, t: TupleType) -> str: s = self.list_str(t.items) if t.partial_fallback and t.partial_fallback.type: fallback_name = t.partial_fallback.type.fullname if fallback_name != 'builtins.tuple': return 'Tuple[{}, fallback={}]'.format(s, t.partial_fallback.accept(self)) return 'Tuple[{}]'.format(s) def visit_typeddict_type(self, t: TypedDictType) -> str: def item_str(name: str, typ: str) -> str: if name in t.required_keys: return '{!r}: {}'.format(name, typ) else: return '{!r}?: {}'.format(name, typ) s = '{' + ', '.join(item_str(name, typ.accept(self)) for name, typ in t.items.items()) + '}' prefix = '' if t.fallback and t.fallback.type: if t.fallback.type.fullname not in TPDICT_FB_NAMES: prefix = repr(t.fallback.type.fullname) + ', ' return 'TypedDict({}{})'.format(prefix, s) def visit_raw_expression_type(self, t: RawExpressionType) -> str: return repr(t.literal_value) def visit_literal_type(self, t: LiteralType) -> str: return 'Literal[{}]'.format(t.value_repr()) def visit_star_type(self, t: StarType) -> str: s = t.type.accept(self) return '*{}'.format(s) def visit_union_type(self, t: UnionType) -> str: s = self.list_str(t.items) return 'Union[{}]'.format(s) def visit_partial_type(self, t: PartialType) -> str: if t.type is None: return '' else: return ''.format(t.type.name, ', '.join(['?'] * len(t.type.type_vars))) def visit_ellipsis_type(self, t: EllipsisType) -> str: return '...' 
def visit_type_type(self, t: TypeType) -> str: return 'Type[{}]'.format(t.item.accept(self)) def visit_placeholder_type(self, t: PlaceholderType) -> str: return ''.format(t.fullname) def visit_type_alias_type(self, t: TypeAliasType) -> str: if t.alias is not None: unrolled, recursed = t._partial_expansion() self.any_as_dots = recursed type_str = unrolled.accept(self) self.any_as_dots = False return type_str return '' def list_str(self, a: Iterable[Type]) -> str: """Convert items of an array to strings (pretty-print types) and join the results with commas. """ res = [] for t in a: res.append(t.accept(self)) return ', '.join(res) class UnrollAliasVisitor(TypeTranslator): def __init__(self, initial_aliases: Set[TypeAliasType]) -> None: self.recursed = False self.initial_aliases = initial_aliases def visit_type_alias_type(self, t: TypeAliasType) -> Type: if t in self.initial_aliases: self.recursed = True return AnyType(TypeOfAny.special_form) # Create a new visitor on encountering a new type alias, so that an alias like # A = Tuple[B, B] # B = int # will not be detected as recursive on the second encounter of B. 
subvisitor = UnrollAliasVisitor(self.initial_aliases | {t}) result = get_proper_type(t).accept(subvisitor) if subvisitor.recursed: self.recursed = True return result def strip_type(typ: Type) -> ProperType: """Make a copy of type without 'debugging info' (function name).""" typ = get_proper_type(typ) if isinstance(typ, CallableType): return typ.copy_modified(name=None) elif isinstance(typ, Overloaded): return Overloaded([cast(CallableType, strip_type(item)) for item in typ.items()]) else: return typ def is_named_instance(t: Type, fullname: str) -> bool: t = get_proper_type(t) return isinstance(t, Instance) and t.type.fullname == fullname TP = TypeVar('TP', bound=Type) def copy_type(t: TP) -> TP: """ Build a copy of the type; used to mutate the copy with truthiness information """ return copy.copy(t) class InstantiateAliasVisitor(TypeTranslator): def __init__(self, vars: List[str], subs: List[Type]) -> None: self.replacements = {v: s for (v, s) in zip(vars, subs)} def visit_type_alias_type(self, typ: TypeAliasType) -> Type: return typ.copy_modified(args=[t.accept(self) for t in typ.args]) def visit_unbound_type(self, typ: UnboundType) -> Type: # TODO: stop using unbound type variables for type aliases. # Now that type aliases are very similar to TypeInfos we should # make type variable tracking similar as well. Maybe we can even support # upper bounds etc. for generic type aliases. if typ.name in self.replacements: return self.replacements[typ.name] return typ def visit_type_var(self, typ: TypeVarType) -> Type: if typ.name in self.replacements: return self.replacements[typ.name] return typ def replace_alias_tvars(tp: Type, vars: List[str], subs: List[Type], newline: int, newcolumn: int) -> Type: """Replace type variables in a generic type alias tp with substitutions subs resetting context. Length of subs should be already checked. 
""" replacer = InstantiateAliasVisitor(vars, subs) new_tp = tp.accept(replacer) new_tp.line = newline new_tp.column = newcolumn return new_tp class HasTypeVars(TypeQuery[bool]): def __init__(self) -> None: super().__init__(any) def visit_type_var(self, t: TypeVarType) -> bool: return True def has_type_vars(typ: Type) -> bool: """Check if a type contains any type variables (recursively).""" return typ.accept(HasTypeVars()) def flatten_nested_unions(types: Iterable[Type]) -> List[Type]: """Flatten nested unions in a type list.""" # This and similar functions on unions can cause infinite recursion # if passed a "pathological" alias like A = Union[int, A] or similar. # TODO: ban such aliases in semantic analyzer. flat_items = [] # type: List[Type] for tp in types: if isinstance(tp, ProperType) and isinstance(tp, UnionType): flat_items.extend(flatten_nested_unions(tp.items)) else: flat_items.append(tp) return flat_items def union_items(typ: Type) -> List[ProperType]: """Return the flattened items of a union type. For non-union types, return a list containing just the argument. 
""" typ = get_proper_type(typ) if isinstance(typ, UnionType): items = [] for item in typ.items: items.extend(union_items(item)) return items else: return [typ] def is_generic_instance(tp: Type) -> bool: tp = get_proper_type(tp) return isinstance(tp, Instance) and bool(tp.args) def is_optional(t: Type) -> bool: t = get_proper_type(t) return isinstance(t, UnionType) and any(isinstance(get_proper_type(e), NoneType) for e in t.items) def remove_optional(typ: Type) -> Type: typ = get_proper_type(typ) if isinstance(typ, UnionType): return UnionType.make_union([t for t in typ.items if not isinstance(get_proper_type(t), NoneType)]) else: return typ def is_literal_type(typ: ProperType, fallback_fullname: str, value: LiteralValue) -> bool: """Check if this type is a LiteralType with the given fallback type and value.""" if isinstance(typ, Instance) and typ.last_known_value: typ = typ.last_known_value if not isinstance(typ, LiteralType): return False if typ.fallback.type.fullname != fallback_fullname: return False return typ.value == value names = globals().copy() # type: Final names.pop('NOT_READY', None) deserialize_map = { key: obj.deserialize for key, obj in names.items() if isinstance(obj, type) and issubclass(obj, Type) and obj is not Type } # type: Final mypy-0.761/mypy/typeshed/0000755€tŠÔÚ€2›s®0000000000013576752266021545 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/0000755€tŠÔÚ€2›s®0000000000013576752266023026 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/0000755€tŠÔÚ€2›s®0000000000013576752267023170 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/BaseHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000351013576752252026445 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for BaseHTTPServer (Python 2.7) from typing import Any, BinaryIO, Callable, Mapping, Optional, Tuple, Union import SocketServer import mimetools class HTTPServer(SocketServer.TCPServer): server_name: str 
server_port: int def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: Callable[..., BaseHTTPRequestHandler]) -> None: ... class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler): client_address: Tuple[str, int] server: SocketServer.BaseServer close_connection: bool command: str path: str request_version: str headers: mimetools.Message rfile: BinaryIO wfile: BinaryIO server_version: str sys_version: str error_message_format: str error_content_type: str protocol_version: str MessageClass: type responses: Mapping[int, Tuple[str, str]] def __init__(self, request: bytes, client_address: Tuple[str, int], server: SocketServer.BaseServer) -> None: ... def handle(self) -> None: ... def handle_one_request(self) -> None: ... def send_error(self, code: int, message: Optional[str] = ...) -> None: ... def send_response(self, code: int, message: Optional[str] = ...) -> None: ... def send_header(self, keyword: str, value: str) -> None: ... def end_headers(self) -> None: ... def flush_headers(self) -> None: ... def log_request(self, code: Union[int, str] = ..., size: Union[int, str] = ...) -> None: ... def log_error(self, format: str, *args: Any) -> None: ... def log_message(self, format: str, *args: Any) -> None: ... def version_string(self) -> str: ... def date_time_string(self, timestamp: Optional[int] = ...) -> str: ... def log_date_time_string(self) -> str: ... def address_string(self) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/CGIHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000034313576752252026176 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for CGIHTTPServer (Python 2.7) from typing import List import SimpleHTTPServer class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): cgi_directories: List[str] def do_POST(self) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2/ConfigParser.pyi0000644€tŠÔÚ€2›s®0000000745213576752252026277 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Sequence, Tuple, Union, List, Dict, Protocol, Optional DEFAULTSECT: str MAX_INTERPOLATION_DEPTH: int class Error(Exception): message: Any def __init__(self, msg: str = ...) -> None: ... def _get_message(self) -> None: ... def _set_message(self, value: str) -> None: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... class NoSectionError(Error): section: str def __init__(self, section: str) -> None: ... class DuplicateSectionError(Error): section: str def __init__(self, section: str) -> None: ... class NoOptionError(Error): section: str option: str def __init__(self, option: str, section: str) -> None: ... class InterpolationError(Error): section: str option: str msg: str def __init__(self, option: str, section: str, msg: str) -> None: ... class InterpolationMissingOptionError(InterpolationError): reference: str def __init__(self, option: str, section: str, rawval: str, reference: str) -> None: ... class InterpolationSyntaxError(InterpolationError): ... class InterpolationDepthError(InterpolationError): def __init__(self, option: str, section: str, rawval: str) -> None: ... class ParsingError(Error): filename: str errors: List[Tuple[Any, Any]] def __init__(self, filename: str) -> None: ... def append(self, lineno: Any, line: Any) -> None: ... class MissingSectionHeaderError(ParsingError): lineno: Any line: Any def __init__(self, filename: str, lineno: Any, line: Any) -> None: ... class _Readable(Protocol): def readline(self) -> str: ... class RawConfigParser: _dict: Any _sections: Dict[Any, Any] _defaults: Dict[Any, Any] _optcre: Any SECTCRE: Any OPTCRE: Any OPTCRE_NV: Any def __init__(self, defaults: Dict[Any, Any] = ..., dict_type: Any = ..., allow_no_value: bool = ...) -> None: ... def defaults(self) -> Dict[Any, Any]: ... def sections(self) -> List[str]: ... 
def add_section(self, section: str) -> None: ... def has_section(self, section: str) -> bool: ... def options(self, section: str) -> List[str]: ... def read(self, filenames: Union[str, Sequence[str]]) -> List[str]: ... def readfp(self, fp: _Readable, filename: str = ...) -> None: ... def get(self, section: str, option: str) -> str: ... def items(self, section: str) -> List[Tuple[Any, Any]]: ... def _get(self, section: str, conv: type, option: str) -> Any: ... def getint(self, section: str, option: str) -> int: ... def getfloat(self, section: str, option: str) -> float: ... _boolean_states: Dict[str, bool] def getboolean(self, section: str, option: str) -> bool: ... def optionxform(self, optionstr: str) -> str: ... def has_option(self, section: str, option: str) -> bool: ... def set(self, section: str, option: str, value: Any = ...) -> None: ... def write(self, fp: IO[str]) -> None: ... def remove_option(self, section: str, option: Any) -> bool: ... def remove_section(self, section: str) -> bool: ... def _read(self, fp: IO[str], fpname: str) -> None: ... class ConfigParser(RawConfigParser): _KEYCRE: Any def get(self, section: str, option: str, raw: bool = ..., vars: Optional[Dict[Any, Any]] = ...) -> Any: ... def items(self, section: str, raw: bool = ..., vars: Optional[Dict[Any, Any]] = ...) -> List[Tuple[str, Any]]: ... def _interpolate(self, section: str, option: str, rawval: Any, vars: Any) -> str: ... def _interpolation_replace(self, match: Any) -> str: ... class SafeConfigParser(ConfigParser): _interpvar_re: Any def _interpolate(self, section: str, option: str, rawval: Any, vars: Any) -> str: ... def _interpolate_some( self, option: str, accum: List[Any], rest: str, section: str, map: Dict[Any, Any], depth: int, ) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/Cookie.pyi0000644€tŠÔÚ€2›s®0000000247613576752252025127 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Optional class CookieError(Exception): ... 
class Morsel(Dict[Any, Any]): key: Any def __init__(self): ... def __setitem__(self, K, V): ... def isReservedKey(self, K): ... value: Any coded_value: Any def set(self, key, val, coded_val, LegalChars=..., idmap=..., translate=...): ... def output(self, attrs: Optional[Any] = ..., header=...): ... def js_output(self, attrs: Optional[Any] = ...): ... def OutputString(self, attrs: Optional[Any] = ...): ... class BaseCookie(Dict[Any, Any]): def value_decode(self, val): ... def value_encode(self, val): ... def __init__(self, input: Optional[Any] = ...): ... def __setitem__(self, key, value): ... def output(self, attrs: Optional[Any] = ..., header=..., sep=...): ... def js_output(self, attrs: Optional[Any] = ...): ... def load(self, rawdata): ... class SimpleCookie(BaseCookie): def value_decode(self, val): ... def value_encode(self, val): ... class SerialCookie(BaseCookie): def __init__(self, input: Optional[Any] = ...): ... def value_decode(self, val): ... def value_encode(self, val): ... class SmartCookie(BaseCookie): def __init__(self, input: Optional[Any] = ...): ... def value_decode(self, val): ... def value_encode(self, val): ... Cookie: Any mypy-0.761/mypy/typeshed/stdlib/2/HTMLParser.pyi0000644€tŠÔÚ€2›s®0000000205313576752252025626 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple, AnyStr from markupbase import ParserBase class HTMLParser(ParserBase): def __init__(self) -> None: ... def feed(self, feed: AnyStr) -> None: ... def close(self) -> None: ... def reset(self) -> None: ... def get_starttag_text(self) -> AnyStr: ... def set_cdata_mode(self, AnyStr) -> None: ... def clear_cdata_mode(self) -> None: ... def handle_startendtag(self, tag: AnyStr, attrs: List[Tuple[AnyStr, AnyStr]]): ... def handle_starttag(self, tag: AnyStr, attrs: List[Tuple[AnyStr, AnyStr]]): ... def handle_endtag(self, tag: AnyStr): ... def handle_charref(self, name: AnyStr): ... def handle_entityref(self, name: AnyStr): ... 
def handle_data(self, data: AnyStr): ... def handle_comment(self, data: AnyStr): ... def handle_decl(self, decl: AnyStr): ... def handle_pi(self, data: AnyStr): ... def unknown_decl(self, data: AnyStr): ... def unescape(self, s: AnyStr) -> AnyStr: ... class HTMLParseError(Exception): msg: str lineno: int offset: int mypy-0.761/mypy/typeshed/stdlib/2/Queue.pyi0000644€tŠÔÚ€2›s®0000000163513576752252024776 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for Queue (Python 2) from collections import deque from typing import Any, Deque, TypeVar, Generic, Optional _T = TypeVar('_T') class Empty(Exception): ... class Full(Exception): ... class Queue(Generic[_T]): maxsize: Any mutex: Any not_empty: Any not_full: Any all_tasks_done: Any unfinished_tasks: Any queue: Deque[Any] # undocumented def __init__(self, maxsize: int = ...) -> None: ... def task_done(self) -> None: ... def join(self) -> None: ... def qsize(self) -> int: ... def empty(self) -> bool: ... def full(self) -> bool: ... def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ... def put_nowait(self, item: _T) -> None: ... def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ... def get_nowait(self) -> _T: ... class PriorityQueue(Queue[_T]): ... class LifoQueue(Queue[_T]): ... mypy-0.761/mypy/typeshed/stdlib/2/SimpleHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000126113576752252027025 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for SimpleHTTPServer (Python 2) from typing import Any, AnyStr, IO, Mapping, Optional, Union import BaseHTTPServer from StringIO import StringIO class SimpleHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): server_version: str def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... def send_head(self) -> Optional[IO[str]]: ... def list_directory(self, path: Union[str, unicode]) -> Optional[StringIO[Any]]: ... def translate_path(self, path: AnyStr) -> AnyStr: ... 
def copyfile(self, source: IO[AnyStr], outputfile: IO[AnyStr]): ... def guess_type(self, path: Union[str, unicode]) -> str: ... extensions_map: Mapping[str, str] mypy-0.761/mypy/typeshed/stdlib/2/SocketServer.pyi0000644€tŠÔÚ€2›s®0000000755613576752252026341 0ustar jukkaDROPBOX\Domain Users00000000000000# NB: SocketServer.pyi and socketserver.pyi must remain consistent! # Stubs for socketserver from typing import Any, BinaryIO, Callable, Optional, Tuple, Type, Text, Union from socket import SocketType import sys import types class BaseServer: address_family: int RequestHandlerClass: Callable[..., BaseRequestHandler] server_address: Tuple[str, int] socket: SocketType allow_reuse_address: bool request_queue_size: int socket_type: int timeout: Optional[float] def __init__(self, server_address: Any, RequestHandlerClass: Callable[..., BaseRequestHandler]) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... def serve_forever(self, poll_interval: float = ...) -> None: ... def shutdown(self) -> None: ... def server_close(self) -> None: ... def finish_request(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def get_request(self) -> None: ... def handle_error(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def handle_timeout(self) -> None: ... def process_request(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def server_activate(self) -> None: ... def server_bind(self) -> None: ... def verify_request(self, request: bytes, client_address: Tuple[str, int]) -> bool: ... if sys.version_info >= (3, 6): def __enter__(self) -> BaseServer: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[types.TracebackType]) -> None: ... if sys.version_info >= (3, 3): def service_actions(self) -> None: ... 
class TCPServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: Callable[..., BaseRequestHandler], bind_and_activate: bool = ...) -> None: ... class UDPServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: Callable[..., BaseRequestHandler], bind_and_activate: bool = ...) -> None: ... if sys.platform != 'win32': class UnixStreamServer(BaseServer): def __init__(self, server_address: Union[Text, bytes], RequestHandlerClass: Callable[..., BaseRequestHandler], bind_and_activate: bool = ...) -> None: ... class UnixDatagramServer(BaseServer): def __init__(self, server_address: Union[Text, bytes], RequestHandlerClass: Callable[..., BaseRequestHandler], bind_and_activate: bool = ...) -> None: ... class ForkingMixIn: ... class ThreadingMixIn: ... class ForkingTCPServer(ForkingMixIn, TCPServer): ... class ForkingUDPServer(ForkingMixIn, UDPServer): ... class ThreadingTCPServer(ThreadingMixIn, TCPServer): ... class ThreadingUDPServer(ThreadingMixIn, UDPServer): ... if sys.platform != 'win32': class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): ... class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... class BaseRequestHandler: # Those are technically of types, respectively: # * Union[SocketType, Tuple[bytes, SocketType]] # * Union[Tuple[str, int], str] # But there are some concerns that having unions here would cause # too much inconvenience to people using it (see # https://github.com/python/typeshed/pull/384#issuecomment-234649696) request: Any client_address: Any server: BaseServer def setup(self) -> None: ... def handle(self) -> None: ... def finish(self) -> None: ... 
class StreamRequestHandler(BaseRequestHandler): rfile: BinaryIO wfile: BinaryIO class DatagramRequestHandler(BaseRequestHandler): rfile: BinaryIO wfile: BinaryIO mypy-0.761/mypy/typeshed/stdlib/2/StringIO.pyi0000644€tŠÔÚ€2›s®0000000223313576752252025403 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for StringIO (Python 2) from typing import Any, IO, AnyStr, Iterator, Iterable, Generic, List, Optional class StringIO(IO[AnyStr], Generic[AnyStr]): closed: bool softspace: int len: int name: str def __init__(self, buf: AnyStr = ...) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... def next(self) -> AnyStr: ... def close(self) -> None: ... def isatty(self) -> bool: ... def seek(self, pos: int, mode: int = ...) -> int: ... def tell(self) -> int: ... def read(self, n: int = ...) -> AnyStr: ... def readline(self, length: int = ...) -> AnyStr: ... def readlines(self, sizehint: int = ...) -> List[AnyStr]: ... def truncate(self, size: Optional[int] = ...) -> int: ... def write(self, s: AnyStr) -> int: ... def writelines(self, iterable: Iterable[AnyStr]) -> None: ... def flush(self) -> None: ... def getvalue(self) -> AnyStr: ... def __enter__(self) -> Any: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> Any: ... def fileno(self) -> int: ... def readable(self) -> bool: ... def seekable(self) -> bool: ... def writable(self) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2/UserDict.pyi0000644€tŠÔÚ€2›s®0000000314013576752252025425 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import (Any, Container, Dict, Generic, Iterable, Iterator, List, Mapping, Optional, Sized, Tuple, TypeVar, Union, overload) _KT = TypeVar('_KT') _VT = TypeVar('_VT') _T = TypeVar('_T') class UserDict(Dict[_KT, _VT], Generic[_KT, _VT]): data: Dict[_KT, _VT] def __init__(self, initialdata: Mapping[_KT, _VT] = ...) -> None: ... # TODO: __iter__ is not available for UserDict class IterableUserDict(UserDict[_KT, _VT], Generic[_KT, _VT]): ... 
class DictMixin(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT]): def has_key(self, key: _KT) -> bool: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_KT]: ... # From typing.Mapping[_KT, _VT] # (can't inherit because of keys()) @overload def get(self, k: _KT) -> Optional[_VT]: ... @overload def get(self, k: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]: ... def values(self) -> List[_VT]: ... def items(self) -> List[Tuple[_KT, _VT]]: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... def __contains__(self, o: Any) -> bool: ... # From typing.MutableMapping[_KT, _VT] def clear(self) -> None: ... def pop(self, k: _KT, default: _VT = ...) -> _VT: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... @overload def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/UserList.pyi0000644€tŠÔÚ€2›s®0000000116613576752252025463 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, MutableSequence, TypeVar, Union, overload, List _T = TypeVar("_T") _S = TypeVar("_S") class UserList(MutableSequence[_T]): data: List[_T] def insert(self, index: int, object: _T) -> None: ... @overload def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __len__(self) -> int: ... @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self: _S, s: slice) -> _S: ... def sort(self) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2/UserString.pyi0000644€tŠÔÚ€2›s®0000000740413576752252026017 0ustar jukkaDROPBOX\Domain Users00000000000000import collections from typing import Any, Iterable, List, MutableSequence, Sequence, Optional, overload, Text, TypeVar, Tuple, Union _UST = TypeVar("_UST", bound=UserString) _MST = TypeVar("_MST", bound=MutableString) class UserString(Sequence[UserString]): data: unicode def __init__(self, seq: object) -> None: ... def __int__(self) -> int: ... def __long__(self) -> long: ... def __float__(self) -> float: ... def __complex__(self) -> complex: ... def __hash__(self) -> int: ... def __len__(self) -> int: ... @overload def __getitem__(self: _UST, i: int) -> _UST: ... @overload def __getitem__(self: _UST, s: slice) -> _UST: ... def __add__(self: _UST, other: Any) -> _UST: ... def __radd__(self: _UST, other: Any) -> _UST: ... def __mul__(self: _UST, other: int) -> _UST: ... def __rmul__(self: _UST, other: int) -> _UST: ... def __mod__(self: _UST, args: Any) -> _UST: ... def capitalize(self: _UST) -> _UST: ... def center(self: _UST, width: int, *args: Any) -> _UST: ... def count(self, sub: int, start: int = ..., end: int = ...) -> int: ... def decode(self: _UST, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> _UST: ... def encode(self: _UST, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> _UST: ... def endswith(self, suffix: Text, start: int = ..., end: int = ...) -> bool: ... def expandtabs(self: _UST, tabsize: int = ...) -> _UST: ... def find(self, sub: Text, start: int = ..., end: int = ...) -> int: ... def index(self, sub: Text, start: int = ..., end: int = ...) -> int: ... def isalpha(self) -> bool: ... def isalnum(self) -> bool: ... def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isnumeric(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, seq: Iterable[Text]) -> Text: ... 
def ljust(self: _UST, width: int, *args: Any) -> _UST: ... def lower(self: _UST) -> _UST: ... def lstrip(self: _UST, chars: Optional[Text] = ...) -> _UST: ... def partition(self, sep: Text) -> Tuple[Text, Text, Text]: ... def replace(self: _UST, old: Text, new: Text, maxsplit: int = ...) -> _UST: ... def rfind(self, sub: Text, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: Text, start: int = ..., end: int = ...) -> int: ... def rjust(self: _UST, width: int, *args: Any) -> _UST: ... def rpartition(self, sep: Text) -> Tuple[Text, Text, Text]: ... def rstrip(self: _UST, chars: Optional[Text] = ...) -> _UST: ... def split(self, sep: Optional[Text] = ..., maxsplit: int = ...) -> List[Text]: ... def rsplit(self, sep: Optional[Text] = ..., maxsplit: int = ...) -> List[Text]: ... def splitlines(self, keepends: int = ...) -> List[Text]: ... def startswith(self, suffix: Text, start: int = ..., end: int = ...) -> bool: ... def strip(self: _UST, chars: Optional[Text] = ...) -> _UST: ... def swapcase(self: _UST) -> _UST: ... def title(self: _UST) -> _UST: ... def translate(self: _UST, *args: Any) -> _UST: ... def upper(self: _UST) -> _UST: ... def zfill(self: _UST, width: int) -> _UST: ... class MutableString(UserString, MutableSequence[MutableString]): @overload def __getitem__(self: _MST, i: int) -> _MST: ... @overload def __getitem__(self: _MST, s: slice) -> _MST: ... def __setitem__(self, index: Union[int, slice], sub: Any) -> None: ... def __delitem__(self, index: Union[int, slice]) -> None: ... def immutable(self) -> UserString: ... def __iadd__(self: _MST, other: Any) -> _MST: ... def __imul__(self, n: int) -> _MST: ... def insert(self, index: int, value: Any) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/__builtin__.pyi0000644€tŠÔÚ€2›s®0000021221113576752252026146 0ustar jukkaDROPBOX\Domain Users00000000000000# True and False are deliberately omitted because they are keywords in # Python 3, and stub files conform to Python 3 syntax. 
from typing import ( TypeVar, Iterator, Iterable, NoReturn, overload, Container, Sequence, MutableSequence, Mapping, MutableMapping, Tuple, List, Any, Dict, Callable, Generic, Set, AbstractSet, FrozenSet, MutableSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsAbs, SupportsComplex, IO, BinaryIO, Union, ItemsView, KeysView, ValuesView, ByteString, Optional, AnyStr, Type, Text, Protocol, ) from abc import abstractmethod, ABCMeta from ast import mod, AST from types import TracebackType, CodeType import sys if sys.version_info >= (3,): from typing import SupportsBytes, SupportsRound if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal _T = TypeVar('_T') _T_co = TypeVar('_T_co', covariant=True) _KT = TypeVar('_KT') _VT = TypeVar('_VT') _S = TypeVar('_S') _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _TT = TypeVar('_TT', bound='type') class _SupportsIndex(Protocol): def __index__(self) -> int: ... class object: __doc__: Optional[str] __dict__: Dict[str, Any] __slots__: Union[Text, Iterable[Text]] __module__: str if sys.version_info >= (3, 6): __annotations__: Dict[str, Any] @property def __class__(self: _T) -> Type[_T]: ... @__class__.setter def __class__(self, __type: Type[object]) -> None: ... def __init__(self) -> None: ... def __new__(cls) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __eq__(self, o: object) -> bool: ... def __ne__(self, o: object) -> bool: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __hash__(self) -> int: ... def __format__(self, format_spec: str) -> str: ... def __getattribute__(self, name: str) -> Any: ... def __delattr__(self, name: str) -> None: ... def __sizeof__(self) -> int: ... def __reduce__(self) -> Union[str, Tuple[Any, ...]]: ... def __reduce_ex__(self, protocol: int) -> Union[str, Tuple[Any, ...]]: ... 
if sys.version_info >= (3,): def __dir__(self) -> Iterable[str]: ... if sys.version_info >= (3, 6): def __init_subclass__(cls) -> None: ... class staticmethod(object): # Special, only valid as a decorator. __func__: Callable[..., Any] if sys.version_info >= (3,): __isabstractmethod__: bool def __init__(self, f: Callable[..., Any]) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]] = ...) -> Callable[..., Any]: ... class classmethod(object): # Special, only valid as a decorator. __func__: Callable[..., Any] if sys.version_info >= (3,): __isabstractmethod__: bool def __init__(self, f: Callable[..., Any]) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]] = ...) -> Callable[..., Any]: ... class type(object): __base__: type __bases__: Tuple[type, ...] __basicsize__: int __dict__: Dict[str, Any] __dictoffset__: int __flags__: int __itemsize__: int __module__: str __mro__: Tuple[type, ...] __name__: str if sys.version_info >= (3,): __qualname__: str __text_signature__: Optional[str] __weakrefoffset__: int @overload def __init__(self, o: object) -> None: ... @overload def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ... @overload def __new__(cls, o: object) -> type: ... @overload def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ... def __call__(self, *args: Any, **kwds: Any) -> Any: ... def __subclasses__(self: _TT) -> List[_TT]: ... # Note: the documentation doesnt specify what the return type is, the standard # implementation seems to be returning a list. def mro(self) -> List[type]: ... def __instancecheck__(self, instance: Any) -> bool: ... def __subclasscheck__(self, subclass: type) -> bool: ... 
if sys.version_info >= (3,): @classmethod def __prepare__(metacls, __name: str, __bases: Tuple[type, ...], **kwds: Any) -> Mapping[str, Any]: ... class super(object): if sys.version_info >= (3,): @overload def __init__(self, t: Any, obj: Any) -> None: ... @overload def __init__(self, t: Any) -> None: ... @overload def __init__(self) -> None: ... else: @overload def __init__(self, t: Any, obj: Any) -> None: ... @overload def __init__(self, t: Any) -> None: ... class int: @overload def __init__(self, x: Union[Text, bytes, SupportsInt, _SupportsIndex] = ...) -> None: ... @overload def __init__(self, x: Union[Text, bytes, bytearray], base: int) -> None: ... if sys.version_info >= (3, 8): def as_integer_ratio(self) -> Tuple[int, Literal[1]]: ... @property def real(self) -> int: ... @property def imag(self) -> int: ... @property def numerator(self) -> int: ... @property def denominator(self) -> int: ... def conjugate(self) -> int: ... def bit_length(self) -> int: ... if sys.version_info >= (3,): def to_bytes(self, length: int, byteorder: str, *, signed: bool = ...) -> bytes: ... @classmethod def from_bytes(cls, bytes: Sequence[int], byteorder: str, *, signed: bool = ...) -> int: ... # TODO buffer object argument def __add__(self, x: int) -> int: ... def __sub__(self, x: int) -> int: ... def __mul__(self, x: int) -> int: ... def __floordiv__(self, x: int) -> int: ... if sys.version_info < (3,): def __div__(self, x: int) -> int: ... def __truediv__(self, x: int) -> float: ... def __mod__(self, x: int) -> int: ... def __divmod__(self, x: int) -> Tuple[int, int]: ... def __radd__(self, x: int) -> int: ... def __rsub__(self, x: int) -> int: ... def __rmul__(self, x: int) -> int: ... def __rfloordiv__(self, x: int) -> int: ... if sys.version_info < (3,): def __rdiv__(self, x: int) -> int: ... def __rtruediv__(self, x: int) -> float: ... def __rmod__(self, x: int) -> int: ... def __rdivmod__(self, x: int) -> Tuple[int, int]: ... 
def __pow__(self, __x: int, __modulo: Optional[int] = ...) -> Any: ... # Return type can be int or float, depending on x. def __rpow__(self, x: int) -> Any: ... def __and__(self, n: int) -> int: ... def __or__(self, n: int) -> int: ... def __xor__(self, n: int) -> int: ... def __lshift__(self, n: int) -> int: ... def __rshift__(self, n: int) -> int: ... def __rand__(self, n: int) -> int: ... def __ror__(self, n: int) -> int: ... def __rxor__(self, n: int) -> int: ... def __rlshift__(self, n: int) -> int: ... def __rrshift__(self, n: int) -> int: ... def __neg__(self) -> int: ... def __pos__(self) -> int: ... def __invert__(self) -> int: ... def __trunc__(self) -> int: ... if sys.version_info >= (3,): def __ceil__(self) -> int: ... def __floor__(self) -> int: ... def __round__(self, ndigits: Optional[int] = ...) -> int: ... def __getnewargs__(self) -> Tuple[int]: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: int) -> bool: ... def __le__(self, x: int) -> bool: ... def __gt__(self, x: int) -> bool: ... def __ge__(self, x: int) -> bool: ... def __str__(self) -> str: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __abs__(self) -> int: ... def __hash__(self) -> int: ... if sys.version_info >= (3,): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... def __index__(self) -> int: ... class float: def __init__(self, x: Union[SupportsFloat, _SupportsIndex, Text, bytes, bytearray] = ...) -> None: ... def as_integer_ratio(self) -> Tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... @classmethod def fromhex(cls, s: str) -> float: ... @property def real(self) -> float: ... @property def imag(self) -> float: ... def conjugate(self) -> float: ... def __add__(self, x: float) -> float: ... def __sub__(self, x: float) -> float: ... def __mul__(self, x: float) -> float: ... def __floordiv__(self, x: float) -> float: ... 
if sys.version_info < (3,): def __div__(self, x: float) -> float: ... def __truediv__(self, x: float) -> float: ... def __mod__(self, x: float) -> float: ... def __divmod__(self, x: float) -> Tuple[float, float]: ... def __pow__(self, x: float) -> float: ... # In Python 3, returns complex if self is negative and x is not whole def __radd__(self, x: float) -> float: ... def __rsub__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... def __rfloordiv__(self, x: float) -> float: ... if sys.version_info < (3,): def __rdiv__(self, x: float) -> float: ... def __rtruediv__(self, x: float) -> float: ... def __rmod__(self, x: float) -> float: ... def __rdivmod__(self, x: float) -> Tuple[float, float]: ... def __rpow__(self, x: float) -> float: ... def __getnewargs__(self) -> Tuple[float]: ... def __trunc__(self) -> int: ... if sys.version_info >= (3,): @overload def __round__(self, ndigits: None = ...) -> int: ... @overload def __round__(self, ndigits: int) -> float: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: float) -> bool: ... def __le__(self, x: float) -> bool: ... def __gt__(self, x: float) -> bool: ... def __ge__(self, x: float) -> bool: ... def __neg__(self) -> float: ... def __pos__(self) -> float: ... def __str__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... if sys.version_info >= (3,): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... class complex: @overload def __init__(self, real: float = ..., imag: float = ...) -> None: ... @overload def __init__(self, real: Union[str, SupportsComplex, _SupportsIndex]) -> None: ... @property def real(self) -> float: ... @property def imag(self) -> float: ... def conjugate(self) -> complex: ... def __add__(self, x: complex) -> complex: ... def __sub__(self, x: complex) -> complex: ... 
def __mul__(self, x: complex) -> complex: ... def __pow__(self, x: complex) -> complex: ... if sys.version_info < (3,): def __div__(self, x: complex) -> complex: ... def __truediv__(self, x: complex) -> complex: ... def __radd__(self, x: complex) -> complex: ... def __rsub__(self, x: complex) -> complex: ... def __rmul__(self, x: complex) -> complex: ... def __rpow__(self, x: complex) -> complex: ... if sys.version_info < (3,): def __rdiv__(self, x: complex) -> complex: ... def __rtruediv__(self, x: complex) -> complex: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __neg__(self) -> complex: ... def __pos__(self) -> complex: ... def __str__(self) -> str: ... def __complex__(self) -> complex: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... if sys.version_info >= (3,): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... if sys.version_info >= (3,): _str_base = object else: class basestring(metaclass=ABCMeta): ... class unicode(basestring, Sequence[unicode]): @overload def __init__(self) -> None: ... @overload def __init__(self, o: object) -> None: ... @overload def __init__(self, o: str, encoding: unicode = ..., errors: unicode = ...) -> None: ... def capitalize(self) -> unicode: ... def center(self, width: int, fillchar: unicode = ...) -> unicode: ... def count(self, x: unicode) -> int: ... def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ... def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ... def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = ..., end: int = ...) -> bool: ... def expandtabs(self, tabsize: int = ...) -> unicode: ... def find(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def format(self, *args: object, **kwargs: object) -> unicode: ... def index(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... 
def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... def isidentifier(self) -> bool: ... def islower(self) -> bool: ... def isnumeric(self) -> bool: ... def isprintable(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[unicode]) -> unicode: ... def ljust(self, width: int, fillchar: unicode = ...) -> unicode: ... def lower(self) -> unicode: ... def lstrip(self, chars: unicode = ...) -> unicode: ... def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def replace(self, old: unicode, new: unicode, count: int = ...) -> unicode: ... def rfind(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rjust(self, width: int, fillchar: unicode = ...) -> unicode: ... def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def rsplit(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ... def rstrip(self, chars: unicode = ...) -> unicode: ... def split(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ... def splitlines(self, keepends: bool = ...) -> List[unicode]: ... def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = ..., end: int = ...) -> bool: ... def strip(self, chars: unicode = ...) -> unicode: ... def swapcase(self) -> unicode: ... def title(self) -> unicode: ... def translate(self, table: Union[Dict[int, Any], unicode]) -> unicode: ... def upper(self) -> unicode: ... def zfill(self, width: int) -> unicode: ... @overload def __getitem__(self, i: int) -> unicode: ... @overload def __getitem__(self, s: slice) -> unicode: ... def __getslice__(self, start: int, stop: int) -> unicode: ... def __add__(self, s: unicode) -> unicode: ... def __mul__(self, n: int) -> unicode: ... def __rmul__(self, n: int) -> unicode: ... def __mod__(self, x: Any) -> unicode: ... 
def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: unicode) -> bool: ... def __le__(self, x: unicode) -> bool: ... def __gt__(self, x: unicode) -> bool: ... def __ge__(self, x: unicode) -> bool: ... def __len__(self) -> int: ... # The argument type is incompatible with Sequence def __contains__(self, s: Union[unicode, bytes]) -> bool: ... # type: ignore def __iter__(self) -> Iterator[unicode]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... def __getnewargs__(self) -> Tuple[unicode]: ... _str_base = basestring class str(Sequence[str], _str_base): if sys.version_info >= (3,): @overload def __init__(self, o: object = ...) -> None: ... @overload def __init__(self, o: bytes, encoding: str = ..., errors: str = ...) -> None: ... else: def __init__(self, o: object = ...) -> None: ... def capitalize(self) -> str: ... if sys.version_info >= (3, 3): def casefold(self) -> str: ... def center(self, width: int, fillchar: str = ...) -> str: ... def count(self, x: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... if sys.version_info < (3,): def decode(self, encoding: Text = ..., errors: Text = ...) -> unicode: ... def encode(self, encoding: Text = ..., errors: Text = ...) -> bytes: ... if sys.version_info >= (3,): def endswith(self, suffix: Union[Text, Tuple[Text, ...]], start: Optional[int] = ..., end: Optional[int] = ...) -> bool: ... else: def endswith(self, suffix: Union[Text, Tuple[Text, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> str: ... def find(self, sub: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def format(self, *args: object, **kwargs: object) -> str: ... if sys.version_info >= (3,): def format_map(self, map: Mapping[str, Any]) -> str: ... def index(self, sub: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) 
-> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... if sys.version_info >= (3, 7): def isascii(self) -> bool: ... if sys.version_info >= (3,): def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... if sys.version_info >= (3,): def isidentifier(self) -> bool: ... def islower(self) -> bool: ... if sys.version_info >= (3,): def isnumeric(self) -> bool: ... def isprintable(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... if sys.version_info >= (3,): def join(self, iterable: Iterable[str]) -> str: ... else: def join(self, iterable: Iterable[AnyStr]) -> AnyStr: ... def ljust(self, width: int, fillchar: str = ...) -> str: ... def lower(self) -> str: ... if sys.version_info >= (3,): def lstrip(self, chars: Optional[str] = ...) -> str: ... def partition(self, sep: str) -> Tuple[str, str, str]: ... def replace(self, old: str, new: str, count: int = ...) -> str: ... else: @overload def lstrip(self, chars: str = ...) -> str: ... @overload def lstrip(self, chars: unicode) -> unicode: ... @overload def partition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ... @overload def partition(self, sep: str) -> Tuple[str, str, str]: ... @overload def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def replace(self, old: AnyStr, new: AnyStr, count: int = ...) -> AnyStr: ... def rfind(self, sub: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def rindex(self, sub: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def rjust(self, width: int, fillchar: str = ...) -> str: ... if sys.version_info >= (3,): def rpartition(self, sep: str) -> Tuple[str, str, str]: ... def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... def rstrip(self, chars: Optional[str] = ...) -> str: ... def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... 
else: @overload def rpartition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ... @overload def rpartition(self, sep: str) -> Tuple[str, str, str]: ... @overload def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... @overload def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... @overload def rsplit(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ... @overload def rstrip(self, chars: str = ...) -> str: ... @overload def rstrip(self, chars: unicode) -> unicode: ... @overload def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... @overload def split(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ... def splitlines(self, keepends: bool = ...) -> List[str]: ... if sys.version_info >= (3,): def startswith(self, prefix: Union[Text, Tuple[Text, ...]], start: Optional[int] = ..., end: Optional[int] = ...) -> bool: ... def strip(self, chars: Optional[str] = ...) -> str: ... else: def startswith(self, prefix: Union[Text, Tuple[Text, ...]]) -> bool: ... @overload def strip(self, chars: str = ...) -> str: ... @overload def strip(self, chars: unicode) -> unicode: ... def swapcase(self) -> str: ... def title(self) -> str: ... if sys.version_info >= (3,): def translate(self, table: Union[Mapping[int, Union[int, str, None]], Sequence[Union[int, str, None]]]) -> str: ... else: def translate(self, table: Optional[AnyStr], deletechars: AnyStr = ...) -> AnyStr: ... def upper(self) -> str: ... def zfill(self, width: int) -> str: ... if sys.version_info >= (3,): @staticmethod @overload def maketrans(x: Union[Dict[int, _T], Dict[str, _T], Dict[Union[str, int], _T]]) -> Dict[int, _T]: ... @staticmethod @overload def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Union[int, None]]: ... if sys.version_info >= (3,): def __add__(self, s: str) -> str: ... else: def __add__(self, s: AnyStr) -> AnyStr: ... 
# Incompatible with Sequence.__contains__ def __contains__(self, o: Union[str, Text]) -> bool: ... # type: ignore def __eq__(self, x: object) -> bool: ... def __ge__(self, x: Text) -> bool: ... def __getitem__(self, i: Union[int, slice]) -> str: ... def __gt__(self, x: Text) -> bool: ... def __hash__(self) -> int: ... def __iter__(self) -> Iterator[str]: ... def __le__(self, x: Text) -> bool: ... def __len__(self) -> int: ... def __lt__(self, x: Text) -> bool: ... def __mod__(self, x: Any) -> str: ... def __mul__(self, n: int) -> str: ... def __ne__(self, x: object) -> bool: ... def __repr__(self) -> str: ... def __rmul__(self, n: int) -> str: ... def __str__(self) -> str: ... def __getnewargs__(self) -> Tuple[str]: ... if sys.version_info < (3,): def __getslice__(self, start: int, stop: int) -> str: ... def __float__(self) -> float: ... def __int__(self) -> int: ... if sys.version_info >= (3,): class bytes(ByteString): @overload def __init__(self, ints: Iterable[int]) -> None: ... @overload def __init__(self, string: str, encoding: str, errors: str = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... @overload def __init__(self) -> None: ... @overload def __init__(self, o: SupportsBytes) -> None: ... def capitalize(self) -> bytes: ... def center(self, width: int, fillchar: bytes = ...) -> bytes: ... def count(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def decode(self, encoding: str = ..., errors: str = ...) -> str: ... def endswith(self, suffix: Union[bytes, Tuple[bytes, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> bytes: ... def find(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 5): def hex(self) -> str: ... def index(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... 
if sys.version_info >= (3, 7): def isascii(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[Union[ByteString, memoryview]]) -> bytes: ... def ljust(self, width: int, fillchar: bytes = ...) -> bytes: ... def lower(self) -> bytes: ... def lstrip(self, chars: Optional[bytes] = ...) -> bytes: ... def partition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ... def replace(self, old: bytes, new: bytes, count: int = ...) -> bytes: ... def rfind(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def rindex(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def rjust(self, width: int, fillchar: bytes = ...) -> bytes: ... def rpartition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ... def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ... def rstrip(self, chars: Optional[bytes] = ...) -> bytes: ... def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ... def splitlines(self, keepends: bool = ...) -> List[bytes]: ... def startswith( self, prefix: Union[bytes, Tuple[bytes, ...]], start: Optional[int] = ..., end: Optional[int] = ..., ) -> bool: ... def strip(self, chars: Optional[bytes] = ...) -> bytes: ... def swapcase(self) -> bytes: ... def title(self) -> bytes: ... def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytes: ... def upper(self) -> bytes: ... def zfill(self, width: int) -> bytes: ... @classmethod def fromhex(cls, s: str) -> bytes: ... @classmethod def maketrans(cls, frm: bytes, to: bytes) -> bytes: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... 
@overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> bytes: ... def __add__(self, s: bytes) -> bytes: ... def __mul__(self, n: int) -> bytes: ... def __rmul__(self, n: int) -> bytes: ... if sys.version_info >= (3, 5): def __mod__(self, value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ def __contains__(self, o: Union[int, bytes]) -> bool: ... # type: ignore def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: bytes) -> bool: ... def __le__(self, x: bytes) -> bool: ... def __gt__(self, x: bytes) -> bool: ... def __ge__(self, x: bytes) -> bool: ... def __getnewargs__(self) -> Tuple[bytes]: ... else: bytes = str class bytearray(MutableSequence[int], ByteString): if sys.version_info >= (3,): @overload def __init__(self) -> None: ... @overload def __init__(self, ints: Iterable[int]) -> None: ... @overload def __init__(self, string: Text, encoding: Text, errors: Text = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... else: @overload def __init__(self) -> None: ... @overload def __init__(self, ints: Iterable[int]) -> None: ... @overload def __init__(self, string: str) -> None: ... @overload def __init__(self, string: Text, encoding: Text, errors: Text = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... def capitalize(self) -> bytearray: ... def center(self, width: int, fillchar: bytes = ...) -> bytearray: ... if sys.version_info >= (3,): def count(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def copy(self) -> bytearray: ... else: def count(self, x: str) -> int: ... def decode(self, encoding: Text = ..., errors: Text = ...) -> str: ... def endswith(self, suffix: Union[bytes, Tuple[bytes, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> bytearray: ... if sys.version_info < (3,): def extend(self, iterable: Union[str, Iterable[int]]) -> None: ... 
if sys.version_info >= (3,): def find(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 5): def hex(self) -> str: ... def index(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def find(self, sub: str, start: int = ..., end: int = ...) -> int: ... def index(self, sub: str, start: int = ..., end: int = ...) -> int: ... def insert(self, index: int, object: int) -> None: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... if sys.version_info >= (3, 7): def isascii(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... if sys.version_info >= (3,): def join(self, iterable: Iterable[Union[ByteString, memoryview]]) -> bytearray: ... def ljust(self, width: int, fillchar: bytes = ...) -> bytearray: ... else: def join(self, iterable: Iterable[str]) -> bytearray: ... def ljust(self, width: int, fillchar: str = ...) -> bytearray: ... def lower(self) -> bytearray: ... def lstrip(self, chars: Optional[bytes] = ...) -> bytearray: ... def partition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ... def replace(self, old: bytes, new: bytes, count: int = ...) -> bytearray: ... if sys.version_info >= (3,): def rfind(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def rindex(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def rfind(self, sub: bytes, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: bytes, start: int = ..., end: int = ...) -> int: ... def rjust(self, width: int, fillchar: bytes = ...) -> bytearray: ... def rpartition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ... def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ... 
def rstrip(self, chars: Optional[bytes] = ...) -> bytearray: ... def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ... def splitlines(self, keepends: bool = ...) -> List[bytearray]: ... def startswith( self, prefix: Union[bytes, Tuple[bytes, ...]], start: Optional[int] = ..., end: Optional[int] = ..., ) -> bool: ... def strip(self, chars: Optional[bytes] = ...) -> bytearray: ... def swapcase(self) -> bytearray: ... def title(self) -> bytearray: ... if sys.version_info >= (3,): def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytearray: ... else: def translate(self, table: str) -> bytearray: ... def upper(self) -> bytearray: ... def zfill(self, width: int) -> bytearray: ... @staticmethod def fromhex(s: str) -> bytearray: ... if sys.version_info >= (3,): @classmethod def maketrans(cls, frm: bytes, to: bytes) -> bytes: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... __hash__: None # type: ignore @overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> bytearray: ... @overload def __setitem__(self, i: int, x: int) -> None: ... @overload def __setitem__(self, s: slice, x: Union[Iterable[int], bytes]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... if sys.version_info < (3,): def __getslice__(self, start: int, stop: int) -> bytearray: ... def __setslice__(self, start: int, stop: int, x: Union[Sequence[int], str]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __add__(self, s: bytes) -> bytearray: ... if sys.version_info >= (3,): def __iadd__(self, s: Iterable[int]) -> bytearray: ... def __mul__(self, n: int) -> bytearray: ... if sys.version_info >= (3,): def __rmul__(self, n: int) -> bytearray: ... def __imul__(self, n: int) -> bytearray: ... 
if sys.version_info >= (3, 5): def __mod__(self, value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ def __contains__(self, o: Union[int, bytes]) -> bool: ... # type: ignore def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: bytes) -> bool: ... def __le__(self, x: bytes) -> bool: ... def __gt__(self, x: bytes) -> bool: ... def __ge__(self, x: bytes) -> bool: ... if sys.version_info >= (3,): _mv_container_type = int else: _mv_container_type = str class memoryview(Sized, Container[_mv_container_type]): format: str itemsize: int shape: Optional[Tuple[int, ...]] strides: Optional[Tuple[int, ...]] suboffsets: Optional[Tuple[int, ...]] readonly: bool ndim: int if sys.version_info >= (3,): c_contiguous: bool f_contiguous: bool contiguous: bool nbytes: int def __init__(self, obj: Union[bytes, bytearray, memoryview]) -> None: ... def __enter__(self) -> memoryview: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> None: ... else: def __init__(self, obj: Union[bytes, bytearray, buffer, memoryview]) -> None: ... @overload def __getitem__(self, i: int) -> _mv_container_type: ... @overload def __getitem__(self, s: slice) -> memoryview: ... def __contains__(self, x: object) -> bool: ... def __iter__(self) -> Iterator[_mv_container_type]: ... def __len__(self) -> int: ... @overload def __setitem__(self, s: slice, o: memoryview) -> None: ... @overload def __setitem__(self, i: int, o: bytes) -> None: ... @overload def __setitem__(self, s: slice, o: Sequence[bytes]) -> None: ... def tobytes(self) -> bytes: ... def tolist(self) -> List[int]: ... if sys.version_info >= (3, 2): def release(self) -> None: ... if sys.version_info >= (3, 5): def hex(self) -> str: ... class bool(int): def __init__(self, o: object = ...) -> None: ... @overload def __and__(self, x: bool) -> bool: ... @overload def __and__(self, x: int) -> int: ... 
@overload def __or__(self, x: bool) -> bool: ... @overload def __or__(self, x: int) -> int: ... @overload def __xor__(self, x: bool) -> bool: ... @overload def __xor__(self, x: int) -> int: ... @overload def __rand__(self, x: bool) -> bool: ... @overload def __rand__(self, x: int) -> int: ... @overload def __ror__(self, x: bool) -> bool: ... @overload def __ror__(self, x: int) -> int: ... @overload def __rxor__(self, x: bool) -> bool: ... @overload def __rxor__(self, x: int) -> int: ... def __getnewargs__(self) -> Tuple[int]: ... class slice(object): start: Any step: Any stop: Any @overload def __init__(self, stop: Any) -> None: ... @overload def __init__(self, start: Any, stop: Any, step: Any = ...) -> None: ... __hash__: None # type: ignore def indices(self, len: int) -> Tuple[int, int, int]: ... class tuple(Sequence[_T_co], Generic[_T_co]): def __new__(cls: Type[_T], iterable: Iterable[_T_co] = ...) -> _T: ... def __len__(self) -> int: ... def __contains__(self, x: object) -> bool: ... @overload def __getitem__(self, x: int) -> _T_co: ... @overload def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ... def __iter__(self) -> Iterator[_T_co]: ... def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __le__(self, x: Tuple[_T_co, ...]) -> bool: ... def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ... @overload def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ... @overload def __add__(self, x: Tuple[Any, ...]) -> Tuple[Any, ...]: ... def __mul__(self, n: int) -> Tuple[_T_co, ...]: ... def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ... def count(self, x: Any) -> int: ... if sys.version_info >= (3, 5): def index(self, x: Any, start: int = ..., end: int = ...) -> int: ... else: def index(self, x: Any) -> int: ... class function: # TODO not defined in builtins! 
__name__: str __module__: str __code__: CodeType if sys.version_info >= (3,): __qualname__: str __annotations__: Dict[str, Any] class list(MutableSequence[_T], Generic[_T]): @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... if sys.version_info >= (3,): def clear(self) -> None: ... def copy(self) -> List[_T]: ... def append(self, object: _T) -> None: ... def extend(self, iterable: Iterable[_T]) -> None: ... def pop(self, index: int = ...) -> _T: ... def index(self, object: _T, start: int = ..., stop: int = ...) -> int: ... def count(self, object: _T) -> int: ... def insert(self, index: int, object: _T) -> None: ... def remove(self, object: _T) -> None: ... def reverse(self) -> None: ... if sys.version_info >= (3,): def sort(self, *, key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> None: ... else: def sort(self, cmp: Callable[[_T, _T], Any] = ..., key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... __hash__: None # type: ignore @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, s: slice) -> List[_T]: ... @overload def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... if sys.version_info < (3,): def __getslice__(self, start: int, stop: int) -> List[_T]: ... def __setslice__(self, start: int, stop: int, o: Sequence[_T]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __add__(self, x: List[_T]) -> List[_T]: ... def __iadd__(self: _S, x: Iterable[_T]) -> _S: ... def __mul__(self, n: int) -> List[_T]: ... def __rmul__(self, n: int) -> List[_T]: ... if sys.version_info >= (3,): def __imul__(self: _S, n: int) -> _S: ... def __contains__(self, o: object) -> bool: ... 
def __reversed__(self) -> Iterator[_T]: ... def __gt__(self, x: List[_T]) -> bool: ... def __ge__(self, x: List[_T]) -> bool: ... def __lt__(self, x: List[_T]) -> bool: ... def __le__(self, x: List[_T]) -> bool: ... class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # NOTE: Keyword arguments are special. If they are used, _KT must include # str, but we have no way of enforcing it here. @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ... if sys.version_info < (3,): def has_key(self, k: _KT) -> bool: ... def clear(self) -> None: ... def copy(self) -> Dict[_KT, _VT]: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... @overload def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... if sys.version_info >= (3,): def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT]: ... def items(self) -> ItemsView[_KT, _VT]: ... else: def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... def viewkeys(self) -> KeysView[_KT]: ... def viewvalues(self) -> ValuesView[_VT]: ... def viewitems(self) -> ItemsView[_KT, _VT]: ... @staticmethod @overload def fromkeys(seq: Iterable[_T]) -> Dict[_T, Any]: ... # TODO: Actually a class method (mypy/issues#328) @staticmethod @overload def fromkeys(seq: Iterable[_T], value: _S) -> Dict[_T, _S]: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... 
def __iter__(self) -> Iterator[_KT]: ... if sys.version_info >= (3, 8): def __reversed__(self) -> Iterator[_KT]: ... def __str__(self) -> str: ... __hash__: None # type: ignore class set(MutableSet[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ...) -> None: ... def add(self, element: _T) -> None: ... def clear(self) -> None: ... def copy(self) -> Set[_T]: ... def difference(self, *s: Iterable[Any]) -> Set[_T]: ... def difference_update(self, *s: Iterable[Any]) -> None: ... def discard(self, element: _T) -> None: ... def intersection(self, *s: Iterable[Any]) -> Set[_T]: ... def intersection_update(self, *s: Iterable[Any]) -> None: ... def isdisjoint(self, s: Iterable[Any]) -> bool: ... def issubset(self, s: Iterable[Any]) -> bool: ... def issuperset(self, s: Iterable[Any]) -> bool: ... def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ... def symmetric_difference_update(self, s: Iterable[_T]) -> None: ... def union(self, *s: Iterable[_T]) -> Set[_T]: ... def update(self, *s: Iterable[_T]) -> None: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[object]) -> Set[_T]: ... def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ... def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __isub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... 
__hash__: None # type: ignore class frozenset(AbstractSet[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ...) -> None: ... def copy(self) -> FrozenSet[_T]: ... def difference(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def intersection(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def isdisjoint(self, s: Iterable[_T]) -> bool: ... def issubset(self, s: Iterable[object]) -> bool: ... def issuperset(self, s: Iterable[object]) -> bool: ... def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ... def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... class enumerate(Iterator[Tuple[int, _T]], Generic[_T]): def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... def __iter__(self) -> Iterator[Tuple[int, _T]]: ... if sys.version_info >= (3,): def __next__(self) -> Tuple[int, _T]: ... else: def next(self) -> Tuple[int, _T]: ... if sys.version_info >= (3,): class range(Sequence[int]): start: int stop: int step: int @overload def __init__(self, stop: int) -> None: ... @overload def __init__(self, start: int, stop: int, step: int = ...) -> None: ... def count(self, value: int) -> int: ... def index(self, value: int, start: int = ..., stop: Optional[int] = ...) -> int: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[int]: ... 
@overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> range: ... def __repr__(self) -> str: ... def __reversed__(self) -> Iterator[int]: ... else: class xrange(Sized, Iterable[int], Reversible[int]): @overload def __init__(self, stop: int) -> None: ... @overload def __init__(self, start: int, stop: int, step: int = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __getitem__(self, i: int) -> int: ... def __reversed__(self) -> Iterator[int]: ... class property(object): def __init__(self, fget: Optional[Callable[[Any], Any]] = ..., fset: Optional[Callable[[Any, Any], None]] = ..., fdel: Optional[Callable[[Any], None]] = ..., doc: Optional[str] = ...) -> None: ... def getter(self, fget: Callable[[Any], Any]) -> property: ... def setter(self, fset: Callable[[Any, Any], None]) -> property: ... def deleter(self, fdel: Callable[[Any], None]) -> property: ... def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ... def __set__(self, obj: Any, value: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... def fget(self) -> Any: ... def fset(self, value: Any) -> None: ... def fdel(self) -> None: ... if sys.version_info < (3,): long = int NotImplemented: Any def abs(__n: SupportsAbs[_T]) -> _T: ... def all(__i: Iterable[object]) -> bool: ... def any(__i: Iterable[object]) -> bool: ... if sys.version_info < (3,): def apply(__func: Callable[..., _T], __args: Optional[Sequence[Any]] = ..., __kwds: Optional[Mapping[str, Any]] = ...) -> _T: ... if sys.version_info >= (3,): def ascii(__o: object) -> str: ... def bin(__number: Union[int, _SupportsIndex]) -> str: ... if sys.version_info >= (3, 7): def breakpoint(*args: Any, **kws: Any) -> None: ... def callable(__o: object) -> bool: ... def chr(__code: int) -> str: ... if sys.version_info < (3,): def cmp(__x: Any, __y: Any) -> int: ... 
_N1 = TypeVar('_N1', bool, int, float, complex) def coerce(__x: _N1, __y: _N1) -> Tuple[_N1, _N1]: ... if sys.version_info >= (3, 6): # This class is to be exported as PathLike from os, # but we define it here as _PathLike to avoid import cycle issues. # See https://github.com/python/typeshed/pull/991#issuecomment-288160993 class _PathLike(Generic[AnyStr]): def __fspath__(self) -> AnyStr: ... def compile(source: Union[str, bytes, mod, AST], filename: Union[str, bytes, _PathLike[Any]], mode: str, flags: int = ..., dont_inherit: int = ..., optimize: int = ...) -> Any: ... elif sys.version_info >= (3,): def compile(source: Union[str, bytes, mod, AST], filename: Union[str, bytes], mode: str, flags: int = ..., dont_inherit: int = ..., optimize: int = ...) -> Any: ... else: def compile(source: Union[Text, mod], filename: Text, mode: Text, flags: int = ..., dont_inherit: int = ...) -> Any: ... if sys.version_info >= (3,): def copyright() -> None: ... def credits() -> None: ... def delattr(__o: Any, __name: Text) -> None: ... def dir(__o: object = ...) -> List[str]: ... _N2 = TypeVar('_N2', int, float) def divmod(__a: _N2, __b: _N2) -> Tuple[_N2, _N2]: ... def eval(__source: Union[Text, bytes, CodeType], __globals: Optional[Dict[str, Any]] = ..., __locals: Optional[Mapping[str, Any]] = ...) -> Any: ... if sys.version_info >= (3,): def exec(__object: Union[str, bytes, CodeType], __globals: Optional[Dict[str, Any]] = ..., __locals: Optional[Mapping[str, Any]] = ...) -> Any: ... else: def execfile(__filename: str, __globals: Optional[Dict[str, Any]] = ..., __locals: Optional[Dict[str, Any]] = ...) -> None: ... def exit(code: object = ...) -> NoReturn: ... if sys.version_info >= (3,): @overload def filter(__function: None, __iterable: Iterable[Optional[_T]]) -> Iterator[_T]: ... @overload def filter(__function: Callable[[_T], Any], __iterable: Iterable[_T]) -> Iterator[_T]: ... else: @overload def filter(__function: Callable[[AnyStr], Any], __iterable: AnyStr) -> AnyStr: ... 
# type: ignore @overload def filter(__function: None, __iterable: Tuple[Optional[_T], ...]) -> Tuple[_T, ...]: ... # type: ignore @overload def filter(__function: Callable[[_T], Any], __iterable: Tuple[_T, ...]) -> Tuple[_T, ...]: ... # type: ignore @overload def filter(__function: None, __iterable: Iterable[Optional[_T]]) -> List[_T]: ... @overload def filter(__function: Callable[[_T], Any], __iterable: Iterable[_T]) -> List[_T]: ... def format(__o: object, __format_spec: str = ...) -> str: ... # TODO unicode def getattr(__o: Any, name: Text, __default: Any = ...) -> Any: ... def globals() -> Dict[str, Any]: ... def hasattr(__o: Any, __name: Text) -> bool: ... def hash(__o: object) -> int: ... if sys.version_info >= (3,): def help(*args: Any, **kwds: Any) -> None: ... def hex(__i: Union[int, _SupportsIndex]) -> str: ... def id(__o: object) -> int: ... if sys.version_info >= (3,): def input(__prompt: Any = ...) -> str: ... else: def input(__prompt: Any = ...) -> Any: ... def intern(__string: str) -> str: ... @overload def iter(__iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def iter(__function: Callable[[], Optional[_T]], __sentinel: None) -> Iterator[_T]: ... @overload def iter(__function: Callable[[], _T], __sentinel: Any) -> Iterator[_T]: ... def isinstance(__o: object, __t: Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]]) -> bool: ... def issubclass(__cls: type, __classinfo: Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]]) -> bool: ... def len(__o: Sized) -> int: ... if sys.version_info >= (3,): def license() -> None: ... def locals() -> Dict[str, Any]: ... if sys.version_info >= (3,): @overload def map(__func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> Iterator[_S]: ... @overload def map(__func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Iterator[_S]: ... 
@overload def map(__func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> Iterator[_S]: ... @overload def map(__func: Callable[[_T1, _T2, _T3, _T4], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]) -> Iterator[_S]: ... @overload def map(__func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> Iterator[_S]: ... @overload def map(__func: Callable[..., _S], __iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any], __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[_S]: ... else: @overload def map(__func: None, __iter1: Iterable[_T1]) -> List[_T1]: ... @overload def map(__func: None, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ... @overload def map(__func: None, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> List[Tuple[_T1, _T2, _T3]]: ... @overload def map(__func: None, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def map(__func: None, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> List[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def map(__func: None, __iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any], __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any], *iterables: Iterable[Any]) -> List[Tuple[Any, ...]]: ... @overload def map(__func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> List[_S]: ... @overload def map(__func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> List[_S]: ... 
@overload def map(__func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> List[_S]: ... @overload def map(__func: Callable[[_T1, _T2, _T3, _T4], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]) -> List[_S]: ... @overload def map(__func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> List[_S]: ... @overload def map(__func: Callable[..., _S], __iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any], __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any], *iterables: Iterable[Any]) -> List[_S]: ... if sys.version_info >= (3,): @overload def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def max(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ...) -> _T: ... @overload def max(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ..., default: _VT) -> Union[_T, _VT]: ... else: @overload def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def max(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ...) -> _T: ... if sys.version_info >= (3,): @overload def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ..., default: _VT) -> Union[_T, _VT]: ... else: @overload def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ...) -> _T: ... @overload def next(__i: Iterator[_T]) -> _T: ... @overload def next(__i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ... def oct(__i: Union[int, _SupportsIndex]) -> str: ... 
if sys.version_info >= (3, 6): def open(file: Union[str, bytes, int, _PathLike[Any]], mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., opener: Optional[Callable[[str, int], int]] = ...) -> IO[Any]: ... elif sys.version_info >= (3,): def open(file: Union[str, bytes, int], mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., opener: Optional[Callable[[str, int], int]] = ...) -> IO[Any]: ... else: def open(name: Union[unicode, int], mode: unicode = ..., buffering: int = ...) -> BinaryIO: ... def ord(__c: Union[Text, bytes]) -> int: ... if sys.version_info >= (3,): class _Writer(Protocol): def write(self, __s: str) -> Any: ... def print( *values: object, sep: Optional[Text] = ..., end: Optional[Text] = ..., file: Optional[_Writer] = ..., flush: bool = ... ) -> None: ... else: class _Writer(Protocol): def write(self, __s: Any) -> Any: ... # This is only available after from __future__ import print_function. def print(*values: object, sep: Optional[Text] = ..., end: Optional[Text] = ..., file: Optional[_Writer] = ...) -> None: ... @overload def pow(__x: int, __y: int) -> Any: ... # The return type can be int or float, depending on y @overload def pow(__x: int, __y: int, __z: int) -> Any: ... @overload def pow(__x: float, __y: float) -> float: ... @overload def pow(__x: float, __y: float, __z: float) -> float: ... def quit(code: object = ...) -> NoReturn: ... if sys.version_info < (3,): def range(__x: int, __y: int = ..., __step: int = ...) -> List[int]: ... def raw_input(__prompt: Any = ...) -> str: ... @overload def reduce(__function: Callable[[_T, _S], _T], __iterable: Iterable[_S], __initializer: _T) -> _T: ... @overload def reduce(__function: Callable[[_T, _T], _T], __iterable: Iterable[_T]) -> _T: ... def reload(__module: Any) -> Any: ... 
# --- reversed ... __import__, the private ellipsis type, py2 buffer, and the
# --- top of the exception hierarchy (BaseException ... MemoryError) ---
@overload
def reversed(__object: Sequence[_T]) -> Iterator[_T]: ...
@overload
def reversed(__object: Reversible[_T]) -> Iterator[_T]: ...
def repr(__o: object) -> str: ...
if sys.version_info >= (3,):
    @overload
    def round(number: float) -> int: ...
    @overload
    def round(number: float, ndigits: None) -> int: ...
    @overload
    def round(number: float, ndigits: int) -> float: ...
    @overload
    def round(number: SupportsRound[_T]) -> int: ...
    @overload
    def round(number: SupportsRound[_T], ndigits: None) -> int: ...
    @overload
    def round(number: SupportsRound[_T], ndigits: int) -> _T: ...
else:
    @overload
    def round(number: float) -> float: ...
    @overload
    def round(number: float, ndigits: int) -> float: ...
    @overload
    def round(number: SupportsFloat) -> float: ...
    @overload
    def round(number: SupportsFloat, ndigits: int) -> float: ...
def setattr(__object: Any, __name: Text, __value: Any) -> None: ...
if sys.version_info >= (3,):
    def sorted(__iterable: Iterable[_T], *, key: Optional[Callable[[_T], Any]] = ...,
               reverse: bool = ...) -> List[_T]: ...
else:
    def sorted(__iterable: Iterable[_T], *, cmp: Callable[[_T, _T], int] = ...,
               key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> List[_T]: ...
@overload
def sum(__iterable: Iterable[_T]) -> Union[_T, int]: ...
@overload
def sum(__iterable: Iterable[_T], __start: _S) -> Union[_T, _S]: ...
if sys.version_info < (3,):
    def unichr(__i: int) -> unicode: ...
def vars(__object: Any = ...) -> Dict[str, Any]: ...
if sys.version_info >= (3,):
    @overload
    def zip(__iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
    @overload
    def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ...
    @overload
    def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2],
            __iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ...
    @overload
    def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3],
            __iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ...
    @overload
    def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3],
            __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ...
    @overload
    def zip(__iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any],
            __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any],
            *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ...
else:
    @overload
    def zip(__iter1: Iterable[_T1]) -> List[Tuple[_T1]]: ...
    @overload
    def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ...
    @overload
    def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2],
            __iter3: Iterable[_T3]) -> List[Tuple[_T1, _T2, _T3]]: ...
    @overload
    def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3],
            __iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2, _T3, _T4]]: ...
    @overload
    def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3],
            __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> List[Tuple[_T1, _T2, _T3, _T4, _T5]]: ...
    @overload
    def zip(__iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any],
            __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any],
            *iterables: Iterable[Any]) -> List[Tuple[Any, ...]]: ...
def __import__(name: Text, globals: Optional[Mapping[str, Any]] = ...,
               locals: Optional[Mapping[str, Any]] = ...,
               fromlist: Sequence[str] = ..., level: int = ...) -> Any: ...

# Actually the type of Ellipsis is <type 'ellipsis'>, but since it's
# not exposed anywhere under that name, we make it private here.
class ellipsis: ...
Ellipsis: ellipsis

if sys.version_info < (3,):
    # TODO: buffer support is incomplete; e.g. some_string.startswith(some_buffer) doesn't type check.
    _AnyBuffer = TypeVar('_AnyBuffer', str, unicode, bytearray, buffer)
    class buffer(Sized):
        def __init__(self, object: _AnyBuffer, offset: int = ..., size: int = ...) -> None: ...
        def __add__(self, other: _AnyBuffer) -> str: ...
        def __cmp__(self, other: _AnyBuffer) -> bool: ...
        def __getitem__(self, key: Union[int, slice]) -> str: ...
        def __getslice__(self, i: int, j: int) -> str: ...
        def __len__(self) -> int: ...
        def __mul__(self, x: int) -> str: ...

class BaseException(object):
    args: Tuple[Any, ...]
    if sys.version_info < (3,):
        message: Any
    if sys.version_info >= (3,):
        __cause__: Optional[BaseException]
        __context__: Optional[BaseException]
        __suppress_context__: bool
        __traceback__: Optional[TracebackType]
    def __init__(self, *args: object) -> None: ...
    def __str__(self) -> str: ...
    def __repr__(self) -> str: ...
    if sys.version_info < (3,):
        def __getitem__(self, i: int) -> Any: ...
        def __getslice__(self, start: int, stop: int) -> Tuple[Any, ...]: ...
    if sys.version_info >= (3,):
        def with_traceback(self, tb: Optional[TracebackType]) -> BaseException: ...

class GeneratorExit(BaseException): ...
class KeyboardInterrupt(BaseException): ...
class SystemExit(BaseException):
    code: int
class Exception(BaseException): ...
class StopIteration(Exception):
    if sys.version_info >= (3,):
        value: Any
if sys.version_info >= (3,):
    _StandardError = Exception
    class OSError(Exception):
        errno: int
        strerror: str
        # filename, filename2 are actually Union[str, bytes, None]
        filename: Any
        filename2: Any
    EnvironmentError = OSError
    IOError = OSError
else:
    class StandardError(Exception): ...
    _StandardError = StandardError
    class EnvironmentError(StandardError):
        errno: int
        strerror: str
        # TODO can this be unicode?
        filename: str
    class OSError(EnvironmentError): ...
    class IOError(EnvironmentError): ...

class ArithmeticError(_StandardError): ...
class AssertionError(_StandardError): ...
class AttributeError(_StandardError): ...
class BufferError(_StandardError): ...
class EOFError(_StandardError): ...
class ImportError(_StandardError):
    if sys.version_info >= (3, 3):
        def __init__(self, *args, name: Optional[str] = ..., path: Optional[str] = ...) -> None: ...
        name: Optional[str]
        path: Optional[str]
class LookupError(_StandardError): ...
class MemoryError(_StandardError): ...
class NameError(_StandardError): ... class ReferenceError(_StandardError): ... class RuntimeError(_StandardError): ... if sys.version_info >= (3, 5): class StopAsyncIteration(Exception): value: Any class SyntaxError(_StandardError): msg: str lineno: int offset: Optional[int] text: Optional[str] filename: str class SystemError(_StandardError): ... class TypeError(_StandardError): ... class ValueError(_StandardError): ... class FloatingPointError(ArithmeticError): ... class OverflowError(ArithmeticError): ... class ZeroDivisionError(ArithmeticError): ... if sys.version_info >= (3, 6): class ModuleNotFoundError(ImportError): ... class IndexError(LookupError): ... class KeyError(LookupError): ... class UnboundLocalError(NameError): ... class WindowsError(OSError): winerror: int if sys.version_info >= (3,): class BlockingIOError(OSError): characters_written: int class ChildProcessError(OSError): ... class ConnectionError(OSError): ... class BrokenPipeError(ConnectionError): ... class ConnectionAbortedError(ConnectionError): ... class ConnectionRefusedError(ConnectionError): ... class ConnectionResetError(ConnectionError): ... class FileExistsError(OSError): ... class FileNotFoundError(OSError): ... class InterruptedError(OSError): ... class IsADirectoryError(OSError): ... class NotADirectoryError(OSError): ... class PermissionError(OSError): ... class ProcessLookupError(OSError): ... class TimeoutError(OSError): ... class NotImplementedError(RuntimeError): ... if sys.version_info >= (3, 5): class RecursionError(RuntimeError): ... class IndentationError(SyntaxError): ... class TabError(IndentationError): ... class UnicodeError(ValueError): ... class UnicodeDecodeError(UnicodeError): encoding: str object: bytes start: int end: int reason: str def __init__(self, __encoding: str, __object: bytes, __start: int, __end: int, __reason: str) -> None: ... 
class UnicodeEncodeError(UnicodeError): encoding: str object: Text start: int end: int reason: str def __init__(self, __encoding: str, __object: Text, __start: int, __end: int, __reason: str) -> None: ... class UnicodeTranslateError(UnicodeError): ... class Warning(Exception): ... class UserWarning(Warning): ... class DeprecationWarning(Warning): ... class SyntaxWarning(Warning): ... class RuntimeWarning(Warning): ... class FutureWarning(Warning): ... class PendingDeprecationWarning(Warning): ... class ImportWarning(Warning): ... class UnicodeWarning(Warning): ... class BytesWarning(Warning): ... if sys.version_info >= (3, 2): class ResourceWarning(Warning): ... if sys.version_info < (3,): class file(BinaryIO): @overload def __init__(self, file: str, mode: str = ..., buffering: int = ...) -> None: ... @overload def __init__(self, file: unicode, mode: str = ..., buffering: int = ...) -> None: ... @overload def __init__(self, file: int, mode: str = ..., buffering: int = ...) -> None: ... def __iter__(self) -> Iterator[str]: ... def next(self) -> str: ... def read(self, n: int = ...) -> str: ... def __enter__(self) -> BinaryIO: ... def __exit__(self, t: Optional[type] = ..., exc: Optional[BaseException] = ..., tb: Optional[Any] = ...) -> Optional[bool]: ... def flush(self) -> None: ... def fileno(self) -> int: ... def isatty(self) -> bool: ... def close(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def seekable(self) -> bool: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def readline(self, limit: int = ...) -> str: ... def readlines(self, hint: int = ...) -> List[str]: ... def write(self, data: str) -> int: ... def writelines(self, data: Iterable[str]) -> None: ... def truncate(self, pos: Optional[int] = ...) -> int: ... 
mypy-0.761/mypy/typeshed/stdlib/2/_ast.pyi0000644€tŠÔÚ€2›s®0000001322113576752252024632 0ustar jukkaDROPBOX\Domain Users00000000000000import typing from typing import Optional __version__: str PyCF_ONLY_AST: int _identifier = str class AST: _attributes: typing.Tuple[str, ...] _fields: typing.Tuple[str, ...] def __init__(self, *args, **kwargs) -> None: ... class mod(AST): ... class Module(mod): body: typing.List[stmt] class Interactive(mod): body: typing.List[stmt] class Expression(mod): body: expr class Suite(mod): body: typing.List[stmt] class stmt(AST): lineno: int col_offset: int class FunctionDef(stmt): name: _identifier args: arguments body: typing.List[stmt] decorator_list: typing.List[expr] class ClassDef(stmt): name: _identifier bases: typing.List[expr] body: typing.List[stmt] decorator_list: typing.List[expr] class Return(stmt): value: Optional[expr] class Delete(stmt): targets: typing.List[expr] class Assign(stmt): targets: typing.List[expr] value: expr class AugAssign(stmt): target: expr op: operator value: expr class Print(stmt): dest: Optional[expr] values: typing.List[expr] nl: bool class For(stmt): target: expr iter: expr body: typing.List[stmt] orelse: typing.List[stmt] class While(stmt): test: expr body: typing.List[stmt] orelse: typing.List[stmt] class If(stmt): test: expr body: typing.List[stmt] orelse: typing.List[stmt] class With(stmt): context_expr: expr optional_vars: Optional[expr] body: typing.List[stmt] class Raise(stmt): type: Optional[expr] inst: Optional[expr] tback: Optional[expr] class TryExcept(stmt): body: typing.List[stmt] handlers: typing.List[ExceptHandler] orelse: typing.List[stmt] class TryFinally(stmt): body: typing.List[stmt] finalbody: typing.List[stmt] class Assert(stmt): test: expr msg: Optional[expr] class Import(stmt): names: typing.List[alias] class ImportFrom(stmt): module: Optional[_identifier] names: typing.List[alias] level: Optional[int] class Exec(stmt): body: expr globals: Optional[expr] locals: Optional[expr] 
class Global(stmt): names: typing.List[_identifier] class Expr(stmt): value: expr class Pass(stmt): ... class Break(stmt): ... class Continue(stmt): ... class slice(AST): ... _slice = slice # this lets us type the variable named 'slice' below class Slice(slice): lower: Optional[expr] upper: Optional[expr] step: Optional[expr] class ExtSlice(slice): dims: typing.List[slice] class Index(slice): value: expr class Ellipsis(slice): ... class expr(AST): lineno: int col_offset: int class BoolOp(expr): op: boolop values: typing.List[expr] class BinOp(expr): left: expr op: operator right: expr class UnaryOp(expr): op: unaryop operand: expr class Lambda(expr): args: arguments body: expr class IfExp(expr): test: expr body: expr orelse: expr class Dict(expr): keys: typing.List[expr] values: typing.List[expr] class Set(expr): elts: typing.List[expr] class ListComp(expr): elt: expr generators: typing.List[comprehension] class SetComp(expr): elt: expr generators: typing.List[comprehension] class DictComp(expr): key: expr value: expr generators: typing.List[comprehension] class GeneratorExp(expr): elt: expr generators: typing.List[comprehension] class Yield(expr): value: Optional[expr] class Compare(expr): left: expr ops: typing.List[cmpop] comparators: typing.List[expr] class Call(expr): func: expr args: typing.List[expr] keywords: typing.List[keyword] starargs: Optional[expr] kwargs: Optional[expr] class Repr(expr): value: expr class Num(expr): n: float class Str(expr): s: str class Attribute(expr): value: expr attr: _identifier ctx: expr_context class Subscript(expr): value: expr slice: _slice ctx: expr_context class Name(expr): id: _identifier ctx: expr_context class List(expr): elts: typing.List[expr] ctx: expr_context class Tuple(expr): elts: typing.List[expr] ctx: expr_context class expr_context(AST): ... class AugLoad(expr_context): ... class AugStore(expr_context): ... class Del(expr_context): ... class Load(expr_context): ... class Param(expr_context): ... 
class Store(expr_context): ... class boolop(AST): ... class And(boolop): ... class Or(boolop): ... class operator(AST): ... class Add(operator): ... class BitAnd(operator): ... class BitOr(operator): ... class BitXor(operator): ... class Div(operator): ... class FloorDiv(operator): ... class LShift(operator): ... class Mod(operator): ... class Mult(operator): ... class Pow(operator): ... class RShift(operator): ... class Sub(operator): ... class unaryop(AST): ... class Invert(unaryop): ... class Not(unaryop): ... class UAdd(unaryop): ... class USub(unaryop): ... class cmpop(AST): ... class Eq(cmpop): ... class Gt(cmpop): ... class GtE(cmpop): ... class In(cmpop): ... class Is(cmpop): ... class IsNot(cmpop): ... class Lt(cmpop): ... class LtE(cmpop): ... class NotEq(cmpop): ... class NotIn(cmpop): ... class comprehension(AST): target: expr iter: expr ifs: typing.List[expr] class excepthandler(AST): ... class ExceptHandler(excepthandler): type: Optional[expr] name: Optional[expr] body: typing.List[stmt] lineno: int col_offset: int class arguments(AST): args: typing.List[expr] vararg: Optional[_identifier] kwarg: Optional[_identifier] defaults: typing.List[expr] class keyword(AST): arg: _identifier value: expr class alias(AST): name: _identifier asname: Optional[_identifier] mypy-0.761/mypy/typeshed/stdlib/2/_collections.pyi0000644€tŠÔÚ€2›s®0000000270613576752252026367 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_collections' module.""" from typing import Any, Callable, Dict, Generic, Iterator, TypeVar, Optional, Union _K = TypeVar("_K") _V = TypeVar("_V") _T = TypeVar('_T') _T2 = TypeVar('_T2') class defaultdict(Dict[_K, _V]): default_factory: None def __init__(self, __default_factory: Callable[[], _V] = ..., init: Any = ...) -> None: ... def __missing__(self, key: _K) -> _V: ... def __copy__(self: _T) -> _T: ... def copy(self: _T) -> _T: ... 
class deque(Generic[_T]): maxlen: Optional[int] def __init__(self, iterable: Iterator[_T] = ..., maxlen: int = ...) -> None: ... def append(self, x: _T) -> None: ... def appendleft(self, x: _T) -> None: ... def clear(self) -> None: ... def count(self, x: Any) -> int: ... def extend(self, iterable: Iterator[_T]) -> None: ... def extendleft(self, iterable: Iterator[_T]) -> None: ... def pop(self) -> _T: ... def popleft(self) -> _T: ... def remove(self, value: _T) -> None: ... def reverse(self) -> None: ... def rotate(self, n: int = ...) -> None: ... def __contains__(self, o: Any) -> bool: ... def __copy__(self) -> deque[_T]: ... def __getitem__(self, i: int) -> _T: ... def __iadd__(self, other: deque[_T2]) -> deque[Union[_T, _T2]]: ... def __iter__(self) -> Iterator[_T]: ... def __len__(self) -> int: ... def __reversed__(self) -> Iterator[_T]: ... def __setitem__(self, i: int, x: _T) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/_functools.pyi0000644€tŠÔÚ€2›s®0000000120513576752252026056 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_functools' module.""" from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Tuple, overload _T = TypeVar("_T") _S = TypeVar("_S") @overload def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ... @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... class partial(object): func: Callable[..., Any] args: Tuple[Any, ...] keywords: Dict[str, Any] def __init__(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... mypy-0.761/mypy/typeshed/stdlib/2/_hotshot.pyi0000644€tŠÔÚ€2›s®0000000154713576752252025543 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_hotshot' module.""" # This is an autogenerated file. It serves as a starting point # for a more precise manual annotation of this module. 
# Feel free to edit the source below, but remove this header when you do. from typing import Any, List, Tuple, Dict, Generic def coverage(a: str) -> Any: ... def logreader(a: str) -> LogReaderType: ... def profiler(a: str, *args, **kwargs) -> Any: ... def resolution() -> Tuple[Any, ...]: ... class LogReaderType(object): def close(self) -> None: ... def fileno(self) -> int: ... class ProfilerType(object): def addinfo(self, a: str, b: str) -> None: ... def close(self) -> None: ... def fileno(self) -> int: ... def runcall(self, *args, **kwargs) -> Any: ... def runcode(self, a, b, *args, **kwargs) -> Any: ... def start(self) -> None: ... def stop(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/_io.pyi0000644€tŠÔÚ€2›s®0000001573313576752252024464 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, AnyStr, BinaryIO, IO, Text, TextIO, Iterable, Iterator, List, Optional, Type, Tuple, TypeVar, Union from mmap import mmap from types import TracebackType _bytearray_like = Union[bytearray, mmap] DEFAULT_BUFFER_SIZE: int class BlockingIOError(IOError): characters_written: int class UnsupportedOperation(ValueError, IOError): ... _T = TypeVar("_T") class _IOBase(BinaryIO): @property def closed(self) -> bool: ... def _checkClosed(self, msg: Optional[str] = ...) -> None: ... # undocumented def _checkReadable(self) -> None: ... def _checkSeekable(self) -> None: ... def _checkWritable(self) -> None: ... # All these methods are concrete here (you can instantiate this) def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def readable(self) -> bool: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... def __enter__(self: _T) -> _T: ... 
def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[Any]) -> Optional[bool]: ... def __iter__(self: _T) -> _T: ... # The parameter type of writelines[s]() is determined by that of write(): def writelines(self, lines: Iterable[bytes]) -> None: ... # The return type of readline[s]() and next() is determined by that of read(): def readline(self, limit: int = ...) -> bytes: ... def readlines(self, hint: int = ...) -> List[bytes]: ... def next(self) -> bytes: ... # These don't actually exist but we need to pretend that it does # so that this class is concrete. def write(self, s: bytes) -> int: ... def read(self, n: int = ...) -> bytes: ... class _BufferedIOBase(_IOBase): def read1(self, n: int) -> bytes: ... def read(self, size: int = ...) -> bytes: ... def readinto(self, buffer: _bytearray_like) -> int: ... def write(self, s: bytes) -> int: ... def detach(self) -> _IOBase: ... class BufferedRWPair(_BufferedIOBase): def __init__(self, reader: _RawIOBase, writer: _RawIOBase, buffer_size: int = ..., max_buffer_size: int = ...) -> None: ... def peek(self, n: int = ...) -> bytes: ... def __enter__(self) -> BufferedRWPair: ... class BufferedRandom(_BufferedIOBase): mode: str name: str raw: _IOBase def __init__(self, raw: _IOBase, buffer_size: int = ..., max_buffer_size: int = ...) -> None: ... def peek(self, n: int = ...) -> bytes: ... class BufferedReader(_BufferedIOBase): mode: str name: str raw: _IOBase def __init__(self, raw: _IOBase, buffer_size: int = ...) -> None: ... def peek(self, n: int = ...) -> bytes: ... class BufferedWriter(_BufferedIOBase): name: str raw: _IOBase mode: str def __init__(self, raw: _IOBase, buffer_size: int = ..., max_buffer_size: int = ...) -> None: ... class BytesIO(_BufferedIOBase): def __init__(self, initial_bytes: bytes = ...) -> None: ... def __setstate__(self, state: Tuple[Any, ...]) -> None: ... def __getstate__(self) -> Tuple[Any, ...]: ... # BytesIO does not contain a "name" field. 
This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any def getvalue(self) -> bytes: ... def write(self, s: bytes) -> int: ... def writelines(self, lines: Iterable[bytes]) -> None: ... def read1(self, size: int) -> bytes: ... def next(self) -> bytes: ... class _RawIOBase(_IOBase): def readall(self) -> str: ... def read(self, n: int = ...) -> str: ... class FileIO(_RawIOBase, BytesIO): mode: str closefd: bool def __init__(self, file: Union[str, int], mode: str = ..., closefd: bool = ...) -> None: ... def readinto(self, buffer: _bytearray_like) -> int: ... def write(self, pbuf: str) -> int: ... class IncrementalNewlineDecoder(object): newlines: Union[str, unicode] def __init__(self, decoder, translate, z=...) -> None: ... def decode(self, input, final) -> Any: ... def getstate(self) -> Tuple[Any, int]: ... def setstate(self, state: Tuple[Any, int]) -> None: ... def reset(self) -> None: ... # Note: In the actual _io.py, _TextIOBase inherits from _IOBase. class _TextIOBase(TextIO): errors: Optional[str] # TODO: On _TextIOBase, this is always None. But it's unicode/bytes in subclasses. newlines: Union[None, unicode, bytes] encoding: str @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... def _checkReadable(self) -> None: ... def _checkSeekable(self) -> None: ... def _checkWritable(self) -> None: ... def close(self) -> None: ... def detach(self) -> IO[Any]: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def next(self) -> unicode: ... def read(self, size: int = ...) -> unicode: ... def readable(self) -> bool: ... def readline(self, limit: int = ...) -> unicode: ... def readlines(self, hint: int = ...) -> list[unicode]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... 
def writable(self) -> bool: ... def write(self, pbuf: unicode) -> int: ... def writelines(self, lines: Iterable[unicode]) -> None: ... def __enter__(self: _T) -> _T: ... def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[Any]) -> Optional[bool]: ... def __iter__(self: _T) -> _T: ... class StringIO(_TextIOBase): line_buffering: bool def __init__(self, initial_value: Optional[unicode] = ..., newline: Optional[unicode] = ...) -> None: ... def __setstate__(self, state: Tuple[Any, ...]) -> None: ... def __getstate__(self) -> Tuple[Any, ...]: ... # StringIO does not contain a "name" field. This workaround is necessary # to allow StringIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any def getvalue(self) -> unicode: ... class TextIOWrapper(_TextIOBase): name: str line_buffering: bool buffer: BinaryIO _CHUNK_SIZE: int def __init__( self, buffer: IO[Any], encoding: Optional[Text] = ..., errors: Optional[Text] = ..., newline: Optional[Text] = ..., line_buffering: bool = ..., write_through: bool = ..., ) -> None: ... def open(file: Union[str, unicode, int], mode: Text = ..., buffering: int = ..., encoding: Optional[Text] = ..., errors: Optional[Text] = ..., newline: Optional[Text] = ..., closefd: bool = ...) -> IO[Any]: ... mypy-0.761/mypy/typeshed/stdlib/2/_json.pyi0000644€tŠÔÚ€2›s®0000000035113576752252025014 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Tuple, Dict, Generic, Tuple def encode_basestring_ascii(*args, **kwargs) -> str: ... def scanstring(a, b, *args, **kwargs) -> Tuple[Any, ...]: ... class Encoder(object): ... class Scanner(object): ... mypy-0.761/mypy/typeshed/stdlib/2/_md5.pyi0000644€tŠÔÚ€2›s®0000000045413576752252024534 0ustar jukkaDROPBOX\Domain Users00000000000000blocksize: int digest_size: int class MD5Type(object): name: str block_size: int digest_size: int def copy(self) -> MD5Type: ... def digest(self) -> str: ... 
def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... def new(arg: str = ...) -> MD5Type: ... mypy-0.761/mypy/typeshed/stdlib/2/_sha.pyi0000644€tŠÔÚ€2›s®0000000053413576752252024621 0ustar jukkaDROPBOX\Domain Users00000000000000blocksize: int block_size: int digest_size: int class sha(object): # not actually exposed name: str block_size: int digest_size: int digestsize: int def copy(self) -> sha: ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... def new(arg: str = ...) -> sha: ... mypy-0.761/mypy/typeshed/stdlib/2/_sha256.pyi0000644€tŠÔÚ€2›s®0000000117013576752252025053 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional class sha224(object): name: str block_size: int digest_size: int digestsize: int def __init__(self, init: Optional[str]) -> None: ... def copy(self) -> sha224: ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... class sha256(object): name: str block_size: int digest_size: int digestsize: int def __init__(self, init: Optional[str]) -> None: ... def copy(self) -> sha256: ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/_sha512.pyi0000644€tŠÔÚ€2›s®0000000117013576752252025046 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional class sha384(object): name: str block_size: int digest_size: int digestsize: int def __init__(self, init: Optional[str]) -> None: ... def copy(self) -> sha384: ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... class sha512(object): name: str block_size: int digest_size: int digestsize: int def __init__(self, init: Optional[str]) -> None: ... def copy(self) -> sha512: ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2/_socket.pyi0000644€tŠÔÚ€2›s®0000001424513576752252025342 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, Union, IO, Any, Optional, overload AF_APPLETALK: int AF_ASH: int AF_ATMPVC: int AF_ATMSVC: int AF_AX25: int AF_BLUETOOTH: int AF_BRIDGE: int AF_DECnet: int AF_ECONET: int AF_INET: int AF_INET6: int AF_IPX: int AF_IRDA: int AF_KEY: int AF_LLC: int AF_NETBEUI: int AF_NETLINK: int AF_NETROM: int AF_PACKET: int AF_PPPOX: int AF_ROSE: int AF_ROUTE: int AF_SECURITY: int AF_SNA: int AF_TIPC: int AF_UNIX: int AF_UNSPEC: int AF_WANPIPE: int AF_X25: int AI_ADDRCONFIG: int AI_ALL: int AI_CANONNAME: int AI_NUMERICHOST: int AI_NUMERICSERV: int AI_PASSIVE: int AI_V4MAPPED: int BDADDR_ANY: str BDADDR_LOCAL: str BTPROTO_HCI: int BTPROTO_L2CAP: int BTPROTO_RFCOMM: int BTPROTO_SCO: int EAI_ADDRFAMILY: int EAI_AGAIN: int EAI_BADFLAGS: int EAI_FAIL: int EAI_FAMILY: int EAI_MEMORY: int EAI_NODATA: int EAI_NONAME: int EAI_OVERFLOW: int EAI_SERVICE: int EAI_SOCKTYPE: int EAI_SYSTEM: int EBADF: int EINTR: int HCI_DATA_DIR: int HCI_FILTER: int HCI_TIME_STAMP: int INADDR_ALLHOSTS_GROUP: int INADDR_ANY: int INADDR_BROADCAST: int INADDR_LOOPBACK: int INADDR_MAX_LOCAL_GROUP: int INADDR_NONE: int INADDR_UNSPEC_GROUP: int IPPORT_RESERVED: int IPPORT_USERRESERVED: int IPPROTO_AH: int IPPROTO_DSTOPTS: int IPPROTO_EGP: int IPPROTO_ESP: int IPPROTO_FRAGMENT: int IPPROTO_GRE: int IPPROTO_HOPOPTS: int IPPROTO_ICMP: int IPPROTO_ICMPV6: int IPPROTO_IDP: int IPPROTO_IGMP: int IPPROTO_IP: int IPPROTO_IPIP: int IPPROTO_IPV6: int IPPROTO_NONE: int IPPROTO_PIM: int IPPROTO_PUP: int IPPROTO_RAW: int IPPROTO_ROUTING: int IPPROTO_RSVP: int IPPROTO_TCP: int IPPROTO_TP: int IPPROTO_UDP: int IPV6_CHECKSUM: int IPV6_DSTOPTS: int IPV6_HOPLIMIT: int IPV6_HOPOPTS: int IPV6_JOIN_GROUP: int IPV6_LEAVE_GROUP: int IPV6_MULTICAST_HOPS: int IPV6_MULTICAST_IF: int IPV6_MULTICAST_LOOP: int IPV6_NEXTHOP: int IPV6_PKTINFO: int IPV6_RECVDSTOPTS: int IPV6_RECVHOPLIMIT: 
int IPV6_RECVHOPOPTS: int IPV6_RECVPKTINFO: int IPV6_RECVRTHDR: int IPV6_RECVTCLASS: int IPV6_RTHDR: int IPV6_RTHDRDSTOPTS: int IPV6_RTHDR_TYPE_0: int IPV6_TCLASS: int IPV6_UNICAST_HOPS: int IPV6_V6ONLY: int IP_ADD_MEMBERSHIP: int IP_DEFAULT_MULTICAST_LOOP: int IP_DEFAULT_MULTICAST_TTL: int IP_DROP_MEMBERSHIP: int IP_HDRINCL: int IP_MAX_MEMBERSHIPS: int IP_MULTICAST_IF: int IP_MULTICAST_LOOP: int IP_MULTICAST_TTL: int IP_OPTIONS: int IP_RECVOPTS: int IP_RECVRETOPTS: int IP_RETOPTS: int IP_TOS: int IP_TTL: int MSG_CTRUNC: int MSG_DONTROUTE: int MSG_DONTWAIT: int MSG_EOR: int MSG_OOB: int MSG_PEEK: int MSG_TRUNC: int MSG_WAITALL: int MethodType: type NETLINK_DNRTMSG: int NETLINK_FIREWALL: int NETLINK_IP6_FW: int NETLINK_NFLOG: int NETLINK_ROUTE: int NETLINK_USERSOCK: int NETLINK_XFRM: int NI_DGRAM: int NI_MAXHOST: int NI_MAXSERV: int NI_NAMEREQD: int NI_NOFQDN: int NI_NUMERICHOST: int NI_NUMERICSERV: int PACKET_BROADCAST: int PACKET_FASTROUTE: int PACKET_HOST: int PACKET_LOOPBACK: int PACKET_MULTICAST: int PACKET_OTHERHOST: int PACKET_OUTGOING: int PF_PACKET: int SHUT_RD: int SHUT_RDWR: int SHUT_WR: int SOCK_DGRAM: int SOCK_RAW: int SOCK_RDM: int SOCK_SEQPACKET: int SOCK_STREAM: int SOL_HCI: int SOL_IP: int SOL_SOCKET: int SOL_TCP: int SOL_TIPC: int SOL_UDP: int SOMAXCONN: int SO_ACCEPTCONN: int SO_BROADCAST: int SO_DEBUG: int SO_DONTROUTE: int SO_ERROR: int SO_KEEPALIVE: int SO_LINGER: int SO_OOBINLINE: int SO_RCVBUF: int SO_RCVLOWAT: int SO_RCVTIMEO: int SO_REUSEADDR: int SO_REUSEPORT: int SO_SNDBUF: int SO_SNDLOWAT: int SO_SNDTIMEO: int SO_TYPE: int SSL_ERROR_EOF: int SSL_ERROR_INVALID_ERROR_CODE: int SSL_ERROR_SSL: int SSL_ERROR_SYSCALL: int SSL_ERROR_WANT_CONNECT: int SSL_ERROR_WANT_READ: int SSL_ERROR_WANT_WRITE: int SSL_ERROR_WANT_X509_LOOKUP: int SSL_ERROR_ZERO_RETURN: int TCP_CORK: int TCP_DEFER_ACCEPT: int TCP_INFO: int TCP_KEEPCNT: int TCP_KEEPIDLE: int TCP_KEEPINTVL: int TCP_LINGER2: int TCP_MAXSEG: int TCP_NODELAY: int TCP_QUICKACK: int TCP_SYNCNT: int 
TCP_WINDOW_CLAMP: int TIPC_ADDR_ID: int TIPC_ADDR_NAME: int TIPC_ADDR_NAMESEQ: int TIPC_CFG_SRV: int TIPC_CLUSTER_SCOPE: int TIPC_CONN_TIMEOUT: int TIPC_CRITICAL_IMPORTANCE: int TIPC_DEST_DROPPABLE: int TIPC_HIGH_IMPORTANCE: int TIPC_IMPORTANCE: int TIPC_LOW_IMPORTANCE: int TIPC_MEDIUM_IMPORTANCE: int TIPC_NODE_SCOPE: int TIPC_PUBLISHED: int TIPC_SRC_DROPPABLE: int TIPC_SUBSCR_TIMEOUT: int TIPC_SUB_CANCEL: int TIPC_SUB_PORTS: int TIPC_SUB_SERVICE: int TIPC_TOP_SRV: int TIPC_WAIT_FOREVER: int TIPC_WITHDRAWN: int TIPC_ZONE_SCOPE: int # PyCapsule CAPI: Any has_ipv6: bool class error(IOError): ... class gaierror(error): ... class timeout(error): ... class SocketType(object): family: int type: int proto: int timeout: float def __init__(self, family: int = ..., type: int = ..., proto: int = ...) -> None: ... def accept(self) -> Tuple[SocketType, Tuple[Any, ...]]: ... def bind(self, address: Tuple[Any, ...]) -> None: ... def close(self) -> None: ... def connect(self, address: Tuple[Any, ...]) -> None: ... def connect_ex(self, address: Tuple[Any, ...]) -> int: ... def dup(self) -> SocketType: ... def fileno(self) -> int: ... def getpeername(self) -> Tuple[Any, ...]: ... def getsockname(self) -> Tuple[Any, ...]: ... def getsockopt(self, level: int, option: int, buffersize: int = ...) -> str: ... def gettimeout(self) -> float: ... def listen(self, backlog: int) -> None: ... def makefile(self, mode: str = ..., buffersize: int = ...) -> IO[Any]: ... def recv(self, buffersize: int, flags: int = ...) -> str: ... def recv_into(self, buffer: bytearray, nbytes: int = ..., flags: int = ...) -> int: ... def recvfrom(self, buffersize: int, flags: int = ...) -> Tuple[Any, ...]: ... def recvfrom_into(self, buffer: bytearray, nbytes: int = ..., flags: int = ...) -> int: ... def send(self, data: str, flags: int = ...) -> int: ... def sendall(self, data: str, flags: int = ...) -> None: ... @overload def sendto(self, data: str, address: Tuple[Any, ...]) -> int: ... 
@overload def sendto(self, data: str, flags: int, address: Tuple[Any, ...]) -> int: ... def setblocking(self, flag: bool) -> None: ... def setsockopt(self, level: int, option: int, value: Union[int, str]) -> None: ... def settimeout(self, value: Optional[float]) -> None: ... def shutdown(self, flag: int) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/_sre.pyi0000644€tŠÔÚ€2›s®0000000367013576752252024643 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_sre' module.""" from typing import Any, Union, Iterable, Optional, Mapping, Sequence, Dict, List, Tuple, overload CODESIZE: int MAGIC: int MAXREPEAT: long copyright: str class SRE_Match(object): def start(self, group: int = ...) -> int: ... def end(self, group: int = ...) -> int: ... def expand(self, s: str) -> Any: ... @overload def group(self) -> str: ... @overload def group(self, group: int = ...) -> Optional[str]: ... def groupdict(self) -> Dict[int, Optional[str]]: ... def groups(self) -> Tuple[Optional[str], ...]: ... def span(self) -> Tuple[int, int]: ... @property def regs(self) -> Tuple[Tuple[int, int], ...]: ... # undocumented class SRE_Scanner(object): pattern: str def match(self) -> SRE_Match: ... def search(self) -> SRE_Match: ... class SRE_Pattern(object): pattern: str flags: int groups: int groupindex: Mapping[str, int] indexgroup: Sequence[int] def findall(self, source: str, pos: int = ..., endpos: int = ...) -> List[Union[Tuple[Any, ...], str]]: ... def finditer(self, source: str, pos: int = ..., endpos: int = ...) -> Iterable[Union[Tuple[Any, ...], str]]: ... def match(self, pattern, pos: int = ..., endpos: int = ...) -> SRE_Match: ... def scanner(self, s: str, start: int = ..., end: int = ...) -> SRE_Scanner: ... def search(self, pattern, pos: int = ..., endpos: int = ...) -> SRE_Match: ... def split(self, source: str, maxsplit: int = ...) -> List[Optional[str]]: ... def sub(self, repl: str, string: str, count: int = ...) -> Tuple[Any, ...]: ... 
def subn(self, repl: str, string: str, count: int = ...) -> Tuple[Any, ...]: ... def compile( pattern: str, flags: int, code: List[int], groups: int = ..., groupindex: Mapping[str, int] = ..., indexgroup: Sequence[int] = ..., ) -> SRE_Pattern: ... def getcodesize() -> int: ... def getlower(a: int, b: int) -> int: ... mypy-0.761/mypy/typeshed/stdlib/2/_struct.pyi0000644€tŠÔÚ€2›s®0000000145313576752252025373 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_struct' module.""" from typing import Any, AnyStr, Tuple class error(Exception): ... class Struct(object): size: int format: str def __init__(self, fmt: str) -> None: ... def pack_into(self, buffer: bytearray, offset: int, obj: Any) -> None: ... def pack(self, *args) -> str: ... def unpack(self, s: str) -> Tuple[Any, ...]: ... def unpack_from(self, buffer: bytearray, offset: int = ...) -> Tuple[Any, ...]: ... def _clearcache() -> None: ... def calcsize(fmt: str) -> int: ... def pack(fmt: AnyStr, obj: Any) -> str: ... def pack_into(fmt: AnyStr, buffer: bytearray, offset: int, obj: Any) -> None: ... def unpack(fmt: AnyStr, data: str) -> Tuple[Any, ...]: ... def unpack_from(fmt: AnyStr, buffer: bytearray, offset: int = ...) -> Tuple[Any, ...]: ... mypy-0.761/mypy/typeshed/stdlib/2/_symtable.pyi0000644€tŠÔÚ€2›s®0000000125213576752252025664 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Dict CELL: int DEF_BOUND: int DEF_FREE: int DEF_FREE_CLASS: int DEF_GLOBAL: int DEF_IMPORT: int DEF_LOCAL: int DEF_PARAM: int FREE: int GLOBAL_EXPLICIT: int GLOBAL_IMPLICIT: int LOCAL: int OPT_BARE_EXEC: int OPT_EXEC: int OPT_IMPORT_STAR: int SCOPE_MASK: int SCOPE_OFF: int TYPE_CLASS: int TYPE_FUNCTION: int TYPE_MODULE: int USE: int class _symtable_entry(object): ... 
class symtable(object): children: List[_symtable_entry] id: int lineno: int name: str nested: int optimized: int symbols: Dict[str, int] type: int varnames: List[str] def __init__(self, src: str, filename: str, startstr: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/_threading_local.pyi0000644€tŠÔÚ€2›s®0000000061013576752252027160 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/_threading_local.py from typing import Any class _localbase(object): ... class local(_localbase): def __getattribute__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __delattr__(self, name: str) -> None: ... def __del__(self) -> None: ... def _patch(self: local) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/abc.pyi0000644€tŠÔÚ€2›s®0000000217313576752252024435 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Set, Tuple, Type, TypeVar import _weakrefset _FuncT = TypeVar('_FuncT', bound=Callable[..., Any]) # NOTE: mypy has special processing for ABCMeta and abstractmethod. def abstractmethod(funcobj: _FuncT) -> _FuncT: ... class ABCMeta(type): # TODO: FrozenSet __abstractmethods__: Set[Any] _abc_cache: _weakrefset.WeakSet[Any] _abc_invalidation_counter: int _abc_negative_cache: _weakrefset.WeakSet[Any] _abc_negative_cache_version: int _abc_registry: _weakrefset.WeakSet[Any] def __init__(self, name: str, bases: Tuple[type, ...], namespace: Dict[Any, Any]) -> None: ... def __instancecheck__(cls: ABCMeta, instance: Any) -> Any: ... def __subclasscheck__(cls: ABCMeta, subclass: Any) -> Any: ... def _dump_registry(cls: ABCMeta, *args: Any, **kwargs: Any) -> None: ... def register(cls: ABCMeta, subclass: Type[Any]) -> None: ... # TODO: The real abc.abstractproperty inherits from "property". class abstractproperty(object): def __new__(cls, func: Any) -> Any: ... 
__isabstractmethod__: bool doc: Any fdel: Any fget: Any fset: Any mypy-0.761/mypy/typeshed/stdlib/2/ast.pyi0000644€tŠÔÚ€2›s®0000000226113576752252024475 0ustar jukkaDROPBOX\Domain Users00000000000000# Python 2.7 ast # Rename typing to _typing, as not to conflict with typing imported # from _ast below when loaded in an unorthodox way by the Dropbox # internal Bazel integration. import typing as _typing from typing import Any, Iterator, Optional, Union from _ast import * from _ast import AST, Module def parse(source: Union[str, unicode], filename: Union[str, unicode] = ..., mode: Union[str, unicode] = ...) -> Module: ... def copy_location(new_node: AST, old_node: AST) -> AST: ... def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: AST) -> AST: ... def get_docstring(node: AST, clean: bool = ...) -> str: ... def increment_lineno(node: AST, n: int = ...) -> AST: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[_typing.Tuple[str, Any]]: ... def literal_eval(node_or_string: Union[str, unicode, AST]) -> Any: ... def walk(node: AST) -> Iterator[AST]: ... class NodeVisitor(): def visit(self, node: AST) -> Any: ... def generic_visit(self, node: AST) -> Any: ... class NodeTransformer(NodeVisitor): def generic_visit(self, node: AST) -> Optional[AST]: ... mypy-0.761/mypy/typeshed/stdlib/2/atexit.pyi0000644€tŠÔÚ€2›s®0000000016513576752252025205 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, Any _FT = TypeVar('_FT') def register(func: _FT, *args: Any, **kargs: Any) -> _FT: ... mypy-0.761/mypy/typeshed/stdlib/2/cPickle.pyi0000644€tŠÔÚ€2›s®0000000144113576752252025257 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, List HIGHEST_PROTOCOL: int compatible_formats: List[str] format_version: str class Pickler: def __init__(self, file: IO[str], protocol: int = ...) -> None: ... def dump(self, obj: Any) -> None: ... 
def clear_memo(self) -> None: ... class Unpickler: def __init__(self, file: IO[str]) -> None: ... def load(self) -> Any: ... def noload(self) -> Any: ... def dump(obj: Any, file: IO[str], protocol: int = ...) -> None: ... def dumps(obj: Any, protocol: int = ...) -> str: ... def load(file: IO[str]) -> Any: ... def loads(str: str) -> Any: ... class PickleError(Exception): ... class UnpicklingError(PickleError): ... class BadPickleGet(UnpicklingError): ... class PicklingError(PickleError): ... class UnpickleableError(PicklingError): ... mypy-0.761/mypy/typeshed/stdlib/2/cStringIO.pyi0000644€tŠÔÚ€2›s®0000000376113576752252025555 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for cStringIO (Python 2.7) # See https://docs.python.org/2/library/stringio.html from abc import ABCMeta from typing import overload, IO, List, Iterable, Iterator, Optional, Union from types import TracebackType # TODO the typing.IO[] generics should be split into input and output. # This class isn't actually abstract, but you can't instantiate it # directly, so we might as well treat it as abstract in the stub. class InputType(IO[str], Iterator[str], metaclass=ABCMeta): def getvalue(self) -> str: ... def close(self) -> None: ... @property def closed(self) -> bool: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def read(self, size: int = ...) -> str: ... def readline(self, size: int = ...) -> str: ... def readlines(self, hint: int = ...) -> List[str]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def __iter__(self) -> InputType: ... def next(self) -> str: ... def reset(self) -> None: ... class OutputType(IO[str], Iterator[str], metaclass=ABCMeta): @property def softspace(self) -> int: ... def getvalue(self) -> str: ... def close(self) -> None: ... @property def closed(self) -> bool: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def read(self, size: int = ...) 
-> str: ... def readline(self, size: int = ...) -> str: ... def readlines(self, hint: int = ...) -> List[str]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def __iter__(self) -> OutputType: ... def next(self) -> str: ... def reset(self) -> None: ... def write(self, b: Union[str, unicode]) -> int: ... def writelines(self, lines: Iterable[Union[str, unicode]]) -> None: ... @overload def StringIO() -> OutputType: ... @overload def StringIO(s: str) -> InputType: ... mypy-0.761/mypy/typeshed/stdlib/2/collections.pyi0000644€tŠÔÚ€2›s®0000001157213576752252026231 0ustar jukkaDROPBOX\Domain Users00000000000000# These are not exported. from typing import Any, Dict, Generic, TypeVar, Tuple, overload, Type, Optional, List, Union, Reversible # These are exported. from typing import ( Callable as Callable, Container as Container, Hashable as Hashable, ItemsView as ItemsView, Iterable as Iterable, Iterator as Iterator, KeysView as KeysView, Mapping as Mapping, MappingView as MappingView, MutableMapping as MutableMapping, MutableSequence as MutableSequence, MutableSet as MutableSet, Sequence as Sequence, AbstractSet as Set, Sized as Sized, ValuesView as ValuesView, ) _S = TypeVar('_S') _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') # namedtuple is special-cased in the type checker; the initializer is ignored. def namedtuple(typename: Union[str, unicode], field_names: Union[str, unicode, Iterable[Union[str, unicode]]], verbose: bool = ..., rename: bool = ...) -> Type[Tuple[Any, ...]]: ... class deque(Sized, Iterable[_T], Reversible[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ..., maxlen: int = ...) -> None: ... @property def maxlen(self) -> Optional[int]: ... def append(self, x: _T) -> None: ... def appendleft(self, x: _T) -> None: ... def clear(self) -> None: ... def count(self, x: _T) -> int: ... 
def extend(self, iterable: Iterable[_T]) -> None: ... def extendleft(self, iterable: Iterable[_T]) -> None: ... def pop(self) -> _T: ... def popleft(self) -> _T: ... def remove(self, value: _T) -> None: ... def reverse(self) -> None: ... def rotate(self, n: int) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __hash__(self) -> int: ... def __getitem__(self, i: int) -> _T: ... def __setitem__(self, i: int, x: _T) -> None: ... def __contains__(self, o: _T) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... def __iadd__(self: _S, iterable: Iterable[_T]) -> _S: ... class Counter(Dict[_T, int], Generic[_T]): @overload def __init__(self, **kwargs: int) -> None: ... @overload def __init__(self, mapping: Mapping[_T, int]) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... def copy(self: _S) -> _S: ... def elements(self) -> Iterator[_T]: ... def most_common(self, n: Optional[int] = ...) -> List[Tuple[_T, int]]: ... @overload def subtract(self, __mapping: Mapping[_T, int]) -> None: ... @overload def subtract(self, iterable: Iterable[_T]) -> None: ... # The Iterable[Tuple[...]] argument type is not actually desirable # (the tuples will be added as keys, breaking type safety) but # it's included so that the signature is compatible with # Dict.update. Not sure if we should use '# type: ignore' instead # and omit the type from the union. @overload def update(self, __m: Mapping[_T, int], **kwargs: int) -> None: ... @overload def update(self, __m: Union[Iterable[_T], Iterable[Tuple[_T, int]]], **kwargs: int) -> None: ... @overload def update(self, **kwargs: int) -> None: ... def __add__(self, other: Counter[_T]) -> Counter[_T]: ... def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... def __and__(self, other: Counter[_T]) -> Counter[_T]: ... def __or__(self, other: Counter[_T]) -> Counter[_T]: ... def __iadd__(self, other: Counter[_T]) -> Counter[_T]: ... 
def __isub__(self, other: Counter[_T]) -> Counter[_T]: ... def __iand__(self, other: Counter[_T]) -> Counter[_T]: ... def __ior__(self, other: Counter[_T]) -> Counter[_T]: ... class OrderedDict(Dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ... def copy(self: _S) -> _S: ... def __reversed__(self) -> Iterator[_KT]: ... class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]): default_factory: Callable[[], _VT] @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], **kwargs: _VT) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], map: Mapping[_KT, _VT]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], iterable: Iterable[Tuple[_KT, _VT]]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... def __missing__(self, key: _KT) -> _VT: ... def copy(self: _S) -> _S: ... mypy-0.761/mypy/typeshed/stdlib/2/commands.pyi0000644€tŠÔÚ€2›s®0000000051313576752252025505 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, AnyStr, Text, Tuple def getstatus(file: Text) -> str: ... def getoutput(cmd: Text) -> str: ... def getstatusoutput(cmd: Text) -> Tuple[int, str]: ... @overload def mk2arg(head: bytes, x: bytes) -> bytes: ... @overload def mk2arg(head: Text, x: Text) -> Text: ... def mkarg(x: AnyStr) -> AnyStr: ... 
mypy-0.761/mypy/typeshed/stdlib/2/compileall.pyi0000644€tŠÔÚ€2›s®0000000122413576752252026025 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for compileall (Python 2) from typing import Any, Optional, Pattern, Union _Path = Union[str, bytes] # rx can be any object with a 'search' method; once we have Protocols we can change the type def compile_dir( dir: _Path, maxlevels: int = ..., ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern[Any]] = ..., quiet: int = ..., ) -> int: ... def compile_file( fullname: _Path, ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern[Any]] = ..., quiet: int = ..., ) -> int: ... def compile_path(skip_curdir: bool = ..., maxlevels: int = ..., force: bool = ..., quiet: int = ...) -> int: ... mypy-0.761/mypy/typeshed/stdlib/2/cookielib.pyi0000644€tŠÔÚ€2›s®0000001065113576752252025650 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Cookie: version: Any name: Any value: Any port: Any port_specified: Any domain: Any domain_specified: Any domain_initial_dot: Any path: Any path_specified: Any secure: Any expires: Any discard: Any comment: Any comment_url: Any rfc2109: Any def __init__(self, version, name, value, port, port_specified, domain, domain_specified, domain_initial_dot, path, path_specified, secure, expires, discard, comment, comment_url, rest, rfc2109: bool = ...): ... def has_nonstandard_attr(self, name): ... def get_nonstandard_attr(self, name, default: Optional[Any] = ...): ... def set_nonstandard_attr(self, name, value): ... def is_expired(self, now: Optional[Any] = ...): ... class CookiePolicy: def set_ok(self, cookie, request): ... def return_ok(self, cookie, request): ... def domain_return_ok(self, domain, request): ... def path_return_ok(self, path, request): ... 
class DefaultCookiePolicy(CookiePolicy): DomainStrictNoDots: Any DomainStrictNonDomain: Any DomainRFC2965Match: Any DomainLiberal: Any DomainStrict: Any netscape: Any rfc2965: Any rfc2109_as_netscape: Any hide_cookie2: Any strict_domain: Any strict_rfc2965_unverifiable: Any strict_ns_unverifiable: Any strict_ns_domain: Any strict_ns_set_initial_dollar: Any strict_ns_set_path: Any def __init__(self, blocked_domains: Optional[Any] = ..., allowed_domains: Optional[Any] = ..., netscape: bool = ..., rfc2965: bool = ..., rfc2109_as_netscape: Optional[Any] = ..., hide_cookie2: bool = ..., strict_domain: bool = ..., strict_rfc2965_unverifiable: bool = ..., strict_ns_unverifiable: bool = ..., strict_ns_domain=..., strict_ns_set_initial_dollar: bool = ..., strict_ns_set_path: bool = ...): ... def blocked_domains(self): ... def set_blocked_domains(self, blocked_domains): ... def is_blocked(self, domain): ... def allowed_domains(self): ... def set_allowed_domains(self, allowed_domains): ... def is_not_allowed(self, domain): ... def set_ok(self, cookie, request): ... def set_ok_version(self, cookie, request): ... def set_ok_verifiability(self, cookie, request): ... def set_ok_name(self, cookie, request): ... def set_ok_path(self, cookie, request): ... def set_ok_domain(self, cookie, request): ... def set_ok_port(self, cookie, request): ... def return_ok(self, cookie, request): ... def return_ok_version(self, cookie, request): ... def return_ok_verifiability(self, cookie, request): ... def return_ok_secure(self, cookie, request): ... def return_ok_expires(self, cookie, request): ... def return_ok_port(self, cookie, request): ... def return_ok_domain(self, cookie, request): ... def domain_return_ok(self, domain, request): ... def path_return_ok(self, path, request): ... class Absent: ... class CookieJar: non_word_re: Any quote_re: Any strict_domain_re: Any domain_re: Any dots_re: Any magic_re: Any def __init__(self, policy: Optional[Any] = ...): ... 
def set_policy(self, policy): ... def add_cookie_header(self, request): ... def make_cookies(self, response, request): ... def set_cookie_if_ok(self, cookie, request): ... def set_cookie(self, cookie): ... def extract_cookies(self, response, request): ... def clear(self, domain: Optional[Any] = ..., path: Optional[Any] = ..., name: Optional[Any] = ...): ... def clear_session_cookies(self): ... def clear_expired_cookies(self): ... def __iter__(self): ... def __len__(self): ... class LoadError(IOError): ... class FileCookieJar(CookieJar): filename: Any delayload: Any def __init__(self, filename: Optional[Any] = ..., delayload: bool = ..., policy: Optional[Any] = ...): ... def save(self, filename: Optional[Any] = ..., ignore_discard: bool = ..., ignore_expires: bool = ...): ... def load(self, filename: Optional[Any] = ..., ignore_discard: bool = ..., ignore_expires: bool = ...): ... def revert(self, filename: Optional[Any] = ..., ignore_discard: bool = ..., ignore_expires: bool = ...): ... class LWPCookieJar(FileCookieJar): def as_lwp_str(self, ignore_discard: bool = ..., ignore_expires: bool = ...) -> str: ... # undocumented MozillaCookieJar = FileCookieJar def lwp_cookie_str(cookie: Cookie) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/copy_reg.pyi0000644€tŠÔÚ€2›s®0000000132613576752252025516 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import TypeVar, Callable, Union, Tuple, Any, Optional, SupportsInt, Hashable, List _Type = TypeVar("_Type", bound=type) _Reduce = Union[Tuple[Callable[..., _Type], Tuple[Any, ...]], Tuple[Callable[..., _Type], Tuple[Any, ...], Optional[Any]]] __all__: List[str] def pickle(ob_type: _Type, pickle_function: Callable[[_Type], Union[str, _Reduce[_Type]]], constructor_ob: Optional[Callable[[_Reduce[_Type]], _Type]] = ...) -> None: ... def constructor(object: Callable[[_Reduce[_Type]], _Type]) -> None: ... def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: ... 
def remove_extension(module: Hashable, name: Hashable, code: int) -> None: ... def clear_extension_cache() -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/dircache.pyi0000644€tŠÔÚ€2›s®0000000052313576752252025447 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/dircache.py from typing import List, MutableSequence, Text, Union def reset() -> None: ... def listdir(path: Text) -> List[str]: ... opendir = listdir def annotate(head: Text, list: Union[MutableSequence[str], MutableSequence[Text], MutableSequence[Union[str, Text]]]) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/distutils/0000755€tŠÔÚ€2›s®0000000000013576752267025214 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/distutils/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252027456 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/distutils/emxccompiler.pyi0000644€tŠÔÚ€2›s®0000000016413576752252030421 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for emxccompiler from distutils.unixccompiler import UnixCCompiler class EMXCCompiler(UnixCCompiler): ... mypy-0.761/mypy/typeshed/stdlib/2/dummy_thread.pyi0000644€tŠÔÚ€2›s®0000000143213576752252026367 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, NoReturn, Optional, Tuple class error(Exception): def __init__(self, *args: Any) -> None: ... def start_new_thread(function: Callable[..., Any], args: Tuple[Any, ...], kwargs: Dict[str, Any] = ...) -> None: ... def exit() -> NoReturn: ... def get_ident() -> int: ... def allocate_lock() -> LockType: ... def stack_size(size: Optional[int] = ...) -> int: ... class LockType(object): locked_status: bool def __init__(self) -> None: ... def acquire(self, waitflag: Optional[bool] = ...) -> bool: ... def __enter__(self, waitflag: Optional[bool] = ...) -> bool: ... def __exit__(self, typ: Any, val: Any, tb: Any) -> None: ... def release(self) -> bool: ... 
def locked(self) -> bool: ... def interrupt_main() -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/0000755€tŠÔÚ€2›s®0000000000013576752267024257 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/email/MIMEText.pyi0000644€tŠÔÚ€2›s®0000000023713576752252026372 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.nonmultipart import MIMENonMultipart class MIMEText(MIMENonMultipart): def __init__(self, _text, _subtype=..., _charset=...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/__init__.pyi0000644€tŠÔÚ€2›s®0000000041613576752252026534 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import IO, Any, AnyStr def message_from_string(s: AnyStr, *args, **kwargs): ... def message_from_bytes(s: str, *args, **kwargs): ... def message_from_file(fp: IO[AnyStr], *args, **kwargs): ... def message_from_binary_file(fp: IO[str], *args, **kwargs): ... mypy-0.761/mypy/typeshed/stdlib/2/email/_parseaddr.pyi0000644€tŠÔÚ€2›s®0000000206013576752252027076 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional def parsedate_tz(data): ... def parsedate(data): ... def mktime_tz(data): ... def quote(str): ... class AddrlistClass: specials: Any pos: Any LWS: Any CR: Any FWS: Any atomends: Any phraseends: Any field: Any commentlist: Any def __init__(self, field): ... def gotonext(self): ... def getaddrlist(self): ... def getaddress(self): ... def getrouteaddr(self): ... def getaddrspec(self): ... def getdomain(self): ... def getdelimited(self, beginchar, endchars, allowcomments: bool = ...): ... def getquote(self): ... def getcomment(self): ... def getdomainliteral(self): ... def getatom(self, atomends: Optional[Any] = ...): ... def getphraselist(self): ... class AddressList(AddrlistClass): addresslist: Any def __init__(self, field): ... def __len__(self): ... def __add__(self, other): ... def __iadd__(self, other): ... def __sub__(self, other): ... def __isub__(self, other): ... 
def __getitem__(self, index): ... mypy-0.761/mypy/typeshed/stdlib/2/email/base64mime.pyi0000644€tŠÔÚ€2›s®0000000045413576752252026733 0ustar jukkaDROPBOX\Domain Users00000000000000def base64_len(s: bytes) -> int: ... def header_encode(header, charset=..., keep_eols=..., maxlinelen=..., eol=...): ... def encode(s, binary=..., maxlinelen=..., eol=...): ... body_encode = encode encodestring = encode def decode(s, convert_eols=...): ... body_decode = decode decodestring = decode mypy-0.761/mypy/typeshed/stdlib/2/email/charset.pyi0000644€tŠÔÚ€2›s®0000000160613576752252026430 0ustar jukkaDROPBOX\Domain Users00000000000000def add_charset(charset, header_enc=..., body_enc=..., output_charset=...) -> None: ... def add_alias(alias, canonical) -> None: ... def add_codec(charset, codecname) -> None: ... QP: int # undocumented BASE64: int # undocumented SHORTEST: int # undocumented class Charset: input_charset = ... header_encoding = ... body_encoding = ... output_charset = ... input_codec = ... output_codec = ... def __init__(self, input_charset=...) -> None: ... def __eq__(self, other): ... def __ne__(self, other): ... def get_body_encoding(self): ... def convert(self, s): ... def to_splittable(self, s): ... def from_splittable(self, ustr, to_output: bool = ...): ... def get_output_charset(self): ... def encoded_header_len(self, s): ... def header_encode(self, s, convert: bool = ...): ... def body_encode(self, s, convert: bool = ...): ... mypy-0.761/mypy/typeshed/stdlib/2/email/encoders.pyi0000644€tŠÔÚ€2›s®0000000021713576752252026576 0ustar jukkaDROPBOX\Domain Users00000000000000def encode_base64(msg) -> None: ... def encode_quopri(msg) -> None: ... def encode_7or8bit(msg) -> None: ... def encode_noop(msg) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/feedparser.pyi0000644€tŠÔÚ€2›s®0000000103113576752252027107 0ustar jukkaDROPBOX\Domain Users00000000000000class BufferedSubFile: def __init__(self) -> None: ... def push_eof_matcher(self, pred) -> None: ... 
def pop_eof_matcher(self): ... def close(self) -> None: ... def readline(self): ... def unreadline(self, line) -> None: ... def push(self, data): ... def pushlines(self, lines) -> None: ... def is_closed(self): ... def __iter__(self): ... def next(self): ... class FeedParser: def __init__(self, _factory=...) -> None: ... def feed(self, data) -> None: ... def close(self): ... mypy-0.761/mypy/typeshed/stdlib/2/email/generator.pyi0000644€tŠÔÚ€2›s®0000000057213576752252026766 0ustar jukkaDROPBOX\Domain Users00000000000000class Generator: def __init__(self, outfp, mangle_from_: bool = ..., maxheaderlen: int = ...) -> None: ... def write(self, s) -> None: ... def flatten(self, msg, unixfrom: bool = ...) -> None: ... def clone(self, fp): ... class DecodedGenerator(Generator): def __init__(self, outfp, mangle_from_: bool = ..., maxheaderlen: int = ..., fmt=...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/header.pyi0000644€tŠÔÚ€2›s®0000000073213576752252026226 0ustar jukkaDROPBOX\Domain Users00000000000000def decode_header(header): ... def make_header(decoded_seq, maxlinelen=..., header_name=..., continuation_ws=...): ... class Header: def __init__(self, s=..., charset=..., maxlinelen=..., header_name=..., continuation_ws=..., errors=...) -> None: ... def __unicode__(self): ... def __eq__(self, other): ... def __ne__(self, other): ... def append(self, s, charset=..., errors=...) -> None: ... def encode(self, splitchars=...): ... mypy-0.761/mypy/typeshed/stdlib/2/email/iterators.pyi0000644€tŠÔÚ€2›s®0000000040013576752252027002 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Generator def walk(self) -> Generator[Any, Any, Any]: ... def body_line_iterator(msg, decode: bool = ...) -> Generator[Any, Any, Any]: ... def typed_subpart_iterator(msg, maintype=..., subtype=...) -> Generator[Any, Any, Any]: ... 
mypy-0.761/mypy/typeshed/stdlib/2/email/message.pyi0000644€tŠÔÚ€2›s®0000000363613576752252026430 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Generator class Message: preamble = ... epilogue = ... defects = ... def __init__(self): ... def as_string(self, unixfrom=...): ... def is_multipart(self) -> bool: ... def set_unixfrom(self, unixfrom) -> None: ... def get_unixfrom(self): ... def attach(self, payload) -> None: ... def get_payload(self, i=..., decode: bool = ...): ... def set_payload(self, payload, charset=...) -> None: ... def set_charset(self, charset): ... def get_charset(self): ... def __len__(self): ... def __getitem__(self, name): ... def __setitem__(self, name, val) -> None: ... def __delitem__(self, name) -> None: ... def __contains__(self, name): ... def has_key(self, name) -> bool: ... def keys(self): ... def values(self): ... def items(self): ... def get(self, name, failobj=...): ... def get_all(self, name, failobj=...): ... def add_header(self, _name, _value, **_params) -> None: ... def replace_header(self, _name, _value) -> None: ... def get_content_type(self): ... def get_content_maintype(self): ... def get_content_subtype(self): ... def get_default_type(self): ... def set_default_type(self, ctype) -> None: ... def get_params(self, failobj=..., header=..., unquote: bool = ...): ... def get_param(self, param, failobj=..., header=..., unquote: bool = ...): ... def set_param(self, param, value, header=..., requote: bool = ..., charset=..., language=...) -> None: ... def del_param(self, param, header=..., requote: bool = ...): ... def set_type(self, type, header=..., requote: bool = ...): ... def get_filename(self, failobj=...): ... def get_boundary(self, failobj=...): ... def set_boundary(self, boundary) -> None: ... def get_content_charset(self, failobj=...): ... def get_charsets(self, failobj=...): ... def walk(self) -> Generator[Any, Any, Any]: ... 
mypy-0.761/mypy/typeshed/stdlib/2/email/mime/0000755€tŠÔÚ€2›s®0000000000013576752267025206 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/email/mime/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252027450 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/email/mime/application.pyi0000644€tŠÔÚ€2›s®0000000065313576752252030232 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.application from typing import Callable, Optional, Tuple, Union from email.mime.nonmultipart import MIMENonMultipart _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEApplication(MIMENonMultipart): def __init__(self, _data: bytes, _subtype: str = ..., _encoder: Callable[[MIMEApplication], None] = ..., **_params: _ParamsType) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/mime/audio.pyi0000644€tŠÔÚ€2›s®0000000026113576752252027023 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.nonmultipart import MIMENonMultipart class MIMEAudio(MIMENonMultipart): def __init__(self, _audiodata, _subtype=..., _encoder=..., **_params) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/mime/base.pyi0000644€tŠÔÚ€2›s®0000000020013576752252026625 0ustar jukkaDROPBOX\Domain Users00000000000000from email import message class MIMEBase(message.Message): def __init__(self, _maintype, _subtype, **_params) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/mime/image.pyi0000644€tŠÔÚ€2›s®0000000026113576752252027004 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.nonmultipart import MIMENonMultipart class MIMEImage(MIMENonMultipart): def __init__(self, _imagedata, _subtype=..., _encoder=..., **_params) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2/email/mime/message.pyi0000644€tŠÔÚ€2›s®0000000022413576752252027345 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.nonmultipart import MIMENonMultipart class MIMEMessage(MIMENonMultipart): def __init__(self, _msg, _subtype=...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/mime/multipart.pyi0000644€tŠÔÚ€2›s®0000000023713576752252027746 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.base import MIMEBase class MIMEMultipart(MIMEBase): def __init__(self, _subtype=..., boundary=..., _subparts=..., **_params) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/mime/nonmultipart.pyi0000644€tŠÔÚ€2›s®0000000015313576752252030456 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.base import MIMEBase class MIMENonMultipart(MIMEBase): def attach(self, payload): ... mypy-0.761/mypy/typeshed/stdlib/2/email/mime/text.pyi0000644€tŠÔÚ€2›s®0000000023713576752252026711 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.nonmultipart import MIMENonMultipart class MIMEText(MIMENonMultipart): def __init__(self, _text, _subtype=..., _charset=...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/email/parser.pyi0000644€tŠÔÚ€2›s®0000000063713576752252026276 0ustar jukkaDROPBOX\Domain Users00000000000000from .feedparser import FeedParser as FeedParser # not in __all__ but listed in documentation class Parser: def __init__(self, *args, **kws) -> None: ... def parse(self, fp, headersonly: bool = ...): ... def parsestr(self, text, headersonly: bool = ...): ... class HeaderParser(Parser): def parse(self, fp, headersonly: bool = ...): ... def parsestr(self, text, headersonly: bool = ...): ... mypy-0.761/mypy/typeshed/stdlib/2/email/quoprimime.pyi0000644€tŠÔÚ€2›s®0000000075213576752252027167 0ustar jukkaDROPBOX\Domain Users00000000000000def header_quopri_check(c): ... def body_quopri_check(c): ... def header_quopri_len(s): ... def body_quopri_len(str): ... def unquote(s): ... def quote(c): ... 
def header_encode(header, charset: str = ..., keep_eols: bool = ..., maxlinelen: int = ..., eol=...): ... def encode(body, binary: bool = ..., maxlinelen: int = ..., eol=...): ... body_encode = encode encodestring = encode def decode(encoded, eol=...): ... body_decode = decode decodestring = decode def header_decode(s): ... mypy-0.761/mypy/typeshed/stdlib/2/email/utils.pyi0000644€tŠÔÚ€2›s®0000000146713576752252026144 0ustar jukkaDROPBOX\Domain Users00000000000000from email._parseaddr import AddressList as _AddressList from email._parseaddr import mktime_tz as mktime_tz from email._parseaddr import parsedate as _parsedate from email._parseaddr import parsedate_tz as _parsedate_tz from quopri import decodestring as _qdecode from typing import Optional, Any def formataddr(pair): ... def getaddresses(fieldvalues): ... def formatdate(timeval: Optional[Any] = ..., localtime: bool = ..., usegmt: bool = ...): ... def make_msgid(idstring: Optional[Any] = ...): ... def parsedate(data): ... def parsedate_tz(data): ... def parseaddr(addr): ... def unquote(str): ... def decode_rfc2231(s): ... def encode_rfc2231(s, charset: Optional[Any] = ..., language: Optional[Any] = ...): ... def decode_params(params): ... def collapse_rfc2231_value(value, errors=..., fallback_charset=...): ... mypy-0.761/mypy/typeshed/stdlib/2/encodings/0000755€tŠÔÚ€2›s®0000000000013576752267025141 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/encodings/__init__.pyi0000644€tŠÔÚ€2›s®0000000013613576752252027415 0ustar jukkaDROPBOX\Domain Users00000000000000import codecs import typing def search_function(encoding: str) -> codecs.CodecInfo: ... mypy-0.761/mypy/typeshed/stdlib/2/encodings/utf_8.pyi0000644€tŠÔÚ€2›s®0000000107513576752252026706 0ustar jukkaDROPBOX\Domain Users00000000000000import codecs from typing import Text, Tuple class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input: Text, final: bool = ...) -> bytes: ... 
class IncrementalDecoder(codecs.BufferedIncrementalDecoder): def _buffer_decode(self, input: bytes, errors: str, final: bool) -> Tuple[Text, int]: ... class StreamWriter(codecs.StreamWriter): ... class StreamReader(codecs.StreamReader): ... def getregentry() -> codecs.CodecInfo: ... def encode(input: Text, errors: Text = ...) -> bytes: ... def decode(input: bytes, errors: Text = ...) -> Text: ... mypy-0.761/mypy/typeshed/stdlib/2/exceptions.pyi0000644€tŠÔÚ€2›s®0000000512013576752252026064 0ustar jukkaDROPBOX\Domain Users00000000000000from __builtin__ import ArithmeticError as ArithmeticError from __builtin__ import AssertionError as AssertionError from __builtin__ import AttributeError as AttributeError from __builtin__ import BaseException as BaseException from __builtin__ import BufferError as BufferError from __builtin__ import BytesWarning as BytesWarning from __builtin__ import DeprecationWarning as DeprecationWarning from __builtin__ import EOFError as EOFError from __builtin__ import EnvironmentError as EnvironmentError from __builtin__ import Exception as Exception from __builtin__ import FloatingPointError as FloatingPointError from __builtin__ import FutureWarning as FutureWarning from __builtin__ import GeneratorExit as GeneratorExit from __builtin__ import IOError as IOError from __builtin__ import ImportError as ImportError from __builtin__ import ImportWarning as ImportWarning from __builtin__ import IndentationError as IndentationError from __builtin__ import IndexError as IndexError from __builtin__ import KeyError as KeyError from __builtin__ import KeyboardInterrupt as KeyboardInterrupt from __builtin__ import LookupError as LookupError from __builtin__ import MemoryError as MemoryError from __builtin__ import NameError as NameError from __builtin__ import NotImplementedError as NotImplementedError from __builtin__ import OSError as OSError from __builtin__ import OverflowError as OverflowError from __builtin__ import PendingDeprecationWarning as 
PendingDeprecationWarning from __builtin__ import ReferenceError as ReferenceError from __builtin__ import RuntimeError as RuntimeError from __builtin__ import RuntimeWarning as RuntimeWarning from __builtin__ import StandardError as StandardError from __builtin__ import StopIteration as StopIteration from __builtin__ import SyntaxError as SyntaxError from __builtin__ import SyntaxWarning as SyntaxWarning from __builtin__ import SystemError as SystemError from __builtin__ import SystemExit as SystemExit from __builtin__ import TabError as TabError from __builtin__ import TypeError as TypeError from __builtin__ import UnboundLocalError as UnboundLocalError from __builtin__ import UnicodeError as UnicodeError from __builtin__ import UnicodeDecodeError as UnicodeDecodeError from __builtin__ import UnicodeEncodeError as UnicodeEncodeError from __builtin__ import UnicodeTranslateError as UnicodeTranslateError from __builtin__ import UnicodeWarning as UnicodeWarning from __builtin__ import UserWarning as UserWarning from __builtin__ import ValueError as ValueError from __builtin__ import Warning as Warning from __builtin__ import ZeroDivisionError as ZeroDivisionError mypy-0.761/mypy/typeshed/stdlib/2/fcntl.pyi0000644€tŠÔÚ€2›s®0000000303113576752252025010 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, IO import io FASYNC: int FD_CLOEXEC: int DN_ACCESS: int DN_ATTRIB: int DN_CREATE: int DN_DELETE: int DN_MODIFY: int DN_MULTISHOT: int DN_RENAME: int F_DUPFD: int F_EXLCK: int F_GETFD: int F_GETFL: int F_GETLEASE: int F_GETLK: int F_GETLK64: int F_GETOWN: int F_GETSIG: int F_NOTIFY: int F_RDLCK: int F_SETFD: int F_SETFL: int F_SETLEASE: int F_SETLK: int F_SETLK64: int F_SETLKW: int F_SETLKW64: int F_SETOWN: int F_SETSIG: int F_SHLCK: int F_UNLCK: int F_WRLCK: int I_ATMARK: int I_CANPUT: int I_CKBAND: int I_FDINSERT: int I_FIND: int I_FLUSH: int I_FLUSHBAND: int I_GETBAND: int I_GETCLTIME: int I_GETSIG: int I_GRDOPT: int I_GWROPT: int I_LINK: int 
I_LIST: int I_LOOK: int I_NREAD: int I_PEEK: int I_PLINK: int I_POP: int I_PUNLINK: int I_PUSH: int I_RECVFD: int I_SENDFD: int I_SETCLTIME: int I_SETSIG: int I_SRDOPT: int I_STR: int I_SWROPT: int I_UNLINK: int LOCK_EX: int LOCK_MAND: int LOCK_NB: int LOCK_READ: int LOCK_RW: int LOCK_SH: int LOCK_UN: int LOCK_WRITE: int _ANYFILE = Union[int, IO] # TODO All these return either int or bytes depending on the value of # cmd (not on the type of arg). def fcntl(fd: _ANYFILE, op: int, arg: Union[int, bytes] = ...) -> Any: ... # TODO: arg: int or read-only buffer interface or read-write buffer interface def ioctl(fd: _ANYFILE, op: int, arg: Union[int, bytes] = ..., mutate_flag: bool = ...) -> Any: ... def flock(fd: _ANYFILE, op: int) -> None: ... def lockf(fd: _ANYFILE, op: int, length: int = ..., start: int = ..., whence: int = ...) -> Any: ... mypy-0.761/mypy/typeshed/stdlib/2/fnmatch.pyi0000644€tŠÔÚ€2›s®0000000053413576752252025327 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr, Iterable, List, Union _EitherStr = Union[str, unicode] def fnmatch(filename: _EitherStr, pattern: _EitherStr) -> bool: ... def fnmatchcase(filename: _EitherStr, pattern: _EitherStr) -> bool: ... def filter(names: Iterable[AnyStr], pattern: _EitherStr) -> List[AnyStr]: ... def translate(pattern: AnyStr) -> AnyStr: ... mypy-0.761/mypy/typeshed/stdlib/2/functools.pyi0000644€tŠÔÚ€2›s®0000000235613576752252025727 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for functools (Python 2.7) # NOTE: These are incomplete! from abc import ABCMeta, abstractmethod from typing import Any, Callable, Generic, Dict, Iterable, Optional, Sequence, Tuple, TypeVar, overload _AnyCallable = Callable[..., Any] _T = TypeVar("_T") _S = TypeVar("_S") @overload def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ... @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... 
WRAPPER_ASSIGNMENTS: Sequence[str] WRAPPER_UPDATES: Sequence[str] def update_wrapper(wrapper: _AnyCallable, wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> _AnyCallable: ... def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_AnyCallable], _AnyCallable]: ... def total_ordering(cls: type) -> type: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], Any]: ... class partial(Generic[_T]): func = ... # Callable[..., _T] args: Tuple[Any, ...] keywords: Dict[str, Any] def __init__(self, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> _T: ... mypy-0.761/mypy/typeshed/stdlib/2/future_builtins.pyi0000644€tŠÔÚ€2›s®0000000034213576752252027127 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from itertools import ifilter as filter from itertools import imap as map from itertools import izip as zip def ascii(obj: Any) -> str: ... def hex(x: int) -> str: ... def oct(x: int) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/gc.pyi0000644€tŠÔÚ€2›s®0000000142313576752252024276 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for gc from typing import Any, List, Tuple def enable() -> None: ... def disable() -> None: ... def isenabled() -> bool: ... def collect(generation: int = ...) -> int: ... def set_debug(flags: int) -> None: ... def get_debug() -> int: ... def get_objects() -> List[Any]: ... def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ...) -> None: ... def get_count() -> Tuple[int, int, int]: ... def get_threshold() -> Tuple[int, int, int]: ... def get_referrers(*objs: Any) -> List[Any]: ... def get_referents(*objs: Any) -> List[Any]: ... def is_tracked(obj: Any) -> bool: ... 
garbage: List[Any] DEBUG_STATS: int DEBUG_COLLECTABLE: int DEBUG_UNCOLLECTABLE: int DEBUG_INSTANCES: int DEBUG_OBJECTS: int DEBUG_SAVEALL: int DEBUG_LEAK: int mypy-0.761/mypy/typeshed/stdlib/2/getopt.pyi0000644€tŠÔÚ€2›s®0000000070013576752252025204 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple class GetoptError(Exception): opt: str msg: str def __init__(self, msg: str, opt: str = ...) -> None: ... def __str__(self) -> str: ... error = GetoptError def getopt(args: List[str], shortopts: str, longopts: List[str] = ...) -> Tuple[List[Tuple[str, str]], List[str]]: ... def gnu_getopt(args: List[str], shortopts: str, longopts: List[str] = ...) -> Tuple[List[Tuple[str, str]], List[str]]: ... mypy-0.761/mypy/typeshed/stdlib/2/getpass.pyi0000644€tŠÔÚ€2›s®0000000030013576752252025344 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for getpass (Python 2) from typing import Any, IO class GetPassWarning(UserWarning): ... def getpass(prompt: str = ..., stream: IO[Any] = ...) -> str: ... def getuser() -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/gettext.pyi0000644€tŠÔÚ€2›s®0000000435413576752252025377 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Container, Dict, IO, List, Optional, Sequence, Type, Union def bindtextdomain(domain: str, localedir: str = ...) -> str: ... def bind_textdomain_codeset(domain: str, codeset: str = ...) -> str: ... def textdomain(domain: str = ...) -> str: ... def gettext(message: str) -> str: ... def lgettext(message: str) -> str: ... def dgettext(domain: str, message: str) -> str: ... def ldgettext(domain: str, message: str) -> str: ... def ngettext(singular: str, plural: str, n: int) -> str: ... def lngettext(singular: str, plural: str, n: int) -> str: ... def dngettext(domain: str, singular: str, plural: str, n: int) -> str: ... def ldngettext(domain: str, singular: str, plural: str, n: int) -> str: ... class NullTranslations(object): def __init__(self, fp: IO[str] = ...) -> None: ... 
def _parse(self, fp: IO[str]) -> None: ... def add_fallback(self, fallback: NullTranslations) -> None: ... def gettext(self, message: str) -> str: ... def lgettext(self, message: str) -> str: ... def ugettext(self, message: Union[str, unicode]) -> unicode: ... def ngettext(self, singular: str, plural: str, n: int) -> str: ... def lngettext(self, singular: str, plural: str, n: int) -> str: ... def ungettext(self, singular: Union[str, unicode], plural: Union[str, unicode], n: int) -> unicode: ... def info(self) -> Dict[str, str]: ... def charset(self) -> Optional[str]: ... def output_charset(self) -> Optional[str]: ... def set_output_charset(self, charset: Optional[str]) -> None: ... def install(self, unicode: bool = ..., names: Container[str] = ...) -> None: ... class GNUTranslations(NullTranslations): LE_MAGIC: int BE_MAGIC: int def find(domain: str, localedir: Optional[str] = ..., languages: Optional[Sequence[str]] = ..., all: Any = ...) -> Optional[Union[str, List[str]]]: ... def translation(domain: str, localedir: Optional[str] = ..., languages: Optional[Sequence[str]] = ..., class_: Optional[Type[NullTranslations]] = ..., fallback: bool = ..., codeset: Optional[str] = ...) -> NullTranslations: ... def install(domain: str, localedir: Optional[str] = ..., unicode: bool = ..., codeset: Optional[str] = ..., names: Container[str] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/glob.pyi0000644€tŠÔÚ€2›s®0000000056713576752252024640 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Iterator, Union, AnyStr def glob(pathname: AnyStr) -> List[AnyStr]: ... def iglob(pathname: AnyStr) -> Iterator[AnyStr]: ... def glob1(dirname: Union[str, unicode], pattern: AnyStr) -> List[AnyStr]: ... def glob0(dirname: Union[str, unicode], basename: AnyStr) -> List[AnyStr]: ... def has_magic(s: Union[str, unicode]) -> bool: ... 
# undocumented mypy-0.761/mypy/typeshed/stdlib/2/gzip.pyi0000644€tŠÔÚ€2›s®0000000175013576752252024661 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Text import io class GzipFile(io.BufferedIOBase): myfileobj: Any max_read_chunk: Any mode: Any extrabuf: Any extrasize: Any extrastart: Any name: Any min_readsize: Any compress: Any fileobj: Any offset: Any mtime: Any def __init__(self, filename: str = ..., mode: Text = ..., compresslevel: int = ..., fileobj: IO[str] = ..., mtime: float = ...) -> None: ... @property def filename(self): ... size: Any crc: Any def write(self, data): ... def read(self, size=...): ... @property def closed(self): ... def close(self): ... def flush(self, zlib_mode=...): ... def fileno(self): ... def rewind(self): ... def readable(self): ... def writable(self): ... def seekable(self): ... def seek(self, offset, whence=...): ... def readline(self, size=...): ... def open(filename: str, mode: Text = ..., compresslevel: int = ...) -> GzipFile: ... mypy-0.761/mypy/typeshed/stdlib/2/hashlib.pyi0000644€tŠÔÚ€2›s®0000000175413576752252025326 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for hashlib (Python 2) from typing import Tuple, Union _DataType = Union[str, unicode, bytearray, buffer, memoryview] class _hash(object): # This is not actually in the module namespace. name: str block_size: int digest_size: int digestsize: int def __init__(self, arg: _DataType = ...) -> None: ... def update(self, arg: _DataType) -> None: ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def copy(self) -> _hash: ... def new(name: str, data: str = ...) -> _hash: ... def md5(s: _DataType = ...) -> _hash: ... def sha1(s: _DataType = ...) -> _hash: ... def sha224(s: _DataType = ...) -> _hash: ... def sha256(s: _DataType = ...) -> _hash: ... def sha384(s: _DataType = ...) -> _hash: ... def sha512(s: _DataType = ...) -> _hash: ... algorithms: Tuple[str, ...] algorithms_guaranteed: Tuple[str, ...] 
algorithms_available: Tuple[str, ...] def pbkdf2_hmac(name: str, password: str, salt: str, rounds: int, dklen: int = ...) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/heapq.pyi0000644€tŠÔÚ€2›s®0000000135213576752252025004 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, List, Iterable, Any, Callable, Optional, Protocol _T = TypeVar('_T') class _Sortable(Protocol): def __lt__(self: _T, other: _T) -> bool: ... def cmp_lt(x, y) -> bool: ... def heappush(heap: List[_T], item: _T) -> None: ... def heappop(heap: List[_T]) -> _T: ... def heappushpop(heap: List[_T], item: _T) -> _T: ... def heapify(x: List[_T]) -> None: ... def heapreplace(heap: List[_T], item: _T) -> _T: ... def merge(*iterables: Iterable[_T]) -> Iterable[_T]: ... def nlargest(n: int, iterable: Iterable[_T], key: Optional[Callable[[_T], _Sortable]] = ...) -> List[_T]: ... def nsmallest(n: int, iterable: Iterable[_T], key: Optional[Callable[[_T], _Sortable]] = ...) -> List[_T]: ... mypy-0.761/mypy/typeshed/stdlib/2/htmlentitydefs.pyi0000644€tŠÔÚ€2›s®0000000016213576752252026747 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict name2codepoint: Dict[str, int] codepoint2name: Dict[int, str] entitydefs: Dict[str, str] mypy-0.761/mypy/typeshed/stdlib/2/httplib.pyi0000644€tŠÔÚ€2›s®0000001346113576752252025360 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for httplib (Python 2) # # Generated by stubgen and manually massaged a bit. # Needs lots more work! from typing import Any, Dict, Optional, Protocol import mimetools import ssl class HTTPMessage(mimetools.Message): def addcontinue(self, key: str, more: str) -> None: ... dict: Dict[str, str] def addheader(self, key: str, value: str) -> None: ... unixfrom: str headers: Any status: str seekable: bool def readheaders(self) -> None: ... 
class HTTPResponse: fp: Any debuglevel: Any strict: Any msg: Any version: Any status: Any reason: Any chunked: Any chunk_left: Any length: Any will_close: Any def __init__(self, sock, debuglevel: int = ..., strict: int = ..., method: Optional[Any] = ..., buffering: bool = ...) -> None: ... def begin(self): ... def close(self): ... def isclosed(self): ... def read(self, amt: Optional[Any] = ...): ... def fileno(self): ... def getheader(self, name, default: Optional[Any] = ...): ... def getheaders(self): ... # This is an API stub only for HTTPConnection and HTTPSConnection, as used in # urllib2.AbstractHTTPHandler.do_open, which takes either the class # HTTPConnection or the class HTTPSConnection, *not* an instance of either # class. do_open does not use all of the parameters of HTTPConnection.__init__ # or HTTPSConnection.__init__, so HTTPConnectionProtocol only implements the # parameters that do_open does use. class HTTPConnectionProtocol(Protocol): def __call__(self, host: str, timeout: int = ..., **http_con_args: Any) -> HTTPConnection: ... class HTTPConnection: response_class: Any default_port: Any auto_open: Any debuglevel: Any strict: Any timeout: Any source_address: Any sock: Any host: str = ... port: int = ... def __init__(self, host, port: Optional[Any] = ..., strict: Optional[Any] = ..., timeout=..., source_address: Optional[Any] = ...) -> None: ... def set_tunnel(self, host, port: Optional[Any] = ..., headers: Optional[Any] = ...): ... def set_debuglevel(self, level): ... def connect(self): ... def close(self): ... def send(self, data): ... def putrequest(self, method, url, skip_host: int = ..., skip_accept_encoding: int = ...): ... def putheader(self, header, *values): ... def endheaders(self, message_body: Optional[Any] = ...): ... def request(self, method, url, body: Optional[Any] = ..., headers=...): ... def getresponse(self, buffering: bool = ...): ... 
class HTTP: debuglevel: Any def __init__(self, host: str = ..., port: Optional[Any] = ..., strict: Optional[Any] = ...) -> None: ... def connect(self, host: Optional[Any] = ..., port: Optional[Any] = ...): ... def getfile(self): ... file: Any headers: Any def getreply(self, buffering: bool = ...): ... def close(self): ... class HTTPSConnection(HTTPConnection): default_port: Any key_file: Any cert_file: Any def __init__(self, host, port: Optional[Any] = ..., key_file: Optional[Any] = ..., cert_file: Optional[Any] = ..., strict: Optional[Any] = ..., timeout=..., source_address: Optional[Any] = ..., context: Optional[Any] = ...) -> None: ... sock: Any def connect(self): ... class HTTPS(HTTP): key_file: Any cert_file: Any def __init__(self, host: str = ..., port: Optional[Any] = ..., key_file: Optional[Any] = ..., cert_file: Optional[Any] = ..., strict: Optional[Any] = ..., context: Optional[Any] = ...) -> None: ... class HTTPException(Exception): ... class NotConnected(HTTPException): ... class InvalidURL(HTTPException): ... class UnknownProtocol(HTTPException): args: Any version: Any def __init__(self, version) -> None: ... class UnknownTransferEncoding(HTTPException): ... class UnimplementedFileMode(HTTPException): ... class IncompleteRead(HTTPException): args: Any partial: Any expected: Any def __init__(self, partial, expected: Optional[Any] = ...) -> None: ... class ImproperConnectionState(HTTPException): ... class CannotSendRequest(ImproperConnectionState): ... class CannotSendHeader(ImproperConnectionState): ... class ResponseNotReady(ImproperConnectionState): ... class BadStatusLine(HTTPException): args: Any line: Any def __init__(self, line) -> None: ... class LineTooLong(HTTPException): def __init__(self, line_type) -> None: ... error: Any class LineAndFileWrapper: def __init__(self, line, file) -> None: ... def __getattr__(self, attr): ... def read(self, amt: Optional[Any] = ...): ... def readline(self): ... 
def readlines(self, size: Optional[Any] = ...): ... # Constants responses: Dict[int, str] HTTP_PORT: int HTTPS_PORT: int # status codes # informational CONTINUE: int SWITCHING_PROTOCOLS: int PROCESSING: int # successful OK: int CREATED: int ACCEPTED: int NON_AUTHORITATIVE_INFORMATION: int NO_CONTENT: int RESET_CONTENT: int PARTIAL_CONTENT: int MULTI_STATUS: int IM_USED: int # redirection MULTIPLE_CHOICES: int MOVED_PERMANENTLY: int FOUND: int SEE_OTHER: int NOT_MODIFIED: int USE_PROXY: int TEMPORARY_REDIRECT: int # client error BAD_REQUEST: int UNAUTHORIZED: int PAYMENT_REQUIRED: int FORBIDDEN: int NOT_FOUND: int METHOD_NOT_ALLOWED: int NOT_ACCEPTABLE: int PROXY_AUTHENTICATION_REQUIRED: int REQUEST_TIMEOUT: int CONFLICT: int GONE: int LENGTH_REQUIRED: int PRECONDITION_FAILED: int REQUEST_ENTITY_TOO_LARGE: int REQUEST_URI_TOO_LONG: int UNSUPPORTED_MEDIA_TYPE: int REQUESTED_RANGE_NOT_SATISFIABLE: int EXPECTATION_FAILED: int UNPROCESSABLE_ENTITY: int LOCKED: int FAILED_DEPENDENCY: int UPGRADE_REQUIRED: int # server error INTERNAL_SERVER_ERROR: int NOT_IMPLEMENTED: int BAD_GATEWAY: int SERVICE_UNAVAILABLE: int GATEWAY_TIMEOUT: int HTTP_VERSION_NOT_SUPPORTED: int INSUFFICIENT_STORAGE: int NOT_EXTENDED: int mypy-0.761/mypy/typeshed/stdlib/2/imp.pyi0000644€tŠÔÚ€2›s®0000000245513576752252024500 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stubs for the 'imp' module.""" from typing import List, Optional, Tuple, Iterable, IO, Any import types C_BUILTIN: int C_EXTENSION: int IMP_HOOK: int PKG_DIRECTORY: int PY_CODERESOURCE: int PY_COMPILED: int PY_FROZEN: int PY_RESOURCE: int PY_SOURCE: int SEARCH_ERROR: int def acquire_lock() -> None: ... def find_module(name: str, path: Iterable[str] = ...) -> Optional[Tuple[IO[Any], str, Tuple[str, str, int]]]: ... def get_magic() -> str: ... def get_suffixes() -> List[Tuple[str, str, int]]: ... def init_builtin(name: str) -> types.ModuleType: ... def init_frozen(name: str) -> types.ModuleType: ... def is_builtin(name: str) -> int: ... 
def is_frozen(name: str) -> bool: ... def load_compiled(name: str, pathname: str, file: IO[Any] = ...) -> types.ModuleType: ... def load_dynamic(name: str, pathname: str, file: IO[Any] = ...) -> types.ModuleType: ... def load_module(name: str, file: str, pathname: str, description: Tuple[str, str, int]) -> types.ModuleType: ... def load_source(name: str, pathname: str, file: IO[Any] = ...) -> types.ModuleType: ... def lock_held() -> bool: ... def new_module(name: str) -> types.ModuleType: ... def release_lock() -> None: ... class NullImporter: def __init__(self, path_string: str) -> None: ... def find_module(self, fullname: str, path: str = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/importlib.pyi0000644€tŠÔÚ€2›s®0000000020613576752252025704 0ustar jukkaDROPBOX\Domain Users00000000000000import types from typing import Optional, Text def import_module(name: Text, package: Optional[Text] = ...) -> types.ModuleType: ... mypy-0.761/mypy/typeshed/stdlib/2/inspect.pyi0000644€tŠÔÚ€2›s®0000001075513576752252025362 0ustar jukkaDROPBOX\Domain Users00000000000000from types import CodeType, TracebackType, FrameType, FunctionType, MethodType, ModuleType from typing import Any, Dict, Callable, List, NamedTuple, Optional, Sequence, Tuple, Type, Union # Types and members class EndOfBlock(Exception): ... class BlockFinder: indent: int islambda: bool started: bool passline: bool last: int def tokeneater(self, type: int, token: str, srow_scol: Tuple[int, int], erow_ecol: Tuple[int, int], line: str) -> None: ... CO_GENERATOR: int CO_NESTED: int CO_NEWLOCALS: int CO_NOFREE: int CO_OPTIMIZED: int CO_VARARGS: int CO_VARKEYWORDS: int TPFLAGS_IS_ABSTRACT: int class ModuleInfo(NamedTuple): name: str suffix: str mode: str module_type: int def getmembers( object: object, predicate: Optional[Callable[[Any], bool]] = ... ) -> List[Tuple[str, Any]]: ... def getmoduleinfo(path: str) -> Optional[ModuleInfo]: ... def getmodulename(path: str) -> Optional[str]: ... 
def ismodule(object: object) -> bool: ... def isclass(object: object) -> bool: ... def ismethod(object: object) -> bool: ... def isfunction(object: object) -> bool: ... def isgeneratorfunction(object: object) -> bool: ... def isgenerator(object: object) -> bool: ... def istraceback(object: object) -> bool: ... def isframe(object: object) -> bool: ... def iscode(object: object) -> bool: ... def isbuiltin(object: object) -> bool: ... def isroutine(object: object) -> bool: ... def isabstract(object: object) -> bool: ... def ismethoddescriptor(object: object) -> bool: ... def isdatadescriptor(object: object) -> bool: ... def isgetsetdescriptor(object: object) -> bool: ... def ismemberdescriptor(object: object) -> bool: ... # Retrieving source code _SourceObjectType = Union[ModuleType, Type[Any], MethodType, FunctionType, TracebackType, FrameType, CodeType, Callable[..., Any]] def findsource(object: _SourceObjectType) -> Tuple[List[str], int]: ... def getabsfile(object: _SourceObjectType) -> str: ... def getblock(lines: Sequence[str]) -> Sequence[str]: ... def getdoc(object: object) -> Optional[str]: ... def getcomments(object: object) -> Optional[str]: ... def getfile(object: _SourceObjectType) -> str: ... def getmodule(object: object) -> Optional[ModuleType]: ... def getsourcefile(object: _SourceObjectType) -> Optional[str]: ... def getsourcelines(object: _SourceObjectType) -> Tuple[List[str], int]: ... def getsource(object: _SourceObjectType) -> str: ... def cleandoc(doc: str) -> str: ... def indentsize(line: str) -> int: ... # Classes and functions def getclasstree(classes: List[type], unique: bool = ...) -> List[Union[Tuple[type, Tuple[type, ...]], List[Any]]]: ... class ArgSpec(NamedTuple): args: List[str] varargs: Optional[str] keywords: Optional[str] defaults: Tuple[Any, ...] 
class ArgInfo(NamedTuple): args: List[str] varargs: Optional[str] keywords: Optional[str] locals: Dict[str, Any] class Arguments(NamedTuple): args: List[Union[str, List[Any]]] varargs: Optional[str] keywords: Optional[str] def getargs(co: CodeType) -> Arguments: ... def getargspec(func: object) -> ArgSpec: ... def getargvalues(frame: FrameType) -> ArgInfo: ... def formatargspec(args, varargs=..., varkw=..., defaults=..., formatarg=..., formatvarargs=..., formatvarkw=..., formatvalue=..., join=...) -> str: ... def formatargvalues(args, varargs=..., varkw=..., defaults=..., formatarg=..., formatvarargs=..., formatvarkw=..., formatvalue=..., join=...) -> str: ... def getmro(cls: type) -> Tuple[type, ...]: ... def getcallargs(func, *args, **kwds) -> Dict[str, Any]: ... # The interpreter stack class Traceback(NamedTuple): filename: str lineno: int function: str code_context: Optional[List[str]] index: Optional[int] # type: ignore _FrameInfo = Tuple[FrameType, str, int, str, Optional[List[str]], Optional[int]] def getouterframes(frame: FrameType, context: int = ...) -> List[_FrameInfo]: ... def getframeinfo(frame: Union[FrameType, TracebackType], context: int = ...) -> Traceback: ... def getinnerframes(traceback: TracebackType, context: int = ...) -> List[_FrameInfo]: ... def getlineno(frame: FrameType) -> int: ... def currentframe(depth: int = ...) -> FrameType: ... def stack(context: int = ...) -> List[_FrameInfo]: ... def trace(context: int = ...) -> List[_FrameInfo]: ... class Attribute(NamedTuple): name: str kind: str defining_class: type object: object def classify_class_attrs(cls: type) -> List[Attribute]: ... mypy-0.761/mypy/typeshed/stdlib/2/io.pyi0000644€tŠÔÚ€2›s®0000000266713576752252024327 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for io # Based on https://docs.python.org/2/library/io.html # Only a subset of functionality is included. 
from typing import List, BinaryIO, TextIO, IO, overload, Iterator, Iterable, Any, Union, Optional import _io from _io import BlockingIOError as BlockingIOError from _io import BufferedRWPair as BufferedRWPair from _io import BufferedRandom as BufferedRandom from _io import BufferedReader as BufferedReader from _io import BufferedWriter as BufferedWriter from _io import BytesIO as BytesIO from _io import DEFAULT_BUFFER_SIZE as DEFAULT_BUFFER_SIZE from _io import FileIO as FileIO from _io import IncrementalNewlineDecoder as IncrementalNewlineDecoder from _io import StringIO as StringIO from _io import TextIOWrapper as TextIOWrapper from _io import UnsupportedOperation as UnsupportedOperation from _io import open as open def _OpenWrapper(file: Union[str, unicode, int], mode: unicode = ..., buffering: int = ..., encoding: unicode = ..., errors: unicode = ..., newline: unicode = ..., closefd: bool = ...) -> IO[Any]: ... SEEK_SET: int SEEK_CUR: int SEEK_END: int class IOBase(_io._IOBase): ... class RawIOBase(_io._RawIOBase, IOBase): ... class BufferedIOBase(_io._BufferedIOBase, IOBase): ... # Note: In the actual io.py, TextIOBase subclasses IOBase. # (Which we don't do here because we don't want to subclass both TextIO and BinaryIO.) class TextIOBase(_io._TextIOBase): ... mypy-0.761/mypy/typeshed/stdlib/2/itertools.pyi0000644€tŠÔÚ€2›s®0000001454313576752252025740 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for itertools # Based on https://docs.python.org/2/library/itertools.html from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple, Union, Sequence, Generic, Optional) _T = TypeVar('_T') _S = TypeVar('_S') def count(start: int = ..., step: int = ...) -> Iterator[int]: ... # more general types? def cycle(iterable: Iterable[_T]) -> Iterator[_T]: ... def repeat(object: _T, times: int = ...) -> Iterator[_T]: ... class chain(Iterator[_T], Generic[_T]): def __init__(self, *iterables: Iterable[_T]) -> None: ... def next(self) -> _T: ... 
def __iter__(self) -> Iterator[_T]: ... @staticmethod def from_iterable(iterable: Iterable[Iterable[_S]]) -> Iterator[_S]: ... def compress(data: Iterable[_T], selectors: Iterable[Any]) -> Iterator[_T]: ... def dropwhile(predicate: Callable[[_T], Any], iterable: Iterable[_T]) -> Iterator[_T]: ... def ifilter(predicate: Optional[Callable[[_T], Any]], iterable: Iterable[_T]) -> Iterator[_T]: ... def ifilterfalse(predicate: Optional[Callable[[_T], Any]], iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def groupby(iterable: Iterable[_T], key: None = ...) -> Iterator[Tuple[_T, Iterator[_T]]]: ... @overload def groupby(iterable: Iterable[_T], key: Callable[[_T], _S]) -> Iterator[Tuple[_S, Iterator[_T]]]: ... @overload def islice(iterable: Iterable[_T], stop: Optional[int]) -> Iterator[_T]: ... @overload def islice(iterable: Iterable[_T], start: Optional[int], stop: Optional[int], step: Optional[int] = ...) -> Iterator[_T]: ... _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _T6 = TypeVar('_T6') @overload def imap(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> Iterator[_S]: ... @overload def imap(func: Callable[[_T1, _T2], _S], iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[_S]: ... @overload def imap(func: Callable[[_T1, _T2, _T3], _S], iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3]) -> Iterator[_S]: ... @overload def imap(func: Callable[[_T1, _T2, _T3, _T4], _S], iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4]) -> Iterator[_S]: ... @overload def imap(func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5]) -> Iterator[_S]: ... 
@overload def imap(func: Callable[[_T1, _T2, _T3, _T4, _T5, _T6], _S], iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], iter6: Iterable[_T6]) -> Iterator[_S]: ... @overload def imap(func: Callable[..., _S], iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], iter7: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[_S]: ... def starmap(func: Any, iterable: Iterable[Any]) -> Iterator[Any]: ... def takewhile(predicate: Callable[[_T], Any], iterable: Iterable[_T]) -> Iterator[_T]: ... def tee(iterable: Iterable[_T], n: int = ...) -> Tuple[Iterator[_T], ...]: ... @overload def izip(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], iter6: Iterable[_T6]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def izip(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], iter7: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ... def izip_longest(*p: Iterable[Any], fillvalue: Any = ...) -> Iterator[Any]: ... @overload def product(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ... 
@overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], iter6: Iterable[_T6]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def product(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], iter7: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ... @overload def product(*iterables: Iterable[Any], repeat: int) -> Iterator[Tuple[Any, ...]]: ... def permutations(iterable: Iterable[_T], r: int = ...) -> Iterator[Sequence[_T]]: ... def combinations(iterable: Iterable[_T], r: int) -> Iterator[Sequence[_T]]: ... def combinations_with_replacement(iterable: Iterable[_T], r: int) -> Iterator[Sequence[_T]]: ... mypy-0.761/mypy/typeshed/stdlib/2/json.pyi0000644€tŠÔÚ€2›s®0000000730113576752252024657 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Optional, Tuple, Callable, Dict, List, Union, Text, Protocol, Type class JSONDecodeError(ValueError): def dumps(self, obj: Any) -> str: ... def dump(self, obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ... def loads(self, s: str) -> Any: ... def load(self, fp: IO[str]) -> Any: ... 
def dumps(obj: Any, skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., cls: Optional[Type[JSONEncoder]] = ..., indent: Optional[int] = ..., separators: Optional[Tuple[str, str]] = ..., encoding: str = ..., default: Optional[Callable[[Any], Any]] = ..., sort_keys: bool = ..., **kwds: Any) -> str: ... def dump(obj: Any, fp: Union[IO[str], IO[Text]], skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., cls: Optional[Type[JSONEncoder]] = ..., indent: Optional[int] = ..., separators: Optional[Tuple[str, str]] = ..., encoding: str = ..., default: Optional[Callable[[Any], Any]] = ..., sort_keys: bool = ..., **kwds: Any) -> None: ... def loads(s: Union[Text, bytes], encoding: Any = ..., cls: Optional[Type[JSONDecoder]] = ..., object_hook: Optional[Callable[[Dict[Any, Any]], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ..., **kwds: Any) -> Any: ... class _Reader(Protocol): def read(self) -> Union[Text, bytes]: ... def load(fp: _Reader, encoding: Optional[str] = ..., cls: Optional[Type[JSONDecoder]] = ..., object_hook: Optional[Callable[[Dict[Any, Any]], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ..., **kwds: Any) -> Any: ... class JSONDecoder(object): def __init__(self, encoding: Union[Text, bytes] = ..., object_hook: Callable[..., Any] = ..., parse_float: Callable[[str], float] = ..., parse_int: Callable[[str], int] = ..., parse_constant: Callable[[str], Any] = ..., strict: bool = ..., object_pairs_hook: Callable[..., Any] = ...) -> None: ... def decode(self, s: Union[Text, bytes], _w: Any = ...) 
-> Any: ... def raw_decode(self, s: Union[Text, bytes], idx: int = ...) -> Tuple[Any, Any]: ... class JSONEncoder(object): item_separator: str key_separator: str skipkeys: bool ensure_ascii: bool check_circular: bool allow_nan: bool sort_keys: bool indent: Optional[int] def __init__(self, skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., sort_keys: bool = ..., indent: Optional[int] = ..., separators: Tuple[Union[Text, bytes], Union[Text, bytes]] = ..., encoding: Union[Text, bytes] = ..., default: Callable[..., Any] = ...) -> None: ... def default(self, o: Any) -> Any: ... def encode(self, o: Any) -> str: ... def iterencode(self, o: Any, _one_shot: bool = ...) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/markupbase.pyi0000644€tŠÔÚ€2›s®0000000041113576752252026033 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple class ParserBase(object): def __init__(self) -> None: ... def error(self, message: str) -> None: ... def reset(self) -> None: ... def getpos(self) -> Tuple[int, int]: ... def unknown_decl(self, data: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/md5.pyi0000644€tŠÔÚ€2›s®0000000016613576752252024375 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for Python 2.7 md5 stdlib module from hashlib import md5 as md5, md5 as new blocksize: int digest_size: int mypy-0.761/mypy/typeshed/stdlib/2/mimetools.pyi0000644€tŠÔÚ€2›s®0000000127713576752252025724 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import rfc822 class Message(rfc822.Message): encodingheader: Any typeheader: Any def __init__(self, fp, seekable: int = ...): ... plisttext: Any type: Any maintype: Any subtype: Any def parsetype(self): ... plist: Any def parseplist(self): ... def getplist(self): ... def getparam(self, name): ... def getparamnames(self): ... def getencoding(self): ... def gettype(self): ... def getmaintype(self): ... def getsubtype(self): ... def choose_boundary(): ... 
def decode(input, output, encoding): ... def encode(input, output, encoding): ... def copyliteral(input, output): ... def copybinary(input, output): ... mypy-0.761/mypy/typeshed/stdlib/2/multiprocessing/0000755€tŠÔÚ€2›s®0000000000013576752267026417 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/multiprocessing/__init__.pyi0000644€tŠÔÚ€2›s®0000000361313576752252030676 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Optional, TypeVar, Iterable from multiprocessing import pool from multiprocessing.process import Process as Process, current_process as current_process, active_children as active_children from multiprocessing.util import SUBDEBUG as SUBDEBUG, SUBWARNING as SUBWARNING from Queue import Queue as _BaseQueue class ProcessError(Exception): ... class BufferTooShort(ProcessError): ... class TimeoutError(ProcessError): ... class AuthenticationError(ProcessError): ... _T = TypeVar('_T') class Queue(_BaseQueue[_T]): def __init__(self, maxsize: int = ...) -> None: ... def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ... def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ... def qsize(self) -> int: ... def empty(self) -> bool: ... def full(self) -> bool: ... def put_nowait(self, item: _T) -> None: ... def get_nowait(self) -> _T: ... def close(self) -> None: ... def join_thread(self) -> None: ... def cancel_join_thread(self) -> None: ... def Manager(): ... def Pipe(duplex: bool = ...): ... def cpu_count() -> int: ... def freeze_support(): ... def get_logger(): ... def log_to_stderr(level: Optional[Any] = ...): ... def allow_connection_pickling(): ... def Lock(): ... def RLock(): ... def Condition(lock: Optional[Any] = ...): ... def Semaphore(value: int = ...): ... def BoundedSemaphore(value: int = ...): ... def Event(): ... def JoinableQueue(maxsize: int = ...): ... def RawValue(typecode_or_type, *args): ... 
def RawArray(typecode_or_type, size_or_initializer): ... def Value(typecode_or_type, *args, **kwds): ... def Array(typecode_or_type, size_or_initializer, **kwds): ... def Pool(processes: Optional[int] = ..., initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ..., maxtasksperchild: Optional[int] = ...) -> pool.Pool: ... mypy-0.761/mypy/typeshed/stdlib/2/multiprocessing/dummy/0000755€tŠÔÚ€2›s®0000000000013576752267027552 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/multiprocessing/dummy/__init__.pyi0000644€tŠÔÚ€2›s®0000000261113576752252032026 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, List, Type import threading import sys import weakref import array import itertools from multiprocessing import TimeoutError, cpu_count from multiprocessing.dummy.connection import Pipe from threading import Lock, RLock, Semaphore, BoundedSemaphore from threading import Event from Queue import Queue class DummyProcess(threading.Thread): _children: weakref.WeakKeyDictionary[Any, Any] _parent: threading.Thread _pid: None _start_called: bool def __init__(self, group=..., target=..., name=..., args=..., kwargs=...) -> None: ... @property def exitcode(self) -> Optional[int]: ... Process = DummyProcess # This should be threading._Condition but threading.pyi exports it as Condition class Condition(threading.Condition): notify_all: Any class Namespace(object): def __init__(self, **kwds) -> None: ... class Value(object): _typecode: Any _value: Any value: Any def __init__(self, typecode, value, lock=...) -> None: ... def _get(self) -> Any: ... def _set(self, value) -> None: ... JoinableQueue = Queue def Array(typecode, sequence, lock=...) -> array.array[Any]: ... def Manager() -> Any: ... def Pool(processes=..., initializer=..., initargs=...) -> Any: ... def active_children() -> List[Any]: ... def current_process() -> threading.Thread: ... def freeze_support() -> None: ... def shutdown() -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2/multiprocessing/dummy/connection.pyi0000644€tŠÔÚ€2›s®0000000124213576752252032425 0ustar jukkaDROPBOX\Domain Users00000000000000from Queue import Queue from typing import Any, List, Optional, Tuple, Type families: List[None] class Connection(object): _in: Any _out: Any recv: Any recv_bytes: Any send: Any send_bytes: Any def __init__(self, _in, _out) -> None: ... def close(self) -> None: ... def poll(self, timeout=...) -> Any: ... class Listener(object): _backlog_queue: Optional[Queue[Any]] address: Any def __init__(self, address=..., family=..., backlog=...) -> None: ... def accept(self) -> Connection: ... def close(self) -> None: ... def Client(address) -> Connection: ... def Pipe(duplex=...) -> Tuple[Connection, Connection]: ... mypy-0.761/mypy/typeshed/stdlib/2/multiprocessing/pool.pyi0000644€tŠÔÚ€2›s®0000000452313576752252030111 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, Callable, ContextManager, Iterable, Optional, Dict, List, TypeVar, Iterator, ) _T = TypeVar('_T', bound=Pool) class AsyncResult(): def get(self, timeout: Optional[float] = ...) -> Any: ... def wait(self, timeout: Optional[float] = ...) -> None: ... def ready(self) -> bool: ... def successful(self) -> bool: ... class IMapIterator(Iterator[Any]): def __iter__(self) -> Iterator[Any]: ... def next(self, timeout: Optional[float] = ...) -> Any: ... class IMapUnorderedIterator(IMapIterator): ... class Pool(ContextManager[Pool]): def __init__(self, processes: Optional[int] = ..., initializer: Optional[Callable[..., None]] = ..., initargs: Iterable[Any] = ..., maxtasksperchild: Optional[int] = ...) -> None: ... def apply(self, func: Callable[..., Any], args: Iterable[Any] = ..., kwds: Dict[str, Any] = ...) -> Any: ... def apply_async(self, func: Callable[..., Any], args: Iterable[Any] = ..., kwds: Dict[str, Any] = ..., callback: Optional[Callable[..., None]] = ...) -> AsyncResult: ... 
def map(self, func: Callable[..., Any], iterable: Iterable[Any] = ..., chunksize: Optional[int] = ...) -> List[Any]: ... def map_async(self, func: Callable[..., Any], iterable: Iterable[Any] = ..., chunksize: Optional[int] = ..., callback: Optional[Callable[..., None]] = ...) -> AsyncResult: ... def imap(self, func: Callable[..., Any], iterable: Iterable[Any] = ..., chunksize: Optional[int] = ...) -> IMapIterator: ... def imap_unordered(self, func: Callable[..., Any], iterable: Iterable[Any] = ..., chunksize: Optional[int] = ...) -> IMapIterator: ... def close(self) -> None: ... def terminate(self) -> None: ... def join(self) -> None: ... def __enter__(self: _T) -> _T: ... class ThreadPool(Pool, ContextManager[ThreadPool]): def __init__(self, processes: Optional[int] = ..., initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/multiprocessing/process.pyi0000644€tŠÔÚ€2›s®0000000161513576752252030615 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional def current_process(): ... def active_children(): ... class Process: def __init__(self, group: Optional[Any] = ..., target: Optional[Any] = ..., name: Optional[Any] = ..., args=..., kwargs=...): ... def run(self): ... def start(self): ... def terminate(self): ... def join(self, timeout: Optional[Any] = ...): ... def is_alive(self): ... @property def name(self): ... @name.setter def name(self, name): ... @property def daemon(self): ... @daemon.setter def daemon(self, daemonic): ... @property def authkey(self): ... @authkey.setter def authkey(self, authkey): ... @property def exitcode(self): ... @property def ident(self): ... pid: Any class AuthenticationString(bytes): def __reduce__(self): ... class _MainProcess(Process): def __init__(self): ... 
mypy-0.761/mypy/typeshed/stdlib/2/multiprocessing/util.pyi0000644€tŠÔÚ€2›s®0000000136613576752252030117 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional import threading SUBDEBUG: Any SUBWARNING: Any def sub_debug(msg, *args): ... def debug(msg, *args): ... def info(msg, *args): ... def sub_warning(msg, *args): ... def get_logger(): ... def log_to_stderr(level: Optional[Any] = ...): ... def get_temp_dir(): ... def register_after_fork(obj, func): ... class Finalize: def __init__(self, obj, callback, args=..., kwargs: Optional[Any] = ..., exitpriority: Optional[Any] = ...): ... def __call__(self, wr: Optional[Any] = ...): ... def cancel(self): ... def still_active(self): ... def is_exiting(): ... class ForkAwareThreadLock: def __init__(self): ... class ForkAwareLocal(threading.local): def __init__(self): ... def __reduce__(self): ... mypy-0.761/mypy/typeshed/stdlib/2/mutex.pyi0000644€tŠÔÚ€2›s®0000000065213576752252025052 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/mutex.py from typing import Any, Callable, Deque, TypeVar _ArgType = TypeVar('_ArgType') class mutex: locked: bool queue: Deque[Any] def __init__(self) -> None: ... def test(self) -> bool: ... def testandset(self) -> bool: ... def lock(self, function: Callable[[_ArgType], Any], argument: _ArgType) -> None: ... def unlock(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/nturl2path.pyi0000644€tŠÔÚ€2›s®0000000016313576752252026010 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr def url2pathname(url: AnyStr) -> AnyStr: ... def pathname2url(p: AnyStr) -> AnyStr: ... 
mypy-0.761/mypy/typeshed/stdlib/2/os/0000755€tŠÔÚ€2›s®0000000000013576752267023611 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2/os/__init__.pyi0000644€tŠÔÚ€2›s®0000003212713576752252026072 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os # Ron Murawski from builtins import OSError as error from io import TextIOWrapper as _TextIOWrapper from posix import listdir as listdir, stat_result as stat_result # TODO: use this, see https://github.com/python/mypy/issues/3078 import sys from typing import ( Mapping, MutableMapping, Dict, List, Any, Tuple, Iterator, overload, Union, AnyStr, Optional, Generic, Set, Callable, Text, Sequence, IO, NamedTuple, NoReturn, TypeVar ) from . import path as path _T = TypeVar('_T') # ----- os variables ----- if sys.version_info >= (3, 2): supports_bytes_environ: bool if sys.version_info >= (3, 3): supports_dir_fd: Set[Callable[..., Any]] supports_fd: Set[Callable[..., Any]] supports_effective_ids: Set[Callable[..., Any]] supports_follow_symlinks: Set[Callable[..., Any]] SEEK_SET: int SEEK_CUR: int SEEK_END: int O_RDONLY: int O_WRONLY: int O_RDWR: int O_APPEND: int O_CREAT: int O_EXCL: int O_TRUNC: int # We don't use sys.platform for O_* flags to denote platform-dependent APIs because some codes, # including tests for mypy, use a more finer way than sys.platform before using these APIs # See https://github.com/python/typeshed/pull/2286 for discussions O_DSYNC: int # Unix only O_RSYNC: int # Unix only O_SYNC: int # Unix only O_NDELAY: int # Unix only O_NONBLOCK: int # Unix only O_NOCTTY: int # Unix only O_SHLOCK: int # Unix only O_EXLOCK: int # Unix only O_BINARY: int # Windows only O_NOINHERIT: int # Windows only O_SHORT_LIVED: int # Windows only O_TEMPORARY: int # Windows only O_RANDOM: int # Windows only O_SEQUENTIAL: int # Windows only O_TEXT: int # Windows only O_ASYNC: int # Gnu extension if in C library O_DIRECT: int # Gnu extension if in C library O_DIRECTORY: int # Gnu extension if in C 
library O_NOFOLLOW: int # Gnu extension if in C library O_NOATIME: int # Gnu extension if in C library O_LARGEFILE: int # Gnu extension if in C library curdir: str pardir: str sep: str if sys.platform == 'win32': altsep: str else: altsep: Optional[str] extsep: str pathsep: str defpath: str linesep: str devnull: str name: str F_OK: int R_OK: int W_OK: int X_OK: int class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): def copy(self) -> Dict[AnyStr, AnyStr]: ... def __delitem__(self, key: AnyStr) -> None: ... def __getitem__(self, key: AnyStr) -> AnyStr: ... def __setitem__(self, key: AnyStr, value: AnyStr) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... def __len__(self) -> int: ... environ: _Environ[str] if sys.version_info >= (3, 2): environb: _Environ[bytes] if sys.platform != 'win32': # Unix only confstr_names: Dict[str, int] pathconf_names: Dict[str, int] sysconf_names: Dict[str, int] EX_OK: int EX_USAGE: int EX_DATAERR: int EX_NOINPUT: int EX_NOUSER: int EX_NOHOST: int EX_UNAVAILABLE: int EX_SOFTWARE: int EX_OSERR: int EX_OSFILE: int EX_CANTCREAT: int EX_IOERR: int EX_TEMPFAIL: int EX_PROTOCOL: int EX_NOPERM: int EX_CONFIG: int EX_NOTFOUND: int P_NOWAIT: int P_NOWAITO: int P_WAIT: int if sys.platform == 'win32': P_DETACH: int P_OVERLAY: int # wait()/waitpid() options if sys.platform != 'win32': WNOHANG: int # Unix only WCONTINUED: int # some Unix systems WUNTRACED: int # Unix only TMP_MAX: int # Undocumented, but used by tempfile # ----- os classes (structures) ----- if sys.version_info >= (3, 6): from builtins import _PathLike as PathLike # See comment in builtins _PathType = path._PathType class _StatVFS(NamedTuple): f_bsize: int f_frsize: int f_blocks: int f_bfree: int f_bavail: int f_files: int f_ffree: int f_favail: int f_flag: int f_namemax: int def getlogin() -> str: ... def getpid() -> int: ... def getppid() -> int: ... def strerror(code: int) -> str: ... def umask(mask: int) -> int: ... 
if sys.platform != 'win32': def ctermid() -> str: ... def getegid() -> int: ... def geteuid() -> int: ... def getgid() -> int: ... def getgroups() -> List[int]: ... # Unix only, behaves differently on Mac def initgroups(username: str, gid: int) -> None: ... def getpgid(pid: int) -> int: ... def getpgrp() -> int: ... def getresuid() -> Tuple[int, int, int]: ... def getresgid() -> Tuple[int, int, int]: ... def getuid() -> int: ... def setegid(egid: int) -> None: ... def seteuid(euid: int) -> None: ... def setgid(gid: int) -> None: ... def setgroups(groups: Sequence[int]) -> None: ... def setpgrp() -> None: ... def setpgid(pid: int, pgrp: int) -> None: ... def setregid(rgid: int, egid: int) -> None: ... def setresgid(rgid: int, egid: int, sgid: int) -> None: ... def setresuid(ruid: int, euid: int, suid: int) -> None: ... def setreuid(ruid: int, euid: int) -> None: ... def getsid(pid: int) -> int: ... def setsid() -> None: ... def setuid(uid: int) -> None: ... def uname() -> Tuple[str, str, str, str, str]: ... @overload def getenv(key: Text) -> Optional[str]: ... @overload def getenv(key: Text, default: _T) -> Union[str, _T]: ... def putenv(key: Union[bytes, Text], value: Union[bytes, Text]) -> None: ... def unsetenv(key: Union[bytes, Text]) -> None: ... def fdopen(fd: int, *args, **kwargs) -> IO[Any]: ... def close(fd: int) -> None: ... def closerange(fd_low: int, fd_high: int) -> None: ... def dup(fd: int) -> int: ... def dup2(fd: int, fd2: int) -> None: ... def fstat(fd: int) -> Any: ... def fsync(fd: int) -> None: ... def lseek(fd: int, pos: int, how: int) -> int: ... def open(file: _PathType, flags: int, mode: int = ...) -> int: ... def pipe() -> Tuple[int, int]: ... def read(fd: int, n: int) -> bytes: ... def write(fd: int, string: Union[bytes, buffer]) -> int: ... def access(path: _PathType, mode: int) -> bool: ... def chdir(path: _PathType) -> None: ... def fchdir(fd: int) -> None: ... def getcwd() -> str: ... def getcwdu() -> unicode: ... 
def chmod(path: _PathType, mode: int) -> None: ... def link(src: _PathType, link_name: _PathType) -> None: ... def lstat(path: _PathType) -> Any: ... def mknod(filename: _PathType, mode: int = ..., device: int = ...) -> None: ... def major(device: int) -> int: ... def minor(device: int) -> int: ... def makedev(major: int, minor: int) -> int: ... def mkdir(path: _PathType, mode: int = ...) -> None: ... def makedirs(path: _PathType, mode: int = ...) -> None: ... def readlink(path: AnyStr) -> AnyStr: ... def remove(path: _PathType) -> None: ... def removedirs(path: _PathType) -> None: ... def rename(src: _PathType, dst: _PathType) -> None: ... def renames(old: _PathType, new: _PathType) -> None: ... def rmdir(path: _PathType) -> None: ... def stat(path: _PathType) -> Any: ... @overload def stat_float_times() -> bool: ... @overload def stat_float_times(newvalue: bool) -> None: ... def symlink(source: _PathType, link_name: _PathType) -> None: ... def unlink(path: _PathType) -> None: ... # TODO: add ns, dir_fd, follow_symlinks argument if sys.version_info >= (3, 0): def utime(path: _PathType, times: Optional[Tuple[float, float]] = ...) -> None: ... else: def utime(path: _PathType, times: Optional[Tuple[float, float]]) -> None: ... if sys.platform != 'win32': # Unix only def fchmod(fd: int, mode: int) -> None: ... def fchown(fd: int, uid: int, gid: int) -> None: ... if sys.platform != 'darwin': def fdatasync(fd: int) -> None: ... # Unix only, not Mac def fpathconf(fd: int, name: Union[str, int]) -> int: ... def fstatvfs(fd: int) -> _StatVFS: ... def ftruncate(fd: int, length: int) -> None: ... def isatty(fd: int) -> bool: ... def openpty() -> Tuple[int, int]: ... # some flavors of Unix def tcgetpgrp(fd: int) -> int: ... def tcsetpgrp(fd: int, pg: int) -> None: ... def ttyname(fd: int) -> str: ... def chflags(path: _PathType, flags: int) -> None: ... def chroot(path: _PathType) -> None: ... def chown(path: _PathType, uid: int, gid: int) -> None: ... 
def lchflags(path: _PathType, flags: int) -> None: ... def lchmod(path: _PathType, mode: int) -> None: ... def lchown(path: _PathType, uid: int, gid: int) -> None: ... def mkfifo(path: _PathType, mode: int = ...) -> None: ... def pathconf(path: _PathType, name: Union[str, int]) -> int: ... def statvfs(path: _PathType) -> _StatVFS: ... if sys.version_info >= (3, 6): def walk(top: Union[AnyStr, PathLike[AnyStr]], topdown: bool = ..., onerror: Optional[Callable[[OSError], Any]] = ..., followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr], List[AnyStr]]]: ... else: def walk(top: AnyStr, topdown: bool = ..., onerror: Optional[Callable[[OSError], Any]] = ..., followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr], List[AnyStr]]]: ... def abort() -> NoReturn: ... # These are defined as execl(file, *args) but the first *arg is mandatory. def execl(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ... def execlp(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ... # These are: execle(file, *args, env) but env is pulled from the last element of the args. def execle(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ... def execlpe(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ... # The docs say `args: tuple or list of strings` # The implementation enforces tuple or list so we can't use Sequence. _ExecVArgs = Union[Tuple[Union[bytes, Text], ...], List[bytes], List[Text], List[Union[bytes, Text]]] def execv(path: _PathType, args: _ExecVArgs) -> NoReturn: ... def execve(path: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> NoReturn: ... def execvp(file: _PathType, args: _ExecVArgs) -> NoReturn: ... def execvpe(file: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> NoReturn: ... def _exit(n: int) -> NoReturn: ... def kill(pid: int, sig: int) -> None: ... if sys.platform != 'win32': # Unix only def fork() -> int: ... 
def forkpty() -> Tuple[int, int]: ... # some flavors of Unix def killpg(pgid: int, sig: int) -> None: ... def nice(increment: int) -> int: ... def plock(op: int) -> None: ... # ???op is int? if sys.version_info >= (3, 0): class popen(_TextIOWrapper): # TODO 'b' modes or bytes command not accepted? def __init__(self, command: str, mode: str = ..., bufsize: int = ...) -> None: ... def close(self) -> Any: ... # may return int else: def popen(command: str, *args, **kwargs) -> IO[Any]: ... def popen2(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ... def popen3(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any], IO[Any]]: ... def popen4(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ... def spawnl(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ... def spawnle(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Any) -> int: ... # Imprecise sig def spawnv(mode: int, path: _PathType, args: List[Union[bytes, Text]]) -> int: ... def spawnve(mode: int, path: _PathType, args: List[Union[bytes, Text]], env: Mapping[str, str]) -> int: ... def system(command: _PathType) -> int: ... def times() -> Tuple[float, float, float, float, float]: ... def waitpid(pid: int, options: int) -> Tuple[int, int]: ... def urandom(n: int) -> bytes: ... if sys.platform == 'win32': def startfile(path: _PathType, operation: Optional[str] = ...) -> None: ... else: # Unix only def spawnlp(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ... def spawnlpe(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Any) -> int: ... # Imprecise signature def spawnvp(mode: int, file: _PathType, args: List[Union[bytes, Text]]) -> int: ... def spawnvpe(mode: int, file: _PathType, args: List[Union[bytes, Text]], env: Mapping[str, str]) -> int: ... def wait() -> Tuple[int, int]: ... def wait3(options: int) -> Tuple[int, int, Any]: ... def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... 
def WCOREDUMP(status: int) -> bool: ... def WIFCONTINUED(status: int) -> bool: ... def WIFSTOPPED(status: int) -> bool: ... def WIFSIGNALED(status: int) -> bool: ... def WIFEXITED(status: int) -> bool: ... def WEXITSTATUS(status: int) -> int: ... def WSTOPSIG(status: int) -> int: ... def WTERMSIG(status: int) -> int: ... def confstr(name: Union[str, int]) -> Optional[str]: ... def getloadavg() -> Tuple[float, float, float]: ... def sysconf(name: Union[str, int]) -> int: ... if sys.version_info >= (3, 0): def sched_getaffinity(id: int) -> Set[int]: ... if sys.version_info >= (3, 3): class waitresult: si_pid: int def waitid(idtype: int, id: int, options: int) -> waitresult: ... if sys.version_info < (3, 0): def tmpfile() -> IO[Any]: ... def tmpnam() -> str: ... def tempnam(dir: str = ..., prefix: str = ...) -> str: ... P_ALL: int WEXITED: int WNOWAIT: int mypy-0.761/mypy/typeshed/stdlib/2/os/path.pyi0000644€tŠÔÚ€2›s®0000001412413576752252025264 0ustar jukkaDROPBOX\Domain Users00000000000000# NB: path.pyi and stdlib/2 and stdlib/3 must remain consistent! # Stubs for os.path # Ron Murawski import os import sys from typing import overload, List, Any, AnyStr, Sequence, Tuple, TypeVar, Union, Text, Callable, Optional _T = TypeVar('_T') if sys.version_info >= (3, 6): from builtins import _PathLike _PathType = Union[bytes, Text, _PathLike] _StrPath = Union[Text, _PathLike[Text]] _BytesPath = Union[bytes, _PathLike[bytes]] else: _PathType = Union[bytes, Text] _StrPath = Text _BytesPath = bytes # ----- os.path variables ----- supports_unicode_filenames: bool # aliases (also in os) curdir: str pardir: str sep: str if sys.platform == 'win32': altsep: str else: altsep: Optional[str] extsep: str pathsep: str defpath: str devnull: str # ----- os.path function stubs ----- if sys.version_info >= (3, 6): # Overloads are necessary to work around python/mypy#3644. @overload def abspath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def abspath(path: AnyStr) -> AnyStr: ... 
@overload def basename(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def basename(path: AnyStr) -> AnyStr: ... @overload def dirname(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def dirname(path: AnyStr) -> AnyStr: ... @overload def expanduser(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expanduser(path: AnyStr) -> AnyStr: ... @overload def expandvars(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expandvars(path: AnyStr) -> AnyStr: ... @overload def normcase(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normcase(path: AnyStr) -> AnyStr: ... @overload def normpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': @overload def realpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(path: AnyStr) -> AnyStr: ... else: @overload def realpath(filename: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(filename: AnyStr) -> AnyStr: ... else: def abspath(path: AnyStr) -> AnyStr: ... def basename(path: AnyStr) -> AnyStr: ... def dirname(path: AnyStr) -> AnyStr: ... def expanduser(path: AnyStr) -> AnyStr: ... def expandvars(path: AnyStr) -> AnyStr: ... def normcase(path: AnyStr) -> AnyStr: ... def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': def realpath(path: AnyStr) -> AnyStr: ... else: def realpath(filename: AnyStr) -> AnyStr: ... if sys.version_info >= (3, 6): # In reality it returns str for sequences of _StrPath and bytes for sequences # of _BytesPath, but mypy does not accept such a signature. def commonpath(paths: Sequence[_PathType]) -> Any: ... elif sys.version_info >= (3, 5): def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ... # NOTE: Empty lists results in '' (str) regardless of contained type. # Also, in Python 2 mixed sequences of Text and bytes results in either Text or bytes # So, fall back to Any def commonprefix(list: Sequence[_PathType]) -> Any: ... 
if sys.version_info >= (3, 3): def exists(path: Union[_PathType, int]) -> bool: ... else: def exists(path: _PathType) -> bool: ... def lexists(path: _PathType) -> bool: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(path: _PathType) -> float: ... def getmtime(path: _PathType) -> float: ... def getctime(path: _PathType) -> float: ... def getsize(path: _PathType) -> int: ... def isabs(path: _PathType) -> bool: ... def isfile(path: _PathType) -> bool: ... def isdir(path: _PathType) -> bool: ... def islink(path: _PathType) -> bool: ... def ismount(path: _PathType) -> bool: ... if sys.version_info < (3, 0): # Make sure signatures are disjunct, and allow combinations of bytes and unicode. # (Since Python 2 allows that, too) # Note that e.g. os.path.join("a", "b", "c", "d", u"e") will still result in # a type error. @overload def join(__p1: bytes, *p: bytes) -> bytes: ... @overload def join(__p1: bytes, __p2: bytes, __p3: bytes, __p4: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: bytes, __p3: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: Text, *p: _PathType) -> Text: ... @overload def join(__p1: Text, *p: _PathType) -> Text: ... elif sys.version_info >= (3, 6): # Mypy complains that the signatures overlap (same for relpath below), but things seem to behave correctly anyway. @overload def join(path: _StrPath, *paths: _StrPath) -> Text: ... @overload def join(path: _BytesPath, *paths: _BytesPath) -> bytes: ... else: def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ... @overload def relpath(path: _BytesPath, start: Optional[_BytesPath] = ...) -> bytes: ... @overload def relpath(path: _StrPath, start: Optional[_StrPath] = ...) -> Text: ... def samefile(path1: _PathType, path2: _PathType) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... def samestat(stat1: os.stat_result, stat2: os.stat_result) -> bool: ... 
if sys.version_info >= (3, 6): @overload def split(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... else: def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... if sys.platform == 'win32': def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # deprecated if sys.version_info < (3,): def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/os2emxpath.pyi0000644€tŠÔÚ€2›s®0000001412413576752252026001 0ustar jukkaDROPBOX\Domain Users00000000000000# NB: path.pyi and stdlib/2 and stdlib/3 must remain consistent! # Stubs for os.path # Ron Murawski import os import sys from typing import overload, List, Any, AnyStr, Sequence, Tuple, TypeVar, Union, Text, Callable, Optional _T = TypeVar('_T') if sys.version_info >= (3, 6): from builtins import _PathLike _PathType = Union[bytes, Text, _PathLike] _StrPath = Union[Text, _PathLike[Text]] _BytesPath = Union[bytes, _PathLike[bytes]] else: _PathType = Union[bytes, Text] _StrPath = Text _BytesPath = bytes # ----- os.path variables ----- supports_unicode_filenames: bool # aliases (also in os) curdir: str pardir: str sep: str if sys.platform == 'win32': altsep: str else: altsep: Optional[str] extsep: str pathsep: str defpath: str devnull: str # ----- os.path function stubs ----- if sys.version_info >= (3, 6): # Overloads are necessary to work around python/mypy#3644. @overload def abspath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def abspath(path: AnyStr) -> AnyStr: ... 
@overload def basename(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def basename(path: AnyStr) -> AnyStr: ... @overload def dirname(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def dirname(path: AnyStr) -> AnyStr: ... @overload def expanduser(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expanduser(path: AnyStr) -> AnyStr: ... @overload def expandvars(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expandvars(path: AnyStr) -> AnyStr: ... @overload def normcase(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normcase(path: AnyStr) -> AnyStr: ... @overload def normpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': @overload def realpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(path: AnyStr) -> AnyStr: ... else: @overload def realpath(filename: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(filename: AnyStr) -> AnyStr: ... else: def abspath(path: AnyStr) -> AnyStr: ... def basename(path: AnyStr) -> AnyStr: ... def dirname(path: AnyStr) -> AnyStr: ... def expanduser(path: AnyStr) -> AnyStr: ... def expandvars(path: AnyStr) -> AnyStr: ... def normcase(path: AnyStr) -> AnyStr: ... def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': def realpath(path: AnyStr) -> AnyStr: ... else: def realpath(filename: AnyStr) -> AnyStr: ... if sys.version_info >= (3, 6): # In reality it returns str for sequences of _StrPath and bytes for sequences # of _BytesPath, but mypy does not accept such a signature. def commonpath(paths: Sequence[_PathType]) -> Any: ... elif sys.version_info >= (3, 5): def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ... # NOTE: Empty lists results in '' (str) regardless of contained type. # Also, in Python 2 mixed sequences of Text and bytes results in either Text or bytes # So, fall back to Any def commonprefix(list: Sequence[_PathType]) -> Any: ... 
if sys.version_info >= (3, 3): def exists(path: Union[_PathType, int]) -> bool: ... else: def exists(path: _PathType) -> bool: ... def lexists(path: _PathType) -> bool: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(path: _PathType) -> float: ... def getmtime(path: _PathType) -> float: ... def getctime(path: _PathType) -> float: ... def getsize(path: _PathType) -> int: ... def isabs(path: _PathType) -> bool: ... def isfile(path: _PathType) -> bool: ... def isdir(path: _PathType) -> bool: ... def islink(path: _PathType) -> bool: ... def ismount(path: _PathType) -> bool: ... if sys.version_info < (3, 0): # Make sure signatures are disjunct, and allow combinations of bytes and unicode. # (Since Python 2 allows that, too) # Note that e.g. os.path.join("a", "b", "c", "d", u"e") will still result in # a type error. @overload def join(__p1: bytes, *p: bytes) -> bytes: ... @overload def join(__p1: bytes, __p2: bytes, __p3: bytes, __p4: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: bytes, __p3: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: Text, *p: _PathType) -> Text: ... @overload def join(__p1: Text, *p: _PathType) -> Text: ... elif sys.version_info >= (3, 6): # Mypy complains that the signatures overlap (same for relpath below), but things seem to behave correctly anyway. @overload def join(path: _StrPath, *paths: _StrPath) -> Text: ... @overload def join(path: _BytesPath, *paths: _BytesPath) -> bytes: ... else: def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ... @overload def relpath(path: _BytesPath, start: Optional[_BytesPath] = ...) -> bytes: ... @overload def relpath(path: _StrPath, start: Optional[_StrPath] = ...) -> Text: ... def samefile(path1: _PathType, path2: _PathType) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... def samestat(stat1: os.stat_result, stat2: os.stat_result) -> bool: ... 
if sys.version_info >= (3, 6): @overload def split(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... else: def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... if sys.platform == 'win32': def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # deprecated if sys.version_info < (3,): def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/pipes.pyi0000644€tŠÔÚ€2›s®0000000070513576752252025027 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO class Template: def __init__(self) -> None: ... def reset(self) -> None: ... def clone(self) -> Template: ... def debug(self, flag: bool) -> None: ... def append(self, cmd: str, kind: str) -> None: ... def prepend(self, cmd: str, kind: str) -> None: ... def open(self, file: str, mode: str) -> IO[Any]: ... def copy(self, infile: str, outfile: str) -> None: ... def quote(s: str) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/platform.pyi0000644€tŠÔÚ€2›s®0000000311113576752252025525 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for platform (Python 2) # # Based on stub generated by stubgen. from typing import Any, Optional, Tuple __copyright__: Any DEV_NULL: Any def libc_ver(executable=..., lib=..., version=..., chunksize: int = ...): ... def linux_distribution(distname=..., version=..., id=..., supported_dists=..., full_distribution_name: int = ...): ... def dist(distname=..., version=..., id=..., supported_dists=...): ... 
class _popen: tmpfile: Any pipe: Any bufsize: Any mode: Any def __init__(self, cmd, mode=..., bufsize: Optional[Any] = ...): ... def read(self): ... def readlines(self): ... def close(self, remove=..., error=...): ... __del__: Any def popen(cmd, mode=..., bufsize: Optional[Any] = ...): ... def win32_ver(release=..., version=..., csd=..., ptype=...): ... def mac_ver(release=..., versioninfo=..., machine=...): ... def java_ver(release=..., vendor=..., vminfo=..., osinfo=...): ... def system_alias(system, release, version): ... def architecture(executable=..., bits=..., linkage=...) -> Tuple[str, str]: ... def uname() -> Tuple[str, str, str, str, str, str]: ... def system() -> str: ... def node() -> str: ... def release() -> str: ... def version() -> str: ... def machine() -> str: ... def processor() -> str: ... def python_implementation() -> str: ... def python_version() -> str: ... def python_version_tuple() -> Tuple[str, str, str]: ... def python_branch() -> str: ... def python_revision() -> str: ... def python_build() -> Tuple[str, str]: ... def python_compiler() -> str: ... def platform(aliased: int = ..., terse: int = ...) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/popen2.pyi0000644€tŠÔÚ€2›s®0000000175013576752252025113 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Iterable, List, Optional, Union, TextIO, Tuple, TypeVar _T = TypeVar('_T') class Popen3: sts: int cmd: Iterable[Any] pid: int tochild: TextIO fromchild: TextIO childerr: Optional[TextIO] def __init__(self, cmd: Iterable[Any] = ..., capturestderr: bool = ..., bufsize: int = ...) -> None: ... def __del__(self) -> None: ... def poll(self, _deadstate: _T = ...) -> Union[int, _T]: ... def wait(self) -> int: ... class Popen4(Popen3): childerr: None cmd: Iterable[Any] pid: int tochild: TextIO fromchild: TextIO def __init__(self, cmd: Iterable[Any] = ..., bufsize: int = ...) -> None: ... def popen2(cmd: Iterable[Any] = ..., bufsize: int = ..., mode: str = ...) 
-> Tuple[TextIO, TextIO]: ... def popen3(cmd: Iterable[Any] = ..., bufsize: int = ..., mode: str = ...) -> Tuple[TextIO, TextIO, TextIO]: ... def popen4(cmd: Iterable[Any] = ..., bufsize: int = ..., mode: str = ...) -> Tuple[TextIO, TextIO]: ... mypy-0.761/mypy/typeshed/stdlib/2/posix.pyi0000644€tŠÔÚ€2›s®0000001433313576752252025053 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr, Dict, List, Mapping, Tuple, Union, Sequence, IO, Optional, TypeVar error = OSError confstr_names: Dict[str, int] environ: Dict[str, str] pathconf_names: Dict[str, int] sysconf_names: Dict[str, int] _T = TypeVar("_T") EX_CANTCREAT: int EX_CONFIG: int EX_DATAERR: int EX_IOERR: int EX_NOHOST: int EX_NOINPUT: int EX_NOPERM: int EX_NOUSER: int EX_OK: int EX_OSERR: int EX_OSFILE: int EX_PROTOCOL: int EX_SOFTWARE: int EX_TEMPFAIL: int EX_UNAVAILABLE: int EX_USAGE: int F_OK: int NGROUPS_MAX: int O_APPEND: int O_ASYNC: int O_CREAT: int O_DIRECT: int O_DIRECTORY: int O_DSYNC: int O_EXCL: int O_LARGEFILE: int O_NDELAY: int O_NOATIME: int O_NOCTTY: int O_NOFOLLOW: int O_NONBLOCK: int O_RDONLY: int O_RDWR: int O_RSYNC: int O_SYNC: int O_TRUNC: int O_WRONLY: int R_OK: int TMP_MAX: int WCONTINUED: int WNOHANG: int WUNTRACED: int W_OK: int X_OK: int def WCOREDUMP(status: int) -> bool: ... def WEXITSTATUS(status: int) -> bool: ... def WIFCONTINUED(status: int) -> bool: ... def WIFEXITED(status: int) -> bool: ... def WIFSIGNALED(status: int) -> bool: ... def WIFSTOPPED(status: int) -> bool: ... def WSTOPSIG(status: int) -> bool: ... def WTERMSIG(status: int) -> bool: ... 
class stat_result(object): n_fields: int n_sequence_fields: int n_unnamed_fields: int st_mode: int st_ino: int st_dev: int st_nlink: int st_uid: int st_gid: int st_size: int st_atime: int st_mtime: int st_ctime: int class statvfs_result(object): n_fields: int n_sequence_fields: int n_unnamed_fields: int f_bsize: int f_frsize: int f_blocks: int f_bfree: int f_bavail: int f_files: int f_ffree: int f_favail: int f_flag: int f_namemax: int def _exit(status: int) -> None: ... def abort() -> None: ... def access(path: unicode, mode: int) -> bool: ... def chdir(path: unicode) -> None: ... def chmod(path: unicode, mode: int) -> None: ... def chown(path: unicode, uid: int, gid: int) -> None: ... def chroot(path: unicode) -> None: ... def close(fd: int) -> None: ... def closerange(fd_low: int, fd_high: int) -> None: ... def confstr(name: Union[str, int]) -> str: ... def ctermid() -> str: ... def dup(fd: int) -> int: ... def dup2(fd: int, fd2: int) -> None: ... def execv(path: str, args: Sequence[str], env: Mapping[str, str]) -> None: ... def execve(path: str, args: Sequence[str], env: Mapping[str, str]) -> None: ... def fchdir(fd: int) -> None: ... def fchmod(fd: int, mode: int) -> None: ... def fchown(fd: int, uid: int, gid: int) -> None: ... def fdatasync(fd: int) -> None: ... def fdopen(fd: int, mode: str = ..., bufsize: int = ...) -> IO[str]: ... def fork() -> int: ... def forkpty() -> Tuple[int, int]: ... def fpathconf(fd: int, name: str) -> None: ... def fstat(fd: int) -> stat_result: ... def fstatvfs(fd: int) -> statvfs_result: ... def fsync(fd: int) -> None: ... def ftruncate(fd: int, length: int) -> None: ... def getcwd() -> str: ... def getcwdu() -> unicode: ... def getegid() -> int: ... def geteuid() -> int: ... def getgid() -> int: ... def getgroups() -> List[int]: ... def getloadavg() -> Tuple[float, float, float]: ... def getlogin() -> str: ... def getpgid(pid: int) -> int: ... def getpgrp() -> int: ... def getpid() -> int: ... def getppid() -> int: ... 
def getresgid() -> Tuple[int, int, int]: ... def getresuid() -> Tuple[int, int, int]: ... def getsid(pid: int) -> int: ... def getuid() -> int: ... def initgroups(username: str, gid: int) -> None: ... def isatty(fd: int) -> bool: ... def kill(pid: int, sig: int) -> None: ... def killpg(pgid: int, sig: int) -> None: ... def lchown(path: unicode, uid: int, gid: int) -> None: ... def link(source: unicode, link_name: str) -> None: ... def listdir(path: AnyStr) -> List[AnyStr]: ... def lseek(fd: int, pos: int, how: int) -> None: ... def lstat(path: unicode) -> stat_result: ... def major(device: int) -> int: ... def makedev(major: int, minor: int) -> int: ... def minor(device: int) -> int: ... def mkdir(path: unicode, mode: int = ...) -> None: ... def mkfifo(path: unicode, mode: int = ...) -> None: ... def mknod(filename: unicode, mode: int = ..., device: int = ...) -> None: ... def nice(increment: int) -> int: ... def open(file: unicode, flags: int, mode: int = ...) -> int: ... def openpty() -> Tuple[int, int]: ... def pathconf(path: unicode, name: str) -> str: ... def pipe() -> Tuple[int, int]: ... def popen(command: str, mode: str = ..., bufsize: int = ...) -> IO[str]: ... def putenv(varname: str, value: str) -> None: ... def read(fd: int, n: int) -> str: ... def readlink(path: _T) -> _T: ... def remove(path: unicode) -> None: ... def rename(src: unicode, dst: unicode) -> None: ... def rmdir(path: unicode) -> None: ... def setegid(egid: int) -> None: ... def seteuid(euid: int) -> None: ... def setgid(gid: int) -> None: ... def setgroups(groups: Sequence[int]) -> None: ... def setpgid(pid: int, pgrp: int) -> None: ... def setpgrp() -> None: ... def setregid(rgid: int, egid: int) -> None: ... def setresgid(rgid: int, egid: int, sgid: int) -> None: ... def setresuid(ruid: int, euid: int, suid: int) -> None: ... def setreuid(ruid: int, euid: int) -> None: ... def setsid() -> None: ... def setuid(pid: int) -> None: ... def stat(path: unicode) -> stat_result: ... 
def statvfs(path: unicode) -> statvfs_result: ... def stat_float_times(fd: int) -> None: ... def strerror(code: int) -> str: ... def symlink(source: unicode, link_name: unicode) -> None: ... def sysconf(name: Union[str, int]) -> int: ... def system(command: unicode) -> int: ... def tcgetpgrp(fd: int) -> int: ... def tcsetpgrp(fd: int, pg: int) -> None: ... def times() -> Tuple[float, float, float, float, float]: ... def tmpfile() -> IO[str]: ... def ttyname(fd: int) -> str: ... def umask(mask: int) -> int: ... def uname() -> Tuple[str, str, str, str, str]: ... def unlink(path: unicode) -> None: ... def unsetenv(varname: str) -> None: ... def urandom(n: int) -> str: ... def utime(path: unicode, times: Optional[Tuple[int, int]]) -> None: ... def wait() -> int: ... _r = Tuple[float, float, int, int, int, int, int, int, int, int, int, int, int, int, int, int] def wait3(options: int) -> Tuple[int, int, _r]: ... def wait4(pid: int, options: int) -> Tuple[int, int, _r]: ... def waitpid(pid: int, options: int) -> int: ... def write(fd: int, str: str) -> int: ... mypy-0.761/mypy/typeshed/stdlib/2/random.pyi0000644€tŠÔÚ€2›s®0000000635713576752252025200 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for random # Ron Murawski # Updated by Jukka Lehtosalo # based on https://docs.python.org/2/library/random.html # ----- random classes ----- import _random from typing import AbstractSet, Any, Callable, Iterator, List, Protocol, Sequence, TypeVar, Union, overload _T = TypeVar("_T") _T_co = TypeVar('_T_co', covariant=True) class _Sampleable(Protocol[_T_co]): def __iter__(self) -> Iterator[_T_co]: ... def __len__(self) -> int: ... class Random(_random.Random): def __init__(self, x: object = ...) -> None: ... def seed(self, x: object = ...) -> None: ... def getstate(self) -> _random._State: ... def setstate(self, state: _random._State) -> None: ... def jumpahead(self, n: int) -> None: ... def getrandbits(self, k: int) -> int: ... 
@overload def randrange(self, stop: int) -> int: ... @overload def randrange(self, start: int, stop: int, step: int = ...) -> int: ... def randint(self, a: int, b: int) -> int: ... def choice(self, seq: Sequence[_T]) -> _T: ... def shuffle(self, x: List[Any], random: Callable[[], None] = ...) -> None: ... def sample(self, population: _Sampleable[_T], k: int) -> List[_T]: ... def random(self) -> float: ... def uniform(self, a: float, b: float) -> float: ... def triangular(self, low: float = ..., high: float = ..., mode: float = ...) -> float: ... def betavariate(self, alpha: float, beta: float) -> float: ... def expovariate(self, lambd: float) -> float: ... def gammavariate(self, alpha: float, beta: float) -> float: ... def gauss(self, mu: float, sigma: float) -> float: ... def lognormvariate(self, mu: float, sigma: float) -> float: ... def normalvariate(self, mu: float, sigma: float) -> float: ... def vonmisesvariate(self, mu: float, kappa: float) -> float: ... def paretovariate(self, alpha: float) -> float: ... def weibullvariate(self, alpha: float, beta: float) -> float: ... # SystemRandom is not implemented for all OS's; good on Windows & Linux class SystemRandom(Random): ... # ----- random function stubs ----- def seed(x: object = ...) -> None: ... def getstate() -> object: ... def setstate(state: object) -> None: ... def jumpahead(n: int) -> None: ... def getrandbits(k: int) -> int: ... @overload def randrange(stop: int) -> int: ... @overload def randrange(start: int, stop: int, step: int = ...) -> int: ... def randint(a: int, b: int) -> int: ... def choice(seq: Sequence[_T]) -> _T: ... def shuffle(x: List[Any], random: Callable[[], float] = ...) -> None: ... def sample(population: _Sampleable[_T], k: int) -> List[_T]: ... def random() -> float: ... def uniform(a: float, b: float) -> float: ... def triangular(low: float = ..., high: float = ..., mode: float = ...) -> float: ... def betavariate(alpha: float, beta: float) -> float: ... 
def expovariate(lambd: float) -> float: ... def gammavariate(alpha: float, beta: float) -> float: ... def gauss(mu: float, sigma: float) -> float: ... def lognormvariate(mu: float, sigma: float) -> float: ... def normalvariate(mu: float, sigma: float) -> float: ... def vonmisesvariate(mu: float, kappa: float) -> float: ... def paretovariate(alpha: float) -> float: ... def weibullvariate(alpha: float, beta: float) -> float: ... mypy-0.761/mypy/typeshed/stdlib/2/re.pyi0000644€tŠÔÚ€2›s®0000000733413576752252024322 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for re # Ron Murawski # 'bytes' support added by Jukka Lehtosalo # based on: http: //docs.python.org/2.7/library/re.html from typing import ( List, Iterator, overload, Callable, Tuple, Sequence, Dict, Generic, AnyStr, Match, Pattern, Any, Optional, Union ) # ----- re variables and constants ----- DEBUG: int I: int IGNORECASE: int L: int LOCALE: int M: int MULTILINE: int S: int DOTALL: int X: int VERBOSE: int U: int UNICODE: int T: int TEMPLATE: int class error(Exception): ... @overload def compile(pattern: AnyStr, flags: int = ...) -> Pattern[AnyStr]: ... @overload def compile(pattern: Pattern[AnyStr], flags: int = ...) -> Pattern[AnyStr]: ... @overload def search(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> Optional[Match[AnyStr]]: ... @overload def search(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, flags: int = ...) -> Optional[Match[AnyStr]]: ... @overload def match(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> Optional[Match[AnyStr]]: ... @overload def match(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, flags: int = ...) -> Optional[Match[AnyStr]]: ... @overload def split(pattern: Union[str, unicode], string: AnyStr, maxsplit: int = ..., flags: int = ...) -> List[AnyStr]: ... @overload def split(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, maxsplit: int = ..., flags: int = ...) -> List[AnyStr]: ... 
@overload def findall(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> List[Any]: ... @overload def findall(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, flags: int = ...) -> List[Any]: ... # Return an iterator yielding match objects over all non-overlapping matches # for the RE pattern in string. The string is scanned left-to-right, and # matches are returned in the order found. Empty matches are included in the # result unless they touch the beginning of another match. @overload def finditer(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> Iterator[Match[AnyStr]]: ... @overload def finditer(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, flags: int = ...) -> Iterator[Match[AnyStr]]: ... @overload def sub(pattern: Union[str, unicode], repl: AnyStr, string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ... @overload def sub(pattern: Union[str, unicode], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ... @overload def sub(pattern: Union[Pattern[str], Pattern[unicode]], repl: AnyStr, string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ... @overload def sub(pattern: Union[Pattern[str], Pattern[unicode]], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ... @overload def subn(pattern: Union[str, unicode], repl: AnyStr, string: AnyStr, count: int = ..., flags: int = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: Union[str, unicode], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: int = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: Union[Pattern[str], Pattern[unicode]], repl: AnyStr, string: AnyStr, count: int = ..., flags: int = ...) -> Tuple[AnyStr, int]: ... 
@overload def subn(pattern: Union[Pattern[str], Pattern[unicode]], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: int = ...) -> Tuple[AnyStr, int]: ... def escape(string: AnyStr) -> AnyStr: ... def purge() -> None: ... def template(pattern: Union[AnyStr, Pattern[AnyStr]], flags: int = ...) -> Pattern[AnyStr]: ... mypy-0.761/mypy/typeshed/stdlib/2/repr.pyi0000644€tŠÔÚ€2›s®0000000210613576752252024654 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List class Repr: maxarray: int maxdeque: int maxdict: int maxfrozenset: int maxlevel: int maxlist: int maxlong: int maxother: int maxset: int maxstring: int maxtuple: int def __init__(self) -> None: ... def _repr_iterable(self, x, level: complex, left, right, maxiter, trail=...) -> str: ... def repr(self, x) -> str: ... def repr1(self, x, level: complex) -> str: ... def repr_array(self, x, level: complex) -> str: ... def repr_deque(self, x, level: complex) -> str: ... def repr_dict(self, x, level: complex) -> str: ... def repr_frozenset(self, x, level: complex) -> str: ... def repr_instance(self, x, level: complex) -> str: ... def repr_list(self, x, level: complex) -> str: ... def repr_long(self, x, level: complex) -> str: ... def repr_set(self, x, level: complex) -> str: ... def repr_str(self, x, level: complex) -> str: ... def repr_tuple(self, x, level: complex) -> str: ... def _possibly_sorted(x) -> List[Any]: ... aRepr: Repr def repr(x) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/resource.pyi0000644€tŠÔÚ€2›s®0000000155413576752252025541 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, NamedTuple class error(Exception): ... RLIM_INFINITY: int def getrlimit(resource: int) -> Tuple[int, int]: ... def setrlimit(resource: int, limits: Tuple[int, int]) -> None: ... 
RLIMIT_CORE: int RLIMIT_CPU: int RLIMIT_FSIZE: int RLIMIT_DATA: int RLIMIT_STACK: int RLIMIT_RSS: int RLIMIT_NPROC: int RLIMIT_NOFILE: int RLIMIT_OFILE: int RLIMIT_MEMLOCK: int RLIMIT_VMEM: int RLIMIT_AS: int class _RUsage(NamedTuple): ru_utime: float ru_stime: float ru_maxrss: int ru_ixrss: int ru_idrss: int ru_isrss: int ru_minflt: int ru_majflt: int ru_nswap: int ru_inblock: int ru_oublock: int ru_msgsnd: int ru_msgrcv: int ru_nsignals: int ru_nvcsw: int ru_nivcsw: int def getrusage(who: int) -> _RUsage: ... def getpagesize() -> int: ... RUSAGE_SELF: int RUSAGE_CHILDREN: int RUSAGE_BOTH: int mypy-0.761/mypy/typeshed/stdlib/2/rfc822.pyi0000644€tŠÔÚ€2›s®0000000427213576752252024720 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for rfc822 (Python 2) # # Based on stub generated by stubgen. from typing import Any, Optional class Message: fp: Any seekable: Any startofheaders: Any startofbody: Any def __init__(self, fp, seekable: int = ...): ... def rewindbody(self): ... dict: Any unixfrom: Any headers: Any status: Any def readheaders(self): ... def isheader(self, line): ... def islast(self, line): ... def iscomment(self, line): ... def getallmatchingheaders(self, name): ... def getfirstmatchingheader(self, name): ... def getrawheader(self, name): ... def getheader(self, name, default: Optional[Any] = ...): ... get: Any def getheaders(self, name): ... def getaddr(self, name): ... def getaddrlist(self, name): ... def getdate(self, name): ... def getdate_tz(self, name): ... def __len__(self): ... def __getitem__(self, name): ... def __setitem__(self, name, value): ... def __delitem__(self, name): ... def setdefault(self, name, default=...): ... def has_key(self, name): ... def __contains__(self, name): ... def __iter__(self): ... def keys(self): ... def values(self): ... def items(self): ... class AddrlistClass: specials: Any pos: Any LWS: Any CR: Any atomends: Any phraseends: Any field: Any commentlist: Any def __init__(self, field): ... def gotonext(self): ... 
def getaddrlist(self): ... def getaddress(self): ... def getrouteaddr(self): ... def getaddrspec(self): ... def getdomain(self): ... def getdelimited(self, beginchar, endchars, allowcomments: int = ...): ... def getquote(self): ... def getcomment(self): ... def getdomainliteral(self): ... def getatom(self, atomends: Optional[Any] = ...): ... def getphraselist(self): ... class AddressList(AddrlistClass): addresslist: Any def __init__(self, field): ... def __len__(self): ... def __add__(self, other): ... def __iadd__(self, other): ... def __sub__(self, other): ... def __isub__(self, other): ... def __getitem__(self, index): ... def parsedate_tz(data): ... def parsedate(data): ... def mktime_tz(data): ... mypy-0.761/mypy/typeshed/stdlib/2/robotparser.pyi0000644€tŠÔÚ€2›s®0000000034613576752252026252 0ustar jukkaDROPBOX\Domain Users00000000000000class RobotFileParser: def set_url(self, url: str): ... def read(self): ... def parse(self, lines: str): ... def can_fetch(self, user_agent: str, url: str): ... def mtime(self): ... def modified(self): ... mypy-0.761/mypy/typeshed/stdlib/2/runpy.pyi0000644€tŠÔÚ€2›s®0000000103513576752252025061 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class _TempModule: mod_name: Any module: Any def __init__(self, mod_name): ... def __enter__(self): ... def __exit__(self, *args): ... class _ModifiedArgv0: value: Any def __init__(self, value): ... def __enter__(self): ... def __exit__(self, *args): ... def run_module(mod_name, init_globals: Optional[Any] = ..., run_name: Optional[Any] = ..., alter_sys: bool = ...): ... def run_path(path_name, init_globals: Optional[Any] = ..., run_name: Optional[Any] = ...): ... 
mypy-0.761/mypy/typeshed/stdlib/2/sets.pyi0000644€tŠÔÚ€2›s®0000000567313576752252024676 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sets (Python 2) from typing import Any, Callable, Hashable, Iterable, Iterator, MutableMapping, Optional, TypeVar, Union _T = TypeVar('_T') _Setlike = Union[BaseSet[_T], Iterable[_T]] _SelfT = TypeVar('_SelfT') class BaseSet(Iterable[_T]): def __init__(self) -> None: ... def __len__(self) -> int: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __iter__(self) -> Iterator[_T]: ... def __cmp__(self, other: Any) -> int: ... def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... def copy(self: _SelfT) -> _SelfT: ... def __copy__(self: _SelfT) -> _SelfT: ... def __deepcopy__(self: _SelfT, memo: MutableMapping[int, BaseSet[_T]]) -> _SelfT: ... def __or__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def union(self: _SelfT, other: _Setlike[_T]) -> _SelfT: ... def __and__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def intersection(self: _SelfT, other: _Setlike[Any]) -> _SelfT: ... def __xor__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def symmetric_difference(self: _SelfT, other: _Setlike[_T]) -> _SelfT: ... def __sub__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def difference(self: _SelfT, other: _Setlike[Any]) -> _SelfT: ... def __contains__(self, element: Any) -> bool: ... def issubset(self, other: BaseSet[_T]) -> bool: ... def issuperset(self, other: BaseSet[_T]) -> bool: ... def __le__(self, other: BaseSet[_T]) -> bool: ... def __ge__(self, other: BaseSet[_T]) -> bool: ... def __lt__(self, other: BaseSet[_T]) -> bool: ... def __gt__(self, other: BaseSet[_T]) -> bool: ... class ImmutableSet(BaseSet[_T], Hashable): def __init__(self, iterable: Optional[_Setlike[_T]] = ...) -> None: ... def __hash__(self) -> int: ... class Set(BaseSet[_T]): def __init__(self, iterable: Optional[_Setlike[_T]] = ...) -> None: ... 
def __ior__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def union_update(self, other: _Setlike[_T]) -> None: ... def __iand__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def intersection_update(self, other: _Setlike[Any]) -> None: ... def __ixor__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def symmetric_difference_update(self, other: _Setlike[_T]) -> None: ... def __isub__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def difference_update(self, other: _Setlike[Any]) -> None: ... def update(self, iterable: _Setlike[_T]) -> None: ... def clear(self) -> None: ... def add(self, element: _T) -> None: ... def remove(self, element: _T) -> None: ... def discard(self, element: _T) -> None: ... def pop(self) -> _T: ... def __as_immutable__(self) -> ImmutableSet[_T]: ... def __as_temporarily_immutable__(self) -> _TemporarilyImmutableSet[_T]: ... class _TemporarilyImmutableSet(BaseSet[_T]): def __init__(self, set: BaseSet[_T]) -> None: ... def __hash__(self) -> int: ... mypy-0.761/mypy/typeshed/stdlib/2/sha.pyi0000644€tŠÔÚ€2›s®0000000042513576752252024461 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for Python 2.7 sha stdlib module class sha(object): def update(self, arg: str) -> None: ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def copy(self) -> sha: ... def new(string: str = ...) -> sha: ... blocksize: int digest_size: int mypy-0.761/mypy/typeshed/stdlib/2/shelve.pyi0000644€tŠÔÚ€2›s®0000000306113576752252025173 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterator, List, Optional, Tuple import collections class Shelf(collections.MutableMapping[Any, Any]): def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ... def __iter__(self) -> Iterator[str]: ... def keys(self) -> List[Any]: ... def __len__(self) -> int: ... def has_key(self, key: Any) -> bool: ... def __contains__(self, key: Any) -> bool: ... 
def get(self, key: Any, default: Any = ...) -> Any: ... def __getitem__(self, key: Any) -> Any: ... def __setitem__(self, key: Any, value: Any) -> None: ... def __delitem__(self, key: Any) -> None: ... def __enter__(self) -> Shelf: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... def close(self) -> None: ... def __del__(self) -> None: ... def sync(self) -> None: ... class BsdDbShelf(Shelf): def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ... def set_location(self, key: Any) -> Tuple[str, Any]: ... def next(self) -> Tuple[str, Any]: ... def previous(self) -> Tuple[str, Any]: ... def first(self) -> Tuple[str, Any]: ... def last(self) -> Tuple[str, Any]: ... class DbfilenameShelf(Shelf): def __init__(self, filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> None: ... def open(filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> DbfilenameShelf: ... mypy-0.761/mypy/typeshed/stdlib/2/shlex.pyi0000644€tŠÔÚ€2›s®0000000175013576752252025033 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, List, Optional, TypeVar def split(s: Optional[str], comments: bool = ..., posix: bool = ...) -> List[str]: ... _SLT = TypeVar('_SLT', bound=shlex) class shlex: def __init__(self, instream: IO[Any] = ..., infile: IO[Any] = ..., posix: bool = ...) -> None: ... def __iter__(self: _SLT) -> _SLT: ... def next(self) -> str: ... def get_token(self) -> Optional[str]: ... def push_token(self, _str: str) -> None: ... def read_token(self) -> str: ... def sourcehook(self, filename: str) -> None: ... def push_source(self, stream: IO[Any], filename: str = ...) -> None: ... def pop_source(self) -> IO[Any]: ... def error_leader(self, file: str = ..., line: int = ...) -> str: ... 
commenters: str wordchars: str whitespace: str escape: str quotes: str escapedquotes: str whitespace_split: bool infile: IO[Any] source: Optional[str] debug: int lineno: int token: Any eof: Optional[str] mypy-0.761/mypy/typeshed/stdlib/2/signal.pyi0000644€tŠÔÚ€2›s®0000000304313576752252025162 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Callable, Any, Tuple, Union from types import FrameType SIG_DFL: int = ... SIG_IGN: int = ... ITIMER_REAL: int = ... ITIMER_VIRTUAL: int = ... ITIMER_PROF: int = ... NSIG: int = ... SIGABRT: int = ... SIGALRM: int = ... SIGBREAK: int = ... # Windows SIGBUS: int = ... SIGCHLD: int = ... SIGCLD: int = ... SIGCONT: int = ... SIGEMT: int = ... SIGFPE: int = ... SIGHUP: int = ... SIGILL: int = ... SIGINFO: int = ... SIGINT: int = ... SIGIO: int = ... SIGIOT: int = ... SIGKILL: int = ... SIGPIPE: int = ... SIGPOLL: int = ... SIGPROF: int = ... SIGPWR: int = ... SIGQUIT: int = ... SIGRTMAX: int = ... SIGRTMIN: int = ... SIGSEGV: int = ... SIGSTOP: int = ... SIGSYS: int = ... SIGTERM: int = ... SIGTRAP: int = ... SIGTSTP: int = ... SIGTTIN: int = ... SIGTTOU: int = ... SIGURG: int = ... SIGUSR1: int = ... SIGUSR2: int = ... SIGVTALRM: int = ... SIGWINCH: int = ... SIGXCPU: int = ... SIGXFSZ: int = ... # Windows CTRL_C_EVENT: int = ... CTRL_BREAK_EVENT: int = ... class ItimerError(IOError): ... _HANDLER = Union[Callable[[int, FrameType], None], int, None] def alarm(time: int) -> int: ... def getsignal(signalnum: int) -> _HANDLER: ... def pause() -> None: ... def setitimer(which: int, seconds: float, interval: float = ...) -> Tuple[float, float]: ... def getitimer(which: int) -> Tuple[float, float]: ... def set_wakeup_fd(fd: int) -> int: ... def siginterrupt(signalnum: int, flag: bool) -> None: ... def signal(signalnum: int, handler: _HANDLER) -> _HANDLER: ... def default_int_handler(signum: int, frame: FrameType) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2/smtplib.pyi0000644€tŠÔÚ€2›s®0000000475613576752252025373 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class SMTPException(Exception): ... class SMTPServerDisconnected(SMTPException): ... class SMTPResponseException(SMTPException): smtp_code: Any smtp_error: Any args: Any def __init__(self, code, msg) -> None: ... class SMTPSenderRefused(SMTPResponseException): smtp_code: Any smtp_error: Any sender: Any args: Any def __init__(self, code, msg, sender) -> None: ... class SMTPRecipientsRefused(SMTPException): recipients: Any args: Any def __init__(self, recipients) -> None: ... class SMTPDataError(SMTPResponseException): ... class SMTPConnectError(SMTPResponseException): ... class SMTPHeloError(SMTPResponseException): ... class SMTPAuthenticationError(SMTPResponseException): ... def quoteaddr(addr): ... def quotedata(data): ... class SSLFakeFile: sslobj: Any def __init__(self, sslobj) -> None: ... def readline(self, size=...): ... def close(self): ... class SMTP: debuglevel: Any file: Any helo_resp: Any ehlo_msg: Any ehlo_resp: Any does_esmtp: Any default_port: Any timeout: Any esmtp_features: Any local_hostname: Any def __init__(self, host: str = ..., port: int = ..., local_hostname=..., timeout=...) -> None: ... def set_debuglevel(self, debuglevel): ... sock: Any def connect(self, host=..., port=...): ... def send(self, str): ... def putcmd(self, cmd, args=...): ... def getreply(self): ... def docmd(self, cmd, args=...): ... def helo(self, name=...): ... def ehlo(self, name=...): ... def has_extn(self, opt): ... def help(self, args=...): ... def rset(self): ... def noop(self): ... def mail(self, sender, options=...): ... def rcpt(self, recip, options=...): ... def data(self, msg): ... def verify(self, address): ... vrfy: Any def expn(self, address): ... def ehlo_or_helo_if_needed(self): ... def login(self, user, password): ... def starttls(self, keyfile=..., certfile=...): ... 
def sendmail(self, from_addr, to_addrs, msg, mail_options=..., rcpt_options=...): ... def close(self): ... def quit(self): ... class SMTP_SSL(SMTP): default_port: Any keyfile: Any certfile: Any def __init__(self, host=..., port=..., local_hostname=..., keyfile=..., certfile=..., timeout=...) -> None: ... class LMTP(SMTP): ehlo_msg: Any def __init__(self, host=..., port=..., local_hostname=...) -> None: ... sock: Any def connect(self, host=..., port=...): ... mypy-0.761/mypy/typeshed/stdlib/2/spwd.pyi0000644€tŠÔÚ€2›s®0000000046413576752252024666 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, NamedTuple class struct_spwd(NamedTuple): sp_nam: str sp_pwd: str sp_lstchg: int sp_min: int sp_max: int sp_warn: int sp_inact: int sp_expire: int sp_flag: int def getspall() -> List[struct_spwd]: ... def getspnam(name: str) -> struct_spwd: ... mypy-0.761/mypy/typeshed/stdlib/2/sre_constants.pyi0000644€tŠÔÚ€2›s®0000000342613576752252026577 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/sre_constants.py from typing import Dict, List, TypeVar MAGIC: int MAXREPEAT: int class error(Exception): ... 
FAILURE: str SUCCESS: str ANY: str ANY_ALL: str ASSERT: str ASSERT_NOT: str AT: str BIGCHARSET: str BRANCH: str CALL: str CATEGORY: str CHARSET: str GROUPREF: str GROUPREF_IGNORE: str GROUPREF_EXISTS: str IN: str IN_IGNORE: str INFO: str JUMP: str LITERAL: str LITERAL_IGNORE: str MARK: str MAX_REPEAT: str MAX_UNTIL: str MIN_REPEAT: str MIN_UNTIL: str NEGATE: str NOT_LITERAL: str NOT_LITERAL_IGNORE: str RANGE: str REPEAT: str REPEAT_ONE: str SUBPATTERN: str MIN_REPEAT_ONE: str AT_BEGINNING: str AT_BEGINNING_LINE: str AT_BEGINNING_STRING: str AT_BOUNDARY: str AT_NON_BOUNDARY: str AT_END: str AT_END_LINE: str AT_END_STRING: str AT_LOC_BOUNDARY: str AT_LOC_NON_BOUNDARY: str AT_UNI_BOUNDARY: str AT_UNI_NON_BOUNDARY: str CATEGORY_DIGIT: str CATEGORY_NOT_DIGIT: str CATEGORY_SPACE: str CATEGORY_NOT_SPACE: str CATEGORY_WORD: str CATEGORY_NOT_WORD: str CATEGORY_LINEBREAK: str CATEGORY_NOT_LINEBREAK: str CATEGORY_LOC_WORD: str CATEGORY_LOC_NOT_WORD: str CATEGORY_UNI_DIGIT: str CATEGORY_UNI_NOT_DIGIT: str CATEGORY_UNI_SPACE: str CATEGORY_UNI_NOT_SPACE: str CATEGORY_UNI_WORD: str CATEGORY_UNI_NOT_WORD: str CATEGORY_UNI_LINEBREAK: str CATEGORY_UNI_NOT_LINEBREAK: str _T = TypeVar('_T') def makedict(list: List[_T]) -> Dict[_T, int]: ... 
OP_IGNORE: Dict[str, str] AT_MULTILINE: Dict[str, str] AT_LOCALE: Dict[str, str] AT_UNICODE: Dict[str, str] CH_LOCALE: Dict[str, str] CH_UNICODE: Dict[str, str] SRE_FLAG_TEMPLATE: int SRE_FLAG_IGNORECASE: int SRE_FLAG_LOCALE: int SRE_FLAG_MULTILINE: int SRE_FLAG_DOTALL: int SRE_FLAG_UNICODE: int SRE_FLAG_VERBOSE: int SRE_FLAG_DEBUG: int SRE_INFO_PREFIX: int SRE_INFO_LITERAL: int SRE_INFO_CHARSET: int mypy-0.761/mypy/typeshed/stdlib/2/sre_parse.pyi0000644€tŠÔÚ€2›s®0000000451113576752252025671 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/sre_parse.py from typing import Any, Dict, Iterable, List, Match, Optional, Pattern as _Pattern, Set, Tuple, Union SPECIAL_CHARS: str REPEAT_CHARS: str DIGITS: Set[Any] OCTDIGITS: Set[Any] HEXDIGITS: Set[Any] WHITESPACE: Set[Any] ESCAPES: Dict[str, Tuple[str, int]] CATEGORIES: Dict[str, Union[Tuple[str, str], Tuple[str, List[Tuple[str, str]]]]] FLAGS: Dict[str, int] class Pattern: flags: int open: List[int] groups: int groupdict: Dict[str, int] lookbehind: int def __init__(self) -> None: ... def opengroup(self, name: str = ...) -> int: ... def closegroup(self, gid: int) -> None: ... def checkgroup(self, gid: int) -> bool: ... _OpSubpatternType = Tuple[Optional[int], int, int, SubPattern] _OpGroupRefExistsType = Tuple[int, SubPattern, SubPattern] _OpInType = List[Tuple[str, int]] _OpBranchType = Tuple[None, List[SubPattern]] _AvType = Union[_OpInType, _OpBranchType, Iterable[SubPattern], _OpGroupRefExistsType, _OpSubpatternType] _CodeType = Union[str, _AvType] class SubPattern: pattern: str data: List[_CodeType] width: Optional[int] def __init__(self, pattern, data: List[_CodeType] = ...) -> None: ... def dump(self, level: int = ...) -> None: ... def __len__(self) -> int: ... def __delitem__(self, index: Union[int, slice]) -> None: ... def __getitem__(self, index: Union[int, slice]) -> Union[SubPattern, _CodeType]: ... 
def __setitem__(self, index: Union[int, slice], code: _CodeType): ... def insert(self, index, code: _CodeType) -> None: ... def append(self, code: _CodeType) -> None: ... def getwidth(self) -> int: ... class Tokenizer: string: str index: int def __init__(self, string: str) -> None: ... def match(self, char: str, skip: int = ...) -> int: ... def get(self) -> Optional[str]: ... def tell(self) -> Tuple[int, Optional[str]]: ... def seek(self, index: int) -> None: ... def isident(char: str) -> bool: ... def isdigit(char: str) -> bool: ... def isname(name: str) -> bool: ... def parse(str: str, flags: int = ..., pattern: Pattern = ...) -> SubPattern: ... _Template = Tuple[List[Tuple[int, int]], List[Optional[int]]] def parse_template(source: str, pattern: _Pattern[Any]) -> _Template: ... def expand_template(template: _Template, match: Match[Any]) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/stat.pyi0000644€tŠÔÚ€2›s®0000000174113576752252024663 0ustar jukkaDROPBOX\Domain Users00000000000000def S_ISDIR(mode: int) -> bool: ... def S_ISCHR(mode: int) -> bool: ... def S_ISBLK(mode: int) -> bool: ... def S_ISREG(mode: int) -> bool: ... def S_ISFIFO(mode: int) -> bool: ... def S_ISLNK(mode: int) -> bool: ... def S_ISSOCK(mode: int) -> bool: ... def S_IMODE(mode: int) -> int: ... def S_IFMT(mode: int) -> int: ... 
ST_MODE: int ST_INO: int ST_DEV: int ST_NLINK: int ST_UID: int ST_GID: int ST_SIZE: int ST_ATIME: int ST_MTIME: int ST_CTIME: int S_IFSOCK: int S_IFLNK: int S_IFREG: int S_IFBLK: int S_IFDIR: int S_IFCHR: int S_IFIFO: int S_ISUID: int S_ISGID: int S_ISVTX: int S_IRWXU: int S_IRUSR: int S_IWUSR: int S_IXUSR: int S_IRWXG: int S_IRGRP: int S_IWGRP: int S_IXGRP: int S_IRWXO: int S_IROTH: int S_IWOTH: int S_IXOTH: int S_ENFMT: int S_IREAD: int S_IWRITE: int S_IEXEC: int UF_NODUMP: int UF_IMMUTABLE: int UF_APPEND: int UF_OPAQUE: int UF_NOUNLINK: int UF_COMPRESSED: int UF_HIDDEN: int SF_ARCHIVED: int SF_IMMUTABLE: int SF_APPEND: int SF_NOUNLINK: int SF_SNAPSHOT: int mypy-0.761/mypy/typeshed/stdlib/2/string.pyi0000644€tŠÔÚ€2›s®0000000717213576752252025222 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for string # Based on http://docs.python.org/3.2/library/string.html from typing import Any, AnyStr, Iterable, List, Mapping, Optional, overload, Sequence, Text, Tuple, Union ascii_letters: str ascii_lowercase: str ascii_uppercase: str digits: str hexdigits: str letters: str lowercase: str octdigits: str punctuation: str printable: str uppercase: str whitespace: str def capwords(s: AnyStr, sep: AnyStr = ...) -> AnyStr: ... # TODO: originally named 'from' def maketrans(_from: str, to: str) -> str: ... def atof(s: unicode) -> float: ... def atoi(s: unicode, base: int = ...) -> int: ... def atol(s: unicode, base: int = ...) -> int: ... def capitalize(word: AnyStr) -> AnyStr: ... def find(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rfind(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def index(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rindex(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def count(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def lower(s: AnyStr) -> AnyStr: ... 
def split(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def rsplit(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def splitfields(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def join(words: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ... def joinfields(word: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ... def lstrip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ... def rstrip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ... def strip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ... def swapcase(s: AnyStr) -> AnyStr: ... def translate(s: str, table: str, deletechars: str = ...) -> str: ... def upper(s: AnyStr) -> AnyStr: ... def ljust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def rjust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def center(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def zfill(s: AnyStr, width: int) -> AnyStr: ... def replace(s: AnyStr, old: AnyStr, new: AnyStr, maxreplace: int = ...) -> AnyStr: ... class Template: template: Text def __init__(self, template: Text) -> None: ... @overload def substitute(self, mapping: Union[Mapping[str, str], Mapping[unicode, str]] = ..., **kwds: str) -> str: ... @overload def substitute(self, mapping: Union[Mapping[str, Text], Mapping[unicode, Text]] = ..., **kwds: Text) -> Text: ... @overload def safe_substitute(self, mapping: Union[Mapping[str, str], Mapping[unicode, str]] = ..., **kwds: str) -> str: ... @overload def safe_substitute(self, mapping: Union[Mapping[str, Text], Mapping[unicode, Text]], **kwds: Text) -> Text: ... # TODO(MichalPokorny): This is probably badly and/or loosely typed. class Formatter(object): def format(self, format_string: str, *args, **kwargs) -> str: ... def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... def parse(self, format_string: str) -> Iterable[Tuple[str, str, str, str]]: ... 
def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def get_value(self, key: Union[int, str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def check_unused_args(self, used_args: Sequence[Union[int, str]], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... def format_field(self, value: Any, format_spec: str) -> Any: ... def convert_field(self, value: Any, conversion: str) -> Any: ... mypy-0.761/mypy/typeshed/stdlib/2/stringold.pyi0000644€tŠÔÚ€2›s®0000000403513576752252025714 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/stringold.py from typing import AnyStr, Iterable, List, Optional, Type whitespace: str lowercase: str uppercase: str letters: str digits: str hexdigits: str octdigits: str _idmap: str _idmapL: Optional[List[str]] index_error = ValueError atoi_error = ValueError atof_error = ValueError atol_error = ValueError def lower(s: AnyStr) -> AnyStr: ... def upper(s: AnyStr) -> AnyStr: ... def swapcase(s: AnyStr) -> AnyStr: ... def strip(s: AnyStr) -> AnyStr: ... def lstrip(s: AnyStr) -> AnyStr: ... def rstrip(s: AnyStr) -> AnyStr: ... def split(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def splitfields(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def join(words: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ... def joinfields(words: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ... def index(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rindex(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def count(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def find(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rfind(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def atof(s: unicode) -> float: ... def atoi(s: unicode, base: int = ...) -> int: ... 
def atol(s: unicode, base: int = ...) -> long: ... def ljust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def rjust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def center(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def zfill(s: AnyStr, width: int) -> AnyStr: ... def expandtabs(s: AnyStr, tabsize: int = ...) -> AnyStr: ... def translate(s: str, table: str, deletions: str = ...) -> str: ... def capitalize(s: AnyStr) -> AnyStr: ... def capwords(s: AnyStr, sep: AnyStr = ...) -> AnyStr: ... def maketrans(fromstr: str, tostr: str) -> str: ... def replace(s: AnyStr, old: AnyStr, new: AnyStr, maxreplace: int = ...) -> AnyStr: ... mypy-0.761/mypy/typeshed/stdlib/2/strop.pyi0000644€tŠÔÚ€2›s®0000000225613576752252025061 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'strop' module.""" from typing import List, Sequence lowercase: str uppercase: str whitespace: str def atof(a: str) -> float: ... def atoi(a: str, base: int = ...) -> int: ... def atol(a: str, base: int = ...) -> long: ... def capitalize(s: str) -> str: ... def count(s: str, sub: str, start: int = ..., end: int = ...) -> int: ... def expandtabs(string: str, tabsize: int = ...) -> str: ... def find(s: str, sub: str, start: int = ..., end: int = ...) -> int: ... def join(list: Sequence[str], sep: str = ...) -> str: ... def joinfields(list: Sequence[str], sep: str = ...) -> str: ... def lower(s: str) -> str: ... def lstrip(s: str) -> str: ... def maketrans(frm: str, to: str) -> str: ... def replace(s: str, old: str, new: str, maxsplit: int = ...) -> str: ... def rfind(s: str, sub: str, start: int = ..., end: int = ...) -> int: ... def rstrip(s: str) -> str: ... def split(s: str, sep: str, maxsplit: int = ...) -> List[str]: ... def splitfields(s: str, sep: str, maxsplit: int = ...) -> List[str]: ... def strip(s: str) -> str: ... def swapcase(s: str) -> str: ... def translate(s: str, table: str, deletechars: str = ...) -> str: ... 
def upper(s: str) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/subprocess.pyi0000644€tŠÔÚ€2›s®0000000735313576752252026105 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for subprocess # Based on http://docs.python.org/2/library/subprocess.html and Python 3 stub from typing import ( Sequence, Any, Mapping, Callable, Tuple, IO, Union, Optional, List, Text, TypeVar, Generic, ) _FILE = Union[None, int, IO[Any]] _TXT = Union[bytes, Text] _CMD = Union[_TXT, Sequence[_TXT]] _ENV = Union[Mapping[bytes, _TXT], Mapping[Text, _TXT]] # Same args as Popen.__init__ def call(args: _CMD, bufsize: int = ..., executable: _TXT = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: _TXT = ..., env: _ENV = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ...) -> int: ... def check_call(args: _CMD, bufsize: int = ..., executable: _TXT = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: _TXT = ..., env: _ENV = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ...) -> int: ... # Same args as Popen.__init__ except for stdout def check_output(args: _CMD, bufsize: int = ..., executable: _TXT = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: _TXT = ..., env: _ENV = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ...) -> bytes: ... PIPE: int STDOUT: int class CalledProcessError(Exception): returncode: int # morally: _CMD cmd: Any # morally: Optional[bytes] output: bytes def __init__(self, returncode: int, cmd: _CMD, output: Optional[bytes] = ...) -> None: ... 
# We use a dummy type variable used to make Popen generic like it is in python 3 _T = TypeVar('_T', bound=bytes) class Popen(Generic[_T]): stdin: Optional[IO[bytes]] stdout: Optional[IO[bytes]] stderr: Optional[IO[bytes]] pid: int returncode: int def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_TXT] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_TXT] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ...) -> Popen[bytes]: ... def poll(self) -> int: ... def wait(self) -> int: ... # morally: -> Tuple[Optional[bytes], Optional[bytes]] def communicate(self, input: Optional[_TXT] = ...) -> Tuple[bytes, bytes]: ... def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def list2cmdline(seq: Sequence[str]) -> str: ... # undocumented # Windows-only: STARTUPINFO etc. 
STD_INPUT_HANDLE: Any STD_OUTPUT_HANDLE: Any STD_ERROR_HANDLE: Any SW_HIDE: Any STARTF_USESTDHANDLES: Any STARTF_USESHOWWINDOW: Any CREATE_NEW_CONSOLE: Any CREATE_NEW_PROCESS_GROUP: Any mypy-0.761/mypy/typeshed/stdlib/2/symbol.pyi0000644€tŠÔÚ€2›s®0000000253413576752252025216 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for symbol (Python 2) from typing import Dict single_input: int file_input: int eval_input: int decorator: int decorators: int decorated: int funcdef: int parameters: int varargslist: int fpdef: int fplist: int stmt: int simple_stmt: int small_stmt: int expr_stmt: int augassign: int print_stmt: int del_stmt: int pass_stmt: int flow_stmt: int break_stmt: int continue_stmt: int return_stmt: int yield_stmt: int raise_stmt: int import_stmt: int import_name: int import_from: int import_as_name: int dotted_as_name: int import_as_names: int dotted_as_names: int dotted_name: int global_stmt: int exec_stmt: int assert_stmt: int compound_stmt: int if_stmt: int while_stmt: int for_stmt: int try_stmt: int with_stmt: int with_item: int except_clause: int suite: int testlist_safe: int old_test: int old_lambdef: int test: int or_test: int and_test: int not_test: int comparison: int comp_op: int expr: int xor_expr: int and_expr: int shift_expr: int arith_expr: int term: int factor: int power: int atom: int listmaker: int testlist_comp: int lambdef: int trailer: int subscriptlist: int subscript: int sliceop: int exprlist: int testlist: int dictorsetmaker: int classdef: int arglist: int argument: int list_iter: int list_for: int list_if: int comp_iter: int comp_for: int comp_if: int testlist1: int encoding_decl: int yield_expr: int sym_name: Dict[int, str] mypy-0.761/mypy/typeshed/stdlib/2/sys.pyi0000644€tŠÔÚ€2›s®0000000712213576752252024525 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stubs for the 'sys' module.""" from typing import ( IO, NoReturn, Union, List, Sequence, Any, Dict, Tuple, BinaryIO, Optional, Callable, overload, Text, Type, ) from types 
import FrameType, ModuleType, TracebackType, ClassType # The following type alias are stub-only and do not exist during runtime _ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] _OptExcInfo = Union[_ExcInfo, Tuple[None, None, None]] class _flags: bytes_warning: int debug: int division_new: int division_warning: int dont_write_bytecode: int hash_randomization: int ignore_environment: int inspect: int interactive: int no_site: int no_user_site: int optimize: int py3k_warning: int tabcheck: int unicode: int verbose: int class _float_info: max: float max_exp: int max_10_exp: int min: float min_exp: int min_10_exp: int dig: int mant_dig: int epsilon: float radix: int rounds: int class _version_info(Tuple[int, int, int, str, int]): major: int minor: int micro: int releaselevel: str serial: int _mercurial: Tuple[str, str, str] api_version: int argv: List[str] builtin_module_names: Tuple[str, ...] byteorder: str copyright: str dont_write_bytecode: bool exec_prefix: str executable: str flags: _flags float_repr_style: str hexversion: int long_info: object maxint: int maxsize: int maxunicode: int modules: Dict[str, Any] path: List[str] platform: str prefix: str py3kwarning: bool __stderr__: IO[str] __stdin__: IO[str] __stdout__: IO[str] stderr: IO[str] stdin: IO[str] stdout: IO[str] subversion: Tuple[str, str, str] version: str warnoptions: object float_info: _float_info version_info: _version_info ps1: str ps2: str last_type: type last_value: BaseException last_traceback: TracebackType # TODO precise types meta_path: List[Any] path_hooks: List[Any] path_importer_cache: Dict[str, Any] displayhook: Optional[Callable[[int], None]] excepthook: Optional[Callable[[type, BaseException, TracebackType], None]] exc_type: Optional[type] exc_value: Union[BaseException, ClassType] exc_traceback: TracebackType class _WindowsVersionType: major: Any minor: Any build: Any platform: Any service_pack: Any service_pack_major: Any service_pack_minor: Any suite_mask: Any 
product_type: Any def getwindowsversion() -> _WindowsVersionType: ... def _clear_type_cache() -> None: ... def _current_frames() -> Dict[int, FrameType]: ... def _getframe(depth: int = ...) -> FrameType: ... def call_tracing(fn: Any, args: Any) -> Any: ... def __displayhook__(value: int) -> None: ... def __excepthook__(type_: type, value: BaseException, traceback: TracebackType) -> None: ... def exc_clear() -> None: ... def exc_info() -> _OptExcInfo: ... # sys.exit() accepts an optional argument of anything printable def exit(arg: Any = ...) -> NoReturn: ... def getcheckinterval() -> int: ... # deprecated def getdefaultencoding() -> str: ... def getdlopenflags() -> int: ... def getfilesystemencoding() -> str: ... # In practice, never returns None def getrefcount(arg: Any) -> int: ... def getrecursionlimit() -> int: ... def getsizeof(obj: object, default: int = ...) -> int: ... def getprofile() -> Optional[Any]: ... def gettrace() -> Optional[Any]: ... def setcheckinterval(interval: int) -> None: ... # deprecated def setdlopenflags(n: int) -> None: ... def setdefaultencoding(encoding: Text) -> None: ... # only exists after reload(sys) def setprofile(profilefunc: Any) -> None: ... # TODO type def setrecursionlimit(limit: int) -> None: ... def settrace(tracefunc: Any) -> None: ... # TODO type mypy-0.761/mypy/typeshed/stdlib/2/tempfile.pyi0000644€tŠÔÚ€2›s®0000000725613576752252025524 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, AnyStr, IO, Iterable, Iterator, List, Optional, overload, Text, Tuple, Union from thread import LockType from random import Random TMP_MAX: int tempdir: str template: str _name_sequence: Optional[_RandomNameSequence] class _RandomNameSequence: characters: str = ... mutex: LockType @property def rng(self) -> Random: ... def __iter__(self) -> _RandomNameSequence: ... def next(self) -> str: ... # from os.path: def normcase(self, path: AnyStr) -> AnyStr: ... 
class _TemporaryFileWrapper(IO[str]): delete: bool file: IO[str] name: Any def __init__(self, file: IO[str], name: Any, delete: bool = ...) -> None: ... def __del__(self) -> None: ... def __enter__(self) -> _TemporaryFileWrapper: ... def __exit__(self, exc, value, tb) -> Optional[bool]: ... def __getattr__(self, name: unicode) -> Any: ... def close(self) -> None: ... def unlink(self, path: unicode) -> None: ... # These methods don't exist directly on this object, but # are delegated to the underlying IO object through __getattr__. # We need to add them here so that this class is concrete. def __iter__(self) -> Iterator[str]: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def next(self) -> str: ... def read(self, n: int = ...) -> str: ... def readable(self) -> bool: ... def readline(self, limit: int = ...) -> str: ... def readlines(self, hint: int = ...) -> List[str]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... def write(self, s: Text) -> int: ... def writelines(self, lines: Iterable[str]) -> None: ... # TODO text files def TemporaryFile( mode: Union[bytes, unicode] = ..., bufsize: int = ..., suffix: Union[bytes, unicode] = ..., prefix: Union[bytes, unicode] = ..., dir: Union[bytes, unicode] = ... ) -> _TemporaryFileWrapper: ... def NamedTemporaryFile( mode: Union[bytes, unicode] = ..., bufsize: int = ..., suffix: Union[bytes, unicode] = ..., prefix: Union[bytes, unicode] = ..., dir: Union[bytes, unicode] = ..., delete: bool = ... ) -> _TemporaryFileWrapper: ... def SpooledTemporaryFile( max_size: int = ..., mode: Union[bytes, unicode] = ..., buffering: int = ..., suffix: Union[bytes, unicode] = ..., prefix: Union[bytes, unicode] = ..., dir: Union[bytes, unicode] = ... ) -> _TemporaryFileWrapper: ... 
class TemporaryDirectory: name: Any def __init__(self, suffix: Union[bytes, unicode] = ..., prefix: Union[bytes, unicode] = ..., dir: Union[bytes, unicode] = ...) -> None: ... def cleanup(self) -> None: ... def __enter__(self) -> Any: ... # Can be str or unicode def __exit__(self, type, value, traceback) -> None: ... @overload def mkstemp() -> Tuple[int, str]: ... @overload def mkstemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: Optional[AnyStr] = ..., text: bool = ...) -> Tuple[int, AnyStr]: ... @overload def mkdtemp() -> str: ... @overload def mkdtemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: Optional[AnyStr] = ...) -> AnyStr: ... @overload def mktemp() -> str: ... @overload def mktemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: Optional[AnyStr] = ...) -> AnyStr: ... def gettempdir() -> str: ... def gettempprefix() -> str: ... def _candidate_tempdir_list() -> List[str]: ... def _get_candidate_names() -> Optional[_RandomNameSequence]: ... def _get_default_tempdir() -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/textwrap.pyi0000644€tŠÔÚ€2›s®0000000367013576752252025571 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr, List, Dict, Pattern class TextWrapper(object): width: int = ... initial_indent: str = ... subsequent_indent: str = ... expand_tabs: bool = ... replace_whitespace: bool = ... fix_sentence_endings: bool = ... drop_whitespace: bool = ... break_long_words: bool = ... break_on_hyphens: bool = ... # Attributes not present in documentation sentence_end_re: Pattern[str] = ... wordsep_re: Pattern[str] = ... wordsep_simple_re: Pattern[str] = ... whitespace_trans: str = ... unicode_whitespace_trans: Dict[int, int] = ... uspace: int = ... x: int = ... def __init__( self, width: int = ..., initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., drop_whitespace: bool = ..., break_on_hyphens: bool = ...) 
-> None: ... def wrap(self, text: AnyStr) -> List[AnyStr]: ... def fill(self, text: AnyStr) -> AnyStr: ... def wrap(text: AnyStr, width: int = ..., initial_indent: AnyStr = ..., subsequent_indent: AnyStr = ..., expand_tabs: bool = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., drop_whitespace: bool = ..., break_on_hyphens: bool = ...) -> List[AnyStr]: ... def fill(text: AnyStr, width: int = ..., initial_indent: AnyStr = ..., subsequent_indent: AnyStr = ..., expand_tabs: bool = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., drop_whitespace: bool = ..., break_on_hyphens: bool = ...) -> AnyStr: ... def dedent(text: AnyStr) -> AnyStr: ... mypy-0.761/mypy/typeshed/stdlib/2/thread.pyi0000644€tŠÔÚ€2›s®0000000167513576752252025165 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stubs for the "thread" module.""" from typing import Callable, Any def _count() -> int: ... class error(Exception): ... class LockType: def acquire(self, waitflag: int = ...) -> bool: ... def acquire_lock(self, waitflag: int = ...) -> bool: ... def release(self) -> None: ... def release_lock(self) -> None: ... def locked(self) -> bool: ... def locked_lock(self) -> bool: ... def __enter__(self) -> LockType: ... def __exit__(self, typ: Any, value: Any, traceback: Any) -> None: ... class _local(object): ... class _localdummy(object): ... def start_new(function: Callable[..., Any], args: Any, kwargs: Any = ...) -> int: ... def start_new_thread(function: Callable[..., Any], args: Any, kwargs: Any = ...) -> int: ... def interrupt_main() -> None: ... def exit() -> None: ... def exit_thread() -> Any: ... def allocate_lock() -> LockType: ... def get_ident() -> int: ... def stack_size(size: int = ...) -> int: ... 
mypy-0.761/mypy/typeshed/stdlib/2/toaiff.pyi0000644€tŠÔÚ€2›s®0000000052113576752252025153 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for toaiff (Python 2) # Source: https://hg.python.org/cpython/file/2.7/Lib/toaiff.py from pipes import Template from typing import Dict, List table: Dict[str, Template] t: Template uncompress: Template class error(Exception): ... def toaiff(filename: str) -> str: ... def _toaiff(filename: str, temps: List[str]) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2/tokenize.pyi0000644€tŠÔÚ€2›s®0000000532213576752252025537 0ustar jukkaDROPBOX\Domain Users00000000000000# Automatically generated by pytype, manually fixed up. May still contain errors. from typing import Any, Callable, Dict, Generator, Iterator, List, Tuple, Union, Iterable __author__: str __credits__: str AMPER: int AMPEREQUAL: int AT: int BACKQUOTE: int Binnumber: str Bracket: str CIRCUMFLEX: int CIRCUMFLEXEQUAL: int COLON: int COMMA: int COMMENT: int Comment: str ContStr: str DEDENT: int DOT: int DOUBLESLASH: int DOUBLESLASHEQUAL: int DOUBLESTAR: int DOUBLESTAREQUAL: int Decnumber: str Double: str Double3: str ENDMARKER: int EQEQUAL: int EQUAL: int ERRORTOKEN: int Expfloat: str Exponent: str Floatnumber: str Funny: str GREATER: int GREATEREQUAL: int Hexnumber: str INDENT: int def ISEOF(x: int) -> bool: ... def ISNONTERMINAL(x: int) -> bool: ... def ISTERMINAL(x: int) -> bool: ... 
Ignore: str Imagnumber: str Intnumber: str LBRACE: int LEFTSHIFT: int LEFTSHIFTEQUAL: int LESS: int LESSEQUAL: int LPAR: int LSQB: int MINEQUAL: int MINUS: int NAME: int NEWLINE: int NL: int NOTEQUAL: int NT_OFFSET: int NUMBER: int N_TOKENS: int Name: str Number: str OP: int Octnumber: str Operator: str PERCENT: int PERCENTEQUAL: int PLUS: int PLUSEQUAL: int PlainToken: str Pointfloat: str PseudoExtras: str PseudoToken: str RBRACE: int RIGHTSHIFT: int RIGHTSHIFTEQUAL: int RPAR: int RSQB: int SEMI: int SLASH: int SLASHEQUAL: int STAR: int STAREQUAL: int STRING: int Single: str Single3: str Special: str String: str TILDE: int Token: str Triple: str VBAR: int VBAREQUAL: int Whitespace: str chain: type double3prog: type endprogs: Dict[str, Any] pseudoprog: type single3prog: type single_quoted: Dict[str, str] t: str tabsize: int tok_name: Dict[int, str] tokenprog: type triple_quoted: Dict[str, str] x: str _Pos = Tuple[int, int] _TokenType = Tuple[int, str, _Pos, _Pos, str] def any(*args, **kwargs) -> str: ... def generate_tokens(readline: Callable[[], str]) -> Generator[_TokenType, None, None]: ... def group(*args: str) -> str: ... def maybe(*args: str) -> str: ... def printtoken(type: int, token: str, srow_scol: _Pos, erow_ecol: _Pos, line: str) -> None: ... def tokenize(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ... def tokenize_loop(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ... def untokenize(iterable: Iterable[_TokenType]) -> str: ... class StopTokenizing(Exception): ... class TokenError(Exception): ... class Untokenizer: prev_col: int prev_row: int tokens: List[str] def __init__(self) -> None: ... def add_whitespace(self, _Pos) -> None: ... def compat(self, token: Tuple[int, Any], iterable: Iterator[_TokenType]) -> None: ... def untokenize(self, iterable: Iterable[_TokenType]) -> str: ... 
mypy-0.761/mypy/typeshed/stdlib/2/types.pyi0000644€tŠÔÚ€2›s®0000001230413576752252025051 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for types # Note, all classes "defined" here require special handling. from typing import ( Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Type, TypeVar, Union, overload, ) _T = TypeVar('_T') class NoneType: ... TypeType = type ObjectType = object IntType = int LongType = int # Really long, but can't reference that due to a mypy import cycle FloatType = float BooleanType = bool ComplexType = complex StringType = str UnicodeType = unicode StringTypes: Tuple[Type[StringType], Type[UnicodeType]] BufferType = buffer TupleType = tuple ListType = list DictType = dict DictionaryType = dict class _Cell: cell_contents: Any class FunctionType: func_closure: Optional[Tuple[_Cell, ...]] = ... func_code: CodeType = ... func_defaults: Optional[Tuple[Any, ...]] = ... func_dict: Dict[str, Any] = ... func_doc: Optional[str] = ... func_globals: Dict[str, Any] = ... func_name: str = ... __closure__ = func_closure __code__ = func_code __defaults__ = func_defaults __dict__ = func_dict __globals__ = func_globals __name__ = func_name def __init__(self, code: CodeType, globals: Dict[str, Any], name: Optional[str] = ..., argdefs: Optional[Tuple[object, ...]] = ..., closure: Optional[Tuple[_Cell, ...]] = ...) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __get__(self, obj: Optional[object], type: Optional[type]) -> UnboundMethodType: ... LambdaType = FunctionType class CodeType: co_argcount: int co_cellvars: Tuple[str, ...] co_code: str co_consts: Tuple[Any, ...] co_filename: str co_firstlineno: int co_flags: int co_freevars: Tuple[str, ...] co_lnotab: str co_name: str co_names: Tuple[str, ...] co_nlocals: int co_stacksize: int co_varnames: Tuple[str, ...] 
def __init__( self, argcount: int, nlocals: int, stacksize: int, flags: int, codestring: str, constants: Tuple[Any, ...], names: Tuple[str, ...], varnames: Tuple[str, ...], filename: str, name: str, firstlineno: int, lnotab: str, freevars: Tuple[str, ...] = ..., cellvars: Tuple[str, ...] = ..., ) -> None: ... class GeneratorType: gi_code: CodeType gi_frame: FrameType gi_running: int def __iter__(self) -> GeneratorType: ... def close(self) -> None: ... def next(self) -> Any: ... def send(self, arg: Any) -> Any: ... @overload def throw(self, val: BaseException) -> Any: ... @overload def throw(self, typ: type, val: BaseException = ..., tb: TracebackType = ...) -> Any: ... class ClassType: ... class UnboundMethodType: im_class: type = ... im_func: FunctionType = ... im_self: object = ... __name__: str __func__ = im_func __self__ = im_self def __init__(self, func: Callable[..., Any], obj: object) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... class InstanceType(object): ... MethodType = UnboundMethodType class BuiltinFunctionType: __self__: Optional[object] def __call__(self, *args: Any, **kwargs: Any) -> Any: ... BuiltinMethodType = BuiltinFunctionType class ModuleType: __doc__: Optional[str] __file__: Optional[str] __name__: str __package__: Optional[str] __path__: Optional[Iterable[str]] __dict__: Dict[str, Any] def __init__(self, name: str, doc: Optional[str] = ...) -> None: ... FileType = file XRangeType = xrange class TracebackType: tb_frame: FrameType tb_lasti: int tb_lineno: int tb_next: TracebackType class FrameType: f_back: FrameType f_builtins: Dict[str, Any] f_code: CodeType f_exc_type: None f_exc_value: None f_exc_traceback: None f_globals: Dict[str, Any] f_lasti: int f_lineno: int f_locals: Dict[str, Any] f_restricted: bool f_trace: Callable[[], None] def clear(self) -> None: ... SliceType = slice class EllipsisType: ... class DictProxyType: # TODO is it possible to have non-string keys? 
# no __init__ def copy(self) -> Dict[Any, Any]: ... def get(self, key: str, default: _T = ...) -> Union[Any, _T]: ... def has_key(self, key: str) -> bool: ... def items(self) -> List[Tuple[str, Any]]: ... def iteritems(self) -> Iterator[Tuple[str, Any]]: ... def iterkeys(self) -> Iterator[str]: ... def itervalues(self) -> Iterator[Any]: ... def keys(self) -> List[str]: ... def values(self) -> List[Any]: ... def __contains__(self, key: str) -> bool: ... def __getitem__(self, key: str) -> Any: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... class NotImplementedType: ... class GetSetDescriptorType: __name__: str __objclass__: type def __get__(self, obj: Any, type: type = ...) -> Any: ... def __set__(self, obj: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... # Same type on Jython, different on CPython and PyPy, unknown on IronPython. class MemberDescriptorType: __name__: str __objclass__: type def __get__(self, obj: Any, type: type = ...) -> Any: ... def __set__(self, obj: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2/typing.pyi0000644€tŠÔÚ€2›s®0000004161613576752252025227 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for typing (Python 2.7) from abc import abstractmethod, ABCMeta from types import CodeType, FrameType, TracebackType import collections # Needed by aliases like DefaultDict, see mypy issue 2986 # Definitions of special type checking related constructs. Their definitions # are not used, so their value does not matter. overload = object() Any = object() TypeVar = object() _promote = object() class _SpecialForm(object): def __getitem__(self, typeargs: Any) -> object: ... Tuple: _SpecialForm = ... Generic: _SpecialForm = ... Protocol: _SpecialForm = ... Callable: _SpecialForm = ... Type: _SpecialForm = ... ClassVar: _SpecialForm = ... Final: _SpecialForm = ... _F = TypeVar('_F', bound=Callable[..., Any]) def final(f: _F) -> _F: ... Literal: _SpecialForm = ... 
# TypedDict is a (non-subscriptable) special form. TypedDict: object = ... class GenericMeta(type): ... # Return type that indicates a function does not return. # This type is equivalent to the None type, but the no-op Union is necessary to # distinguish the None type from the None value. NoReturn = Union[None] # These type variables are used by the container types. _T = TypeVar('_T') _S = TypeVar('_S') _KT = TypeVar('_KT') # Key type. _VT = TypeVar('_VT') # Value type. _T_co = TypeVar('_T_co', covariant=True) # Any type covariant containers. _V_co = TypeVar('_V_co', covariant=True) # Any type covariant containers. _KT_co = TypeVar('_KT_co', covariant=True) # Key type covariant containers. _VT_co = TypeVar('_VT_co', covariant=True) # Value type covariant containers. _T_contra = TypeVar('_T_contra', contravariant=True) # Ditto contravariant. _TC = TypeVar('_TC', bound=Type[object]) _C = TypeVar("_C", bound=Callable[..., Any]) no_type_check = object() def no_type_check_decorator(decorator: _C) -> _C: ... # Type aliases and type constructors class TypeAlias: # Class for defining generic aliases for library types. def __init__(self, target_type: type) -> None: ... def __getitem__(self, typeargs: Any) -> Any: ... Union = TypeAlias(object) Optional = TypeAlias(object) List = TypeAlias(object) Dict = TypeAlias(object) DefaultDict = TypeAlias(object) Set = TypeAlias(object) FrozenSet = TypeAlias(object) Counter = TypeAlias(object) Deque = TypeAlias(object) # Predefined type variables. AnyStr = TypeVar('AnyStr', str, unicode) # Abstract base classes. def runtime_checkable(cls: _TC) -> _TC: ... @runtime_checkable class SupportsInt(Protocol, metaclass=ABCMeta): @abstractmethod def __int__(self) -> int: ... @runtime_checkable class SupportsFloat(Protocol, metaclass=ABCMeta): @abstractmethod def __float__(self) -> float: ... @runtime_checkable class SupportsComplex(Protocol, metaclass=ABCMeta): @abstractmethod def __complex__(self) -> complex: ... 
@runtime_checkable class SupportsAbs(Protocol[_T_co]): @abstractmethod def __abs__(self) -> _T_co: ... @runtime_checkable class Reversible(Protocol[_T_co]): @abstractmethod def __reversed__(self) -> Iterator[_T_co]: ... @runtime_checkable class Sized(Protocol, metaclass=ABCMeta): @abstractmethod def __len__(self) -> int: ... @runtime_checkable class Hashable(Protocol, metaclass=ABCMeta): # TODO: This is special, in that a subclass of a hashable class may not be hashable # (for example, list vs. object). It's not obvious how to represent this. This class # is currently mostly useless for static checking. @abstractmethod def __hash__(self) -> int: ... @runtime_checkable class Iterable(Protocol[_T_co]): @abstractmethod def __iter__(self) -> Iterator[_T_co]: ... @runtime_checkable class Iterator(Iterable[_T_co], Protocol[_T_co]): @abstractmethod def next(self) -> _T_co: ... def __iter__(self) -> Iterator[_T_co]: ... class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]): @abstractmethod def next(self) -> _T_co: ... @abstractmethod def send(self, value: _T_contra) -> _T_co: ... @abstractmethod def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ..., tb: TracebackType = ...) -> _T_co: ... @abstractmethod def close(self) -> None: ... @property def gi_code(self) -> CodeType: ... @property def gi_frame(self) -> FrameType: ... @property def gi_running(self) -> bool: ... @runtime_checkable class Container(Protocol[_T_co]): @abstractmethod def __contains__(self, x: object) -> bool: ... class Sequence(Iterable[_T_co], Container[_T_co], Reversible[_T_co], Generic[_T_co]): @overload @abstractmethod def __getitem__(self, i: int) -> _T_co: ... @overload @abstractmethod def __getitem__(self, s: slice) -> Sequence[_T_co]: ... # Mixin methods def index(self, x: Any) -> int: ... def count(self, x: Any) -> int: ... def __contains__(self, x: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... def __reversed__(self) -> Iterator[_T_co]: ... 
# Implement Sized (but don't have it as a base class). @abstractmethod def __len__(self) -> int: ... class MutableSequence(Sequence[_T], Generic[_T]): @abstractmethod def insert(self, index: int, object: _T) -> None: ... @overload @abstractmethod def __getitem__(self, i: int) -> _T: ... @overload @abstractmethod def __getitem__(self, s: slice) -> MutableSequence[_T]: ... @overload @abstractmethod def __setitem__(self, i: int, o: _T) -> None: ... @overload @abstractmethod def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... @overload @abstractmethod def __delitem__(self, i: int) -> None: ... @overload @abstractmethod def __delitem__(self, i: slice) -> None: ... # Mixin methods def append(self, object: _T) -> None: ... def extend(self, iterable: Iterable[_T]) -> None: ... def reverse(self) -> None: ... def pop(self, index: int = ...) -> _T: ... def remove(self, object: _T) -> None: ... def __iadd__(self, x: Iterable[_T]) -> MutableSequence[_T]: ... class AbstractSet(Iterable[_T_co], Container[_T_co], Generic[_T_co]): @abstractmethod def __contains__(self, x: object) -> bool: ... # Mixin methods def __le__(self, s: AbstractSet[Any]) -> bool: ... def __lt__(self, s: AbstractSet[Any]) -> bool: ... def __gt__(self, s: AbstractSet[Any]) -> bool: ... def __ge__(self, s: AbstractSet[Any]) -> bool: ... def __and__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ... def __or__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ... def __sub__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ... def __xor__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ... # TODO: argument can be any container? def isdisjoint(self, s: AbstractSet[Any]) -> bool: ... # Implement Sized (but don't have it as a base class). @abstractmethod def __len__(self) -> int: ... class MutableSet(AbstractSet[_T], Generic[_T]): @abstractmethod def add(self, x: _T) -> None: ... @abstractmethod def discard(self, x: _T) -> None: ... # Mixin methods def clear(self) -> None: ... 
def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... def __ior__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ... def __iand__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... def __ixor__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ... def __isub__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... class MappingView(object): def __len__(self) -> int: ... class ItemsView(MappingView, AbstractSet[Tuple[_KT_co, _VT_co]], Generic[_KT_co, _VT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT_co]: ... class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_VT_co]: ... @runtime_checkable class ContextManager(Protocol[_T_co]): def __enter__(self) -> _T_co: ... def __exit__(self, __exc_type: Optional[Type[BaseException]], __exc_value: Optional[BaseException], __traceback: Optional[TracebackType]) -> Optional[bool]: ... class Mapping(Iterable[_KT], Container[_KT], Generic[_KT, _VT_co]): # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https: //github.com/python/typing/pull/273. @abstractmethod def __getitem__(self, k: _KT) -> _VT_co: ... # Mixin methods @overload def get(self, k: _KT) -> Optional[_VT_co]: ... @overload def get(self, k: _KT, default: Union[_VT_co, _T]) -> Union[_VT_co, _T]: ... def keys(self) -> list[_KT]: ... def values(self) -> list[_VT_co]: ... def items(self) -> list[Tuple[_KT, _VT_co]]: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT_co]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT_co]]: ... def __contains__(self, o: object) -> bool: ... # Implement Sized (but don't have it as a base class). 
@abstractmethod def __len__(self) -> int: ... class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): @abstractmethod def __setitem__(self, k: _KT, v: _VT) -> None: ... @abstractmethod def __delitem__(self, v: _KT) -> None: ... def clear(self) -> None: ... @overload def pop(self, k: _KT) -> _VT: ... @overload def pop(self, k: _KT, default: Union[_VT, _T] = ...) -> Union[_VT, _T]: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... @overload def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... Text = unicode TYPE_CHECKING = True class IO(Iterator[AnyStr], Generic[AnyStr]): # TODO detach # TODO use abstract properties @property def mode(self) -> str: ... @property def name(self) -> str: ... @abstractmethod def close(self) -> None: ... @property def closed(self) -> bool: ... @abstractmethod def fileno(self) -> int: ... @abstractmethod def flush(self) -> None: ... @abstractmethod def isatty(self) -> bool: ... # TODO what if n is None? @abstractmethod def read(self, n: int = ...) -> AnyStr: ... @abstractmethod def readable(self) -> bool: ... @abstractmethod def readline(self, limit: int = ...) -> AnyStr: ... @abstractmethod def readlines(self, hint: int = ...) -> list[AnyStr]: ... @abstractmethod def seek(self, offset: int, whence: int = ...) -> int: ... @abstractmethod def seekable(self) -> bool: ... @abstractmethod def tell(self) -> int: ... @abstractmethod def truncate(self, size: Optional[int] = ...) -> int: ... @abstractmethod def writable(self) -> bool: ... # TODO buffer objects @abstractmethod def write(self, s: AnyStr) -> int: ... @abstractmethod def writelines(self, lines: Iterable[AnyStr]) -> None: ... @abstractmethod def next(self) -> AnyStr: ... @abstractmethod def __iter__(self) -> Iterator[AnyStr]: ... 
@abstractmethod def __enter__(self) -> IO[AnyStr]: ... @abstractmethod def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType]) -> Optional[bool]: ... class BinaryIO(IO[str]): # TODO readinto # TODO read1? # TODO peek? @abstractmethod def __enter__(self) -> BinaryIO: ... class TextIO(IO[unicode]): # TODO use abstractproperty @property def buffer(self) -> BinaryIO: ... @property def encoding(self) -> str: ... @property def errors(self) -> Optional[str]: ... @property def line_buffering(self) -> bool: ... @property def newlines(self) -> Any: ... # None, str or tuple @abstractmethod def __enter__(self) -> TextIO: ... class ByteString(Sequence[int], metaclass=ABCMeta): ... class Match(Generic[AnyStr]): pos: int endpos: int lastindex: Optional[int] string: AnyStr # The regular expression object whose match() or search() method produced # this match instance. This should not be Pattern[AnyStr] because the type # of the pattern is independent of the type of the matched string in # Python 2. Strictly speaking Match should be generic over AnyStr twice: # once for the type of the pattern and once for the type of the matched # string. re: Pattern[Any] # Can be None if there are no groups or if the last group was unnamed; # otherwise matches the type of the pattern. lastgroup: Optional[Any] def expand(self, template: Union[str, Text]) -> Any: ... @overload def group(self, group1: int = ...) -> AnyStr: ... @overload def group(self, group1: str) -> AnyStr: ... @overload def group(self, group1: int, group2: int, *groups: int) -> Tuple[AnyStr, ...]: ... @overload def group(self, group1: str, group2: str, *groups: str) -> Tuple[AnyStr, ...]: ... def groups(self, default: AnyStr = ...) -> Tuple[AnyStr, ...]: ... def groupdict(self, default: AnyStr = ...) -> Dict[str, AnyStr]: ... def start(self, group: Union[int, str] = ...) -> int: ... def end(self, group: Union[int, str] = ...) -> int: ... 
def span(self, group: Union[int, str] = ...) -> Tuple[int, int]: ... @property def regs(self) -> Tuple[Tuple[int, int], ...]: ... # undocumented # We need a second TypeVar with the same definition as AnyStr, because # Pattern is generic over AnyStr (determining the type of its .pattern # attribute), but at the same time its methods take either bytes or # Text and return the same type, regardless of the type of the pattern. _AnyStr2 = TypeVar('_AnyStr2', bytes, Text) class Pattern(Generic[AnyStr]): flags: int groupindex: Dict[AnyStr, int] groups: int pattern: AnyStr def search(self, string: _AnyStr2, pos: int = ..., endpos: int = ...) -> Optional[Match[_AnyStr2]]: ... def match(self, string: _AnyStr2, pos: int = ..., endpos: int = ...) -> Optional[Match[_AnyStr2]]: ... def split(self, string: _AnyStr2, maxsplit: int = ...) -> List[_AnyStr2]: ... # Returns either a list of _AnyStr2 or a list of tuples, depending on # whether there are groups in the pattern. def findall(self, string: Union[bytes, Text], pos: int = ..., endpos: int = ...) -> List[Any]: ... def finditer(self, string: _AnyStr2, pos: int = ..., endpos: int = ...) -> Iterator[Match[_AnyStr2]]: ... @overload def sub(self, repl: _AnyStr2, string: _AnyStr2, count: int = ...) -> _AnyStr2: ... @overload def sub(self, repl: Callable[[Match[_AnyStr2]], _AnyStr2], string: _AnyStr2, count: int = ...) -> _AnyStr2: ... @overload def subn(self, repl: _AnyStr2, string: _AnyStr2, count: int = ...) -> Tuple[_AnyStr2, int]: ... @overload def subn(self, repl: Callable[[Match[_AnyStr2]], _AnyStr2], string: _AnyStr2, count: int = ...) -> Tuple[_AnyStr2, int]: ... # Functions def get_type_hints( obj: Callable[..., Any], globalns: Optional[Dict[Text, Any]] = ..., localns: Optional[Dict[Text, Any]] = ..., ) -> None: ... @overload def cast(tp: Type[_T], obj: Any) -> _T: ... @overload def cast(tp: str, obj: Any) -> Any: ... 
# Type constructors # NamedTuple is special-cased in the type checker class NamedTuple(Tuple[Any, ...]): _fields: Tuple[str, ...] def __init__(self, typename: Text, fields: Iterable[Tuple[Text, Any]] = ..., **kwargs: Any) -> None: ... @classmethod def _make(cls: Type[_T], iterable: Iterable[Any]) -> _T: ... def _asdict(self) -> Dict[str, Any]: ... def _replace(self: _T, **kwargs: Any) -> _T: ... # Internal mypy fallback type for all typed dicts (does not exist at runtime) class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def copy(self: _T) -> _T: ... # Using NoReturn so that only calls using mypy plugin hook that specialize the signature # can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: _T = ...) -> object: ... def update(self: _T, __m: _T) -> None: ... def has_key(self, k: str) -> bool: ... def viewitems(self) -> ItemsView[str, object]: ... def viewkeys(self) -> KeysView[str]: ... def viewvalues(self) -> ValuesView[object]: ... def __delitem__(self, k: NoReturn) -> None: ... def NewType(name: str, tp: Type[_T]) -> Type[_T]: ... # This itself is only available during type checking def type_check_only(func_or_cls: _C) -> _C: ... 
mypy-0.761/mypy/typeshed/stdlib/2/unittest.pyi0000644€tŠÔÚ€2›s®0000003236713576752252025577 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for unittest # Based on http://docs.python.org/2.7/library/unittest.html from typing import (Any, Callable, Dict, FrozenSet, Iterable, Iterator, List, Mapping, NoReturn, Optional, overload, Pattern, Sequence, Set, Text, TextIO, Tuple, Type, TypeVar, Union) from abc import abstractmethod, ABCMeta import datetime import types _T = TypeVar('_T') _FT = TypeVar('_FT') _ExceptionType = Union[Type[BaseException], Tuple[Type[BaseException], ...]] _Regexp = Union[Text, Pattern[Text]] class Testable(metaclass=ABCMeta): @abstractmethod def run(self, result: TestResult) -> None: ... @abstractmethod def debug(self) -> None: ... @abstractmethod def countTestCases(self) -> int: ... # TODO ABC for test runners? class TestResult: errors: List[Tuple[Testable, str]] failures: List[Tuple[Testable, str]] skipped: List[Tuple[Testable, str]] expectedFailures: List[Tuple[Testable, str]] unexpectedSuccesses: List[Testable] shouldStop: bool testsRun: int buffer: bool failfast: bool def wasSuccessful(self) -> bool: ... def stop(self) -> None: ... def startTest(self, test: Testable) -> None: ... def stopTest(self, test: Testable) -> None: ... def startTestRun(self) -> None: ... def stopTestRun(self) -> None: ... def addError(self, test: Testable, err: Tuple[type, Any, Any]) -> None: ... # TODO def addFailure(self, test: Testable, err: Tuple[type, Any, Any]) -> None: ... # TODO def addSuccess(self, test: Testable) -> None: ... def addSkip(self, test: Testable, reason: str) -> None: ... def addExpectedFailure(self, test: Testable, err: str) -> None: ... def addUnexpectedSuccess(self, test: Testable) -> None: ... 
class _AssertRaisesBaseContext: expected: Any failureException: Type[BaseException] obj_name: str expected_regex: Pattern[str] class _AssertRaisesContext(_AssertRaisesBaseContext): exception: Any def __enter__(self) -> _AssertRaisesContext: ... def __exit__(self, exc_type, exc_value, tb) -> bool: ... class TestCase(Testable): failureException: Type[BaseException] longMessage: bool maxDiff: Optional[int] # undocumented _testMethodName: str def __init__(self, methodName: str = ...) -> None: ... def setUp(self) -> None: ... def tearDown(self) -> None: ... @classmethod def setUpClass(cls) -> None: ... @classmethod def tearDownClass(cls) -> None: ... def run(self, result: TestResult = ...) -> None: ... def debug(self) -> None: ... def assert_(self, expr: Any, msg: object = ...) -> None: ... def failUnless(self, expr: Any, msg: object = ...) -> None: ... def assertTrue(self, expr: Any, msg: object = ...) -> None: ... def assertEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertEquals(self, first: Any, second: Any, msg: object = ...) -> None: ... def failUnlessEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertNotEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertNotEquals(self, first: Any, second: Any, msg: object = ...) -> None: ... def failIfEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... @overload def assertAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... @overload def assertAlmostEqual(self, first: float, second: float, *, msg: Any = ..., delta: float = ...) -> None: ... @overload def assertAlmostEqual(self, first: datetime.datetime, second: datetime.datetime, *, msg: Any = ..., delta: datetime.timedelta = ...) -> None: ... @overload def assertAlmostEquals(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... 
@overload def assertAlmostEquals(self, first: float, second: float, *, msg: Any = ..., delta: float = ...) -> None: ... @overload def assertAlmostEquals(self, first: datetime.datetime, second: datetime.datetime, *, msg: Any = ..., delta: datetime.timedelta = ...) -> None: ... def failUnlessAlmostEqual(self, first: float, second: float, places: int = ..., msg: object = ...) -> None: ... @overload def assertNotAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... @overload def assertNotAlmostEqual(self, first: float, second: float, *, msg: Any = ..., delta: float = ...) -> None: ... @overload def assertNotAlmostEqual(self, first: datetime.datetime, second: datetime.datetime, *, msg: Any = ..., delta: datetime.timedelta = ...) -> None: ... @overload def assertNotAlmostEquals(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... @overload def assertNotAlmostEquals(self, first: float, second: float, *, msg: Any = ..., delta: float = ...) -> None: ... @overload def assertNotAlmostEquals(self, first: datetime.datetime, second: datetime.datetime, *, msg: Any = ..., delta: datetime.timedelta = ...) -> None: ... def failIfAlmostEqual(self, first: float, second: float, places: int = ..., msg: object = ..., delta: float = ...) -> None: ... def assertGreater(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertGreaterEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertMultiLineEqual(self, first: str, second: str, msg: object = ...) -> None: ... def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any], msg: object = ..., seq_type: type = ...) -> None: ... def assertListEqual(self, first: List[Any], second: List[Any], msg: object = ...) -> None: ... def assertTupleEqual(self, first: Tuple[Any, ...], second: Tuple[Any, ...], msg: object = ...) -> None: ... 
def assertSetEqual(self, first: Union[Set[Any], FrozenSet[Any]], second: Union[Set[Any], FrozenSet[Any]], msg: object = ...) -> None: ... def assertDictEqual(self, first: Dict[Any, Any], second: Dict[Any, Any], msg: object = ...) -> None: ... def assertLess(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertLessEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... @overload def assertRaises(self, exception: _ExceptionType, callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertRaises(self, exception: _ExceptionType) -> _AssertRaisesContext: ... @overload def assertRaisesRegexp(self, exception: _ExceptionType, regexp: _Regexp, callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertRaisesRegexp(self, exception: _ExceptionType, regexp: _Regexp) -> _AssertRaisesContext: ... def assertRegexpMatches(self, text: Text, regexp: _Regexp, msg: object = ...) -> None: ... def assertNotRegexpMatches(self, text: Text, regexp: _Regexp, msg: object = ...) -> None: ... def assertItemsEqual(self, first: Iterable[Any], second: Iterable[Any], msg: object = ...) -> None: ... def assertDictContainsSubset(self, expected: Mapping[Any, Any], actual: Mapping[Any, Any], msg: object = ...) -> None: ... def addTypeEqualityFunc(self, typeobj: type, function: Callable[..., None]) -> None: ... @overload def failUnlessRaises(self, exception: _ExceptionType, callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def failUnlessRaises(self, exception: _ExceptionType) -> _AssertRaisesContext: ... def failIf(self, expr: Any, msg: object = ...) -> None: ... def assertFalse(self, expr: Any, msg: object = ...) -> None: ... def assertIs(self, first: object, second: object, msg: object = ...) -> None: ... def assertIsNot(self, first: object, second: object, msg: object = ...) -> None: ... def assertIsNone(self, expr: Any, msg: object = ...) -> None: ... 
def assertIsNotNone(self, expr: Any, msg: object = ...) -> None: ... def assertIn(self, first: _T, second: Iterable[_T], msg: object = ...) -> None: ... def assertNotIn(self, first: _T, second: Iterable[_T], msg: object = ...) -> None: ... def assertIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]], msg: object = ...) -> None: ... def assertNotIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]], msg: object = ...) -> None: ... def fail(self, msg: object = ...) -> NoReturn: ... def countTestCases(self) -> int: ... def defaultTestResult(self) -> TestResult: ... def id(self) -> str: ... def shortDescription(self) -> str: ... # May return None def addCleanup(self, function: Any, *args: Any, **kwargs: Any) -> None: ... def doCleanups(self) -> bool: ... def skipTest(self, reason: Any) -> None: ... def _formatMessage(self, msg: Optional[Text], standardMsg: Text) -> str: ... # undocumented def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: ... # undocumented class FunctionTestCase(Testable): def __init__(self, testFunc: Callable[[], None], setUp: Optional[Callable[[], None]] = ..., tearDown: Optional[Callable[[], None]] = ..., description: Optional[str] = ...) -> None: ... def run(self, result: TestResult) -> None: ... def debug(self) -> None: ... def countTestCases(self) -> int: ... class TestSuite(Testable): def __init__(self, tests: Iterable[Testable] = ...) -> None: ... def addTest(self, test: Testable) -> None: ... def addTests(self, tests: Iterable[Testable]) -> None: ... def run(self, result: TestResult) -> None: ... def debug(self) -> None: ... def countTestCases(self) -> int: ... def __iter__(self) -> Iterator[Testable]: ... class TestLoader: testMethodPrefix: str sortTestMethodsUsing: Optional[Callable[[str, str], int]] suiteClass: Callable[[List[TestCase]], TestSuite] def loadTestsFromTestCase(self, testCaseClass: Type[TestCase]) -> TestSuite: ... 
def loadTestsFromModule(self, module: types.ModuleType = ..., use_load_tests: bool = ...) -> TestSuite: ... def loadTestsFromName(self, name: str = ..., module: Optional[types.ModuleType] = ...) -> TestSuite: ... def loadTestsFromNames(self, names: List[str] = ..., module: Optional[types.ModuleType] = ...) -> TestSuite: ... def discover(self, start_dir: str, pattern: str = ..., top_level_dir: Optional[str] = ...) -> TestSuite: ... def getTestCaseNames(self, testCaseClass: Type[TestCase] = ...) -> List[str]: ... defaultTestLoader: TestLoader class TextTestResult(TestResult): def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ... class TextTestRunner: def __init__(self, stream: Optional[TextIO] = ..., descriptions: bool = ..., verbosity: int = ..., failfast: bool = ..., buffer: bool = ..., resultclass: Optional[Type[TestResult]] = ...) -> None: ... def _makeResult(self) -> TestResult: ... class SkipTest(Exception): ... # TODO precise types def skipUnless(condition: Any, reason: Union[str, unicode]) -> Any: ... def skipIf(condition: Any, reason: Union[str, unicode]) -> Any: ... def expectedFailure(func: _FT) -> _FT: ... def skip(reason: Union[str, unicode]) -> Any: ... # not really documented class TestProgram: result: TestResult def runTests(self) -> None: ... # undocumented def main(module: Union[None, Text, types.ModuleType] = ..., defaultTest: Optional[str] = ..., argv: Optional[Sequence[str]] = ..., testRunner: Union[Type[TextTestRunner], TextTestRunner, None] = ..., testLoader: TestLoader = ..., exit: bool = ..., verbosity: int = ..., failfast: Optional[bool] = ..., catchbreak: Optional[bool] = ..., buffer: Optional[bool] = ...) -> TestProgram: ... def load_tests(loader: TestLoader, tests: TestSuite, pattern: Optional[Text]) -> TestSuite: ... def installHandler() -> None: ... def registerResult(result: TestResult) -> None: ... def removeResult(result: TestResult) -> bool: ... @overload def removeHandler() -> None: ... 
@overload def removeHandler(function: Callable[..., Any]) -> Callable[..., Any]: ... # private but occasionally used util: types.ModuleType mypy-0.761/mypy/typeshed/stdlib/2/urllib.pyi0000644€tŠÔÚ€2›s®0000001123613576752252025201 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, AnyStr, IO, List, Mapping, Sequence, Text, Tuple, TypeVar, Union def url2pathname(pathname: AnyStr) -> AnyStr: ... def pathname2url(pathname: AnyStr) -> AnyStr: ... def urlopen(url: str, data=..., proxies: Mapping[str, str] = ..., context=...) -> IO[Any]: ... def urlretrieve(url, filename=..., reporthook=..., data=..., context=...): ... def urlcleanup() -> None: ... class ContentTooShortError(IOError): content: Any def __init__(self, message, content) -> None: ... class URLopener: version: Any proxies: Any key_file: Any cert_file: Any context: Any addheaders: Any tempcache: Any ftpcache: Any def __init__(self, proxies: Mapping[str, str] = ..., context=..., **x509) -> None: ... def __del__(self): ... def close(self): ... def cleanup(self): ... def addheader(self, *args): ... type: Any def open(self, fullurl: str, data=...): ... def open_unknown(self, fullurl, data=...): ... def open_unknown_proxy(self, proxy, fullurl, data=...): ... def retrieve(self, url, filename=..., reporthook=..., data=...): ... def open_http(self, url, data=...): ... def http_error(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_default(self, url, fp, errcode, errmsg, headers): ... def open_https(self, url, data=...): ... def open_file(self, url): ... def open_local_file(self, url): ... def open_ftp(self, url): ... def open_data(self, url, data=...): ... class FancyURLopener(URLopener): auth_cache: Any tries: Any maxtries: Any def __init__(self, *args, **kwargs) -> None: ... def http_error_default(self, url, fp, errcode, errmsg, headers): ... def http_error_302(self, url, fp, errcode, errmsg, headers, data=...): ... 
def redirect_internal(self, url, fp, errcode, errmsg, headers, data): ... def http_error_301(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_303(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_307(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_401(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_407(self, url, fp, errcode, errmsg, headers, data=...): ... def retry_proxy_http_basic_auth(self, url, realm, data=...): ... def retry_proxy_https_basic_auth(self, url, realm, data=...): ... def retry_http_basic_auth(self, url, realm, data=...): ... def retry_https_basic_auth(self, url, realm, data=...): ... def get_user_passwd(self, host, realm, clear_cache=...): ... def prompt_user_passwd(self, host, realm): ... class ftpwrapper: user: Any passwd: Any host: Any port: Any dirs: Any timeout: Any refcount: Any keepalive: Any def __init__(self, user, passwd, host, port, dirs, timeout=..., persistent=...) -> None: ... busy: Any ftp: Any def init(self): ... def retrfile(self, file, type): ... def endtransfer(self): ... def close(self): ... def file_close(self): ... def real_close(self): ... _AIUT = TypeVar("_AIUT", bound=addbase) class addbase: fp: Any def read(self, n: int = ...) -> bytes: ... def readline(self, limit: int = ...) -> bytes: ... def readlines(self, hint: int = ...) -> List[bytes]: ... def fileno(self) -> int: ... # Optional[int], but that is rare def __iter__(self: _AIUT) -> _AIUT: ... def next(self) -> bytes: ... def __init__(self, fp) -> None: ... def close(self) -> None: ... class addclosehook(addbase): closehook: Any hookargs: Any def __init__(self, fp, closehook, *hookargs) -> None: ... def close(self): ... class addinfo(addbase): headers: Any def __init__(self, fp, headers) -> None: ... def info(self): ... class addinfourl(addbase): headers: Any url: Any code: Any def __init__(self, fp, headers, url, code=...) -> None: ... def info(self): ... def getcode(self): ... 
def geturl(self): ... def unwrap(url): ... def splittype(url): ... def splithost(url): ... def splituser(host): ... def splitpasswd(user): ... def splitport(host): ... def splitnport(host, defport=...): ... def splitquery(url): ... def splittag(url): ... def splitattr(url): ... def splitvalue(attr): ... def unquote(s: AnyStr) -> AnyStr: ... def unquote_plus(s: AnyStr) -> AnyStr: ... def quote(s: AnyStr, safe: Text = ...) -> AnyStr: ... def quote_plus(s: AnyStr, safe: Text = ...) -> AnyStr: ... def urlencode(query: Union[Sequence[Tuple[Any, Any]], Mapping[Any, Any]], doseq=...) -> str: ... def getproxies() -> Mapping[str, str]: ... def proxy_bypass(host: str) -> Any: ... # Undocumented # Names in __all__ with no definition: # basejoin mypy-0.761/mypy/typeshed/stdlib/2/urllib2.pyi0000644€tŠÔÚ€2›s®0000002022113576752252025255 0ustar jukkaDROPBOX\Domain Users00000000000000 import ssl from typing import Any, AnyStr, Dict, List, Union, Optional, Mapping, Callable, Sequence, Text, Tuple, Type from urllib import addinfourl from httplib import HTTPConnectionProtocol, HTTPResponse _string = Union[str, unicode] class URLError(IOError): reason: Union[str, BaseException] class HTTPError(URLError, addinfourl): code: int headers: Mapping[str, str] def __init__(self, url, code: int, msg: str, hdrs: Mapping[str, str], fp: addinfourl) -> None: ... class Request(object): host: str port: str data: str headers: Dict[str, str] unverifiable: bool type: Optional[str] origin_req_host = ... unredirected_hdrs: Dict[str, str] def __init__(self, url: str, data: Optional[str] = ..., headers: Dict[str, str] = ..., origin_req_host: Optional[str] = ..., unverifiable: bool = ...) -> None: ... def __getattr__(self, attr): ... def get_method(self) -> str: ... def add_data(self, data) -> None: ... def has_data(self) -> bool: ... def get_data(self) -> str: ... def get_full_url(self) -> str: ... def get_type(self): ... def get_host(self) -> str: ... def get_selector(self): ... 
def set_proxy(self, host, type) -> None: ... def has_proxy(self) -> bool: ... def get_origin_req_host(self) -> str: ... def is_unverifiable(self) -> bool: ... def add_header(self, key: str, val: str) -> None: ... def add_unredirected_header(self, key: str, val: str) -> None: ... def has_header(self, header_name: str) -> bool: ... def get_header(self, header_name: str, default: Optional[str] = ...) -> str: ... def header_items(self): ... class OpenerDirector(object): addheaders: List[Tuple[str, str]] def add_handler(self, handler: BaseHandler) -> None: ... def open(self, fullurl: Union[Request, _string], data: Optional[_string] = ..., timeout: Optional[float] = ...) -> Optional[addinfourl]: ... def error(self, proto: _string, *args: Any): ... # Note that this type is somewhat a lie. The return *can* be None if # a custom opener has been installed that fails to handle the request. def urlopen(url: Union[Request, _string], data: Optional[_string] = ..., timeout: Optional[float] = ..., cafile: Optional[_string] = ..., capath: Optional[_string] = ..., cadefault: bool = ..., context: Optional[ssl.SSLContext] = ...) -> addinfourl: ... def install_opener(opener: OpenerDirector) -> None: ... def build_opener(*handlers: Union[BaseHandler, Type[BaseHandler]]) -> OpenerDirector: ... class BaseHandler: handler_order: int parent: OpenerDirector def add_parent(self, parent: OpenerDirector) -> None: ... def close(self) -> None: ... def __lt__(self, other: Any) -> bool: ... class HTTPErrorProcessor(BaseHandler): def http_response(self, request, response): ... class HTTPDefaultErrorHandler(BaseHandler): def http_error_default(self, req: Request, fp: addinfourl, code: int, msg: str, hdrs: Mapping[str, str]): ... class HTTPRedirectHandler(BaseHandler): max_repeats: int max_redirections: int def redirect_request(self, req: Request, fp: addinfourl, code: int, msg: str, headers: Mapping[str, str], newurl): ... 
def http_error_301(self, req: Request, fp: addinfourl, code: int, msg: str, headers: Mapping[str, str]): ... def http_error_302(self, req: Request, fp: addinfourl, code: int, msg: str, headers: Mapping[str, str]): ... def http_error_303(self, req: Request, fp: addinfourl, code: int, msg: str, headers: Mapping[str, str]): ... def http_error_307(self, req: Request, fp: addinfourl, code: int, msg: str, headers: Mapping[str, str]): ... inf_msg: str class ProxyHandler(BaseHandler): proxies: Mapping[str, str] def __init__(self, proxies: Optional[Mapping[str, str]] = ...): ... def proxy_open(self, req: Request, proxy, type): ... class HTTPPasswordMgr: def __init__(self) -> None: ... def add_password(self, realm: Optional[Text], uri: Union[Text, Sequence[Text]], user: Text, passwd: Text) -> None: ... def find_user_password(self, realm: Optional[Text], authuri: Text) -> Tuple[Any, Any]: ... def reduce_uri(self, uri: _string, default_port: bool = ...) -> Tuple[Any, Any]: ... def is_suburi(self, base: _string, test: _string) -> bool: ... class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): ... class AbstractBasicAuthHandler: def __init__(self, password_mgr: Optional[HTTPPasswordMgr] = ...) -> None: ... def add_password(self, realm: Optional[Text], uri: Union[Text, Sequence[Text]], user: Text, passwd: Text) -> None: ... def http_error_auth_reqed(self, authreq, host, req: Request, headers: Mapping[str, str]): ... def retry_http_basic_auth(self, host, req: Request, realm): ... class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): auth_header: str def http_error_401(self, req: Request, fp: addinfourl, code: int, msg: str, headers: Mapping[str, str]): ... class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): auth_header: str def http_error_407(self, req: Request, fp: addinfourl, code: int, msg: str, headers: Mapping[str, str]): ... class AbstractDigestAuthHandler: def __init__(self, passwd: Optional[HTTPPasswordMgr] = ...) -> None: ... 
def add_password(self, realm: Optional[Text], uri: Union[Text, Sequence[Text]], user: Text, passwd: Text) -> None: ... def reset_retry_count(self) -> None: ... def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: Mapping[str, str]) -> None: ... def retry_http_digest_auth(self, req: Request, auth: str) -> Optional[HTTPResponse]: ... def get_cnonce(self, nonce: str) -> str: ... def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... def get_algorithm_impls(self, algorithm: str) -> Tuple[Callable[[str], str], Callable[[str, str], str]]: ... def get_entity_digest(self, data: Optional[bytes], chal: Mapping[str, str]) -> Optional[str]: ... class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): auth_header: str handler_order: int def http_error_401(self, req: Request, fp: addinfourl, code: int, msg: str, headers: Mapping[str, str]): ... class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): auth_header: str handler_order: int def http_error_407(self, req: Request, fp: addinfourl, code: int, msg: str, headers: Mapping[str, str]): ... class AbstractHTTPHandler(BaseHandler): # undocumented def __init__(self, debuglevel: int = ...) -> None: ... def set_http_debuglevel(self, level: int) -> None: ... def do_request_(self, request: Request) -> Request: ... def do_open(self, http_class: HTTPConnectionProtocol, req: Request, **http_conn_args: Optional[Any]) -> addinfourl: ... class HTTPHandler(AbstractHTTPHandler): def http_open(self, req: Request) -> addinfourl: ... def http_request(self, request: Request) -> Request: ... # undocumented class HTTPSHandler(AbstractHTTPHandler): def __init__(self, debuglevel: int = ..., context: Optional[ssl.SSLContext] = ...) -> None: ... def https_open(self, req: Request) -> addinfourl: ... def https_request(self, request: Request) -> Request: ... # undocumented class HTTPCookieProcessor(BaseHandler): def __init__(self, cookiejar: Optional[Any] = ...): ... 
def http_request(self, request: Request): ... def http_response(self, request: Request, response): ... class UnknownHandler(BaseHandler): def unknown_open(self, req: Request): ... class FileHandler(BaseHandler): def file_open(self, req: Request): ... def get_names(self): ... def open_local_file(self, req: Request): ... class FTPHandler(BaseHandler): def ftp_open(self, req: Request): ... def connect_ftp(self, user, passwd, host, port, dirs, timeout): ... class CacheFTPHandler(FTPHandler): def __init__(self) -> None: ... def setTimeout(self, t: Optional[float]): ... def setMaxConns(self, m: int): ... def check_cache(self): ... def clear_cache(self): ... def parse_http_list(s: AnyStr) -> List[AnyStr]: ... def parse_keqv_list(l: List[AnyStr]) -> Dict[AnyStr, AnyStr]: ... mypy-0.761/mypy/typeshed/stdlib/2/urlparse.pyi0000644€tŠÔÚ€2›s®0000000376113576752252025551 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for urlparse (Python 2) from typing import AnyStr, Dict, List, NamedTuple, Tuple, Sequence, Union, overload, Optional _String = Union[str, unicode] uses_relative: List[str] uses_netloc: List[str] uses_params: List[str] non_hierarchical: List[str] uses_query: List[str] uses_fragment: List[str] scheme_chars: str MAX_CACHE_SIZE: int def clear_cache() -> None: ... class ResultMixin(object): @property def username(self) -> Optional[str]: ... @property def password(self) -> Optional[str]: ... @property def hostname(self) -> Optional[str]: ... @property def port(self) -> Optional[int]: ... class _SplitResult(NamedTuple): scheme: str netloc: str path: str query: str fragment: str class SplitResult(_SplitResult, ResultMixin): def geturl(self) -> str: ... class _ParseResult(NamedTuple): scheme: str netloc: str path: str params: str query: str fragment: str class ParseResult(_ParseResult, ResultMixin): def geturl(self) -> str: ... def urlparse(url: _String, scheme: _String = ..., allow_fragments: bool = ...) -> ParseResult: ... 
def urlsplit(url: _String, scheme: _String = ..., allow_fragments: bool = ...) -> SplitResult: ... @overload def urlunparse(data: Tuple[_String, _String, _String, _String, _String, _String]) -> str: ... @overload def urlunparse(data: Sequence[_String]) -> str: ... @overload def urlunsplit(data: Tuple[_String, _String, _String, _String, _String]) -> str: ... @overload def urlunsplit(data: Sequence[_String]) -> str: ... def urljoin(base: _String, url: _String, allow_fragments: bool = ...) -> str: ... def urldefrag(url: AnyStr) -> Tuple[AnyStr, str]: ... def unquote(s: AnyStr) -> AnyStr: ... def parse_qs(qs: AnyStr, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[AnyStr, List[AnyStr]]: ... def parse_qsl(qs: AnyStr, keep_blank_values: int = ..., strict_parsing: bool = ...) -> List[Tuple[AnyStr, AnyStr]]: ... mypy-0.761/mypy/typeshed/stdlib/2/user.pyi0000644€tŠÔÚ€2›s®0000000034013576752252024660 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for user (Python 2) # Docs: https://docs.python.org/2/library/user.html # Source: https://hg.python.org/cpython/file/2.7/Lib/user.py from typing import Any def __getattr__(name) -> Any: ... home: str pythonrc: str mypy-0.761/mypy/typeshed/stdlib/2/whichdb.pyi0000644€tŠÔÚ€2›s®0000000022613576752252025315 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/whichdb.py from typing import Optional, Text def whichdb(filename: Text) -> Optional[str]: ... 
mypy-0.761/mypy/typeshed/stdlib/2/xmlrpclib.pyi0000644€tŠÔÚ€2›s®0000002266713576752252025716 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for xmlrpclib (Python 2) from typing import Any, AnyStr, Callable, IO, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Type, TypeVar, Union from types import InstanceType from datetime import datetime from time import struct_time from httplib import HTTPConnection, HTTPResponse, HTTPSConnection from ssl import SSLContext from StringIO import StringIO from gzip import GzipFile _Unmarshaller = Any _timeTuple = Tuple[int, int, int, int, int, int, int, int, int] # Represents types that can be compared against a DateTime object _dateTimeComp = Union[unicode, DateTime, datetime] # A "host description" used by Transport factories _hostDesc = Union[str, Tuple[str, Mapping[Any, Any]]] def escape(s: AnyStr, replace: Callable[[AnyStr, AnyStr, AnyStr], AnyStr] = ...) -> AnyStr: ... MAXINT: int MININT: int PARSE_ERROR: int SERVER_ERROR: int APPLICATION_ERROR: int SYSTEM_ERROR: int TRANSPORT_ERROR: int NOT_WELLFORMED_ERROR: int UNSUPPORTED_ENCODING: int INVALID_ENCODING_CHAR: int INVALID_XMLRPC: int METHOD_NOT_FOUND: int INVALID_METHOD_PARAMS: int INTERNAL_ERROR: int class Error(Exception): ... class ProtocolError(Error): url: str errcode: int errmsg: str headers: Any def __init__(self, url: str, errcode: int, errmsg: str, headers: Any) -> None: ... class ResponseError(Error): ... class Fault(Error): faultCode: Any faultString: str def __init__(self, faultCode: Any, faultString: str, **extra: Any) -> None: ... boolean: Type[bool] Boolean: Type[bool] class DateTime: value: str def __init__(self, value: Union[str, unicode, datetime, float, int, _timeTuple, struct_time] = ...) -> None: ... def make_comparable(self, other: _dateTimeComp) -> Tuple[unicode, unicode]: ... def __lt__(self, other: _dateTimeComp) -> bool: ... def __le__(self, other: _dateTimeComp) -> bool: ... def __gt__(self, other: _dateTimeComp) -> bool: ... 
def __ge__(self, other: _dateTimeComp) -> bool: ... def __eq__(self, other: _dateTimeComp) -> bool: ... # type: ignore def __ne__(self, other: _dateTimeComp) -> bool: ... # type: ignore def timetuple(self) -> struct_time: ... def __cmp__(self, other: _dateTimeComp) -> int: ... def decode(self, data: Any) -> None: ... def encode(self, out: IO[str]) -> None: ... class Binary: data: str def __init__(self, data: Optional[str] = ...) -> None: ... def __cmp__(self, other: Any) -> int: ... def decode(self, data: str) -> None: ... def encode(self, out: IO[str]) -> None: ... WRAPPERS: Tuple[Type[Any], ...] # Still part of the public API, but see http://bugs.python.org/issue1773632 FastParser: None FastUnmarshaller: None FastMarshaller: None # xmlrpclib.py will leave ExpatParser undefined if it can't import expat from # xml.parsers. Because this is Python 2.7, the import will succeed. class ExpatParser: def __init__(self, target: _Unmarshaller) -> None: ... def feed(self, data: str): ... def close(self): ... # TODO: Add xmllib.XMLParser as base class class SlowParser: handle_xml: Callable[[str, bool], None] unknown_starttag: Callable[[str, Any], None] handle_data: Callable[[str], None] handle_cdata: Callable[[str], None] unknown_endtag: Callable[[str, Callable[[Iterable[str], str], str]], None] def __init__(self, target: _Unmarshaller) -> None: ... class Marshaller: memo: MutableMapping[int, Any] data: Optional[str] encoding: Optional[str] allow_none: bool def __init__(self, encoding: Optional[str] = ..., allow_none: bool = ...) -> None: ... dispatch: Mapping[type, Callable[[Marshaller, str, Callable[[str], None]], None]] def dumps( self, values: Union[ Iterable[ Union[ None, int, bool, long, float, str, unicode, List[Any], Tuple[Any, ...], Mapping[Any, Any], datetime, InstanceType, ], ], Fault, ], ) -> str: ... def dump_nil(self, value: None, write: Callable[[str], None]) -> None: ... def dump_int(self, value: int, write: Callable[[str], None]) -> None: ... 
def dump_bool(self, value: bool, write: Callable[[str], None]) -> None: ... def dump_long(self, value: long, write: Callable[[str], None]) -> None: ... def dump_double(self, value: float, write: Callable[[str], None]) -> None: ... def dump_string(self, value: str, write: Callable[[str], None], escape: Callable[[AnyStr, Callable[[AnyStr, AnyStr, AnyStr], AnyStr]], AnyStr] = ...) -> None: ... def dump_unicode(self, value: unicode, write: Callable[[str], None], escape: Callable[[AnyStr, Callable[[AnyStr, AnyStr, AnyStr], AnyStr]], AnyStr] = ...) -> None: ... def dump_array(self, value: Iterable[Any], write: Callable[[str], None]) -> None: ... def dump_struct( self, value: Mapping[unicode, Any], write: Callable[[str], None], escape: Callable[[AnyStr, Callable[[AnyStr, AnyStr, AnyStr], AnyStr]], AnyStr] = ..., ) -> None: ... def dump_datetime(self, value: datetime, write: Callable[[str], None]) -> None: ... def dump_instance(self, value: InstanceType, write: Callable[[str], None]) -> None: ... class Unmarshaller: def append(self, object: Any) -> None: ... def __init__(self, use_datetime: bool = ...) -> None: ... def close(self) -> Tuple[Any, ...]: ... def getmethodname(self) -> Optional[str]: ... def xml(self, encoding: str, standalone: bool) -> None: ... def start(self, tag: str, attrs: Any) -> None: ... def data(self, text: str) -> None: ... def end(self, tag: str, join: Callable[[Iterable[str], str], str] = ...) -> None: ... def end_dispatch(self, tag: str, data: str) -> None: ... dispatch: Mapping[str, Callable[[Unmarshaller, str], None]] def end_nil(self, data: str): ... def end_boolean(self, data: str) -> None: ... def end_int(self, data: str) -> None: ... def end_double(self, data: str) -> None: ... def end_string(self, data: str) -> None: ... def end_array(self, data: str) -> None: ... def end_struct(self, data: str) -> None: ... def end_base64(self, data: str) -> None: ... def end_dateTime(self, data: str) -> None: ... 
def end_value(self, data: str) -> None: ... def end_params(self, data: str) -> None: ... def end_fault(self, data: str) -> None: ... def end_methodName(self, data: str) -> None: ... class _MultiCallMethod: def __init__(self, call_list: List[Tuple[str, Tuple[Any, ...]]], name: str) -> None: ... class MultiCallIterator: def __init__(self, results: List[Any]) -> None: ... class MultiCall: def __init__(self, server: ServerProxy) -> None: ... def __getattr__(self, name: str) -> _MultiCallMethod: ... def __call__(self) -> MultiCallIterator: ... def getparser(use_datetime: bool = ...) -> Tuple[Union[ExpatParser, SlowParser], Unmarshaller]: ... def dumps( params: Union[Tuple[Any, ...], Fault], methodname: Optional[str] = ..., methodresponse: Optional[bool] = ..., encoding: Optional[str] = ..., allow_none: bool = ..., ) -> str: ... def loads(data: str, use_datetime: bool = ...) -> Tuple[Tuple[Any, ...], Optional[str]]: ... def gzip_encode(data: str) -> str: ... def gzip_decode(data: str, max_decode: int = ...) -> str: ... class GzipDecodedResponse(GzipFile): stringio: StringIO[Any] def __init__(self, response: HTTPResponse) -> None: ... def close(self): ... class _Method: def __init__(self, send: Callable[[str, Tuple[Any, ...]], Any], name: str) -> None: ... def __getattr__(self, name: str) -> _Method: ... def __call__(self, *args: Any) -> Any: ... class Transport: user_agent: str accept_gzip_encoding: bool encode_threshold: Optional[int] def __init__(self, use_datetime: bool = ...) -> None: ... def request(self, host: _hostDesc, handler: str, request_body: str, verbose: bool = ...) -> Tuple[Any, ...]: ... verbose: bool def single_request(self, host: _hostDesc, handler: str, request_body: str, verbose: bool = ...) -> Tuple[Any, ...]: ... def getparser(self) -> Tuple[Union[ExpatParser, SlowParser], Unmarshaller]: ... def get_host_info(self, host: _hostDesc) -> Tuple[str, Optional[List[Tuple[str, str]]], Optional[Mapping[Any, Any]]]: ... 
def make_connection(self, host: _hostDesc) -> HTTPConnection: ... def close(self) -> None: ... def send_request(self, connection: HTTPConnection, handler: str, request_body: str) -> None: ... def send_host(self, connection: HTTPConnection, host: str) -> None: ... def send_user_agent(self, connection: HTTPConnection) -> None: ... def send_content(self, connection: HTTPConnection, request_body: str) -> None: ... def parse_response(self, response: HTTPResponse) -> Tuple[Any, ...]: ... class SafeTransport(Transport): def __init__(self, use_datetime: bool = ..., context: Optional[SSLContext] = ...) -> None: ... def make_connection(self, host: _hostDesc) -> HTTPSConnection: ... class ServerProxy: def __init__(self, uri: str, transport: Optional[Transport] = ..., encoding: Optional[str] = ..., verbose: bool = ..., allow_none: bool = ..., use_datetime: bool = ..., context: Optional[SSLContext] = ...) -> None: ... def __getattr__(self, name: str) -> _Method: ... def __call__(self, attr: str) -> Optional[Transport]: ... Server = ServerProxy mypy-0.761/mypy/typeshed/stdlib/2and3/0000755€tŠÔÚ€2›s®0000000000013576752267023736 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/__future__.pyi0000644€tŠÔÚ€2›s®0000000111313576752252026555 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import List class _Feature: def getOptionalRelease(self) -> sys._version_info: ... def getMandatoryRelease(self) -> sys._version_info: ... 
compiler_flag: int absolute_import: _Feature division: _Feature generators: _Feature nested_scopes: _Feature print_function: _Feature unicode_literals: _Feature with_statement: _Feature if sys.version_info >= (3, 0): barry_as_FLUFL: _Feature if sys.version_info >= (3, 5): generator_stop: _Feature if sys.version_info >= (3, 7): annotations: _Feature all_feature_names: List[str] # undocumented mypy-0.761/mypy/typeshed/stdlib/2and3/_bisect.pyi0000644€tŠÔÚ€2›s®0000000116113576752252026062 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_bisect' module.""" from typing import Sequence, MutableSequence, TypeVar _T = TypeVar('_T') def bisect(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def bisect_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def bisect_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def insort(a: MutableSequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ... def insort_left(a: MutableSequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ... def insort_right(a: MutableSequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/_codecs.pyi0000644€tŠÔÚ€2›s®0000001163213576752252026055 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_codecs' module.""" import sys from typing import Any, Callable, Tuple, Optional, Dict, Text, Union import codecs # For convenience: _Handler = Callable[[Exception], Tuple[Text, int]] _String = Union[bytes, str] _Errors = Union[str, Text, None] if sys.version_info < (3, 0): _Decodable = Union[bytes, Text] _Encodable = Union[bytes, Text] else: _Decodable = bytes _Encodable = str # This type is not exposed; it is defined in unicodeobject.c class _EncodingMap(object): def size(self) -> int: ... _MapT = Union[Dict[int, int], _EncodingMap] def register(search_function: Callable[[str], Any]) -> None: ... 
def register_error(errors: Union[str, Text], handler: _Handler) -> None: ... def lookup(encoding: Union[str, Text]) -> codecs.CodecInfo: ... def lookup_error(name: Union[str, Text]) -> _Handler: ... def decode(obj: Any, encoding: Union[str, Text] = ..., errors: _Errors = ...) -> Any: ... def encode(obj: Any, encoding: Union[str, Text] = ..., errors: _Errors = ...) -> Any: ... def charmap_build(map: Text) -> _MapT: ... def ascii_decode(data: _Decodable, errors: _Errors = ...) -> Tuple[Text, int]: ... def ascii_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def charbuffer_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def charmap_decode(data: _Decodable, errors: _Errors = ..., mapping: Optional[_MapT] = ...) -> Tuple[Text, int]: ... def charmap_encode(data: _Encodable, errors: _Errors, mapping: Optional[_MapT] = ...) -> Tuple[bytes, int]: ... def escape_decode(data: _String, errors: _Errors = ...) -> Tuple[str, int]: ... def escape_encode(data: bytes, errors: _Errors = ...) -> Tuple[bytes, int]: ... def latin_1_decode(data: _Decodable, errors: _Errors = ...) -> Tuple[Text, int]: ... def latin_1_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def raw_unicode_escape_decode(data: _String, errors: _Errors = ...) -> Tuple[Text, int]: ... def raw_unicode_escape_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def readbuffer_encode(data: _String, errors: _Errors = ...) -> Tuple[bytes, int]: ... def unicode_escape_decode(data: _String, errors: _Errors = ...) -> Tuple[Text, int]: ... def unicode_escape_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... if sys.version_info < (3, 8): def unicode_internal_decode(data: _String, errors: _Errors = ...) -> Tuple[Text, int]: ... def unicode_internal_encode(data: _String, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_16_be_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) 
-> Tuple[Text, int]: ... def utf_16_be_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_16_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_16_encode(data: _Encodable, errors: _Errors = ..., byteorder: int = ...) -> Tuple[bytes, int]: ... def utf_16_ex_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int, int]: ... def utf_16_le_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_16_le_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_32_be_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_32_be_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_32_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_32_encode(data: _Encodable, errors: _Errors = ..., byteorder: int = ...) -> Tuple[bytes, int]: ... def utf_32_ex_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int, int]: ... def utf_32_le_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_32_le_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_7_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_7_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_8_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_8_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... if sys.platform == 'win32': def mbcs_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def mbcs_encode(str: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... 
if sys.version_info >= (3, 0): def oem_decode(data: bytes, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def code_page_decode(codepage: int, data: bytes, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def oem_encode(str: Text, errors: _Errors = ...) -> Tuple[bytes, int]: ... def code_page_encode(code_page: int, str: Text, errors: _Errors = ...) -> Tuple[bytes, int]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/_csv.pyi0000644€tŠÔÚ€2›s®0000000263513576752252025413 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Iterable, Iterator, List, Optional, Sequence, Text QUOTE_ALL: int QUOTE_MINIMAL: int QUOTE_NONE: int QUOTE_NONNUMERIC: int class Error(Exception): ... class Dialect: delimiter: str quotechar: Optional[str] escapechar: Optional[str] doublequote: bool skipinitialspace: bool lineterminator: str quoting: int strict: int def __init__(self) -> None: ... class _reader(Iterator[List[str]]): dialect: Dialect line_num: int if sys.version_info >= (3, 0): def __next__(self) -> List[str]: ... else: def next(self) -> List[str]: ... class _writer: dialect: Dialect if sys.version_info >= (3, 5): def writerow(self, row: Iterable[Any]) -> None: ... def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ... else: def writerow(self, row: Sequence[Any]) -> None: ... def writerows(self, rows: Iterable[Sequence[Any]]) -> None: ... # TODO: precise type def writer(csvfile: Any, dialect: Any = ..., **fmtparams: Any) -> _writer: ... def reader(csvfile: Iterable[Text], dialect: Any = ..., **fmtparams: Any) -> _reader: ... def register_dialect(name: str, dialect: Any = ..., **fmtparams: Any) -> None: ... def unregister_dialect(name: str) -> None: ... def get_dialect(name: str) -> Dialect: ... def list_dialects() -> List[str]: ... def field_size_limit(new_limit: int = ...) -> int: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/_curses.pyi0000644€tŠÔÚ€2›s®0000003176213576752252026127 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, BinaryIO, IO, NamedTuple, Optional, Tuple, Union, overload _chtype = Union[str, bytes, int] ALL_MOUSE_EVENTS: int A_ALTCHARSET: int A_ATTRIBUTES: int A_BLINK: int A_BOLD: int A_CHARTEXT: int A_COLOR: int A_DIM: int A_HORIZONTAL: int A_INVIS: int if sys.version_info >= (3, 7): A_ITALIC: int A_LEFT: int A_LOW: int A_NORMAL: int A_PROTECT: int A_REVERSE: int A_RIGHT: int A_STANDOUT: int A_TOP: int A_UNDERLINE: int A_VERTICAL: int BUTTON1_CLICKED: int BUTTON1_DOUBLE_CLICKED: int BUTTON1_PRESSED: int BUTTON1_RELEASED: int BUTTON1_TRIPLE_CLICKED: int BUTTON2_CLICKED: int BUTTON2_DOUBLE_CLICKED: int BUTTON2_PRESSED: int BUTTON2_RELEASED: int BUTTON2_TRIPLE_CLICKED: int BUTTON3_CLICKED: int BUTTON3_DOUBLE_CLICKED: int BUTTON3_PRESSED: int BUTTON3_RELEASED: int BUTTON3_TRIPLE_CLICKED: int BUTTON4_CLICKED: int BUTTON4_DOUBLE_CLICKED: int BUTTON4_PRESSED: int BUTTON4_RELEASED: int BUTTON4_TRIPLE_CLICKED: int BUTTON_ALT: int BUTTON_CTRL: int BUTTON_SHIFT: int COLOR_BLACK: int COLOR_BLUE: int COLOR_CYAN: int COLOR_GREEN: int COLOR_MAGENTA: int COLOR_RED: int COLOR_WHITE: int COLOR_YELLOW: int ERR: int KEY_A1: int KEY_A3: int KEY_B2: int KEY_BACKSPACE: int KEY_BEG: int KEY_BREAK: int KEY_BTAB: int KEY_C1: int KEY_C3: int KEY_CANCEL: int KEY_CATAB: int KEY_CLEAR: int KEY_CLOSE: int KEY_COMMAND: int KEY_COPY: int KEY_CREATE: int KEY_CTAB: int KEY_DC: int KEY_DL: int KEY_DOWN: int KEY_EIC: int KEY_END: int KEY_ENTER: int KEY_EOL: int KEY_EOS: int KEY_EXIT: int KEY_F0: int KEY_F1: int KEY_F10: int KEY_F11: int KEY_F12: int KEY_F13: int KEY_F14: int KEY_F15: int KEY_F16: int KEY_F17: int KEY_F18: int KEY_F19: int KEY_F2: int KEY_F20: int KEY_F21: int KEY_F22: int KEY_F23: int KEY_F24: int KEY_F25: int KEY_F26: int KEY_F27: int KEY_F28: int KEY_F29: int KEY_F3: int KEY_F30: int KEY_F31: int KEY_F32: int KEY_F33: 
int KEY_F34: int KEY_F35: int KEY_F36: int KEY_F37: int KEY_F38: int KEY_F39: int KEY_F4: int KEY_F40: int KEY_F41: int KEY_F42: int KEY_F43: int KEY_F44: int KEY_F45: int KEY_F46: int KEY_F47: int KEY_F48: int KEY_F49: int KEY_F5: int KEY_F50: int KEY_F51: int KEY_F52: int KEY_F53: int KEY_F54: int KEY_F55: int KEY_F56: int KEY_F57: int KEY_F58: int KEY_F59: int KEY_F6: int KEY_F60: int KEY_F61: int KEY_F62: int KEY_F63: int KEY_F7: int KEY_F8: int KEY_F9: int KEY_FIND: int KEY_HELP: int KEY_HOME: int KEY_IC: int KEY_IL: int KEY_LEFT: int KEY_LL: int KEY_MARK: int KEY_MAX: int KEY_MESSAGE: int KEY_MIN: int KEY_MOUSE: int KEY_MOVE: int KEY_NEXT: int KEY_NPAGE: int KEY_OPEN: int KEY_OPTIONS: int KEY_PPAGE: int KEY_PREVIOUS: int KEY_PRINT: int KEY_REDO: int KEY_REFERENCE: int KEY_REFRESH: int KEY_REPLACE: int KEY_RESET: int KEY_RESIZE: int KEY_RESTART: int KEY_RESUME: int KEY_RIGHT: int KEY_SAVE: int KEY_SBEG: int KEY_SCANCEL: int KEY_SCOMMAND: int KEY_SCOPY: int KEY_SCREATE: int KEY_SDC: int KEY_SDL: int KEY_SELECT: int KEY_SEND: int KEY_SEOL: int KEY_SEXIT: int KEY_SF: int KEY_SFIND: int KEY_SHELP: int KEY_SHOME: int KEY_SIC: int KEY_SLEFT: int KEY_SMESSAGE: int KEY_SMOVE: int KEY_SNEXT: int KEY_SOPTIONS: int KEY_SPREVIOUS: int KEY_SPRINT: int KEY_SR: int KEY_SREDO: int KEY_SREPLACE: int KEY_SRESET: int KEY_SRIGHT: int KEY_SRSUME: int KEY_SSAVE: int KEY_SSUSPEND: int KEY_STAB: int KEY_SUNDO: int KEY_SUSPEND: int KEY_UNDO: int KEY_UP: int OK: int REPORT_MOUSE_POSITION: int _C_API: Any version: bytes def baudrate() -> int: ... def beep() -> None: ... def can_change_color() -> bool: ... def cbreak(flag: bool = ...) -> None: ... def color_content(color_number: int) -> Tuple[int, int, int]: ... def color_pair(color_number: int) -> int: ... def curs_set(visibility: int) -> int: ... def def_prog_mode() -> None: ... def def_shell_mode() -> None: ... def delay_output(ms: int) -> None: ... def doupdate() -> None: ... def echo(flag: bool = ...) -> None: ... 
def endwin() -> None: ... def erasechar() -> bytes: ... def filter() -> None: ... def flash() -> None: ... def flushinp() -> None: ... def getmouse() -> Tuple[int, int, int, int, int]: ... def getsyx() -> Tuple[int, int]: ... def getwin(f: BinaryIO) -> _CursesWindow: ... def halfdelay(tenths: int) -> None: ... def has_colors() -> bool: ... def has_ic() -> bool: ... def has_il() -> bool: ... def has_key(ch: int) -> bool: ... def init_color(color_number: int, r: int, g: int, b: int) -> None: ... def init_pair(pair_number: int, fg: int, bg: int) -> None: ... def initscr() -> _CursesWindow: ... def intrflush(ch: bool) -> None: ... def is_term_resized(nlines: int, ncols: int) -> bool: ... def isendwin() -> bool: ... def keyname(k: int) -> bytes: ... def killchar() -> bytes: ... def longname() -> bytes: ... def meta(yes: bool) -> None: ... def mouseinterval(interval: int) -> None: ... def mousemask(mousemask: int) -> Tuple[int, int]: ... def napms(ms: int) -> int: ... def newpad(nlines: int, ncols: int) -> _CursesWindow: ... def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ...) -> _CursesWindow: ... def nl(flag: bool = ...) -> None: ... def nocbreak() -> None: ... def noecho() -> None: ... def nonl() -> None: ... def noqiflush() -> None: ... def noraw() -> None: ... def pair_content(pair_number: int) -> Tuple[int, int]: ... def pair_number(attr: int) -> int: ... def putp(string: bytes) -> None: ... def qiflush(flag: bool = ...) -> None: ... def raw(flag: bool = ...) -> None: ... def reset_prog_mode() -> None: ... def reset_shell_mode() -> None: ... def resetty() -> None: ... def resize_term(nlines: int, ncols: int) -> None: ... def resizeterm(nlines: int, ncols: int) -> None: ... def savetty() -> None: ... def setsyx(y: int, x: int) -> None: ... def setupterm(termstr: str = ..., fd: int = ...) -> None: ... def start_color() -> None: ... def termattrs() -> int: ... def termname() -> bytes: ... def tigetflag(capname: str) -> int: ... 
def tigetnum(capname: str) -> int: ... def tigetstr(capname: str) -> bytes: ... def tparm(fmt: bytes, i1: int = ..., i2: int = ..., i3: int = ..., i4: int = ..., i5: int = ..., i6: int = ..., i7: int = ..., i8: int = ..., i9: int = ...) -> bytes: ... def typeahead(fd: int) -> None: ... def unctrl(ch: _chtype) -> bytes: ... if sys.version_info >= (3, 3): def unget_wch(ch: Union[int, str]) -> None: ... def ungetch(ch: _chtype) -> None: ... def ungetmouse(id: int, x: int, y: int, z: int, bstate: int) -> None: ... if sys.version_info >= (3, 5): def update_lines_cols() -> int: ... def use_default_colors() -> None: ... def use_env(flag: bool) -> None: ... class error(Exception): ... class _CursesWindow: if sys.version_info >= (3, 3): encoding: str @overload def addch(self, ch: _chtype, attr: int = ...) -> None: ... @overload def addch(self, y: int, x: int, ch: _chtype, attr: int = ...) -> None: ... @overload def addnstr(self, str: str, n: int, attr: int = ...) -> None: ... @overload def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... @overload def addstr(self, str: str, attr: int = ...) -> None: ... @overload def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... def attroff(self, attr: int) -> None: ... def attron(self, attr: int) -> None: ... def attrset(self, attr: int) -> None: ... def bkgd(self, ch: _chtype, attr: int = ...) -> None: ... def bkgset(self, ch: _chtype, attr: int = ...) -> None: ... def border(self, ls: _chtype = ..., rs: _chtype = ..., ts: _chtype = ..., bs: _chtype = ..., tl: _chtype = ..., tr: _chtype = ..., bl: _chtype = ..., br: _chtype = ...) -> None: ... @overload def box(self) -> None: ... @overload def box(self, vertch: _chtype = ..., horch: _chtype = ...) -> None: ... @overload def chgat(self, attr: int) -> None: ... @overload def chgat(self, num: int, attr: int) -> None: ... @overload def chgat(self, y: int, x: int, attr: int) -> None: ... 
@overload def chgat(self, y: int, x: int, num: int, attr: int) -> None: ... def clear(self) -> None: ... def clearok(self, yes: int) -> None: ... def clrtobot(self) -> None: ... def clrtoeol(self) -> None: ... def cursyncup(self) -> None: ... @overload def delch(self) -> None: ... @overload def delch(self, y: int, x: int) -> None: ... def deleteln(self) -> None: ... @overload def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... @overload def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... def echochar(self, ch: _chtype, attr: int = ...) -> None: ... def enclose(self, y: int, x: int) -> bool: ... def erase(self) -> None: ... def getbegyx(self) -> Tuple[int, int]: ... def getbkgd(self) -> Tuple[int, int]: ... @overload def getch(self) -> int: ... @overload def getch(self, y: int, x: int) -> int: ... if sys.version_info >= (3, 3): @overload def get_wch(self) -> Union[int, str]: ... @overload def get_wch(self, y: int, x: int) -> Union[int, str]: ... @overload def getkey(self) -> str: ... @overload def getkey(self, y: int, x: int) -> str: ... def getmaxyx(self) -> Tuple[int, int]: ... def getparyx(self) -> Tuple[int, int]: ... @overload def getstr(self) -> _chtype: ... @overload def getstr(self, n: int) -> _chtype: ... @overload def getstr(self, y: int, x: int) -> _chtype: ... @overload def getstr(self, y: int, x: int, n: int) -> _chtype: ... def getyx(self) -> Tuple[int, int]: ... @overload def hline(self, ch: _chtype, n: int) -> None: ... @overload def hline(self, y: int, x: int, ch: _chtype, n: int) -> None: ... def idcok(self, flag: bool) -> None: ... def idlok(self, yes: bool) -> None: ... def immedok(self, flag: bool) -> None: ... @overload def inch(self) -> _chtype: ... @overload def inch(self, y: int, x: int) -> _chtype: ... @overload def insch(self, ch: _chtype, attr: int = ...) -> None: ... @overload def insch(self, y: int, x: int, ch: _chtype, attr: int = ...) -> None: ... 
def insdelln(self, nlines: int) -> None: ... def insertln(self) -> None: ... @overload def insnstr(self, str: str, n: int, attr: int = ...) -> None: ... @overload def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... @overload def insstr(self, str: str, attr: int = ...) -> None: ... @overload def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... @overload def instr(self, n: int = ...) -> _chtype: ... @overload def instr(self, y: int, x: int, n: int = ...) -> _chtype: ... def is_linetouched(self, line: int) -> bool: ... def is_wintouched(self) -> bool: ... def keypad(self, yes: bool) -> None: ... def leaveok(self, yes: bool) -> None: ... def move(self, new_y: int, new_x: int) -> None: ... def mvderwin(self, y: int, x: int) -> None: ... def mvwin(self, new_y: int, new_x: int) -> None: ... def nodelay(self, yes: bool) -> None: ... def notimeout(self, yes: bool) -> None: ... def noutrefresh(self) -> None: ... @overload def overlay(self, destwin: _CursesWindow) -> None: ... @overload def overlay(self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int) -> None: ... @overload def overwrite(self, destwin: _CursesWindow) -> None: ... @overload def overwrite(self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int) -> None: ... def putwin(self, file: IO[Any]) -> None: ... def redrawln(self, beg: int, num: int) -> None: ... def redrawwin(self) -> None: ... @overload def refresh(self) -> None: ... @overload def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... def resize(self, nlines: int, ncols: int) -> None: ... def scroll(self, lines: int = ...) -> None: ... def scrollok(self, flag: bool) -> None: ... def setscrreg(self, top: int, bottom: int) -> None: ... def standend(self) -> None: ... def standout(self) -> None: ... 
@overload def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ... @overload def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... @overload def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... @overload def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... def syncdown(self) -> None: ... def syncok(self, flag: bool) -> None: ... def syncup(self) -> None: ... def timeout(self, delay: int) -> None: ... def touchline(self, start: int, count: int, changed: bool = ...) -> None: ... def touchwin(self) -> None: ... def untouchwin(self) -> None: ... @overload def vline(self, ch: _chtype, n: int) -> None: ... @overload def vline(self, y: int, x: int, ch: _chtype, n: int) -> None: ... if sys.version_info >= (3, 8): class _ncurses_version(NamedTuple): major: int minor: int patch: int ncurses_version: _ncurses_version mypy-0.761/mypy/typeshed/stdlib/2and3/_heapq.pyi0000644€tŠÔÚ€2›s®0000000103513576752252025707 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_heapq' module.""" from typing import TypeVar, List, Iterable, Any, Callable, Optional import sys _T = TypeVar("_T") def heapify(heap: List[_T]) -> None: ... def heappop(heap: List[_T]) -> _T: ... def heappush(heap: List[_T], item: _T) -> None: ... def heappushpop(heap: List[_T], item: _T) -> _T: ... def heapreplace(heap: List[_T], item: _T) -> _T: ... if sys.version_info < (3,): def nlargest(n: int, iterable: Iterable[_T]) -> List[_T]: ... def nsmallest(n: int, iterable: Iterable[_T]) -> List[_T]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/_random.pyi0000644€tŠÔÚ€2›s®0000000075513576752252026101 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _random import sys from typing import Tuple # Actually Tuple[(int,) * 625] _State = Tuple[int, ...] class Random(object): def __init__(self, seed: object = ...) -> None: ... def seed(self, x: object = ...) -> None: ... def getstate(self) -> _State: ... 
def setstate(self, state: _State) -> None: ... def random(self) -> float: ... def getrandbits(self, k: int) -> int: ... if sys.version_info < (3,): def jumpahead(self, i: int) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/_warnings.pyi0000644€tŠÔÚ€2›s®0000000204713576752252026445 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Dict, List, Optional, Tuple, Type, Union, overload if sys.version_info >= (3, 0): _defaultaction: str _onceregistry: Dict[Any, Any] else: default_action: str once_registry: Dict[Any, Any] filters: List[Tuple[Any, ...]] @overload def warn(message: str, category: Optional[Type[Warning]] = ..., stacklevel: int = ...) -> None: ... @overload def warn(message: Warning, category: Any = ..., stacklevel: int = ...) -> None: ... @overload def warn_explicit( message: str, category: Type[Warning], filename: str, lineno: int, module: Optional[str] = ..., registry: Optional[Dict[Union[str, Tuple[str, Type[Warning], int]], int]] = ..., module_globals: Optional[Dict[str, Any]] = ..., ) -> None: ... @overload def warn_explicit( message: Warning, category: Any, filename: str, lineno: int, module: Optional[str] = ..., registry: Optional[Dict[Union[str, Tuple[str, Type[Warning], int]], int]] = ..., module_globals: Optional[Dict[str, Any]] = ..., ) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/_weakref.pyi0000644€tŠÔÚ€2›s®0000000200413576752252026232 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Generic, Optional, TypeVar, overload _C = TypeVar('_C', bound=Callable[..., Any]) _T = TypeVar('_T') class CallableProxyType(object): # "weakcallableproxy" def __getattr__(self, attr: str) -> Any: ... class ProxyType(object): # "weakproxy" def __getattr__(self, attr: str) -> Any: ... class ReferenceType(Generic[_T]): if sys.version_info >= (3, 4): __callback__: Callable[[ReferenceType[_T]], Any] def __init__(self, o: _T, callback: Optional[Callable[[ReferenceType[_T]], Any]] = ...) 
-> None: ... def __call__(self) -> Optional[_T]: ... def __hash__(self) -> int: ... ref = ReferenceType def getweakrefcount(object: Any) -> int: ... def getweakrefs(object: Any) -> int: ... @overload def proxy(object: _C, callback: Optional[Callable[[_C], Any]] = ...) -> CallableProxyType: ... # Return CallableProxyType if object is callable, ProxyType otherwise @overload def proxy(object: _T, callback: Optional[Callable[[_T], Any]] = ...) -> Any: ... mypy-0.761/mypy/typeshed/stdlib/2and3/_weakrefset.pyi0000644€tŠÔÚ€2›s®0000000427713576752252026764 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterator, Any, Iterable, MutableSet, Optional, TypeVar, Generic, Union _S = TypeVar('_S') _T = TypeVar('_T') _SelfT = TypeVar('_SelfT', bound=WeakSet[Any]) class WeakSet(MutableSet[_T], Generic[_T]): def __init__(self, data: Optional[Iterable[_T]] = ...) -> None: ... def add(self, item: _T) -> None: ... def clear(self) -> None: ... def discard(self, item: _T) -> None: ... def copy(self: _SelfT) -> _SelfT: ... def pop(self) -> _T: ... def remove(self, item: _T) -> None: ... def update(self, other: Iterable[_T]) -> None: ... def __contains__(self, item: object) -> bool: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __ior__(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def difference(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def __sub__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def difference_update(self: _SelfT, other: Iterable[_T]) -> None: ... def __isub__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def intersection(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def __and__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def intersection_update(self, other: Iterable[_T]) -> None: ... def __iand__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def issubset(self, other: Iterable[_T]) -> bool: ... def __le__(self, other: Iterable[_T]) -> bool: ... 
def __lt__(self, other: Iterable[_T]) -> bool: ... def issuperset(self, other: Iterable[_T]) -> bool: ... def __ge__(self, other: Iterable[_T]) -> bool: ... def __gt__(self, other: Iterable[_T]) -> bool: ... def __eq__(self, other: object) -> bool: ... def symmetric_difference(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def __xor__(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def symmetric_difference_update(self, other: Iterable[_S]) -> None: ... def __ixor__(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def union(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def __or__(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def isdisjoint(self, other: Iterable[_T]) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/aifc.pyi0000644€tŠÔÚ€2›s®0000000636513576752252025367 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Union, IO, Optional, Type, NamedTuple, List, Tuple, Any, Text, overload from typing_extensions import Literal from types import TracebackType import sys class Error(Exception): ... class _aifc_params(NamedTuple): nchannels: int sampwidth: int framerate: int nframes: int comptype: bytes compname: bytes _File = Union[Text, IO[bytes]] _Marker = Tuple[int, int, bytes] class Aifc_read: def __init__(self, f: _File) -> None: ... if sys.version_info >= (3, 4): def __enter__(self) -> Aifc_read: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> None: ... def initfp(self, file: IO[bytes]) -> None: ... def getfp(self) -> IO[bytes]: ... def rewind(self) -> None: ... def close(self) -> None: ... def tell(self) -> int: ... def getnchannels(self) -> int: ... def getnframes(self) -> int: ... def getsampwidth(self) -> int: ... def getframerate(self) -> int: ... def getcomptype(self) -> bytes: ... def getcompname(self) -> bytes: ... def getparams(self) -> _aifc_params: ... 
def getmarkers(self) -> Optional[List[_Marker]]: ... def getmark(self, id: int) -> _Marker: ... def setpos(self, pos: int) -> None: ... def readframes(self, nframes: int) -> bytes: ... class Aifc_write: def __init__(self, f: _File) -> None: ... def __del__(self) -> None: ... if sys.version_info >= (3, 4): def __enter__(self) -> Aifc_write: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> None: ... def initfp(self, file: IO[bytes]) -> None: ... def aiff(self) -> None: ... def aifc(self) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... def setsampwidth(self, sampwidth: int) -> None: ... def getsampwidth(self) -> int: ... def setframerate(self, framerate: int) -> None: ... def getframerate(self) -> int: ... def setnframes(self, nframes: int) -> None: ... def getnframes(self) -> int: ... def setcomptype(self, comptype: bytes, compname: bytes) -> None: ... def getcomptype(self) -> bytes: ... def getcompname(self) -> bytes: ... def setparams(self, params: Tuple[int, int, int, int, bytes, bytes]) -> None: ... def getparams(self) -> _aifc_params: ... def setmark(self, id: int, pos: int, name: bytes) -> None: ... def getmark(self, id: int) -> _Marker: ... def getmarkers(self) -> Optional[List[_Marker]]: ... def tell(self) -> int: ... def writeframesraw(self, data: Any) -> None: ... # Actual type for data is Buffer Protocol def writeframes(self, data: Any) -> None: ... def close(self) -> None: ... @overload def open(f: _File, mode: Literal["r", "rb"] = ...) -> Aifc_read: ... @overload def open(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... @overload def open(f: _File, mode: str) -> Any: ... @overload def openfp(f: _File, mode: Literal["r", "rb"] = ...) -> Aifc_read: ... @overload def openfp(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... @overload def openfp(f: _File, mode: str) -> Any: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/argparse.pyi0000644€tŠÔÚ€2›s®0000004213513576752252026264 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, Callable, Dict, Generator, Iterable, List, IO, NoReturn, Optional, Pattern, Sequence, Text, Tuple, Type, Union, TypeVar, overload ) import sys _T = TypeVar('_T') _ActionT = TypeVar('_ActionT', bound=Action) _N = TypeVar('_N') if sys.version_info >= (3,): _Text = str else: _Text = Union[str, unicode] ONE_OR_MORE: str OPTIONAL: str PARSER: str REMAINDER: str SUPPRESS: str ZERO_OR_MORE: str _UNRECOGNIZED_ARGS_ATTR: str # undocumented class ArgumentError(Exception): ... # undocumented class _AttributeHolder: def _get_kwargs(self) -> List[Tuple[str, Any]]: ... def _get_args(self) -> List[Any]: ... # undocumented class _ActionsContainer: description: Optional[_Text] prefix_chars: _Text argument_default: Optional[_Text] conflict_handler: _Text _registries: Dict[_Text, Dict[Any, Any]] _actions: List[Action] _option_string_actions: Dict[_Text, Action] _action_groups: List[_ArgumentGroup] _mutually_exclusive_groups: List[_MutuallyExclusiveGroup] _defaults: Dict[str, Any] _negative_number_matcher: Pattern[str] _has_negative_number_optionals: List[bool] def __init__(self, description: Optional[Text], prefix_chars: Text, argument_default: Optional[Text], conflict_handler: Text) -> None: ... def register(self, registry_name: Text, value: Any, object: Any) -> None: ... def _registry_get(self, registry_name: Text, value: Any, default: Any = ...) -> Any: ... def set_defaults(self, **kwargs: Any) -> None: ... def get_default(self, dest: Text) -> Any: ... 
def add_argument(self, *name_or_flags: Text, action: Union[Text, Type[Action]] = ..., nargs: Union[int, Text] = ..., const: Any = ..., default: Any = ..., type: Union[Callable[[Text], _T], Callable[[str], _T], FileType] = ..., choices: Iterable[_T] = ..., required: bool = ..., help: Optional[Text] = ..., metavar: Optional[Union[Text, Tuple[Text, ...]]] = ..., dest: Optional[Text] = ..., version: Text = ..., **kwargs: Any) -> Action: ... def add_argument_group(self, *args: Any, **kwargs: Any) -> _ArgumentGroup: ... def add_mutually_exclusive_group(self, **kwargs: Any) -> _MutuallyExclusiveGroup: ... def _add_action(self, action: _ActionT) -> _ActionT: ... def _remove_action(self, action: Action) -> None: ... def _add_container_actions(self, container: _ActionsContainer) -> None: ... def _get_positional_kwargs(self, dest: Text, **kwargs: Any) -> Dict[str, Any]: ... def _get_optional_kwargs(self, *args: Any, **kwargs: Any) -> Dict[str, Any]: ... def _pop_action_class(self, kwargs: Any, default: Optional[Type[Action]] = ...) -> Type[Action]: ... def _get_handler(self) -> Callable[[Action, Iterable[Tuple[Text, Action]]], Any]: ... def _check_conflict(self, action: Action) -> None: ... def _handle_conflict_error(self, action: Action, conflicting_actions: Iterable[Tuple[Text, Action]]) -> NoReturn: ... def _handle_conflict_resolve(self, action: Action, conflicting_actions: Iterable[Tuple[Text, Action]]) -> None: ... 
class ArgumentParser(_AttributeHolder, _ActionsContainer): prog: _Text usage: Optional[_Text] epilog: Optional[_Text] formatter_class: Type[HelpFormatter] fromfile_prefix_chars: Optional[_Text] add_help: bool if sys.version_info >= (3, 5): allow_abbrev: bool # undocumented _positionals: _ArgumentGroup _optionals: _ArgumentGroup _subparsers: Optional[_ArgumentGroup] if sys.version_info >= (3, 5): def __init__(self, prog: Optional[str] = ..., usage: Optional[str] = ..., description: Optional[str] = ..., epilog: Optional[str] = ..., parents: Sequence[ArgumentParser] = ..., formatter_class: Type[HelpFormatter] = ..., prefix_chars: str = ..., fromfile_prefix_chars: Optional[str] = ..., argument_default: Optional[str] = ..., conflict_handler: str = ..., add_help: bool = ..., allow_abbrev: bool = ...) -> None: ... else: def __init__(self, prog: Optional[Text] = ..., usage: Optional[Text] = ..., description: Optional[Text] = ..., epilog: Optional[Text] = ..., parents: Sequence[ArgumentParser] = ..., formatter_class: Type[HelpFormatter] = ..., prefix_chars: Text = ..., fromfile_prefix_chars: Optional[Text] = ..., argument_default: Optional[Text] = ..., conflict_handler: Text = ..., add_help: bool = ...) -> None: ... # The type-ignores in these overloads should be temporary. See: # https://github.com/python/typeshed/pull/2643#issuecomment-442280277 @overload def parse_args(self, args: Optional[Sequence[Text]] = ...) -> Namespace: ... @overload def parse_args(self, args: Optional[Sequence[Text]], namespace: None) -> Namespace: ... # type: ignore @overload def parse_args(self, args: Optional[Sequence[Text]], namespace: _N) -> _N: ... @overload def parse_args(self, *, namespace: None) -> Namespace: ... # type: ignore @overload def parse_args(self, *, namespace: _N) -> _N: ... 
if sys.version_info >= (3, 7): def add_subparsers(self, title: str = ..., description: Optional[str] = ..., prog: str = ..., parser_class: Type[ArgumentParser] = ..., action: Type[Action] = ..., option_string: str = ..., dest: Optional[str] = ..., required: bool = ..., help: Optional[str] = ..., metavar: Optional[str] = ...) -> _SubParsersAction: ... else: def add_subparsers(self, title: Text = ..., description: Optional[Text] = ..., prog: Text = ..., parser_class: Type[ArgumentParser] = ..., action: Type[Action] = ..., option_string: Text = ..., dest: Optional[Text] = ..., help: Optional[Text] = ..., metavar: Optional[Text] = ...) -> _SubParsersAction: ... def print_usage(self, file: Optional[IO[str]] = ...) -> None: ... def print_help(self, file: Optional[IO[str]] = ...) -> None: ... def format_usage(self) -> str: ... def format_help(self) -> str: ... def parse_known_args(self, args: Optional[Sequence[Text]] = ..., namespace: Optional[Namespace] = ...) -> Tuple[Namespace, List[str]]: ... def convert_arg_line_to_args(self, arg_line: Text) -> List[str]: ... def exit(self, status: int = ..., message: Optional[Text] = ...) -> NoReturn: ... def error(self, message: Text) -> NoReturn: ... if sys.version_info >= (3, 7): def parse_intermixed_args(self, args: Optional[Sequence[str]] = ..., namespace: Optional[Namespace] = ...) -> Namespace: ... def parse_known_intermixed_args(self, args: Optional[Sequence[str]] = ..., namespace: Optional[Namespace] = ...) -> Tuple[Namespace, List[str]]: ... # undocumented def _get_optional_actions(self) -> List[Action]: ... def _get_positional_actions(self) -> List[Action]: ... def _parse_known_args(self, arg_strings: List[Text], namespace: Namespace) -> Tuple[Namespace, List[str]]: ... def _read_args_from_files(self, arg_strings: List[Text]) -> List[Text]: ... def _match_argument(self, action: Action, arg_strings_pattern: Text) -> int: ... 
def _match_arguments_partial(self, actions: Sequence[Action], arg_strings_pattern: Text) -> List[int]: ... def _parse_optional(self, arg_string: Text) -> Optional[Tuple[Optional[Action], Text, Optional[Text]]]: ... def _get_option_tuples(self, option_string: Text) -> List[Tuple[Action, Text, Optional[Text]]]: ... def _get_nargs_pattern(self, action: Action) -> _Text: ... def _get_values(self, action: Action, arg_strings: List[Text]) -> Any: ... def _get_value(self, action: Action, arg_string: Text) -> Any: ... def _check_value(self, action: Action, value: Any) -> None: ... def _get_formatter(self) -> HelpFormatter: ... def _print_message(self, message: str, file: Optional[IO[str]] = ...) -> None: ... class HelpFormatter: # undocumented _prog: _Text _indent_increment: int _max_help_position: int _width: int _current_indent: int _level: int _action_max_length: int _root_section: Any _current_section: Any _whitespace_matcher: Pattern[str] _long_break_matcher: Pattern[str] _Section: Type[Any] # Nested class def __init__(self, prog: Text, indent_increment: int = ..., max_help_position: int = ..., width: Optional[int] = ...) -> None: ... def _indent(self) -> None: ... def _dedent(self) -> None: ... def _add_item(self, func: Callable[..., _Text], args: Iterable[Any]) -> None: ... def start_section(self, heading: Optional[Text]) -> None: ... def end_section(self) -> None: ... def add_text(self, text: Optional[Text]) -> None: ... def add_usage(self, usage: Text, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: Optional[Text] = ...) -> None: ... def add_argument(self, action: Action) -> None: ... def add_arguments(self, actions: Iterable[Action]) -> None: ... def format_help(self) -> _Text: ... def _join_parts(self, part_strings: Iterable[Text]) -> _Text: ... def _format_usage(self, usage: Text, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: Optional[Text]) -> _Text: ... 
def _format_actions_usage(self, actions: Iterable[Action], groups: Iterable[_ArgumentGroup]) -> _Text: ... def _format_text(self, text: Text) -> _Text: ... def _format_action(self, action: Action) -> _Text: ... def _format_action_invocation(self, action: Action) -> _Text: ... def _metavar_formatter(self, action: Action, default_metavar: Text) -> Callable[[int], Tuple[_Text, ...]]: ... def _format_args(self, action: Action, default_metavar: Text) -> _Text: ... def _expand_help(self, action: Action) -> _Text: ... def _iter_indented_subactions(self, action: Action) -> Generator[Action, None, None]: ... def _split_lines(self, text: Text, width: int) -> List[_Text]: ... def _fill_text(self, text: Text, width: int, indent: Text) -> _Text: ... def _get_help_string(self, action: Action) -> Optional[_Text]: ... def _get_default_metavar_for_optional(self, action: Action) -> _Text: ... def _get_default_metavar_for_positional(self, action: Action) -> _Text: ... class RawDescriptionHelpFormatter(HelpFormatter): ... class RawTextHelpFormatter(HelpFormatter): ... class ArgumentDefaultsHelpFormatter(HelpFormatter): ... if sys.version_info >= (3,): class MetavarTypeHelpFormatter(HelpFormatter): ... class Action(_AttributeHolder): option_strings: Sequence[_Text] dest: _Text nargs: Optional[Union[int, _Text]] const: Any default: Any type: Union[Callable[[str], Any], FileType, None] choices: Optional[Iterable[Any]] required: bool help: Optional[_Text] metavar: Optional[Union[_Text, Tuple[_Text, ...]]] def __init__(self, option_strings: Sequence[Text], dest: Text, nargs: Optional[Union[int, Text]] = ..., const: Any = ..., default: Any = ..., type: Optional[Union[Callable[[Text], _T], Callable[[str], _T], FileType]] = ..., choices: Optional[Iterable[_T]] = ..., required: bool = ..., help: Optional[Text] = ..., metavar: Optional[Union[Text, Tuple[Text, ...]]] = ...) -> None: ... 
def __call__(self, parser: ArgumentParser, namespace: Namespace, values: Union[Text, Sequence[Any], None], option_string: Optional[Text] = ...) -> None: ... class Namespace(_AttributeHolder): def __init__(self, **kwargs: Any) -> None: ... def __getattr__(self, name: Text) -> Any: ... def __setattr__(self, name: Text, value: Any) -> None: ... def __contains__(self, key: str) -> bool: ... class FileType: # undocumented _mode: _Text _bufsize: int if sys.version_info >= (3,): _encoding: Optional[str] _errors: Optional[str] def __init__(self, mode: str = ..., bufsize: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ...) -> None: ... else: def __init__(self, mode: Text = ..., bufsize: Optional[int] = ...) -> None: ... def __call__(self, string: Text) -> IO[Any]: ... # undocumented class _ArgumentGroup(_ActionsContainer): title: Optional[_Text] _group_actions: List[Action] def __init__(self, container: _ActionsContainer, title: Optional[Text] = ..., description: Optional[Text] = ..., **kwargs: Any) -> None: ... # undocumented class _MutuallyExclusiveGroup(_ArgumentGroup): required: bool _container: _ActionsContainer def __init__(self, container: _ActionsContainer, required: bool = ...) -> None: ... # undocumented class _StoreAction(Action): ... # undocumented class _StoreConstAction(Action): def __init__(self, option_strings: Sequence[Text], dest: Text, const: Any, default: Any = ..., required: bool = ..., help: Optional[Text] = ..., metavar: Optional[Union[Text, Tuple[Text, ...]]] = ...) -> None: ... # undocumented class _StoreTrueAction(_StoreConstAction): def __init__(self, option_strings: Sequence[Text], dest: Text, default: bool = ..., required: bool = ..., help: Optional[Text] = ...) -> None: ... # undocumented class _StoreFalseAction(_StoreConstAction): def __init__(self, option_strings: Sequence[Text], dest: Text, default: bool = ..., required: bool = ..., help: Optional[Text] = ...) -> None: ... # undocumented class _AppendAction(Action): ... 
# undocumented class _AppendConstAction(Action): def __init__(self, option_strings: Sequence[Text], dest: Text, const: Any, default: Any = ..., required: bool = ..., help: Optional[Text] = ..., metavar: Optional[Union[Text, Tuple[Text, ...]]] = ...) -> None: ... # undocumented class _CountAction(Action): def __init__(self, option_strings: Sequence[Text], dest: Text, default: Any = ..., required: bool = ..., help: Optional[Text] = ...) -> None: ... # undocumented class _HelpAction(Action): def __init__(self, option_strings: Sequence[Text], dest: Text = ..., default: Text = ..., help: Optional[Text] = ...) -> None: ... # undocumented class _VersionAction(Action): version: Optional[_Text] def __init__(self, option_strings: Sequence[Text], version: Optional[Text] = ..., dest: Text = ..., default: Text = ..., help: Text = ...) -> None: ... # undocumented class _SubParsersAction(Action): _ChoicesPseudoAction: Type[Any] # nested class _prog_prefix: _Text _parser_class: Type[ArgumentParser] _name_parser_map: Dict[_Text, ArgumentParser] choices: Dict[_Text, ArgumentParser] _choices_actions: List[Action] def __init__(self, option_strings: Sequence[Text], prog: Text, parser_class: Type[ArgumentParser], dest: Text = ..., required: bool = ..., help: Optional[Text] = ..., metavar: Optional[Union[Text, Tuple[Text, ...]]] = ...) -> None: ... # TODO: Type keyword args properly. def add_parser(self, name: Text, **kwargs: Any) -> ArgumentParser: ... def _get_subactions(self) -> List[Action]: ... # undocumented class ArgumentTypeError(Exception): ... if sys.version_info < (3, 7): # undocumented def _ensure_value(namespace: Namespace, name: Text, value: Any) -> Any: ... # undocumented def _get_action_name(argument: Optional[Action]) -> Optional[str]: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/array.pyi0000644€tŠÔÚ€2›s®0000000545513576752252025602 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for array # Based on http://docs.python.org/3.6/library/array.html import sys from typing import (Any, BinaryIO, Generic, Iterable, Iterator, List, MutableSequence, overload, Text, Tuple, TypeVar, Union) _T = TypeVar('_T', int, float, Text) if sys.version_info >= (3,): typecodes: str class array(MutableSequence[_T], Generic[_T]): typecode: str itemsize: int def __init__(self, typecode: str, __initializer: Union[bytes, Iterable[_T]] = ...) -> None: ... def append(self, x: _T) -> None: ... def buffer_info(self) -> Tuple[int, int]: ... def byteswap(self) -> None: ... def count(self, x: Any) -> int: ... def extend(self, iterable: Iterable[_T]) -> None: ... if sys.version_info >= (3, 2): def frombytes(self, s: bytes) -> None: ... def fromfile(self, f: BinaryIO, n: int) -> None: ... def fromlist(self, list: List[_T]) -> None: ... def fromstring(self, s: bytes) -> None: ... def fromunicode(self, s: str) -> None: ... def index(self, x: _T) -> int: ... # type: ignore # Overrides Sequence def insert(self, i: int, x: _T) -> None: ... def pop(self, i: int = ...) -> _T: ... if sys.version_info < (3,): def read(self, f: BinaryIO, n: int) -> None: ... def remove(self, x: Any) -> None: ... def reverse(self) -> None: ... if sys.version_info >= (3, 2): def tobytes(self) -> bytes: ... def tofile(self, f: BinaryIO) -> None: ... def tolist(self) -> List[_T]: ... def tostring(self) -> bytes: ... def tounicode(self) -> str: ... if sys.version_info < (3,): def write(self, f: BinaryIO) -> None: ... def __len__(self) -> int: ... @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, s: slice) -> array[_T]: ... @overload # type: ignore # Overrides MutableSequence def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, s: slice, o: array[_T]) -> None: ... 
def __delitem__(self, i: Union[int, slice]) -> None: ... def __add__(self, x: array[_T]) -> array[_T]: ... def __ge__(self, other: array[_T]) -> bool: ... def __gt__(self, other: array[_T]) -> bool: ... def __iadd__(self, x: array[_T]) -> array[_T]: ... # type: ignore # Overrides MutableSequence def __imul__(self, n: int) -> array[_T]: ... def __le__(self, other: array[_T]) -> bool: ... def __lt__(self, other: array[_T]) -> bool: ... def __mul__(self, n: int) -> array[_T]: ... def __rmul__(self, n: int) -> array[_T]: ... if sys.version_info < (3,): def __delslice__(self, i: int, j: int) -> None: ... def __getslice__(self, i: int, j: int) -> array[_T]: ... def __setslice__(self, i: int, j: int, y: array[_T]) -> None: ... ArrayType = array mypy-0.761/mypy/typeshed/stdlib/2and3/asynchat.pyi0000644€tŠÔÚ€2›s®0000000302513576752252026265 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import abstractmethod import asyncore import socket import sys from typing import Optional, Sequence, Tuple, Union class simple_producer: def __init__(self, data: bytes, buffer_size: int = ...) -> None: ... def more(self) -> bytes: ... class async_chat(asyncore.dispatcher): ac_in_buffer_size: int ac_out_buffer_size: int def __init__(self, sock: Optional[socket.socket] = ..., map: Optional[asyncore._maptype] = ...) -> None: ... @abstractmethod def collect_incoming_data(self, data: bytes) -> None: ... @abstractmethod def found_terminator(self) -> None: ... def set_terminator(self, term: Union[bytes, int, None]) -> None: ... def get_terminator(self) -> Union[bytes, int, None]: ... def handle_read(self) -> None: ... def handle_write(self) -> None: ... def handle_close(self) -> None: ... def push(self, data: bytes) -> None: ... def push_with_producer(self, producer: simple_producer) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def close_when_done(self) -> None: ... def initiate_send(self) -> None: ... def discard_buffers(self) -> None: ... 
if sys.version_info < (3, 0): class fifo: def __init__(self, list: Sequence[Union[bytes, simple_producer]] = ...) -> None: ... def __len__(self) -> int: ... def is_empty(self) -> bool: ... def first(self) -> bytes: ... def push(self, data: Union[bytes, simple_producer]) -> None: ... def pop(self) -> Tuple[int, bytes]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/asyncore.pyi0000644€tŠÔÚ€2›s®0000001277313576752252026310 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, Union, Optional, Any, Dict, overload import os import select import sys import time import warnings from socket import SocketType from typing import Optional from errno import (EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, EINVAL, ENOTCONN, ESHUTDOWN, EINTR, EISCONN, EBADF, ECONNABORTED, EPIPE, EAGAIN, errorcode) # cyclic dependence with asynchat _maptype = Dict[int, Any] socket_map: _maptype = ... # Undocumented class ExitNow(Exception): ... def read(obj: Any) -> None: ... def write(obj: Any) -> None: ... def readwrite(obj: Any, flags: int) -> None: ... def poll(timeout: float = ..., map: _maptype = ...) -> None: ... def poll2(timeout: float = ..., map: _maptype = ...) -> None: ... poll3 = poll2 def loop(timeout: float = ..., use_poll: bool = ..., map: _maptype = ..., count: Optional[int] = ...) -> None: ... # Not really subclass of socket.socket; it's only delegation. # It is not covariant to it. class dispatcher: debug: bool connected: bool accepting: bool connecting: bool closing: bool ignore_log_types: frozenset[str] socket: Optional[SocketType] def __init__(self, sock: Optional[SocketType] = ..., map: _maptype = ...) -> None: ... def add_channel(self, map: _maptype = ...) -> None: ... def del_channel(self, map: _maptype = ...) -> None: ... def create_socket(self, family: int, type: int) -> None: ... def set_socket(self, sock: SocketType, map: _maptype = ...) -> None: ... def set_reuse_addr(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... 
def listen(self, backlog: int) -> None: ... def bind(self, address: Union[Tuple[Any, ...], str]) -> None: ... def connect(self, address: Union[Tuple[Any, ...], str]) -> None: ... def accept(self) -> Optional[Tuple[SocketType, Any]]: ... def send(self, data: bytes) -> int: ... def recv(self, buffer_size: int) -> bytes: ... def close(self) -> None: ... def log(self, message: Any) -> None: ... def log_info(self, message: Any, type: str = ...) -> None: ... def handle_read_event(self) -> None: ... def handle_connect_event(self) -> None: ... def handle_write_event(self) -> None: ... def handle_expt_event(self) -> None: ... def handle_error(self) -> None: ... def handle_expt(self) -> None: ... def handle_read(self) -> None: ... def handle_write(self) -> None: ... def handle_connect(self) -> None: ... def handle_accept(self) -> None: ... def handle_close(self) -> None: ... if sys.version_info < (3, 5): # Historically, some methods were "imported" from `self.socket` by # means of `__getattr__`. This was long deprecated, and as of Python # 3.5 has been removed; simply call the relevant methods directly on # self.socket if necessary. def detach(self) -> int: ... def fileno(self) -> int: ... # return value is an address def getpeername(self) -> Any: ... def getsockname(self) -> Any: ... @overload def getsockopt(self, level: int, optname: int) -> int: ... @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... def gettimeout(self) -> float: ... def ioctl(self, control: object, option: Tuple[int, int, int]) -> None: ... # TODO the return value may be BinaryIO or TextIO, depending on mode def makefile(self, mode: str = ..., buffering: int = ..., encoding: str = ..., errors: str = ..., newline: str = ...) -> Any: ... # return type is an address def recvfrom(self, bufsize: int, flags: int = ...) -> Any: ... def recvfrom_into(self, buffer: bytes, nbytes: int, flags: int = ...) -> Any: ... def recv_into(self, buffer: bytes, nbytes: int, flags: int = ...) 
-> Any: ... def sendall(self, data: bytes, flags: int = ...) -> None: ... def sendto(self, data: bytes, address: Union[Tuple[str, int], str], flags: int = ...) -> int: ... def setblocking(self, flag: bool) -> None: ... def settimeout(self, value: Union[float, None]) -> None: ... def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ... def shutdown(self, how: int) -> None: ... class dispatcher_with_send(dispatcher): def __init__(self, sock: SocketType = ..., map: _maptype = ...) -> None: ... def initiate_send(self) -> None: ... def handle_write(self) -> None: ... # incompatible signature: # def send(self, data: bytes) -> Optional[int]: ... def compact_traceback() -> Tuple[Tuple[str, str, str], type, type, str]: ... def close_all(map: _maptype = ..., ignore_all: bool = ...) -> None: ... # if os.name == 'posix': # import fcntl class file_wrapper: fd: int def __init__(self, fd: int) -> None: ... def recv(self, bufsize: int, flags: int = ...) -> bytes: ... def send(self, data: bytes, flags: int = ...) -> int: ... @overload def getsockopt(self, level: int, optname: int) -> int: ... @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... def read(self, bufsize: int, flags: int = ...) -> bytes: ... def write(self, data: bytes, flags: int = ...) -> int: ... def close(self) -> None: ... def fileno(self) -> int: ... class file_dispatcher(dispatcher): def __init__(self, fd: int, map: _maptype = ...) -> None: ... def set_file(self, fd: int) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/audioop.pyi0000644€tŠÔÚ€2›s®0000000370713576752252026122 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Tuple AdpcmState = Tuple[int, int] RatecvState = Tuple[int, Tuple[Tuple[int, int], ...]] class error(Exception): ... def add(fragment1: bytes, fragment2: bytes, width: int) -> bytes: ... def adpcm2lin(adpcmfragment: bytes, width: int, state: Optional[AdpcmState]) -> Tuple[bytes, AdpcmState]: ... 
def alaw2lin(fragment: bytes, width: int) -> bytes: ... def avg(fragment: bytes, width: int) -> int: ... def avgpp(fragment: bytes, width: int) -> int: ... def bias(fragment: bytes, width: int, bias: int) -> bytes: ... def byteswap(fragment: bytes, width: int) -> bytes: ... def cross(fragment: bytes, width: int) -> int: ... def findfactor(fragment: bytes, reference: bytes) -> float: ... def findfit(fragment: bytes, reference: bytes) -> Tuple[int, float]: ... def findmax(fragment: bytes, length: int) -> int: ... def getsample(fragment: bytes, width: int, index: int) -> int: ... def lin2adpcm(fragment: bytes, width: int, state: Optional[AdpcmState]) -> Tuple[bytes, AdpcmState]: ... def lin2alaw(fragment: bytes, width: int) -> bytes: ... def lin2lin(fragment: bytes, width: int, newwidth: int) -> bytes: ... def lin2ulaw(fragment: bytes, width: int) -> bytes: ... def max(fragment: bytes, width: int) -> int: ... def maxpp(fragment: bytes, width: int) -> int: ... def minmax(fragment: bytes, width: int) -> Tuple[int, int]: ... def mul(fragment: bytes, width: int, factor: float) -> bytes: ... def ratecv( fragment: bytes, width: int, nchannels: int, inrate: int, outrate: int, state: Optional[RatecvState], weightA: int = ..., weightB: int = ..., ) -> Tuple[bytes, RatecvState]: ... def reverse(fragment: bytes, width: int) -> bytes: ... def rms(fragment: bytes, width: int) -> int: ... def tomono(fragment: bytes, width: int, lfactor: float, rfactor: float) -> bytes: ... def tostereo(fragment: bytes, width: int, lfactor: float, rfactor: float) -> bytes: ... def ulaw2lin(fragment: bytes, width: int) -> bytes: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/base64.pyi0000644€tŠÔÚ€2›s®0000000306213576752252025540 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for base64 from typing import IO, Union, Text import sys if sys.version_info < (3,): _encodable = Union[bytes, unicode] _decodable = Union[bytes, unicode] else: _encodable = bytes _decodable = Union[bytes, str] def b64encode(s: _encodable, altchars: bytes = ...) -> bytes: ... def b64decode(s: _decodable, altchars: bytes = ..., validate: bool = ...) -> bytes: ... def standard_b64encode(s: _encodable) -> bytes: ... def standard_b64decode(s: _decodable) -> bytes: ... def urlsafe_b64encode(s: _encodable) -> bytes: ... def urlsafe_b64decode(s: _decodable) -> bytes: ... def b32encode(s: _encodable) -> bytes: ... def b32decode(s: _decodable, casefold: bool = ..., map01: bytes = ...) -> bytes: ... def b16encode(s: _encodable) -> bytes: ... def b16decode(s: _decodable, casefold: bool = ...) -> bytes: ... if sys.version_info >= (3, 4): def a85encode(b: _encodable, *, foldspaces: bool = ..., wrapcol: int = ..., pad: bool = ..., adobe: bool = ...) -> bytes: ... def a85decode(b: _decodable, *, foldspaces: bool = ..., adobe: bool = ..., ignorechars: Union[str, bytes] = ...) -> bytes: ... def b85encode(b: _encodable, pad: bool = ...) -> bytes: ... def b85decode(b: _decodable) -> bytes: ... def decode(input: IO[bytes], output: IO[bytes]) -> None: ... def decodebytes(s: bytes) -> bytes: ... def decodestring(s: bytes) -> bytes: ... def encode(input: IO[bytes], output: IO[bytes]) -> None: ... def encodebytes(s: bytes) -> bytes: ... def encodestring(s: bytes) -> bytes: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/bdb.pyi0000644€tŠÔÚ€2›s®0000001057213576752252025207 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import Set, Dict, Iterable, Any, Callable, Tuple, Type, SupportsInt, List, Union, TypeVar, Optional, IO from types import FrameType, TracebackType, CodeType _T = TypeVar("_T") _TraceDispatch = Callable[[FrameType, str, Any], Any] # TODO: Recursive type _ExcInfo = Tuple[Type[BaseException], BaseException, FrameType] GENERATOR_AND_COROUTINE_FLAGS: int = ... class BdbQuit(Exception): ... class Bdb: skip: Optional[Set[str]] breaks: Dict[str, List[int]] fncache: Dict[str, str] frame_returning: Optional[FrameType] botframe: Optional[FrameType] quitting: bool stopframe: Optional[FrameType] returnframe: Optional[FrameType] stoplineno: int def __init__(self, skip: Iterable[str] = ...) -> None: ... def canonic(self, filename: str) -> str: ... def reset(self) -> None: ... def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> _TraceDispatch: ... def dispatch_line(self, frame: FrameType) -> _TraceDispatch: ... def dispatch_call(self, frame: FrameType, arg: None) -> _TraceDispatch: ... def dispatch_return(self, frame: FrameType, arg: Any) -> _TraceDispatch: ... def dispatch_exception(self, frame: FrameType, arg: _ExcInfo) -> _TraceDispatch: ... def is_skipped_module(self, module_name: str) -> bool: ... def stop_here(self, frame: FrameType) -> bool: ... def break_here(self, frame: FrameType) -> bool: ... def do_clear(self, arg: Any) -> None: ... def break_anywhere(self, frame: FrameType) -> bool: ... def user_call(self, frame: FrameType, argument_list: None) -> None: ... def user_line(self, frame: FrameType) -> None: ... def user_return(self, frame: FrameType, return_value: Any) -> None: ... def user_exception(self, frame: FrameType, exc_info: _ExcInfo) -> None: ... def set_until(self, frame: FrameType, lineno: Optional[int] = ...) -> None: ... def set_step(self) -> None: ... 
def set_next(self, frame: FrameType) -> None: ... def set_return(self, frame: FrameType) -> None: ... def set_trace(self, frame: Optional[FrameType] = ...) -> None: ... def set_continue(self) -> None: ... def set_quit(self) -> None: ... def set_break(self, filename: str, lineno: int, temporary: bool = ..., cond: Optional[str] = ..., funcname: Optional[str] = ...) -> None: ... def clear_break(self, filename: str, lineno: int) -> None: ... def clear_bpbynumber(self, arg: SupportsInt) -> None: ... def clear_all_file_breaks(self, filename: str) -> None: ... def clear_all_breaks(self) -> None: ... def get_bpbynumber(self, arg: SupportsInt) -> Breakpoint: ... def get_break(self, filename: str, lineno: int) -> bool: ... def get_breaks(self, filename: str, lineno: int) -> List[Breakpoint]: ... def get_file_breaks(self, filename: str) -> List[Breakpoint]: ... def get_all_breaks(self) -> List[Breakpoint]: ... def get_stack(self, f: FrameType, t: TracebackType) -> Tuple[List[Tuple[FrameType, int]], int]: ... def format_stack_entry(self, frame_lineno: int, lprefix: str = ...) -> str: ... def run(self, cmd: Union[str, CodeType], globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> None: ... def runeval(self, expr: str, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> None: ... def runctx(self, cmd: Union[str, CodeType], globals: Dict[str, Any], locals: Dict[str, Any]) -> None: ... def runcall(self, func: Callable[[Any], _T], *args: Any, **kwds: Any) -> Optional[_T]: ... class Breakpoint: next: int = ... bplist: Dict[Tuple[str, int], List[Breakpoint]] = ... bpbynumber: List[Optional[Breakpoint]] = ... funcname: Optional[str] func_first_executable_line: Optional[int] file: str line: int temporary: bool cond: Optional[str] enabled: bool ignore: int hits: int number: int def __init__(self, file: str, line: int, temporary: bool = ..., cond: Optional[str] = ..., funcname: Optional[str] = ...) -> None: ... def deleteMe(self) -> None: ... 
def enable(self) -> None: ... def disable(self) -> None: ... def bpprint(self, out: Optional[IO[str]] = ...) -> None: ... def bpformat(self) -> str: ... def __str__(self) -> str: ... def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: ... def effective(file: str, line: int, frame: FrameType) -> Union[Tuple[Breakpoint, bool], Tuple[None, None]]: ... def set_trace() -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/binascii.pyi0000644€tŠÔÚ€2›s®0000000266213576752252026242 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for binascii # Based on http://docs.python.org/3.2/library/binascii.html import sys from typing import Union, Text if sys.version_info < (3,): # Python 2 accepts unicode ascii pretty much everywhere. _Bytes = Text _Ascii = Text else: # But since Python 3.3 ASCII-only unicode strings are accepted by the # a2b_* functions. _Bytes = bytes _Ascii = Union[bytes, str] def a2b_uu(string: _Ascii) -> bytes: ... if sys.version_info >= (3, 7): def b2a_uu(data: _Bytes, *, backtick: bool = ...) -> bytes: ... else: def b2a_uu(data: _Bytes) -> bytes: ... def a2b_base64(string: _Ascii) -> bytes: ... if sys.version_info >= (3, 6): def b2a_base64(data: _Bytes, *, newline: bool = ...) -> bytes: ... else: def b2a_base64(data: _Bytes) -> bytes: ... def a2b_qp(string: _Ascii, header: bool = ...) -> bytes: ... def b2a_qp(data: _Bytes, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> bytes: ... def a2b_hqx(string: _Ascii) -> bytes: ... def rledecode_hqx(data: _Bytes) -> bytes: ... def rlecode_hqx(data: _Bytes) -> bytes: ... def b2a_hqx(data: _Bytes) -> bytes: ... def crc_hqx(data: _Bytes, crc: int) -> int: ... def crc32(data: _Bytes, crc: int = ...) -> int: ... def b2a_hex(data: _Bytes) -> bytes: ... def hexlify(data: _Bytes) -> bytes: ... def a2b_hex(hexstr: _Ascii) -> bytes: ... def unhexlify(hexlify: _Ascii) -> bytes: ... class Error(Exception): ... class Incomplete(Exception): ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/binhex.pyi0000644€tŠÔÚ€2›s®0000000222113576752252025725 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, IO, Tuple, Union, ) class Error(Exception): ... REASONABLY_LARGE: int LINELEN: int RUNCHAR: bytes class FInfo: def __init__(self) -> None: ... Type: str Creator: str Flags: int _FileInfoTuple = Tuple[str, FInfo, int, int] _FileHandleUnion = Union[str, IO[bytes]] def getfileinfo(name: str) -> _FileInfoTuple: ... class openrsrc: def __init__(self, *args: Any) -> None: ... def read(self, *args: Any) -> bytes: ... def write(self, *args: Any) -> None: ... def close(self) -> None: ... class BinHex: def __init__(self, name_finfo_dlen_rlen: _FileInfoTuple, ofp: _FileHandleUnion) -> None: ... def write(self, data: bytes) -> None: ... def close_data(self) -> None: ... def write_rsrc(self, data: bytes) -> None: ... def close(self) -> None: ... def binhex(inp: str, out: str) -> None: ... class HexBin: def __init__(self, ifp: _FileHandleUnion) -> None: ... def read(self, *n: int) -> bytes: ... def close_data(self) -> None: ... def read_rsrc(self, *n: int) -> bytes: ... def close(self) -> None: ... def hexbin(inp: str, out: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/bisect.pyi0000644€tŠÔÚ€2›s®0000000113613576752252025725 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for bisect from typing import Any, Sequence, MutableSequence, TypeVar _T = TypeVar('_T') def bisect_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def bisect_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def bisect(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def insort_left(a: MutableSequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def insort_right(a: MutableSequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def insort(a: MutableSequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/builtins.pyi0000644€tŠÔÚ€2›s®0000021221113576752252026303 0ustar jukkaDROPBOX\Domain Users00000000000000# True and False are deliberately omitted because they are keywords in # Python 3, and stub files conform to Python 3 syntax. from typing import ( TypeVar, Iterator, Iterable, NoReturn, overload, Container, Sequence, MutableSequence, Mapping, MutableMapping, Tuple, List, Any, Dict, Callable, Generic, Set, AbstractSet, FrozenSet, MutableSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsAbs, SupportsComplex, IO, BinaryIO, Union, ItemsView, KeysView, ValuesView, ByteString, Optional, AnyStr, Type, Text, Protocol, ) from abc import abstractmethod, ABCMeta from ast import mod, AST from types import TracebackType, CodeType import sys if sys.version_info >= (3,): from typing import SupportsBytes, SupportsRound if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal _T = TypeVar('_T') _T_co = TypeVar('_T_co', covariant=True) _KT = TypeVar('_KT') _VT = TypeVar('_VT') _S = TypeVar('_S') _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _TT = TypeVar('_TT', bound='type') class _SupportsIndex(Protocol): def __index__(self) -> int: ... class object: __doc__: Optional[str] __dict__: Dict[str, Any] __slots__: Union[Text, Iterable[Text]] __module__: str if sys.version_info >= (3, 6): __annotations__: Dict[str, Any] @property def __class__(self: _T) -> Type[_T]: ... @__class__.setter def __class__(self, __type: Type[object]) -> None: ... def __init__(self) -> None: ... def __new__(cls) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __eq__(self, o: object) -> bool: ... def __ne__(self, o: object) -> bool: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __hash__(self) -> int: ... def __format__(self, format_spec: str) -> str: ... def __getattribute__(self, name: str) -> Any: ... 
def __delattr__(self, name: str) -> None: ... def __sizeof__(self) -> int: ... def __reduce__(self) -> Union[str, Tuple[Any, ...]]: ... def __reduce_ex__(self, protocol: int) -> Union[str, Tuple[Any, ...]]: ... if sys.version_info >= (3,): def __dir__(self) -> Iterable[str]: ... if sys.version_info >= (3, 6): def __init_subclass__(cls) -> None: ... class staticmethod(object): # Special, only valid as a decorator. __func__: Callable[..., Any] if sys.version_info >= (3,): __isabstractmethod__: bool def __init__(self, f: Callable[..., Any]) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]] = ...) -> Callable[..., Any]: ... class classmethod(object): # Special, only valid as a decorator. __func__: Callable[..., Any] if sys.version_info >= (3,): __isabstractmethod__: bool def __init__(self, f: Callable[..., Any]) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]] = ...) -> Callable[..., Any]: ... class type(object): __base__: type __bases__: Tuple[type, ...] __basicsize__: int __dict__: Dict[str, Any] __dictoffset__: int __flags__: int __itemsize__: int __module__: str __mro__: Tuple[type, ...] __name__: str if sys.version_info >= (3,): __qualname__: str __text_signature__: Optional[str] __weakrefoffset__: int @overload def __init__(self, o: object) -> None: ... @overload def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ... @overload def __new__(cls, o: object) -> type: ... @overload def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ... def __call__(self, *args: Any, **kwds: Any) -> Any: ... def __subclasses__(self: _TT) -> List[_TT]: ... # Note: the documentation doesnt specify what the return type is, the standard # implementation seems to be returning a list. def mro(self) -> List[type]: ... 
def __instancecheck__(self, instance: Any) -> bool: ... def __subclasscheck__(self, subclass: type) -> bool: ... if sys.version_info >= (3,): @classmethod def __prepare__(metacls, __name: str, __bases: Tuple[type, ...], **kwds: Any) -> Mapping[str, Any]: ... class super(object): if sys.version_info >= (3,): @overload def __init__(self, t: Any, obj: Any) -> None: ... @overload def __init__(self, t: Any) -> None: ... @overload def __init__(self) -> None: ... else: @overload def __init__(self, t: Any, obj: Any) -> None: ... @overload def __init__(self, t: Any) -> None: ... class int: @overload def __init__(self, x: Union[Text, bytes, SupportsInt, _SupportsIndex] = ...) -> None: ... @overload def __init__(self, x: Union[Text, bytes, bytearray], base: int) -> None: ... if sys.version_info >= (3, 8): def as_integer_ratio(self) -> Tuple[int, Literal[1]]: ... @property def real(self) -> int: ... @property def imag(self) -> int: ... @property def numerator(self) -> int: ... @property def denominator(self) -> int: ... def conjugate(self) -> int: ... def bit_length(self) -> int: ... if sys.version_info >= (3,): def to_bytes(self, length: int, byteorder: str, *, signed: bool = ...) -> bytes: ... @classmethod def from_bytes(cls, bytes: Sequence[int], byteorder: str, *, signed: bool = ...) -> int: ... # TODO buffer object argument def __add__(self, x: int) -> int: ... def __sub__(self, x: int) -> int: ... def __mul__(self, x: int) -> int: ... def __floordiv__(self, x: int) -> int: ... if sys.version_info < (3,): def __div__(self, x: int) -> int: ... def __truediv__(self, x: int) -> float: ... def __mod__(self, x: int) -> int: ... def __divmod__(self, x: int) -> Tuple[int, int]: ... def __radd__(self, x: int) -> int: ... def __rsub__(self, x: int) -> int: ... def __rmul__(self, x: int) -> int: ... def __rfloordiv__(self, x: int) -> int: ... if sys.version_info < (3,): def __rdiv__(self, x: int) -> int: ... def __rtruediv__(self, x: int) -> float: ... 
def __rmod__(self, x: int) -> int: ... def __rdivmod__(self, x: int) -> Tuple[int, int]: ... def __pow__(self, __x: int, __modulo: Optional[int] = ...) -> Any: ... # Return type can be int or float, depending on x. def __rpow__(self, x: int) -> Any: ... def __and__(self, n: int) -> int: ... def __or__(self, n: int) -> int: ... def __xor__(self, n: int) -> int: ... def __lshift__(self, n: int) -> int: ... def __rshift__(self, n: int) -> int: ... def __rand__(self, n: int) -> int: ... def __ror__(self, n: int) -> int: ... def __rxor__(self, n: int) -> int: ... def __rlshift__(self, n: int) -> int: ... def __rrshift__(self, n: int) -> int: ... def __neg__(self) -> int: ... def __pos__(self) -> int: ... def __invert__(self) -> int: ... def __trunc__(self) -> int: ... if sys.version_info >= (3,): def __ceil__(self) -> int: ... def __floor__(self) -> int: ... def __round__(self, ndigits: Optional[int] = ...) -> int: ... def __getnewargs__(self) -> Tuple[int]: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: int) -> bool: ... def __le__(self, x: int) -> bool: ... def __gt__(self, x: int) -> bool: ... def __ge__(self, x: int) -> bool: ... def __str__(self) -> str: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __abs__(self) -> int: ... def __hash__(self) -> int: ... if sys.version_info >= (3,): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... def __index__(self) -> int: ... class float: def __init__(self, x: Union[SupportsFloat, _SupportsIndex, Text, bytes, bytearray] = ...) -> None: ... def as_integer_ratio(self) -> Tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... @classmethod def fromhex(cls, s: str) -> float: ... @property def real(self) -> float: ... @property def imag(self) -> float: ... def conjugate(self) -> float: ... def __add__(self, x: float) -> float: ... def __sub__(self, x: float) -> float: ... 
def __mul__(self, x: float) -> float: ... def __floordiv__(self, x: float) -> float: ... if sys.version_info < (3,): def __div__(self, x: float) -> float: ... def __truediv__(self, x: float) -> float: ... def __mod__(self, x: float) -> float: ... def __divmod__(self, x: float) -> Tuple[float, float]: ... def __pow__(self, x: float) -> float: ... # In Python 3, returns complex if self is negative and x is not whole def __radd__(self, x: float) -> float: ... def __rsub__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... def __rfloordiv__(self, x: float) -> float: ... if sys.version_info < (3,): def __rdiv__(self, x: float) -> float: ... def __rtruediv__(self, x: float) -> float: ... def __rmod__(self, x: float) -> float: ... def __rdivmod__(self, x: float) -> Tuple[float, float]: ... def __rpow__(self, x: float) -> float: ... def __getnewargs__(self) -> Tuple[float]: ... def __trunc__(self) -> int: ... if sys.version_info >= (3,): @overload def __round__(self, ndigits: None = ...) -> int: ... @overload def __round__(self, ndigits: int) -> float: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: float) -> bool: ... def __le__(self, x: float) -> bool: ... def __gt__(self, x: float) -> bool: ... def __ge__(self, x: float) -> bool: ... def __neg__(self) -> float: ... def __pos__(self) -> float: ... def __str__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... if sys.version_info >= (3,): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... class complex: @overload def __init__(self, real: float = ..., imag: float = ...) -> None: ... @overload def __init__(self, real: Union[str, SupportsComplex, _SupportsIndex]) -> None: ... @property def real(self) -> float: ... @property def imag(self) -> float: ... def conjugate(self) -> complex: ... def __add__(self, x: complex) -> complex: ... 
def __sub__(self, x: complex) -> complex: ... def __mul__(self, x: complex) -> complex: ... def __pow__(self, x: complex) -> complex: ... if sys.version_info < (3,): def __div__(self, x: complex) -> complex: ... def __truediv__(self, x: complex) -> complex: ... def __radd__(self, x: complex) -> complex: ... def __rsub__(self, x: complex) -> complex: ... def __rmul__(self, x: complex) -> complex: ... def __rpow__(self, x: complex) -> complex: ... if sys.version_info < (3,): def __rdiv__(self, x: complex) -> complex: ... def __rtruediv__(self, x: complex) -> complex: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __neg__(self) -> complex: ... def __pos__(self) -> complex: ... def __str__(self) -> str: ... def __complex__(self) -> complex: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... if sys.version_info >= (3,): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... if sys.version_info >= (3,): _str_base = object else: class basestring(metaclass=ABCMeta): ... class unicode(basestring, Sequence[unicode]): @overload def __init__(self) -> None: ... @overload def __init__(self, o: object) -> None: ... @overload def __init__(self, o: str, encoding: unicode = ..., errors: unicode = ...) -> None: ... def capitalize(self) -> unicode: ... def center(self, width: int, fillchar: unicode = ...) -> unicode: ... def count(self, x: unicode) -> int: ... def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ... def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ... def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = ..., end: int = ...) -> bool: ... def expandtabs(self, tabsize: int = ...) -> unicode: ... def find(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def format(self, *args: object, **kwargs: object) -> unicode: ... def index(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... 
def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... def isidentifier(self) -> bool: ... def islower(self) -> bool: ... def isnumeric(self) -> bool: ... def isprintable(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[unicode]) -> unicode: ... def ljust(self, width: int, fillchar: unicode = ...) -> unicode: ... def lower(self) -> unicode: ... def lstrip(self, chars: unicode = ...) -> unicode: ... def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def replace(self, old: unicode, new: unicode, count: int = ...) -> unicode: ... def rfind(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rjust(self, width: int, fillchar: unicode = ...) -> unicode: ... def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def rsplit(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ... def rstrip(self, chars: unicode = ...) -> unicode: ... def split(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ... def splitlines(self, keepends: bool = ...) -> List[unicode]: ... def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = ..., end: int = ...) -> bool: ... def strip(self, chars: unicode = ...) -> unicode: ... def swapcase(self) -> unicode: ... def title(self) -> unicode: ... def translate(self, table: Union[Dict[int, Any], unicode]) -> unicode: ... def upper(self) -> unicode: ... def zfill(self, width: int) -> unicode: ... @overload def __getitem__(self, i: int) -> unicode: ... @overload def __getitem__(self, s: slice) -> unicode: ... def __getslice__(self, start: int, stop: int) -> unicode: ... def __add__(self, s: unicode) -> unicode: ... def __mul__(self, n: int) -> unicode: ... 
def __rmul__(self, n: int) -> unicode: ... def __mod__(self, x: Any) -> unicode: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: unicode) -> bool: ... def __le__(self, x: unicode) -> bool: ... def __gt__(self, x: unicode) -> bool: ... def __ge__(self, x: unicode) -> bool: ... def __len__(self) -> int: ... # The argument type is incompatible with Sequence def __contains__(self, s: Union[unicode, bytes]) -> bool: ... # type: ignore def __iter__(self) -> Iterator[unicode]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... def __getnewargs__(self) -> Tuple[unicode]: ... _str_base = basestring class str(Sequence[str], _str_base): if sys.version_info >= (3,): @overload def __init__(self, o: object = ...) -> None: ... @overload def __init__(self, o: bytes, encoding: str = ..., errors: str = ...) -> None: ... else: def __init__(self, o: object = ...) -> None: ... def capitalize(self) -> str: ... if sys.version_info >= (3, 3): def casefold(self) -> str: ... def center(self, width: int, fillchar: str = ...) -> str: ... def count(self, x: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... if sys.version_info < (3,): def decode(self, encoding: Text = ..., errors: Text = ...) -> unicode: ... def encode(self, encoding: Text = ..., errors: Text = ...) -> bytes: ... if sys.version_info >= (3,): def endswith(self, suffix: Union[Text, Tuple[Text, ...]], start: Optional[int] = ..., end: Optional[int] = ...) -> bool: ... else: def endswith(self, suffix: Union[Text, Tuple[Text, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> str: ... def find(self, sub: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def format(self, *args: object, **kwargs: object) -> str: ... if sys.version_info >= (3,): def format_map(self, map: Mapping[str, Any]) -> str: ... 
def index(self, sub: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... if sys.version_info >= (3, 7): def isascii(self) -> bool: ... if sys.version_info >= (3,): def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... if sys.version_info >= (3,): def isidentifier(self) -> bool: ... def islower(self) -> bool: ... if sys.version_info >= (3,): def isnumeric(self) -> bool: ... def isprintable(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... if sys.version_info >= (3,): def join(self, iterable: Iterable[str]) -> str: ... else: def join(self, iterable: Iterable[AnyStr]) -> AnyStr: ... def ljust(self, width: int, fillchar: str = ...) -> str: ... def lower(self) -> str: ... if sys.version_info >= (3,): def lstrip(self, chars: Optional[str] = ...) -> str: ... def partition(self, sep: str) -> Tuple[str, str, str]: ... def replace(self, old: str, new: str, count: int = ...) -> str: ... else: @overload def lstrip(self, chars: str = ...) -> str: ... @overload def lstrip(self, chars: unicode) -> unicode: ... @overload def partition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ... @overload def partition(self, sep: str) -> Tuple[str, str, str]: ... @overload def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def replace(self, old: AnyStr, new: AnyStr, count: int = ...) -> AnyStr: ... def rfind(self, sub: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def rindex(self, sub: Text, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def rjust(self, width: int, fillchar: str = ...) -> str: ... if sys.version_info >= (3,): def rpartition(self, sep: str) -> Tuple[str, str, str]: ... def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... def rstrip(self, chars: Optional[str] = ...) -> str: ... 
def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... else: @overload def rpartition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ... @overload def rpartition(self, sep: str) -> Tuple[str, str, str]: ... @overload def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... @overload def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... @overload def rsplit(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ... @overload def rstrip(self, chars: str = ...) -> str: ... @overload def rstrip(self, chars: unicode) -> unicode: ... @overload def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... @overload def split(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ... def splitlines(self, keepends: bool = ...) -> List[str]: ... if sys.version_info >= (3,): def startswith(self, prefix: Union[Text, Tuple[Text, ...]], start: Optional[int] = ..., end: Optional[int] = ...) -> bool: ... def strip(self, chars: Optional[str] = ...) -> str: ... else: def startswith(self, prefix: Union[Text, Tuple[Text, ...]]) -> bool: ... @overload def strip(self, chars: str = ...) -> str: ... @overload def strip(self, chars: unicode) -> unicode: ... def swapcase(self) -> str: ... def title(self) -> str: ... if sys.version_info >= (3,): def translate(self, table: Union[Mapping[int, Union[int, str, None]], Sequence[Union[int, str, None]]]) -> str: ... else: def translate(self, table: Optional[AnyStr], deletechars: AnyStr = ...) -> AnyStr: ... def upper(self) -> str: ... def zfill(self, width: int) -> str: ... if sys.version_info >= (3,): @staticmethod @overload def maketrans(x: Union[Dict[int, _T], Dict[str, _T], Dict[Union[str, int], _T]]) -> Dict[int, _T]: ... @staticmethod @overload def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Union[int, None]]: ... if sys.version_info >= (3,): def __add__(self, s: str) -> str: ... else: def __add__(self, s: AnyStr) -> AnyStr: ... 
# Incompatible with Sequence.__contains__ def __contains__(self, o: Union[str, Text]) -> bool: ... # type: ignore def __eq__(self, x: object) -> bool: ... def __ge__(self, x: Text) -> bool: ... def __getitem__(self, i: Union[int, slice]) -> str: ... def __gt__(self, x: Text) -> bool: ... def __hash__(self) -> int: ... def __iter__(self) -> Iterator[str]: ... def __le__(self, x: Text) -> bool: ... def __len__(self) -> int: ... def __lt__(self, x: Text) -> bool: ... def __mod__(self, x: Any) -> str: ... def __mul__(self, n: int) -> str: ... def __ne__(self, x: object) -> bool: ... def __repr__(self) -> str: ... def __rmul__(self, n: int) -> str: ... def __str__(self) -> str: ... def __getnewargs__(self) -> Tuple[str]: ... if sys.version_info < (3,): def __getslice__(self, start: int, stop: int) -> str: ... def __float__(self) -> float: ... def __int__(self) -> int: ... if sys.version_info >= (3,): class bytes(ByteString): @overload def __init__(self, ints: Iterable[int]) -> None: ... @overload def __init__(self, string: str, encoding: str, errors: str = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... @overload def __init__(self) -> None: ... @overload def __init__(self, o: SupportsBytes) -> None: ... def capitalize(self) -> bytes: ... def center(self, width: int, fillchar: bytes = ...) -> bytes: ... def count(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def decode(self, encoding: str = ..., errors: str = ...) -> str: ... def endswith(self, suffix: Union[bytes, Tuple[bytes, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> bytes: ... def find(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 5): def hex(self) -> str: ... def index(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... 
if sys.version_info >= (3, 7): def isascii(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[Union[ByteString, memoryview]]) -> bytes: ... def ljust(self, width: int, fillchar: bytes = ...) -> bytes: ... def lower(self) -> bytes: ... def lstrip(self, chars: Optional[bytes] = ...) -> bytes: ... def partition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ... def replace(self, old: bytes, new: bytes, count: int = ...) -> bytes: ... def rfind(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def rindex(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def rjust(self, width: int, fillchar: bytes = ...) -> bytes: ... def rpartition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ... def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ... def rstrip(self, chars: Optional[bytes] = ...) -> bytes: ... def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ... def splitlines(self, keepends: bool = ...) -> List[bytes]: ... def startswith( self, prefix: Union[bytes, Tuple[bytes, ...]], start: Optional[int] = ..., end: Optional[int] = ..., ) -> bool: ... def strip(self, chars: Optional[bytes] = ...) -> bytes: ... def swapcase(self) -> bytes: ... def title(self) -> bytes: ... def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytes: ... def upper(self) -> bytes: ... def zfill(self, width: int) -> bytes: ... @classmethod def fromhex(cls, s: str) -> bytes: ... @classmethod def maketrans(cls, frm: bytes, to: bytes) -> bytes: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... 
@overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> bytes: ... def __add__(self, s: bytes) -> bytes: ... def __mul__(self, n: int) -> bytes: ... def __rmul__(self, n: int) -> bytes: ... if sys.version_info >= (3, 5): def __mod__(self, value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ def __contains__(self, o: Union[int, bytes]) -> bool: ... # type: ignore def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: bytes) -> bool: ... def __le__(self, x: bytes) -> bool: ... def __gt__(self, x: bytes) -> bool: ... def __ge__(self, x: bytes) -> bool: ... def __getnewargs__(self) -> Tuple[bytes]: ... else: bytes = str class bytearray(MutableSequence[int], ByteString): if sys.version_info >= (3,): @overload def __init__(self) -> None: ... @overload def __init__(self, ints: Iterable[int]) -> None: ... @overload def __init__(self, string: Text, encoding: Text, errors: Text = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... else: @overload def __init__(self) -> None: ... @overload def __init__(self, ints: Iterable[int]) -> None: ... @overload def __init__(self, string: str) -> None: ... @overload def __init__(self, string: Text, encoding: Text, errors: Text = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... def capitalize(self) -> bytearray: ... def center(self, width: int, fillchar: bytes = ...) -> bytearray: ... if sys.version_info >= (3,): def count(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def copy(self) -> bytearray: ... else: def count(self, x: str) -> int: ... def decode(self, encoding: Text = ..., errors: Text = ...) -> str: ... def endswith(self, suffix: Union[bytes, Tuple[bytes, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> bytearray: ... if sys.version_info < (3,): def extend(self, iterable: Union[str, Iterable[int]]) -> None: ... 
if sys.version_info >= (3,): def find(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 5): def hex(self) -> str: ... def index(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def find(self, sub: str, start: int = ..., end: int = ...) -> int: ... def index(self, sub: str, start: int = ..., end: int = ...) -> int: ... def insert(self, index: int, object: int) -> None: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... if sys.version_info >= (3, 7): def isascii(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... if sys.version_info >= (3,): def join(self, iterable: Iterable[Union[ByteString, memoryview]]) -> bytearray: ... def ljust(self, width: int, fillchar: bytes = ...) -> bytearray: ... else: def join(self, iterable: Iterable[str]) -> bytearray: ... def ljust(self, width: int, fillchar: str = ...) -> bytearray: ... def lower(self) -> bytearray: ... def lstrip(self, chars: Optional[bytes] = ...) -> bytearray: ... def partition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ... def replace(self, old: bytes, new: bytes, count: int = ...) -> bytearray: ... if sys.version_info >= (3,): def rfind(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def rindex(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def rfind(self, sub: bytes, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: bytes, start: int = ..., end: int = ...) -> int: ... def rjust(self, width: int, fillchar: bytes = ...) -> bytearray: ... def rpartition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ... def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ... 
def rstrip(self, chars: Optional[bytes] = ...) -> bytearray: ... def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ... def splitlines(self, keepends: bool = ...) -> List[bytearray]: ... def startswith( self, prefix: Union[bytes, Tuple[bytes, ...]], start: Optional[int] = ..., end: Optional[int] = ..., ) -> bool: ... def strip(self, chars: Optional[bytes] = ...) -> bytearray: ... def swapcase(self) -> bytearray: ... def title(self) -> bytearray: ... if sys.version_info >= (3,): def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytearray: ... else: def translate(self, table: str) -> bytearray: ... def upper(self) -> bytearray: ... def zfill(self, width: int) -> bytearray: ... @staticmethod def fromhex(s: str) -> bytearray: ... if sys.version_info >= (3,): @classmethod def maketrans(cls, frm: bytes, to: bytes) -> bytes: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... __hash__: None # type: ignore @overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> bytearray: ... @overload def __setitem__(self, i: int, x: int) -> None: ... @overload def __setitem__(self, s: slice, x: Union[Iterable[int], bytes]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... if sys.version_info < (3,): def __getslice__(self, start: int, stop: int) -> bytearray: ... def __setslice__(self, start: int, stop: int, x: Union[Sequence[int], str]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __add__(self, s: bytes) -> bytearray: ... if sys.version_info >= (3,): def __iadd__(self, s: Iterable[int]) -> bytearray: ... def __mul__(self, n: int) -> bytearray: ... if sys.version_info >= (3,): def __rmul__(self, n: int) -> bytearray: ... def __imul__(self, n: int) -> bytearray: ... 
if sys.version_info >= (3, 5): def __mod__(self, value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ def __contains__(self, o: Union[int, bytes]) -> bool: ... # type: ignore def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: bytes) -> bool: ... def __le__(self, x: bytes) -> bool: ... def __gt__(self, x: bytes) -> bool: ... def __ge__(self, x: bytes) -> bool: ... if sys.version_info >= (3,): _mv_container_type = int else: _mv_container_type = str class memoryview(Sized, Container[_mv_container_type]): format: str itemsize: int shape: Optional[Tuple[int, ...]] strides: Optional[Tuple[int, ...]] suboffsets: Optional[Tuple[int, ...]] readonly: bool ndim: int if sys.version_info >= (3,): c_contiguous: bool f_contiguous: bool contiguous: bool nbytes: int def __init__(self, obj: Union[bytes, bytearray, memoryview]) -> None: ... def __enter__(self) -> memoryview: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> None: ... else: def __init__(self, obj: Union[bytes, bytearray, buffer, memoryview]) -> None: ... @overload def __getitem__(self, i: int) -> _mv_container_type: ... @overload def __getitem__(self, s: slice) -> memoryview: ... def __contains__(self, x: object) -> bool: ... def __iter__(self) -> Iterator[_mv_container_type]: ... def __len__(self) -> int: ... @overload def __setitem__(self, s: slice, o: memoryview) -> None: ... @overload def __setitem__(self, i: int, o: bytes) -> None: ... @overload def __setitem__(self, s: slice, o: Sequence[bytes]) -> None: ... def tobytes(self) -> bytes: ... def tolist(self) -> List[int]: ... if sys.version_info >= (3, 2): def release(self) -> None: ... if sys.version_info >= (3, 5): def hex(self) -> str: ... class bool(int): def __init__(self, o: object = ...) -> None: ... @overload def __and__(self, x: bool) -> bool: ... @overload def __and__(self, x: int) -> int: ... 
@overload def __or__(self, x: bool) -> bool: ... @overload def __or__(self, x: int) -> int: ... @overload def __xor__(self, x: bool) -> bool: ... @overload def __xor__(self, x: int) -> int: ... @overload def __rand__(self, x: bool) -> bool: ... @overload def __rand__(self, x: int) -> int: ... @overload def __ror__(self, x: bool) -> bool: ... @overload def __ror__(self, x: int) -> int: ... @overload def __rxor__(self, x: bool) -> bool: ... @overload def __rxor__(self, x: int) -> int: ... def __getnewargs__(self) -> Tuple[int]: ... class slice(object): start: Any step: Any stop: Any @overload def __init__(self, stop: Any) -> None: ... @overload def __init__(self, start: Any, stop: Any, step: Any = ...) -> None: ... __hash__: None # type: ignore def indices(self, len: int) -> Tuple[int, int, int]: ... class tuple(Sequence[_T_co], Generic[_T_co]): def __new__(cls: Type[_T], iterable: Iterable[_T_co] = ...) -> _T: ... def __len__(self) -> int: ... def __contains__(self, x: object) -> bool: ... @overload def __getitem__(self, x: int) -> _T_co: ... @overload def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ... def __iter__(self) -> Iterator[_T_co]: ... def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __le__(self, x: Tuple[_T_co, ...]) -> bool: ... def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ... @overload def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ... @overload def __add__(self, x: Tuple[Any, ...]) -> Tuple[Any, ...]: ... def __mul__(self, n: int) -> Tuple[_T_co, ...]: ... def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ... def count(self, x: Any) -> int: ... if sys.version_info >= (3, 5): def index(self, x: Any, start: int = ..., end: int = ...) -> int: ... else: def index(self, x: Any) -> int: ... class function: # TODO not defined in builtins! 
__name__: str __module__: str __code__: CodeType if sys.version_info >= (3,): __qualname__: str __annotations__: Dict[str, Any] class list(MutableSequence[_T], Generic[_T]): @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... if sys.version_info >= (3,): def clear(self) -> None: ... def copy(self) -> List[_T]: ... def append(self, object: _T) -> None: ... def extend(self, iterable: Iterable[_T]) -> None: ... def pop(self, index: int = ...) -> _T: ... def index(self, object: _T, start: int = ..., stop: int = ...) -> int: ... def count(self, object: _T) -> int: ... def insert(self, index: int, object: _T) -> None: ... def remove(self, object: _T) -> None: ... def reverse(self) -> None: ... if sys.version_info >= (3,): def sort(self, *, key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> None: ... else: def sort(self, cmp: Callable[[_T, _T], Any] = ..., key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... __hash__: None # type: ignore @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, s: slice) -> List[_T]: ... @overload def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... if sys.version_info < (3,): def __getslice__(self, start: int, stop: int) -> List[_T]: ... def __setslice__(self, start: int, stop: int, o: Sequence[_T]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __add__(self, x: List[_T]) -> List[_T]: ... def __iadd__(self: _S, x: Iterable[_T]) -> _S: ... def __mul__(self, n: int) -> List[_T]: ... def __rmul__(self, n: int) -> List[_T]: ... if sys.version_info >= (3,): def __imul__(self: _S, n: int) -> _S: ... def __contains__(self, o: object) -> bool: ... 
def __reversed__(self) -> Iterator[_T]: ... def __gt__(self, x: List[_T]) -> bool: ... def __ge__(self, x: List[_T]) -> bool: ... def __lt__(self, x: List[_T]) -> bool: ... def __le__(self, x: List[_T]) -> bool: ... class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # NOTE: Keyword arguments are special. If they are used, _KT must include # str, but we have no way of enforcing it here. @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ... if sys.version_info < (3,): def has_key(self, k: _KT) -> bool: ... def clear(self) -> None: ... def copy(self) -> Dict[_KT, _VT]: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... @overload def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... if sys.version_info >= (3,): def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT]: ... def items(self) -> ItemsView[_KT, _VT]: ... else: def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... def viewkeys(self) -> KeysView[_KT]: ... def viewvalues(self) -> ValuesView[_VT]: ... def viewitems(self) -> ItemsView[_KT, _VT]: ... @staticmethod @overload def fromkeys(seq: Iterable[_T]) -> Dict[_T, Any]: ... # TODO: Actually a class method (mypy/issues#328) @staticmethod @overload def fromkeys(seq: Iterable[_T], value: _S) -> Dict[_T, _S]: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... 
def __iter__(self) -> Iterator[_KT]: ... if sys.version_info >= (3, 8): def __reversed__(self) -> Iterator[_KT]: ... def __str__(self) -> str: ... __hash__: None # type: ignore class set(MutableSet[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ...) -> None: ... def add(self, element: _T) -> None: ... def clear(self) -> None: ... def copy(self) -> Set[_T]: ... def difference(self, *s: Iterable[Any]) -> Set[_T]: ... def difference_update(self, *s: Iterable[Any]) -> None: ... def discard(self, element: _T) -> None: ... def intersection(self, *s: Iterable[Any]) -> Set[_T]: ... def intersection_update(self, *s: Iterable[Any]) -> None: ... def isdisjoint(self, s: Iterable[Any]) -> bool: ... def issubset(self, s: Iterable[Any]) -> bool: ... def issuperset(self, s: Iterable[Any]) -> bool: ... def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ... def symmetric_difference_update(self, s: Iterable[_T]) -> None: ... def union(self, *s: Iterable[_T]) -> Set[_T]: ... def update(self, *s: Iterable[_T]) -> None: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[object]) -> Set[_T]: ... def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ... def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __isub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... 
__hash__: None # type: ignore class frozenset(AbstractSet[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ...) -> None: ... def copy(self) -> FrozenSet[_T]: ... def difference(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def intersection(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def isdisjoint(self, s: Iterable[_T]) -> bool: ... def issubset(self, s: Iterable[object]) -> bool: ... def issuperset(self, s: Iterable[object]) -> bool: ... def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ... def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... class enumerate(Iterator[Tuple[int, _T]], Generic[_T]): def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... def __iter__(self) -> Iterator[Tuple[int, _T]]: ... if sys.version_info >= (3,): def __next__(self) -> Tuple[int, _T]: ... else: def next(self) -> Tuple[int, _T]: ... if sys.version_info >= (3,): class range(Sequence[int]): start: int stop: int step: int @overload def __init__(self, stop: int) -> None: ... @overload def __init__(self, start: int, stop: int, step: int = ...) -> None: ... def count(self, value: int) -> int: ... def index(self, value: int, start: int = ..., stop: Optional[int] = ...) -> int: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[int]: ... 
@overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> range: ... def __repr__(self) -> str: ... def __reversed__(self) -> Iterator[int]: ... else: class xrange(Sized, Iterable[int], Reversible[int]): @overload def __init__(self, stop: int) -> None: ... @overload def __init__(self, start: int, stop: int, step: int = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __getitem__(self, i: int) -> int: ... def __reversed__(self) -> Iterator[int]: ... class property(object): def __init__(self, fget: Optional[Callable[[Any], Any]] = ..., fset: Optional[Callable[[Any, Any], None]] = ..., fdel: Optional[Callable[[Any], None]] = ..., doc: Optional[str] = ...) -> None: ... def getter(self, fget: Callable[[Any], Any]) -> property: ... def setter(self, fset: Callable[[Any, Any], None]) -> property: ... def deleter(self, fdel: Callable[[Any], None]) -> property: ... def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ... def __set__(self, obj: Any, value: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... def fget(self) -> Any: ... def fset(self, value: Any) -> None: ... def fdel(self) -> None: ... if sys.version_info < (3,): long = int NotImplemented: Any def abs(__n: SupportsAbs[_T]) -> _T: ... def all(__i: Iterable[object]) -> bool: ... def any(__i: Iterable[object]) -> bool: ... if sys.version_info < (3,): def apply(__func: Callable[..., _T], __args: Optional[Sequence[Any]] = ..., __kwds: Optional[Mapping[str, Any]] = ...) -> _T: ... if sys.version_info >= (3,): def ascii(__o: object) -> str: ... def bin(__number: Union[int, _SupportsIndex]) -> str: ... if sys.version_info >= (3, 7): def breakpoint(*args: Any, **kws: Any) -> None: ... def callable(__o: object) -> bool: ... def chr(__code: int) -> str: ... if sys.version_info < (3,): def cmp(__x: Any, __y: Any) -> int: ... 
_N1 = TypeVar('_N1', bool, int, float, complex) def coerce(__x: _N1, __y: _N1) -> Tuple[_N1, _N1]: ... if sys.version_info >= (3, 6): # This class is to be exported as PathLike from os, # but we define it here as _PathLike to avoid import cycle issues. # See https://github.com/python/typeshed/pull/991#issuecomment-288160993 class _PathLike(Generic[AnyStr]): def __fspath__(self) -> AnyStr: ... def compile(source: Union[str, bytes, mod, AST], filename: Union[str, bytes, _PathLike[Any]], mode: str, flags: int = ..., dont_inherit: int = ..., optimize: int = ...) -> Any: ... elif sys.version_info >= (3,): def compile(source: Union[str, bytes, mod, AST], filename: Union[str, bytes], mode: str, flags: int = ..., dont_inherit: int = ..., optimize: int = ...) -> Any: ... else: def compile(source: Union[Text, mod], filename: Text, mode: Text, flags: int = ..., dont_inherit: int = ...) -> Any: ... if sys.version_info >= (3,): def copyright() -> None: ... def credits() -> None: ... def delattr(__o: Any, __name: Text) -> None: ... def dir(__o: object = ...) -> List[str]: ... _N2 = TypeVar('_N2', int, float) def divmod(__a: _N2, __b: _N2) -> Tuple[_N2, _N2]: ... def eval(__source: Union[Text, bytes, CodeType], __globals: Optional[Dict[str, Any]] = ..., __locals: Optional[Mapping[str, Any]] = ...) -> Any: ... if sys.version_info >= (3,): def exec(__object: Union[str, bytes, CodeType], __globals: Optional[Dict[str, Any]] = ..., __locals: Optional[Mapping[str, Any]] = ...) -> Any: ... else: def execfile(__filename: str, __globals: Optional[Dict[str, Any]] = ..., __locals: Optional[Dict[str, Any]] = ...) -> None: ... def exit(code: object = ...) -> NoReturn: ... if sys.version_info >= (3,): @overload def filter(__function: None, __iterable: Iterable[Optional[_T]]) -> Iterator[_T]: ... @overload def filter(__function: Callable[[_T], Any], __iterable: Iterable[_T]) -> Iterator[_T]: ... else: @overload def filter(__function: Callable[[AnyStr], Any], __iterable: AnyStr) -> AnyStr: ... 
# type: ignore @overload def filter(__function: None, __iterable: Tuple[Optional[_T], ...]) -> Tuple[_T, ...]: ... # type: ignore @overload def filter(__function: Callable[[_T], Any], __iterable: Tuple[_T, ...]) -> Tuple[_T, ...]: ... # type: ignore @overload def filter(__function: None, __iterable: Iterable[Optional[_T]]) -> List[_T]: ... @overload def filter(__function: Callable[[_T], Any], __iterable: Iterable[_T]) -> List[_T]: ... def format(__o: object, __format_spec: str = ...) -> str: ... # TODO unicode def getattr(__o: Any, name: Text, __default: Any = ...) -> Any: ... def globals() -> Dict[str, Any]: ... def hasattr(__o: Any, __name: Text) -> bool: ... def hash(__o: object) -> int: ... if sys.version_info >= (3,): def help(*args: Any, **kwds: Any) -> None: ... def hex(__i: Union[int, _SupportsIndex]) -> str: ... def id(__o: object) -> int: ... if sys.version_info >= (3,): def input(__prompt: Any = ...) -> str: ... else: def input(__prompt: Any = ...) -> Any: ... def intern(__string: str) -> str: ... @overload def iter(__iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def iter(__function: Callable[[], Optional[_T]], __sentinel: None) -> Iterator[_T]: ... @overload def iter(__function: Callable[[], _T], __sentinel: Any) -> Iterator[_T]: ... def isinstance(__o: object, __t: Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]]) -> bool: ... def issubclass(__cls: type, __classinfo: Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]]) -> bool: ... def len(__o: Sized) -> int: ... if sys.version_info >= (3,): def license() -> None: ... def locals() -> Dict[str, Any]: ... if sys.version_info >= (3,): @overload def map(__func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> Iterator[_S]: ... @overload def map(__func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Iterator[_S]: ... 
@overload def map(__func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> Iterator[_S]: ... @overload def map(__func: Callable[[_T1, _T2, _T3, _T4], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]) -> Iterator[_S]: ... @overload def map(__func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> Iterator[_S]: ... @overload def map(__func: Callable[..., _S], __iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any], __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[_S]: ... else: @overload def map(__func: None, __iter1: Iterable[_T1]) -> List[_T1]: ... @overload def map(__func: None, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ... @overload def map(__func: None, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> List[Tuple[_T1, _T2, _T3]]: ... @overload def map(__func: None, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def map(__func: None, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> List[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def map(__func: None, __iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any], __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any], *iterables: Iterable[Any]) -> List[Tuple[Any, ...]]: ... @overload def map(__func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> List[_S]: ... @overload def map(__func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> List[_S]: ... 
@overload def map(__func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> List[_S]: ... @overload def map(__func: Callable[[_T1, _T2, _T3, _T4], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]) -> List[_S]: ... @overload def map(__func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> List[_S]: ... @overload def map(__func: Callable[..., _S], __iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any], __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any], *iterables: Iterable[Any]) -> List[_S]: ... if sys.version_info >= (3,): @overload def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def max(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ...) -> _T: ... @overload def max(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ..., default: _VT) -> Union[_T, _VT]: ... else: @overload def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def max(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ...) -> _T: ... if sys.version_info >= (3,): @overload def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ..., default: _VT) -> Union[_T, _VT]: ... else: @overload def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(__iterable: Iterable[_T], *, key: Callable[[_T], Any] = ...) -> _T: ... @overload def next(__i: Iterator[_T]) -> _T: ... @overload def next(__i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ... def oct(__i: Union[int, _SupportsIndex]) -> str: ... 
if sys.version_info >= (3, 6): def open(file: Union[str, bytes, int, _PathLike[Any]], mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., opener: Optional[Callable[[str, int], int]] = ...) -> IO[Any]: ... elif sys.version_info >= (3,): def open(file: Union[str, bytes, int], mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., opener: Optional[Callable[[str, int], int]] = ...) -> IO[Any]: ... else: def open(name: Union[unicode, int], mode: unicode = ..., buffering: int = ...) -> BinaryIO: ... def ord(__c: Union[Text, bytes]) -> int: ... if sys.version_info >= (3,): class _Writer(Protocol): def write(self, __s: str) -> Any: ... def print( *values: object, sep: Optional[Text] = ..., end: Optional[Text] = ..., file: Optional[_Writer] = ..., flush: bool = ... ) -> None: ... else: class _Writer(Protocol): def write(self, __s: Any) -> Any: ... # This is only available after from __future__ import print_function. def print(*values: object, sep: Optional[Text] = ..., end: Optional[Text] = ..., file: Optional[_Writer] = ...) -> None: ... @overload def pow(__x: int, __y: int) -> Any: ... # The return type can be int or float, depending on y @overload def pow(__x: int, __y: int, __z: int) -> Any: ... @overload def pow(__x: float, __y: float) -> float: ... @overload def pow(__x: float, __y: float, __z: float) -> float: ... def quit(code: object = ...) -> NoReturn: ... if sys.version_info < (3,): def range(__x: int, __y: int = ..., __step: int = ...) -> List[int]: ... def raw_input(__prompt: Any = ...) -> str: ... @overload def reduce(__function: Callable[[_T, _S], _T], __iterable: Iterable[_S], __initializer: _T) -> _T: ... @overload def reduce(__function: Callable[[_T, _T], _T], __iterable: Iterable[_T]) -> _T: ... def reload(__module: Any) -> Any: ... 
@overload def reversed(__object: Sequence[_T]) -> Iterator[_T]: ... @overload def reversed(__object: Reversible[_T]) -> Iterator[_T]: ... def repr(__o: object) -> str: ... if sys.version_info >= (3,): @overload def round(number: float) -> int: ... @overload def round(number: float, ndigits: None) -> int: ... @overload def round(number: float, ndigits: int) -> float: ... @overload def round(number: SupportsRound[_T]) -> int: ... @overload def round(number: SupportsRound[_T], ndigits: None) -> int: ... @overload def round(number: SupportsRound[_T], ndigits: int) -> _T: ... else: @overload def round(number: float) -> float: ... @overload def round(number: float, ndigits: int) -> float: ... @overload def round(number: SupportsFloat) -> float: ... @overload def round(number: SupportsFloat, ndigits: int) -> float: ... def setattr(__object: Any, __name: Text, __value: Any) -> None: ... if sys.version_info >= (3,): def sorted(__iterable: Iterable[_T], *, key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> List[_T]: ... else: def sorted(__iterable: Iterable[_T], *, cmp: Callable[[_T, _T], int] = ..., key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> List[_T]: ... @overload def sum(__iterable: Iterable[_T]) -> Union[_T, int]: ... @overload def sum(__iterable: Iterable[_T], __start: _S) -> Union[_T, _S]: ... if sys.version_info < (3,): def unichr(__i: int) -> unicode: ... def vars(__object: Any = ...) -> Dict[str, Any]: ... if sys.version_info >= (3,): @overload def zip(__iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ... @overload def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ... @overload def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ... @overload def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ... 
@overload def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def zip(__iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any], __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ... else: @overload def zip(__iter1: Iterable[_T1]) -> List[Tuple[_T1]]: ... @overload def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ... @overload def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> List[Tuple[_T1, _T2, _T3]]: ... @overload def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]) -> List[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def zip(__iter1: Iterable[Any], __iter2: Iterable[Any], __iter3: Iterable[Any], __iter4: Iterable[Any], __iter5: Iterable[Any], __iter6: Iterable[Any], *iterables: Iterable[Any]) -> List[Tuple[Any, ...]]: ... def __import__(name: Text, globals: Optional[Mapping[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ..., fromlist: Sequence[str] = ..., level: int = ...) -> Any: ... # Actually the type of Ellipsis is , but since it's # not exposed anywhere under that name, we make it private here. class ellipsis: ... Ellipsis: ellipsis if sys.version_info < (3,): # TODO: buffer support is incomplete; e.g. some_string.startswith(some_buffer) doesn't type check. _AnyBuffer = TypeVar('_AnyBuffer', str, unicode, bytearray, buffer) class buffer(Sized): def __init__(self, object: _AnyBuffer, offset: int = ..., size: int = ...) -> None: ... def __add__(self, other: _AnyBuffer) -> str: ... def __cmp__(self, other: _AnyBuffer) -> bool: ... 
def __getitem__(self, key: Union[int, slice]) -> str: ... def __getslice__(self, i: int, j: int) -> str: ... def __len__(self) -> int: ... def __mul__(self, x: int) -> str: ... class BaseException(object): args: Tuple[Any, ...] if sys.version_info < (3,): message: Any if sys.version_info >= (3,): __cause__: Optional[BaseException] __context__: Optional[BaseException] __suppress_context__: bool __traceback__: Optional[TracebackType] def __init__(self, *args: object) -> None: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... if sys.version_info < (3,): def __getitem__(self, i: int) -> Any: ... def __getslice__(self, start: int, stop: int) -> Tuple[Any, ...]: ... if sys.version_info >= (3,): def with_traceback(self, tb: Optional[TracebackType]) -> BaseException: ... class GeneratorExit(BaseException): ... class KeyboardInterrupt(BaseException): ... class SystemExit(BaseException): code: int class Exception(BaseException): ... class StopIteration(Exception): if sys.version_info >= (3,): value: Any if sys.version_info >= (3,): _StandardError = Exception class OSError(Exception): errno: int strerror: str # filename, filename2 are actually Union[str, bytes, None] filename: Any filename2: Any EnvironmentError = OSError IOError = OSError else: class StandardError(Exception): ... _StandardError = StandardError class EnvironmentError(StandardError): errno: int strerror: str # TODO can this be unicode? filename: str class OSError(EnvironmentError): ... class IOError(EnvironmentError): ... class ArithmeticError(_StandardError): ... class AssertionError(_StandardError): ... class AttributeError(_StandardError): ... class BufferError(_StandardError): ... class EOFError(_StandardError): ... class ImportError(_StandardError): if sys.version_info >= (3, 3): def __init__(self, *args, name: Optional[str] = ..., path: Optional[str] = ...) -> None: ... name: Optional[str] path: Optional[str] class LookupError(_StandardError): ... class MemoryError(_StandardError): ... 
class NameError(_StandardError): ... class ReferenceError(_StandardError): ... class RuntimeError(_StandardError): ... if sys.version_info >= (3, 5): class StopAsyncIteration(Exception): value: Any class SyntaxError(_StandardError): msg: str lineno: int offset: Optional[int] text: Optional[str] filename: str class SystemError(_StandardError): ... class TypeError(_StandardError): ... class ValueError(_StandardError): ... class FloatingPointError(ArithmeticError): ... class OverflowError(ArithmeticError): ... class ZeroDivisionError(ArithmeticError): ... if sys.version_info >= (3, 6): class ModuleNotFoundError(ImportError): ... class IndexError(LookupError): ... class KeyError(LookupError): ... class UnboundLocalError(NameError): ... class WindowsError(OSError): winerror: int if sys.version_info >= (3,): class BlockingIOError(OSError): characters_written: int class ChildProcessError(OSError): ... class ConnectionError(OSError): ... class BrokenPipeError(ConnectionError): ... class ConnectionAbortedError(ConnectionError): ... class ConnectionRefusedError(ConnectionError): ... class ConnectionResetError(ConnectionError): ... class FileExistsError(OSError): ... class FileNotFoundError(OSError): ... class InterruptedError(OSError): ... class IsADirectoryError(OSError): ... class NotADirectoryError(OSError): ... class PermissionError(OSError): ... class ProcessLookupError(OSError): ... class TimeoutError(OSError): ... class NotImplementedError(RuntimeError): ... if sys.version_info >= (3, 5): class RecursionError(RuntimeError): ... class IndentationError(SyntaxError): ... class TabError(IndentationError): ... class UnicodeError(ValueError): ... class UnicodeDecodeError(UnicodeError): encoding: str object: bytes start: int end: int reason: str def __init__(self, __encoding: str, __object: bytes, __start: int, __end: int, __reason: str) -> None: ... 
class UnicodeEncodeError(UnicodeError): encoding: str object: Text start: int end: int reason: str def __init__(self, __encoding: str, __object: Text, __start: int, __end: int, __reason: str) -> None: ... class UnicodeTranslateError(UnicodeError): ... class Warning(Exception): ... class UserWarning(Warning): ... class DeprecationWarning(Warning): ... class SyntaxWarning(Warning): ... class RuntimeWarning(Warning): ... class FutureWarning(Warning): ... class PendingDeprecationWarning(Warning): ... class ImportWarning(Warning): ... class UnicodeWarning(Warning): ... class BytesWarning(Warning): ... if sys.version_info >= (3, 2): class ResourceWarning(Warning): ... if sys.version_info < (3,): class file(BinaryIO): @overload def __init__(self, file: str, mode: str = ..., buffering: int = ...) -> None: ... @overload def __init__(self, file: unicode, mode: str = ..., buffering: int = ...) -> None: ... @overload def __init__(self, file: int, mode: str = ..., buffering: int = ...) -> None: ... def __iter__(self) -> Iterator[str]: ... def next(self) -> str: ... def read(self, n: int = ...) -> str: ... def __enter__(self) -> BinaryIO: ... def __exit__(self, t: Optional[type] = ..., exc: Optional[BaseException] = ..., tb: Optional[Any] = ...) -> Optional[bool]: ... def flush(self) -> None: ... def fileno(self) -> int: ... def isatty(self) -> bool: ... def close(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def seekable(self) -> bool: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def readline(self, limit: int = ...) -> str: ... def readlines(self, hint: int = ...) -> List[str]: ... def write(self, data: str) -> int: ... def writelines(self, data: Iterable[str]) -> None: ... def truncate(self, pos: Optional[int] = ...) -> int: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/bz2.pyi0000644€tŠÔÚ€2›s®0000000307013576752252025150 0ustar jukkaDROPBOX\Domain Users00000000000000import io import sys from typing import Any, IO, Optional, Union if sys.version_info >= (3, 6): from os import PathLike _PathOrFile = Union[str, bytes, IO[Any], PathLike[Any]] elif sys.version_info >= (3, 3): _PathOrFile = Union[str, bytes, IO[Any]] else: _PathOrFile = str def compress(data: bytes, compresslevel: int = ...) -> bytes: ... def decompress(data: bytes) -> bytes: ... if sys.version_info >= (3, 3): def open(filename: _PathOrFile, mode: str = ..., compresslevel: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> IO[Any]: ... class BZ2File(io.BufferedIOBase, IO[bytes]): # type: ignore # python/mypy#5027 def __init__(self, filename: _PathOrFile, mode: str = ..., buffering: Optional[Any] = ..., compresslevel: int = ...) -> None: ... class BZ2Compressor(object): def __init__(self, compresslevel: int = ...) -> None: ... def compress(self, data: bytes) -> bytes: ... def flush(self) -> bytes: ... class BZ2Decompressor(object): if sys.version_info >= (3, 5): def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... else: def decompress(self, data: bytes) -> bytes: ... if sys.version_info >= (3, 3): @property def eof(self) -> bool: ... if sys.version_info >= (3, 5): @property def needs_input(self) -> bool: ... @property def unused_data(self) -> bytes: ... mypy-0.761/mypy/typeshed/stdlib/2and3/cProfile.pyi0000644€tŠÔÚ€2›s®0000000240413576752252026216 0ustar jukkaDROPBOX\Domain Users00000000000000import os import sys from typing import Any, Callable, Dict, Optional, Text, TypeVar, Union def run(statement: str, filename: Optional[str] = ..., sort: Union[str, int] = ...) -> None: ... def runctx(statement: str, globals: Dict[str, Any], locals: Dict[str, Any], filename: Optional[str] = ..., sort: Union[str, int] = ...) -> None: ... 
_SelfT = TypeVar('_SelfT', bound=Profile) _T = TypeVar('_T') if sys.version_info >= (3, 6): _Path = Union[bytes, Text, os.PathLike[Any]] else: _Path = Union[bytes, Text] class Profile: def __init__(self, custom_timer: Callable[[], float] = ..., time_unit: float = ..., subcalls: bool = ..., builtins: bool = ...) -> None: ... def enable(self) -> None: ... def disable(self) -> None: ... def print_stats(self, sort: Union[str, int] = ...) -> None: ... def dump_stats(self, file: _Path) -> None: ... def create_stats(self) -> None: ... def run(self: _SelfT, cmd: str) -> _SelfT: ... def runctx(self: _SelfT, cmd: str, globals: Dict[str, Any], locals: Dict[str, Any]) -> _SelfT: ... def runcall(self, func: Callable[..., _T], *args: Any, **kw: Any) -> _T: ... if sys.version_info >= (3, 8): def __enter__(self: _SelfT) -> _SelfT: ... def __exit__(self, *exc_info: Any) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/calendar.pyi0000644€tŠÔÚ€2›s®0000001321413576752252026225 0ustar jukkaDROPBOX\Domain Users00000000000000import datetime import sys from time import struct_time from typing import Any, Iterable, List, Optional, Sequence, Tuple, Union _LocaleType = Tuple[Optional[str], Optional[str]] class IllegalMonthError(ValueError): def __init__(self, month: int) -> None: ... def __str__(self) -> str: ... class IllegalWeekdayError(ValueError): def __init__(self, weekday: int) -> None: ... def __str__(self) -> str: ... def isleap(year: int) -> bool: ... def leapdays(y1: int, y2: int) -> int: ... def weekday(year: int, month: int, day: int) -> int: ... def monthrange(year: int, month: int) -> Tuple[int, int]: ... class Calendar: def __init__(self, firstweekday: int = ...) -> None: ... def getfirstweekday(self) -> int: ... def setfirstweekday(self, firstweekday: int) -> None: ... def iterweekdays(self) -> Iterable[int]: ... def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: ... def itermonthdays2(self, year: int, month: int) -> Iterable[Tuple[int, int]]: ... 
def itermonthdays(self, year: int, month: int) -> Iterable[int]: ... def monthdatescalendar(self, year: int, month: int) -> List[List[datetime.date]]: ... def monthdays2calendar(self, year: int, month: int) -> List[List[Tuple[int, int]]]: ... def monthdayscalendar(self, year: int, month: int) -> List[List[int]]: ... def yeardatescalendar(self, year: int, width: int = ...) -> List[List[int]]: ... def yeardays2calendar(self, year: int, width: int = ...) -> List[List[Tuple[int, int]]]: ... def yeardayscalendar(self, year: int, width: int = ...) -> List[List[int]]: ... if sys.version_info >= (3, 7): def itermonthdays3(self, year: int, month: int) -> Iterable[Tuple[int, int, int]]: ... def itermonthdays4(self, year: int, month: int) -> Iterable[Tuple[int, int, int, int]]: ... class TextCalendar(Calendar): def prweek(self, theweek: int, width: int) -> None: ... def formatday(self, day: int, weekday: int, width: int) -> str: ... def formatweek(self, theweek: int, width: int) -> str: ... def formatweekday(self, day: int, width: int) -> str: ... def formatweekheader(self, width: int) -> str: ... def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ... def prmonth(self, theyear: int, themonth: int, w: int = ..., l: int = ...) -> None: ... def formatmonth(self, theyear: int, themonth: int, w: int = ..., l: int = ...) -> str: ... def formatyear(self, theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> str: ... def pryear(self, theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> None: ... def firstweekday() -> int: ... def monthcalendar(year: int, month: int) -> List[List[int]]: ... def prweek(theweek: int, width: int) -> None: ... def week(theweek: int, width: int) -> str: ... def weekheader(width: int) -> str: ... def prmonth(theyear: int, themonth: int, w: int = ..., l: int = ...) -> None: ... def month(theyear: int, themonth: int, w: int = ..., l: int = ...) -> str: ... 
def calendar(theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> str: ... def prcal(theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> None: ... class HTMLCalendar(Calendar): def formatday(self, day: int, weekday: int) -> str: ... def formatweek(self, theweek: int) -> str: ... def formatweekday(self, day: int) -> str: ... def formatweekheader(self) -> str: ... def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... def formatmonth(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... def formatyear(self, theyear: int, width: int = ...) -> str: ... def formatyearpage(self, theyear: int, width: int = ..., css: Optional[str] = ..., encoding: Optional[str] = ...) -> str: ... if sys.version_info >= (3, 7): cssclasses: List[str] cssclass_noday: str cssclasses_weekday_head: List[str] cssclass_month_head: str cssclass_month: str cssclass_year: str cssclass_year_head: str if sys.version_info < (3, 0): class TimeEncoding: def __init__(self, locale: _LocaleType) -> None: ... def __enter__(self) -> _LocaleType: ... def __exit__(self, *args: Any) -> None: ... else: class different_locale: def __init__(self, locale: _LocaleType) -> None: ... def __enter__(self) -> _LocaleType: ... def __exit__(self, *args: Any) -> None: ... class LocaleTextCalendar(TextCalendar): def __init__(self, firstweekday: int = ..., locale: Optional[_LocaleType] = ...) -> None: ... def formatweekday(self, day: int, width: int) -> str: ... def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ... class LocaleHTMLCalendar(HTMLCalendar): def __init__(self, firstweekday: int = ..., locale: Optional[_LocaleType] = ...) -> None: ... def formatweekday(self, day: int) -> str: ... def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... c: TextCalendar def setfirstweekday(firstweekday: int) -> None: ... 
def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... def timegm(tuple: Union[Tuple[int, ...], struct_time]) -> int: ... # Data attributes day_name: Sequence[str] day_abbr: Sequence[str] month_name: Sequence[str] month_abbr: Sequence[str] # Below constants are not in docs or __all__, but enough people have used them # they are now effectively public. MONDAY: int TUESDAY: int WEDNESDAY: int THURSDAY: int FRIDAY: int SATURDAY: int SUNDAY: int mypy-0.761/mypy/typeshed/stdlib/2and3/cgi.pyi0000644€tŠÔÚ€2›s®0000001160613576752252025221 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, AnyStr, Dict, IO, Iterable, List, Mapping, Optional, Tuple, TypeVar, Union _T = TypeVar('_T', bound=FieldStorage) def parse(fp: IO[Any] = ..., environ: Mapping[str, str] = ..., keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[str, List[str]]: ... if sys.version_info < (3, 8): def parse_qs(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[str, List[str]]: ... def parse_qsl(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[str, List[str]]: ... if sys.version_info >= (3, 7): def parse_multipart(fp: IO[Any], pdict: Mapping[str, bytes], encoding: str = ..., errors: str = ...) -> Dict[str, List[Any]]: ... else: def parse_multipart(fp: IO[Any], pdict: Mapping[str, bytes]) -> Dict[str, List[bytes]]: ... def parse_header(s: str) -> Tuple[str, Dict[str, str]]: ... def test(environ: Mapping[str, str] = ...) -> None: ... def print_environ(environ: Mapping[str, str] = ...) -> None: ... def print_form(form: Dict[str, Any]) -> None: ... def print_directory() -> None: ... def print_environ_usage() -> None: ... if sys.version_info < (3,): def escape(s: AnyStr, quote: bool = ...) -> AnyStr: ... elif sys.version_info < (3, 8): def escape(s: str, quote: bool = ...) -> str: ... 
class MiniFieldStorage: # The first five "Any" attributes here are always None, but mypy doesn't support that filename: Any list: Any type: Any file: Optional[IO[bytes]] type_options: Dict[Any, Any] disposition: Any disposition_options: Dict[Any, Any] headers: Dict[Any, Any] name: Any value: Any def __init__(self, name: Any, value: Any) -> None: ... def __repr__(self) -> str: ... class FieldStorage(object): FieldStorageClass: Optional[type] keep_blank_values: int strict_parsing: int qs_on_post: Optional[str] headers: Mapping[str, str] fp: IO[bytes] encoding: str errors: str outerboundary: bytes bytes_read: int limit: Optional[int] disposition: str disposition_options: Dict[str, str] filename: Optional[str] file: Optional[IO[bytes]] type: str type_options: Dict[str, str] innerboundary: bytes length: int done: int list: Optional[List[Any]] value: Union[None, bytes, List[Any]] if sys.version_info >= (3, 0): def __init__(self, fp: IO[Any] = ..., headers: Mapping[str, str] = ..., outerboundary: bytes = ..., environ: Mapping[str, str] = ..., keep_blank_values: int = ..., strict_parsing: int = ..., limit: int = ..., encoding: str = ..., errors: str = ...) -> None: ... else: def __init__(self, fp: IO[Any] = ..., headers: Mapping[str, str] = ..., outerboundary: bytes = ..., environ: Mapping[str, str] = ..., keep_blank_values: int = ..., strict_parsing: int = ...) -> None: ... if sys.version_info >= (3, 0): def __enter__(self: _T) -> _T: ... def __exit__(self, *args: Any) -> None: ... def __repr__(self) -> str: ... def __iter__(self) -> Iterable[str]: ... def __getitem__(self, key: str) -> Any: ... def getvalue(self, key: str, default: Any = ...) -> Any: ... def getfirst(self, key: str, default: Any = ...) -> Any: ... def getlist(self, key: str) -> List[Any]: ... def keys(self) -> List[str]: ... if sys.version_info < (3, 0): def has_key(self, key: str) -> bool: ... def __contains__(self, key: str) -> bool: ... def __len__(self) -> int: ... 
if sys.version_info >= (3, 0): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... if sys.version_info >= (3, 0): # In Python 3 it returns bytes or str IO depending on an internal flag def make_file(self) -> IO[Any]: ... else: # In Python 2 it always returns bytes and ignores the "binary" flag def make_file(self, binary: Any = ...) -> IO[bytes]: ... if sys.version_info < (3, 0): from UserDict import UserDict class FormContentDict(UserDict[str, List[str]]): query_string: str def __init__(self, environ: Mapping[str, str] = ..., keep_blank_values: int = ..., strict_parsing: int = ...) -> None: ... class SvFormContentDict(FormContentDict): def getlist(self, key: Any) -> Any: ... class InterpFormContentDict(SvFormContentDict): ... class FormContent(FormContentDict): # TODO this should have # def values(self, key: Any) -> Any: ... # but this is incompatible with the supertype, and adding '# type: ignore' triggers # a parse error in pytype (https://github.com/google/pytype/issues/53) def indexed_value(self, key: Any, location: int) -> Any: ... def value(self, key: Any) -> Any: ... def length(self, key: Any) -> int: ... def stripped(self, key: Any) -> Any: ... def pars(self) -> Dict[Any, Any]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/cgitb.pyi0000644€tŠÔÚ€2›s®0000000300013576752252025534 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import Dict, Any, List, Tuple, Optional, Callable, Type, Union, IO, AnyStr, TypeVar from types import FrameType, TracebackType import sys _T = TypeVar("_T") _ExcInfo = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] if sys.version_info >= (3, 6): from os import PathLike _Path = Union[_T, PathLike[_T]] else: _Path = Union[_T] def reset() -> str: ... # undocumented def small(text: str) -> str: ... # undocumented def strong(text: str) -> str: ... # undocumented def grey(text: str) -> str: ... 
# undocumented def lookup(name: str, frame: FrameType, locals: Dict[str, Any]) -> Tuple[Optional[str], Any]: ... # undocumented def scanvars(reader: Callable[[], bytes], frame: FrameType, locals: Dict[str, Any]) -> List[Tuple[str, Optional[str], Any]]: ... # undocumented def html(einfo: _ExcInfo, context: int = ...) -> str: ... def text(einfo: _ExcInfo, context: int = ...) -> str: ... class Hook: # undocumented def __init__(self, display: int = ..., logdir: Optional[_Path[AnyStr]] = ..., context: int = ..., file: Optional[IO[str]] = ..., format: str = ...) -> None: ... def __call__(self, etype: Optional[Type[BaseException]], evalue: Optional[BaseException], etb: Optional[TracebackType]) -> None: ... def handle(self, info: Optional[_ExcInfo] = ...) -> None: ... def handler(info: Optional[_ExcInfo] = ...) -> None: ... def enable(display: int = ..., logdir: Optional[_Path[AnyStr]] = ..., context: int = ..., format: str = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/chunk.pyi0000644€tŠÔÚ€2›s®0000000136313576752252025566 0ustar jukkaDROPBOX\Domain Users00000000000000# Source(py2): https://hg.python.org/cpython/file/2.7/Lib/chunk.py # Source(py3): https://github.com/python/cpython/blob/master/Lib/chunk.py from typing import IO class Chunk: closed: bool align: bool file: IO[bytes] chunkname: bytes chunksize: int size_read: int offset: int seekable: bool def __init__(self, file: IO[bytes], align: bool = ..., bigendian: bool = ..., inclheader: bool = ...) -> None: ... def getname(self) -> bytes: ... def getsize(self) -> int: ... def close(self) -> None: ... def isatty(self) -> bool: ... def seek(self, pos: int, whence: int = ...) -> None: ... def tell(self) -> int: ... def read(self, size: int = ...) -> bytes: ... def skip(self) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/cmath.pyi0000644€tŠÔÚ€2›s®0000000226713576752252025556 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'cmath' module.""" import sys from typing import SupportsComplex, SupportsFloat, Tuple, Union e: float pi: float if sys.version_info >= (3, 6): inf: float infj: complex nan: float nanj: complex tau: float _C = Union[SupportsFloat, SupportsComplex] def acos(x: _C) -> complex: ... def acosh(x: _C) -> complex: ... def asin(x: _C) -> complex: ... def asinh(x: _C) -> complex: ... def atan(x: _C) -> complex: ... def atanh(x: _C) -> complex: ... def cos(x: _C) -> complex: ... def cosh(x: _C) -> complex: ... def exp(x: _C) -> complex: ... if sys.version_info >= (3, 5): def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = ..., abs_tol: SupportsFloat = ...) -> bool: ... def isinf(z: _C) -> bool: ... def isnan(z: _C) -> bool: ... def log(x: _C, base: _C = ...) -> complex: ... def log10(x: _C) -> complex: ... def phase(z: _C) -> float: ... def polar(z: _C) -> Tuple[float, float]: ... def rect(r: float, phi: float) -> complex: ... def sin(x: _C) -> complex: ... def sinh(x: _C) -> complex: ... def sqrt(x: _C) -> complex: ... def tan(x: _C) -> complex: ... def tanh(x: _C) -> complex: ... if sys.version_info >= (3,): def isfinite(z: _C) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/cmd.pyi0000644€tŠÔÚ€2›s®0000000323613576752252025222 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for cmd (Python 2/3) from typing import Any, Optional, Text, IO, List, Callable, Tuple class Cmd: prompt: str identchars: str ruler: str lastcmd: str intro: Optional[Any] doc_leader: str doc_header: str misc_header: str undoc_header: str nohelp: str use_rawinput: bool stdin: IO[str] stdout: IO[str] cmdqueue: List[str] completekey: str def __init__(self, completekey: str = ..., stdin: Optional[IO[str]] = ..., stdout: Optional[IO[str]] = ...) -> None: ... 
old_completer: Optional[Callable[[str, int], Optional[str]]] def cmdloop(self, intro: Optional[Any] = ...) -> None: ... def precmd(self, line: str) -> str: ... def postcmd(self, stop: bool, line: str) -> bool: ... def preloop(self) -> None: ... def postloop(self) -> None: ... def parseline(self, line: str) -> Tuple[Optional[str], Optional[str], str]: ... def onecmd(self, line: str) -> bool: ... def emptyline(self) -> bool: ... def default(self, line: str) -> bool: ... def completedefault(self, *ignored: Any) -> List[str]: ... def completenames(self, text: str, *ignored: Any) -> List[str]: ... completion_matches: Optional[List[str]] def complete(self, text: str, state: int) -> Optional[List[str]]: ... def get_names(self) -> List[str]: ... # Only the first element of args matters. def complete_help(self, *args: Any) -> List[str]: ... def do_help(self, arg: Optional[str]) -> None: ... def print_topics(self, header: str, cmds: Optional[List[str]], cmdlen: Any, maxcol: int) -> None: ... def columnize(self, list: Optional[List[str]], displaywidth: int = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/code.pyi0000644€tŠÔÚ€2›s®0000000315613576752252025372 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for code import sys from typing import Any, Callable, Mapping, Optional from types import CodeType class InteractiveInterpreter: def __init__(self, locals: Optional[Mapping[str, Any]] = ...) -> None: ... def runsource(self, source: str, filename: str = ..., symbol: str = ...) -> bool: ... def runcode(self, code: CodeType) -> None: ... def showsyntaxerror(self, filename: Optional[str] = ...) -> None: ... def showtraceback(self) -> None: ... def write(self, data: str) -> None: ... class InteractiveConsole(InteractiveInterpreter): def __init__(self, locals: Optional[Mapping[str, Any]] = ..., filename: str = ...) -> None: ... if sys.version_info >= (3, 6): def interact(self, banner: Optional[str] = ..., exitmsg: Optional[str] = ...) -> None: ... 
else: def interact(self, banner: Optional[str] = ...) -> None: ... def push(self, line: str) -> bool: ... def resetbuffer(self) -> None: ... def raw_input(self, prompt: str = ...) -> str: ... if sys.version_info >= (3, 6): def interact(banner: Optional[str] = ..., readfunc: Optional[Callable[[str], str]] = ..., local: Optional[Mapping[str, Any]] = ..., exitmsg: Optional[str] = ...) -> None: ... else: def interact(banner: Optional[str] = ..., readfunc: Optional[Callable[[str], str]] = ..., local: Optional[Mapping[str, Any]] = ...) -> None: ... def compile_command(source: str, filename: str = ..., symbol: str = ...) -> Optional[CodeType]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/codecs.pyi0000644€tŠÔÚ€2›s®0000002547713576752252025732 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, BinaryIO, Callable, Generator, IO, Iterable, Iterator, List, Optional, Protocol, Text, TextIO, Tuple, Type, TypeVar, Union from abc import abstractmethod import types # TODO: this only satisfies the most common interface, where # bytes (py2 str) is the raw form and str (py2 unicode) is the cooked form. # In the long run, both should become template parameters maybe? # There *are* bytes->bytes and str->str encodings in the standard library. # They are much more common in Python 2 than in Python 3. _Decoded = Text _Encoded = bytes class _Encoder(Protocol): def __call__(self, input: _Decoded, errors: str = ...) -> Tuple[_Encoded, int]: ... # signature of Codec().encode class _Decoder(Protocol): def __call__(self, input: _Encoded, errors: str = ...) -> Tuple[_Decoded, int]: ... # signature of Codec().decode class _StreamReader(Protocol): def __call__(self, stream: IO[_Encoded], errors: str = ...) -> StreamReader: ... class _StreamWriter(Protocol): def __call__(self, stream: IO[_Encoded], errors: str = ...) -> StreamWriter: ... class _IncrementalEncoder(Protocol): def __call__(self, errors: str = ...) -> IncrementalEncoder: ... 
class _IncrementalDecoder(Protocol): def __call__(self, errors: str = ...) -> IncrementalDecoder: ... def encode(obj: _Decoded, encoding: str = ..., errors: str = ...) -> _Encoded: ... def decode(obj: _Encoded, encoding: str = ..., errors: str = ...) -> _Decoded: ... def lookup(encoding: str) -> CodecInfo: ... def utf_16_be_decode(__obj: _Encoded, __errors: str = ..., __final: bool = ...) -> Tuple[_Decoded, int]: ... # undocumented def utf_16_be_encode(__obj: _Decoded, __errors: str = ...) -> Tuple[_Encoded, int]: ... # undocumented class CodecInfo(Tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): @property def encode(self) -> _Encoder: ... @property def decode(self) -> _Decoder: ... @property def streamreader(self) -> _StreamReader: ... @property def streamwriter(self) -> _StreamWriter: ... @property def incrementalencoder(self) -> _IncrementalEncoder: ... @property def incrementaldecoder(self) -> _IncrementalDecoder: ... name: str def __init__( self, encode: _Encoder, decode: _Decoder, streamreader: _StreamReader = ..., streamwriter: _StreamWriter = ..., incrementalencoder: _IncrementalEncoder = ..., incrementaldecoder: _IncrementalDecoder = ..., name: str = ..., ) -> None: ... def getencoder(encoding: str) -> _Encoder: ... def getdecoder(encoding: str) -> _Decoder: ... def getincrementalencoder(encoding: str) -> _IncrementalEncoder: ... def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... def getreader(encoding: str) -> _StreamReader: ... def getwriter(encoding: str) -> _StreamWriter: ... def register(search_function: Callable[[str], Optional[CodecInfo]]) -> None: ... def open(filename: str, mode: str = ..., encoding: str = ..., errors: str = ..., buffering: int = ...) -> StreamReaderWriter: ... def EncodedFile(file: IO[_Encoded], data_encoding: str, file_encoding: str = ..., errors: str = ...) -> StreamRecoder: ... def iterencode(iterator: Iterable[_Decoded], encoding: str, errors: str = ...) -> Generator[_Encoded, None, None]: ... 
def iterdecode(iterator: Iterable[_Encoded], encoding: str, errors: str = ...) -> Generator[_Decoded, None, None]: ... BOM: bytes BOM_BE: bytes BOM_LE: bytes BOM_UTF8: bytes BOM_UTF16: bytes BOM_UTF16_BE: bytes BOM_UTF16_LE: bytes BOM_UTF32: bytes BOM_UTF32_BE: bytes BOM_UTF32_LE: bytes # It is expected that different actions be taken depending on which of the # three subclasses of `UnicodeError` is actually ...ed. However, the Union # is still needed for at least one of the cases. def register_error(name: str, error_handler: Callable[[UnicodeError], Tuple[Union[str, bytes], int]]) -> None: ... def lookup_error(name: str) -> Callable[[UnicodeError], Tuple[Union[str, bytes], int]]: ... def strict_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... def replace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... def ignore_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... def xmlcharrefreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... def backslashreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... class Codec: # These are sort of @abstractmethod but sort of not. # The StreamReader and StreamWriter subclasses only implement one. def encode(self, input: _Decoded, errors: str = ...) -> Tuple[_Encoded, int]: ... def decode(self, input: _Encoded, errors: str = ...) -> Tuple[_Decoded, int]: ... class IncrementalEncoder: errors: str def __init__(self, errors: str = ...) -> None: ... @abstractmethod def encode(self, object: _Decoded, final: bool = ...) -> _Encoded: ... def reset(self) -> None: ... # documentation says int but str is needed for the subclass. def getstate(self) -> Union[int, _Decoded]: ... def setstate(self, state: Union[int, _Decoded]) -> None: ... class IncrementalDecoder: errors: str def __init__(self, errors: str = ...) -> None: ... @abstractmethod def decode(self, object: _Encoded, final: bool = ...) -> _Decoded: ... def reset(self) -> None: ... 
def getstate(self) -> Tuple[_Encoded, int]: ... def setstate(self, state: Tuple[_Encoded, int]) -> None: ... # These are not documented but used in encodings/*.py implementations. class BufferedIncrementalEncoder(IncrementalEncoder): buffer: str def __init__(self, errors: str = ...) -> None: ... @abstractmethod def _buffer_encode(self, input: _Decoded, errors: str, final: bool) -> _Encoded: ... def encode(self, input: _Decoded, final: bool = ...) -> _Encoded: ... class BufferedIncrementalDecoder(IncrementalDecoder): buffer: bytes def __init__(self, errors: str = ...) -> None: ... @abstractmethod def _buffer_decode(self, input: _Encoded, errors: str, final: bool) -> Tuple[_Decoded, int]: ... def decode(self, object: _Encoded, final: bool = ...) -> _Decoded: ... _SW = TypeVar("_SW", bound=StreamWriter) # TODO: it is not possible to specify the requirement that all other # attributes and methods are passed-through from the stream. class StreamWriter(Codec): errors: str def __init__(self, stream: IO[_Encoded], errors: str = ...) -> None: ... def write(self, obj: _Decoded) -> None: ... def writelines(self, list: Iterable[_Decoded]) -> None: ... def reset(self) -> None: ... def __enter__(self: _SW) -> _SW: ... def __exit__( self, typ: Optional[Type[BaseException]], exc: Optional[BaseException], tb: Optional[types.TracebackType] ) -> None: ... def __getattr__(self, name: str) -> Any: ... _SR = TypeVar("_SR", bound=StreamReader) class StreamReader(Codec): errors: str def __init__(self, stream: IO[_Encoded], errors: str = ...) -> None: ... def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> _Decoded: ... def readline(self, size: int = ..., keepends: bool = ...) -> _Decoded: ... def readlines(self, sizehint: int = ..., keepends: bool = ...) -> List[_Decoded]: ... def reset(self) -> None: ... def __enter__(self: _SR) -> _SR: ... 
def __exit__( self, typ: Optional[Type[BaseException]], exc: Optional[BaseException], tb: Optional[types.TracebackType] ) -> None: ... def __iter__(self) -> Iterator[_Decoded]: ... def __getattr__(self, name: str) -> Any: ... _T = TypeVar("_T", bound=StreamReaderWriter) # Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing # and delegates attributes to the underlying binary stream with __getattr__. class StreamReaderWriter(TextIO): def __init__(self, stream: IO[_Encoded], Reader: _StreamReader, Writer: _StreamWriter, errors: str = ...) -> None: ... def read(self, size: int = ...) -> _Decoded: ... def readline(self, size: Optional[int] = ...) -> _Decoded: ... def readlines(self, sizehint: Optional[int] = ...) -> List[_Decoded]: ... if sys.version_info >= (3,): def __next__(self) -> Text: ... else: def next(self) -> Text: ... def __iter__(self: _T) -> _T: ... # This actually returns None, but that's incompatible with the supertype def write(self, data: _Decoded) -> int: ... def writelines(self, list: Iterable[_Decoded]) -> None: ... def reset(self) -> None: ... # Same as write() def seek(self, offset: int, whence: int = ...) -> int: ... def __enter__(self: _T) -> _T: ... def __exit__( self, typ: Optional[Type[BaseException]], exc: Optional[BaseException], tb: Optional[types.TracebackType] ) -> None: ... def __getattr__(self, name: str) -> Any: ... # These methods don't actually exist directly, but they are needed to satisfy the TextIO # interface. At runtime, they are delegated through __getattr__. def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def readable(self) -> bool: ... def truncate(self, size: Optional[int] = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def writable(self) -> bool: ... 
_SRT = TypeVar("_SRT", bound=StreamRecoder) class StreamRecoder(BinaryIO): def __init__( self, stream: IO[_Encoded], encode: _Encoder, decode: _Decoder, Reader: _StreamReader, Writer: _StreamWriter, errors: str = ..., ) -> None: ... def read(self, size: int = ...) -> bytes: ... def readline(self, size: Optional[int] = ...) -> bytes: ... def readlines(self, sizehint: Optional[int] = ...) -> List[bytes]: ... if sys.version_info >= (3,): def __next__(self) -> bytes: ... else: def next(self) -> bytes: ... def __iter__(self: _SRT) -> _SRT: ... def write(self, data: bytes) -> int: ... def writelines(self, list: Iterable[bytes]) -> int: ... # type: ignore # it's supposed to return None def reset(self) -> None: ... def __getattr__(self, name: str) -> Any: ... def __enter__(self: _SRT) -> _SRT: ... def __exit__( self, type: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] ) -> None: ... # These methods don't actually exist directly, but they are needed to satisfy the BinaryIO # interface. At runtime, they are delegated through __getattr__. def seek(self, offset: int, whence: int = ...) -> int: ... def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def readable(self) -> bool: ... def truncate(self, size: Optional[int] = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def writable(self) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/codeop.pyi0000644€tŠÔÚ€2›s®0000000117113576752252025724 0ustar jukkaDROPBOX\Domain Users00000000000000# Source(py2): https://hg.python.org/cpython/file/2.7/Lib/codeop.py # Source(py3): https://github.com/python/cpython/blob/master/Lib/codeop.py from types import CodeType from typing import Optional def compile_command(source: str, filename: str = ..., symbol: str = ...) -> Optional[CodeType]: ... class Compile: flags: int def __init__(self) -> None: ... 
def __call__(self, source: str, filename: str, symbol: str) -> CodeType: ... class CommandCompiler: compiler: Compile def __init__(self) -> None: ... def __call__(self, source: str, filename: str = ..., symbol: str = ...) -> Optional[CodeType]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/colorsys.pyi0000644€tŠÔÚ€2›s®0000000113013576752252026323 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for colorsys from typing import Tuple def rgb_to_yiq(r: float, g: float, b: float) -> Tuple[float, float, float]: ... def yiq_to_rgb(y: float, i: float, q: float) -> Tuple[float, float, float]: ... def rgb_to_hls(r: float, g: float, b: float) -> Tuple[float, float, float]: ... def hls_to_rgb(h: float, l: float, s: float) -> Tuple[float, float, float]: ... def rgb_to_hsv(r: float, g: float, b: float) -> Tuple[float, float, float]: ... def hsv_to_rgb(h: float, s: float, v: float) -> Tuple[float, float, float]: ... # TODO undocumented ONE_SIXTH: float ONE_THIRD: float TWO_THIRD: float mypy-0.761/mypy/typeshed/stdlib/2and3/contextlib.pyi0000644€tŠÔÚ€2›s®0000001130613576752252026627 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for contextlib from typing import ( Any, Callable, Generator, IO, Iterable, Iterator, Optional, Type, Generic, TypeVar, overload ) from types import TracebackType import sys # Aliased here for backwards compatibility; TODO eventually remove this from typing import ContextManager as ContextManager if sys.version_info >= (3, 5): from typing import AsyncContextManager, AsyncIterator if sys.version_info >= (3, 6): from typing import ContextManager as AbstractContextManager if sys.version_info >= (3, 7): from typing import AsyncContextManager as AbstractAsyncContextManager _T = TypeVar('_T') _F = TypeVar('_F', bound=Callable[..., Any]) _ExitFunc = Callable[[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]], bool] _CM_EF = TypeVar('_CM_EF', ContextManager[Any], _ExitFunc) if sys.version_info >= (3, 2): class 
_GeneratorContextManager(ContextManager[_T], Generic[_T]): def __call__(self, func: _F) -> _F: ... def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., _GeneratorContextManager[_T]]: ... else: class GeneratorContextManager(ContextManager[_T], Generic[_T]): def __call__(self, func: _F) -> _F: ... def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., ContextManager[_T]]: ... if sys.version_info >= (3, 7): def asynccontextmanager(func: Callable[..., AsyncIterator[_T]]) -> Callable[..., AsyncContextManager[_T]]: ... if sys.version_info < (3,): def nested(*mgr: ContextManager[Any]) -> ContextManager[Iterable[Any]]: ... class closing(ContextManager[_T], Generic[_T]): def __init__(self, thing: _T) -> None: ... if sys.version_info >= (3, 4): class suppress(ContextManager[None]): def __init__(self, *exceptions: Type[BaseException]) -> None: ... def __exit__(self, exctype: Optional[Type[BaseException]], excinst: Optional[BaseException], exctb: Optional[TracebackType]) -> bool: ... class redirect_stdout(ContextManager[None]): def __init__(self, new_target: Optional[IO[str]]) -> None: ... if sys.version_info >= (3, 5): class redirect_stderr(ContextManager[None]): def __init__(self, new_target: Optional[IO[str]]) -> None: ... if sys.version_info >= (3,): class ContextDecorator: def __call__(self, func: Callable[..., None]) -> Callable[..., ContextManager[None]]: ... _U = TypeVar('_U', bound=ExitStack) class ExitStack(ContextManager[ExitStack]): def __init__(self) -> None: ... def enter_context(self, cm: ContextManager[_T]) -> _T: ... def push(self, exit: _CM_EF) -> _CM_EF: ... def callback(self, callback: Callable[..., Any], *args: Any, **kwds: Any) -> Callable[..., Any]: ... def pop_all(self: _U) -> _U: ... def close(self) -> None: ... def __enter__(self: _U) -> _U: ... def __exit__(self, __exc_type: Optional[Type[BaseException]], __exc_value: Optional[BaseException], __traceback: Optional[TracebackType]) -> bool: ... 
if sys.version_info >= (3, 7): from typing import Awaitable _S = TypeVar('_S', bound=AsyncExitStack) _ExitCoroFunc = Callable[[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]], Awaitable[bool]] _CallbackCoroFunc = Callable[..., Awaitable[Any]] _ACM_EF = TypeVar('_ACM_EF', AsyncContextManager[Any], _ExitCoroFunc) class AsyncExitStack(AsyncContextManager[AsyncExitStack]): def __init__(self) -> None: ... def enter_context(self, cm: ContextManager[_T]) -> _T: ... def enter_async_context(self, cm: AsyncContextManager[_T]) -> Awaitable[_T]: ... def push(self, exit: _CM_EF) -> _CM_EF: ... def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: ... def callback(self, callback: Callable[..., Any], *args: Any, **kwds: Any) -> Callable[..., Any]: ... def push_async_callback(self, callback: _CallbackCoroFunc, *args: Any, **kwds: Any) -> _CallbackCoroFunc: ... def pop_all(self: _S) -> _S: ... def aclose(self) -> Awaitable[None]: ... def __aenter__(self: _S) -> Awaitable[_S]: ... def __aexit__(self, __exc_type: Optional[Type[BaseException]], __exc_value: Optional[BaseException], __traceback: Optional[TracebackType]) -> Awaitable[bool]: ... if sys.version_info >= (3, 7): @overload def nullcontext(enter_result: _T) -> ContextManager[_T]: ... @overload def nullcontext() -> ContextManager[None]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/copy.pyi0000644€tŠÔÚ€2›s®0000000052713576752252025431 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for copy from typing import TypeVar, Optional, Dict, Any _T = TypeVar('_T') # None in CPython but non-None in Jython PyStringMap: Any # Note: memo and _nil are internal kwargs. def deepcopy(x: _T, memo: Optional[Dict[int, _T]] = ..., _nil: Any = ...) -> _T: ... def copy(x: _T) -> _T: ... class Error(Exception): ... 
error = Error mypy-0.761/mypy/typeshed/stdlib/2and3/crypt.pyi0000644€tŠÔÚ€2›s®0000000121013576752252025606 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import List, Optional, Union if sys.version_info >= (3, 3): class _Method: ... METHOD_CRYPT: _Method METHOD_MD5: _Method METHOD_SHA256: _Method METHOD_SHA512: _Method if sys.version_info >= (3, 7): METHOD_BLOWFISH: _Method methods: List[_Method] if sys.version_info >= (3, 7): def mksalt(method: Optional[_Method] = ..., *, rounds: Optional[int] = ...) -> str: ... else: def mksalt(method: Optional[_Method] = ...) -> str: ... def crypt(word: str, salt: Optional[Union[str, _Method]] = ...) -> str: ... else: def crypt(word: str, salt: str) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2and3/csv.pyi0000644€tŠÔÚ€2›s®0000000537513576752252025260 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from _csv import ( QUOTE_ALL as QUOTE_ALL, QUOTE_MINIMAL as QUOTE_MINIMAL, QUOTE_NONE as QUOTE_NONE, QUOTE_NONNUMERIC as QUOTE_NONNUMERIC, Error as Error, _reader, _writer, field_size_limit as field_size_limit, get_dialect as get_dialect, list_dialects as list_dialects, reader as reader, register_dialect as register_dialect, unregister_dialect as unregister_dialect, writer as writer, ) from collections import OrderedDict from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, Sequence, Text, Type, Union _Dialect = Union[str, Dialect, Type[Dialect]] _DictRow = Mapping[str, Any] class Dialect(object): delimiter: str quotechar: Optional[str] escapechar: Optional[str] doublequote: bool skipinitialspace: bool lineterminator: str quoting: int def __init__(self) -> None: ... 
class excel(Dialect): delimiter: str quotechar: str doublequote: bool skipinitialspace: bool lineterminator: str quoting: int class excel_tab(excel): delimiter: str if sys.version_info >= (3,): class unix_dialect(Dialect): delimiter: str quotechar: str doublequote: bool skipinitialspace: bool lineterminator: str quoting: int if sys.version_info >= (3, 6): _DRMapping = OrderedDict[str, str] else: _DRMapping = Dict[str, str] class DictReader(Iterator[_DRMapping]): restkey: Optional[str] restval: Optional[str] reader: _reader dialect: _Dialect line_num: int fieldnames: Sequence[str] def __init__( self, f: Iterable[Text], fieldnames: Optional[Sequence[str]] = ..., restkey: Optional[str] = ..., restval: Optional[str] = ..., dialect: _Dialect = ..., *args: Any, **kwds: Any, ) -> None: ... def __iter__(self) -> DictReader: ... if sys.version_info >= (3,): def __next__(self) -> _DRMapping: ... else: def next(self) -> _DRMapping: ... class DictWriter(object): fieldnames: Sequence[str] restval: Optional[Any] extrasaction: str writer: _writer def __init__( self, f: Any, fieldnames: Iterable[str], restval: Optional[Any] = ..., extrasaction: str = ..., dialect: _Dialect = ..., *args: Any, **kwds: Any, ) -> None: ... def writeheader(self) -> None: ... def writerow(self, rowdict: _DictRow) -> None: ... def writerows(self, rowdicts: Iterable[_DictRow]) -> None: ... class Sniffer(object): preferred: List[str] def __init__(self) -> None: ... def sniff(self, sample: str, delimiters: Optional[str] = ...) -> Type[Dialect]: ... def has_header(self, sample: str) -> bool: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/ctypes/0000755€tŠÔÚ€2›s®0000000000013576752267025245 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/ctypes/__init__.pyi0000644€tŠÔÚ€2›s®0000002676113576752252027535 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for ctypes from array import array from typing import ( Any, Callable, ClassVar, Iterator, Iterable, List, Mapping, Optional, Sequence, Sized, Text, Tuple, Type, Generic, TypeVar, overload, ) from typing import Union as _UnionT import sys _T = TypeVar('_T') _DLLT = TypeVar('_DLLT', bound=CDLL) _CT = TypeVar('_CT', bound=_CData) RTLD_GLOBAL: int = ... RTLD_LOCAL: int = ... DEFAULT_MODE: int = ... class CDLL(object): _func_flags_: ClassVar[int] = ... _func_restype_: ClassVar[_CData] = ... _name: str = ... _handle: int = ... _FuncPtr: Type[_FuncPointer] = ... def __init__( self, name: str, mode: int = ..., handle: Optional[int] = ..., use_errno: bool = ..., use_last_error: bool = ..., winmode: Optional[int] = ..., ) -> None: ... def __getattr__(self, name: str) -> _FuncPointer: ... def __getitem__(self, name: str) -> _FuncPointer: ... if sys.platform == 'win32': class OleDLL(CDLL): ... class WinDLL(CDLL): ... class PyDLL(CDLL): ... class LibraryLoader(Generic[_DLLT]): def __init__(self, dlltype: Type[_DLLT]) -> None: ... def __getattr__(self, name: str) -> _DLLT: ... def __getitem__(self, name: str) -> _DLLT: ... def LoadLibrary(self, name: str) -> _DLLT: ... cdll: LibraryLoader[CDLL] = ... if sys.platform == 'win32': windll: LibraryLoader[WinDLL] = ... oledll: LibraryLoader[OleDLL] = ... pydll: LibraryLoader[PyDLL] = ... pythonapi: PyDLL = ... # Anything that implements the read-write buffer interface. # The buffer interface is defined purely on the C level, so we cannot define a normal Protocol # for it. Instead we have to list the most common stdlib buffer classes in a Union. 
_WritableBuffer = _UnionT[bytearray, memoryview, array, _CData] # Same as _WritableBuffer, but also includes read-only buffer types (like bytes). _ReadOnlyBuffer = _UnionT[_WritableBuffer, bytes] class _CDataMeta(type): # By default mypy complains about the following two methods, because strictly speaking cls # might not be a Type[_CT]. However this can never actually happen, because the only class that # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here. def __mul__(cls: Type[_CT], other: int) -> Type[Array[_CT]]: ... # type: ignore def __rmul__(cls: Type[_CT], other: int) -> Type[Array[_CT]]: ... # type: ignore class _CData(metaclass=_CDataMeta): _b_base: int = ... _b_needsfree_: bool = ... _objects: Optional[Mapping[Any, int]] = ... @classmethod def from_buffer(cls: Type[_CT], source: _WritableBuffer, offset: int = ...) -> _CT: ... @classmethod def from_buffer_copy(cls: Type[_CT], source: _ReadOnlyBuffer, offset: int = ...) -> _CT: ... @classmethod def from_address(cls: Type[_CT], address: int) -> _CT: ... @classmethod def from_param(cls: Type[_CT], obj: Any) -> _UnionT[_CT, _CArgObject]: ... @classmethod def in_dll(cls: Type[_CT], library: CDLL, name: str) -> _CT: ... class _PointerLike(_CData): ... _ECT = Callable[[Optional[Type[_CData]], _FuncPointer, Tuple[_CData, ...]], _CData] _PF = _UnionT[ Tuple[int], Tuple[int, str], Tuple[int, str, Any] ] class _FuncPointer(_PointerLike, _CData): restype: _UnionT[Type[_CData], Callable[[int], None], None] = ... argtypes: Sequence[Type[_CData]] = ... errcheck: _ECT = ... @overload def __init__(self, address: int) -> None: ... @overload def __init__(self, callable: Callable[..., Any]) -> None: ... @overload def __init__(self, func_spec: Tuple[_UnionT[str, int], CDLL], paramflags: Tuple[_PF, ...] = ...) -> None: ... @overload def __init__(self, vtlb_index: int, name: str, paramflags: Tuple[_PF, ...] = ..., iid: pointer[c_int] = ...) -> None: ... 
def __call__(self, *args: Any, **kwargs: Any) -> Any: ... class ArgumentError(Exception): ... def CFUNCTYPE(restype: Optional[Type[_CData]], *argtypes: Type[_CData], use_errno: bool = ..., use_last_error: bool = ...) -> Type[_FuncPointer]: ... if sys.platform == 'win32': def WINFUNCTYPE(restype: Optional[Type[_CData]], *argtypes: Type[_CData], use_errno: bool = ..., use_last_error: bool = ...) -> Type[_FuncPointer]: ... def PYFUNCTYPE(restype: Optional[Type[_CData]], *argtypes: Type[_CData]) -> Type[_FuncPointer]: ... class _CArgObject: ... # Any type that can be implicitly converted to c_void_p when passed as a C function argument. # (bytes is not included here, see below.) _CVoidPLike = _UnionT[_PointerLike, Array[Any], _CArgObject, int] # Same as above, but including types known to be read-only (i. e. bytes). # This distinction is not strictly necessary (ctypes doesn't differentiate between const # and non-const pointers), but it catches errors like memmove(b'foo', buf, 4) # when memmove(buf, b'foo', 4) was intended. _CVoidConstPLike = _UnionT[_CVoidPLike, bytes] def addressof(obj: _CData) -> int: ... def alignment(obj_or_type: _UnionT[_CData, Type[_CData]]) -> int: ... def byref(obj: _CData, offset: int = ...) -> _CArgObject: ... _PT = TypeVar('_PT', bound=_PointerLike) def cast(obj: _UnionT[_CData, _CArgObject], type: Type[_PT]) -> _PT: ... def create_string_buffer(init_or_size: _UnionT[int, bytes], size: Optional[int] = ...) -> Array[c_char]: ... c_buffer = create_string_buffer def create_unicode_buffer(init_or_size: _UnionT[int, Text], size: Optional[int] = ...) -> Array[c_wchar]: ... if sys.platform == 'win32': def DllCanUnloadNow() -> int: ... def DllGetClassObject(rclsid: Any, riid: Any, ppv: Any) -> int: ... # TODO not documented def FormatError(code: int) -> str: ... def GetLastError() -> int: ... def get_errno() -> int: ... if sys.platform == 'win32': def get_last_error() -> int: ... 
def memmove(dst: _CVoidPLike, src: _CVoidConstPLike, count: int) -> None: ... def memset(dst: _CVoidPLike, c: int, count: int) -> None: ... def POINTER(type: Type[_CT]) -> Type[pointer[_CT]]: ... # The real ctypes.pointer is a function, not a class. The stub version of pointer behaves like # ctypes._Pointer in that it is the base class for all pointer types. Unlike the real _Pointer, # it can be instantiated directly (to mimic the behavior of the real pointer function). class pointer(Generic[_CT], _PointerLike, _CData): _type_: ClassVar[Type[_CT]] = ... contents: _CT = ... def __init__(self, arg: _CT = ...) -> None: ... @overload def __getitem__(self, i: int) -> _CT: ... @overload def __getitem__(self, s: slice) -> List[_CT]: ... @overload def __setitem__(self, i: int, o: _CT) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[_CT]) -> None: ... def resize(obj: _CData, size: int) -> None: ... if sys.version_info < (3,): def set_conversion_mode(encoding: str, errors: str) -> Tuple[str, str]: ... def set_errno(value: int) -> int: ... if sys.platform == 'win32': def set_last_error(value: int) -> int: ... def sizeof(obj_or_type: _UnionT[_CData, Type[_CData]]) -> int: ... def string_at(address: _CVoidConstPLike, size: int = ...) -> bytes: ... if sys.platform == 'win32': def WinError(code: Optional[int] = ..., desc: Optional[str] = ...) -> WindowsError: ... def wstring_at(address: _CVoidConstPLike, size: int = ...) -> str: ... class _SimpleCData(Generic[_T], _CData): value: _T = ... def __init__(self, value: _T = ...) -> None: ... class c_byte(_SimpleCData[int]): ... class c_char(_SimpleCData[bytes]): def __init__(self, value: _UnionT[int, bytes] = ...) -> None: ... class c_char_p(_PointerLike, _SimpleCData[Optional[bytes]]): def __init__(self, value: Optional[_UnionT[int, bytes]] = ...) -> None: ... class c_double(_SimpleCData[float]): ... class c_longdouble(_SimpleCData[float]): ... class c_float(_SimpleCData[float]): ... 
class c_int(_SimpleCData[int]): ... class c_int8(_SimpleCData[int]): ... class c_int16(_SimpleCData[int]): ... class c_int32(_SimpleCData[int]): ... class c_int64(_SimpleCData[int]): ... class c_long(_SimpleCData[int]): ... class c_longlong(_SimpleCData[int]): ... class c_short(_SimpleCData[int]): ... class c_size_t(_SimpleCData[int]): ... class c_ssize_t(_SimpleCData[int]): ... class c_ubyte(_SimpleCData[int]): ... class c_uint(_SimpleCData[int]): ... class c_uint8(_SimpleCData[int]): ... class c_uint16(_SimpleCData[int]): ... class c_uint32(_SimpleCData[int]): ... class c_uint64(_SimpleCData[int]): ... class c_ulong(_SimpleCData[int]): ... class c_ulonglong(_SimpleCData[int]): ... class c_ushort(_SimpleCData[int]): ... class c_void_p(_PointerLike, _SimpleCData[Optional[int]]): ... class c_wchar(_SimpleCData[Text]): ... class c_wchar_p(_PointerLike, _SimpleCData[Optional[Text]]): def __init__(self, value: Optional[_UnionT[int, Text]] = ...) -> None: ... class c_bool(_SimpleCData[bool]): def __init__(self, value: bool = ...) -> None: ... if sys.platform == 'win32': class HRESULT(_SimpleCData[int]): ... # TODO undocumented class py_object(_SimpleCData[_T]): ... class _CField: offset: int = ... size: int = ... class _StructUnionMeta(_CDataMeta): _fields_: Sequence[_UnionT[Tuple[str, Type[_CData]], Tuple[str, Type[_CData], int]]] = ... _pack_: int = ... _anonymous_: Sequence[str] = ... def __getattr__(self, name: str) -> _CField: ... class _StructUnionBase(_CData, metaclass=_StructUnionMeta): def __init__(self, *args: Any, **kw: Any) -> None: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... class Union(_StructUnionBase): ... class Structure(_StructUnionBase): ... class BigEndianStructure(Structure): ... class LittleEndianStructure(Structure): ... class Array(Generic[_CT], _CData): _length_: ClassVar[int] = ... _type_: ClassVar[Type[_CT]] = ... raw: bytes = ... 
# Note: only available if _CT == c_char value: Any = ... # Note: bytes if _CT == c_char, Text if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. # All of these "Any"s stand for the array's element type, but it's not possible to use _CT # here, because of a special feature of ctypes. # By default, when accessing an element of an Array[_CT], the returned object has type _CT. # However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object # and converts it to the corresponding Python primitive. For example, when accessing an element # of an Array[c_int], a Python int object is returned, not a c_int. # This behavior does *not* apply to subclasses of "simple types". # If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns # a MyInt, not an int. # This special behavior is not easy to model in a stub, so for now all places where # the array element type would belong are annotated with Any instead. def __init__(self, *args: Any) -> None: ... @overload def __getitem__(self, i: int) -> Any: ... @overload def __getitem__(self, s: slice) -> List[Any]: ... @overload def __setitem__(self, i: int, o: Any) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[Any]) -> None: ... def __iter__(self) -> Iterator[Any]: ... # Can't inherit from Sized because the metaclass conflict between # Sized and _CData prevents using _CDataMeta. def __len__(self) -> int: ... mypy-0.761/mypy/typeshed/stdlib/2and3/ctypes/util.pyi0000644€tŠÔÚ€2›s®0000000027313576752252026741 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for ctypes.util from typing import Optional import sys def find_library(name: str) -> Optional[str]: ... if sys.platform == 'win32': def find_msvcrt() -> Optional[str]: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/ctypes/wintypes.pyi0000644€tŠÔÚ€2›s®0000001072613576752252027652 0ustar jukkaDROPBOX\Domain Users00000000000000from ctypes import ( _SimpleCData, Array, Structure, c_byte, c_char, c_char_p, c_double, c_float, c_int, c_long, c_longlong, c_short, c_uint, c_ulong, c_ulonglong, c_ushort, c_void_p, c_wchar, c_wchar_p, pointer, ) BYTE = c_byte WORD = c_ushort DWORD = c_ulong CHAR = c_char WCHAR = c_wchar UINT = c_uint INT = c_int DOUBLE = c_double FLOAT = c_float BOOLEAN = BYTE BOOL = c_long class VARIANT_BOOL(_SimpleCData[bool]): ... ULONG = c_ulong LONG = c_long USHORT = c_ushort SHORT = c_short LARGE_INTEGER = c_longlong _LARGE_INTEGER = c_longlong ULARGE_INTEGER = c_ulonglong _ULARGE_INTEGER = c_ulonglong OLESTR = c_wchar_p LPOLESTR = c_wchar_p LPCOLESTR = c_wchar_p LPWSTR = c_wchar_p LPCWSTR = c_wchar_p LPSTR = c_char_p LPCSTR = c_char_p LPVOID = c_void_p LPCVOID = c_void_p # These two types are pointer-sized unsigned and signed ints, respectively. # At runtime, they are either c_[u]long or c_[u]longlong, depending on the host's pointer size # (they are not really separate classes). class WPARAM(_SimpleCData[int]): ... class LPARAM(_SimpleCData[int]): ... 
ATOM = WORD LANGID = WORD COLORREF = DWORD LGRPID = DWORD LCTYPE = DWORD LCID = DWORD HANDLE = c_void_p HACCEL = HANDLE HBITMAP = HANDLE HBRUSH = HANDLE HCOLORSPACE = HANDLE HDC = HANDLE HDESK = HANDLE HDWP = HANDLE HENHMETAFILE = HANDLE HFONT = HANDLE HGDIOBJ = HANDLE HGLOBAL = HANDLE HHOOK = HANDLE HICON = HANDLE HINSTANCE = HANDLE HKEY = HANDLE HKL = HANDLE HLOCAL = HANDLE HMENU = HANDLE HMETAFILE = HANDLE HMODULE = HANDLE HMONITOR = HANDLE HPALETTE = HANDLE HPEN = HANDLE HRGN = HANDLE HRSRC = HANDLE HSTR = HANDLE HTASK = HANDLE HWINSTA = HANDLE HWND = HANDLE SC_HANDLE = HANDLE SERVICE_STATUS_HANDLE = HANDLE class RECT(Structure): left: LONG top: LONG right: LONG bottom: LONG RECTL = RECT _RECTL = RECT tagRECT = RECT class _SMALL_RECT(Structure): Left: SHORT Top: SHORT Right: SHORT Bottom: SHORT SMALL_RECT = _SMALL_RECT class _COORD(Structure): X: SHORT Y: SHORT class POINT(Structure): x: LONG y: LONG POINTL = POINT _POINTL = POINT tagPOINT = POINT class SIZE(Structure): cx: LONG cy: LONG SIZEL = SIZE tagSIZE = SIZE def RGB(red: int, green: int, blue: int) -> int: ... class FILETIME(Structure): dwLowDateTime: DWORD dwHighDateTime: DWORD _FILETIME = FILETIME class MSG(Structure): hWnd: HWND message: UINT wParam: WPARAM lParam: LPARAM time: DWORD pt: POINT tagMSG = MSG MAX_PATH: int class WIN32_FIND_DATAA(Structure): dwFileAttributes: DWORD ftCreationTime: FILETIME ftLastAccessTime: FILETIME ftLastWriteTime: FILETIME nFileSizeHigh: DWORD nFileSizeLow: DWORD dwReserved0: DWORD dwReserved1: DWORD cFileName: Array[CHAR] cAlternateFileName: Array[CHAR] class WIN32_FIND_DATAW(Structure): dwFileAttributes: DWORD ftCreationTime: FILETIME ftLastAccessTime: FILETIME ftLastWriteTime: FILETIME nFileSizeHigh: DWORD nFileSizeLow: DWORD dwReserved0: DWORD dwReserved1: DWORD cFileName: Array[WCHAR] cAlternateFileName: Array[WCHAR] # These pointer type definitions use pointer[...] instead of POINTER(...), to allow them # to be used in type annotations. 
PBOOL = pointer[BOOL] LPBOOL = pointer[BOOL] PBOOLEAN = pointer[BOOLEAN] PBYTE = pointer[BYTE] LPBYTE = pointer[BYTE] PCHAR = pointer[CHAR] LPCOLORREF = pointer[COLORREF] PDWORD = pointer[DWORD] LPDWORD = pointer[DWORD] PFILETIME = pointer[FILETIME] LPFILETIME = pointer[FILETIME] PFLOAT = pointer[FLOAT] PHANDLE = pointer[HANDLE] LPHANDLE = pointer[HANDLE] PHKEY = pointer[HKEY] LPHKL = pointer[HKL] PINT = pointer[INT] LPINT = pointer[INT] PLARGE_INTEGER = pointer[LARGE_INTEGER] PLCID = pointer[LCID] PLONG = pointer[LONG] LPLONG = pointer[LONG] PMSG = pointer[MSG] LPMSG = pointer[MSG] PPOINT = pointer[POINT] LPPOINT = pointer[POINT] PPOINTL = pointer[POINTL] PRECT = pointer[RECT] LPRECT = pointer[RECT] PRECTL = pointer[RECTL] LPRECTL = pointer[RECTL] LPSC_HANDLE = pointer[SC_HANDLE] PSHORT = pointer[SHORT] PSIZE = pointer[SIZE] LPSIZE = pointer[SIZE] PSIZEL = pointer[SIZEL] LPSIZEL = pointer[SIZEL] PSMALL_RECT = pointer[SMALL_RECT] PUINT = pointer[UINT] LPUINT = pointer[UINT] PULARGE_INTEGER = pointer[ULARGE_INTEGER] PULONG = pointer[ULONG] PUSHORT = pointer[USHORT] PWCHAR = pointer[WCHAR] PWIN32_FIND_DATAA = pointer[WIN32_FIND_DATAA] LPWIN32_FIND_DATAA = pointer[WIN32_FIND_DATAA] PWIN32_FIND_DATAW = pointer[WIN32_FIND_DATAW] LPWIN32_FIND_DATAW = pointer[WIN32_FIND_DATAW] PWORD = pointer[WORD] LPWORD = pointer[WORD] mypy-0.761/mypy/typeshed/stdlib/2and3/curses/0000755€tŠÔÚ€2›s®0000000000013576752267025242 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/curses/__init__.pyi0000644€tŠÔÚ€2›s®0000000056013576752252027517 0ustar jukkaDROPBOX\Domain Users00000000000000from _curses import * # noqa: F403 from _curses import _CursesWindow as _CursesWindow from typing import TypeVar, Callable, Any _T = TypeVar('_T') # available after calling `curses.initscr()` LINES: int COLS: int # available after calling `curses.start_color()` COLORS: int COLOR_PAIRS: int def wrapper(func: Callable[..., _T], *arg: Any, **kwds: Any) -> _T: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/curses/ascii.pyi0000644€tŠÔÚ€2›s®0000000230613576752252027050 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Union, overload, TypeVar _Ch = TypeVar('_Ch', str, int) NUL: int SOH: int STX: int ETX: int EOT: int ENQ: int ACK: int BEL: int BS: int TAB: int HT: int LF: int NL: int VT: int FF: int CR: int SO: int SI: int DLE: int DC1: int DC2: int DC3: int DC4: int NAK: int SYN: int ETB: int CAN: int EM: int SUB: int ESC: int FS: int GS: int RS: int US: int SP: int DEL: int controlnames: List[int] def isalnum(c: Union[str, int]) -> bool: ... def isalpha(c: Union[str, int]) -> bool: ... def isascii(c: Union[str, int]) -> bool: ... def isblank(c: Union[str, int]) -> bool: ... def iscntrl(c: Union[str, int]) -> bool: ... def isdigit(c: Union[str, int]) -> bool: ... def isgraph(c: Union[str, int]) -> bool: ... def islower(c: Union[str, int]) -> bool: ... def isprint(c: Union[str, int]) -> bool: ... def ispunct(c: Union[str, int]) -> bool: ... def isspace(c: Union[str, int]) -> bool: ... def isupper(c: Union[str, int]) -> bool: ... def isxdigit(c: Union[str, int]) -> bool: ... def isctrl(c: Union[str, int]) -> bool: ... def ismeta(c: Union[str, int]) -> bool: ... def ascii(c: _Ch) -> _Ch: ... def ctrl(c: _Ch) -> _Ch: ... def alt(c: _Ch) -> _Ch: ... def unctrl(c: Union[str, int]) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2and3/curses/panel.pyi0000644€tŠÔÚ€2›s®0000000143713576752252027063 0ustar jukkaDROPBOX\Domain Users00000000000000from _curses import _CursesWindow class _Curses_Panel: # type is (note the space in the class name) def above(self) -> _Curses_Panel: ... def below(self) -> _Curses_Panel: ... def bottom(self) -> None: ... def hidden(self) -> bool: ... def hide(self) -> None: ... def move(self, y: int, x: int) -> None: ... def replace(self, win: _CursesWindow) -> None: ... def set_userptr(self, obj: object) -> None: ... def show(self) -> None: ... def top(self) -> None: ... 
def userptr(self) -> object: ... def window(self) -> _CursesWindow: ... def bottom_panel() -> _Curses_Panel: ... def new_panel(win: _CursesWindow) -> _Curses_Panel: ... def top_panel() -> _Curses_Panel: ... def update_panels() -> _Curses_Panel: ... mypy-0.761/mypy/typeshed/stdlib/2and3/curses/textpad.pyi0000644€tŠÔÚ€2›s®0000000065513576752252027436 0ustar jukkaDROPBOX\Domain Users00000000000000from _curses import _CursesWindow from typing import Callable, Union def rectangle(win: _CursesWindow, uly: int, ulx: int, lry: int, lrx: int) -> None: ... class Textbox: stripspaces: bool def __init__(self, w: _CursesWindow, insert_mode: bool = ...) -> None: ... def edit(self, validate: Callable[[int], int]) -> str: ... def do_command(self, ch: Union[str, int]) -> None: ... def gather(self) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2and3/datetime.pyi0000644€tŠÔÚ€2›s®0000002676613576752252026270 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from time import struct_time from typing import AnyStr, Optional, SupportsAbs, Tuple, Union, overload, ClassVar, Type, TypeVar _S = TypeVar("_S") if sys.version_info >= (3,): _Text = str else: _Text = Union[str, unicode] MINYEAR: int MAXYEAR: int class tzinfo: def tzname(self, dt: Optional[datetime]) -> Optional[str]: ... def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def dst(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def fromutc(self, dt: datetime) -> datetime: ... if sys.version_info >= (3, 2): class timezone(tzinfo): utc: ClassVar[timezone] min: ClassVar[timezone] max: ClassVar[timezone] def __init__(self, offset: timedelta, name: str = ...) -> None: ... def __hash__(self) -> int: ... _tzinfo = tzinfo class date: min: ClassVar[date] max: ClassVar[date] resolution: ClassVar[timedelta] def __init__(self, year: int, month: int, day: int) -> None: ... @classmethod def fromtimestamp(cls: Type[_S], t: float) -> _S: ... @classmethod def today(cls: Type[_S]) -> _S: ... 
@classmethod def fromordinal(cls: Type[_S], n: int) -> _S: ... if sys.version_info >= (3, 7): @classmethod def fromisoformat(cls: Type[_S], date_string: str) -> _S: ... if sys.version_info >= (3, 8): @classmethod def fromisocalendar(cls: Type[_S], year: int, week: int, day: int) -> _S: ... @property def year(self) -> int: ... @property def month(self) -> int: ... @property def day(self) -> int: ... def ctime(self) -> str: ... def strftime(self, fmt: _Text) -> str: ... if sys.version_info >= (3,): def __format__(self, fmt: str) -> str: ... else: def __format__(self, fmt: AnyStr) -> AnyStr: ... def isoformat(self) -> str: ... def timetuple(self) -> struct_time: ... def toordinal(self) -> int: ... def replace(self, year: int = ..., month: int = ..., day: int = ...) -> date: ... def __le__(self, other: date) -> bool: ... def __lt__(self, other: date) -> bool: ... def __ge__(self, other: date) -> bool: ... def __gt__(self, other: date) -> bool: ... if sys.version_info >= (3, 8): def __add__(self: _S, other: timedelta) -> _S: ... else: def __add__(self, other: timedelta) -> date: ... @overload def __sub__(self, other: timedelta) -> date: ... @overload def __sub__(self, other: date) -> timedelta: ... def __hash__(self) -> int: ... def weekday(self) -> int: ... def isoweekday(self) -> int: ... def isocalendar(self) -> Tuple[int, int, int]: ... class time: min: ClassVar[time] max: ClassVar[time] resolution: ClassVar[timedelta] if sys.version_info >= (3, 6): def __init__(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ..., *, fold: int = ...) -> None: ... else: def __init__(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ...) -> None: ... @property def hour(self) -> int: ... @property def minute(self) -> int: ... @property def second(self) -> int: ... @property def microsecond(self) -> int: ... @property def tzinfo(self) -> Optional[_tzinfo]: ... 
if sys.version_info >= (3, 6): @property def fold(self) -> int: ... def __le__(self, other: time) -> bool: ... def __lt__(self, other: time) -> bool: ... def __ge__(self, other: time) -> bool: ... def __gt__(self, other: time) -> bool: ... def __hash__(self) -> int: ... if sys.version_info >= (3, 6): def isoformat(self, timespec: str = ...) -> str: ... else: def isoformat(self) -> str: ... if sys.version_info >= (3, 7): @classmethod def fromisoformat(cls: Type[_S], time_string: str) -> _S: ... def strftime(self, fmt: _Text) -> str: ... if sys.version_info >= (3,): def __format__(self, fmt: str) -> str: ... else: def __format__(self, fmt: AnyStr) -> AnyStr: ... def utcoffset(self) -> Optional[timedelta]: ... def tzname(self) -> Optional[str]: ... def dst(self) -> Optional[int]: ... if sys.version_info >= (3, 6): def replace(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ..., *, fold: int = ...) -> time: ... else: def replace(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ...) -> time: ... _date = date _time = time class timedelta(SupportsAbs[timedelta]): min: ClassVar[timedelta] max: ClassVar[timedelta] resolution: ClassVar[timedelta] if sys.version_info >= (3, 6): def __init__(self, days: float = ..., seconds: float = ..., microseconds: float = ..., milliseconds: float = ..., minutes: float = ..., hours: float = ..., weeks: float = ..., *, fold: int = ...) -> None: ... else: def __init__(self, days: float = ..., seconds: float = ..., microseconds: float = ..., milliseconds: float = ..., minutes: float = ..., hours: float = ..., weeks: float = ...) -> None: ... @property def days(self) -> int: ... @property def seconds(self) -> int: ... @property def microseconds(self) -> int: ... def total_seconds(self) -> float: ... def __add__(self, other: timedelta) -> timedelta: ... def __radd__(self, other: timedelta) -> timedelta: ... 
def __sub__(self, other: timedelta) -> timedelta: ... def __rsub__(self, other: timedelta) -> timedelta: ... def __neg__(self) -> timedelta: ... def __pos__(self) -> timedelta: ... def __abs__(self) -> timedelta: ... def __mul__(self, other: float) -> timedelta: ... def __rmul__(self, other: float) -> timedelta: ... @overload def __floordiv__(self, other: timedelta) -> int: ... @overload def __floordiv__(self, other: int) -> timedelta: ... if sys.version_info >= (3,): @overload def __truediv__(self, other: timedelta) -> float: ... @overload def __truediv__(self, other: float) -> timedelta: ... def __mod__(self, other: timedelta) -> timedelta: ... def __divmod__(self, other: timedelta) -> Tuple[int, timedelta]: ... else: @overload def __div__(self, other: timedelta) -> float: ... @overload def __div__(self, other: float) -> timedelta: ... def __le__(self, other: timedelta) -> bool: ... def __lt__(self, other: timedelta) -> bool: ... def __ge__(self, other: timedelta) -> bool: ... def __gt__(self, other: timedelta) -> bool: ... def __hash__(self) -> int: ... class datetime(date): min: ClassVar[datetime] max: ClassVar[datetime] resolution: ClassVar[timedelta] if sys.version_info >= (3, 6): def __init__(self, year: int, month: int, day: int, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ..., *, fold: int = ...) -> None: ... else: def __init__(self, year: int, month: int, day: int, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ...) -> None: ... @property def year(self) -> int: ... @property def month(self) -> int: ... @property def day(self) -> int: ... @property def hour(self) -> int: ... @property def minute(self) -> int: ... @property def second(self) -> int: ... @property def microsecond(self) -> int: ... @property def tzinfo(self) -> Optional[_tzinfo]: ... if sys.version_info >= (3, 6): @property def fold(self) -> int: ... 
@classmethod def fromtimestamp(cls: Type[_S], t: float, tz: Optional[_tzinfo] = ...) -> _S: ... @classmethod def utcfromtimestamp(cls: Type[_S], t: float) -> _S: ... @classmethod def today(cls: Type[_S]) -> _S: ... @classmethod def fromordinal(cls: Type[_S], n: int) -> _S: ... if sys.version_info >= (3, 8): @classmethod def now(cls: Type[_S], tz: Optional[_tzinfo] = ...) -> _S: ... else: @overload @classmethod def now(cls: Type[_S], tz: None = ...) -> _S: ... @overload @classmethod def now(cls, tz: _tzinfo) -> datetime: ... @classmethod def utcnow(cls: Type[_S]) -> _S: ... if sys.version_info >= (3, 6): @classmethod def combine(cls, date: _date, time: _time, tzinfo: Optional[_tzinfo] = ...) -> datetime: ... else: @classmethod def combine(cls, date: _date, time: _time) -> datetime: ... if sys.version_info >= (3, 7): @classmethod def fromisoformat(cls: Type[_S], date_string: str) -> _S: ... def strftime(self, fmt: _Text) -> str: ... if sys.version_info >= (3,): def __format__(self, fmt: str) -> str: ... else: def __format__(self, fmt: AnyStr) -> AnyStr: ... def toordinal(self) -> int: ... def timetuple(self) -> struct_time: ... if sys.version_info >= (3, 3): def timestamp(self) -> float: ... def utctimetuple(self) -> struct_time: ... def date(self) -> _date: ... def time(self) -> _time: ... def timetz(self) -> _time: ... if sys.version_info >= (3, 6): def replace(self, year: int = ..., month: int = ..., day: int = ..., hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ..., *, fold: int = ...) -> datetime: ... else: def replace(self, year: int = ..., month: int = ..., day: int = ..., hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ...) -> datetime: ... if sys.version_info >= (3, 8): def astimezone(self: _S, tz: Optional[_tzinfo] = ...) -> _S: ... elif sys.version_info >= (3, 3): def astimezone(self, tz: Optional[_tzinfo] = ...) -> datetime: ... 
else: def astimezone(self, tz: _tzinfo) -> datetime: ... def ctime(self) -> str: ... if sys.version_info >= (3, 6): def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ... else: def isoformat(self, sep: str = ...) -> str: ... @classmethod def strptime(cls, date_string: _Text, format: _Text) -> datetime: ... def utcoffset(self) -> Optional[timedelta]: ... def tzname(self) -> Optional[str]: ... def dst(self) -> Optional[timedelta]: ... def __le__(self, other: datetime) -> bool: ... # type: ignore def __lt__(self, other: datetime) -> bool: ... # type: ignore def __ge__(self, other: datetime) -> bool: ... # type: ignore def __gt__(self, other: datetime) -> bool: ... # type: ignore if sys.version_info >= (3, 8): def __add__(self: _S, other: timedelta) -> _S: ... else: def __add__(self, other: timedelta) -> datetime: ... @overload # type: ignore def __sub__(self, other: datetime) -> timedelta: ... @overload def __sub__(self, other: timedelta) -> datetime: ... def __hash__(self) -> int: ... def weekday(self) -> int: ... def isoweekday(self) -> int: ... def isocalendar(self) -> Tuple[int, int, int]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/decimal.pyi0000644€tŠÔÚ€2›s®0000003724513576752252026064 0ustar jukkaDROPBOX\Domain Users00000000000000import numbers import sys from types import TracebackType from typing import ( Any, Container, Dict, List, NamedTuple, Optional, overload, Sequence, Text, Tuple, Type, TypeVar, Union, ) _Decimal = Union[Decimal, int] _DecimalNew = Union[Decimal, float, Text, Tuple[int, Sequence[int], int]] if sys.version_info >= (3,): _ComparableNum = Union[Decimal, float, numbers.Rational] else: _ComparableNum = Union[Decimal, float] _DecimalT = TypeVar('_DecimalT', bound=Decimal) class DecimalTuple(NamedTuple): sign: int digits: Tuple[int, ...] 
exponent: int ROUND_DOWN: str ROUND_HALF_UP: str ROUND_HALF_EVEN: str ROUND_CEILING: str ROUND_FLOOR: str ROUND_UP: str ROUND_HALF_DOWN: str ROUND_05UP: str if sys.version_info >= (3,): HAVE_THREADS: bool MAX_EMAX: int MAX_PREC: int MIN_EMIN: int MIN_ETINY: int class DecimalException(ArithmeticError): def handle(self, context: Context, *args: Any) -> Optional[Decimal]: ... class Clamped(DecimalException): ... class InvalidOperation(DecimalException): ... class ConversionSyntax(InvalidOperation): ... class DivisionByZero(DecimalException, ZeroDivisionError): ... class DivisionImpossible(InvalidOperation): ... class DivisionUndefined(InvalidOperation, ZeroDivisionError): ... class Inexact(DecimalException): ... class InvalidContext(InvalidOperation): ... class Rounded(DecimalException): ... class Subnormal(DecimalException): ... class Overflow(Inexact, Rounded): ... class Underflow(Inexact, Rounded, Subnormal): ... if sys.version_info >= (3,): class FloatOperation(DecimalException, TypeError): ... def setcontext(context: Context) -> None: ... def getcontext() -> Context: ... def localcontext(ctx: Optional[Context] = ...) -> _ContextManager: ... class Decimal(object): def __new__(cls: Type[_DecimalT], value: _DecimalNew = ..., context: Optional[Context] = ...) -> _DecimalT: ... @classmethod def from_float(cls, f: float) -> Decimal: ... if sys.version_info >= (3,): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... def __eq__(self, other: object, context: Optional[Context] = ...) -> bool: ... if sys.version_info < (3,): def __ne__(self, other: object, context: Optional[Context] = ...) -> bool: ... def __lt__(self, other: _ComparableNum, context: Optional[Context] = ...) -> bool: ... def __le__(self, other: _ComparableNum, context: Optional[Context] = ...) -> bool: ... def __gt__(self, other: _ComparableNum, context: Optional[Context] = ...) -> bool: ... def __ge__(self, other: _ComparableNum, context: Optional[Context] = ...) -> bool: ... 
def compare(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __hash__(self) -> int: ... def as_tuple(self) -> DecimalTuple: ... if sys.version_info >= (3,): def as_integer_ratio(self) -> Tuple[int, int]: ... def __str__(self, eng: bool = ..., context: Optional[Context] = ...) -> str: ... def to_eng_string(self, context: Optional[Context] = ...) -> str: ... def __neg__(self, context: Optional[Context] = ...) -> Decimal: ... def __pos__(self, context: Optional[Context] = ...) -> Decimal: ... def __abs__(self, round: bool = ..., context: Optional[Context] = ...) -> Decimal: ... def __add__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __radd__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __sub__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __rsub__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __mul__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __rmul__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __truediv__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __rtruediv__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... if sys.version_info < (3,): def __div__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __rdiv__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __divmod__(self, other: _Decimal, context: Optional[Context] = ...) -> Tuple[Decimal, Decimal]: ... def __rdivmod__(self, other: _Decimal, context: Optional[Context] = ...) -> Tuple[Decimal, Decimal]: ... def __mod__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __rmod__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def remainder_near(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... 
def __floordiv__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __rfloordiv__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __trunc__(self) -> int: ... @property def real(self) -> Decimal: ... @property def imag(self) -> Decimal: ... def conjugate(self) -> Decimal: ... def __complex__(self) -> complex: ... if sys.version_info >= (3,): @overload def __round__(self) -> int: ... @overload def __round__(self, ndigits: int) -> Decimal: ... def __floor__(self) -> int: ... def __ceil__(self) -> int: ... else: def __long__(self) -> long: ... def fma(self, other: _Decimal, third: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __pow__(self, other: _Decimal, modulo: Optional[_Decimal] = ..., context: Optional[Context] = ...) -> Decimal: ... def __rpow__(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def normalize(self, context: Optional[Context] = ...) -> Decimal: ... if sys.version_info >= (3,): def quantize(self, exp: _Decimal, rounding: Optional[str] = ..., context: Optional[Context] = ...) -> Decimal: ... def same_quantum(self, other: _Decimal, context: Optional[Context] = ...) -> bool: ... else: def quantize(self, exp: _Decimal, rounding: Optional[str] = ..., context: Optional[Context] = ..., watchexp: bool = ...) -> Decimal: ... def same_quantum(self, other: _Decimal) -> bool: ... def to_integral_exact(self, rounding: Optional[str] = ..., context: Optional[Context] = ...) -> Decimal: ... def to_integral_value(self, rounding: Optional[str] = ..., context: Optional[Context] = ...) -> Decimal: ... def to_integral(self, rounding: Optional[str] = ..., context: Optional[Context] = ...) -> Decimal: ... def sqrt(self, context: Optional[Context] = ...) -> Decimal: ... def max(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def min(self, other: _Decimal, context: Optional[Context] = ...) 
-> Decimal: ... def adjusted(self) -> int: ... if sys.version_info >= (3,): def canonical(self) -> Decimal: ... else: def canonical(self, context: Optional[Context] = ...) -> Decimal: ... def compare_signal(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... if sys.version_info >= (3,): def compare_total(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def compare_total_mag(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... else: def compare_total(self, other: _Decimal) -> Decimal: ... def compare_total_mag(self, other: _Decimal) -> Decimal: ... def copy_abs(self) -> Decimal: ... def copy_negate(self) -> Decimal: ... if sys.version_info >= (3,): def copy_sign(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... else: def copy_sign(self, other: _Decimal) -> Decimal: ... def exp(self, context: Optional[Context] = ...) -> Decimal: ... def is_canonical(self) -> bool: ... def is_finite(self) -> bool: ... def is_infinite(self) -> bool: ... def is_nan(self) -> bool: ... def is_normal(self, context: Optional[Context] = ...) -> bool: ... def is_qnan(self) -> bool: ... def is_signed(self) -> bool: ... def is_snan(self) -> bool: ... def is_subnormal(self, context: Optional[Context] = ...) -> bool: ... def is_zero(self) -> bool: ... def ln(self, context: Optional[Context] = ...) -> Decimal: ... def log10(self, context: Optional[Context] = ...) -> Decimal: ... def logb(self, context: Optional[Context] = ...) -> Decimal: ... def logical_and(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def logical_invert(self, context: Optional[Context] = ...) -> Decimal: ... def logical_or(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def logical_xor(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def max_mag(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... 
def min_mag(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def next_minus(self, context: Optional[Context] = ...) -> Decimal: ... def next_plus(self, context: Optional[Context] = ...) -> Decimal: ... def next_toward(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def number_class(self, context: Optional[Context] = ...) -> str: ... def radix(self) -> Decimal: ... def rotate(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def scaleb(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def shift(self, other: _Decimal, context: Optional[Context] = ...) -> Decimal: ... def __reduce__(self) -> Tuple[Type[Decimal], Tuple[str]]: ... def __copy__(self) -> Decimal: ... def __deepcopy__(self, memo: Any) -> Decimal: ... def __format__(self, specifier: str, context: Optional[Context] = ...) -> str: ... class _ContextManager(object): new_context: Context saved_context: Context def __init__(self, new_context: Context) -> None: ... def __enter__(self) -> Context: ... def __exit__(self, t: Optional[Type[BaseException]], v: Optional[BaseException], tb: Optional[TracebackType]) -> None: ... _TrapType = Type[DecimalException] class Context(object): prec: int rounding: str Emin: int Emax: int capitals: int if sys.version_info >= (3,): clamp: int else: _clamp: int traps: Dict[_TrapType, bool] flags: Dict[_TrapType, bool] if sys.version_info >= (3,): def __init__(self, prec: Optional[int] = ..., rounding: Optional[str] = ..., Emin: Optional[int] = ..., Emax: Optional[int] = ..., capitals: Optional[int] = ..., clamp: Optional[int] = ..., flags: Union[None, Dict[_TrapType, bool], Container[_TrapType]] = ..., traps: Union[None, Dict[_TrapType, bool], Container[_TrapType]] = ..., _ignored_flags: Optional[List[_TrapType]] = ...) -> None: ... 
else: def __init__(self, prec: Optional[int] = ..., rounding: Optional[str] = ..., traps: Union[None, Dict[_TrapType, bool], Container[_TrapType]] = ..., flags: Union[None, Dict[_TrapType, bool], Container[_TrapType]] = ..., Emin: Optional[int] = ..., Emax: Optional[int] = ..., capitals: Optional[int] = ..., _clamp: Optional[int] = ..., _ignored_flags: Optional[List[_TrapType]] = ...) -> None: ... if sys.version_info >= (3,): # __setattr__() only allows to set a specific set of attributes, # already defined above. def __delattr__(self, name: str) -> None: ... def __reduce__(self) -> Tuple[Type[Context], Tuple[Any, ...]]: ... def clear_flags(self) -> None: ... if sys.version_info >= (3,): def clear_traps(self) -> None: ... def copy(self) -> Context: ... def __copy__(self) -> Context: ... __hash__: Any = ... def Etiny(self) -> int: ... def Etop(self) -> int: ... def create_decimal(self, num: _DecimalNew = ...) -> Decimal: ... def create_decimal_from_float(self, f: float) -> Decimal: ... def abs(self, a: _Decimal) -> Decimal: ... def add(self, a: _Decimal, b: _Decimal) -> Decimal: ... def canonical(self, a: Decimal) -> Decimal: ... def compare(self, a: _Decimal, b: _Decimal) -> Decimal: ... def compare_signal(self, a: _Decimal, b: _Decimal) -> Decimal: ... def compare_total(self, a: _Decimal, b: _Decimal) -> Decimal: ... def compare_total_mag(self, a: _Decimal, b: _Decimal) -> Decimal: ... def copy_abs(self, a: _Decimal) -> Decimal: ... def copy_decimal(self, a: _Decimal) -> Decimal: ... def copy_negate(self, a: _Decimal) -> Decimal: ... def copy_sign(self, a: _Decimal, b: _Decimal) -> Decimal: ... def divide(self, a: _Decimal, b: _Decimal) -> Decimal: ... def divide_int(self, a: _Decimal, b: _Decimal) -> Decimal: ... def divmod(self, a: _Decimal, b: _Decimal) -> Tuple[Decimal, Decimal]: ... def exp(self, a: _Decimal) -> Decimal: ... def fma(self, a: _Decimal, b: _Decimal, c: _Decimal) -> Decimal: ... def is_canonical(self, a: _Decimal) -> bool: ... 
def is_finite(self, a: _Decimal) -> bool: ... def is_infinite(self, a: _Decimal) -> bool: ... def is_nan(self, a: _Decimal) -> bool: ... def is_normal(self, a: _Decimal) -> bool: ... def is_qnan(self, a: _Decimal) -> bool: ... def is_signed(self, a: _Decimal) -> bool: ... def is_snan(self, a: _Decimal) -> bool: ... def is_subnormal(self, a: _Decimal) -> bool: ... def is_zero(self, a: _Decimal) -> bool: ... def ln(self, a: _Decimal) -> Decimal: ... def log10(self, a: _Decimal) -> Decimal: ... def logb(self, a: _Decimal) -> Decimal: ... def logical_and(self, a: _Decimal, b: _Decimal) -> Decimal: ... def logical_invert(self, a: _Decimal) -> Decimal: ... def logical_or(self, a: _Decimal, b: _Decimal) -> Decimal: ... def logical_xor(self, a: _Decimal, b: _Decimal) -> Decimal: ... def max(self, a: _Decimal, b: _Decimal) -> Decimal: ... def max_mag(self, a: _Decimal, b: _Decimal) -> Decimal: ... def min(self, a: _Decimal, b: _Decimal) -> Decimal: ... def min_mag(self, a: _Decimal, b: _Decimal) -> Decimal: ... def minus(self, a: _Decimal) -> Decimal: ... def multiply(self, a: _Decimal, b: _Decimal) -> Decimal: ... def next_minus(self, a: _Decimal) -> Decimal: ... def next_plus(self, a: _Decimal) -> Decimal: ... def next_toward(self, a: _Decimal, b: _Decimal) -> Decimal: ... def normalize(self, a: _Decimal) -> Decimal: ... def number_class(self, a: _Decimal) -> str: ... def plus(self, a: _Decimal) -> Decimal: ... def power(self, a: _Decimal, b: _Decimal, modulo: Optional[_Decimal] = ...) -> Decimal: ... def quantize(self, a: _Decimal, b: _Decimal) -> Decimal: ... def radix(self) -> Decimal: ... def remainder(self, a: _Decimal, b: _Decimal) -> Decimal: ... def remainder_near(self, a: _Decimal, b: _Decimal) -> Decimal: ... def rotate(self, a: _Decimal, b: _Decimal) -> Decimal: ... def same_quantum(self, a: _Decimal, b: _Decimal) -> bool: ... def scaleb(self, a: _Decimal, b: _Decimal) -> Decimal: ... def shift(self, a: _Decimal, b: _Decimal) -> Decimal: ... 
def sqrt(self, a: _Decimal) -> Decimal: ... def subtract(self, a: _Decimal, b: _Decimal) -> Decimal: ... def to_eng_string(self, a: _Decimal) -> str: ... def to_sci_string(self, a: _Decimal) -> str: ... def to_integral_exact(self, a: _Decimal) -> Decimal: ... def to_integral_value(self, a: _Decimal) -> Decimal: ... def to_integral(self, a: _Decimal) -> Decimal: ... DefaultContext: Context BasicContext: Context ExtendedContext: Context mypy-0.761/mypy/typeshed/stdlib/2and3/difflib.pyi0000644€tŠÔÚ€2›s®0000000741513576752252026061 0ustar jukkaDROPBOX\Domain Users00000000000000# Based on https://docs.python.org/2.7/library/difflib.html and https://docs.python.org/3.2/library/difflib.html import sys from typing import ( TypeVar, Callable, Iterable, Iterator, List, NamedTuple, Sequence, Tuple, Generic, Optional, Text, Union, AnyStr ) _T = TypeVar('_T') if sys.version_info >= (3,): _StrType = Text else: # Aliases can't point to type vars, so we need to redeclare AnyStr _StrType = TypeVar('_StrType', Text, bytes) _JunkCallback = Union[Callable[[Text], bool], Callable[[str], bool]] class Match(NamedTuple): a: int b: int size: int class SequenceMatcher(Generic[_T]): def __init__(self, isjunk: Optional[Callable[[_T], bool]] = ..., a: Sequence[_T] = ..., b: Sequence[_T] = ..., autojunk: bool = ...) -> None: ... def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... def set_seq1(self, a: Sequence[_T]) -> None: ... def set_seq2(self, b: Sequence[_T]) -> None: ... def find_longest_match(self, alo: int, ahi: int, blo: int, bhi: int) -> Match: ... def get_matching_blocks(self) -> List[Match]: ... def get_opcodes(self) -> List[Tuple[str, int, int, int, int]]: ... def get_grouped_opcodes(self, n: int = ... ) -> Iterable[List[Tuple[str, int, int, int, int]]]: ... def ratio(self) -> float: ... def quick_ratio(self) -> float: ... def real_quick_ratio(self) -> float: ... 
def get_close_matches(word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = ..., cutoff: float = ...) -> List[Sequence[_T]]: ... class Differ: def __init__(self, linejunk: _JunkCallback = ..., charjunk: _JunkCallback = ...) -> None: ... def compare(self, a: Sequence[_StrType], b: Sequence[_StrType]) -> Iterator[_StrType]: ... def IS_LINE_JUNK(line: _StrType) -> bool: ... def IS_CHARACTER_JUNK(line: _StrType) -> bool: ... def unified_diff(a: Sequence[_StrType], b: Sequence[_StrType], fromfile: _StrType = ..., tofile: _StrType = ..., fromfiledate: _StrType = ..., tofiledate: _StrType = ..., n: int = ..., lineterm: _StrType = ...) -> Iterator[_StrType]: ... def context_diff(a: Sequence[_StrType], b: Sequence[_StrType], fromfile: _StrType = ..., tofile: _StrType = ..., fromfiledate: _StrType = ..., tofiledate: _StrType = ..., n: int = ..., lineterm: _StrType = ...) -> Iterator[_StrType]: ... def ndiff(a: Sequence[_StrType], b: Sequence[_StrType], linejunk: _JunkCallback = ..., charjunk: _JunkCallback = ... ) -> Iterator[_StrType]: ... class HtmlDiff(object): def __init__(self, tabsize: int = ..., wrapcolumn: int = ..., linejunk: _JunkCallback = ..., charjunk: _JunkCallback = ... ) -> None: ... def make_file(self, fromlines: Sequence[_StrType], tolines: Sequence[_StrType], fromdesc: _StrType = ..., todesc: _StrType = ..., context: bool = ..., numlines: int = ...) -> _StrType: ... def make_table(self, fromlines: Sequence[_StrType], tolines: Sequence[_StrType], fromdesc: _StrType = ..., todesc: _StrType = ..., context: bool = ..., numlines: int = ...) -> _StrType: ... def restore(delta: Iterable[_StrType], which: int) -> Iterator[_StrType]: ... if sys.version_info >= (3, 5): def diff_bytes( dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], a: Sequence[bytes], b: Sequence[bytes], fromfile: bytes = ..., tofile: bytes = ..., fromfiledate: bytes = ..., tofiledate: bytes = ..., n: int = ..., lineterm: bytes = ... 
) -> Iterator[bytes]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/dis.pyi0000644€tŠÔÚ€2›s®0000000551613576752252025241 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Callable, List, Union, Iterator, Tuple, Optional, Any, IO, NamedTuple, Dict import sys import types from opcode import (hasconst as hasconst, hasname as hasname, hasjrel as hasjrel, hasjabs as hasjabs, haslocal as haslocal, hascompare as hascompare, hasfree as hasfree, cmp_op as cmp_op, opname as opname, opmap as opmap, HAVE_ARGUMENT as HAVE_ARGUMENT, EXTENDED_ARG as EXTENDED_ARG) if sys.version_info >= (3, 4): from opcode import stack_effect as stack_effect if sys.version_info >= (3, 6): from opcode import hasnargs as hasnargs # Strictly this should not have to include Callable, but mypy doesn't use FunctionType # for functions (python/mypy#3171) _have_code = Union[types.MethodType, types.FunctionType, types.CodeType, type, Callable[..., Any]] _have_code_or_string = Union[_have_code, str, bytes] if sys.version_info >= (3, 4): class Instruction(NamedTuple): opname: str opcode: int arg: Optional[int] argval: Any argrepr: str offset: int starts_line: Optional[int] is_jump_target: bool class Bytecode: codeobj: types.CodeType first_line: int def __init__(self, x: _have_code_or_string, *, first_line: Optional[int] = ..., current_offset: Optional[int] = ...) -> None: ... def __iter__(self) -> Iterator[Instruction]: ... def __repr__(self) -> str: ... def info(self) -> str: ... def dis(self) -> str: ... @classmethod def from_traceback(cls, tb: types.TracebackType) -> Bytecode: ... COMPILER_FLAG_NAMES: Dict[int, str] def findlabels(code: _have_code) -> List[int]: ... def findlinestarts(code: _have_code) -> Iterator[Tuple[int, int]]: ... if sys.version_info >= (3, 0): def pretty_flags(flags: int) -> str: ... def code_info(x: _have_code_or_string) -> str: ... if sys.version_info >= (3, 4): def dis(x: _have_code_or_string = ..., *, file: Optional[IO[str]] = ...) -> None: ... 
def distb(tb: Optional[types.TracebackType] = ..., *, file: Optional[IO[str]] = ...) -> None: ... def disassemble(co: _have_code, lasti: int = ..., *, file: Optional[IO[str]] = ...) -> None: ... def disco(co: _have_code, lasti: int = ..., *, file: Optional[IO[str]] = ...) -> None: ... def show_code(co: _have_code, *, file: Optional[IO[str]] = ...) -> None: ... def get_instructions(x: _have_code, *, first_line: Optional[int] = ...) -> Iterator[Instruction]: ... else: def dis(x: _have_code_or_string = ...) -> None: ... def distb(tb: types.TracebackType = ...) -> None: ... def disassemble(co: _have_code, lasti: int = ...) -> None: ... def disco(co: _have_code, lasti: int = ...) -> None: ... if sys.version_info >= (3, 0): def show_code(co: _have_code) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/0000755€tŠÔÚ€2›s®0000000000013576752267025762 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252030224 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/archive_util.pyi0000644€tŠÔÚ€2›s®0000000101513576752252031152 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.archive_util from typing import Optional def make_archive(base_name: str, format: str, root_dir: Optional[str] = ..., base_dir: Optional[str] = ..., verbose: int = ..., dry_run: int = ...) -> str: ... def make_tarball(base_name: str, base_dir: str, compress: Optional[str] = ..., verbose: int = ..., dry_run: int = ...) -> str: ... def make_zipfile(base_name: str, base_dir: str, verbose: int = ..., dry_run: int = ...) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/bcppcompiler.pyi0000644€tŠÔÚ€2›s®0000000016313576752252031156 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.bcppcompiler from distutils.ccompiler import CCompiler class BCPPCompiler(CCompiler): ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/ccompiler.pyi0000644€tŠÔÚ€2›s®0000001546113576752252030463 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.ccompiler from typing import Any, Callable, List, Optional, Tuple, Union _Macro = Union[Tuple[str], Tuple[str, str]] def gen_lib_options(compiler: CCompiler, library_dirs: List[str], runtime_library_dirs: List[str], libraries: List[str]) -> List[str]: ... def gen_preprocess_options(macros: List[_Macro], include_dirs: List[str]) -> List[str]: ... def get_default_compiler(osname: Optional[str] = ..., platform: Optional[str] = ...) -> str: ... def new_compiler(plat: Optional[str] = ..., compiler: Optional[str] = ..., verbose: int = ..., dry_run: int = ..., force: int = ...) -> CCompiler: ... def show_compilers() -> None: ... class CCompiler: def __init__(self, verbose: int = ..., dry_run: int = ..., force: int = ...) -> None: ... def add_include_dir(self, dir: str) -> None: ... def set_include_dirs(self, dirs: List[str]) -> None: ... def add_library(self, libname: str) -> None: ... def set_libraries(self, libnames: List[str]) -> None: ... def add_library_dir(self, dir: str) -> None: ... def set_library_dirs(self, dirs: List[str]) -> None: ... def add_runtime_library_dir(self, dir: str) -> None: ... def set_runtime_library_dirs(self, dirs: List[str]) -> None: ... def define_macro(self, name: str, value: Optional[str] = ...) -> None: ... def undefine_macro(self, name: str) -> None: ... def add_link_object(self, object: str) -> None: ... def set_link_objects(self, objects: List[str]) -> None: ... def detect_language(self, sources: Union[str, List[str]]) -> Optional[str]: ... def find_library_file(self, dirs: List[str], lib: str, debug: bool = ...) -> Optional[str]: ... def has_function(self, funcname: str, includes: Optional[List[str]] = ..., include_dirs: Optional[List[str]] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ...) -> bool: ... 
def library_dir_option(self, dir: str) -> str: ... def library_option(self, lib: str) -> str: ... def runtime_library_dir_option(self, dir: str) -> str: ... def set_executables(self, **args: str) -> None: ... def compile(self, sources: List[str], output_dir: Optional[str] = ..., macros: Optional[_Macro] = ..., include_dirs: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., depends: Optional[List[str]] = ...) -> List[str]: ... def create_static_lib(self, objects: List[str], output_libname: str, output_dir: Optional[str] = ..., debug: bool = ..., target_lang: Optional[str] = ...) -> None: ... def link(self, target_desc: str, objects: List[str], output_filename: str, output_dir: Optional[str] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ..., runtime_library_dirs: Optional[List[str]] = ..., export_symbols: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., build_temp: Optional[str] = ..., target_lang: Optional[str] = ...) -> None: ... def link_executable(self, objects: List[str], output_progname: str, output_dir: Optional[str] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ..., runtime_library_dirs: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., target_lang: Optional[str] = ...) -> None: ... def link_shared_lib(self, objects: List[str], output_libname: str, output_dir: Optional[str] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ..., runtime_library_dirs: Optional[List[str]] = ..., export_symbols: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., build_temp: Optional[str] = ..., target_lang: Optional[str] = ...) -> None: ... 
def link_shared_object(self, objects: List[str], output_filename: str, output_dir: Optional[str] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ..., runtime_library_dirs: Optional[List[str]] = ..., export_symbols: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., build_temp: Optional[str] = ..., target_lang: Optional[str] = ...) -> None: ... def preprocess(self, source: str, output_file: Optional[str] = ..., macros: Optional[List[_Macro]] = ..., include_dirs: Optional[List[str]] = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ...) -> None: ... def executable_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... def library_filename(self, libname: str, lib_type: str = ..., strip_dir: int = ..., output_dir: str = ...) -> str: ... def object_filenames(self, source_filenames: List[str], strip_dir: int = ..., output_dir: str = ...) -> List[str]: ... def shared_object_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... def execute(self, func: Callable[..., None], args: Tuple[Any, ...], msg: Optional[str] = ..., level: int = ...) -> None: ... def spawn(self, cmd: List[str]) -> None: ... def mkpath(self, name: str, mode: int = ...) -> None: ... def move_file(self, src: str, dst: str) -> str: ... def announce(self, msg: str, level: int = ...) -> None: ... def warn(self, msg: str) -> None: ... def debug_print(self, msg: str) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/cmd.pyi0000644€tŠÔÚ€2›s®0000000503613576752252027246 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.cmd from typing import Callable, List, Tuple, Union, Optional, Iterable, Any, Text from abc import abstractmethod from distutils.dist import Distribution class Command: sub_commands: List[Tuple[str, Union[Callable[[], bool], str, None]]] def __init__(self, dist: Distribution) -> None: ... @abstractmethod def initialize_options(self) -> None: ... @abstractmethod def finalize_options(self) -> None: ... @abstractmethod def run(self) -> None: ... def announce(self, msg: Text, level: int = ...) -> None: ... def debug_print(self, msg: Text) -> None: ... def ensure_string(self, option: str, default: Optional[str] = ...) -> None: ... def ensure_string_list(self, option: Union[str, List[str]]) -> None: ... def ensure_filename(self, option: str) -> None: ... def ensure_dirname(self, option: str) -> None: ... def get_command_name(self) -> str: ... def set_undefined_options(self, src_cmd: Text, *option_pairs: Tuple[str, str]) -> None: ... def get_finalized_command(self, command: Text, create: int = ...) -> Command: ... def reinitialize_command(self, command: Union[Command, Text], reinit_subcommands: int = ...) -> Command: ... def run_command(self, command: Text) -> None: ... def get_sub_commands(self) -> List[str]: ... def warn(self, msg: Text) -> None: ... def execute(self, func: Callable[..., Any], args: Iterable[Any], msg: Optional[Text] = ..., level: int = ...) -> None: ... def mkpath(self, name: str, mode: int = ...) -> None: ... def copy_file(self, infile: str, outfile: str, preserve_mode: int = ..., preserve_times: int = ..., link: Optional[str] = ..., level: Any = ...) -> Tuple[str, bool]: ... # level is not used def copy_tree(self, infile: str, outfile: str, preserve_mode: int = ..., preserve_times: int = ..., preserve_symlinks: int = ..., level: Any = ...) -> List[str]: ... 
# level is not used def move_file(self, src: str, dest: str, level: Any = ...) -> str: ... # level is not used def spawn(self, cmd: Iterable[str], search_path: int = ..., level: Any = ...) -> None: ... # level is not used def make_archive(self, base_name: str, format: str, root_dir: Optional[str] = ..., base_dir: Optional[str] = ..., owner: Optional[str] = ..., group: Optional[str] = ...) -> str: ... def make_file(self, infiles: Union[str, List[str], Tuple[str]], outfile: str, func: Callable[..., Any], args: List[Any], exec_msg: Optional[str] = ..., skip_msg: Optional[str] = ..., level: Any = ...) -> None: ... # level is not used mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/0000755€tŠÔÚ€2›s®0000000000013576752267027400 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031642 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/bdist.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031210 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/bdist_dumb.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032217 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/bdist_msi.pyi0000644€tŠÔÚ€2›s®0000000026613576752252032076 0ustar jukkaDROPBOX\Domain Users00000000000000from distutils.cmd import Command class bdist_msi(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/bdist_packager.pyi0000644€tŠÔÚ€2›s®0000000000013576752252033045 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/bdist_rpm.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032066 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/bdist_wininst.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032763 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/build.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031202 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/build_clib.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032173 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/build_ext.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032062 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/build_py.pyi0000644€tŠÔÚ€2›s®0000000042513576752252031725 0ustar jukkaDROPBOX\Domain Users00000000000000from distutils.cmd import Command import sys if sys.version_info >= (3,): class build_py(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... class build_py_2to3(build_py): ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/build_scripts.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032751 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/check.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031160 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/clean.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031165 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/config.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031350 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/install.pyi0000644€tŠÔÚ€2›s®0000000052413576752252031564 0ustar jukkaDROPBOX\Domain Users00000000000000from distutils.cmd import Command from typing import Optional, Text class install(Command): user: bool prefix: Optional[Text] home: Optional[Text] root: Optional[Text] install_lib: Optional[Text] def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/install_data.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032542 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/install_headers.pyi0000644€tŠÔÚ€2›s®0000000000013576752252033244 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/install_lib.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032377 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/install_scripts.pyi0000644€tŠÔÚ€2›s®0000000000013576752252033320 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/register.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031727 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/command/sdist.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031231 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/core.pyi0000644€tŠÔÚ€2›s®0000000364713576752252027441 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.core from typing import Any, List, Mapping, Optional, Tuple, Type, Union from distutils.cmd import Command as Command from distutils.dist import Distribution as Distribution from distutils.extension import Extension as Extension def setup(name: str = ..., version: str = ..., description: str = ..., long_description: str = ..., author: str = ..., author_email: str = ..., maintainer: str = ..., maintainer_email: str = ..., url: str = ..., download_url: str = ..., packages: List[str] = ..., py_modules: List[str] = ..., scripts: List[str] = ..., ext_modules: List[Extension] = ..., classifiers: List[str] = ..., distclass: Type[Distribution] = ..., script_name: str = ..., script_args: List[str] = ..., options: Mapping[str, Any] = ..., license: str = ..., keywords: Union[List[str], str] = ..., platforms: Union[List[str], str] = ..., cmdclass: 
Mapping[str, Type[Command]] = ..., data_files: List[Tuple[str, List[str]]] = ..., package_dir: Mapping[str, str] = ..., obsoletes: List[str] = ..., provides: List[str] = ..., requires: List[str] = ..., command_packages: List[str] = ..., command_options: Mapping[str, Mapping[str, Tuple[Any, Any]]] = ..., package_data: Mapping[str, List[str]] = ..., include_package_data: bool = ..., libraries: List[str] = ..., headers: List[str] = ..., ext_package: str = ..., include_dirs: List[str] = ..., password: str = ..., fullname: str = ..., **attrs: Any) -> None: ... def run_setup(script_name: str, script_args: Optional[List[str]] = ..., stop_after: str = ...) -> Distribution: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/cygwinccompiler.pyi0000644€tŠÔÚ€2›s®0000000026213576752252031675 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.cygwinccompiler from distutils.unixccompiler import UnixCCompiler class CygwinCCompiler(UnixCCompiler): ... class Mingw32CCompiler(CygwinCCompiler): ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/debug.pyi0000644€tŠÔÚ€2›s®0000000005113576752252027561 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.debug DEBUG: bool mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/dep_util.pyi0000644€tŠÔÚ€2›s®0000000045713576752252030312 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.dep_util from typing import List, Tuple def newer(source: str, target: str) -> bool: ... def newer_pairwise(sources: List[str], targets: List[str]) -> List[Tuple[str, str]]: ... def newer_group(sources: List[str], target: str, missing: str = ...) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/dir_util.pyi0000644€tŠÔÚ€2›s®0000000117613576752252030317 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.dir_util from typing import List def mkpath(name: str, mode: int = ..., verbose: int = ..., dry_run: int = ...) -> List[str]: ... 
def create_tree(base_dir: str, files: List[str], mode: int = ..., verbose: int = ..., dry_run: int = ...) -> None: ... def copy_tree(src: str, dst: str, preserve_mode: int = ..., preserve_times: int = ..., preserve_symlinks: int = ..., update: int = ..., verbose: int = ..., dry_run: int = ...) -> List[str]: ... def remove_tree(directory: str, verbose: int = ..., dry_run: int = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/dist.pyi0000644€tŠÔÚ€2›s®0000000075413576752252027450 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.dist from distutils.cmd import Command from typing import Any, Mapping, Optional, Dict, Tuple, Iterable, Text class Distribution: def __init__(self, attrs: Optional[Mapping[str, Any]] = ...) -> None: ... def get_option_dict(self, command: str) -> Dict[str, Tuple[str, Text]]: ... def parse_config_files(self, filenames: Optional[Iterable[Text]] = ...) -> None: ... def get_command_obj(self, command: str, create: bool = ...) -> Optional[Command]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/errors.pyi0000644€tŠÔÚ€2›s®0000000152413576752252030015 0ustar jukkaDROPBOX\Domain Users00000000000000class DistutilsError(Exception): ... class DistutilsModuleError(DistutilsError): ... class DistutilsClassError(DistutilsError): ... class DistutilsGetoptError(DistutilsError): ... class DistutilsArgError(DistutilsError): ... class DistutilsFileError(DistutilsError): ... class DistutilsOptionError(DistutilsError): ... class DistutilsSetupError(DistutilsError): ... class DistutilsPlatformError(DistutilsError): ... class DistutilsExecError(DistutilsError): ... class DistutilsInternalError(DistutilsError): ... class DistutilsTemplateError(DistutilsError): ... class DistutilsByteCompileError(DistutilsError): ... class CCompilerError(Exception): ... class PreprocessError(CCompilerError): ... class CompileError(CCompilerError): ... class LibError(CCompilerError): ... class LinkError(CCompilerError): ... 
class UnknownFileError(CCompilerError): ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/extension.pyi0000644€tŠÔÚ€2›s®0000000326113576752252030515 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.extension from typing import List, Optional, Tuple import sys class Extension: if sys.version_info >= (3,): def __init__(self, name: str, sources: List[str], include_dirs: List[str] = ..., define_macros: List[Tuple[str, Optional[str]]] = ..., undef_macros: List[str] = ..., library_dirs: List[str] = ..., libraries: List[str] = ..., runtime_library_dirs: List[str] = ..., extra_objects: List[str] = ..., extra_compile_args: List[str] = ..., extra_link_args: List[str] = ..., export_symbols: List[str] = ..., depends: List[str] = ..., language: str = ..., optional: bool = ...) -> None: ... else: def __init__(self, name: str, sources: List[str], include_dirs: List[str] = ..., define_macros: List[Tuple[str, Optional[str]]] = ..., undef_macros: List[str] = ..., library_dirs: List[str] = ..., libraries: List[str] = ..., runtime_library_dirs: List[str] = ..., extra_objects: List[str] = ..., extra_compile_args: List[str] = ..., extra_link_args: List[str] = ..., export_symbols: List[str] = ..., depends: List[str] = ..., language: str = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/fancy_getopt.pyi0000644€tŠÔÚ€2›s®0000000170213576752252031161 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.fancy_getopt from typing import ( Any, List, Mapping, Optional, Tuple, Union, TypeVar, overload, ) _Option = Tuple[str, Optional[str], str] _GR = Tuple[List[str], OptionDummy] def fancy_getopt(options: List[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: Optional[List[str]]) -> Union[List[str], _GR]: ... def wrap_text(text: str, width: int) -> List[str]: ... class FancyGetopt: def __init__(self, option_table: Optional[List[_Option]] = ...) -> None: ... 
# TODO kinda wrong, `getopt(object=object())` is invalid @overload def getopt(self, args: Optional[List[str]] = ...) -> _GR: ... @overload def getopt(self, args: Optional[List[str]], object: Any) -> List[str]: ... def get_option_order(self) -> List[Tuple[str, str]]: ... def generate_help(self, header: Optional[str] = ...) -> List[str]: ... class OptionDummy: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/file_util.pyi0000644€tŠÔÚ€2›s®0000000075613576752252030463 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.file_util from typing import Optional, Sequence, Tuple def copy_file(src: str, dst: str, preserve_mode: bool = ..., preserve_times: bool = ..., update: bool = ..., link: Optional[str] = ..., verbose: bool = ..., dry_run: bool = ...) -> Tuple[str, str]: ... def move_file(src: str, dst: str, verbose: bool = ..., dry_run: bool = ...) -> str: ... def write_file(filename: str, contents: Sequence[str]) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/filelist.pyi0000644€tŠÔÚ€2›s®0000000006413576752252030312 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.filelist class FileList: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/log.pyi0000644€tŠÔÚ€2›s®0000000141013576752252027254 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterable, Text DEBUG: int INFO: int WARN: int ERROR: int FATAL: int class Log: def __init__(self, threshold: int = ...) -> None: ... def log(self, level: int, msg: Text, *args: Any) -> None: ... def debug(self, msg: Text, *args: Any) -> None: ... def info(self, msg: Text, *args: Any) -> None: ... def warn(self, msg: Text, *args: Any) -> None: ... def error(self, msg: Text, *args: Any) -> None: ... def fatal(self, msg: Text, *args: Any) -> None: ... 
_LogFunc = Callable[[Text, Iterable[Any]], None] log: Callable[[int, Text, Iterable[Any]], None] debug: _LogFunc info: _LogFunc warn: _LogFunc error: _LogFunc fatal: _LogFunc def set_threshold(level: int) -> int: ... def set_verbosity(v: int) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/msvccompiler.pyi0000644€tŠÔÚ€2›s®0000000016313576752252031202 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.msvccompiler from distutils.ccompiler import CCompiler class MSVCCompiler(CCompiler): ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/spawn.pyi0000644€tŠÔÚ€2›s®0000000043613576752252027632 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.spawn from typing import List, Optional def spawn(cmd: List[str], search_path: bool = ..., verbose: bool = ..., dry_run: bool = ...) -> None: ... def find_executable(executable: str, path: Optional[str] = ...) -> Optional[str]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/sysconfig.pyi0000644€tŠÔÚ€2›s®0000000126413576752252030506 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.sysconfig from typing import Mapping, Optional, Union from distutils.ccompiler import CCompiler PREFIX: str EXEC_PREFIX: str def get_config_var(name: str) -> Union[int, str, None]: ... def get_config_vars(*args: str) -> Mapping[str, Union[int, str]]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... def get_python_inc(plat_specific: bool = ..., prefix: Optional[str] = ...) -> str: ... def get_python_lib(plat_specific: bool = ..., standard_lib: bool = ..., prefix: Optional[str] = ...) -> str: ... def customize_compiler(compiler: CCompiler) -> None: ... def set_python_build() -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/text_file.pyi0000644€tŠÔÚ€2›s®0000000137013576752252030463 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.text_file from typing import IO, List, Optional, Tuple, Union class TextFile: def __init__(self, filename: Optional[str] = ..., file: Optional[IO[str]] = ..., *, strip_comments: bool = ..., lstrip_ws: bool = ..., rstrip_ws: bool = ..., skip_blanks: bool = ..., join_lines: bool = ..., collapse_join: bool = ...) -> None: ... def open(self, filename: str) -> None: ... def close(self) -> None: ... def warn(self, msg: str, line: Union[List[int], Tuple[int, int], int] = ...) -> None: ... def readline(self) -> Optional[str]: ... def readlines(self) -> List[str]: ... def unreadline(self, line: str) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/unixccompiler.pyi0000644€tŠÔÚ€2›s®0000000016513576752252031362 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.unixccompiler from distutils.ccompiler import CCompiler class UnixCCompiler(CCompiler): ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/util.pyi0000644€tŠÔÚ€2›s®0000000157413576752252027463 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.util from typing import Any, Callable, List, Mapping, Optional, Tuple def get_platform() -> str: ... def convert_path(pathname: str) -> str: ... def change_root(new_root: str, pathname: str) -> str: ... def check_environ() -> None: ... def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... def split_quoted(s: str) -> List[str]: ... def execute(func: Callable[..., None], args: Tuple[Any, ...], msg: Optional[str] = ..., verbose: bool = ..., dry_run: bool = ...) -> None: ... def strtobool(val: str) -> bool: ... def byte_compile(py_files: List[str], optimize: int = ..., force: bool = ..., prefix: Optional[str] = ..., base_dir: Optional[str] = ..., verbose: bool = ..., dry_run: bool = ..., direct: Optional[bool] = ...) -> None: ... 
def rfc822_escape(header: str) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2and3/distutils/version.pyi0000644€tŠÔÚ€2›s®0000000353413576752252030171 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from abc import abstractmethod from typing import Any, Optional, TypeVar, Union, Pattern, Text, Tuple _T = TypeVar('_T', bound=Version) class Version: def __repr__(self) -> str: ... if sys.version_info >= (3,): def __eq__(self, other: object) -> bool: ... def __lt__(self: _T, other: Union[_T, str]) -> bool: ... def __le__(self: _T, other: Union[_T, str]) -> bool: ... def __gt__(self: _T, other: Union[_T, str]) -> bool: ... def __ge__(self: _T, other: Union[_T, str]) -> bool: ... @abstractmethod def __init__(self, vstring: Optional[Text] = ...) -> None: ... @abstractmethod def parse(self: _T, vstring: Text) -> _T: ... @abstractmethod def __str__(self) -> str: ... if sys.version_info >= (3,): @abstractmethod def _cmp(self: _T, other: Union[_T, str]) -> bool: ... else: @abstractmethod def __cmp__(self: _T, other: Union[_T, str]) -> bool: ... class StrictVersion(Version): version_re: Pattern[str] version: Tuple[int, int, int] prerelease: Optional[Tuple[Text, int]] def __init__(self, vstring: Optional[Text] = ...) -> None: ... def parse(self: _T, vstring: Text) -> _T: ... def __str__(self) -> str: ... if sys.version_info >= (3,): def _cmp(self: _T, other: Union[_T, str]) -> bool: ... else: def __cmp__(self: _T, other: Union[_T, str]) -> bool: ... class LooseVersion(Version): component_re: Pattern[str] vstring: Text version: Tuple[Union[Text, int], ...] def __init__(self, vstring: Optional[Text] = ...) -> None: ... def parse(self: _T, vstring: Text) -> _T: ... def __str__(self) -> str: ... if sys.version_info >= (3,): def _cmp(self: _T, other: Union[_T, str]) -> bool: ... else: def __cmp__(self: _T, other: Union[_T, str]) -> bool: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/doctest.pyi0000644€tŠÔÚ€2›s®0000001530413576752252026123 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Type, Union import sys import types import unittest class TestResults(NamedTuple): failed: int attempted: int OPTIONFLAGS_BY_NAME: Dict[str, int] def register_optionflag(name: str) -> int: ... DONT_ACCEPT_TRUE_FOR_1: int DONT_ACCEPT_BLANKLINE: int NORMALIZE_WHITESPACE: int ELLIPSIS: int SKIP: int IGNORE_EXCEPTION_DETAIL: int COMPARISON_FLAGS: int REPORT_UDIFF: int REPORT_CDIFF: int REPORT_NDIFF: int REPORT_ONLY_FIRST_FAILURE: int if sys.version_info >= (3, 4): FAIL_FAST: int REPORTING_FLAGS: int BLANKLINE_MARKER: str ELLIPSIS_MARKER: str class Example: source: str want: str exc_msg: Optional[str] lineno: int indent: int options: Dict[int, bool] def __init__(self, source: str, want: str, exc_msg: Optional[str] = ..., lineno: int = ..., indent: int = ..., options: Optional[Dict[int, bool]] = ...) -> None: ... def __hash__(self) -> int: ... class DocTest: examples: List[Example] globs: Dict[str, Any] name: str filename: Optional[str] lineno: Optional[int] docstring: Optional[str] def __init__(self, examples: List[Example], globs: Dict[str, Any], name: str, filename: Optional[str], lineno: Optional[int], docstring: Optional[str]) -> None: ... def __hash__(self) -> int: ... def __lt__(self, other: DocTest) -> bool: ... class DocTestParser: def parse(self, string: str, name: str = ...) -> List[Union[str, Example]]: ... def get_doctest(self, string: str, globs: Dict[str, Any], name: str, filename: Optional[str], lineno: Optional[int]) -> DocTest: ... def get_examples(self, string: str, name: str = ...) -> List[Example]: ... class DocTestFinder: def __init__(self, verbose: bool = ..., parser: DocTestParser = ..., recurse: bool = ..., exclude_empty: bool = ...) -> None: ... 
def find(self, obj: object, name: Optional[str] = ..., module: Union[None, bool, types.ModuleType] = ..., globs: Optional[Dict[str, Any]] = ..., extraglobs: Optional[Dict[str, Any]] = ...) -> List[DocTest]: ... _Out = Callable[[str], Any] _ExcInfo = Tuple[Type[BaseException], BaseException, types.TracebackType] class DocTestRunner: DIVIDER: str optionflags: int original_optionflags: int tries: int failures: int test: DocTest def __init__(self, checker: Optional[OutputChecker] = ..., verbose: Optional[bool] = ..., optionflags: int = ...) -> None: ... def report_start(self, out: _Out, test: DocTest, example: Example) -> None: ... def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: _ExcInfo) -> None: ... def run(self, test: DocTest, compileflags: Optional[int] = ..., out: Optional[_Out] = ..., clear_globs: bool = ...) -> TestResults: ... def summarize(self, verbose: Optional[bool] = ...) -> TestResults: ... def merge(self, other: DocTestRunner) -> None: ... class OutputChecker: def check_output(self, want: str, got: str, optionflags: int) -> bool: ... def output_difference(self, example: Example, got: str, optionflags: int) -> str: ... class DocTestFailure(Exception): test: DocTest example: Example got: str def __init__(self, test: DocTest, example: Example, got: str) -> None: ... class UnexpectedException(Exception): test: DocTest example: Example exc_info: _ExcInfo def __init__(self, test: DocTest, example: Example, exc_info: _ExcInfo) -> None: ... class DebugRunner(DocTestRunner): ... 
master: Optional[DocTestRunner] def testmod(m: Optional[types.ModuleType] = ..., name: Optional[str] = ..., globs: Dict[str, Any] = ..., verbose: Optional[bool] = ..., report: bool = ..., optionflags: int = ..., extraglobs: Dict[str, Any] = ..., raise_on_error: bool = ..., exclude_empty: bool = ...) -> TestResults: ... def testfile(filename: str, module_relative: bool = ..., name: Optional[str] = ..., package: Union[None, str, types.ModuleType] = ..., globs: Optional[Dict[str, Any]] = ..., verbose: Optional[bool] = ..., report: bool = ..., optionflags: int = ..., extraglobs: Optional[Dict[str, Any]] = ..., raise_on_error: bool = ..., parser: DocTestParser = ..., encoding: Optional[str] = ...) -> TestResults: ... def run_docstring_examples(f: object, globs: Dict[str, Any], verbose: bool = ..., name: str = ..., compileflags: Optional[int] = ..., optionflags: int = ...) -> None: ... def set_unittest_reportflags(flags: int) -> int: ... class DocTestCase(unittest.TestCase): def __init__(self, test: DocTest, optionflags: int = ..., setUp: Optional[Callable[[DocTest], Any]] = ..., tearDown: Optional[Callable[[DocTest], Any]] = ..., checker: Optional[OutputChecker] = ...) -> None: ... def setUp(self) -> None: ... def tearDown(self) -> None: ... def runTest(self) -> None: ... def format_failure(self, err: str) -> str: ... def debug(self) -> None: ... def id(self) -> str: ... def __hash__(self) -> int: ... def shortDescription(self) -> str: ... class SkipDocTestCase(DocTestCase): def __init__(self, module: types.ModuleType) -> None: ... def setUp(self) -> None: ... def test_skip(self) -> None: ... def shortDescription(self) -> str: ... if sys.version_info >= (3, 4): class _DocTestSuite(unittest.TestSuite): ... 
else: _DocTestSuite = unittest.TestSuite def DocTestSuite(module: Union[None, str, types.ModuleType] = ..., globs: Optional[Dict[str, Any]] = ..., extraglobs: Optional[Dict[str, Any]] = ..., test_finder: Optional[DocTestFinder] = ..., **options: Any) -> _DocTestSuite: ... class DocFileCase(DocTestCase): def id(self) -> str: ... def format_failure(self, err: str) -> str: ... def DocFileTest(path: str, module_relative: bool = ..., package: Union[None, str, types.ModuleType] = ..., globs: Optional[Dict[str, Any]] = ..., parser: DocTestParser = ..., encoding: Optional[str] = ..., **options: Any) -> DocFileCase: ... def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: ... def script_from_examples(s: str) -> str: ... def testsource(module: Union[None, str, types.ModuleType], name: str) -> str: ... def debug_src(src: str, pm: bool = ..., globs: Optional[Dict[str, Any]] = ...) -> None: ... def debug_script(src: str, pm: bool = ..., globs: Optional[Dict[str, Any]] = ...) -> None: ... def debug(module: Union[None, str, types.ModuleType], name: str, pm: bool = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/ensurepip/0000755€tŠÔÚ€2›s®0000000000013576752267025750 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/ensurepip/__init__.pyi0000644€tŠÔÚ€2›s®0000000067413576752252030233 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import Optional import sys def version() -> str: ... if sys.version_info >= (3, 0): def bootstrap(*, root: Optional[str] = ..., upgrade: bool = ..., user: bool = ..., altinstall: bool = ..., default_pip: bool = ..., verbosity: int = ...) -> None: ... else: def bootstrap(root: Optional[str] = ..., upgrade: bool = ..., user: bool = ..., altinstall: bool = ..., default_pip: bool = ..., verbosity: int = ...) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/errno.pyi0000644€tŠÔÚ€2›s®0000000377113576752252025610 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for errno from typing import Mapping import sys errorcode: Mapping[int, str] EPERM: int ENOENT: int ESRCH: int EINTR: int EIO: int ENXIO: int E2BIG: int ENOEXEC: int EBADF: int ECHILD: int EAGAIN: int ENOMEM: int EACCES: int EFAULT: int ENOTBLK: int EBUSY: int EEXIST: int EXDEV: int ENODEV: int ENOTDIR: int EISDIR: int EINVAL: int ENFILE: int EMFILE: int ENOTTY: int ETXTBSY: int EFBIG: int ENOSPC: int ESPIPE: int EROFS: int EMLINK: int EPIPE: int EDOM: int ERANGE: int EDEADLCK: int ENAMETOOLONG: int ENOLCK: int ENOSYS: int ENOTEMPTY: int ELOOP: int EWOULDBLOCK: int ENOMSG: int EIDRM: int ECHRNG: int EL2NSYNC: int EL3HLT: int EL3RST: int ELNRNG: int EUNATCH: int ENOCSI: int EL2HLT: int EBADE: int EBADR: int EXFULL: int ENOANO: int EBADRQC: int EBADSLT: int EDEADLOCK: int EBFONT: int ENOSTR: int ENODATA: int ETIME: int ENOSR: int ENONET: int ENOPKG: int EREMOTE: int ENOLINK: int EADV: int ESRMNT: int ECOMM: int EPROTO: int EMULTIHOP: int EDOTDOT: int EBADMSG: int EOVERFLOW: int ENOTUNIQ: int EBADFD: int EREMCHG: int ELIBACC: int ELIBBAD: int ELIBSCN: int ELIBMAX: int ELIBEXEC: int EILSEQ: int ERESTART: int ESTRPIPE: int EUSERS: int ENOTSOCK: int EDESTADDRREQ: int EMSGSIZE: int EPROTOTYPE: int ENOPROTOOPT: int EPROTONOSUPPORT: int ESOCKTNOSUPPORT: int ENOTSUP: int EOPNOTSUPP: int EPFNOSUPPORT: int EAFNOSUPPORT: int EADDRINUSE: int EADDRNOTAVAIL: int ENETDOWN: int ENETUNREACH: int ENETRESET: int ECONNABORTED: int ECONNRESET: int ENOBUFS: int EISCONN: int ENOTCONN: int ESHUTDOWN: int ETOOMANYREFS: int ETIMEDOUT: int ECONNREFUSED: int EHOSTDOWN: int EHOSTUNREACH: int EALREADY: int EINPROGRESS: int ESTALE: int EUCLEAN: int ENOTNAM: int ENAVAIL: int EISNAM: int EREMOTEIO: int EDQUOT: int ECANCELED: int # undocumented EKEYEXPIRED: int # undocumented EKEYREJECTED: int # undocumented EKEYREVOKED: int # undocumented EMEDIUMTYPE: int 
# undocumented ENOKEY: int # undocumented ENOMEDIUM: int # undocumented ENOTRECOVERABLE: int # undocumented EOWNERDEAD: int # undocumented ERFKILL: int # undocumented mypy-0.761/mypy/typeshed/stdlib/2and3/filecmp.pyi0000644€tŠÔÚ€2›s®0000000312513576752252026073 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for filecmp (Python 2/3) import sys from typing import AnyStr, Callable, Dict, Generic, Iterable, List, Optional, Sequence, Tuple, Union, Text DEFAULT_IGNORES: List[str] def cmp(f1: Union[bytes, Text], f2: Union[bytes, Text], shallow: Union[int, bool] = ...) -> bool: ... def cmpfiles(a: AnyStr, b: AnyStr, common: Iterable[AnyStr], shallow: Union[int, bool] = ...) -> Tuple[List[AnyStr], List[AnyStr], List[AnyStr]]: ... class dircmp(Generic[AnyStr]): def __init__(self, a: AnyStr, b: AnyStr, ignore: Optional[Sequence[AnyStr]] = ..., hide: Optional[Sequence[AnyStr]] = ...) -> None: ... left: AnyStr right: AnyStr hide: Sequence[AnyStr] ignore: Sequence[AnyStr] # These properties are created at runtime by __getattr__ subdirs: Dict[AnyStr, dircmp[AnyStr]] same_files: List[AnyStr] diff_files: List[AnyStr] funny_files: List[AnyStr] common_dirs: List[AnyStr] common_files: List[AnyStr] common_funny: List[AnyStr] common: List[AnyStr] left_only: List[AnyStr] right_only: List[AnyStr] left_list: List[AnyStr] right_list: List[AnyStr] def report(self) -> None: ... def report_partial_closure(self) -> None: ... def report_full_closure(self) -> None: ... methodmap: Dict[str, Callable[[], None]] def phase0(self) -> None: ... def phase1(self) -> None: ... def phase2(self) -> None: ... def phase3(self) -> None: ... def phase4(self) -> None: ... def phase4_closure(self) -> None: ... if sys.version_info >= (3,): def clear_cache() -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/fileinput.pyi0000644€tŠÔÚ€2›s®0000000516013576752252026454 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, Callable, IO, AnyStr, Generic, Any, Text, Union, Iterator, Optional import os import sys if sys.version_info >= (3, 6): _Path = Union[Text, bytes, os.PathLike[Any]] else: _Path = Union[Text, bytes] if sys.version_info >= (3, 8): def input( files: Union[_Path, Iterable[_Path], None] = ..., inplace: bool = ..., backup: str = ..., *, mode: str = ..., openhook: Callable[[_Path, str], IO[AnyStr]] = ..., ) -> FileInput[AnyStr]: ... else: def input( files: Union[_Path, Iterable[_Path], None] = ..., inplace: bool = ..., backup: str = ..., bufsize: int = ..., mode: str = ..., openhook: Callable[[_Path, str], IO[AnyStr]] = ..., ) -> FileInput[AnyStr]: ... def close() -> None: ... def nextfile() -> None: ... def filename() -> str: ... def lineno() -> int: ... def filelineno() -> int: ... def fileno() -> int: ... def isfirstline() -> bool: ... def isstdin() -> bool: ... class FileInput(Iterable[AnyStr], Generic[AnyStr]): if sys.version_info >= (3, 8): def __init__( self, files: Union[None, _Path, Iterable[_Path]] = ..., inplace: bool = ..., backup: str = ..., *, mode: str = ..., openhook: Callable[[_Path, str], IO[AnyStr]] = ... ) -> None: ... else: def __init__( self, files: Union[None, _Path, Iterable[_Path]] = ..., inplace: bool = ..., backup: str = ..., bufsize: int = ..., mode: str = ..., openhook: Callable[[_Path, str], IO[AnyStr]] = ... ) -> None: ... def __del__(self) -> None: ... def close(self) -> None: ... if sys.version_info >= (3, 2): def __enter__(self) -> FileInput[AnyStr]: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... def __next__(self) -> AnyStr: ... def __getitem__(self, i: int) -> AnyStr: ... def nextfile(self) -> None: ... def readline(self) -> AnyStr: ... def filename(self) -> str: ... def lineno(self) -> int: ... 
def filelineno(self) -> int: ... def fileno(self) -> int: ... def isfirstline(self) -> bool: ... def isstdin(self) -> bool: ... def hook_compressed(filename: _Path, mode: str) -> IO[Any]: ... if sys.version_info >= (3, 6): def hook_encoded(encoding: str, errors: Optional[str] = ...) -> Callable[[_Path, str], IO[Any]]: ... else: def hook_encoded(encoding: str) -> Callable[[_Path, str], IO[Any]]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/formatter.pyi0000644€tŠÔÚ€2›s®0000001105713576752252026462 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/formatter.py # and https://github.com/python/cpython/blob/master/Lib/formatter.py from typing import Any, IO, List, Optional, Tuple AS_IS: None _FontType = Tuple[str, bool, bool, bool] _StylesType = Tuple[Any, ...] class NullFormatter: writer: Optional[NullWriter] def __init__(self, writer: Optional[NullWriter] = ...) -> None: ... def end_paragraph(self, blankline: int) -> None: ... def add_line_break(self) -> None: ... def add_hor_rule(self, *args, **kw) -> None: ... def add_label_data(self, format, counter: int, blankline: Optional[int] = ...) -> None: ... def add_flowing_data(self, data: str) -> None: ... def add_literal_data(self, data: str) -> None: ... def flush_softspace(self) -> None: ... def push_alignment(self, align: Optional[str]) -> None: ... def pop_alignment(self) -> None: ... def push_font(self, x: _FontType) -> None: ... def pop_font(self) -> None: ... def push_margin(self, margin: int) -> None: ... def pop_margin(self) -> None: ... def set_spacing(self, spacing: Optional[str]) -> None: ... def push_style(self, *styles: _StylesType) -> None: ... def pop_style(self, n: int = ...) -> None: ... def assert_line_data(self, flag: int = ...) -> None: ... 
class AbstractFormatter: writer: NullWriter align: Optional[str] align_stack: List[Optional[str]] font_stack: List[_FontType] margin_stack: List[int] spacing: Optional[str] style_stack: Any nospace: int softspace: int para_end: int parskip: int hard_break: int have_label: int def __init__(self, writer: NullWriter) -> None: ... def end_paragraph(self, blankline: int) -> None: ... def add_line_break(self) -> None: ... def add_hor_rule(self, *args, **kw) -> None: ... def add_label_data(self, format, counter: int, blankline: Optional[int] = ...) -> None: ... def format_counter(self, format, counter: int) -> str: ... def format_letter(self, case: str, counter: int) -> str: ... def format_roman(self, case: str, counter: int) -> str: ... def add_flowing_data(self, data: str) -> None: ... def add_literal_data(self, data: str) -> None: ... def flush_softspace(self) -> None: ... def push_alignment(self, align: Optional[str]) -> None: ... def pop_alignment(self) -> None: ... def push_font(self, font: _FontType) -> None: ... def pop_font(self) -> None: ... def push_margin(self, margin: int) -> None: ... def pop_margin(self) -> None: ... def set_spacing(self, spacing: Optional[str]) -> None: ... def push_style(self, *styles: _StylesType) -> None: ... def pop_style(self, n: int = ...) -> None: ... def assert_line_data(self, flag: int = ...) -> None: ... class NullWriter: def __init__(self) -> None: ... def flush(self) -> None: ... def new_alignment(self, align: Optional[str]) -> None: ... def new_font(self, font: _FontType) -> None: ... def new_margin(self, margin: int, level: int) -> None: ... def new_spacing(self, spacing: Optional[str]) -> None: ... def new_styles(self, styles) -> None: ... def send_paragraph(self, blankline: int) -> None: ... def send_line_break(self) -> None: ... def send_hor_rule(self, *args, **kw) -> None: ... def send_label_data(self, data: str) -> None: ... def send_flowing_data(self, data: str) -> None: ... 
def send_literal_data(self, data: str) -> None: ... class AbstractWriter(NullWriter): def new_alignment(self, align: Optional[str]) -> None: ... def new_font(self, font: _FontType) -> None: ... def new_margin(self, margin: int, level: int) -> None: ... def new_spacing(self, spacing: Optional[str]) -> None: ... def new_styles(self, styles) -> None: ... def send_paragraph(self, blankline: int) -> None: ... def send_line_break(self) -> None: ... def send_hor_rule(self, *args, **kw) -> None: ... def send_label_data(self, data: str) -> None: ... def send_flowing_data(self, data: str) -> None: ... def send_literal_data(self, data: str) -> None: ... class DumbWriter(NullWriter): file: IO[str] maxcol: int def __init__(self, file: Optional[IO[str]] = ..., maxcol: int = ...) -> None: ... def reset(self) -> None: ... def send_paragraph(self, blankline: int) -> None: ... def send_line_break(self) -> None: ... def send_hor_rule(self, *args, **kw) -> None: ... def send_literal_data(self, data: str) -> None: ... def send_flowing_data(self, data: str) -> None: ... def test(file: Optional[str] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/fractions.pyi0000644€tŠÔÚ€2›s®0000000631413576752252026447 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for fractions # See https://docs.python.org/3/library/fractions.html # # Note: these stubs are incomplete. The more complex type # signatures are currently omitted. Also see numbers.pyi. from typing import Optional, TypeVar, Union, overload, Any, Tuple from numbers import Real, Integral, Rational from decimal import Decimal import sys _ComparableNum = Union[int, float, Decimal, Real] @overload def gcd(a: int, b: int) -> int: ... @overload def gcd(a: Integral, b: int) -> Integral: ... @overload def gcd(a: int, b: Integral) -> Integral: ... @overload def gcd(a: Integral, b: Integral) -> Integral: ... 
class Fraction(Rational): @overload def __init__(self, numerator: Union[int, Rational] = ..., denominator: Optional[Union[int, Rational]] = ..., *, _normalize: bool = ...) -> None: ... @overload def __init__(self, value: float, *, _normalize: bool = ...) -> None: ... @overload def __init__(self, value: Decimal, *, _normalize: bool = ...) -> None: ... @overload def __init__(self, value: str, *, _normalize: bool = ...) -> None: ... @classmethod def from_float(cls, f: float) -> Fraction: ... @classmethod def from_decimal(cls, dec: Decimal) -> Fraction: ... def limit_denominator(self, max_denominator: int = ...) -> Fraction: ... if sys.version_info >= (3, 8): def as_integer_ratio(self) -> Tuple[int, int]: ... @property def numerator(self) -> int: ... @property def denominator(self) -> int: ... def __add__(self, other): ... def __radd__(self, other): ... def __sub__(self, other): ... def __rsub__(self, other): ... def __mul__(self, other): ... def __rmul__(self, other): ... def __truediv__(self, other): ... def __rtruediv__(self, other): ... if sys.version_info < (3, 0): def __div__(self, other): ... def __rdiv__(self, other): ... def __floordiv__(self, other) -> int: ... def __rfloordiv__(self, other) -> int: ... def __mod__(self, other): ... def __rmod__(self, other): ... def __divmod__(self, other): ... def __rdivmod__(self, other): ... def __pow__(self, other): ... def __rpow__(self, other): ... def __pos__(self) -> Fraction: ... def __neg__(self) -> Fraction: ... def __abs__(self) -> Fraction: ... def __trunc__(self) -> int: ... if sys.version_info >= (3, 0): def __floor__(self) -> int: ... def __ceil__(self) -> int: ... def __round__(self, ndigits: Optional[Any] = ...): ... def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... def __lt__(self, other: _ComparableNum) -> bool: ... def __gt__(self, other: _ComparableNum) -> bool: ... def __le__(self, other: _ComparableNum) -> bool: ... def __ge__(self, other: _ComparableNum) -> bool: ... 
if sys.version_info >= (3, 0): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... # Not actually defined within fractions.py, but provides more useful # overrides @property def real(self) -> Fraction: ... @property def imag(self) -> Fraction: ... def conjugate(self) -> Fraction: ... mypy-0.761/mypy/typeshed/stdlib/2and3/ftplib.pyi0000644€tŠÔÚ€2›s®0000001354113576752252025737 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import ( Any, BinaryIO, Callable, Dict, Generic, Iterable, Iterator, List, Optional, Protocol, Text, TextIO, Tuple, Type, TypeVar, Union, ) from types import TracebackType from socket import socket from ssl import SSLContext _T = TypeVar("_T") _IntOrStr = Union[int, Text] MSG_OOB: int FTP_PORT: int MAXLINE: int CRLF: str if sys.version_info >= (3,): B_CRLF: bytes class Error(Exception): ... class error_reply(Error): ... class error_temp(Error): ... class error_perm(Error): ... class error_proto(Error): ... all_errors = Tuple[Exception, ...] class _Readable(Protocol): def read(self, __length: int) -> bytes: ... class _ReadLineable(Protocol): def readline(self, _length: int) -> bytes: ... class FTP: debugging: int # Note: This is technically the type that's passed in as the host argument. But to make it easier in Python 2 we # accept Text but return str. host: str port: int maxline: int sock: Optional[socket] welcome: Optional[str] passiveserver: int timeout: int af: int lastresp: str if sys.version_info >= (3,): file: Optional[TextIO] encoding: str def __enter__(self: _T) -> _T: ... def __exit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType] ) -> None: ... else: file: Optional[BinaryIO] if sys.version_info >= (3, 3): source_address: Optional[Tuple[str, int]] def __init__( self, host: Text = ..., user: Text = ..., passwd: Text = ..., acct: Text = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ..., ) -> None: ... 
def connect( self, host: Text = ..., port: int = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ... ) -> str: ... else: def __init__( self, host: Text = ..., user: Text = ..., passwd: Text = ..., acct: Text = ..., timeout: float = ... ) -> None: ... def connect(self, host: Text = ..., port: int = ..., timeout: float = ...) -> str: ... def getwelcome(self) -> str: ... def set_debuglevel(self, level: int) -> None: ... def debug(self, level: int) -> None: ... def set_pasv(self, val: Union[bool, int]) -> None: ... def sanitize(self, s: Text) -> str: ... def putline(self, line: Text) -> None: ... def putcmd(self, line: Text) -> None: ... def getline(self) -> str: ... def getmultiline(self) -> str: ... def getresp(self) -> str: ... def voidresp(self) -> str: ... def abort(self) -> str: ... def sendcmd(self, cmd: Text) -> str: ... def voidcmd(self, cmd: Text) -> str: ... def sendport(self, host: Text, port: int) -> str: ... def sendeprt(self, host: Text, port: int) -> str: ... def makeport(self) -> socket: ... def makepasv(self) -> Tuple[str, int]: ... def login(self, user: Text = ..., passwd: Text = ..., acct: Text = ...) -> str: ... # In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers. def ntransfercmd(self, cmd: Text, rest: Optional[_IntOrStr] = ...) -> Tuple[socket, int]: ... def transfercmd(self, cmd: Text, rest: Optional[_IntOrStr] = ...) -> socket: ... def retrbinary( self, cmd: Text, callback: Callable[[bytes], Any], blocksize: int = ..., rest: Optional[_IntOrStr] = ... ) -> str: ... def storbinary( self, cmd: Text, fp: _Readable, blocksize: int = ..., callback: Optional[Callable[[bytes], Any]] = ..., rest: Optional[_IntOrStr] = ..., ) -> str: ... def retrlines(self, cmd: Text, callback: Optional[Callable[[str], Any]] = ...) -> str: ... def storlines(self, cmd: Text, fp: _ReadLineable, callback: Optional[Callable[[bytes], Any]] = ...) -> str: ... 
def acct(self, password: Text) -> str: ... def nlst(self, *args: Text) -> List[str]: ... # Technically only the last arg can be a Callable but ... def dir(self, *args: Union[str, Callable[[str], None]]) -> None: ... if sys.version_info >= (3, 3): def mlsd(self, path: Text = ..., facts: Iterable[str] = ...) -> Iterator[Tuple[str, Dict[str, str]]]: ... def rename(self, fromname: Text, toname: Text) -> str: ... def delete(self, filename: Text) -> str: ... def cwd(self, dirname: Text) -> str: ... def size(self, filename: Text) -> str: ... def mkd(self, dirname: Text) -> str: ... def rmd(self, dirname: Text) -> str: ... def pwd(self) -> str: ... def quit(self) -> str: ... def close(self) -> None: ... class FTP_TLS(FTP): def __init__( self, host: Text = ..., user: Text = ..., passwd: Text = ..., acct: Text = ..., keyfile: Optional[str] = ..., certfile: Optional[str] = ..., context: Optional[SSLContext] = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ..., ) -> None: ... ssl_version: int keyfile: Optional[str] certfile: Optional[str] context: SSLContext def login(self, user: Text = ..., passwd: Text = ..., acct: Text = ..., secure: bool = ...) -> str: ... def auth(self) -> str: ... def prot_p(self) -> str: ... def prot_c(self) -> str: ... if sys.version_info >= (3, 3): def ccc(self) -> str: ... if sys.version_info < (3,): class Netrc: def __init__(self, filename: Optional[Text] = ...) -> None: ... def get_hosts(self) -> List[str]: ... def get_account(self, host: Text) -> Tuple[Optional[str], Optional[str], Optional[str]]: ... def get_macros(self) -> List[str]: ... def get_macro(self, macro: Text) -> Tuple[str, ...]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/genericpath.pyi0000644€tŠÔÚ€2›s®0000000121513576752252026743 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Sequence, AnyStr, Text import sys if sys.version_info >= (3, 0): def commonprefix(m: Sequence[str]) -> str: ... else: def commonprefix(m: Sequence[AnyStr]) -> AnyStr: ... 
def exists(path: Text) -> bool: ... def isfile(path: Text) -> bool: ... def isdir(s: Text) -> bool: ... def getsize(filename: Text) -> int: ... def getmtime(filename: Text) -> float: ... def getatime(filename: Text) -> float: ... def getctime(filename: Text) -> float: ... if sys.version_info >= (3, 4): def samestat(s1: str, s2: str) -> int: ... def samefile(f1: str, f2: str) -> int: ... def sameopenfile(fp1: str, fp2: str) -> int: ... mypy-0.761/mypy/typeshed/stdlib/2and3/grp.pyi0000644€tŠÔÚ€2›s®0000000044713576752252025250 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, NamedTuple, Optional class struct_group(NamedTuple): gr_name: str gr_passwd: Optional[str] gr_gid: int gr_mem: List[str] def getgrall() -> List[struct_group]: ... def getgrgid(gid: int) -> struct_group: ... def getgrnam(name: str) -> struct_group: ... mypy-0.761/mypy/typeshed/stdlib/2and3/hmac.pyi0000644€tŠÔÚ€2›s®0000000206313576752252025364 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for hmac from typing import Any, Callable, Optional, Union, overload, AnyStr from types import ModuleType import sys _B = Union[bytes, bytearray] # TODO more precise type for object of hashlib _Hash = Any digest_size: None if sys.version_info >= (3, 4): def new(key: _B, msg: Optional[_B] = ..., digestmod: Optional[Union[str, Callable[[], _Hash], ModuleType]] = ...) -> HMAC: ... else: def new(key: _B, msg: Optional[_B] = ..., digestmod: Optional[Union[Callable[[], _Hash], ModuleType]] = ...) -> HMAC: ... class HMAC: if sys.version_info >= (3,): digest_size: int if sys.version_info >= (3, 4): block_size: int name: str def update(self, msg: _B) -> None: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... def copy(self) -> HMAC: ... @overload def compare_digest(a: bytearray, b: bytearray) -> bool: ... @overload def compare_digest(a: AnyStr, b: AnyStr) -> bool: ... if sys.version_info >= (3, 7): def digest(key: _B, msg: _B, digest: str) -> bytes: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/imaplib.pyi0000644€tŠÔÚ€2›s®0000001424213576752252026073 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for imaplib (Python 2) import imaplib import subprocess import sys import time from socket import socket as _socket from ssl import SSLSocket, SSLContext from typing import Any, Callable, Dict, IO, List, Optional, Pattern, Text, Tuple, Type, Union CommandResults = Tuple[str, List[Any]] class IMAP4: error: Type[Exception] = ... abort: Type[Exception] = ... readonly: Type[Exception] = ... mustquote: Pattern[Text] = ... debug: int = ... state: str = ... literal: Optional[Text] = ... tagged_commands: Dict[str, str] = ... untagged_responses: Dict[str, str] = ... continuation_response: str = ... is_readonly: bool = ... tagnum: int = ... tagpre: str = ... tagre: Pattern[Text] = ... welcome: bytes = ... capabilities: Tuple[str] = ... PROTOCOL_VERSION: str = ... def __init__(self, host: str, port: int) -> None: ... def __getattr__(self, attr: str) -> Any: ... host: str = ... port: int = ... sock: _socket = ... file: Union[IO[Text], IO[bytes]] = ... def open(self, host: str = ..., port: int = ...) -> None: ... def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... def send(self, data: bytes) -> None: ... def shutdown(self) -> None: ... def socket(self) -> _socket: ... def recent(self) -> CommandResults: ... def response(self, code: str) -> CommandResults: ... def append(self, mailbox: str, flags: str, date_time: str, message: str) -> str: ... def authenticate(self, mechanism: str, authobject: Callable[[bytes], Optional[bytes]]) -> Tuple[str, str]: ... def capability(self) -> CommandResults: ... def check(self) -> CommandResults: ... def close(self) -> CommandResults: ... def copy(self, message_set: str, new_mailbox: str) -> CommandResults: ... def create(self, mailbox: str) -> CommandResults: ... def delete(self, mailbox: str) -> CommandResults: ... 
def deleteacl(self, mailbox: str, who: str) -> CommandResults: ... if sys.version_info >= (3, 5): def enable(self, capability: str) -> CommandResults: ... def expunge(self) -> CommandResults: ... def fetch(self, message_set: str, message_parts: str) -> CommandResults: ... def getacl(self, mailbox: str) -> CommandResults: ... def getannotation(self, mailbox: str, entry: str, attribute: str) -> CommandResults: ... def getquota(self, root: str) -> CommandResults: ... def getquotaroot(self, mailbox: str) -> CommandResults: ... def list(self, directory: str = ..., pattern: str = ...) -> CommandResults: ... def login(self, user: str, password: str) -> CommandResults: ... def login_cram_md5(self, user: str, password: str) -> CommandResults: ... def logout(self) -> CommandResults: ... def lsub(self, directory: str = ..., pattern: str = ...) -> CommandResults: ... def myrights(self, mailbox: str) -> CommandResults: ... def namespace(self) -> CommandResults: ... def noop(self) -> CommandResults: ... def partial(self, message_num: str, message_part: str, start: str, length: str) -> CommandResults: ... def proxyauth(self, user: str) -> CommandResults: ... def rename(self, oldmailbox: str, newmailbox: str) -> CommandResults: ... def search(self, charset: Optional[str], *criteria: str) -> CommandResults: ... def select(self, mailbox: str = ..., readonly: bool = ...) -> CommandResults: ... def setacl(self, mailbox: str, who: str, what: str) -> CommandResults: ... def setannotation(self, *args: str) -> CommandResults: ... def setquota(self, root: str, limits: str) -> CommandResults: ... def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> CommandResults: ... if sys.version_info >= (3,): def starttls(self, ssl_context: Optional[Any] = ...) -> CommandResults: ... def status(self, mailbox: str, names: str) -> CommandResults: ... def store(self, message_set: str, command: str, flags: str) -> CommandResults: ... 
def subscribe(self, mailbox: str) -> CommandResults: ... def thread(self, threading_algorithm: str, charset: str, *search_criteria: str) -> CommandResults: ... def uid(self, command: str, *args: str) -> CommandResults: ... def unsubscribe(self, mailbox: str) -> CommandResults: ... def xatom(self, name: str, *args: str) -> CommandResults: ... def print_log(self) -> None: ... class IMAP4_SSL(IMAP4): keyfile: str = ... certfile: str = ... if sys.version_info >= (3, 3): def __init__(self, host: str = ..., port: int = ..., keyfile: Optional[str] = ..., certfile: Optional[str] = ..., ssl_context: Optional[SSLContext] = ...) -> None: ... else: def __init__(self, host: str = ..., port: int = ..., keyfile: Optional[str] = ..., certfile: Optional[str] = ...) -> None: ... host: str = ... port: int = ... sock: _socket = ... sslobj: SSLSocket = ... file: IO[Any] = ... def open(self, host: str = ..., port: Optional[int] = ...) -> None: ... def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... def send(self, data: bytes) -> None: ... def shutdown(self) -> None: ... def socket(self) -> _socket: ... def ssl(self) -> SSLSocket: ... class IMAP4_stream(IMAP4): command: str = ... def __init__(self, command: str) -> None: ... host: str = ... port: int = ... sock: _socket = ... file: IO[Any] = ... process: subprocess.Popen[bytes] = ... writefile: IO[Any] = ... readfile: IO[Any] = ... def open(self, host: str = ..., port: Optional[int] = ...) -> None: ... def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... def send(self, data: bytes) -> None: ... def shutdown(self) -> None: ... class _Authenticator: mech: Callable[[bytes], bytes] = ... def __init__(self, mechinst: Callable[[bytes], bytes]) -> None: ... def process(self, data: str) -> str: ... def encode(self, inp: bytes) -> str: ... def decode(self, inp: str) -> bytes: ... def Internaldate2tuple(resp: str) -> time.struct_time: ... def Int2AP(num: int) -> str: ... 
def ParseFlags(resp: str) -> Tuple[str]: ... def Time2Internaldate(date_time: Union[float, time.struct_time, str]) -> str: ... mypy-0.761/mypy/typeshed/stdlib/2and3/imghdr.pyi0000644€tŠÔÚ€2›s®0000000062313576752252025726 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, Union, Text, BinaryIO, Optional, Any, List, Callable import sys import os if sys.version_info >= (3, 6): _File = Union[Text, os.PathLike[Text], BinaryIO] else: _File = Union[Text, BinaryIO] @overload def what(file: _File) -> Optional[str]: ... @overload def what(file: Any, h: bytes) -> Optional[str]: ... tests: List[Callable[[bytes, BinaryIO], Optional[str]]] mypy-0.761/mypy/typeshed/stdlib/2and3/keyword.pyi0000644€tŠÔÚ€2›s®0000000020713576752252026136 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for keyword from typing import Sequence, Text, Union def iskeyword(s: Union[Text, bytes]) -> bool: ... kwlist: Sequence[str] mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/0000755€tŠÔÚ€2›s®0000000000013576752267025214 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/__init__.pyi0000644€tŠÔÚ€2›s®0000000004113576752252027463 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3 (Python 3.6) mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pgen2/0000755€tŠÔÚ€2›s®0000000000013576752267026227 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pgen2/__init__.pyi0000644€tŠÔÚ€2›s®0000000027013576752252030502 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2 (Python 3.6) import os import sys from typing import Text, Union if sys.version_info >= (3, 6): _Path = Union[Text, os.PathLike] else: _Path = Text mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pgen2/driver.pyi0000644€tŠÔÚ€2›s®0000000203213576752252030234 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.driver (Python 3.6) import os import sys from typing import Any, Callable, IO, Iterable, 
List, Optional, Text, Tuple, Union from logging import Logger from lib2to3.pytree import _Convert, _NL from lib2to3.pgen2 import _Path from lib2to3.pgen2.grammar import Grammar class Driver: grammar: Grammar logger: Logger convert: _Convert def __init__(self, grammar: Grammar, convert: Optional[_Convert] = ..., logger: Optional[Logger] = ...) -> None: ... def parse_tokens(self, tokens: Iterable[Any], debug: bool = ...) -> _NL: ... def parse_stream_raw(self, stream: IO[Text], debug: bool = ...) -> _NL: ... def parse_stream(self, stream: IO[Text], debug: bool = ...) -> _NL: ... def parse_file(self, filename: _Path, encoding: Optional[Text] = ..., debug: bool = ...) -> _NL: ... def parse_string(self, text: Text, debug: bool = ...) -> _NL: ... def load_grammar(gt: Text = ..., gp: Optional[Text] = ..., save: bool = ..., force: bool = ..., logger: Optional[Logger] = ...) -> Grammar: ... mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pgen2/grammar.pyi0000644€tŠÔÚ€2›s®0000000142113576752252030370 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.grammar (Python 3.6) from lib2to3.pgen2 import _Path from typing import Any, Dict, List, Optional, Text, Tuple, TypeVar _P = TypeVar('_P') _Label = Tuple[int, Optional[Text]] _DFA = List[List[Tuple[int, int]]] _DFAS = Tuple[_DFA, Dict[int, int]] class Grammar: symbol2number: Dict[Text, int] number2symbol: Dict[int, Text] states: List[_DFA] dfas: Dict[int, _DFAS] labels: List[_Label] keywords: Dict[Text, int] tokens: Dict[int, int] symbol2label: Dict[Text, int] start: int def __init__(self) -> None: ... def dump(self, filename: _Path) -> None: ... def load(self, filename: _Path) -> None: ... def copy(self: _P) -> _P: ... def report(self) -> None: ... 
opmap_raw: Text opmap: Dict[Text, Text] mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pgen2/literals.pyi0000644€tŠÔÚ€2›s®0000000033513576752252030564 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.literals (Python 3.6) from typing import Dict, Match, Text simple_escapes: Dict[Text, Text] def escape(m: Match[str]) -> Text: ... def evalString(s: Text) -> Text: ... def test() -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pgen2/parse.pyi0000644€tŠÔÚ€2›s®0000000221013576752252030051 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.parse (Python 3.6) from typing import Any, Dict, List, Optional, Sequence, Set, Text, Tuple from lib2to3.pgen2.grammar import Grammar, _DFAS from lib2to3.pytree import _NL, _Convert, _RawNode _Context = Sequence[Any] class ParseError(Exception): msg: Text type: int value: Optional[Text] context: _Context def __init__(self, msg: Text, type: int, value: Optional[Text], context: _Context) -> None: ... class Parser: grammar: Grammar convert: _Convert stack: List[Tuple[_DFAS, int, _RawNode]] rootnode: Optional[_NL] used_names: Set[Text] def __init__(self, grammar: Grammar, convert: Optional[_Convert] = ...) -> None: ... def setup(self, start: Optional[int] = ...) -> None: ... def addtoken(self, type: int, value: Optional[Text], context: _Context) -> bool: ... def classify(self, type: int, value: Optional[Text], context: _Context) -> int: ... def shift(self, type: int, value: Optional[Text], newstate: int, context: _Context) -> None: ... def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: ... def pop(self) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pgen2/pgen.pyi0000644€tŠÔÚ€2›s®0000000416513576752252027703 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.pgen (Python 3.6) from typing import ( Any, Dict, IO, Iterable, Iterator, List, NoReturn, Optional, Text, Tuple ) from lib2to3.pgen2 import _Path, grammar from lib2to3.pgen2.tokenize import _TokenInfo class PgenGrammar(grammar.Grammar): ... class ParserGenerator: filename: _Path stream: IO[Text] generator: Iterator[_TokenInfo] first: Dict[Text, Dict[Text, int]] def __init__(self, filename: _Path, stream: Optional[IO[Text]] = ...) -> None: ... def make_grammar(self) -> PgenGrammar: ... def make_first(self, c: PgenGrammar, name: Text) -> Dict[int, int]: ... def make_label(self, c: PgenGrammar, label: Text) -> int: ... def addfirstsets(self) -> None: ... def calcfirst(self, name: Text) -> None: ... def parse(self) -> Tuple[Dict[Text, List[DFAState]], Text]: ... def make_dfa(self, start: NFAState, finish: NFAState) -> List[DFAState]: ... def dump_nfa(self, name: Text, start: NFAState, finish: NFAState) -> List[DFAState]: ... def dump_dfa(self, name: Text, dfa: Iterable[DFAState]) -> None: ... def simplify_dfa(self, dfa: List[DFAState]) -> None: ... def parse_rhs(self) -> Tuple[NFAState, NFAState]: ... def parse_alt(self) -> Tuple[NFAState, NFAState]: ... def parse_item(self) -> Tuple[NFAState, NFAState]: ... def parse_atom(self) -> Tuple[NFAState, NFAState]: ... def expect(self, type: int, value: Optional[Any] = ...) -> Text: ... def gettoken(self) -> None: ... def raise_error(self, msg: str, *args: Any) -> NoReturn: ... class NFAState: arcs: List[Tuple[Optional[Text], NFAState]] def __init__(self) -> None: ... def addarc(self, next: NFAState, label: Optional[Text] = ...) -> None: ... class DFAState: nfaset: Dict[NFAState, Any] isfinal: bool arcs: Dict[Text, DFAState] def __init__(self, nfaset: Dict[NFAState, Any], final: NFAState) -> None: ... 
def addarc(self, next: DFAState, label: Text) -> None: ... def unifystate(self, old: DFAState, new: DFAState) -> None: ... def __eq__(self, other: Any) -> bool: ... def generate_grammar(filename: _Path = ...) -> PgenGrammar: ... mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pgen2/token.pyi0000644€tŠÔÚ€2›s®0000000212713576752252030066 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.token (Python 3.6) import sys from typing import Dict, Text ENDMARKER: int NAME: int NUMBER: int STRING: int NEWLINE: int INDENT: int DEDENT: int LPAR: int RPAR: int LSQB: int RSQB: int COLON: int COMMA: int SEMI: int PLUS: int MINUS: int STAR: int SLASH: int VBAR: int AMPER: int LESS: int GREATER: int EQUAL: int DOT: int PERCENT: int BACKQUOTE: int LBRACE: int RBRACE: int EQEQUAL: int NOTEQUAL: int LESSEQUAL: int GREATEREQUAL: int TILDE: int CIRCUMFLEX: int LEFTSHIFT: int RIGHTSHIFT: int DOUBLESTAR: int PLUSEQUAL: int MINEQUAL: int STAREQUAL: int SLASHEQUAL: int PERCENTEQUAL: int AMPEREQUAL: int VBAREQUAL: int CIRCUMFLEXEQUAL: int LEFTSHIFTEQUAL: int RIGHTSHIFTEQUAL: int DOUBLESTAREQUAL: int DOUBLESLASH: int DOUBLESLASHEQUAL: int OP: int COMMENT: int NL: int if sys.version_info >= (3,): RARROW: int if sys.version_info >= (3, 5): AT: int ATEQUAL: int AWAIT: int ASYNC: int ERRORTOKEN: int N_TOKENS: int NT_OFFSET: int tok_name: Dict[int, Text] def ISTERMINAL(x: int) -> bool: ... def ISNONTERMINAL(x: int) -> bool: ... def ISEOF(x: int) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pgen2/tokenize.pyi0000644€tŠÔÚ€2›s®0000000173413576752252030601 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.tokenize (Python 3.6) # NOTE: Only elements from __all__ are present. 
from typing import Callable, Iterable, Iterator, List, Text, Tuple from lib2to3.pgen2.token import * # noqa _Coord = Tuple[int, int] _TokenEater = Callable[[int, Text, _Coord, _Coord, Text], None] _TokenInfo = Tuple[int, Text, _Coord, _Coord, Text] class TokenError(Exception): ... class StopTokenizing(Exception): ... def tokenize(readline: Callable[[], Text], tokeneater: _TokenEater = ...) -> None: ... class Untokenizer: tokens: List[Text] prev_row: int prev_col: int def __init__(self) -> None: ... def add_whitespace(self, start: _Coord) -> None: ... def untokenize(self, iterable: Iterable[_TokenInfo]) -> Text: ... def compat(self, token: Tuple[int, Text], iterable: Iterable[_TokenInfo]) -> None: ... def untokenize(iterable: Iterable[_TokenInfo]) -> Text: ... def generate_tokens( readline: Callable[[], Text] ) -> Iterator[_TokenInfo]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pygram.pyi0000644€tŠÔÚ€2›s®0000000434013576752252027231 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pygram (Python 3.6) from typing import Any from lib2to3.pgen2.grammar import Grammar class Symbols: def __init__(self, grammar: Grammar) -> None: ... 
class python_symbols(Symbols): and_expr: int and_test: int annassign: int arglist: int argument: int arith_expr: int assert_stmt: int async_funcdef: int async_stmt: int atom: int augassign: int break_stmt: int classdef: int comp_for: int comp_if: int comp_iter: int comp_op: int comparison: int compound_stmt: int continue_stmt: int decorated: int decorator: int decorators: int del_stmt: int dictsetmaker: int dotted_as_name: int dotted_as_names: int dotted_name: int encoding_decl: int eval_input: int except_clause: int exec_stmt: int expr: int expr_stmt: int exprlist: int factor: int file_input: int flow_stmt: int for_stmt: int funcdef: int global_stmt: int if_stmt: int import_as_name: int import_as_names: int import_from: int import_name: int import_stmt: int lambdef: int listmaker: int not_test: int old_lambdef: int old_test: int or_test: int parameters: int pass_stmt: int power: int print_stmt: int raise_stmt: int return_stmt: int shift_expr: int simple_stmt: int single_input: int sliceop: int small_stmt: int star_expr: int stmt: int subscript: int subscriptlist: int suite: int term: int test: int testlist: int testlist1: int testlist_gexp: int testlist_safe: int testlist_star_expr: int tfpdef: int tfplist: int tname: int trailer: int try_stmt: int typedargslist: int varargslist: int vfpdef: int vfplist: int vname: int while_stmt: int with_item: int with_stmt: int with_var: int xor_expr: int yield_arg: int yield_expr: int yield_stmt: int class pattern_symbols(Symbols): Alternative: int Alternatives: int Details: int Matcher: int NegatedUnit: int Repeater: int Unit: int python_grammar: Grammar python_grammar_no_print_statement: Grammar pattern_grammar: Grammar mypy-0.761/mypy/typeshed/stdlib/2and3/lib2to3/pytree.pyi0000644€tŠÔÚ€2›s®0000000626613576752252027253 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pytree (Python 3.6) import sys from typing import Any, Callable, Dict, Iterator, List, Optional, Text, Tuple, TypeVar, Union from 
lib2to3.pgen2.grammar import Grammar _P = TypeVar('_P') _NL = Union[Node, Leaf] _Context = Tuple[Text, int, int] _Results = Dict[Text, _NL] _RawNode = Tuple[int, Text, _Context, Optional[List[_NL]]] _Convert = Callable[[Grammar, _RawNode], Any] HUGE: int def type_repr(type_num: int) -> Text: ... class Base: type: int parent: Optional[Node] prefix: Text children: List[_NL] was_changed: bool was_checked: bool def __eq__(self, other: Any) -> bool: ... def _eq(self: _P, other: _P) -> bool: ... def clone(self: _P) -> _P: ... def post_order(self) -> Iterator[_NL]: ... def pre_order(self) -> Iterator[_NL]: ... def replace(self, new: Union[_NL, List[_NL]]) -> None: ... def get_lineno(self) -> int: ... def changed(self) -> None: ... def remove(self) -> Optional[int]: ... @property def next_sibling(self) -> Optional[_NL]: ... @property def prev_sibling(self) -> Optional[_NL]: ... def leaves(self) -> Iterator[Leaf]: ... def depth(self) -> int: ... def get_suffix(self) -> Text: ... if sys.version_info < (3,): def get_prefix(self) -> Text: ... def set_prefix(self, prefix: Text) -> None: ... class Node(Base): fixers_applied: List[Any] def __init__(self, type: int, children: List[_NL], context: Optional[Any] = ..., prefix: Optional[Text] = ..., fixers_applied: Optional[List[Any]] = ...) -> None: ... def set_child(self, i: int, child: _NL) -> None: ... def insert_child(self, i: int, child: _NL) -> None: ... def append_child(self, child: _NL) -> None: ... class Leaf(Base): lineno: int column: int value: Text fixers_applied: List[Any] def __init__(self, type: int, value: Text, context: Optional[_Context] = ..., prefix: Optional[Text] = ..., fixers_applied: List[Any] = ...) -> None: ... def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ... class BasePattern: type: int content: Optional[Text] name: Optional[Text] def optimize(self) -> BasePattern: ... 
# sic, subclasses are free to optimize themselves into different patterns def match(self, node: _NL, results: Optional[_Results] = ...) -> bool: ... def match_seq(self, nodes: List[_NL], results: Optional[_Results] = ...) -> bool: ... def generate_matches(self, nodes: List[_NL]) -> Iterator[Tuple[int, _Results]]: ... class LeafPattern(BasePattern): def __init__(self, type: Optional[int] = ..., content: Optional[Text] = ..., name: Optional[Text] = ...) -> None: ... class NodePattern(BasePattern): wildcards: bool def __init__(self, type: Optional[int] = ..., content: Optional[Text] = ..., name: Optional[Text] = ...) -> None: ... class WildcardPattern(BasePattern): min: int max: int def __init__(self, content: Optional[Text] = ..., min: int = ..., max: int = ..., name: Optional[Text] = ...) -> None: ... class NegatedPattern(BasePattern): def __init__(self, content: Optional[Text] = ...) -> None: ... def generate_matches(patterns: List[BasePattern], nodes: List[_NL]) -> Iterator[Tuple[int, _Results]]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/linecache.pyi0000644€tŠÔÚ€2›s®0000000111613576752252026365 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Dict, List, Optional, Text _ModuleGlobals = Dict[str, Any] def getline(filename: Text, lineno: int, module_globals: Optional[_ModuleGlobals] = ...) -> str: ... def clearcache() -> None: ... def getlines(filename: Text, module_globals: Optional[_ModuleGlobals] = ...) -> List[str]: ... def checkcache(filename: Optional[Text] = ...) -> None: ... def updatecache(filename: Text, module_globals: Optional[_ModuleGlobals] = ...) -> List[str]: ... if sys.version_info >= (3, 5): def lazycache(filename: Text, module_globals: _ModuleGlobals) -> bool: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/locale.pyi0000644€tŠÔÚ€2›s®0000000504413576752252025715 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for locale from decimal import Decimal from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple, Union import sys # workaround for mypy#2010 if sys.version_info < (3,): from __builtin__ import str as _str else: from builtins import str as _str CODESET: int D_T_FMT: int D_FMT: int T_FMT: int T_FMT_AMPM: int DAY_1: int DAY_2: int DAY_3: int DAY_4: int DAY_5: int DAY_6: int DAY_7: int ABDAY_1: int ABDAY_2: int ABDAY_3: int ABDAY_4: int ABDAY_5: int ABDAY_6: int ABDAY_7: int MON_1: int MON_2: int MON_3: int MON_4: int MON_5: int MON_6: int MON_7: int MON_8: int MON_9: int MON_10: int MON_11: int MON_12: int ABMON_1: int ABMON_2: int ABMON_3: int ABMON_4: int ABMON_5: int ABMON_6: int ABMON_7: int ABMON_8: int ABMON_9: int ABMON_10: int ABMON_11: int ABMON_12: int RADIXCHAR: int THOUSEP: int YESEXPR: int NOEXPR: int CRNCYSTR: int ERA: int ERA_D_T_FMT: int ERA_D_FMT: int ERA_T_FMT: int ALT_DIGITS: int LC_CTYPE: int LC_COLLATE: int LC_TIME: int LC_MONETARY: int LC_MESSAGES: int LC_NUMERIC: int LC_ALL: int CHAR_MAX: int class Error(Exception): ... def setlocale(category: int, locale: Union[_str, Iterable[_str], None] = ...) -> _str: ... def localeconv() -> Mapping[_str, Union[int, _str, List[int]]]: ... def nl_langinfo(option: int) -> _str: ... def getdefaultlocale(envvars: Tuple[_str, ...] = ...) -> Tuple[Optional[_str], Optional[_str]]: ... def getlocale(category: int = ...) -> Sequence[_str]: ... def getpreferredencoding(do_setlocale: bool = ...) -> _str: ... def normalize(localename: _str) -> _str: ... def resetlocale(category: int = ...) -> None: ... def strcoll(string1: _str, string2: _str) -> int: ... def strxfrm(string: _str) -> _str: ... def format(format: _str, val: Union[float, Decimal], grouping: bool = ..., monetary: bool = ...) -> _str: ... 
if sys.version_info >= (3, 7): def format_string(format: _str, val: Any, grouping: bool = ..., monetary: bool = ...) -> _str: ... else: def format_string(format: _str, val: Any, grouping: bool = ...) -> _str: ... def currency(val: Union[int, float, Decimal], symbol: bool = ..., grouping: bool = ..., international: bool = ...) -> _str: ... if sys.version_info >= (3, 5): def delocalize(string: _str) -> None: ... def atof(string: _str) -> float: ... def atoi(string: _str) -> int: ... def str(float: float) -> _str: ... locale_alias: Dict[_str, _str] # undocumented locale_encoding_alias: Dict[_str, _str] # undocumented windows_locale: Dict[int, _str] # undocumented mypy-0.761/mypy/typeshed/stdlib/2and3/logging/0000755€tŠÔÚ€2›s®0000000000013576752267025364 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/logging/__init__.pyi0000644€tŠÔÚ€2›s®0000004401313576752252027642 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for logging (Python 3.7) from typing import ( Any, Callable, Dict, Iterable, List, Mapping, MutableMapping, Optional, IO, Tuple, Text, Union, overload, ) from string import Template from time import struct_time from types import TracebackType, FrameType import sys import threading _SysExcInfoType = Union[Tuple[type, BaseException, Optional[TracebackType]], Tuple[None, None, None]] if sys.version_info >= (3, 5): _ExcInfoType = Union[None, bool, _SysExcInfoType, BaseException] else: _ExcInfoType = Union[None, bool, _SysExcInfoType] _ArgsType = Union[Tuple[Any, ...], Mapping[str, Any]] _FilterType = Union[Filter, Callable[[LogRecord], int]] _Level = Union[int, Text] if sys.version_info >= (3, 6): from os import PathLike _Path = Union[str, PathLike[str]] else: _Path = str raiseExceptions: bool logThreads: bool logMultiprocessing: bool logProcesses: bool def currentframe() -> FrameType: ... 
if sys.version_info >= (3,): _levelToName: Dict[int, str] _nameToLevel: Dict[str, int] else: _levelNames: Dict[Union[int, str], Union[str, int]] # Union[int:str, str:int] class Filterer(object): filters: List[Filter] def __init__(self) -> None: ... def addFilter(self, filter: Filter) -> None: ... def removeFilter(self, filter: Filter) -> None: ... def filter(self, record: LogRecord) -> bool: ... class Logger(Filterer): name: str level: int parent: Union[Logger, PlaceHolder] propagate: bool handlers: List[Handler] disabled: int def __init__(self, name: str, level: _Level = ...) -> None: ... def setLevel(self, level: _Level) -> None: ... def isEnabledFor(self, level: int) -> bool: ... def getEffectiveLevel(self) -> int: ... def getChild(self, suffix: str) -> Logger: ... if sys.version_info >= (3,): def debug(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warn(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def error(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... fatal = critical def log(self, level: int, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... 
def exception(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... else: def debug(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... warn = warning def error(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... fatal = critical def log(self, level: int, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def addFilter(self, filt: _FilterType) -> None: ... def removeFilter(self, filt: _FilterType) -> None: ... def filter(self, record: LogRecord) -> bool: ... def addHandler(self, hdlr: Handler) -> None: ... def removeHandler(self, hdlr: Handler) -> None: ... if sys.version_info >= (3,): def findCaller(self, stack_info: bool = ...) -> Tuple[str, int, str, Optional[str]]: ... else: def findCaller(self) -> Tuple[str, int, str]: ... def handle(self, record: LogRecord) -> None: ... if sys.version_info >= (3,): def makeRecord(self, name: str, level: int, fn: str, lno: int, msg: Any, args: _ArgsType, exc_info: Optional[_SysExcInfoType], func: Optional[str] = ..., extra: Optional[Mapping[str, Any]] = ..., sinfo: Optional[str] = ...) -> LogRecord: ... 
else: def makeRecord(self, name: str, level: int, fn: str, lno: int, msg: Any, args: _ArgsType, exc_info: Optional[_SysExcInfoType], func: Optional[str] = ..., extra: Optional[Mapping[str, Any]] = ...) -> LogRecord: ... if sys.version_info >= (3,): def hasHandlers(self) -> bool: ... CRITICAL: int FATAL: int ERROR: int WARNING: int WARN: int INFO: int DEBUG: int NOTSET: int class Handler(Filterer): level: int # undocumented formatter: Optional[Formatter] # undocumented lock: Optional[threading.Lock] # undocumented name: Optional[str] # undocumented def __init__(self, level: _Level = ...) -> None: ... def createLock(self) -> None: ... def acquire(self) -> None: ... def release(self) -> None: ... def setLevel(self, level: _Level) -> None: ... def setFormatter(self, fmt: Formatter) -> None: ... def addFilter(self, filt: _FilterType) -> None: ... def removeFilter(self, filt: _FilterType) -> None: ... def filter(self, record: LogRecord) -> bool: ... def flush(self) -> None: ... def close(self) -> None: ... def handle(self, record: LogRecord) -> None: ... def handleError(self, record: LogRecord) -> None: ... def format(self, record: LogRecord) -> str: ... def emit(self, record: LogRecord) -> None: ... class Formatter: converter: Callable[[Optional[float]], struct_time] _fmt: Optional[str] datefmt: Optional[str] if sys.version_info >= (3,): _style: PercentStyle default_time_format: str default_msec_format: str if sys.version_info >= (3,): def __init__(self, fmt: Optional[str] = ..., datefmt: Optional[str] = ..., style: str = ...) -> None: ... else: def __init__(self, fmt: Optional[str] = ..., datefmt: Optional[str] = ...) -> None: ... def format(self, record: LogRecord) -> str: ... def formatTime(self, record: LogRecord, datefmt: Optional[str] = ...) -> str: ... def formatException(self, exc_info: _SysExcInfoType) -> str: ... if sys.version_info >= (3,): def formatMessage(self, record: LogRecord) -> str: ... # undocumented def formatStack(self, stack_info: str) -> str: ... 
class Filter: def __init__(self, name: str = ...) -> None: ... def filter(self, record: LogRecord) -> int: ... class LogRecord: args: _ArgsType asctime: str created: int exc_info: Optional[_SysExcInfoType] exc_text: Optional[str] filename: str funcName: str levelname: str levelno: int lineno: int module: str msecs: int message: str msg: str name: str pathname: str process: int processName: str relativeCreated: int if sys.version_info >= (3,): stack_info: Optional[str] thread: int threadName: str if sys.version_info >= (3,): def __init__(self, name: str, level: int, pathname: str, lineno: int, msg: Any, args: _ArgsType, exc_info: Optional[_SysExcInfoType], func: Optional[str] = ..., sinfo: Optional[str] = ...) -> None: ... else: def __init__(self, name: str, level: int, pathname: str, lineno: int, msg: Any, args: _ArgsType, exc_info: Optional[_SysExcInfoType], func: Optional[str] = ...) -> None: ... def getMessage(self) -> str: ... class LoggerAdapter: logger: Logger extra: Mapping[str, Any] def __init__(self, logger: Logger, extra: Mapping[str, Any]) -> None: ... def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> Tuple[Any, MutableMapping[str, Any]]: ... if sys.version_info >= (3,): def debug(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warn(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... 
def error(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def log(self, level: int, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... else: def debug(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def error(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(self, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def log(self, level: int, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def isEnabledFor(self, lvl: int) -> bool: ... if sys.version_info >= (3,): def getEffectiveLevel(self) -> int: ... def setLevel(self, lvl: Union[int, str]) -> None: ... def hasHandlers(self) -> bool: ... if sys.version_info >= (3,): def getLogger(name: Optional[str] = ...) -> Logger: ... else: @overload def getLogger() -> Logger: ... 
@overload def getLogger(name: Union[Text, str]) -> Logger: ... def getLoggerClass() -> type: ... if sys.version_info >= (3,): def getLogRecordFactory() -> Callable[..., LogRecord]: ... if sys.version_info >= (3,): def debug(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warn(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def error(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def log(level: int, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... else: def debug(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... warn = warning def error(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... 
def critical(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def log(level: int, msg: Any, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... fatal = critical def disable(lvl: int) -> None: ... def addLevelName(lvl: int, levelName: str) -> None: ... def getLevelName(lvl: Union[int, str]) -> Any: ... def makeLogRecord(attrdict: Mapping[str, Any]) -> LogRecord: ... if sys.version_info >= (3,): def basicConfig(*, filename: Optional[_Path] = ..., filemode: str = ..., format: str = ..., datefmt: Optional[str] = ..., style: str = ..., level: Optional[_Level] = ..., stream: Optional[IO[str]] = ..., handlers: Optional[Iterable[Handler]] = ...) -> None: ... else: @overload def basicConfig() -> None: ... @overload def basicConfig(*, filename: Optional[str] = ..., filemode: str = ..., format: str = ..., datefmt: Optional[str] = ..., level: Optional[_Level] = ..., stream: IO[str] = ...) -> None: ... def shutdown() -> None: ... def setLoggerClass(klass: type) -> None: ... def captureWarnings(capture: bool) -> None: ... if sys.version_info >= (3,): def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: ... if sys.version_info >= (3,): lastResort: Optional[StreamHandler] class StreamHandler(Handler): stream: IO[str] # undocumented if sys.version_info >= (3, 2): terminator: str def __init__(self, stream: Optional[IO[str]] = ...) -> None: ... if sys.version_info >= (3, 7): def setStream(self, stream: IO[str]) -> Optional[IO[str]]: ... class FileHandler(StreamHandler): baseFilename: str # undocumented mode: str # undocumented encoding: Optional[str] # undocumented delay: bool # undocumented def __init__(self, filename: _Path, mode: str = ..., encoding: Optional[str] = ..., delay: bool = ...) -> None: ... 
class NullHandler(Handler): ... class PlaceHolder: def __init__(self, alogger: Logger) -> None: ... def append(self, alogger: Logger) -> None: ... # Below aren't in module docs but still visible class RootLogger(Logger): ... root: RootLogger if sys.version_info >= (3,): class PercentStyle(object): default_format: str asctime_format: str asctime_search: str _fmt: str def __init__(self, fmt: str) -> None: ... def usesTime(self) -> bool: ... def format(self, record: Any) -> str: ... class StrFormatStyle(PercentStyle): ... class StringTemplateStyle(PercentStyle): _tpl: Template _STYLES: Dict[str, Tuple[PercentStyle, str]] BASIC_FORMAT: str mypy-0.761/mypy/typeshed/stdlib/2and3/logging/config.pyi0000644€tŠÔÚ€2›s®0000000221313576752252027344 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for logging.config (Python 3.4) from typing import Any, Callable, Dict, Optional, IO, Union from threading import Thread import sys if sys.version_info >= (3,): from configparser import RawConfigParser else: from ConfigParser import RawConfigParser if sys.version_info >= (3, 6): from os import PathLike if sys.version_info >= (3, 7): _Path = Union[str, bytes, PathLike[str]] elif sys.version_info >= (3, 6): _Path = Union[str, PathLike[str]] else: _Path = str def dictConfig(config: Dict[str, Any]) -> None: ... if sys.version_info >= (3, 4): def fileConfig(fname: Union[_Path, IO[str], RawConfigParser], defaults: Optional[Dict[str, str]] = ..., disable_existing_loggers: bool = ...) -> None: ... def listen(port: int = ..., verify: Optional[Callable[[bytes], Optional[bytes]]] = ...) -> Thread: ... else: def fileConfig(fname: Union[str, IO[str]], defaults: Optional[Dict[str, str]] = ..., disable_existing_loggers: bool = ...) -> None: ... def listen(port: int = ...) -> Thread: ... def stopListening() -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/logging/handlers.pyi0000644€tŠÔÚ€2›s®0000001753713576752252027716 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for logging.handlers (Python 2.4) import datetime from logging import Handler, FileHandler, LogRecord from socket import SocketType import ssl import sys from typing import Any, Callable, Dict, List, Optional, Tuple, Union, overload if sys.version_info >= (3, 7): from queue import SimpleQueue, Queue elif sys.version_info >= (3,): from queue import Queue else: from Queue import Queue # TODO update socket stubs to add SocketKind _SocketKind = int if sys.version_info >= (3, 6): from os import PathLike _Path = Union[str, PathLike[str]] else: _Path = str DEFAULT_TCP_LOGGING_PORT: int DEFAULT_UDP_LOGGING_PORT: int DEFAULT_HTTP_LOGGING_PORT: int DEFAULT_SOAP_LOGGING_PORT: int SYSLOG_UDP_PORT: int SYSLOG_TCP_PORT: int class WatchedFileHandler(FileHandler): def __init__(self, filename: _Path, mode: str = ..., encoding: Optional[str] = ..., delay: bool = ...) -> None: ... if sys.version_info >= (3,): class BaseRotatingHandler(FileHandler): terminator: str namer: Optional[Callable[[str], str]] rotator: Optional[Callable[[str, str], None]] def __init__(self, filename: _Path, mode: str, encoding: Optional[str] = ..., delay: bool = ...) -> None: ... def rotation_filename(self, default_name: str) -> None: ... def rotate(self, source: str, dest: str) -> None: ... if sys.version_info >= (3,): class RotatingFileHandler(BaseRotatingHandler): def __init__(self, filename: _Path, mode: str = ..., maxBytes: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ...) -> None: ... def doRollover(self) -> None: ... else: class RotatingFileHandler(Handler): def __init__(self, filename: str, mode: str = ..., maxBytes: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ...) -> None: ... def doRollover(self) -> None: ... 
if sys.version_info >= (3,): class TimedRotatingFileHandler(BaseRotatingHandler): if sys.version_info >= (3, 4): def __init__(self, filename: _Path, when: str = ..., interval: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ..., utc: bool = ..., atTime: Optional[datetime.datetime] = ...) -> None: ... else: def __init__(self, filename: str, when: str = ..., interval: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ..., utc: bool = ...) -> None: ... def doRollover(self) -> None: ... else: class TimedRotatingFileHandler(Handler): def __init__(self, filename: str, when: str = ..., interval: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ..., utc: bool = ...) -> None: ... def doRollover(self) -> None: ... class SocketHandler(Handler): retryStart: float retryFactor: float retryMax: float if sys.version_info >= (3, 4): def __init__(self, host: str, port: Optional[int]) -> None: ... else: def __init__(self, host: str, port: int) -> None: ... def makeSocket(self) -> SocketType: ... def makePickle(self, record: LogRecord) -> bytes: ... def send(self, packet: bytes) -> None: ... def createSocket(self) -> None: ... class DatagramHandler(SocketHandler): ... class SysLogHandler(Handler): LOG_ALERT: int LOG_CRIT: int LOG_DEBUG: int LOG_EMERG: int LOG_ERR: int LOG_INFO: int LOG_NOTICE: int LOG_WARNING: int LOG_AUTH: int LOG_AUTHPRIV: int LOG_CRON: int LOG_DAEMON: int LOG_FTP: int LOG_KERN: int LOG_LPR: int LOG_MAIL: int LOG_NEWS: int LOG_SYSLOG: int LOG_USER: int LOG_UUCP: int LOG_LOCAL0: int LOG_LOCAL1: int LOG_LOCAL2: int LOG_LOCAL3: int LOG_LOCAL4: int LOG_LOCAL5: int LOG_LOCAL6: int LOG_LOCAL7: int def __init__(self, address: Union[Tuple[str, int], str] = ..., facility: int = ..., socktype: _SocketKind = ...) -> None: ... def encodePriority(self, facility: Union[int, str], priority: Union[int, str]) -> int: ... def mapPriority(self, levelName: str) -> str: ... 
class NTEventLogHandler(Handler): def __init__(self, appname: str, dllname: str = ..., logtype: str = ...) -> None: ... def getEventCategory(self, record: LogRecord) -> int: ... # TODO correct return value? def getEventType(self, record: LogRecord) -> int: ... def getMessageID(self, record: LogRecord) -> int: ... class SMTPHandler(Handler): # TODO `secure` can also be an empty tuple if sys.version_info >= (3,): def __init__(self, mailhost: Union[str, Tuple[str, int]], fromaddr: str, toaddrs: List[str], subject: str, credentials: Optional[Tuple[str, str]] = ..., secure: Union[Tuple[str], Tuple[str, str], None] = ..., timeout: float = ...) -> None: ... else: def __init__(self, mailhost: Union[str, Tuple[str, int]], fromaddr: str, toaddrs: List[str], subject: str, credentials: Optional[Tuple[str, str]] = ..., secure: Union[Tuple[str], Tuple[str, str], None] = ...) -> None: ... def getSubject(self, record: LogRecord) -> str: ... class BufferingHandler(Handler): buffer: List[LogRecord] def __init__(self, capacity: int) -> None: ... def shouldFlush(self, record: LogRecord) -> bool: ... class MemoryHandler(BufferingHandler): def __init__(self, capacity: int, flushLevel: int = ..., target: Optional[Handler] = ...) -> None: ... def setTarget(self, target: Handler) -> None: ... class HTTPHandler(Handler): if sys.version_info >= (3, 5): def __init__(self, host: str, url: str, method: str = ..., secure: bool = ..., credentials: Optional[Tuple[str, str]] = ..., context: Optional[ssl.SSLContext] = ...) -> None: ... elif sys.version_info >= (3,): def __init__(self, host: str, url: str, method: str = ..., secure: bool = ..., credentials: Optional[Tuple[str, str]] = ...) -> None: ... else: def __init__(self, host: str, url: str, method: str = ...) -> None: ... def mapLogRecord(self, record: LogRecord) -> Dict[str, Any]: ... 
if sys.version_info >= (3,): class QueueHandler(Handler): if sys.version_info >= (3, 7): def __init__(self, queue: Union[SimpleQueue[Any], Queue[Any]]) -> None: ... else: def __init__(self, queue: Queue[Any]) -> None: ... def prepare(self, record: LogRecord) -> Any: ... def enqueue(self, record: LogRecord) -> None: ... class QueueListener: if sys.version_info >= (3, 7): def __init__(self, queue: Union[SimpleQueue[Any], Queue[Any]], *handlers: Handler, respect_handler_level: bool = ...) -> None: ... elif sys.version_info >= (3, 5): def __init__(self, queue: Queue[Any], *handlers: Handler, respect_handler_level: bool = ...) -> None: ... else: def __init__(self, queue: Queue, *handlers: Handler) -> None: ... def dequeue(self, block: bool) -> LogRecord: ... def prepare(self, record: LogRecord) -> Any: ... def start(self) -> None: ... def stop(self) -> None: ... def enqueue_sentinel(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/macpath.pyi0000644€tŠÔÚ€2›s®0000001474713576752252026105 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os.path # Ron Murawski import os import sys from typing import overload, List, Any, AnyStr, Sequence, Tuple, TypeVar, Union, Text, Callable, Optional if sys.version_info < (3, 8): _T = TypeVar('_T') if sys.version_info >= (3, 6): from builtins import _PathLike _PathType = Union[bytes, Text, _PathLike] _StrPath = Union[Text, _PathLike[Text]] _BytesPath = Union[bytes, _PathLike[bytes]] else: _PathType = Union[bytes, Text] _StrPath = Text _BytesPath = bytes # ----- os.path variables ----- supports_unicode_filenames: bool # aliases (also in os) curdir: str pardir: str sep: str altsep: Optional[str] extsep: str pathsep: str defpath: str devnull: str # ----- os.path function stubs ----- if sys.version_info >= (3, 6): # Overloads are necessary to work around python/mypy#3644. @overload def abspath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def abspath(path: AnyStr) -> AnyStr: ... 
@overload def basename(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def basename(path: AnyStr) -> AnyStr: ... @overload def dirname(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def dirname(path: AnyStr) -> AnyStr: ... @overload def expanduser(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expanduser(path: AnyStr) -> AnyStr: ... @overload def expandvars(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expandvars(path: AnyStr) -> AnyStr: ... @overload def normcase(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normcase(path: AnyStr) -> AnyStr: ... @overload def normpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': @overload def realpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(path: AnyStr) -> AnyStr: ... else: @overload def realpath(filename: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(filename: AnyStr) -> AnyStr: ... else: def abspath(path: AnyStr) -> AnyStr: ... def basename(path: AnyStr) -> AnyStr: ... def dirname(path: AnyStr) -> AnyStr: ... def expanduser(path: AnyStr) -> AnyStr: ... def expandvars(path: AnyStr) -> AnyStr: ... def normcase(path: AnyStr) -> AnyStr: ... def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': def realpath(path: AnyStr) -> AnyStr: ... else: def realpath(filename: AnyStr) -> AnyStr: ... if sys.version_info >= (3, 6): # In reality it returns str for sequences of _StrPath and bytes for sequences # of _BytesPath, but mypy does not accept such a signature. def commonpath(paths: Sequence[_PathType]) -> Any: ... elif sys.version_info >= (3, 5): def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ... # NOTE: Empty lists results in '' (str) regardless of contained type. # Also, in Python 2 mixed sequences of Text and bytes results in either Text or bytes # So, fall back to Any def commonprefix(list: Sequence[_PathType]) -> Any: ... 
if sys.version_info >= (3, 3): def exists(path: Union[_PathType, int]) -> bool: ... else: def exists(path: _PathType) -> bool: ... def lexists(path: _PathType) -> bool: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(path: _PathType) -> float: ... def getmtime(path: _PathType) -> float: ... def getctime(path: _PathType) -> float: ... def getsize(path: _PathType) -> int: ... def isabs(path: _PathType) -> bool: ... def isfile(path: _PathType) -> bool: ... def isdir(path: _PathType) -> bool: ... def islink(path: _PathType) -> bool: ... def ismount(path: _PathType) -> bool: ... if sys.version_info < (3, 0): # Make sure signatures are disjunct, and allow combinations of bytes and unicode. # (Since Python 2 allows that, too) # Note that e.g. os.path.join("a", "b", "c", "d", u"e") will still result in # a type error. @overload def join(__p1: bytes, *p: bytes) -> bytes: ... @overload def join(__p1: bytes, __p2: bytes, __p3: bytes, __p4: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: bytes, __p3: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: Text, *p: _PathType) -> Text: ... @overload def join(__p1: Text, *p: _PathType) -> Text: ... elif sys.version_info >= (3, 6): # Mypy complains that the signatures overlap (same for relpath below), but things seem to behave correctly anyway. @overload def join(path: _StrPath, *paths: _StrPath) -> Text: ... @overload def join(path: _BytesPath, *paths: _BytesPath) -> bytes: ... else: def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ... @overload def relpath(path: _BytesPath, start: Optional[_BytesPath] = ...) -> bytes: ... @overload def relpath(path: _StrPath, start: Optional[_StrPath] = ...) -> Text: ... def samefile(path1: _PathType, path2: _PathType) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... def samestat(stat1: os.stat_result, stat2: os.stat_result) -> bool: ... 
if sys.version_info >= (3, 6): @overload def split(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... else: def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... if sys.version_info < (3,): def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/mailbox.pyi0000644€tŠÔÚ€2›s®0000001726613576752252026122 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import Optional, Union, Text, AnyStr, Callable, IO, Any, Iterator, List, Tuple, TypeVar, Protocol, Dict, Sequence, Iterable, Generic, Type, Mapping, overload from types import TracebackType import sys import email if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal if sys.version_info >= (3, 6): from builtins import _PathLike _PathType = Union[bytes, Text, _PathLike] else: _PathType = Union[bytes, Text] _T = TypeVar("_T") _MessageType = TypeVar("_MessageType", bound=Message) _MessageData = Union[email.message.Message, bytes, str, IO[str], IO[bytes]] class _HasIteritems(Protocol): def iteritems(self) -> Iterator[Tuple[str, _MessageData]]: ... class _HasItems(Protocol): def items(self) -> Iterator[Tuple[str, _MessageData]]: ... linesep: bytes class Mailbox(Generic[_MessageType]): _path: Union[bytes, str] # undocumented _factory: Optional[Callable[[IO[Any]], _MessageType]] # undocumented def __init__(self, path: _PathType, factory: Optional[Callable[[IO[Any]], _MessageType]] = ..., create: bool = ...) -> None: ... 
def add(self, message: _MessageData) -> str: ... def remove(self, key: str) -> None: ... def __delitem__(self, key: str) -> None: ... def discard(self, key: str) -> None: ... def __setitem__(self, key: str, message: _MessageData) -> None: ... @overload def get(self, key: str, default: None = ...) -> Optional[_MessageType]: ... @overload def get(self, key: str, default: _T) -> Union[_MessageType, _T]: ... def __getitem__(self, key: str) -> _MessageType: ... def get_message(self, key: str) -> _MessageType: ... def get_string(self, key: str) -> str: ... def get_bytes(self, key: str) -> bytes: ... # As '_ProxyFile' doesn't implement the full IO spec, and BytesIO is incompatible with it, get_file return is Any here def get_file(self, key: str) -> Any: ... def iterkeys(self) -> Iterator[str]: ... def keys(self) -> List[str]: ... def itervalues(self) -> Iterator[_MessageType]: ... def __iter__(self) -> Iterator[_MessageType]: ... def values(self) -> List[_MessageType]: ... def iteritems(self) -> Iterator[Tuple[str, _MessageType]]: ... def items(self) -> List[Tuple[str, _MessageType]]: ... def __contains__(self, key: str) -> bool: ... def __len__(self) -> int: ... def clear(self) -> None: ... @overload def pop(self, key: str, default: None = ...) -> Optional[_MessageType]: ... @overload def pop(self, key: str, default: _T = ...) -> Union[_MessageType, _T]: ... def popitem(self) -> Tuple[str, _MessageType]: ... def update(self, arg: Optional[Union[_HasIteritems, _HasItems, Iterable[Tuple[str, _MessageData]]]] = ...) -> None: ... def flush(self) -> None: ... def lock(self) -> None: ... def unlock(self) -> None: ... def close(self) -> None: ... class Maildir(Mailbox[MaildirMessage]): colon: str def __init__(self, dirname: _PathType, factory: Optional[Callable[[IO[Any]], MaildirMessage]] = ..., create: bool = ...) -> None: ... def get_file(self, key: str) -> _ProxyFile[bytes]: ... def list_folders(self) -> List[str]: ... def get_folder(self, folder: Text) -> Maildir: ... 
def add_folder(self, folder: Text) -> Maildir: ... def remove_folder(self, folder: Text) -> None: ... def clean(self) -> None: ... def next(self) -> Optional[str]: ... class _singlefileMailbox(Mailbox[_MessageType]): ... class _mboxMMDF(_singlefileMailbox[_MessageType]): def get_file(self, key: str) -> _PartialFile[bytes]: ... class mbox(_mboxMMDF[mboxMessage]): def __init__(self, dirname: _PathType, factory: Optional[Callable[[IO[Any]], mboxMessage]] = ..., create: bool = ...) -> None: ... class MMDF(_mboxMMDF[MMDFMessage]): def __init__(self, dirname: _PathType, factory: Optional[Callable[[IO[Any]], MMDFMessage]] = ..., create: bool = ...) -> None: ... class MH(Mailbox[MHMessage]): def __init__(self, dirname: _PathType, factory: Optional[Callable[[IO[Any]], MHMessage]] = ..., create: bool = ...) -> None: ... def get_file(self, key: str) -> _ProxyFile[bytes]: ... def list_folders(self) -> List[str]: ... def get_folder(self, folder: _PathType) -> MH: ... def add_folder(self, folder: _PathType) -> MH: ... def remove_folder(self, folder: _PathType) -> None: ... def get_sequences(self) -> Dict[str, List[int]]: ... def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: ... def pack(self) -> None: ... class Babyl(_singlefileMailbox[BabylMessage]): def __init__(self, dirname: _PathType, factory: Optional[Callable[[IO[Any]], BabylMessage]] = ..., create: bool = ...) -> None: ... def get_file(self, key: str) -> IO[bytes]: ... def get_labels(self) -> List[str]: ... class Message(email.message.Message): def __init__(self, message: Optional[_MessageData] = ...) -> None: ... class MaildirMessage(Message): def get_subdir(self) -> str: ... def set_subdir(self, subdir: Literal["new", "cur"]) -> None: ... def get_flags(self) -> str: ... def set_flags(self, flags: Iterable[str]) -> None: ... def add_flag(self, flag: str) -> None: ... def remove_flag(self, flag: str) -> None: ... def get_date(self) -> int: ... def set_date(self, date: int) -> None: ... 
def get_info(self) -> str: ... def set_info(self, info: str) -> None: ... class _mboxMMDFMessage(Message): def get_from(self) -> str: ... def set_from(self, from_: str, time_: Optional[Union[bool, Tuple[int, int, int, int, int, int, int, int, int]]] = ...) -> None: ... def get_flags(self) -> str: ... def set_flags(self, flags: Iterable[str]) -> None: ... def add_flag(self, flag: str) -> None: ... def remove_flag(self, flag: str) -> None: ... class mboxMessage(_mboxMMDFMessage): ... class MHMessage(Message): def get_sequences(self) -> List[str]: ... def set_sequences(self, sequences: Iterable[str]) -> None: ... def add_sequence(self, sequence: str) -> None: ... def remove_sequence(self, sequence: str) -> None: ... class BabylMessage(Message): def get_labels(self) -> List[str]: ... def set_labels(self, labels: Iterable[str]) -> None: ... def add_label(self, label: str) -> None: ... def remove_label(self, label: str) -> None: ... def get_visible(self) -> Message: ... def set_visible(self, visible: _MessageData) -> None: ... def update_visible(self) -> None: ... class MMDFMessage(_mboxMMDFMessage): ... class _ProxyFile(Generic[AnyStr]): def __init__(self, f: IO[AnyStr], pos: Optional[int] = ...) -> None: ... def read(self, size: Optional[int] = ...) -> AnyStr: ... def read1(self, size: Optional[int] = ...) -> AnyStr: ... def readline(self, size: Optional[int] = ...) -> AnyStr: ... def readlines(self, sizehint: Optional[int] = ...) -> List[AnyStr]: ... def __iter__(self) -> Iterator[AnyStr]: ... def tell(self) -> int: ... def seek(self, offset: int, whence: int = ...) -> None: ... def close(self) -> None: ... def __enter__(self) -> _ProxyFile[AnyStr]: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType]) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def seekable(self) -> bool: ... def flush(self) -> None: ... @property def closed(self) -> bool: ... 
class _PartialFile(_ProxyFile[AnyStr]): def __init__(self, f: IO[AnyStr], start: Optional[int] = ..., stop: Optional[int] = ...) -> None: ... class Error(Exception): ... class NoSuchMailboxError(Error): ... class NotEmptyError(Error): ... class ExternalClashError(Error): ... class FormatError(Error): ... mypy-0.761/mypy/typeshed/stdlib/2and3/mailcap.pyi0000644€tŠÔÚ€2›s®0000000050513576752252026061 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import Sequence, Dict, List, Union, Tuple, Optional, Mapping _Cap = Dict[str, Union[str, int]] def findmatch(caps: Mapping[str, List[_Cap]], MIMEtype: str, key: str = ..., filename: str = ..., plist: Sequence[str] = ...) -> Tuple[Optional[str], Optional[_Cap]]: ... def getcaps() -> Dict[str, List[_Cap]]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/marshal.pyi0000644€tŠÔÚ€2›s®0000000036013576752252026101 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO version: int def dump(value: Any, file: IO[Any], version: int = ...) -> None: ... def load(file: IO[Any]) -> Any: ... def dumps(value: Any, version: int = ...) -> bytes: ... def loads(string: bytes) -> Any: ... mypy-0.761/mypy/typeshed/stdlib/2and3/math.pyi0000644€tŠÔÚ€2›s®0000000631013576752252025404 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for math # See: http://docs.python.org/2/library/math.html from typing import Tuple, Iterable, SupportsFloat, SupportsInt, overload import sys e: float pi: float if sys.version_info >= (3, 5): inf: float nan: float if sys.version_info >= (3, 6): tau: float def acos(x: SupportsFloat) -> float: ... def acosh(x: SupportsFloat) -> float: ... def asin(x: SupportsFloat) -> float: ... def asinh(x: SupportsFloat) -> float: ... def atan(x: SupportsFloat) -> float: ... def atan2(y: SupportsFloat, x: SupportsFloat) -> float: ... def atanh(x: SupportsFloat) -> float: ... if sys.version_info >= (3,): def ceil(x: SupportsFloat) -> int: ... else: def ceil(x: SupportsFloat) -> float: ... 
def copysign(x: SupportsFloat, y: SupportsFloat) -> float: ... def cos(x: SupportsFloat) -> float: ... def cosh(x: SupportsFloat) -> float: ... def degrees(x: SupportsFloat) -> float: ... if sys.version_info >= (3, 8): def dist(__p: Iterable[SupportsFloat], __q: Iterable[SupportsFloat]) -> float: ... def erf(x: SupportsFloat) -> float: ... def erfc(x: SupportsFloat) -> float: ... def exp(x: SupportsFloat) -> float: ... def expm1(x: SupportsFloat) -> float: ... def fabs(x: SupportsFloat) -> float: ... def factorial(x: SupportsInt) -> int: ... if sys.version_info >= (3,): def floor(x: SupportsFloat) -> int: ... else: def floor(x: SupportsFloat) -> float: ... def fmod(x: SupportsFloat, y: SupportsFloat) -> float: ... def frexp(x: SupportsFloat) -> Tuple[float, int]: ... def fsum(iterable: Iterable[float]) -> float: ... def gamma(x: SupportsFloat) -> float: ... if sys.version_info >= (3, 5): def gcd(a: int, b: int) -> int: ... if sys.version_info >= (3, 8): def hypot(*coordinates: SupportsFloat) -> float: ... else: def hypot(__x: SupportsFloat, __y: SupportsFloat) -> float: ... if sys.version_info >= (3, 5): def isclose(a: SupportsFloat, b: SupportsFloat, rel_tol: SupportsFloat = ..., abs_tol: SupportsFloat = ...) -> bool: ... def isinf(x: SupportsFloat) -> bool: ... if sys.version_info >= (3,): def isfinite(x: SupportsFloat) -> bool: ... def isnan(x: SupportsFloat) -> bool: ... if sys.version_info >= (3, 8): def isqrt(__n: int) -> int: ... def ldexp(x: SupportsFloat, i: int) -> float: ... def lgamma(x: SupportsFloat) -> float: ... def log(x: SupportsFloat, base: SupportsFloat = ...) -> float: ... def log10(x: SupportsFloat) -> float: ... def log1p(x: SupportsFloat) -> float: ... if sys.version_info >= (3, 3): def log2(x: SupportsFloat) -> float: ... def modf(x: SupportsFloat) -> Tuple[float, float]: ... def pow(x: SupportsFloat, y: SupportsFloat) -> float: ... if sys.version_info >= (3, 8): @overload def prod(__iterable: Iterable[int], *, start: int = ...) -> int: ... 
# type: ignore @overload def prod(__iterable: Iterable[SupportsFloat], *, start: SupportsFloat = ...) -> float: ... def radians(x: SupportsFloat) -> float: ... if sys.version_info >= (3, 7): def remainder(x: SupportsFloat, y: SupportsFloat) -> float: ... def sin(x: SupportsFloat) -> float: ... def sinh(x: SupportsFloat) -> float: ... def sqrt(x: SupportsFloat) -> float: ... def tan(x: SupportsFloat) -> float: ... def tanh(x: SupportsFloat) -> float: ... def trunc(x: SupportsFloat) -> int: ... mypy-0.761/mypy/typeshed/stdlib/2and3/mimetypes.pyi0000644€tŠÔÚ€2›s®0000000304713576752252026473 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for mimetypes from typing import Dict, IO, List, Optional, Sequence, Text, Tuple import sys def guess_type(url: Text, strict: bool = ...) -> Tuple[Optional[str], Optional[str]]: ... def guess_all_extensions(type: str, strict: bool = ...) -> List[str]: ... def guess_extension(type: str, strict: bool = ...) -> Optional[str]: ... def init(files: Optional[Sequence[str]] = ...) -> None: ... def read_mime_types(filename: str) -> Optional[Dict[str, str]]: ... def add_type(type: str, ext: str, strict: bool = ...) -> None: ... inited: bool knownfiles: List[str] suffix_map: Dict[str, str] encodings_map: Dict[str, str] types_map: Dict[str, str] common_types: Dict[str, str] class MimeTypes: suffix_map: Dict[str, str] encodings_map: Dict[str, str] types_map: Tuple[Dict[str, str], Dict[str, str]] types_map_inv: Tuple[Dict[str, str], Dict[str, str]] def __init__(self, filenames: Tuple[str, ...] = ..., strict: bool = ...) -> None: ... def guess_extension(self, type: str, strict: bool = ...) -> Optional[str]: ... def guess_type(self, url: str, strict: bool = ...) -> Tuple[Optional[str], Optional[str]]: ... def guess_all_extensions(self, type: str, strict: bool = ...) -> List[str]: ... def read(self, filename: str, strict: bool = ...) -> None: ... def readfp(self, fp: IO[str], strict: bool = ...) -> None: ... 
if sys.platform == 'win32': def read_windows_registry(self, strict: bool = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/mmap.pyi0000644€tŠÔÚ€2›s®0000000714613576752252025415 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import (Optional, Sequence, Union, Generic, overload, Iterable, Iterator, Sized, ContextManager, AnyStr) ACCESS_DEFAULT: int ACCESS_READ: int ACCESS_WRITE: int ACCESS_COPY: int ALLOCATIONGRANULARITY: int if sys.platform != 'win32': MAP_ANON: int MAP_ANONYMOUS: int MAP_DENYWRITE: int MAP_EXECUTABLE: int MAP_PRIVATE: int MAP_SHARED: int PROT_EXEC: int PROT_READ: int PROT_WRITE: int PAGESIZE: int class _mmap(Generic[AnyStr]): if sys.platform == 'win32': def __init__(self, fileno: int, length: int, tagname: Optional[str] = ..., access: int = ..., offset: int = ...) -> None: ... else: def __init__(self, fileno: int, length: int, flags: int = ..., prot: int = ..., access: int = ..., offset: int = ...) -> None: ... def close(self) -> None: ... def find(self, sub: AnyStr, start: int = ..., end: int = ...) -> int: ... if sys.version_info >= (3, 8): def flush(self, offset: int = ..., size: int = ...) -> None: ... else: def flush(self, offset: int = ..., size: int = ...) -> int: ... def move(self, dest: int, src: int, count: int) -> None: ... def read(self, n: int = ...) -> AnyStr: ... def read_byte(self) -> AnyStr: ... def readline(self) -> AnyStr: ... def resize(self, newsize: int) -> None: ... def seek(self, pos: int, whence: int = ...) -> None: ... def size(self) -> int: ... def tell(self) -> int: ... def write(self, bytes: AnyStr) -> None: ... def write_byte(self, byte: AnyStr) -> None: ... def __len__(self) -> int: ... if sys.version_info >= (3,): class mmap(_mmap[bytes], ContextManager[mmap], Iterable[bytes], Sized): closed: bool if sys.version_info >= (3, 8): def madvise(self, option: int, start: int = ..., length: int = ...) -> None: ... def rfind(self, sub: bytes, start: int = ..., stop: int = ...) -> int: ... 
@overload def __getitem__(self, index: int) -> int: ... @overload def __getitem__(self, index: slice) -> bytes: ... def __delitem__(self, index: Union[int, slice]) -> None: ... @overload def __setitem__(self, index: int, object: int) -> None: ... @overload def __setitem__(self, index: slice, object: bytes) -> None: ... # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and # __len__, so we claim that there is also an __iter__ to help type checkers. def __iter__(self) -> Iterator[bytes]: ... else: class mmap(_mmap[bytes], Sequence[bytes]): def rfind(self, string: bytes, start: int = ..., stop: int = ...) -> int: ... def __getitem__(self, index: Union[int, slice]) -> bytes: ... def __getslice__(self, i: Optional[int], j: Optional[int]) -> bytes: ... def __delitem__(self, index: Union[int, slice]) -> None: ... def __setitem__(self, index: Union[int, slice], object: bytes) -> None: ... if sys.version_info >= (3, 8): MADV_NORMAL: int MADV_RANDOM: int MADV_SEQUENTIAL: int MADV_WILLNEED: int MADV_DONTNEED: int MADV_REMOVE: int MADV_DONTFORK: int MADV_DOFORK: int MADV_HWPOISON: int MADV_MERGEABLE: int MADV_UNMERGEABLE: int MADV_SOFT_OFFLINE: int MADV_HUGEPAGE: int MADV_NOHUGEPAGE: int MADV_DONTDUMP: int MADV_DODUMP: int MADV_FREE: int MADV_NOSYNC: int MADV_AUTOSYNC: int MADV_NOCORE: int MADV_CORE: int MADV_PROTECT: int mypy-0.761/mypy/typeshed/stdlib/2and3/modulefinder.pyi0000644€tŠÔÚ€2›s®0000000560613576752252027137 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import Optional, Container, Dict, Sequence, Tuple, List, Any, Iterator, IO, Iterable from types import CodeType LOAD_CONST: int # undocumented IMPORT_NAME: int # undocumented STORE_NAME: int # undocumented STORE_GLOBAL: int # undocumented STORE_OPS: Tuple[int, int] # undocumented EXTENDED_ARG: int # undocumented packagePathMap: Dict[str, List[str]] # undocumented def AddPackagePath(packagename: str, path: str) -> None: ... 
replacePackageMap: Dict[str, str] # undocumented def ReplacePackage(oldname: str, newname: str) -> None: ... class Module: # undocumented def __init__(self, name: str, file: Optional[str] = ..., path: Optional[str] = ...) -> None: ... def __repr__(self) -> str: ... class ModuleFinder: modules: Dict[str, Module] def __init__(self, path: Optional[List[str]] = ..., debug: int = ..., excludes: Container[str] = ..., replace_paths: Sequence[Tuple[str, str]] = ...) -> None: ... def msg(self, level: int, str: str, *args: Any) -> None: ... # undocumented def msgin(self, *args: Any) -> None: ... # undocumented def msgout(self, *args: Any) -> None: ... # undocumented def run_script(self, pathname: str) -> None: ... def load_file(self, pathname: str) -> None: ... # undocumented def import_hook(self, name: str, caller: Optional[Module] = ..., fromlist: Optional[List[str]] = ..., level: int = ...) -> Optional[Module]: ... # undocumented def determine_parent(self, caller: Optional[Module], level: int = ...) -> Optional[Module]: ... # undocumented def find_head_package(self, parent: Module, name: str) -> Tuple[Module, str]: ... # undocumented def load_tail(self, q: Module, tail: str) -> Module: ... # undocumented def ensure_fromlist(self, m: Module, fromlist: Iterable[str], recursive: int = ...) -> None: ... # undocumented def find_all_submodules(self, m: Module) -> Iterable[str]: ... # undocumented def import_module(self, partname: str, fqname: str, parent: Module) -> Optional[Module]: ... # undocumented def load_module(self, fqname: str, fp: IO[str], pathname: str, file_info: Tuple[str, str, str]) -> Module: ... # undocumented def scan_opcodes(self, co: CodeType) -> Iterator[Tuple[str, Tuple[Any, ...]]]: ... # undocumented def scan_code(self, co: CodeType, m: Module) -> None: ... # undocumented def load_package(self, fqname: str, pathname: str) -> Module: ... # undocumented def add_module(self, fqname: str) -> Module: ... 
# undocumented def find_module(self, name: str, path: Optional[str], parent: Optional[Module] = ...) -> Tuple[Optional[IO[Any]], Optional[str], Tuple[str, str, int]]: ... # undocumented def report(self) -> None: ... def any_missing(self) -> List[str]: ... # undocumented def any_missing_maybe(self) -> Tuple[List[str], List[str]]: ... # undocumented def replace_paths_in_code(self, co: CodeType) -> CodeType: ... # undocumented mypy-0.761/mypy/typeshed/stdlib/2and3/netrc.pyi0000644€tŠÔÚ€2›s®0000000072213576752252025567 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr, Dict, List, Optional, Tuple, overload class NetrcParseError(Exception): filename: Optional[str] lineno: Optional[int] msg: str # (login, account, password) tuple _NetrcTuple = Tuple[str, Optional[str], Optional[str]] class netrc: hosts: Dict[str, _NetrcTuple] macros: Dict[str, List[str]] def __init__(self, file: str = ...) -> None: ... def authenticators(self, host: str) -> Optional[_NetrcTuple]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/nis.pyi0000644€tŠÔÚ€2›s®0000000050313576752252025242 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Dict, List if sys.platform != 'win32': def cat(map: str, domain: str = ...) -> Dict[str, str]: ... def get_default_domain() -> str: ... def maps(domain: str = ...) -> List[str]: ... def match(key: str, map: str, domain: str = ...) -> str: ... class error(Exception): ... mypy-0.761/mypy/typeshed/stdlib/2and3/ntpath.pyi0000644€tŠÔÚ€2›s®0000001412413576752252025753 0ustar jukkaDROPBOX\Domain Users00000000000000# NB: path.pyi and stdlib/2 and stdlib/3 must remain consistent! 
# Stubs for os.path # Ron Murawski import os import sys from typing import overload, List, Any, AnyStr, Sequence, Tuple, TypeVar, Union, Text, Callable, Optional _T = TypeVar('_T') if sys.version_info >= (3, 6): from builtins import _PathLike _PathType = Union[bytes, Text, _PathLike] _StrPath = Union[Text, _PathLike[Text]] _BytesPath = Union[bytes, _PathLike[bytes]] else: _PathType = Union[bytes, Text] _StrPath = Text _BytesPath = bytes # ----- os.path variables ----- supports_unicode_filenames: bool # aliases (also in os) curdir: str pardir: str sep: str if sys.platform == 'win32': altsep: str else: altsep: Optional[str] extsep: str pathsep: str defpath: str devnull: str # ----- os.path function stubs ----- if sys.version_info >= (3, 6): # Overloads are necessary to work around python/mypy#3644. @overload def abspath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def abspath(path: AnyStr) -> AnyStr: ... @overload def basename(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def basename(path: AnyStr) -> AnyStr: ... @overload def dirname(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def dirname(path: AnyStr) -> AnyStr: ... @overload def expanduser(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expanduser(path: AnyStr) -> AnyStr: ... @overload def expandvars(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expandvars(path: AnyStr) -> AnyStr: ... @overload def normcase(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normcase(path: AnyStr) -> AnyStr: ... @overload def normpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': @overload def realpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(path: AnyStr) -> AnyStr: ... else: @overload def realpath(filename: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(filename: AnyStr) -> AnyStr: ... else: def abspath(path: AnyStr) -> AnyStr: ... def basename(path: AnyStr) -> AnyStr: ... 
def dirname(path: AnyStr) -> AnyStr: ... def expanduser(path: AnyStr) -> AnyStr: ... def expandvars(path: AnyStr) -> AnyStr: ... def normcase(path: AnyStr) -> AnyStr: ... def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': def realpath(path: AnyStr) -> AnyStr: ... else: def realpath(filename: AnyStr) -> AnyStr: ... if sys.version_info >= (3, 6): # In reality it returns str for sequences of _StrPath and bytes for sequences # of _BytesPath, but mypy does not accept such a signature. def commonpath(paths: Sequence[_PathType]) -> Any: ... elif sys.version_info >= (3, 5): def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ... # NOTE: Empty lists results in '' (str) regardless of contained type. # Also, in Python 2 mixed sequences of Text and bytes results in either Text or bytes # So, fall back to Any def commonprefix(list: Sequence[_PathType]) -> Any: ... if sys.version_info >= (3, 3): def exists(path: Union[_PathType, int]) -> bool: ... else: def exists(path: _PathType) -> bool: ... def lexists(path: _PathType) -> bool: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(path: _PathType) -> float: ... def getmtime(path: _PathType) -> float: ... def getctime(path: _PathType) -> float: ... def getsize(path: _PathType) -> int: ... def isabs(path: _PathType) -> bool: ... def isfile(path: _PathType) -> bool: ... def isdir(path: _PathType) -> bool: ... def islink(path: _PathType) -> bool: ... def ismount(path: _PathType) -> bool: ... if sys.version_info < (3, 0): # Make sure signatures are disjunct, and allow combinations of bytes and unicode. # (Since Python 2 allows that, too) # Note that e.g. os.path.join("a", "b", "c", "d", u"e") will still result in # a type error. @overload def join(__p1: bytes, *p: bytes) -> bytes: ... @overload def join(__p1: bytes, __p2: bytes, __p3: bytes, __p4: Text, *p: _PathType) -> Text: ... 
@overload def join(__p1: bytes, __p2: bytes, __p3: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: Text, *p: _PathType) -> Text: ... @overload def join(__p1: Text, *p: _PathType) -> Text: ... elif sys.version_info >= (3, 6): # Mypy complains that the signatures overlap (same for relpath below), but things seem to behave correctly anyway. @overload def join(path: _StrPath, *paths: _StrPath) -> Text: ... @overload def join(path: _BytesPath, *paths: _BytesPath) -> bytes: ... else: def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ... @overload def relpath(path: _BytesPath, start: Optional[_BytesPath] = ...) -> bytes: ... @overload def relpath(path: _StrPath, start: Optional[_StrPath] = ...) -> Text: ... def samefile(path1: _PathType, path2: _PathType) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... def samestat(stat1: os.stat_result, stat2: os.stat_result) -> bool: ... if sys.version_info >= (3, 6): @overload def split(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... else: def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... if sys.platform == 'win32': def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # deprecated if sys.version_info < (3,): def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/numbers.pyi0000644€tŠÔÚ€2›s®0000000774113576752252026137 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for numbers (Python 3.5) # See https://docs.python.org/2.7/library/numbers.html # and https://docs.python.org/3/library/numbers.html # # Note: these stubs are incomplete. The more complex type # signatures are currently omitted. from typing import Any, Optional, SupportsFloat, overload from abc import ABCMeta, abstractmethod import sys class Number(metaclass=ABCMeta): @abstractmethod def __hash__(self) -> int: ... class Complex(Number): @abstractmethod def __complex__(self) -> complex: ... if sys.version_info >= (3, 0): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... @property @abstractmethod def real(self): ... @property @abstractmethod def imag(self): ... @abstractmethod def __add__(self, other): ... @abstractmethod def __radd__(self, other): ... @abstractmethod def __neg__(self): ... @abstractmethod def __pos__(self): ... def __sub__(self, other): ... def __rsub__(self, other): ... @abstractmethod def __mul__(self, other): ... @abstractmethod def __rmul__(self, other): ... if sys.version_info < (3, 0): @abstractmethod def __div__(self, other): ... @abstractmethod def __rdiv__(self, other): ... @abstractmethod def __truediv__(self, other): ... @abstractmethod def __rtruediv__(self, other): ... @abstractmethod def __pow__(self, exponent): ... @abstractmethod def __rpow__(self, base): ... def __abs__(self): ... def conjugate(self): ... def __eq__(self, other: object) -> bool: ... if sys.version_info < (3, 0): def __ne__(self, other: object) -> bool: ... class Real(Complex, SupportsFloat): @abstractmethod def __float__(self) -> float: ... @abstractmethod def __trunc__(self) -> int: ... if sys.version_info >= (3, 0): @abstractmethod def __floor__(self) -> int: ... @abstractmethod def __ceil__(self) -> int: ... @abstractmethod @overload def __round__(self, ndigits: None = ...): ... 
@abstractmethod @overload def __round__(self, ndigits: int): ... def __divmod__(self, other): ... def __rdivmod__(self, other): ... @abstractmethod def __floordiv__(self, other): ... @abstractmethod def __rfloordiv__(self, other): ... @abstractmethod def __mod__(self, other): ... @abstractmethod def __rmod__(self, other): ... @abstractmethod def __lt__(self, other) -> bool: ... @abstractmethod def __le__(self, other) -> bool: ... def __complex__(self) -> complex: ... @property def real(self): ... @property def imag(self): ... def conjugate(self): ... class Rational(Real): @property @abstractmethod def numerator(self) -> int: ... @property @abstractmethod def denominator(self) -> int: ... def __float__(self) -> float: ... class Integral(Rational): if sys.version_info >= (3, 0): @abstractmethod def __int__(self) -> int: ... else: @abstractmethod def __long__(self) -> long: ... def __index__(self) -> int: ... @abstractmethod def __pow__(self, exponent, modulus: Optional[Any] = ...): ... @abstractmethod def __lshift__(self, other): ... @abstractmethod def __rlshift__(self, other): ... @abstractmethod def __rshift__(self, other): ... @abstractmethod def __rrshift__(self, other): ... @abstractmethod def __and__(self, other): ... @abstractmethod def __rand__(self, other): ... @abstractmethod def __xor__(self, other): ... @abstractmethod def __rxor__(self, other): ... @abstractmethod def __or__(self, other): ... @abstractmethod def __ror__(self, other): ... @abstractmethod def __invert__(self): ... def __float__(self) -> float: ... @property def numerator(self) -> int: ... @property def denominator(self) -> int: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/opcode.pyi0000644€tŠÔÚ€2›s®0000000071713576752252025731 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Dict, Optional, Sequence import sys cmp_op: Sequence[str] hasconst: List[int] hasname: List[int] hasjrel: List[int] hasjabs: List[int] haslocal: List[int] hascompare: List[int] hasfree: List[int] opname: List[str] opmap: Dict[str, int] HAVE_ARGUMENT: int EXTENDED_ARG: int if sys.version_info >= (3, 4): def stack_effect(opcode: int, oparg: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 6): hasnargs: List[int] mypy-0.761/mypy/typeshed/stdlib/2and3/operator.pyi0000644€tŠÔÚ€2›s®0000001456713576752252026323 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for operator from typing import ( Any, Callable, Container, Mapping, MutableMapping, MutableSequence, Sequence, SupportsAbs, Tuple, TypeVar, overload, ) import sys _T = TypeVar('_T') _K = TypeVar('_K') _V = TypeVar('_V') def lt(a: Any, b: Any) -> Any: ... def le(a: Any, b: Any) -> Any: ... def eq(a: Any, b: Any) -> Any: ... def ne(a: Any, b: Any) -> Any: ... def ge(a: Any, b: Any) -> Any: ... def gt(a: Any, b: Any) -> Any: ... def __lt__(a: Any, b: Any) -> Any: ... def __le__(a: Any, b: Any) -> Any: ... def __eq__(a: Any, b: Any) -> Any: ... def __ne__(a: Any, b: Any) -> Any: ... def __ge__(a: Any, b: Any) -> Any: ... def __gt__(a: Any, b: Any) -> Any: ... def not_(obj: Any) -> bool: ... def __not__(obj: Any) -> bool: ... def truth(x: Any) -> bool: ... def is_(a: Any, b: Any) -> bool: ... def is_not(a: Any, b: Any) -> bool: ... def abs(x: SupportsAbs[_T]) -> _T: ... def __abs__(a: SupportsAbs[_T]) -> _T: ... def add(a: Any, b: Any) -> Any: ... def __add__(a: Any, b: Any) -> Any: ... def and_(a: Any, b: Any) -> Any: ... def __and__(a: Any, b: Any) -> Any: ... if sys.version_info < (3, ): def div(a: Any, b: Any) -> Any: ... def __div__(a: Any, b: Any) -> Any: ... def floordiv(a: Any, b: Any) -> Any: ... 
def __floordiv__(a: Any, b: Any) -> Any: ... def index(a: Any) -> int: ... def __index__(a: Any) -> int: ... def inv(obj: Any) -> Any: ... def invert(obj: Any) -> Any: ... def __inv__(obj: Any) -> Any: ... def __invert__(obj: Any) -> Any: ... def lshift(a: Any, b: Any) -> Any: ... def __lshift__(a: Any, b: Any) -> Any: ... def mod(a: Any, b: Any) -> Any: ... def __mod__(a: Any, b: Any) -> Any: ... def mul(a: Any, b: Any) -> Any: ... def __mul__(a: Any, b: Any) -> Any: ... if sys.version_info >= (3, 5): def matmul(a: Any, b: Any) -> Any: ... def __matmul__(a: Any, b: Any) -> Any: ... def neg(obj: Any) -> Any: ... def __neg__(obj: Any) -> Any: ... def or_(a: Any, b: Any) -> Any: ... def __or__(a: Any, b: Any) -> Any: ... def pos(obj: Any) -> Any: ... def __pos__(obj: Any) -> Any: ... def pow(a: Any, b: Any) -> Any: ... def __pow__(a: Any, b: Any) -> Any: ... def rshift(a: Any, b: Any) -> Any: ... def __rshift__(a: Any, b: Any) -> Any: ... def sub(a: Any, b: Any) -> Any: ... def __sub__(a: Any, b: Any) -> Any: ... def truediv(a: Any, b: Any) -> Any: ... def __truediv__(a: Any, b: Any) -> Any: ... def xor(a: Any, b: Any) -> Any: ... def __xor__(a: Any, b: Any) -> Any: ... def concat(a: Sequence[_T], b: Sequence[_T]) -> Sequence[_T]: ... def __concat__(a: Sequence[_T], b: Sequence[_T]) -> Sequence[_T]: ... def contains(a: Container[Any], b: Any) -> bool: ... def __contains__(a: Container[Any], b: Any) -> bool: ... def countOf(a: Container[Any], b: Any) -> int: ... @overload def delitem(a: MutableSequence[_T], b: int) -> None: ... @overload def delitem(a: MutableMapping[_K, _V], b: _K) -> None: ... @overload def __delitem__(a: MutableSequence[_T], b: int) -> None: ... @overload def __delitem__(a: MutableMapping[_K, _V], b: _K) -> None: ... if sys.version_info < (3, ): def delslice(a: MutableSequence[Any], b: int, c: int) -> None: ... def __delslice__(a: MutableSequence[Any], b: int, c: int) -> None: ... @overload def getitem(a: Sequence[_T], b: int) -> _T: ... 
@overload def getitem(a: Mapping[_K, _V], b: _K) -> _V: ... @overload def __getitem__(a: Sequence[_T], b: int) -> _T: ... @overload def __getitem__(a: Mapping[_K, _V], b: _K) -> _V: ... if sys.version_info < (3, ): def getslice(a: Sequence[_T], b: int, c: int) -> Sequence[_T]: ... def __getslice__(a: Sequence[_T], b: int, c: int) -> Sequence[_T]: ... def indexOf(a: Sequence[_T], b: _T) -> int: ... if sys.version_info < (3, ): def repeat(a: Any, b: int) -> Any: ... def __repeat__(a: Any, b: int) -> Any: ... if sys.version_info < (3, ): def sequenceIncludes(a: Container[Any], b: Any) -> bool: ... @overload def setitem(a: MutableSequence[_T], b: int, c: _T) -> None: ... @overload def setitem(a: MutableMapping[_K, _V], b: _K, c: _V) -> None: ... @overload def __setitem__(a: MutableSequence[_T], b: int, c: _T) -> None: ... @overload def __setitem__(a: MutableMapping[_K, _V], b: _K, c: _V) -> None: ... if sys.version_info < (3, ): def setslice(a: MutableSequence[_T], b: int, c: int, v: Sequence[_T]) -> None: ... def __setslice__(a: MutableSequence[_T], b: int, c: int, v: Sequence[_T]) -> None: ... if sys.version_info >= (3, 4): def length_hint(obj: Any, default: int = ...) -> int: ... @overload def attrgetter(attr: str) -> Callable[[Any], Any]: ... @overload def attrgetter(*attrs: str) -> Callable[[Any], Tuple[Any, ...]]: ... @overload def itemgetter(item: Any) -> Callable[[Any], Any]: ... @overload def itemgetter(*items: Any) -> Callable[[Any], Tuple[Any, ...]]: ... def methodcaller(name: str, *args: Any, **kwargs: Any) -> Callable[..., Any]: ... def iadd(a: Any, b: Any) -> Any: ... def __iadd__(a: Any, b: Any) -> Any: ... def iand(a: Any, b: Any) -> Any: ... def __iand__(a: Any, b: Any) -> Any: ... def iconcat(a: Any, b: Any) -> Any: ... def __iconcat__(a: Any, b: Any) -> Any: ... if sys.version_info < (3, ): def idiv(a: Any, b: Any) -> Any: ... def __idiv__(a: Any, b: Any) -> Any: ... def ifloordiv(a: Any, b: Any) -> Any: ... 
def __ifloordiv__(a: Any, b: Any) -> Any: ... def ilshift(a: Any, b: Any) -> Any: ... def __ilshift__(a: Any, b: Any) -> Any: ... def imod(a: Any, b: Any) -> Any: ... def __imod__(a: Any, b: Any) -> Any: ... def imul(a: Any, b: Any) -> Any: ... def __imul__(a: Any, b: Any) -> Any: ... if sys.version_info >= (3, 5): def imatmul(a: Any, b: Any) -> Any: ... def __imatmul__(a: Any, b: Any) -> Any: ... def ior(a: Any, b: Any) -> Any: ... def __ior__(a: Any, b: Any) -> Any: ... def ipow(a: Any, b: Any) -> Any: ... def __ipow__(a: Any, b: Any) -> Any: ... if sys.version_info < (3, ): def irepeat(a: Any, b: int) -> Any: ... def __irepeat__(a: Any, b: int) -> Any: ... def irshift(a: Any, b: Any) -> Any: ... def __irshift__(a: Any, b: Any) -> Any: ... def isub(a: Any, b: Any) -> Any: ... def __isub__(a: Any, b: Any) -> Any: ... def itruediv(a: Any, b: Any) -> Any: ... def __itruediv__(a: Any, b: Any) -> Any: ... def ixor(a: Any, b: Any) -> Any: ... def __ixor__(a: Any, b: Any) -> Any: ... if sys.version_info < (3, ): def isCallable(x: Any) -> bool: ... def isMappingType(x: Any) -> bool: ... def isNumberType(x: Any) -> bool: ... def isSequenceType(x: Any) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/optparse.pyi0000644€tŠÔÚ€2›s®0000002344013576752252026313 0ustar jukkaDROPBOX\Domain Users00000000000000# Generated by pytype, with only minor tweaks. Might be incomplete. import sys from typing import Any, AnyStr, Callable, Dict, IO, Iterable, List, Mapping, Optional, Sequence, Tuple, Union # See https://groups.google.com/forum/#!topic/python-ideas/gA1gdj3RZ5g if sys.version_info >= (3,): _Text = str else: _Text = Union[str, unicode] NO_DEFAULT: Tuple[_Text, ...] SUPPRESS_HELP: _Text SUPPRESS_USAGE: _Text def check_builtin(option: Option, opt: Any, value: _Text) -> Any: ... def check_choice(option: Option, opt: Any, value: _Text) -> Any: ... if sys.version_info < (3,): def isbasestring(x: Any) -> bool: ... 
class OptParseError(Exception): msg: _Text def __init__(self, msg: _Text) -> None: ... class BadOptionError(OptParseError): opt_str: _Text def __init__(self, opt_str: _Text) -> None: ... class AmbiguousOptionError(BadOptionError): possibilities: Iterable[_Text] def __init__(self, opt_str: _Text, possibilities: Sequence[_Text]) -> None: ... class OptionError(OptParseError): msg: _Text option_id: _Text def __init__(self, msg: _Text, option: Option) -> None: ... class OptionConflictError(OptionError): ... class OptionValueError(OptParseError): ... class HelpFormatter: NO_DEFAULT_VALUE: _Text _long_opt_fmt: _Text _short_opt_fmt: _Text current_indent: int default_tag: _Text help_position: Any help_width: Any indent_increment: int level: int max_help_position: int option_strings: Dict[Option, _Text] parser: OptionParser short_first: Any width: int def __init__(self, indent_increment: int, max_help_position: int, width: Optional[int], short_first: int) -> None: ... def _format__Text(self, _Text: _Text) -> _Text: ... def dedent(self) -> None: ... def expand_default(self, option: Option) -> _Text: ... def format_description(self, description: _Text) -> _Text: ... def format_epilog(self, epilog) -> _Text: ... def format_heading(self, heading: Any) -> _Text: ... def format_option(self, option: OptionParser) -> _Text: ... def format_option_strings(self, option: OptionParser) -> Any: ... def format_usage(self, usage: Any) -> _Text: ... def indent(self) -> None: ... def set_long_opt_delimiter(self, delim: _Text) -> None: ... def set_parser(self, parser: OptionParser) -> None: ... def set_short_opt_delimiter(self, delim: _Text) -> None: ... def store_option_strings(self, parser: OptionParser) -> None: ... class IndentedHelpFormatter(HelpFormatter): def __init__(self, indent_increment: int = ..., max_help_position: int = ..., width: Optional[int] = ..., short_first: int = ...) -> None: ... def format_heading(self, heading: _Text) -> _Text: ... 
def format_usage(self, usage: _Text) -> _Text: ... class TitledHelpFormatter(HelpFormatter): def __init__(self, indent_increment: int = ..., max_help_position: int = ..., width: Optional[int] = ..., short_first: int = ...) -> None: ... def format_heading(self, heading: _Text) -> _Text: ... def format_usage(self, usage: _Text) -> _Text: ... class Option: ACTIONS: Tuple[_Text, ...] ALWAYS_TYPED_ACTIONS: Tuple[_Text, ...] ATTRS: List[_Text] CHECK_METHODS: Optional[List[Callable[..., Any]]] CONST_ACTIONS: Tuple[_Text, ...] STORE_ACTIONS: Tuple[_Text, ...] TYPED_ACTIONS: Tuple[_Text, ...] TYPES: Tuple[_Text, ...] TYPE_CHECKER: Dict[_Text, Callable[..., Any]] _long_opts: List[_Text] _short_opts: List[_Text] action: _Text dest: Optional[_Text] nargs: int type: Any def __init__(self, *opts, **attrs) -> None: ... def _check_action(self) -> None: ... def _check_callback(self) -> None: ... def _check_choice(self) -> None: ... def _check_const(self) -> None: ... def _check_dest(self) -> None: ... def _check_nargs(self) -> None: ... def _check_opt_strings(self, opts: Optional[_Text]) -> Any: ... def _check_type(self) -> None: ... def _set_attrs(self, attrs: Dict[_Text, Any]) -> None: ... def _set_opt_strings(self, opts: _Text) -> None: ... def check_value(self, opt: Any, value: Any) -> Any: ... def convert_value(self, opt: Any, value: Any) -> Any: ... def get_opt_string(self) -> _Text: ... def process(self, opt: Any, value: Any, values: Any, parser: OptionParser) -> int: ... def take_action(self, action: _Text, dest: _Text, opt: Any, value: Any, values: Any, parser: OptionParser) -> int: ... def takes_value(self) -> bool: ... make_option = Option class OptionContainer: _long_opt: Dict[_Text, Option] _short_opt: Dict[_Text, Option] conflict_handler: _Text defaults: Dict[_Text, Any] description: Any option_class: Any def __init__(self, option_class: Option, conflict_handler: Any, description: Any) -> None: ... def _check_conflict(self, option: Any) -> None: ... 
def _create_option_mappings(self) -> None: ... def _share_option_mappings(self, parser: OptionParser) -> None: ... def add_option(self, *args, **kwargs) -> Any: ... def add_options(self, option_list: Iterable[Option]) -> None: ... def destroy(self) -> None: ... def format_description(self, formatter: Optional[HelpFormatter]) -> Any: ... def format_help(self, formatter: Optional[HelpFormatter]) -> _Text: ... def format_option_help(self, formatter: Optional[HelpFormatter]) -> _Text: ... def get_description(self) -> Any: ... def get_option(self, opt_str: _Text) -> Optional[Option]: ... def has_option(self, opt_str: _Text) -> bool: ... def remove_option(self, opt_str: _Text) -> None: ... def set_conflict_handler(self, handler: Any) -> None: ... def set_description(self, description: Any) -> None: ... class OptionGroup(OptionContainer): option_list: List[Option] parser: OptionParser title: _Text def __init__(self, parser: OptionParser, title: _Text, description: Optional[_Text] = ...) -> None: ... def _create_option_list(self) -> None: ... def set_title(self, title: _Text) -> None: ... class Values: def __init__(self, defaults: Optional[Mapping[str, Any]] = ...) -> None: ... def _update(self, dict: Mapping[_Text, Any], mode: Any) -> None: ... def _update_careful(self, dict: Mapping[_Text, Any]) -> None: ... def _update_loose(self, dict: Mapping[_Text, Any]) -> None: ... def ensure_value(self, attr: _Text, value: Any) -> Any: ... def read_file(self, filename: _Text, mode: _Text) -> None: ... def read_module(self, modname: _Text, mode: _Text) -> None: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... 
class OptionParser(OptionContainer): allow_interspersed_args: bool epilog: Optional[_Text] formatter: HelpFormatter largs: Optional[List[_Text]] option_groups: List[OptionParser] option_list: List[Option] process_default_values: Any prog: Optional[_Text] rargs: Optional[List[Any]] standard_option_list: List[Option] usage: Optional[_Text] values: Optional[Values] version: _Text def __init__(self, usage: Optional[_Text] = ..., option_list: Iterable[Option] = ..., option_class: Option = ..., version: Optional[_Text] = ..., conflict_handler: _Text = ..., description: Optional[_Text] = ..., formatter: Optional[HelpFormatter] = ..., add_help_option: bool = ..., prog: Optional[_Text] = ..., epilog: Optional[_Text] = ...) -> None: ... def _add_help_option(self) -> None: ... def _add_version_option(self) -> None: ... def _create_option_list(self) -> None: ... def _get_all_options(self) -> List[Option]: ... def _get_args(self, args: Iterable[Any]) -> List[Any]: ... def _init_parsing_state(self) -> None: ... def _match_long_opt(self, opt: _Text) -> _Text: ... def _populate_option_list(self, option_list: Iterable[Option], add_help: bool = ...) -> None: ... def _process_args(self, largs: List[Any], rargs: List[Any], values: Values) -> None: ... def _process_long_opt(self, rargs: List[Any], values: Any) -> None: ... def _process_short_opts(self, rargs: List[Any], values: Any) -> None: ... def add_option_group(self, *args, **kwargs) -> OptionParser: ... def check_values(self, values: Values, args: List[_Text]) -> Tuple[Values, List[_Text]]: ... def disable_interspersed_args(self) -> None: ... def enable_interspersed_args(self) -> None: ... def error(self, msg: _Text) -> None: ... def exit(self, status: int = ..., msg: Optional[str] = ...) -> None: ... def expand_prog_name(self, s: Optional[_Text]) -> Any: ... def format_epilog(self, formatter: HelpFormatter) -> Any: ... def format_help(self, formatter: Optional[HelpFormatter] = ...) -> _Text: ... 
def format_option_help(self, formatter: Optional[HelpFormatter] = ...) -> _Text: ... def get_default_values(self) -> Values: ... def get_option_group(self, opt_str: _Text) -> Any: ... def get_prog_name(self) -> _Text: ... def get_usage(self) -> _Text: ... def get_version(self) -> _Text: ... def parse_args(self, args: Optional[Sequence[AnyStr]] = ..., values: Optional[Values] = ...) -> Tuple[Values, List[AnyStr]]: ... def print_usage(self, file: Optional[IO[str]] = ...) -> None: ... def print_help(self, file: Optional[IO[str]] = ...) -> None: ... def print_version(self, file: Optional[IO[str]] = ...) -> None: ... def set_default(self, dest: Any, value: Any) -> None: ... def set_defaults(self, **kwargs) -> None: ... def set_process_default_values(self, process: Any) -> None: ... def set_usage(self, usage: _Text) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/pdb.pyi0000644€tŠÔÚ€2›s®0000000451713576752252025227 0ustar jukkaDROPBOX\Domain Users00000000000000# NOTE: This stub is incomplete - only contains some global functions from cmd import Cmd import sys from types import FrameType from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar _T = TypeVar('_T') class Restart(Exception): ... def run(statement: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> None: ... def runeval(expression: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> Any: ... def runctx(statement: str, globals: Dict[str, Any], locals: Dict[str, Any]) -> None: ... def runcall(*args: Any, **kwds: Any) -> Any: ... if sys.version_info >= (3, 7): def set_trace(*, header: Optional[str] = ...) -> None: ... else: def set_trace() -> None: ... def post_mortem(t: Optional[Any] = ...) -> None: ... def pm() -> None: ... 
class Pdb(Cmd): if sys.version_info >= (3, 6): def __init__( self, completekey: str = ..., stdin: Optional[IO[str]] = ..., stdout: Optional[IO[str]] = ..., skip: Optional[Iterable[str]] = ..., nosigint: bool = ..., readrc: bool = ..., ) -> None: ... elif sys.version_info >= (3, 2): def __init__( self, completekey: str = ..., stdin: Optional[IO[str]] = ..., stdout: Optional[IO[str]] = ..., skip: Optional[Iterable[str]] = ..., nosigint: bool = ..., ) -> None: ... else: def __init__( self, completekey: str = ..., stdin: Optional[IO[str]] = ..., stdout: Optional[IO[str]] = ..., skip: Optional[Iterable[str]] = ..., ) -> None: ... # TODO: The run* and set_trace() methods are actually defined on bdb.Bdb, from which Pdb inherits. # Move these methods there once we have a bdb stub. def run(self, statement: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> None: ... def runeval(self, expression: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> Any: ... def runcall(self, func: Callable[..., _T], *args: Any, **kwds: Any) -> Optional[_T]: ... def set_trace(self, frame: Optional[FrameType] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/pickle.pyi0000644€tŠÔÚ€2›s®0000000716113576752252025727 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, IO, Mapping, Union, Tuple, Callable, Optional, Iterator HIGHEST_PROTOCOL: int if sys.version_info >= (3, 0): DEFAULT_PROTOCOL: int if sys.version_info >= (3, 0): def dump(obj: Any, file: IO[bytes], protocol: Optional[int] = ..., *, fix_imports: bool = ...) -> None: ... def dumps(obj: Any, protocol: Optional[int] = ..., *, fix_imports: bool = ...) -> bytes: ... def loads(bytes_object: bytes, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... def load(file: IO[bytes], *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... 
else: def dump(obj: Any, file: IO[bytes], protocol: Optional[int] = ...) -> None: ... def dumps(obj: Any, protocol: Optional[int] = ...) -> bytes: ... def load(file: IO[bytes]) -> Any: ... def loads(string: bytes) -> Any: ... class PickleError(Exception): ... class PicklingError(PickleError): ... class UnpicklingError(PickleError): ... _reducedtype = Union[str, Tuple[Callable[..., Any], Tuple[Any, ...]], Tuple[Callable[..., Any], Tuple[Any, ...], Any], Tuple[Callable[..., Any], Tuple[Any, ...], Any, Optional[Iterator]], Tuple[Callable[..., Any], Tuple[Any, ...], Any, Optional[Iterator], Optional[Iterator]]] class Pickler: fast: bool if sys.version_info >= (3, 3): dispatch_table: Mapping[type, Callable[[Any], _reducedtype]] if sys.version_info >= (3, 0): def __init__(self, file: IO[bytes], protocol: Optional[int] = ..., *, fix_imports: bool = ...) -> None: ... else: def __init__(self, file: IO[bytes], protocol: Optional[int] = ...) -> None: ... def dump(self, obj: Any) -> None: ... def clear_memo(self) -> None: ... def persistent_id(self, obj: Any) -> Any: ... if sys.version_info >= (3, 8): def reducer_override(self, obj: Any) -> Any: ... class Unpickler: if sys.version_info >= (3, 0): def __init__(self, file: IO[bytes], *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> None: ... else: def __init__(self, file: IO[bytes]) -> None: ... def load(self) -> Any: ... def find_class(self, module: str, name: str) -> Any: ... if sys.version_info >= (3, 0): def persistent_load(self, pid: Any) -> Any: ... 
MARK: bytes STOP: bytes POP: bytes POP_MARK: bytes DUP: bytes FLOAT: bytes INT: bytes BININT: bytes BININT1: bytes LONG: bytes BININT2: bytes NONE: bytes PERSID: bytes BINPERSID: bytes REDUCE: bytes STRING: bytes BINSTRING: bytes SHORT_BINSTRING: bytes UNICODE: bytes BINUNICODE: bytes APPEND: bytes BUILD: bytes GLOBAL: bytes DICT: bytes EMPTY_DICT: bytes APPENDS: bytes GET: bytes BINGET: bytes INST: bytes LONG_BINGET: bytes LIST: bytes EMPTY_LIST: bytes OBJ: bytes PUT: bytes BINPUT: bytes LONG_BINPUT: bytes SETITEM: bytes TUPLE: bytes EMPTY_TUPLE: bytes SETITEMS: bytes BINFLOAT: bytes TRUE: bytes FALSE: bytes # protocol 2 PROTO: bytes NEWOBJ: bytes EXT1: bytes EXT2: bytes EXT4: bytes TUPLE1: bytes TUPLE2: bytes TUPLE3: bytes NEWTRUE: bytes NEWFALSE: bytes LONG1: bytes LONG4: bytes if sys.version_info >= (3, 0): # protocol 3 BINBYTES: bytes SHORT_BINBYTES: bytes if sys.version_info >= (3, 4): # protocol 4 SHORT_BINUNICODE: bytes BINUNICODE8: bytes BINBYTES8: bytes EMPTY_SET: bytes ADDITEMS: bytes FROZENSET: bytes NEWOBJ_EX: bytes STACK_GLOBAL: bytes MEMOIZE: bytes FRAME: bytes mypy-0.761/mypy/typeshed/stdlib/2and3/pickletools.pyi0000644€tŠÔÚ€2›s®0000001045313576752252027006 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pickletools (Python 2 and 3) import sys from typing import Any, Callable, IO, Iterator, List, MutableMapping, Optional, Text, Tuple, Type, Union _Reader = Callable[[IO[bytes]], Any] if sys.version_info >= (3, 0): bytes_types: Tuple[Type[Any], ...] UP_TO_NEWLINE: int TAKEN_FROM_ARGUMENT1: int TAKEN_FROM_ARGUMENT4: int if sys.version_info >= (3, 3): TAKEN_FROM_ARGUMENT4U: int if sys.version_info >= (3, 4): TAKEN_FROM_ARGUMENT8U: int class ArgumentDescriptor(object): name: str n: int reader: _Reader doc: str def __init__(self, name: str, n: int, reader: _Reader, doc: str) -> None: ... def read_uint1(f: IO[bytes]) -> int: ... uint1: ArgumentDescriptor def read_uint2(f: IO[bytes]) -> int: ... 
uint2: ArgumentDescriptor def read_int4(f: IO[bytes]) -> int: ... int4: ArgumentDescriptor if sys.version_info >= (3, 3): def read_uint4(f: IO[bytes]) -> int: ... uint4: ArgumentDescriptor if sys.version_info >= (3, 5): def read_uint8(f: IO[bytes]) -> int: ... uint8: ArgumentDescriptor def read_stringnl(f: IO[bytes], decode: bool = ..., stripquotes: bool = ...) -> Union[bytes, Text]: ... stringnl: ArgumentDescriptor def read_stringnl_noescape(f: IO[bytes]) -> str: ... stringnl_noescape: ArgumentDescriptor def read_stringnl_noescape_pair(f: IO[bytes]) -> Text: ... stringnl_noescape_pair: ArgumentDescriptor def read_string1(f: IO[bytes]) -> str: ... string1: ArgumentDescriptor def read_string4(f: IO[bytes]) -> str: ... string4: ArgumentDescriptor if sys.version_info >= (3, 3): def read_bytes1(f: IO[bytes]) -> bytes: ... bytes1: ArgumentDescriptor def read_bytes4(f: IO[bytes]) -> bytes: ... bytes4: ArgumentDescriptor if sys.version_info >= (3, 4): def read_bytes8(f: IO[bytes]) -> bytes: ... bytes8: ArgumentDescriptor def read_unicodestringnl(f: IO[bytes]) -> Text: ... unicodestringnl: ArgumentDescriptor if sys.version_info >= (3, 4): def read_unicodestring1(f: IO[bytes]) -> Text: ... unicodestring1: ArgumentDescriptor def read_unicodestring4(f: IO[bytes]) -> Text: ... unicodestring4: ArgumentDescriptor if sys.version_info >= (3, 4): def read_unicodestring8(f: IO[bytes]) -> Text: ... unicodestring8: ArgumentDescriptor def read_decimalnl_short(f: IO[bytes]) -> int: ... def read_decimalnl_long(f: IO[bytes]) -> int: ... decimalnl_short: ArgumentDescriptor decimalnl_long: ArgumentDescriptor def read_floatnl(f: IO[bytes]) -> float: ... floatnl: ArgumentDescriptor def read_float8(f: IO[bytes]) -> float: ... float8: ArgumentDescriptor def read_long1(f: IO[bytes]) -> int: ... long1: ArgumentDescriptor def read_long4(f: IO[bytes]) -> int: ... 
long4: ArgumentDescriptor class StackObject(object): name: str obtype: Union[Type[Any], Tuple[Type[Any], ...]] doc: str def __init__(self, name: str, obtype: Union[Type[Any], Tuple[Type[Any], ...]], doc: str) -> None: ... pyint: StackObject pylong: StackObject pyinteger_or_bool: StackObject pybool: StackObject pyfloat: StackObject if sys.version_info >= (3, 4): pybytes_or_str: StackObject pystring: StackObject if sys.version_info >= (3, 0): pybytes: StackObject pyunicode: StackObject pynone: StackObject pytuple: StackObject pylist: StackObject pydict: StackObject if sys.version_info >= (3, 4): pyset: StackObject pyfrozenset: StackObject anyobject: StackObject markobject: StackObject stackslice: StackObject class OpcodeInfo(object): name: str code: str arg: Optional[ArgumentDescriptor] stack_before: List[StackObject] stack_after: List[StackObject] proto: int doc: str def __init__(self, name: str, code: str, arg: Optional[ArgumentDescriptor], stack_before: List[StackObject], stack_after: List[StackObject], proto: int, doc: str) -> None: ... opcodes: List[OpcodeInfo] def genops(pickle: Union[bytes, IO[bytes]]) -> Iterator[Tuple[OpcodeInfo, Optional[Any], Optional[int]]]: ... def optimize(p: Union[bytes, IO[bytes]]) -> bytes: ... if sys.version_info >= (3, 2): def dis(pickle: Union[bytes, IO[bytes]], out: Optional[IO[str]] = ..., memo: Optional[MutableMapping[int, Any]] = ..., indentlevel: int = ..., annotate: int = ...) -> None: ... else: def dis(pickle: Union[bytes, IO[bytes]], out: Optional[IO[str]] = ..., memo: Optional[MutableMapping[int, Any]] = ..., indentlevel: int = ...) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/pkgutil.pyi0000644€tŠÔÚ€2›s®0000000253113576752252026133 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pkgutil from typing import Any, Callable, Generator, IO, Iterable, Optional, Tuple, NamedTuple import sys if sys.version_info >= (3,): from importlib.abc import Loader else: Loader = Any if sys.version_info >= (3, 6): class ModuleInfo(NamedTuple): module_finder: Any name: str ispkg: bool _YMFNI = Generator[ModuleInfo, None, None] else: _YMFNI = Generator[Tuple[Any, str, bool], None, None] def extend_path(path: Iterable[str], name: str) -> Iterable[str]: ... class ImpImporter: def __init__(self, dirname: Optional[str] = ...) -> None: ... class ImpLoader: def __init__(self, fullname: str, file: IO[str], filename: str, etc: Tuple[str, str, int]) -> None: ... def find_loader(fullname: str) -> Optional[Loader]: ... def get_importer(path_item: str) -> Any: ... # TODO precise type def get_loader(module_or_name: str) -> Loader: ... def iter_importers(fullname: str = ...) -> Generator[Any, None, None]: ... # TODO precise type def iter_modules(path: Optional[Iterable[str]] = ..., prefix: str = ...) -> _YMFNI: ... # TODO precise type def walk_packages(path: Optional[Iterable[str]] = ..., prefix: str = ..., onerror: Optional[Callable[[str], None]] = ...) -> _YMFNI: ... def get_data(package: str, resource: str) -> Optional[bytes]: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/plistlib.pyi0000644€tŠÔÚ€2›s®0000000463313576752252026303 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for plistlib from typing import ( Any, IO, Mapping, MutableMapping, Optional, Union, Type, TypeVar, ) from typing import Dict as DictT import sys if sys.version_info >= (3,): from enum import Enum class PlistFormat(Enum): FMT_XML: int FMT_BINARY: int FMT_XML = PlistFormat.FMT_XML FMT_BINARY = PlistFormat.FMT_BINARY mm = MutableMapping[str, Any] _D = TypeVar('_D', bound=mm) if sys.version_info >= (3,): _Path = str else: _Path = Union[str, unicode] if sys.version_info >= (3, 4): def load(fp: IO[bytes], *, fmt: Optional[PlistFormat] = ..., use_builtin_types: bool = ..., dict_type: Type[_D] = ...) -> _D: ... def loads(data: bytes, *, fmt: Optional[PlistFormat] = ..., use_builtin_types: bool = ..., dict_type: Type[_D] = ...) -> _D: ... def dump(value: Mapping[str, Any], fp: IO[bytes], *, fmt: PlistFormat = ..., sort_keys: bool = ..., skipkeys: bool = ...) -> None: ... def dumps(value: Mapping[str, Any], *, fmt: PlistFormat = ..., skipkeys: bool = ..., sort_keys: bool = ...) -> bytes: ... def readPlist(pathOrFile: Union[_Path, IO[bytes]]) -> DictT[str, Any]: ... def writePlist(value: Mapping[str, Any], pathOrFile: Union[_Path, IO[bytes]]) -> None: ... def readPlistFromBytes(data: bytes) -> DictT[str, Any]: ... def writePlistToBytes(value: Mapping[str, Any]) -> bytes: ... if sys.version_info < (3,): def readPlistFromResource(path: _Path, restype: str = ..., resid: int = ...) -> DictT[str, Any]: ... def writePlistToResource(rootObject: Mapping[str, Any], path: _Path, restype: str = ..., resid: int = ...) -> None: ... def readPlistFromString(data: str) -> DictT[str, Any]: ... def writePlistToString(rootObject: Mapping[str, Any]) -> str: ... if sys.version_info < (3, 7): class Dict(DictT[str, Any]): def __getattr__(self, attr: str) -> Any: ... def __setattr__(self, attr: str, value: Any) -> None: ... 
def __delattr__(self, attr: str) -> None: ... class Data: data: bytes def __init__(self, data: bytes) -> None: ... if sys.version_info >= (3, 8): class UID: data: int def __init__(self, data: int) -> None: ... def __index__(self) -> int: ... def __reduce__(self) -> Any: ... def __hash__(self) -> int: ... mypy-0.761/mypy/typeshed/stdlib/2and3/poplib.pyi0000644€tŠÔÚ€2›s®0000000467413576752252025753 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for poplib (Python 2 and 3) import socket import ssl import sys from typing import ( Any, BinaryIO, Dict, List, NoReturn, Optional, overload, Pattern, Text, Tuple, ) _LongResp = Tuple[bytes, List[bytes], int] class error_proto(Exception): ... POP3_PORT: int POP3_SSL_PORT: int CR: bytes LF: bytes CRLF: bytes class POP3: if sys.version_info >= (3, 0): encoding: Text host: Text port: int sock: socket.socket file: BinaryIO welcome: bytes def __init__(self, host: Text, port: int = ..., timeout: float = ...) -> None: ... def getwelcome(self) -> bytes: ... def set_debuglevel(self, level: int) -> None: ... def user(self, user: Text) -> bytes: ... def pass_(self, pswd: Text) -> bytes: ... def stat(self) -> Tuple[int, int]: ... def list(self, which: Optional[Any] = ...) -> _LongResp: ... def retr(self, which: Any) -> _LongResp: ... def dele(self, which: Any) -> bytes: ... def noop(self) -> bytes: ... def rset(self) -> bytes: ... def quit(self) -> bytes: ... def close(self) -> None: ... def rpop(self, user: Text) -> bytes: ... timestamp: Pattern[Text] if sys.version_info < (3, 0): def apop(self, user: Text, secret: Text) -> bytes: ... else: def apop(self, user: Text, password: Text) -> bytes: ... def top(self, which: Any, howmuch: int) -> _LongResp: ... @overload def uidl(self) -> _LongResp: ... @overload def uidl(self, which: Any) -> bytes: ... if sys.version_info >= (3, 5): def utf8(self) -> bytes: ... if sys.version_info >= (3, 4): def capa(self) -> Dict[Text, List[Text]]: ... 
def stls(self, context: Optional[ssl.SSLContext] = ...) -> bytes: ... class POP3_SSL(POP3): if sys.version_info >= (3, 0): def __init__(self, host: Text, port: int = ..., keyfile: Optional[Text] = ..., certfile: Optional[Text] = ..., timeout: float = ..., context: Optional[ssl.SSLContext] = ...) -> None: ... else: def __init__(self, host: Text, port: int = ..., keyfile: Optional[Text] = ..., certfile: Optional[Text] = ..., timeout: float = ...) -> None: ... if sys.version_info >= (3, 4): # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored def stls(self, context: Any = ..., keyfile: Any = ..., certfile: Any = ...) -> bytes: ... mypy-0.761/mypy/typeshed/stdlib/2and3/posixpath.pyi0000644€tŠÔÚ€2›s®0000001412413576752252026474 0ustar jukkaDROPBOX\Domain Users00000000000000# NB: path.pyi and stdlib/2 and stdlib/3 must remain consistent! # Stubs for os.path # Ron Murawski import os import sys from typing import overload, List, Any, AnyStr, Sequence, Tuple, TypeVar, Union, Text, Callable, Optional _T = TypeVar('_T') if sys.version_info >= (3, 6): from builtins import _PathLike _PathType = Union[bytes, Text, _PathLike] _StrPath = Union[Text, _PathLike[Text]] _BytesPath = Union[bytes, _PathLike[bytes]] else: _PathType = Union[bytes, Text] _StrPath = Text _BytesPath = bytes # ----- os.path variables ----- supports_unicode_filenames: bool # aliases (also in os) curdir: str pardir: str sep: str if sys.platform == 'win32': altsep: str else: altsep: Optional[str] extsep: str pathsep: str defpath: str devnull: str # ----- os.path function stubs ----- if sys.version_info >= (3, 6): # Overloads are necessary to work around python/mypy#3644. @overload def abspath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def abspath(path: AnyStr) -> AnyStr: ... @overload def basename(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def basename(path: AnyStr) -> AnyStr: ... 
@overload def dirname(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def dirname(path: AnyStr) -> AnyStr: ... @overload def expanduser(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expanduser(path: AnyStr) -> AnyStr: ... @overload def expandvars(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expandvars(path: AnyStr) -> AnyStr: ... @overload def normcase(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normcase(path: AnyStr) -> AnyStr: ... @overload def normpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': @overload def realpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(path: AnyStr) -> AnyStr: ... else: @overload def realpath(filename: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(filename: AnyStr) -> AnyStr: ... else: def abspath(path: AnyStr) -> AnyStr: ... def basename(path: AnyStr) -> AnyStr: ... def dirname(path: AnyStr) -> AnyStr: ... def expanduser(path: AnyStr) -> AnyStr: ... def expandvars(path: AnyStr) -> AnyStr: ... def normcase(path: AnyStr) -> AnyStr: ... def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': def realpath(path: AnyStr) -> AnyStr: ... else: def realpath(filename: AnyStr) -> AnyStr: ... if sys.version_info >= (3, 6): # In reality it returns str for sequences of _StrPath and bytes for sequences # of _BytesPath, but mypy does not accept such a signature. def commonpath(paths: Sequence[_PathType]) -> Any: ... elif sys.version_info >= (3, 5): def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ... # NOTE: Empty lists results in '' (str) regardless of contained type. # Also, in Python 2 mixed sequences of Text and bytes results in either Text or bytes # So, fall back to Any def commonprefix(list: Sequence[_PathType]) -> Any: ... if sys.version_info >= (3, 3): def exists(path: Union[_PathType, int]) -> bool: ... else: def exists(path: _PathType) -> bool: ... def lexists(path: _PathType) -> bool: ... 
# These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(path: _PathType) -> float: ... def getmtime(path: _PathType) -> float: ... def getctime(path: _PathType) -> float: ... def getsize(path: _PathType) -> int: ... def isabs(path: _PathType) -> bool: ... def isfile(path: _PathType) -> bool: ... def isdir(path: _PathType) -> bool: ... def islink(path: _PathType) -> bool: ... def ismount(path: _PathType) -> bool: ... if sys.version_info < (3, 0): # Make sure signatures are disjunct, and allow combinations of bytes and unicode. # (Since Python 2 allows that, too) # Note that e.g. os.path.join("a", "b", "c", "d", u"e") will still result in # a type error. @overload def join(__p1: bytes, *p: bytes) -> bytes: ... @overload def join(__p1: bytes, __p2: bytes, __p3: bytes, __p4: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: bytes, __p3: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: Text, *p: _PathType) -> Text: ... @overload def join(__p1: Text, *p: _PathType) -> Text: ... elif sys.version_info >= (3, 6): # Mypy complains that the signatures overlap (same for relpath below), but things seem to behave correctly anyway. @overload def join(path: _StrPath, *paths: _StrPath) -> Text: ... @overload def join(path: _BytesPath, *paths: _BytesPath) -> bytes: ... else: def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ... @overload def relpath(path: _BytesPath, start: Optional[_BytesPath] = ...) -> bytes: ... @overload def relpath(path: _StrPath, start: Optional[_StrPath] = ...) -> Text: ... def samefile(path1: _PathType, path2: _PathType) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... def samestat(stat1: os.stat_result, stat2: os.stat_result) -> bool: ... if sys.version_info >= (3, 6): @overload def split(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... 
@overload def splitdrive(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... else: def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... if sys.platform == 'win32': def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # deprecated if sys.version_info < (3,): def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/pprint.pyi0000644€tŠÔÚ€2›s®0000000324313576752252025771 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pprint # Based on http://docs.python.org/2/library/pprint.html # Based on http://docs.python.org/3/library/pprint.html import sys from typing import Any, Dict, Tuple, IO, Optional if sys.version_info >= (3, 4): def pformat(o: object, indent: int = ..., width: int = ..., depth: Optional[int] = ..., compact: bool = ...) -> str: ... else: def pformat(o: object, indent: int = ..., width: int = ..., depth: Optional[int] = ...) -> str: ... if sys.version_info >= (3, 4): def pprint(o: object, stream: Optional[IO[str]] = ..., indent: int = ..., width: int = ..., depth: Optional[int] = ..., compact: bool = ...) -> None: ... else: def pprint(o: object, stream: Optional[IO[str]] = ..., indent: int = ..., width: int = ..., depth: Optional[int] = ...) -> None: ... def isreadable(o: object) -> bool: ... def isrecursive(o: object) -> bool: ... def saferepr(o: object) -> str: ... class PrettyPrinter: if sys.version_info >= (3, 4): def __init__(self, indent: int = ..., width: int = ..., depth: Optional[int] = ..., stream: Optional[IO[str]] = ..., compact: bool = ...) -> None: ... 
else: def __init__(self, indent: int = ..., width: int = ..., depth: Optional[int] = ..., stream: Optional[IO[str]] = ...) -> None: ... def pformat(self, o: object) -> str: ... def pprint(self, o: object) -> None: ... def isreadable(self, o: object) -> bool: ... def isrecursive(self, o: object) -> bool: ... def format(self, o: object, context: Dict[int, Any], maxlevels: int, level: int) -> Tuple[str, bool, bool]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/profile.pyi0000644€tŠÔÚ€2›s®0000000241113576752252026111 0ustar jukkaDROPBOX\Domain Users00000000000000import os import sys from typing import Any, Callable, Dict, Optional, Text, TypeVar, Union def run(statement: str, filename: Optional[str] = ..., sort: Union[str, int] = ...) -> None: ... def runctx(statement: str, globals: Dict[str, Any], locals: Dict[str, Any], filename: Optional[str] = ..., sort: Union[str, int] = ...) -> None: ... _SelfT = TypeVar('_SelfT', bound=Profile) _T = TypeVar('_T') if sys.version_info >= (3, 6): _Path = Union[bytes, Text, os.PathLike[Any]] else: _Path = Union[bytes, Text] class Profile: def __init__(self, timer: Optional[Callable[[], float]] = ..., bias: Optional[int] = ...) -> None: ... def set_cmd(self, cmd: str) -> None: ... def simulate_call(self, name: str) -> None: ... def simulate_cmd_complete(self) -> None: ... def print_stats(self, sort: Union[str, int] = ...) -> None: ... def dump_stats(self, file: _Path) -> None: ... def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... def run(self: _SelfT, cmd: str) -> _SelfT: ... def runctx(self: _SelfT, cmd: str, globals: Dict[str, Any], locals: Dict[str, Any]) -> _SelfT: ... def runcall(self, func: Callable[..., _T], *args: Any, **kw: Any) -> _T: ... def calibrate(self, m: int, verbose: int = ...) -> float: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/pstats.pyi0000644€tŠÔÚ€2›s®0000000365413576752252026001 0ustar jukkaDROPBOX\Domain Users00000000000000from profile import Profile from cProfile import Profile as _cProfile import os import sys from typing import Any, Dict, IO, Iterable, List, Text, Tuple, TypeVar, Union, overload _Selector = Union[str, float, int] _T = TypeVar('_T', bound=Stats) if sys.version_info >= (3, 6): _Path = Union[bytes, Text, os.PathLike[Any]] else: _Path = Union[bytes, Text] class Stats: def __init__(self: _T, __arg: Union[None, str, Text, Profile, _cProfile] = ..., *args: Union[None, str, Text, Profile, _cProfile, _T], stream: IO[Any] = ...) -> None: ... def init(self, arg: Union[None, str, Text, Profile, _cProfile]) -> None: ... def load_stats(self, arg: Union[None, str, Text, Profile, _cProfile]) -> None: ... def get_top_level_stats(self) -> None: ... def add(self: _T, *arg_list: Union[None, str, Text, Profile, _cProfile, _T]) -> _T: ... def dump_stats(self, filename: _Path) -> None: ... def get_sort_arg_defs(self) -> Dict[str, Tuple[Tuple[Tuple[int, int], ...], str]]: ... @overload def sort_stats(self: _T, field: int) -> _T: ... @overload def sort_stats(self: _T, *field: str) -> _T: ... def reverse_order(self: _T) -> _T: ... def strip_dirs(self: _T) -> _T: ... def calc_callees(self) -> None: ... def eval_print_amount(self, sel: _Selector, list: List[str], msg: str) -> Tuple[List[str], str]: ... def get_print_list(self, sel_list: Iterable[_Selector]) -> Tuple[int, List[str]]: ... def print_stats(self: _T, *amount: _Selector) -> _T: ... def print_callees(self: _T, *amount: _Selector) -> _T: ... def print_callers(self: _T, *amount: _Selector) -> _T: ... def print_call_heading(self, name_size: int, column_title: str) -> None: ... def print_call_line(self, name_size: int, source: str, call_dict: Dict[str, Any], arrow: str = ...) -> None: ... def print_title(self) -> None: ... def print_line(self, func: str) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/pty.pyi0000644€tŠÔÚ€2›s®0000000115713576752252025273 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pty (Python 2 and 3) import sys from typing import Callable, Iterable, Tuple, Union _Reader = Callable[[int], bytes] STDIN_FILENO: int STDOUT_FILENO: int STDERR_FILENO: int CHILD: int def openpty() -> Tuple[int, int]: ... def master_open() -> Tuple[int, str]: ... def slave_open(tty_name: str) -> int: ... def fork() -> Tuple[int, int]: ... if sys.version_info >= (3, 4): def spawn(argv: Union[str, Iterable[str]], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ... else: def spawn(argv: Union[str, Iterable[str]], master_read: _Reader = ..., stdin_read: _Reader = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/pwd.pyi0000644€tŠÔÚ€2›s®0000000053613576752252025251 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple class struct_passwd(Tuple[str, str, int, int, str, str, str]): pw_name: str pw_passwd: str pw_uid: int pw_gid: int pw_gecos: str pw_dir: str pw_shell: str def getpwall() -> List[struct_passwd]: ... def getpwuid(uid: int) -> struct_passwd: ... def getpwnam(name: str) -> struct_passwd: ... mypy-0.761/mypy/typeshed/stdlib/2and3/py_compile.pyi0000644€tŠÔÚ€2›s®0000000232413576752252026614 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for py_compile (Python 2 and 3) import enum import sys from typing import Optional, List, Text, AnyStr, Union, Type _EitherStr = Union[bytes, Text] class PyCompileError(Exception): exc_type_name: str exc_value: BaseException file: str msg: str def __init__(self, exc_type: Type[BaseException], exc_value: BaseException, file: str, msg: str = ...) -> None: ... if sys.version_info >= (3, 7): class PycInvalidationMode(enum.Enum): TIMESTAMP: int = ... CHECKED_HASH: int = ... UNCHECKED_HASH: int = ... def _get_default_invalidation_mode() -> PycInvalidationMode: ... 
def compile(file: AnyStr, cfile: Optional[AnyStr] = ..., dfile: Optional[AnyStr] = ..., doraise: bool = ..., optimize: int = ..., invalidation_mode: Optional[PycInvalidationMode] = ...) -> Optional[AnyStr]: ... elif sys.version_info >= (3, 2): def compile(file: AnyStr, cfile: Optional[AnyStr] = ..., dfile: Optional[AnyStr] = ..., doraise: bool = ..., optimize: int = ...) -> Optional[AnyStr]: ... else: def compile(file: _EitherStr, cfile: Optional[_EitherStr] = ..., dfile: Optional[_EitherStr] = ..., doraise: bool = ...) -> None: ... def main(args: Optional[List[Text]] = ...) -> int: ... mypy-0.761/mypy/typeshed/stdlib/2and3/pyclbr.pyi0000644€tŠÔÚ€2›s®0000000164313576752252025752 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Union, Sequence, Optional, Dict class Class: module: str name: str super: Optional[List[Union[Class, str]]] methods: Dict[str, int] file: int lineno: int def __init__(self, module: str, name: str, super: Optional[List[Union[Class, str]]], file: str, lineno: int) -> None: ... class Function: module: str name: str file: int lineno: int def __init__(self, module: str, name: str, file: str, lineno: int) -> None: ... def readmodule(module: str, path: Optional[Sequence[str]] = ... ) -> Dict[str, Class]: ... def readmodule_ex(module: str, path: Optional[Sequence[str]] = ... ) -> Dict[str, Union[Class, Function, List[str]]]: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/pydoc.pyi0000644€tŠÔÚ€2›s®0000002307213576752252025575 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, AnyStr, Callable, Container, Dict, IO, List, Mapping, MutableMapping, NoReturn, Optional, Text, Tuple, Type, Union from types import FunctionType, MethodType, ModuleType, TracebackType if sys.version_info >= (3,): from reprlib import Repr else: from repr import Repr # the return type of sys.exc_info(), used by ErrorDuringImport.__init__ _Exc_Info = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] __author__: str __date__: str __version__: str __credits__: str def pathdirs() -> List[str]: ... def getdoc(object: object) -> Text: ... def splitdoc(doc: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def classname(object: object, modname: str) -> str: ... def isdata(object: object) -> bool: ... def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ... def cram(text: str, maxlen: int) -> str: ... def stripid(text: str) -> str: ... def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... def visiblename(name: str, all: Optional[Container[str]] = ..., obj: Optional[object] = ...) -> bool: ... def classify_class_attrs(object: object) -> List[Tuple[str, str, type, str]]: ... def ispackage(path: str) -> bool: ... def source_synopsis(file: IO[AnyStr]) -> Optional[AnyStr]: ... def synopsis(filename: str, cache: MutableMapping[str, Tuple[int, str]] = ...) -> Optional[str]: ... class ErrorDuringImport(Exception): filename: str exc: Optional[Type[BaseException]] value: Optional[BaseException] tb: Optional[TracebackType] def __init__(self, filename: str, exc_info: _Exc_Info) -> None: ... def importfile(path: str) -> ModuleType: ... def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = ...) -> ModuleType: ... class Doc: def document(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... 
def fail(self, object: object, name: Optional[str] = ..., *args: Any) -> NoReturn: ... def docmodule(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docclass(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docroutine(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docother(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docproperty(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docdata(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def getdocloc(self, object: object) -> Optional[str]: ... class HTMLRepr(Repr): maxlist: int maxtuple: int maxdict: int maxstring: int maxother: int def __init__(self) -> None: ... def escape(self, text: str) -> str: ... def repr(self, object: object) -> str: ... def repr1(self, x: object, level: complex) -> str: ... def repr_string(self, x: Text, level: complex) -> str: ... def repr_str(self, x: Text, level: complex) -> str: ... def repr_instance(self, x: object, level: complex) -> str: ... def repr_unicode(self, x: AnyStr, level: complex) -> str: ... class HTMLDoc(Doc): def repr(self, object: object) -> str: ... def escape(self, test: str) -> str: ... def page(self, title: str, contents: str) -> str: ... def heading(self, title: str, fgcol: str, bgcol: str, extras: str = ...) -> str: ... def section(self, title: str, fgcol: str, bgcol: str, contents: str, width: int = ..., prelude: str = ..., marginalia: Optional[str] = ..., gap: str = ...) -> str: ... def bigsection(self, title: str, *args) -> str: ... def preformat(self, text: str) -> str: ... def multicolumn(self, list: List[Any], format: Callable[[Any], str], cols: int = ...) -> str: ... def grey(self, text: str) -> str: ... def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: ... def classlink(self, object: object, modname: str) -> str: ... def modulelink(self, object: object) -> str: ... 
def modpkglink(self, data: Tuple[str, str, bool, bool]) -> str: ... def markup(self, text: str, escape: Optional[Callable[[str], str]] = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ...) -> str: ... def formattree(self, tree: List[Union[Tuple[type, Tuple[type, ...]], List[Any]]], modname: str, parent: Optional[type] = ...) -> str: ... def docmodule(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., *ignored) -> str: ... def docclass(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., *ignored) -> str: ... def formatvalue(self, object: object) -> str: ... def docroutine(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., cl: Optional[type] = ..., *ignored) -> str: ... def docproperty(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def docother(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., *ignored) -> str: ... def docdata(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def index(self, dir: str, shadowed: Optional[MutableMapping[str, bool]] = ...) -> str: ... class TextRepr(Repr): maxlist: int maxtuple: int maxdict: int maxstring: int maxother: int def __init__(self) -> None: ... def repr1(self, x: object, level: complex) -> str: ... def repr_string(self, x: str, level: complex) -> str: ... def repr_str(self, x: str, level: complex) -> str: ... def repr_instance(self, x: object, level: complex) -> str: ... class TextDoc(Doc): def repr(self, object: object) -> str: ... def bold(self, text: str) -> str: ... def indent(self, text: str, prefix: str = ...) -> str: ... 
def section(self, title: str, contents: str) -> str: ... def formattree(self, tree: List[Union[Tuple[type, Tuple[type, ...]], List[Any]]], modname: str, parent: Optional[type] = ..., prefix: str = ...) -> str: ... def docmodule(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., *ignored) -> str: ... def docclass(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., *ignored) -> str: ... def formatvalue(self, object: object) -> str: ... def docroutine(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def docproperty(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def docdata(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def docother(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., parent: Optional[str] = ..., maxlen: Optional[int] = ..., doc: Optional[Any] = ..., *ignored) -> str: ... def pager(text: str) -> None: ... def getpager() -> Callable[[str], None]: ... def plain(text: str) -> str: ... def pipepager(text: str, cmd: str) -> None: ... def tempfilepager(text: str, cmd: str) -> None: ... def ttypager(text: str) -> None: ... def plainpager(text: str) -> None: ... def describe(thing: Any) -> str: ... def locate(path: str, forceload: bool = ...) -> object: ... text: TextDoc html: HTMLDoc class _OldStyleClass: ... def resolve(thing: Union[str, object], forceload: bool = ...) -> Optional[Tuple[object, str]]: ... def render_doc(thing: Union[str, object], title: str = ..., forceload: bool = ...) -> str: ... def doc(thing: Union[str, object], title: str = ..., forceload: bool = ...) -> None: ... def writedoc(thing: Union[str, object], forceload: bool = ...) -> None: ... def writedocs(dir: str, pkgpath: str = ..., done: Optional[Any] = ...) -> None: ... 
class Helper: keywords: Dict[str, Union[str, Tuple[str, str]]] symbols: Dict[str, str] topics: Dict[str, Union[str, Tuple[str, ...]]] def __init__(self, input: Optional[IO[str]] = ..., output: Optional[IO[str]] = ...) -> None: ... input: IO[str] output: IO[str] def __call__(self, request: Union[str, Helper, object] = ...) -> None: ... def interact(self) -> None: ... def getline(self, prompt: str) -> str: ... def help(self, request: Any) -> None: ... def intro(self) -> None: ... def list(self, items: List[str], columns: int = ..., width: int = ...) -> None: ... def listkeywords(self) -> None: ... def listsymbols(self) -> None: ... def listtopics(self) -> None: ... def showtopic(self, topic: str, more_xrefs: str = ...) -> None: ... def showsymbol(self, symbol: str) -> None: ... def listmodules(self, key: str = ...) -> None: ... help: Helper # See Python issue #11182: "remove the unused and undocumented pydoc.Scanner class" # class Scanner: # roots = ... # type: Any # state = ... # type: Any # children = ... # type: Any # descendp = ... # type: Any # def __init__(self, roots, children, descendp) -> None: ... # def next(self): ... class ModuleScanner: quit: bool def run(self, callback: Callable[[Optional[str], str, str], None], key: Optional[Any] = ..., completer: Optional[Callable[[], None]] = ..., onerror: Optional[Callable[[str], None]] = ...) -> None: ... def apropos(key: str) -> None: ... def serve(port: int, callback: Optional[Callable[[Any], None]] = ..., completer: Optional[Callable[[], None]] = ...) -> None: ... def gui() -> None: ... def ispath(x: Any) -> bool: ... def cli() -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/pyexpat/0000755€tŠÔÚ€2›s®0000000000013576752267025430 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/pyexpat/__init__.pyi0000644€tŠÔÚ€2›s®0000000636313576752252027714 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple, Optional, Callable, Any, Protocol, Union, Dict, Text import pyexpat.errors as errors import pyexpat.model as model EXPAT_VERSION: str # undocumented version_info: Tuple[int, int, int] # undocumented native_encoding: str # undocumented features: List[Tuple[str, int]] # undocumented class ExpatError(Exception): code: int lineno: int offset: int error = ExpatError class _Reader(Protocol): def read(self, length: int) -> bytes: ... XML_PARAM_ENTITY_PARSING_NEVER: int XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE: int XML_PARAM_ENTITY_PARSING_ALWAYS: int _Model = Tuple[int, int, Optional[str], tuple] class XMLParserType(object): def Parse(self, data: Union[Text, bytes], isfinal: bool = ...) -> int: ... def ParseFile(self, file: _Reader) -> int: ... def SetBase(self, base: Text) -> None: ... def GetBase(self) -> Optional[str]: ... def GetInputContext(self) -> Optional[bytes]: ... def ExternalEntityParserCreate(self, context: Optional[Text], encoding: Text = ...) -> XMLParserType: ... def SetParamEntityParsing(self, flag: int) -> int: ... def UseForeignDTD(self, flag: bool = ...) -> None: ... 
buffer_size: int buffer_text: bool buffer_used: int namespace_prefixes: bool # undocumented ordered_attributes: bool specified_attributes: bool ErrorByteIndex: int ErrorCode: int ErrorColumnNumber: int ErrorLineNumber: int CurrentByteIndex: int CurrentColumnNumber: int CurrentLineNumber: int XmlDeclHandler: Optional[Callable[[str, Optional[str], int], Any]] StartDoctypeDeclHandler: Optional[Callable[[str, Optional[str], Optional[str], bool], Any]] EndDoctypeDeclHandler: Optional[Callable[[], Any]] ElementDeclHandler: Optional[Callable[[str, _Model], Any]] AttlistDeclHandler: Optional[Callable[[str, str, str, Optional[str], bool], Any]] StartElementHandler: Optional[Union[ Callable[[str, Dict[str, str]], Any], Callable[[str, List[str]], Any], Callable[[str, Union[Dict[str, str]], List[str]], Any]]] EndElementHandler: Optional[Callable[[str], Any]] ProcessingInstructionHandler: Optional[Callable[[str, str], Any]] CharacterDataHandler: Optional[Callable[[str], Any]] UnparsedEntityDeclHandler: Optional[Callable[[str, Optional[str], str, Optional[str], str], Any]] EntityDeclHandler: Optional[Callable[[str, bool, Optional[str], Optional[str], str, Optional[str], Optional[str]], Any]] NotationDeclHandler: Optional[Callable[[str, Optional[str], str, Optional[str]], Any]] StartNamespaceDeclHandler: Optional[Callable[[str, str], Any]] EndNamespaceDeclHandler: Optional[Callable[[str], Any]] CommentHandler: Optional[Callable[[str], Any]] StartCdataSectionHandler: Optional[Callable[[], Any]] EndCdataSectionHandler: Optional[Callable[[], Any]] DefaultHandler: Optional[Callable[[str], Any]] DefaultHandlerExpand: Optional[Callable[[str], Any]] NotStandaloneHandler: Optional[Callable[[], int]] ExternalEntityRefHandler: Optional[Callable[[str, Optional[str], Optional[str], Optional[str]], int]] def ErrorString(errno: int) -> str: ... def ParserCreate(encoding: Optional[Text] = ..., namespace_separator: Optional[Text] = ...) -> XMLParserType: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/pyexpat/errors.pyi0000644€tŠÔÚ€2›s®0000000237313576752252027466 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Dict if sys.version_info >= (3, 2): codes: Dict[str, int] messages: Dict[int, str] XML_ERROR_ABORTED: str XML_ERROR_ASYNC_ENTITY: str XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: str XML_ERROR_BAD_CHAR_REF: str XML_ERROR_BINARY_ENTITY_REF: str XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: str XML_ERROR_DUPLICATE_ATTRIBUTE: str XML_ERROR_ENTITY_DECLARED_IN_PE: str XML_ERROR_EXTERNAL_ENTITY_HANDLING: str XML_ERROR_FEATURE_REQUIRES_XML_DTD: str XML_ERROR_FINISHED: str XML_ERROR_INCOMPLETE_PE: str XML_ERROR_INCORRECT_ENCODING: str XML_ERROR_INVALID_TOKEN: str XML_ERROR_JUNK_AFTER_DOC_ELEMENT: str XML_ERROR_MISPLACED_XML_PI: str XML_ERROR_NOT_STANDALONE: str XML_ERROR_NOT_SUSPENDED: str XML_ERROR_NO_ELEMENTS: str XML_ERROR_NO_MEMORY: str XML_ERROR_PARAM_ENTITY_REF: str XML_ERROR_PARTIAL_CHAR: str XML_ERROR_PUBLICID: str XML_ERROR_RECURSIVE_ENTITY_REF: str XML_ERROR_SUSPENDED: str XML_ERROR_SUSPEND_PE: str XML_ERROR_SYNTAX: str XML_ERROR_TAG_MISMATCH: str XML_ERROR_TEXT_DECL: str XML_ERROR_UNBOUND_PREFIX: str XML_ERROR_UNCLOSED_CDATA_SECTION: str XML_ERROR_UNCLOSED_TOKEN: str XML_ERROR_UNDECLARING_PREFIX: str XML_ERROR_UNDEFINED_ENTITY: str XML_ERROR_UNEXPECTED_STATE: str XML_ERROR_UNKNOWN_ENCODING: str XML_ERROR_XML_DECL: str mypy-0.761/mypy/typeshed/stdlib/2and3/pyexpat/model.pyi0000644€tŠÔÚ€2›s®0000000031513576752252027244 0ustar jukkaDROPBOX\Domain Users00000000000000XML_CTYPE_ANY: int XML_CTYPE_CHOICE: int XML_CTYPE_EMPTY: int XML_CTYPE_MIXED: int XML_CTYPE_NAME: int XML_CTYPE_SEQ: int XML_CQUANT_NONE: int XML_CQUANT_OPT: int XML_CQUANT_PLUS: int XML_CQUANT_REP: int mypy-0.761/mypy/typeshed/stdlib/2and3/quopri.pyi0000644€tŠÔÚ€2›s®0000000057413576752252026000 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for quopri (Python 2 and 3) from typing import BinaryIO def encode(input: BinaryIO, 
output: BinaryIO, quotetabs: int, header: int = ...) -> None: ... def encodestring(s: bytes, quotetabs: int = ..., header: int = ...) -> bytes: ... def decode(input: BinaryIO, output: BinaryIO, header: int = ...) -> None: ... def decodestring(s: bytes, header: int = ...) -> bytes: ... mypy-0.761/mypy/typeshed/stdlib/2and3/readline.pyi0000644€tŠÔÚ€2›s®0000000276513576752252026250 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for readline from typing import Callable, Optional, Sequence import sys _CompleterT = Optional[Callable[[str, int], Optional[str]]] _CompDispT = Optional[Callable[[str, Sequence[str], int], None]] def parse_and_bind(string: str) -> None: ... def read_init_file(filename: str = ...) -> None: ... def get_line_buffer() -> str: ... def insert_text(string: str) -> None: ... def redisplay() -> None: ... def read_history_file(filename: str = ...) -> None: ... def write_history_file(filename: str = ...) -> None: ... if sys.version_info >= (3, 5): def append_history_file(nelements: int, filename: str = ...) -> None: ... def get_history_length() -> int: ... def set_history_length(length: int) -> None: ... def clear_history() -> None: ... def get_current_history_length() -> int: ... def get_history_item(index: int) -> str: ... def remove_history_item(pos: int) -> None: ... def replace_history_item(pos: int, line: str) -> None: ... def add_history(string: str) -> None: ... def set_startup_hook(function: Optional[Callable[[], None]] = ...) -> None: ... def set_pre_input_hook(function: Optional[Callable[[], None]] = ...) -> None: ... def set_completer(function: _CompleterT = ...) -> None: ... def get_completer() -> _CompleterT: ... def get_completion_type() -> int: ... def get_begidx() -> int: ... def get_endidx() -> int: ... def set_completer_delims(string: str) -> None: ... def get_completer_delims() -> str: ... def set_completion_display_matches_hook(function: _CompDispT = ...) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/rlcompleter.pyi0000644€tŠÔÚ€2›s®0000000051613576752252027005 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for rlcompleter from typing import Any, Dict, Optional, Union import sys if sys.version_info >= (3,): _Text = str else: _Text = Union[str, unicode] class Completer: def __init__(self, namespace: Optional[Dict[str, Any]] = ...) -> None: ... def complete(self, text: _Text, state: int) -> Optional[str]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/sched.pyi0000644€tŠÔÚ€2›s®0000000246413576752252025547 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Dict, List, NamedTuple, Optional, Text, Tuple class Event(NamedTuple): time: float priority: Any action: Callable[..., Any] argument: Tuple[Any, ...] kwargs: Dict[Text, Any] class scheduler: if sys.version_info >= (3, 3): def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], None] = ...) -> None: ... def enterabs(self, time: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...] = ..., kwargs: Dict[str, Any] = ...) -> Event: ... def enter(self, delay: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...] = ..., kwargs: Dict[str, Any] = ...) -> Event: ... def run(self, blocking: bool = ...) -> Optional[float]: ... else: def __init__(self, timefunc: Callable[[], float], delayfunc: Callable[[float], None]) -> None: ... def enterabs(self, time: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...]) -> Event: ... def enter(self, delay: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...]) -> Event: ... def run(self) -> None: ... def cancel(self, event: Event) -> None: ... def empty(self) -> bool: ... @property def queue(self) -> List[Event]: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/select.pyi0000644€tŠÔÚ€2›s®0000000754013576752252025740 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Iterable, List, Optional, Protocol, Tuple, Union class _HasFileno(Protocol): def fileno(self) -> int: ... _FileDescriptor = Union[int, _HasFileno] EPOLLERR: int EPOLLET: int EPOLLHUP: int EPOLLIN: int EPOLLMSG: int EPOLLONESHOT: int EPOLLOUT: int EPOLLPRI: int EPOLLRDBAND: int EPOLLRDNORM: int EPOLLWRBAND: int EPOLLWRNORM: int EPOLL_RDHUP: int KQ_EV_ADD: int KQ_EV_CLEAR: int KQ_EV_DELETE: int KQ_EV_DISABLE: int KQ_EV_ENABLE: int KQ_EV_EOF: int KQ_EV_ERROR: int KQ_EV_FLAG1: int KQ_EV_ONESHOT: int KQ_EV_SYSFLAGS: int KQ_FILTER_AIO: int KQ_FILTER_NETDEV: int KQ_FILTER_PROC: int KQ_FILTER_READ: int KQ_FILTER_SIGNAL: int KQ_FILTER_TIMER: int KQ_FILTER_VNODE: int KQ_FILTER_WRITE: int KQ_NOTE_ATTRIB: int KQ_NOTE_CHILD: int KQ_NOTE_DELETE: int KQ_NOTE_EXEC: int KQ_NOTE_EXIT: int KQ_NOTE_EXTEND: int KQ_NOTE_FORK: int KQ_NOTE_LINK: int KQ_NOTE_LINKDOWN: int KQ_NOTE_LINKINV: int KQ_NOTE_LINKUP: int KQ_NOTE_LOWAT: int KQ_NOTE_PCTRLMASK: int KQ_NOTE_PDATAMASK: int KQ_NOTE_RENAME: int KQ_NOTE_REVOKE: int KQ_NOTE_TRACK: int KQ_NOTE_TRACKERR: int KQ_NOTE_WRITE: int PIPE_BUF: int POLLERR: int POLLHUP: int POLLIN: int POLLMSG: int POLLNVAL: int POLLOUT: int POLLPRI: int POLLRDBAND: int POLLRDNORM: int POLLWRBAND: int POLLWRNORM: int class poll: def __init__(self) -> None: ... def register(self, fd: _FileDescriptor, eventmask: int = ...) -> None: ... def modify(self, fd: _FileDescriptor, eventmask: int) -> None: ... def unregister(self, fd: _FileDescriptor) -> None: ... def poll(self, timeout: Optional[float] = ...) -> List[Tuple[int, int]]: ... def select(rlist: Iterable[Any], wlist: Iterable[Any], xlist: Iterable[Any], timeout: Optional[float] = ...) -> Tuple[List[Any], List[Any], List[Any]]: ... if sys.version_info >= (3, 3): error = OSError else: class error(Exception): ... 
# BSD only class kevent(object): data: Any fflags: int filter: int flags: int ident: int udata: Any def __init__(self, ident: _FileDescriptor, filter: int = ..., flags: int = ..., fflags: int = ..., data: Any = ..., udata: Any = ...) -> None: ... # BSD only class kqueue(object): closed: bool def __init__(self) -> None: ... def close(self) -> None: ... def control(self, changelist: Optional[Iterable[kevent]], max_events: int, timeout: float = ...) -> List[kevent]: ... def fileno(self) -> int: ... @classmethod def fromfd(cls, fd: _FileDescriptor) -> kqueue: ... # Linux only class epoll(object): if sys.version_info >= (3, 3): def __init__(self, sizehint: int = ..., flags: int = ...) -> None: ... else: def __init__(self, sizehint: int = ...) -> None: ... if sys.version_info >= (3, 4): def __enter__(self) -> epoll: ... def __exit__(self, *args: Any) -> None: ... def close(self) -> None: ... closed: bool def fileno(self) -> int: ... def register(self, fd: _FileDescriptor, eventmask: int = ...) -> None: ... def modify(self, fd: _FileDescriptor, eventmask: int) -> None: ... def unregister(self, fd: _FileDescriptor) -> None: ... def poll(self, timeout: float = ..., maxevents: int = ...) -> List[Tuple[int, int]]: ... @classmethod def fromfd(cls, fd: _FileDescriptor) -> epoll: ... if sys.version_info >= (3, 3): # Solaris only class devpoll: if sys.version_info >= (3, 4): def close(self) -> None: ... closed: bool def fileno(self) -> int: ... def register(self, fd: _FileDescriptor, eventmask: int = ...) -> None: ... def modify(self, fd: _FileDescriptor, eventmask: int = ...) -> None: ... def unregister(self, fd: _FileDescriptor) -> None: ... def poll(self, timeout: Optional[float] = ...) -> List[Tuple[int, int]]: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/shutil.pyi0000644€tŠÔÚ€2›s®0000001406313576752252025767 0ustar jukkaDROPBOX\Domain Users00000000000000import os import sys # 'bytes' paths are not properly supported: they don't work with all functions, # sometimes they only work partially (broken exception messages), and the test # cases don't use them. from typing import ( List, Iterable, Callable, Any, Tuple, Sequence, NamedTuple, IO, AnyStr, Optional, Union, Set, TypeVar, overload, Type, Protocol, Text ) if sys.version_info >= (3, 6): _Path = Union[str, os.PathLike[str]] _AnyStr = str _AnyPath = TypeVar("_AnyPath", str, os.PathLike[str]) # Return value of some functions that may either return a path-like object that was passed in or # a string _PathReturn = Any elif sys.version_info >= (3,): _Path = str _AnyStr = str _AnyPath = str _PathReturn = str else: _Path = Text _AnyStr = TypeVar("_AnyStr", str, unicode) _AnyPath = TypeVar("_AnyPath", str, unicode) _PathReturn = Type[None] if sys.version_info >= (3,): class Error(OSError): ... class SameFileError(Error): ... class SpecialFileError(OSError): ... class ExecError(OSError): ... class ReadError(OSError): ... class RegistryError(Exception): ... else: class Error(EnvironmentError): ... class SpecialFileError(EnvironmentError): ... class ExecError(EnvironmentError): ... _S_co = TypeVar("_S_co", covariant=True) _S_contra = TypeVar("_S_contra", contravariant=True) class _Reader(Protocol[_S_co]): def read(self, length: int) -> _S_co: ... class _Writer(Protocol[_S_contra]): def write(self, data: _S_contra) -> Any: ... def copyfileobj(fsrc: _Reader[AnyStr], fdst: _Writer[AnyStr], length: int = ...) -> None: ... if sys.version_info >= (3,): def copyfile(src: _Path, dst: _AnyPath, *, follow_symlinks: bool = ...) -> _AnyPath: ... def copymode(src: _Path, dst: _Path, *, follow_symlinks: bool = ...) -> None: ... def copystat(src: _Path, dst: _Path, *, follow_symlinks: bool = ...) -> None: ... 
def copy(src: _Path, dst: _Path, *, follow_symlinks: bool = ...) -> _PathReturn: ... def copy2(src: _Path, dst: _Path, *, follow_symlinks: bool = ...) -> _PathReturn: ... else: def copyfile(src: _Path, dst: _Path) -> None: ... def copymode(src: _Path, dst: _Path) -> None: ... def copystat(src: _Path, dst: _Path) -> None: ... def copy(src: _Path, dst: _Path) -> _PathReturn: ... def copy2(src: _Path, dst: _Path) -> _PathReturn: ... def ignore_patterns(*patterns: _Path) -> Callable[[Any, List[_AnyStr]], Set[_AnyStr]]: ... if sys.version_info >= (3, 8): def copytree( src: _Path, dst: _Path, symlinks: bool = ..., ignore: Union[None, Callable[[str, List[str]], Iterable[str]], Callable[[_Path, List[str]], Iterable[str]]] = ..., copy_function: Callable[[str, str], None] = ..., ignore_dangling_symlinks: bool = ..., dirs_exist_ok: bool = ..., ) -> _PathReturn: ... elif sys.version_info >= (3,): def copytree(src: _Path, dst: _Path, symlinks: bool = ..., ignore: Union[None, Callable[[str, List[str]], Iterable[str]], Callable[[_Path, List[str]], Iterable[str]]] = ..., copy_function: Callable[[str, str], None] = ..., ignore_dangling_symlinks: bool = ...) -> _PathReturn: ... else: def copytree(src: AnyStr, dst: AnyStr, symlinks: bool = ..., ignore: Union[None, Callable[[AnyStr, List[AnyStr]], Iterable[AnyStr]]] = ...) -> _PathReturn: ... if sys.version_info >= (3,): @overload def rmtree(path: bytes, ignore_errors: bool = ..., onerror: Optional[Callable[[Any, str, Any], Any]] = ...) -> None: ... @overload def rmtree(path: _AnyPath, ignore_errors: bool = ..., onerror: Optional[Callable[[Any, _AnyPath, Any], Any]] = ...) -> None: ... else: def rmtree(path: _AnyPath, ignore_errors: bool = ..., onerror: Optional[Callable[[Any, _AnyPath, Any], Any]] = ...) -> None: ... if sys.version_info >= (3, 5): _CopyFn = Union[Callable[[str, str], None], Callable[[_Path, _Path], None]] def move(src: _Path, dst: _Path, copy_function: _CopyFn = ...) -> _PathReturn: ... 
else: def move(src: _Path, dst: _Path) -> _PathReturn: ... if sys.version_info >= (3,): class _ntuple_diskusage(NamedTuple): total: int used: int free: int def disk_usage(path: _Path) -> _ntuple_diskusage: ... def chown(path: _Path, user: Optional[str] = ..., group: Optional[str] = ...) -> None: ... def which(cmd: _Path, mode: int = ..., path: Optional[_Path] = ...) -> Optional[str]: ... def make_archive(base_name: _AnyStr, format: str, root_dir: Optional[_Path] = ..., base_dir: Optional[_Path] = ..., verbose: bool = ..., dry_run: bool = ..., owner: Optional[str] = ..., group: Optional[str] = ..., logger: Optional[Any] = ...) -> _AnyStr: ... def get_archive_formats() -> List[Tuple[str, str]]: ... def register_archive_format(name: str, function: Callable[..., Any], extra_args: Optional[Sequence[Union[Tuple[str, Any], List[Any]]]] = ..., description: str = ...) -> None: ... def unregister_archive_format(name: str) -> None: ... if sys.version_info >= (3,): # Should be _Path once http://bugs.python.org/issue30218 is fixed def unpack_archive(filename: str, extract_dir: Optional[_Path] = ..., format: Optional[str] = ...) -> None: ... def register_unpack_format(name: str, extensions: List[str], function: Any, extra_args: Sequence[Tuple[str, Any]] = ..., description: str = ...) -> None: ... def unregister_unpack_format(name: str) -> None: ... def get_unpack_formats() -> List[Tuple[str, List[str], str]]: ... def get_terminal_size(fallback: Tuple[int, int] = ...) -> os.terminal_size: ... mypy-0.761/mypy/typeshed/stdlib/2and3/site.pyi0000644€tŠÔÚ€2›s®0000000074213576752252025422 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for site from typing import List, Iterable, Optional import sys PREFIXES: List[str] ENABLE_USER_SITE: Optional[bool] USER_SITE: Optional[str] USER_BASE: Optional[str] if sys.version_info < (3,): def main() -> None: ... def addsitedir(sitedir: str, known_paths: Optional[Iterable[str]] = ...) -> None: ... 
def getsitepackages(prefixes: Optional[Iterable[str]] = ...) -> List[str]: ... def getuserbase() -> str: ... def getusersitepackages() -> str: ... mypy-0.761/mypy/typeshed/stdlib/2and3/smtpd.pyi0000644€tŠÔÚ€2›s®0000000545013576752252025606 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for smtpd (Python 2 and 3) import sys import socket import asyncore import asynchat from typing import Any, DefaultDict, List, Optional, Text, Tuple, Type _Address = Tuple[str, int] # (host, port) class SMTPChannel(asynchat.async_chat): COMMAND: int DATA: int if sys.version_info >= (3,): command_size_limits: DefaultDict[str, int] smtp_server: SMTPServer conn: socket.socket addr: Any received_lines: List[Text] smtp_state: int seen_greeting: str mailfrom: str rcpttos: List[str] received_data: str fqdn: str peer: str command_size_limit: int data_size_limit: int enable_SMTPUTF8: bool @property def max_command_size_limit(self) -> int: ... if sys.version_info >= (3,): def __init__(self, server: SMTPServer, conn: socket.socket, addr: Any, data_size_limit: int = ..., map: Optional[asyncore._maptype] = ..., enable_SMTPUTF8: bool = ..., decode_data: bool = ...) -> None: ... else: def __init__(self, server: SMTPServer, conn: socket.socket, addr: Any, data_size_limit: int = ...) -> None: ... # base asynchat.async_chat.push() accepts bytes def push(self, msg: Text) -> None: ... # type: ignore def collect_incoming_data(self, data: bytes) -> None: ... def found_terminator(self) -> None: ... def smtp_HELO(self, arg: str) -> None: ... def smtp_NOOP(self, arg: str) -> None: ... def smtp_QUIT(self, arg: str) -> None: ... def smtp_MAIL(self, arg: str) -> None: ... def smtp_RCPT(self, arg: str) -> None: ... def smtp_RSET(self, arg: str) -> None: ... def smtp_DATA(self, arg: str) -> None: ... if sys.version_info >= (3, 3): def smtp_EHLO(self, arg: str) -> None: ... def smtp_HELP(self, arg: str) -> None: ... def smtp_VRFY(self, arg: str) -> None: ... def smtp_EXPN(self, arg: str) -> None: ... 
class SMTPServer(asyncore.dispatcher): channel_class: Type[SMTPChannel] data_size_limit: int enable_SMTPUTF8: bool if sys.version_info >= (3,): def __init__(self, localaddr: _Address, remoteaddr: _Address, data_size_limit: int = ..., map: Optional[asyncore._maptype] = ..., enable_SMTPUTF8: bool = ..., decode_data: bool = ...) -> None: ... else: def __init__(self, localaddr: _Address, remoteaddr: _Address, data_size_limit: int = ...) -> None: ... def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... def process_message(self, peer: _Address, mailfrom: str, rcpttos: List[Text], data: str, **kwargs: Any) -> Optional[str]: ... class DebuggingServer(SMTPServer): ... class PureProxy(SMTPServer): ... class MailmanProxy(PureProxy): ... mypy-0.761/mypy/typeshed/stdlib/2and3/sndhdr.pyi0000644€tŠÔÚ€2›s®0000000117313576752252025737 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sndhdr (Python 2 and 3) import os import sys from typing import Any, NamedTuple, Optional, Tuple, Union if sys.version_info >= (3, 5): class SndHeaders(NamedTuple): filetype: str framerate: int nchannels: int nframes: int sampwidth: Union[int, str] _SndHeaders = SndHeaders else: _SndHeaders = Tuple[str, int, int, int, Union[int, str]] if sys.version_info >= (3, 6): _Path = Union[str, bytes, os.PathLike[Any]] else: _Path = Union[str, bytes] def what(filename: _Path) -> Optional[_SndHeaders]: ... def whathdr(filename: _Path) -> Optional[_SndHeaders]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/socket.pyi0000644€tŠÔÚ€2›s®0000005227713576752252025760 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub for the socket module This file is organized to mirror the module's documentation, with a very small number of exceptions. To avoid requiring tests on all platforms, platform checks are included only where the documentation notes platform availability (as opposed to following actual availability), with one or two exceptions. 
Module documentation: https://docs.python.org/3/library/socket.html CPython module source: https://github.com/python/cpython/blob/master/Lib/socket.py CPython C source: https://github.com/python/cpython/blob/master/Modules/socketmodule.c """ # Authorship from original mypy stubs (not in typeshed git history): # Ron Murawski # adapted for Python 2.7 by Michal Pokorny import sys from typing import Any, BinaryIO, Iterable, List, Optional, Text, TextIO, Tuple, TypeVar, Union, overload if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal # ----- Constants ----- # Some socket families are listed in the "Socket families" section of the docs, # but not the "Constants" section. These are listed at the end of the list of # constants. # # Besides those and the first few constants listed, the constants are listed in # documentation order. # Constants defined by Python (i.e. not OS constants re-exported from C) has_ipv6: bool SocketType: Any if sys.version_info >= (3,): SocketIO: Any # Re-exported errno EAGAIN: int EBADF: int EINTR: int EWOULDBLOCK: int # Constants re-exported from C # Per socketmodule.c, only these three families are portable AF_UNIX: AddressFamily AF_INET: AddressFamily AF_INET6: AddressFamily SOCK_STREAM: SocketKind SOCK_DGRAM: SocketKind SOCK_RAW: SocketKind SOCK_RDM: SocketKind SOCK_SEQPACKET: SocketKind if sys.platform == 'linux' and sys.version_info >= (3,): SOCK_CLOEXEC: SocketKind SOCK_NONBLOCK: SocketKind # Address families not mentioned in the docs AF_AAL5: AddressFamily AF_APPLETALK: AddressFamily AF_ASH: AddressFamily AF_ATMPVC: AddressFamily AF_ATMSVC: AddressFamily AF_AX25: AddressFamily AF_BRIDGE: AddressFamily AF_DECnet: AddressFamily AF_ECONET: AddressFamily AF_IPX: AddressFamily AF_IRDA: AddressFamily AF_KEY: AddressFamily AF_LLC: AddressFamily AF_NETBEUI: AddressFamily AF_NETROM: AddressFamily AF_PPPOX: AddressFamily AF_ROSE: AddressFamily AF_ROUTE: AddressFamily AF_SECURITY: AddressFamily 
AF_SNA: AddressFamily AF_SYSTEM: AddressFamily AF_UNSPEC: AddressFamily AF_WANPIPE: AddressFamily AF_X25: AddressFamily # The "many constants" referenced by the docs SOMAXCONN: int AI_ADDRCONFIG: AddressInfo AI_ALL: AddressInfo AI_CANONNAME: AddressInfo AI_DEFAULT: AddressInfo AI_MASK: AddressInfo AI_NUMERICHOST: AddressInfo AI_NUMERICSERV: AddressInfo AI_PASSIVE: AddressInfo AI_V4MAPPED: AddressInfo AI_V4MAPPED_CFG: AddressInfo EAIEAI_ADDRFAMILY: int EAI_AGAIN: int EAI_BADFLAGS: int EAI_BADHINTS: int EAI_FAIL: int EAI_FAMILY: int EAI_MAX: int EAI_MEMORY: int EAI_NODATA: int EAI_NONAME: int EAI_OVERFLOW: int EAI_PROTOCOL: int EAI_SERVICE: int EAI_SOCKTYPE: int EAI_SYSTEM: int INADDR_ALLHOSTS_GROUP: int INADDR_ANY: int INADDR_BROADCAST: int INADDR_LOOPBACK: int INADDR_MAX_LOCAL_GROUP: int INADDR_NONE: int INADDR_UNSPEC_GROUP: int IPPORT_RESERVED: int IPPORT_USERRESERVED: int IPPROTO_AH: int IPPROTO_BIP: int IPPROTO_DSTOPTS: int IPPROTO_EGP: int IPPROTO_EON: int IPPROTO_ESP: int IPPROTO_FRAGMENT: int IPPROTO_GGP: int IPPROTO_GRE: int IPPROTO_HELLO: int IPPROTO_HOPOPTS: int IPPROTO_ICMP: int IPPROTO_ICMPV6: int IPPROTO_IDP: int IPPROTO_IGMP: int IPPROTO_IP: int IPPROTO_IPCOMP: int IPPROTO_IPIP: int IPPROTO_IPV4: int IPPROTO_IPV6: int IPPROTO_MAX: int IPPROTO_MOBILE: int IPPROTO_ND: int IPPROTO_NONE: int IPPROTO_PIM: int IPPROTO_PUP: int IPPROTO_RAW: int IPPROTO_ROUTING: int IPPROTO_RSVP: int IPPROTO_SCTP: int IPPROTO_TCP: int IPPROTO_TP: int IPPROTO_UDP: int IPPROTO_VRRP: int IPPROTO_XTP: int IPV6_CHECKSUM: int IPV6_DONTFRAG: int IPV6_DSTOPTS: int IPV6_HOPLIMIT: int IPV6_HOPOPTS: int IPV6_JOIN_GROUP: int IPV6_LEAVE_GROUP: int IPV6_MULTICAST_HOPS: int IPV6_MULTICAST_IF: int IPV6_MULTICAST_LOOP: int IPV6_NEXTHOP: int IPV6_PATHMTU: int IPV6_PKTINFO: int IPV6_RECVDSTOPTS: int IPV6_RECVHOPLIMIT: int IPV6_RECVHOPOPTS: int IPV6_RECVPATHMTU: int IPV6_RECVPKTINFO: int IPV6_RECVRTHDR: int IPV6_RECVTCLASS: int IPV6_RTHDR: int IPV6_RTHDRDSTOPTS: int IPV6_RTHDR_TYPE_0: int 
IPV6_TCLASS: int IPV6_UNICAST_HOPS: int IPV6_USE_MIN_MTU: int IPV6_V6ONLY: int IPX_TYPE: int IP_ADD_MEMBERSHIP: int IP_DEFAULT_MULTICAST_LOOP: int IP_DEFAULT_MULTICAST_TTL: int IP_DROP_MEMBERSHIP: int IP_HDRINCL: int IP_MAX_MEMBERSHIPS: int IP_MULTICAST_IF: int IP_MULTICAST_LOOP: int IP_MULTICAST_TTL: int IP_OPTIONS: int IP_RECVDSTADDR: int IP_RECVOPTS: int IP_RECVRETOPTS: int IP_RETOPTS: int IP_TOS: int IP_TRANSPARENT: int IP_TTL: int LOCAL_PEERCRED: int MSG_BCAST: MsgFlag MSG_BTAG: MsgFlag MSG_CMSG_CLOEXEC: MsgFlag MSG_CONFIRM: MsgFlag MSG_CTRUNC: MsgFlag MSG_DONTROUTE: MsgFlag MSG_DONTWAIT: MsgFlag MSG_EOF: MsgFlag MSG_EOR: MsgFlag MSG_ERRQUEUE: MsgFlag MSG_ETAG: MsgFlag MSG_FASTOPEN: MsgFlag MSG_MCAST: MsgFlag MSG_MORE: MsgFlag MSG_NOSIGNAL: MsgFlag MSG_NOTIFICATION: MsgFlag MSG_OOB: MsgFlag MSG_PEEK: MsgFlag MSG_TRUNC: MsgFlag MSG_WAITALL: MsgFlag NI_DGRAM: int NI_MAXHOST: int NI_MAXSERV: int NI_NAMEREQD: int NI_NOFQDN: int NI_NUMERICHOST: int NI_NUMERICSERV: int SCM_CREDENTIALS: int SCM_CREDS: int SCM_RIGHTS: int SHUT_RD: int SHUT_RDWR: int SHUT_WR: int SOL_ATALK: int SOL_AX25: int SOL_HCI: int SOL_IP: int SOL_IPX: int SOL_NETROM: int SOL_ROSE: int SOL_SOCKET: int SOL_TCP: int SOL_UDP: int SO_ACCEPTCONN: int SO_BINDTODEVICE: int SO_BROADCAST: int SO_DEBUG: int SO_DONTROUTE: int SO_ERROR: int SO_EXCLUSIVEADDRUSE: int SO_KEEPALIVE: int SO_LINGER: int SO_MARK: int SO_OOBINLINE: int SO_PASSCRED: int SO_PEERCRED: int SO_PRIORITY: int SO_RCVBUF: int SO_RCVLOWAT: int SO_RCVTIMEO: int SO_REUSEADDR: int SO_REUSEPORT: int SO_SETFIB: int SO_SNDBUF: int SO_SNDLOWAT: int SO_SNDTIMEO: int SO_TYPE: int SO_USELOOPBACK: int TCP_CORK: int TCP_DEFER_ACCEPT: int TCP_FASTOPEN: int TCP_INFO: int TCP_KEEPCNT: int TCP_KEEPIDLE: int TCP_KEEPINTVL: int TCP_LINGER2: int TCP_MAXSEG: int TCP_NODELAY: int TCP_QUICKACK: int TCP_SYNCNT: int TCP_WINDOW_CLAMP: int if sys.version_info >= (3, 7): TCP_NOTSENT_LOWAT: int # Specifically-documented constants if sys.platform == 'linux' and 
sys.version_info >= (3,): AF_CAN: AddressFamily PF_CAN: int SOL_CAN_BASE: int SOL_CAN_RAW: int CAN_EFF_FLAG: int CAN_EFF_MASK: int CAN_ERR_FLAG: int CAN_ERR_MASK: int CAN_RAW: int CAN_RAW_ERR_FILTER: int CAN_RAW_FILTER: int CAN_RAW_LOOPBACK: int CAN_RAW_RECV_OWN_MSGS: int CAN_RTR_FLAG: int CAN_SFF_MASK: int CAN_BCM: int CAN_BCM_TX_SETUP: int CAN_BCM_TX_DELETE: int CAN_BCM_TX_READ: int CAN_BCM_TX_SEND: int CAN_BCM_RX_SETUP: int CAN_BCM_RX_DELETE: int CAN_BCM_RX_READ: int CAN_BCM_TX_STATUS: int CAN_BCM_TX_EXPIRED: int CAN_BCM_RX_STATUS: int CAN_BCM_RX_TIMEOUT: int CAN_BCM_RX_CHANGED: int CAN_RAW_FD_FRAMES: int if sys.platform == 'linux' and sys.version_info >= (3, 7): CAN_ISOTP: int if sys.platform == 'linux': AF_PACKET: AddressFamily PF_PACKET: int PACKET_BROADCAST: int PACKET_FASTROUTE: int PACKET_HOST: int PACKET_LOOPBACK: int PACKET_MULTICAST: int PACKET_OTHERHOST: int PACKET_OUTGOING: int if sys.platform == 'linux' and sys.version_info >= (3,): AF_RDS: AddressFamily PF_RDS: int SOL_RDS: int RDS_CANCEL_SENT_TO: int RDS_CMSG_RDMA_ARGS: int RDS_CMSG_RDMA_DEST: int RDS_CMSG_RDMA_MAP: int RDS_CMSG_RDMA_STATUS: int RDS_CMSG_RDMA_UPDATE: int RDS_CONG_MONITOR: int RDS_FREE_MR: int RDS_GET_MR: int RDS_GET_MR_FOR_DEST: int RDS_RDMA_DONTWAIT: int RDS_RDMA_FENCE: int RDS_RDMA_INVALIDATE: int RDS_RDMA_NOTIFY_ME: int RDS_RDMA_READWRITE: int RDS_RDMA_SILENT: int RDS_RDMA_USE_ONCE: int RDS_RECVERR: int if sys.platform == 'win32': SIO_RCVALL: int SIO_KEEPALIVE_VALS: int if sys.version_info >= (3, 6): SIO_LOOPBACK_FAST_PATH: int RCVALL_IPLEVEL: int RCVALL_MAX: int RCVALL_OFF: int RCVALL_ON: int RCVALL_SOCKETLEVELONLY: int if sys.platform == 'linux': AF_TIPC: AddressFamily SOL_TIPC: int TIPC_ADDR_ID: int TIPC_ADDR_NAME: int TIPC_ADDR_NAMESEQ: int TIPC_CFG_SRV: int TIPC_CLUSTER_SCOPE: int TIPC_CONN_TIMEOUT: int TIPC_CRITICAL_IMPORTANCE: int TIPC_DEST_DROPPABLE: int TIPC_HIGH_IMPORTANCE: int TIPC_IMPORTANCE: int TIPC_LOW_IMPORTANCE: int TIPC_MEDIUM_IMPORTANCE: int TIPC_NODE_SCOPE: 
int TIPC_PUBLISHED: int TIPC_SRC_DROPPABLE: int TIPC_SUBSCR_TIMEOUT: int TIPC_SUB_CANCEL: int TIPC_SUB_PORTS: int TIPC_SUB_SERVICE: int TIPC_TOP_SRV: int TIPC_WAIT_FOREVER: int TIPC_WITHDRAWN: int TIPC_ZONE_SCOPE: int if sys.platform == 'linux' and sys.version_info >= (3, 6): AF_ALG: AddressFamily SOL_ALG: int ALG_OP_DECRYPT: int ALG_OP_ENCRYPT: int ALG_OP_SIGN: int ALG_OP_VERIFY: int ALG_SET_AEAD_ASSOCLEN: int ALG_SET_AEAD_AUTHSIZE: int ALG_SET_IV: int ALG_SET_KEY: int ALG_SET_OP: int ALG_SET_PUBKEY: int if sys.platform == 'linux' and sys.version_info >= (3, 7): AF_VSOCK: AddressFamily IOCTL_VM_SOCKETS_GET_LOCAL_CID: int VMADDR_CID_ANY: int VMADDR_CID_HOST: int VMADDR_PORT_ANY: int SO_VM_SOCKETS_BUFFER_MAX_SIZE: int SO_VM_SOCKETS_BUFFER_SIZE: int SO_VM_SOCKETS_BUFFER_MIN_SIZE: int VM_SOCKETS_INVALID_VERSION: int AF_LINK: AddressFamily # Availability: BSD, macOS # BDADDR_* and HCI_* listed with other bluetooth constants below if sys.version_info >= (3, 6): SO_DOMAIN: int SO_PASSSEC: int SO_PEERSEC: int SO_PROTOCOL: int TCP_CONGESTION: int TCP_USER_TIMEOUT: int if sys.platform == 'linux' and sys.version_info >= (3, 8): AF_QIPCRTR: AddressFamily # Semi-documented constants # (Listed under "Socket families" in the docs, but not "Constants") if sys.platform == 'linux': # Netlink is defined by Linux AF_NETLINK: AddressFamily NETLINK_ARPD: int NETLINK_CRYPTO: int NETLINK_DNRTMSG: int NETLINK_FIREWALL: int NETLINK_IP6_FW: int NETLINK_NFLOG: int NETLINK_ROUTE6: int NETLINK_ROUTE: int NETLINK_SKIP: int NETLINK_TAPBASE: int NETLINK_TCPDIAG: int NETLINK_USERSOCK: int NETLINK_W1: int NETLINK_XFRM: int if sys.platform != 'win32' and sys.platform != 'darwin': # Linux and some BSD support is explicit in the docs # Windows and macOS do not support in practice AF_BLUETOOTH: AddressFamily BTPROTO_HCI: int BTPROTO_L2CAP: int BTPROTO_RFCOMM: int BTPROTO_SCO: int # not in FreeBSD BDADDR_ANY: str BDADDR_LOCAL: str HCI_FILTER: int # not in NetBSD or DragonFlyBSD # not in FreeBSD, NetBSD, 
or DragonFlyBSD HCI_TIME_STAMP: int HCI_DATA_DIR: int if sys.platform == 'darwin': # PF_SYSTEM is defined by macOS PF_SYSTEM: int SYSPROTO_CONTROL: int # enum versions of above flags if sys.version_info >= (3, 4): from enum import IntEnum class AddressFamily(IntEnum): AF_UNIX: int AF_INET: int AF_INET6: int AF_AAL5: int AF_ALG: int AF_APPLETALK: int AF_ASH: int AF_ATMPVC: int AF_ATMSVC: int AF_AX25: int AF_BLUETOOTH: int AF_BRIDGE: int AF_CAN: int AF_DECnet: int AF_ECONET: int AF_IPX: int AF_IRDA: int AF_KEY: int AF_LINK: int AF_LLC: int AF_NETBEUI: int AF_NETLINK: int AF_NETROM: int AF_PACKET: int AF_PPPOX: int AF_QIPCRTR: int AF_RDS: int AF_ROSE: int AF_ROUTE: int AF_SECURITY: int AF_SNA: int AF_SYSTEM: int AF_TIPC: int AF_UNSPEC: int AF_VSOCK: int AF_WANPIPE: int AF_X25: int class SocketKind(IntEnum): SOCK_STREAM: int SOCK_DGRAM: int SOCK_RAW: int SOCK_RDM: int SOCK_SEQPACKET: int SOCK_CLOEXEC: int SOCK_NONBLOCK: int else: AddressFamily = int SocketKind = int if sys.version_info >= (3, 6): from enum import IntFlag class AddressInfo(IntFlag): AI_ADDRCONFIG: int AI_ALL: int AI_CANONNAME: int AI_NUMERICHOST: int AI_NUMERICSERV: int AI_PASSIVE: int AI_V4MAPPED: int class MsgFlag(IntFlag): MSG_CTRUNC: int MSG_DONTROUTE: int MSG_DONTWAIT: int MSG_EOR: int MSG_OOB: int MSG_PEEK: int MSG_TRUNC: int MSG_WAITALL: int else: AddressInfo = int MsgFlag = int # ----- Exceptions ----- if sys.version_info < (3,): class error(IOError): ... else: error = OSError class herror(error): def __init__(self, herror: int = ..., string: str = ...) -> None: ... class gaierror(error): def __init__(self, error: int = ..., string: str = ...) -> None: ... class timeout(error): def __init__(self, error: int = ..., string: str = ...) -> None: ... # ----- Classes ----- # Addresses can be either tuples of varying lengths (AF_INET, AF_INET6, # AF_NETLINK, AF_TIPC) or strings (AF_UNIX). 
_Address = Union[tuple, str] _RetAddress = Any # TODO Most methods allow bytes as address objects _WriteBuffer = Union[bytearray, memoryview] _CMSG = Tuple[int, int, bytes] _SelfT = TypeVar('_SelfT', bound=socket) class socket: family: int type: int proto: int if sys.version_info < (3,): def __init__(self, family: int = ..., type: int = ..., proto: int = ...) -> None: ... else: def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: Optional[int] = ...) -> None: ... def __enter__(self: _SelfT) -> _SelfT: ... def __exit__(self, *args: Any) -> None: ... # --- methods --- def accept(self) -> Tuple[socket, _RetAddress]: ... def bind(self, address: Union[_Address, bytes]) -> None: ... def close(self) -> None: ... def connect(self, address: Union[_Address, bytes]) -> None: ... def connect_ex(self, address: Union[_Address, bytes]) -> int: ... def detach(self) -> int: ... def dup(self) -> socket: ... def fileno(self) -> int: ... if sys.version_info >= (3, 4): def get_inheritable(self) -> bool: ... def getpeername(self) -> _RetAddress: ... def getsockname(self) -> _RetAddress: ... @overload def getsockopt(self, level: int, optname: int) -> int: ... @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... if sys.version_info >= (3, 7): def getblocking(self) -> bool: ... def gettimeout(self) -> Optional[float]: ... if sys.platform == 'win32': def ioctl(self, control: object, option: Tuple[int, int, int]) -> None: ... if sys.version_info >= (3, 5): def listen(self, __backlog: int = ...) -> None: ... else: def listen(self, __backlog: int) -> None: ... # Note that the makefile's documented windows-specific behavior is not represented if sys.version_info < (3,): def makefile(self, mode: unicode = ..., buffering: int = ...) -> BinaryIO: ... 
else: # mode strings with duplicates are intentionally excluded @overload def makefile(self, mode: Literal['r', 'w', 'rw', 'wr', ''], buffering: Optional[int] = ..., *, encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> TextIO: ... @overload def makefile(self, mode: Literal['b', 'rb', 'br', 'wb', 'bw', 'rwb', 'rbw', 'wrb', 'wbr', 'brw', 'bwr'] = ..., buffering: Optional[int] = ..., *, encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> BinaryIO: ... def recv(self, bufsize: int, flags: int = ...) -> bytes: ... def recvfrom(self, bufsize: int, flags: int = ...) -> Tuple[bytes, _RetAddress]: ... if sys.version_info >= (3, 3): def recvmsg(self, __bufsize: int, __ancbufsize: int = ..., __flags: int = ...) -> Tuple[bytes, List[_CMSG], int, Any]: ... def recvmsg_into(self, __buffers: Iterable[_WriteBuffer], __ancbufsize: int = ..., __flags: int = ...) -> Tuple[int, List[_CMSG], int, Any]: ... def recvfrom_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> Tuple[int, _RetAddress]: ... def recv_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> int: ... def send(self, data: bytes, flags: int = ...) -> int: ... def sendall(self, data: bytes, flags: int = ...) -> None: ... # return type: None on success @overload def sendto(self, data: bytes, address: _Address) -> int: ... @overload def sendto(self, data: bytes, flags: int, address: _Address) -> int: ... if sys.version_info >= (3, 3): def sendmsg(self, __buffers: Iterable[bytes], __ancdata: Iterable[_CMSG] = ..., __flags: int = ..., __address: _Address = ...) -> int: ... if sys.platform == 'linux' and sys.version_info >= (3, 6): # TODO add the parameter types for sendmsg_afalg def sendmsg_afalg(self, msg=..., *, op, iv=..., assoclen=..., flags=...) -> int: ... 
if sys.version_info >= (3,): # TODO determine legal types for file parameter def sendfile(self, file, offset: int = ..., count: Optional[int] = ...) -> int: ... def set_inheritable(self, inheritable: bool) -> None: ... def setblocking(self, flag: bool) -> None: ... def settimeout(self, value: Optional[float]) -> None: ... if sys.version_info < (3, 6): def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ... else: @overload def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ... @overload def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ... if sys.platform == 'win32': def share(self, process_id: int) -> bytes: ... def shutdown(self, how: int) -> None: ... # ----- Functions ----- if sys.version_info >= (3, 7): def close(fd: int) -> None: ... def create_connection(address: Tuple[Optional[str], int], timeout: Optional[float] = ..., source_address: Tuple[Union[bytearray, bytes, Text], int] = ...) -> socket: ... if sys.version_info >= (3, 8): def create_server(address: _Address, *, family: int = ..., backlog: Optional[int] = ..., reuse_port: bool = ..., dualstack_ipv6: bool = ...) -> socket: ... def has_dualstack_ipv6() -> bool: ... def fromfd(fd: int, family: int, type: int, proto: int = ...) -> socket: ... if sys.platform == 'win32' and sys.version_info >= (3, 3): def fromshare(data: bytes) -> socket: ... # the 5th tuple item is an address # TODO the "Tuple[Any, ...]" should be "Union[Tuple[str, int], Tuple[str, int, int, int]]" but that triggers # https://github.com/python/mypy/issues/2509 def getaddrinfo(host: Optional[Union[bytearray, bytes, Text]], port: Union[str, int, None], family: int = ..., socktype: int = ..., proto: int = ..., flags: int = ...) -> List[Tuple[AddressFamily, SocketKind, int, str, Tuple[Any, ...]]]: ... def getfqdn(name: str = ...) -> str: ... def gethostbyname(hostname: str) -> str: ... 
def gethostbyname_ex(hostname: str) -> Tuple[str, List[str], List[str]]: ... def gethostname() -> str: ... def gethostbyaddr(ip_address: str) -> Tuple[str, List[str], List[str]]: ... def getnameinfo(sockaddr: Union[Tuple[str, int], Tuple[str, int, int, int]], flags: int) -> Tuple[str, int]: ... def getprotobyname(protocolname: str) -> int: ... def getservbyname(servicename: str, protocolname: str = ...) -> int: ... def getservbyport(port: int, protocolname: str = ...) -> str: ... def socketpair(family: int = ..., type: int = ..., proto: int = ...) -> Tuple[socket, socket]: ... def ntohl(x: int) -> int: ... # param & ret val are 32-bit ints def ntohs(x: int) -> int: ... # param & ret val are 16-bit ints def htonl(x: int) -> int: ... # param & ret val are 32-bit ints def htons(x: int) -> int: ... # param & ret val are 16-bit ints def inet_aton(ip_string: str) -> bytes: ... # ret val 4 bytes in length def inet_ntoa(packed_ip: bytes) -> str: ... def inet_pton(address_family: int, ip_string: str) -> bytes: ... def inet_ntop(address_family: int, packed_ip: bytes) -> str: ... if sys.version_info >= (3, 3): def CMSG_LEN(length: int) -> int: ... def CMSG_SPACE(length: int) -> int: ... def getdefaulttimeout() -> Optional[float]: ... def setdefaulttimeout(timeout: Optional[float]) -> None: ... if sys.version_info >= (3, 3): def sethostname(name: str) -> None: ... def if_nameindex() -> List[Tuple[int, str]]: ... def if_nametoindex(name: str) -> int: ... def if_indextoname(index: int) -> str: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/sqlite3/0000755€tŠÔÚ€2›s®0000000000013576752267025322 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/sqlite3/__init__.pyi0000644€tŠÔÚ€2›s®0000000005313576752252027574 0ustar jukkaDROPBOX\Domain Users00000000000000from sqlite3.dbapi2 import * # noqa: F403 mypy-0.761/mypy/typeshed/stdlib/2and3/sqlite3/dbapi2.pyi0000644€tŠÔÚ€2›s®0000002477213576752252027214 0ustar jukkaDROPBOX\Domain Users00000000000000# Filip Hron # based heavily on Andrey Vlasovskikh's python-skeletons https://github.com/JetBrains/python-skeletons/blob/master/sqlite3.py import os import sys from typing import Any, Callable, Iterable, Iterator, List, Optional, Text, Tuple, Type, TypeVar, Union from datetime import date, time, datetime _T = TypeVar('_T') paramstyle: str threadsafety: int apilevel: str Date = date Time = time Timestamp = datetime def DateFromTicks(ticks): ... def TimeFromTicks(ticks): ... def TimestampFromTicks(ticks): ... version_info: str sqlite_version_info: Tuple[int, int, int] if sys.version_info >= (3,): Binary = memoryview else: Binary = buffer def register_adapters_and_converters(): ... # The remaining definitions are imported from _sqlite3. 
PARSE_COLNAMES: int PARSE_DECLTYPES: int SQLITE_ALTER_TABLE: int SQLITE_ANALYZE: int SQLITE_ATTACH: int SQLITE_CREATE_INDEX: int SQLITE_CREATE_TABLE: int SQLITE_CREATE_TEMP_INDEX: int SQLITE_CREATE_TEMP_TABLE: int SQLITE_CREATE_TEMP_TRIGGER: int SQLITE_CREATE_TEMP_VIEW: int SQLITE_CREATE_TRIGGER: int SQLITE_CREATE_VIEW: int SQLITE_DELETE: int SQLITE_DENY: int SQLITE_DETACH: int SQLITE_DROP_INDEX: int SQLITE_DROP_TABLE: int SQLITE_DROP_TEMP_INDEX: int SQLITE_DROP_TEMP_TABLE: int SQLITE_DROP_TEMP_TRIGGER: int SQLITE_DROP_TEMP_VIEW: int SQLITE_DROP_TRIGGER: int SQLITE_DROP_VIEW: int SQLITE_IGNORE: int SQLITE_INSERT: int SQLITE_OK: int SQLITE_PRAGMA: int SQLITE_READ: int SQLITE_REINDEX: int SQLITE_SELECT: int SQLITE_TRANSACTION: int SQLITE_UPDATE: int adapters: Any converters: Any sqlite_version: str version: str # TODO: adapt needs to get probed def adapt(obj, protocol, alternate): ... def complete_statement(sql: str) -> bool: ... if sys.version_info >= (3, 7): def connect(database: Union[bytes, Text, os.PathLike[Text]], timeout: float = ..., detect_types: int = ..., isolation_level: Optional[str] = ..., check_same_thread: bool = ..., factory: Optional[Type[Connection]] = ..., cached_statements: int = ..., uri: bool = ...) -> Connection: ... elif sys.version_info >= (3, 4): def connect(database: Union[bytes, Text], timeout: float = ..., detect_types: int = ..., isolation_level: Optional[str] = ..., check_same_thread: bool = ..., factory: Optional[Type[Connection]] = ..., cached_statements: int = ..., uri: bool = ...) -> Connection: ... else: def connect(database: Union[bytes, Text], timeout: float = ..., detect_types: int = ..., isolation_level: Optional[str] = ..., check_same_thread: bool = ..., factory: Optional[Type[Connection]] = ..., cached_statements: int = ...) -> Connection: ... def enable_callback_tracebacks(flag: bool) -> None: ... def enable_shared_cache(do_enable: int) -> None: ... 
def register_adapter(type: Type[_T], callable: Callable[[_T], Union[int, float, str, bytes]]) -> None: ... def register_converter(typename: str, callable: Callable[[bytes], Any]) -> None: ... if sys.version_info < (3, 8): class Cache(object): def __init__(self, *args, **kwargs) -> None: ... def display(self, *args, **kwargs) -> None: ... def get(self, *args, **kwargs) -> None: ... class Connection(object): DataError: Any DatabaseError: Any Error: Any IntegrityError: Any InterfaceError: Any InternalError: Any NotSupportedError: Any OperationalError: Any ProgrammingError: Any Warning: Any in_transaction: Any isolation_level: Any row_factory: Any text_factory: Any total_changes: Any def __init__(self, *args, **kwargs): ... def close(self) -> None: ... def commit(self) -> None: ... def create_aggregate(self, name: str, num_params: int, aggregate_class: type) -> None: ... def create_collation(self, name: str, callable: Any) -> None: ... def create_function(self, name: str, num_params: int, func: Any) -> None: ... def cursor(self, cursorClass: Optional[type] = ...) -> Cursor: ... def execute(self, sql: str, parameters: Iterable[Any] = ...) -> Cursor: ... # TODO: please check in executemany() if seq_of_parameters type is possible like this def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable[Any]]) -> Cursor: ... def executescript(self, sql_script: Union[bytes, Text]) -> Cursor: ... def interrupt(self, *args, **kwargs) -> None: ... def iterdump(self, *args, **kwargs) -> None: ... def rollback(self, *args, **kwargs) -> None: ... # TODO: set_authorizer(authorzer_callback) # see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_authorizer # returns [SQLITE_OK, SQLITE_DENY, SQLITE_IGNORE] so perhaps int def set_authorizer(self, *args, **kwargs) -> None: ... 
# set_progress_handler(handler, n) -> see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_progress_handler def set_progress_handler(self, *args, **kwargs) -> None: ... def set_trace_callback(self, *args, **kwargs): ... # enable_load_extension and load_extension is not available on python distributions compiled # without sqlite3 loadable extension support. see footnotes https://docs.python.org/3/library/sqlite3.html#f1 def enable_load_extension(self, enabled: bool) -> None: ... def load_extension(self, path: str) -> None: ... if sys.version_info >= (3, 7): def backup(self, target: Connection, *, pages: int = ..., progress: Optional[Callable[[int, int, int], object]] = ..., name: str = ..., sleep: float = ...) -> None: ... def __call__(self, *args, **kwargs): ... def __enter__(self, *args, **kwargs): ... def __exit__(self, *args, **kwargs): ... class Cursor(Iterator[Any]): arraysize: Any connection: Any description: Any lastrowid: Any row_factory: Any rowcount: Any # TODO: Cursor class accepts exactly 1 argument # required type is sqlite3.Connection (which is imported as _Connection) # however, the name of the __init__ variable is unknown def __init__(self, *args, **kwargs) -> None: ... def close(self, *args, **kwargs) -> None: ... def execute(self, sql: str, parameters: Iterable[Any] = ...) -> Cursor: ... def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable[Any]]) -> Cursor: ... def executescript(self, sql_script: Union[bytes, Text]) -> Cursor: ... def fetchall(self) -> List[Any]: ... def fetchmany(self, size: Optional[int] = ...) -> List[Any]: ... def fetchone(self) -> Any: ... def setinputsizes(self, *args, **kwargs) -> None: ... def setoutputsize(self, *args, **kwargs) -> None: ... def __iter__(self) -> Cursor: ... if sys.version_info >= (3, 0): def __next__(self) -> Any: ... else: def next(self) -> Any: ... class DataError(DatabaseError): ... class DatabaseError(Error): ... class Error(Exception): ... 
class IntegrityError(DatabaseError): ... class InterfaceError(Error): ... class InternalError(DatabaseError): ... class NotSupportedError(DatabaseError): ... class OperationalError(DatabaseError): ... class OptimizedUnicode(object): maketrans: Any def __init__(self, *args, **kwargs): ... def capitalize(self, *args, **kwargs): ... def casefold(self, *args, **kwargs): ... def center(self, *args, **kwargs): ... def count(self, *args, **kwargs): ... def encode(self, *args, **kwargs): ... def endswith(self, *args, **kwargs): ... def expandtabs(self, *args, **kwargs): ... def find(self, *args, **kwargs): ... def format(self, *args, **kwargs): ... def format_map(self, *args, **kwargs): ... def index(self, *args, **kwargs): ... def isalnum(self, *args, **kwargs): ... def isalpha(self, *args, **kwargs): ... def isdecimal(self, *args, **kwargs): ... def isdigit(self, *args, **kwargs): ... def isidentifier(self, *args, **kwargs): ... def islower(self, *args, **kwargs): ... def isnumeric(self, *args, **kwargs): ... def isprintable(self, *args, **kwargs): ... def isspace(self, *args, **kwargs): ... def istitle(self, *args, **kwargs): ... def isupper(self, *args, **kwargs): ... def join(self, *args, **kwargs): ... def ljust(self, *args, **kwargs): ... def lower(self, *args, **kwargs): ... def lstrip(self, *args, **kwargs): ... def partition(self, *args, **kwargs): ... def replace(self, *args, **kwargs): ... def rfind(self, *args, **kwargs): ... def rindex(self, *args, **kwargs): ... def rjust(self, *args, **kwargs): ... def rpartition(self, *args, **kwargs): ... def rsplit(self, *args, **kwargs): ... def rstrip(self, *args, **kwargs): ... def split(self, *args, **kwargs): ... def splitlines(self, *args, **kwargs): ... def startswith(self, *args, **kwargs): ... def strip(self, *args, **kwargs): ... def swapcase(self, *args, **kwargs): ... def title(self, *args, **kwargs): ... def translate(self, *args, **kwargs): ... def upper(self, *args, **kwargs): ... 
def zfill(self, *args, **kwargs): ... def __add__(self, other): ... def __contains__(self, *args, **kwargs): ... def __eq__(self, other): ... def __format__(self, *args, **kwargs): ... def __ge__(self, other): ... def __getitem__(self, index): ... def __getnewargs__(self, *args, **kwargs): ... def __gt__(self, other): ... def __hash__(self): ... def __iter__(self): ... def __le__(self, other): ... def __len__(self, *args, **kwargs): ... def __lt__(self, other): ... def __mod__(self, other): ... def __mul__(self, other): ... def __ne__(self, other): ... def __rmod__(self, other): ... def __rmul__(self, other): ... class PrepareProtocol(object): def __init__(self, *args, **kwargs): ... class ProgrammingError(DatabaseError): ... class Row(object): def __init__(self, *args, **kwargs): ... def keys(self, *args, **kwargs): ... def __eq__(self, other): ... def __ge__(self, other): ... def __getitem__(self, index): ... def __gt__(self, other): ... def __hash__(self): ... def __iter__(self): ... def __le__(self, other): ... def __len__(self, *args, **kwargs): ... def __lt__(self, other): ... def __ne__(self, other): ... if sys.version_info < (3, 8): class Statement(object): def __init__(self, *args, **kwargs): ... class Warning(Exception): ... mypy-0.761/mypy/typeshed/stdlib/2and3/sre_compile.pyi0000644€tŠÔÚ€2›s®0000000117313576752252026756 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/sre_compile.py # and https://github.com/python/cpython/blob/master/Lib/sre_compile.py import sys from sre_parse import SubPattern from typing import Any, List, Pattern, Tuple, Type, TypeVar, Union MAXCODE: int if sys.version_info < (3, 0): STRING_TYPES: Tuple[Type[str], Type[unicode]] _IsStringType = int else: from sre_constants import _NamedIntConstant def dis(code: List[_NamedIntConstant]) -> None: ... _IsStringType = bool def isstring(obj: Any) -> _IsStringType: ... def compile(p: Union[str, bytes, SubPattern], flags: int = ...) 
-> Pattern[Any]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/ssl.pyi0000644€tŠÔÚ€2›s®0000002664213576752252025266 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for ssl from typing import ( Any, Callable, ClassVar, Dict, List, NamedTuple, Optional, Set, Text, Type, Tuple, Union, ) import enum import socket import sys import os _PCTRTT = Tuple[Tuple[str, str], ...] _PCTRTTT = Tuple[_PCTRTT, ...] _PeerCertRetDictType = Dict[str, Union[str, _PCTRTTT, _PCTRTT]] _PeerCertRetType = Union[_PeerCertRetDictType, bytes, None] _EnumRetType = List[Tuple[bytes, str, Union[Set[str], bool]]] _PasswordType = Union[Callable[[], Union[str, bytes]], str, bytes] if sys.version_info < (3, 6): _Path = Text else: _Path = Union[str, os.PathLike[Any]] if sys.version_info >= (3, 5): _SC1ArgT = Union[SSLSocket, SSLObject] else: _SC1ArgT = SSLSocket _SrvnmeCbType = Callable[[_SC1ArgT, Optional[str], SSLSocket], Optional[int]] class SSLError(OSError): library: str reason: str class SSLZeroReturnError(SSLError): ... class SSLWantReadError(SSLError): ... class SSLWantWriteError(SSLError): ... class SSLSyscallError(SSLError): ... class SSLEOFError(SSLError): ... if sys.version_info >= (3, 7): class SSLCertVerificationError(SSLError, ValueError): verify_code: int verify_message: str CertificateError = SSLCertVerificationError else: class CertificateError(ValueError): ... def wrap_socket(sock: socket.socket, keyfile: Optional[str] = ..., certfile: Optional[str] = ..., server_side: bool = ..., cert_reqs: int = ..., ssl_version: int = ..., ca_certs: Optional[str] = ..., do_handshake_on_connect: bool = ..., suppress_ragged_eofs: bool = ..., ciphers: Optional[str] = ...) -> SSLSocket: ... def create_default_context( purpose: Any = ..., *, cafile: Optional[str] = ..., capath: Optional[str] = ..., cadata: Union[Text, bytes, None] = ..., ) -> SSLContext: ... 
def _create_unverified_context(protocol: int = ..., *, cert_reqs: int = ..., check_hostname: bool = ..., purpose: Any = ..., certfile: Optional[str] = ..., keyfile: Optional[str] = ..., cafile: Optional[str] = ..., capath: Optional[str] = ..., cadata: Union[Text, bytes, None] = ...) -> SSLContext: ... _create_default_https_context: Callable[..., SSLContext] if sys.version_info >= (3, 3): def RAND_bytes(num: int) -> bytes: ... def RAND_pseudo_bytes(num: int) -> Tuple[bytes, bool]: ... def RAND_status() -> bool: ... def RAND_egd(path: str) -> None: ... def RAND_add(bytes: bytes, entropy: float) -> None: ... def match_hostname(cert: _PeerCertRetType, hostname: str) -> None: ... def cert_time_to_seconds(cert_time: str) -> int: ... def get_server_certificate(addr: Tuple[str, int], ssl_version: int = ..., ca_certs: Optional[str] = ...) -> str: ... def DER_cert_to_PEM_cert(der_cert_bytes: bytes) -> str: ... def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... class DefaultVerifyPaths(NamedTuple): cafile: str capath: str openssl_cafile_env: str openssl_cafile: str openssl_capath_env: str openssl_capath: str def get_default_verify_paths() -> DefaultVerifyPaths: ... if sys.platform == 'win32': def enum_certificates(store_name: str) -> _EnumRetType: ... def enum_crls(store_name: str) -> _EnumRetType: ... 
CERT_NONE: int CERT_OPTIONAL: int CERT_REQUIRED: int VERIFY_DEFAULT: int VERIFY_CRL_CHECK_LEAF: int VERIFY_CRL_CHECK_CHAIN: int VERIFY_X509_STRICT: int VERIFY_X509_TRUSTED_FIRST: int PROTOCOL_SSLv23: int PROTOCOL_SSLv2: int PROTOCOL_SSLv3: int PROTOCOL_TLSv1: int PROTOCOL_TLSv1_1: int PROTOCOL_TLSv1_2: int if sys.version_info >= (3, 5): PROTOCOL_TLS: int if sys.version_info >= (3, 6): PROTOCOL_TLS_CLIENT: int PROTOCOL_TLS_SERVER: int OP_ALL: int OP_NO_SSLv2: int OP_NO_SSLv3: int OP_NO_TLSv1: int OP_NO_TLSv1_1: int OP_NO_TLSv1_2: int OP_CIPHER_SERVER_PREFERENCE: int OP_SINGLE_DH_USE: int OP_SINGLE_ECDH_USE: int OP_NO_COMPRESSION: int if sys.version_info >= (3, 6): OP_NO_TICKET: int HAS_ALPN: int HAS_ECDH: bool HAS_SNI: bool HAS_NPN: bool CHANNEL_BINDING_TYPES: List[str] OPENSSL_VERSION: str OPENSSL_VERSION_INFO: Tuple[int, int, int, int, int] OPENSSL_VERSION_NUMBER: int ALERT_DESCRIPTION_HANDSHAKE_FAILURE: int ALERT_DESCRIPTION_INTERNAL_ERROR: int ALERT_DESCRIPTION_ACCESS_DENIED: int ALERT_DESCRIPTION_BAD_CERTIFICATE: int ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE: int ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE: int ALERT_DESCRIPTION_BAD_RECORD_MAC: int ALERT_DESCRIPTION_CERTIFICATE_EXPIRED: int ALERT_DESCRIPTION_CERTIFICATE_REVOKED: int ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN: int ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE: int ALERT_DESCRIPTION_CLOSE_NOTIFY: int ALERT_DESCRIPTION_DECODE_ERROR: int ALERT_DESCRIPTION_DECOMPRESSION_FAILURE: int ALERT_DESCRIPTION_DECRYPT_ERROR: int ALERT_DESCRIPTION_ILLEGAL_PARAMETER: int ALERT_DESCRIPTION_INSUFFICIENT_SECURITY: int ALERT_DESCRIPTION_NO_RENEGOTIATION: int ALERT_DESCRIPTION_PROTOCOL_VERSION: int ALERT_DESCRIPTION_RECORD_OVERFLOW: int ALERT_DESCRIPTION_UNEXPECTED_MESSAGE: int ALERT_DESCRIPTION_UNKNOWN_CA: int ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY: int ALERT_DESCRIPTION_UNRECOGNIZED_NAME: int ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE: int ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION: int 
ALERT_DESCRIPTION_USER_CANCELLED: int class _ASN1Object(NamedTuple): nid: int shortname: str longname: str oid: str if sys.version_info < (3,): class Purpose(_ASN1Object): SERVER_AUTH: ClassVar[Purpose] CLIENT_AUTH: ClassVar[Purpose] else: class Purpose(_ASN1Object, enum.Enum): SERVER_AUTH: _ASN1Object CLIENT_AUTH: _ASN1Object class SSLSocket(socket.socket): context: SSLContext server_side: bool server_hostname: Optional[str] if sys.version_info >= (3, 6): session: Optional[SSLSession] session_reused: Optional[bool] def read(self, len: int = ..., buffer: Optional[bytearray] = ...) -> bytes: ... def write(self, buf: bytes) -> int: ... def do_handshake(self) -> None: ... def getpeercert(self, binary_form: bool = ...) -> _PeerCertRetType: ... def cipher(self) -> Tuple[str, int, int]: ... if sys.version_info >= (3, 5): def shared_cipher(self) -> Optional[List[Tuple[str, int, int]]]: ... def compression(self) -> Optional[str]: ... def get_channel_binding(self, cb_type: str = ...) -> Optional[bytes]: ... def selected_alpn_protocol(self) -> Optional[str]: ... def selected_npn_protocol(self) -> Optional[str]: ... def unwrap(self) -> socket.socket: ... def version(self) -> Optional[str]: ... def pending(self) -> int: ... if sys.version_info >= (3, 8): def verify_client_post_handshake(self) -> None: ... if sys.version_info >= (3, 7): class TLSVersion(enum.IntEnum): MINIMUM_SUPPORTED: int MAXIMUM_SUPPORTED: int SSLv3: int TLSv1: int TLSv1_1: int TLSv1_2: int TLSv1_3: int class SSLContext: check_hostname: bool options: int if sys.version_info >= (3, 8): post_handshake_auth: bool @property def protocol(self) -> int: ... verify_flags: int verify_mode: int if sys.version_info >= (3, 5): def __init__(self, protocol: int = ...) -> None: ... else: def __init__(self, protocol: int) -> None: ... def cert_store_stats(self) -> Dict[str, int]: ... def load_cert_chain(self, certfile: _Path, keyfile: Optional[_Path] = ..., password: _PasswordType = ...) -> None: ... 
def load_default_certs(self, purpose: Purpose = ...) -> None: ... def load_verify_locations( self, cafile: Optional[str] = ..., capath: Optional[str] = ..., cadata: Union[Text, bytes, None] = ..., ) -> None: ... def get_ca_certs(self, binary_form: bool = ...) -> Union[List[_PeerCertRetDictType], List[bytes]]: ... def set_default_verify_paths(self) -> None: ... def set_ciphers(self, ciphers: str) -> None: ... def set_alpn_protocols(self, protocols: List[str]) -> None: ... if sys.version_info >= (3, 7): sni_callback: Optional[Callable[[SSLObject, str, SSLContext], Union[None, int]]] sslobject_class: Type[SSLObject] def set_npn_protocols(self, protocols: List[str]) -> None: ... def set_servername_callback(self, server_name_callback: Optional[_SrvnmeCbType]) -> None: ... def load_dh_params(self, dhfile: str) -> None: ... def set_ecdh_curve(self, curve_name: str) -> None: ... def wrap_socket(self, sock: socket.socket, server_side: bool = ..., do_handshake_on_connect: bool = ..., suppress_ragged_eofs: bool = ..., server_hostname: Optional[str] = ...) -> SSLSocket: ... if sys.version_info >= (3, 5): def wrap_bio(self, incoming: MemoryBIO, outgoing: MemoryBIO, server_side: bool = ..., server_hostname: Optional[str] = ...) -> SSLObject: ... def session_stats(self) -> Dict[str, int]: ... if sys.version_info >= (3, 7): maximum_version: TLSVersion minimum_version: TLSVersion if sys.version_info >= (3, 5): class SSLObject: context: SSLContext server_side: bool server_hostname: Optional[str] if sys.version_info >= (3, 6): session: Optional[SSLSession] session_reused: bool def read(self, len: int = ..., buffer: Optional[bytearray] = ...) -> bytes: ... def write(self, buf: bytes) -> int: ... def getpeercert(self, binary_form: bool = ...) -> _PeerCertRetType: ... def selected_npn_protocol(self) -> Optional[str]: ... def cipher(self) -> Tuple[str, int, int]: ... def shared_cipher(self) -> Optional[List[Tuple[str, int, int]]]: ... def compression(self) -> Optional[str]: ... 
def pending(self) -> int: ... def do_handshake(self) -> None: ... def unwrap(self) -> None: ... def get_channel_binding(self, cb_type: str = ...) -> Optional[bytes]: ... if sys.version_info >= (3, 8): def verify_client_post_handshake(self) -> None: ... class MemoryBIO: pending: int eof: bool def read(self, n: int = ...) -> bytes: ... def write(self, buf: bytes) -> int: ... def write_eof(self) -> None: ... if sys.version_info >= (3, 6): class SSLSession: id: bytes time: int timeout: int ticket_lifetime_hint: int has_ticket: bool class VerifyFlags(enum.IntFlag): VERIFY_DEFAULT: int VERIFY_CRL_CHECK_LEAF: int VERIFY_CRL_CHECK_CHAIN: int VERIFY_X509_STRICT: int VERIFY_X509_TRUSTED_FIRST: int class VerifyMode(enum.IntEnum): CERT_NONE: int CERT_OPTIONAL: int CERT_REQUIRED: int # TODO below documented in cpython but not in docs.python.org # taken from python 3.4 SSL_ERROR_EOF: int SSL_ERROR_INVALID_ERROR_CODE: int SSL_ERROR_SSL: int SSL_ERROR_SYSCALL: int SSL_ERROR_WANT_CONNECT: int SSL_ERROR_WANT_READ: int SSL_ERROR_WANT_WRITE: int SSL_ERROR_WANT_X509_LOOKUP: int SSL_ERROR_ZERO_RETURN: int def get_protocol_name(protocol_code: int) -> str: ... AF_INET: int PEM_FOOTER: str PEM_HEADER: str SOCK_STREAM: int SOL_SOCKET: int SO_TYPE: int mypy-0.761/mypy/typeshed/stdlib/2and3/stringprep.pyi0000644€tŠÔÚ€2›s®0000000153213576752252026651 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for stringprep (Python 2 and 3) from typing import Text def in_table_a1(code: Text) -> bool: ... def in_table_b1(code: Text) -> bool: ... def map_table_b3(code: Text) -> Text: ... def map_table_b2(a: Text) -> Text: ... def in_table_c11(code: Text) -> bool: ... def in_table_c12(code: Text) -> bool: ... def in_table_c11_c12(code: Text) -> bool: ... def in_table_c21(code: Text) -> bool: ... def in_table_c22(code: Text) -> bool: ... def in_table_c21_c22(code: Text) -> bool: ... def in_table_c3(code: Text) -> bool: ... def in_table_c4(code: Text) -> bool: ... def in_table_c5(code: Text) -> bool: ... 
def in_table_c6(code: Text) -> bool: ... def in_table_c7(code: Text) -> bool: ... def in_table_c8(code: Text) -> bool: ... def in_table_c9(code: Text) -> bool: ... def in_table_d1(code: Text) -> bool: ... def in_table_d2(code: Text) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/struct.pyi0000644€tŠÔÚ€2›s®0000000321413576752252025777 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for struct # Based on http://docs.python.org/3.2/library/struct.html # Based on http://docs.python.org/2/library/struct.html import sys from typing import Any, Tuple, Text, Union, Iterator from array import array from mmap import mmap class error(Exception): ... _FmtType = Union[bytes, Text] if sys.version_info >= (3,): _BufferType = Union[array[int], bytes, bytearray, memoryview, mmap] _WriteBufferType = Union[array, bytearray, memoryview, mmap] else: _BufferType = Union[array[int], bytes, bytearray, buffer, memoryview, mmap] _WriteBufferType = Union[array[Any], bytearray, buffer, memoryview, mmap] def pack(fmt: _FmtType, *v: Any) -> bytes: ... def pack_into(fmt: _FmtType, buffer: _WriteBufferType, offset: int, *v: Any) -> None: ... def unpack(fmt: _FmtType, buffer: _BufferType) -> Tuple[Any, ...]: ... def unpack_from(fmt: _FmtType, buffer: _BufferType, offset: int = ...) -> Tuple[Any, ...]: ... if sys.version_info >= (3, 4): def iter_unpack(fmt: _FmtType, buffer: _BufferType) -> Iterator[Tuple[Any, ...]]: ... def calcsize(fmt: _FmtType) -> int: ... class Struct: if sys.version_info >= (3, 7): format: str else: format: bytes size: int def __init__(self, format: _FmtType) -> None: ... def pack(self, *v: Any) -> bytes: ... def pack_into(self, buffer: _WriteBufferType, offset: int, *v: Any) -> None: ... def unpack(self, buffer: _BufferType) -> Tuple[Any, ...]: ... def unpack_from(self, buffer: _BufferType, offset: int = ...) -> Tuple[Any, ...]: ... if sys.version_info >= (3, 4): def iter_unpack(self, buffer: _BufferType) -> Iterator[Tuple[Any, ...]]: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/sunau.pyi0000644€tŠÔÚ€2›s®0000000601613576752252025611 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sunau (Python 2 and 3) import sys from typing import Any, NamedTuple, NoReturn, Optional, Text, IO, Union, Tuple _File = Union[Text, IO[bytes]] class Error(Exception): ... AUDIO_FILE_MAGIC: int AUDIO_FILE_ENCODING_MULAW_8: int AUDIO_FILE_ENCODING_LINEAR_8: int AUDIO_FILE_ENCODING_LINEAR_16: int AUDIO_FILE_ENCODING_LINEAR_24: int AUDIO_FILE_ENCODING_LINEAR_32: int AUDIO_FILE_ENCODING_FLOAT: int AUDIO_FILE_ENCODING_DOUBLE: int AUDIO_FILE_ENCODING_ADPCM_G721: int AUDIO_FILE_ENCODING_ADPCM_G722: int AUDIO_FILE_ENCODING_ADPCM_G723_3: int AUDIO_FILE_ENCODING_ADPCM_G723_5: int AUDIO_FILE_ENCODING_ALAW_8: int AUDIO_UNKNOWN_SIZE: int if sys.version_info < (3, 0): _sunau_params = Tuple[int, int, int, int, str, str] else: class _sunau_params(NamedTuple): nchannels: int sampwidth: int framerate: int nframes: int comptype: str compname: str class Au_read: def __init__(self, f: _File) -> None: ... if sys.version_info >= (3, 3): def __enter__(self) -> Au_read: ... def __exit__(self, *args: Any) -> None: ... def getfp(self) -> Optional[IO[bytes]]: ... def rewind(self) -> None: ... def close(self) -> None: ... def tell(self) -> int: ... def getnchannels(self) -> int: ... def getnframes(self) -> int: ... def getsampwidth(self) -> int: ... def getframerate(self) -> int: ... def getcomptype(self) -> str: ... def getcompname(self) -> str: ... def getparams(self) -> _sunau_params: ... def getmarkers(self) -> None: ... def getmark(self, id: Any) -> NoReturn: ... def setpos(self, pos: int) -> None: ... def readframes(self, nframes: int) -> Optional[bytes]: ... class Au_write: def __init__(self, f: _File) -> None: ... if sys.version_info >= (3, 3): def __enter__(self) -> Au_write: ... def __exit__(self, *args: Any) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... 
def setsampwidth(self, sampwidth: int) -> None: ... def getsampwidth(self) -> int: ... def setframerate(self, framerate: float) -> None: ... def getframerate(self) -> int: ... def setnframes(self, nframes: int) -> None: ... def getnframes(self) -> int: ... def setcomptype(self, comptype: str, compname: str) -> None: ... def getcomptype(self) -> str: ... def getcompname(self) -> str: ... def setparams(self, params: _sunau_params) -> None: ... def getparams(self) -> _sunau_params: ... def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ... def getmark(self, id: Any) -> NoReturn: ... def getmarkers(self) -> None: ... def tell(self) -> int: ... # should be any bytes-like object after 3.4, but we don't have a type for that def writeframesraw(self, data: bytes) -> None: ... def writeframes(self, data: bytes) -> None: ... def close(self) -> None: ... # Returns a Au_read if mode is rb and Au_write if mode is wb def open(f: _File, mode: Optional[str] = ...) -> Any: ... openfp = open mypy-0.761/mypy/typeshed/stdlib/2and3/symtable.pyi0000644€tŠÔÚ€2›s®0000000315513576752252026277 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import List, Sequence, Tuple, Text def symtable(code: Text, filename: Text, compile_type: Text) -> SymbolTable: ... class SymbolTable(object): def get_type(self) -> str: ... def get_id(self) -> int: ... def get_name(self) -> str: ... def get_lineno(self) -> int: ... def is_optimized(self) -> bool: ... def is_nested(self) -> bool: ... def has_children(self) -> bool: ... def has_exec(self) -> bool: ... if sys.version_info < (3, 0): def has_import_star(self) -> bool: ... def get_identifiers(self) -> Sequence[str]: ... def lookup(self, name: str) -> Symbol: ... def get_symbols(self) -> List[Symbol]: ... def get_children(self) -> List[SymbolTable]: ... class Function(SymbolTable): def get_parameters(self) -> Tuple[str, ...]: ... def get_locals(self) -> Tuple[str, ...]: ... def get_globals(self) -> Tuple[str, ...]: ... 
def get_frees(self) -> Tuple[str, ...]: ... class Class(SymbolTable): def get_methods(self) -> Tuple[str, ...]: ... class Symbol(object): def get_name(self) -> str: ... def is_referenced(self) -> bool: ... def is_parameter(self) -> bool: ... def is_global(self) -> bool: ... def is_declared_global(self) -> bool: ... def is_local(self) -> bool: ... if sys.version_info >= (3, 6): def is_annotated(self) -> bool: ... def is_free(self) -> bool: ... def is_imported(self) -> bool: ... def is_assigned(self) -> bool: ... def is_namespace(self) -> bool: ... def get_namespaces(self) -> Sequence[SymbolTable]: ... def get_namespace(self) -> SymbolTable: ... mypy-0.761/mypy/typeshed/stdlib/2and3/sysconfig.pyi0000644€tŠÔÚ€2›s®0000000155113576752252026461 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sysconfig from typing import overload, Any, Dict, IO, List, Optional, Tuple, Union @overload def get_config_vars() -> Dict[str, Any]: ... @overload def get_config_vars(arg: str, *args: str) -> List[Any]: ... def get_config_var(name: str) -> Optional[str]: ... def get_scheme_names() -> Tuple[str, ...]: ... def get_path_names() -> Tuple[str, ...]: ... def get_path(name: str, scheme: str = ..., vars: Optional[Dict[str, Any]] = ..., expand: bool = ...) -> Optional[str]: ... def get_paths(scheme: str = ..., vars: Optional[Dict[str, Any]] = ..., expand: bool = ...) -> Dict[str, str]: ... def get_python_version() -> str: ... def get_platform() -> str: ... def is_python_build() -> bool: ... def parse_config_h(fp: IO[Any], vars: Optional[Dict[str, Any]]) -> Dict[str, Any]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/syslog.pyi0000644€tŠÔÚ€2›s®0000000146613576752252026002 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload LOG_ALERT: int LOG_AUTH: int LOG_CONS: int LOG_CRIT: int LOG_CRON: int LOG_DAEMON: int LOG_DEBUG: int LOG_EMERG: int LOG_ERR: int LOG_INFO: int LOG_KERN: int LOG_LOCAL0: int LOG_LOCAL1: int LOG_LOCAL2: int LOG_LOCAL3: int LOG_LOCAL4: int LOG_LOCAL5: int LOG_LOCAL6: int LOG_LOCAL7: int LOG_LPR: int LOG_MAIL: int LOG_NDELAY: int LOG_NEWS: int LOG_NOTICE: int LOG_NOWAIT: int LOG_PERROR: int LOG_PID: int LOG_SYSLOG: int LOG_USER: int LOG_UUCP: int LOG_WARNING: int def LOG_MASK(a: int) -> int: ... def LOG_UPTO(a: int) -> int: ... def closelog() -> None: ... def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... def setlogmask(x: int) -> int: ... @overload def syslog(priority: int, message: str) -> None: ... @overload def syslog(message: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/tabnanny.pyi0000644€tŠÔÚ€2›s®0000000112113576752252026260 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tabnanny (Python 2 and 3) import os import sys from typing import Iterable, Tuple, Union if sys.version_info >= (3, 6): _Path = Union[str, bytes, os.PathLike] else: _Path = Union[str, bytes] verbose: int filename_only: int class NannyNag(Exception): def __init__(self, lineno: int, msg: str, line: str) -> None: ... def get_lineno(self) -> int: ... def get_msg(self) -> str: ... def get_line(self) -> str: ... def check(file: _Path) -> None: ... def process_tokens(tokens: Iterable[Tuple[int, str, Tuple[int, int], Tuple[int, int], str]]) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/tarfile.pyi0000644€tŠÔÚ€2›s®0000001526313576752252026110 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tarfile from typing import ( Callable, IO, Iterable, Iterator, List, Mapping, Optional, Type, Union, ) import os import sys from types import TracebackType if sys.version_info >= (3, 6): _Path = Union[bytes, str, os.PathLike] elif sys.version_info >= (3,): _Path = Union[bytes, str] else: _Path = Union[str, unicode] ENCODING: str USTAR_FORMAT: int GNU_FORMAT: int PAX_FORMAT: int DEFAULT_FORMAT: int REGTYPE: bytes AREGTYPE: bytes LNKTYPE: bytes SYMTYPE: bytes DIRTYPE: bytes FIFOTYPE: bytes CONTTYPE: bytes CHRTYPE: bytes BLKTYPE: bytes GNUTYPE_SPARSE: bytes if sys.version_info < (3,): TAR_PLAIN: int TAR_GZIPPED: int def open(name: Optional[_Path] = ..., mode: str = ..., fileobj: Optional[IO[bytes]] = ..., bufsize: int = ..., *, format: Optional[int] = ..., tarinfo: Optional[TarInfo] = ..., dereference: Optional[bool] = ..., ignore_zeros: Optional[bool] = ..., encoding: Optional[str] = ..., errors: str = ..., pax_headers: Optional[Mapping[str, str]] = ..., debug: Optional[int] = ..., errorlevel: Optional[int] = ..., compresslevel: Optional[int] = ...) -> TarFile: ... 
class TarFile(Iterable[TarInfo]): name: Optional[_Path] mode: str fileobj: Optional[IO[bytes]] format: Optional[int] tarinfo: Optional[TarInfo] dereference: Optional[bool] ignore_zeros: Optional[bool] encoding: Optional[str] errors: str pax_headers: Optional[Mapping[str, str]] debug: Optional[int] errorlevel: Optional[int] if sys.version_info < (3,): posix: bool def __init__(self, name: Optional[_Path] = ..., mode: str = ..., fileobj: Optional[IO[bytes]] = ..., format: Optional[int] = ..., tarinfo: Optional[TarInfo] = ..., dereference: Optional[bool] = ..., ignore_zeros: Optional[bool] = ..., encoding: Optional[str] = ..., errors: str = ..., pax_headers: Optional[Mapping[str, str]] = ..., debug: Optional[int] = ..., errorlevel: Optional[int] = ..., compresslevel: Optional[int] = ...) -> None: ... def __enter__(self) -> TarFile: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> None: ... def __iter__(self) -> Iterator[TarInfo]: ... @classmethod def open(cls, name: Optional[_Path] = ..., mode: str = ..., fileobj: Optional[IO[bytes]] = ..., bufsize: int = ..., *, format: Optional[int] = ..., tarinfo: Optional[TarInfo] = ..., dereference: Optional[bool] = ..., ignore_zeros: Optional[bool] = ..., encoding: Optional[str] = ..., errors: str = ..., pax_headers: Optional[Mapping[str, str]] = ..., debug: Optional[int] = ..., errorlevel: Optional[int] = ...) -> TarFile: ... def getmember(self, name: str) -> TarInfo: ... def getmembers(self) -> List[TarInfo]: ... def getnames(self) -> List[str]: ... if sys.version_info >= (3, 5): def list(self, verbose: bool = ..., *, members: Optional[List[TarInfo]] = ...) -> None: ... else: def list(self, verbose: bool = ...) -> None: ... def next(self) -> Optional[TarInfo]: ... if sys.version_info >= (3, 5): def extractall(self, path: _Path = ..., members: Optional[List[TarInfo]] = ..., *, numeric_owner: bool = ...) -> None: ... 
else: def extractall(self, path: _Path = ..., members: Optional[List[TarInfo]] = ...) -> None: ... if sys.version_info >= (3, 5): def extract(self, member: Union[str, TarInfo], path: _Path = ..., set_attrs: bool = ..., *, numeric_owner: bool = ...) -> None: ... else: def extract(self, member: Union[str, TarInfo], path: _Path = ...) -> None: ... def extractfile(self, member: Union[str, TarInfo]) -> Optional[IO[bytes]]: ... if sys.version_info >= (3, 7): def add(self, name: str, arcname: Optional[str] = ..., recursive: bool = ..., *, filter: Optional[Callable[[TarInfo], Optional[TarInfo]]] = ...) -> None: ... elif sys.version_info >= (3,): def add(self, name: str, arcname: Optional[str] = ..., recursive: bool = ..., exclude: Optional[Callable[[str], bool]] = ..., *, filter: Optional[Callable[[TarInfo], Optional[TarInfo]]] = ...) -> None: ... else: def add(self, name: str, arcname: Optional[str] = ..., recursive: bool = ..., exclude: Optional[Callable[[str], bool]] = ..., filter: Optional[Callable[[TarInfo], Optional[TarInfo]]] = ...) -> None: ... def addfile(self, tarinfo: TarInfo, fileobj: Optional[IO[bytes]] = ...) -> None: ... def gettarinfo(self, name: Optional[str] = ..., arcname: Optional[str] = ..., fileobj: Optional[IO[bytes]] = ...) -> TarInfo: ... def close(self) -> None: ... def is_tarfile(name: str) -> bool: ... if sys.version_info < (3, 8): def filemode(mode: int) -> str: ... # undocumented if sys.version_info < (3,): class TarFileCompat: def __init__(self, filename: str, mode: str = ..., compression: int = ...) -> None: ... class TarError(Exception): ... class ReadError(TarError): ... class CompressionError(TarError): ... class StreamError(TarError): ... class ExtractError(TarError): ... class HeaderError(TarError): ... class TarInfo: name: str size: int mtime: int mode: int type: bytes linkname: str uid: int gid: int uname: str gname: str pax_headers: Mapping[str, str] def __init__(self, name: str = ...) -> None: ... 
if sys.version_info >= (3,): @classmethod def frombuf(cls, buf: bytes, encoding: str, errors: str) -> TarInfo: ... else: @classmethod def frombuf(cls, buf: bytes) -> TarInfo: ... @classmethod def fromtarfile(cls, tarfile: TarFile) -> TarInfo: ... def tobuf(self, format: Optional[int] = ..., encoding: Optional[str] = ..., errors: str = ...) -> bytes: ... def isfile(self) -> bool: ... def isreg(self) -> bool: ... def isdir(self) -> bool: ... def issym(self) -> bool: ... def islnk(self) -> bool: ... def ischr(self) -> bool: ... def isblk(self) -> bool: ... def isfifo(self) -> bool: ... def isdev(self) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/telnetlib.pyi0000644€tŠÔÚ€2›s®0000000524313576752252026441 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for telnetlib (Python 2 and 3) import socket import sys from typing import Any, Callable, Match, Optional, Pattern, Sequence, Tuple, Union DEBUGLEVEL: int TELNET_PORT: int IAC: bytes DONT: bytes DO: bytes WONT: bytes WILL: bytes theNULL: bytes SE: bytes NOP: bytes DM: bytes BRK: bytes IP: bytes AO: bytes AYT: bytes EC: bytes EL: bytes GA: bytes SB: bytes BINARY: bytes ECHO: bytes RCP: bytes SGA: bytes NAMS: bytes STATUS: bytes TM: bytes RCTE: bytes NAOL: bytes NAOP: bytes NAOCRD: bytes NAOHTS: bytes NAOHTD: bytes NAOFFD: bytes NAOVTS: bytes NAOVTD: bytes NAOLFD: bytes XASCII: bytes LOGOUT: bytes BM: bytes DET: bytes SUPDUP: bytes SUPDUPOUTPUT: bytes SNDLOC: bytes TTYPE: bytes EOR: bytes TUID: bytes OUTMRK: bytes TTYLOC: bytes VT3270REGIME: bytes X3PAD: bytes NAWS: bytes TSPEED: bytes LFLOW: bytes LINEMODE: bytes XDISPLOC: bytes OLD_ENVIRON: bytes AUTHENTICATION: bytes ENCRYPT: bytes NEW_ENVIRON: bytes TN3270E: bytes XAUTH: bytes CHARSET: bytes RSP: bytes COM_PORT_OPTION: bytes SUPPRESS_LOCAL_ECHO: bytes TLS: bytes KERMIT: bytes SEND_URL: bytes FORWARD_X: bytes PRAGMA_LOGON: bytes SSPI_LOGON: bytes PRAGMA_HEARTBEAT: bytes EXOPL: bytes NOOPT: bytes class Telnet: def __init__(self, host: Optional[str] = ..., 
port: int = ..., timeout: int = ...) -> None: ... def open(self, host: str, port: int = ..., timeout: int = ...) -> None: ... def msg(self, msg: str, *args: Any) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... def close(self) -> None: ... def get_socket(self) -> socket.socket: ... def fileno(self) -> int: ... def write(self, buffer: bytes) -> None: ... def read_until(self, match: bytes, timeout: Optional[int] = ...) -> bytes: ... def read_all(self) -> bytes: ... def read_some(self) -> bytes: ... def read_very_eager(self) -> bytes: ... def read_eager(self) -> bytes: ... def read_lazy(self) -> bytes: ... def read_very_lazy(self) -> bytes: ... def read_sb_data(self) -> bytes: ... def set_option_negotiation_callback(self, callback: Optional[Callable[[socket.socket, bytes, bytes], Any]]) -> None: ... def process_rawq(self) -> None: ... def rawq_getchar(self) -> bytes: ... def fill_rawq(self) -> None: ... def sock_avail(self) -> bool: ... def interact(self) -> None: ... def mt_interact(self) -> None: ... def listener(self) -> None: ... def expect(self, list: Sequence[Union[Pattern[bytes], bytes]], timeout: Optional[int] = ...) -> Tuple[int, Optional[Match[bytes]], bytes]: ... if sys.version_info >= (3, 6): def __enter__(self) -> Telnet: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/termios.pyi0000644€tŠÔÚ€2›s®0000000651313576752252026142 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for termios from typing import IO, List, Union _FD = Union[int, IO[str]] _Attr = List[Union[int, List[bytes]]] # TODO constants not really documented B0: int B1000000: int B110: int B115200: int B1152000: int B1200: int B134: int B150: int B1500000: int B1800: int B19200: int B200: int B2000000: int B230400: int B2400: int B2500000: int B300: int B3000000: int B3500000: int B38400: int B4000000: int B460800: int B4800: int B50: int B500000: int B57600: int B576000: int B600: int B75: int B921600: int B9600: int BRKINT: int BS0: int BS1: int BSDLY: int CBAUD: int CBAUDEX: int CDSUSP: int CEOF: int CEOL: int CEOT: int CERASE: int CFLUSH: int CIBAUD: int CINTR: int CKILL: int CLNEXT: int CLOCAL: int CQUIT: int CR0: int CR1: int CR2: int CR3: int CRDLY: int CREAD: int CRPRNT: int CRTSCTS: int CS5: int CS6: int CS7: int CS8: int CSIZE: int CSTART: int CSTOP: int CSTOPB: int CSUSP: int CWERASE: int ECHO: int ECHOCTL: int ECHOE: int ECHOK: int ECHOKE: int ECHONL: int ECHOPRT: int EXTA: int EXTB: int FF0: int FF1: int FFDLY: int FIOASYNC: int FIOCLEX: int FIONBIO: int FIONCLEX: int FIONREAD: int FLUSHO: int HUPCL: int ICANON: int ICRNL: int IEXTEN: int IGNBRK: int IGNCR: int IGNPAR: int IMAXBEL: int INLCR: int INPCK: int IOCSIZE_MASK: int IOCSIZE_SHIFT: int ISIG: int ISTRIP: int IUCLC: int IXANY: int IXOFF: int IXON: int NCC: int NCCS: int NL0: int NL1: int NLDLY: int NOFLSH: int N_MOUSE: int N_PPP: int N_SLIP: int N_STRIP: int N_TTY: int OCRNL: int OFDEL: int OFILL: int OLCUC: int ONLCR: int ONLRET: int ONOCR: int OPOST: int PARENB: int PARMRK: int PARODD: int PENDIN: int TAB0: int TAB1: int TAB2: int TAB3: int TABDLY: int TCFLSH: int TCGETA: int TCGETS: int TCIFLUSH: int TCIOFF: int TCIOFLUSH: int TCION: int TCOFLUSH: int TCOOFF: int TCOON: int TCSADRAIN: int TCSAFLUSH: int TCSANOW: int TCSBRK: int TCSBRKP: int TCSETA: int 
TCSETAF: int TCSETAW: int TCSETS: int TCSETSF: int TCSETSW: int TCXONC: int TIOCCONS: int TIOCEXCL: int TIOCGETD: int TIOCGICOUNT: int TIOCGLCKTRMIOS: int TIOCGPGRP: int TIOCGSERIAL: int TIOCGSOFTCAR: int TIOCGWINSZ: int TIOCINQ: int TIOCLINUX: int TIOCMBIC: int TIOCMBIS: int TIOCMGET: int TIOCMIWAIT: int TIOCMSET: int TIOCM_CAR: int TIOCM_CD: int TIOCM_CTS: int TIOCM_DSR: int TIOCM_DTR: int TIOCM_LE: int TIOCM_RI: int TIOCM_RNG: int TIOCM_RTS: int TIOCM_SR: int TIOCM_ST: int TIOCNOTTY: int TIOCNXCL: int TIOCOUTQ: int TIOCPKT: int TIOCPKT_DATA: int TIOCPKT_DOSTOP: int TIOCPKT_FLUSHREAD: int TIOCPKT_FLUSHWRITE: int TIOCPKT_NOSTOP: int TIOCPKT_START: int TIOCPKT_STOP: int TIOCSCTTY: int TIOCSERCONFIG: int TIOCSERGETLSR: int TIOCSERGETMULTI: int TIOCSERGSTRUCT: int TIOCSERGWILD: int TIOCSERSETMULTI: int TIOCSERSWILD: int TIOCSER_TEMT: int TIOCSETD: int TIOCSLCKTRMIOS: int TIOCSPGRP: int TIOCSSERIAL: int TIOCSSOFTCAR: int TIOCSTI: int TIOCSWINSZ: int TOSTOP: int VDISCARD: int VEOF: int VEOL: int VEOL2: int VERASE: int VINTR: int VKILL: int VLNEXT: int VMIN: int VQUIT: int VREPRINT: int VSTART: int VSTOP: int VSUSP: int VSWTC: int VSWTCH: int VT0: int VT1: int VTDLY: int VTIME: int VWERASE: int XCASE: int XTABS: int def tcgetattr(fd: _FD) -> _Attr: ... def tcsetattr(fd: _FD, when: int, attributes: _Attr) -> None: ... def tcsendbreak(fd: _FD, duration: int) -> None: ... def tcdrain(fd: _FD) -> None: ... def tcflush(fd: _FD, queue: int) -> None: ... def tcflow(fd: _FD, action: int) -> None: ... class error(Exception): ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/threading.pyi0000644€tŠÔÚ€2›s®0000001525513576752252026430 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for threading from typing import ( Any, Callable, Iterable, List, Mapping, Optional, Tuple, Type, Union, TypeVar, ) from types import FrameType, TracebackType import sys # TODO recursive type _TF = Callable[[FrameType, str, Any], Optional[Callable[..., Any]]] _PF = Callable[[FrameType, str, Any], None] _T = TypeVar('_T') def active_count() -> int: ... if sys.version_info < (3,): def activeCount() -> int: ... def current_thread() -> Thread: ... def currentThread() -> Thread: ... if sys.version_info >= (3,): def get_ident() -> int: ... def enumerate() -> List[Thread]: ... if sys.version_info >= (3, 4): def main_thread() -> Thread: ... if sys.version_info >= (3, 8): from _thread import get_native_id as get_native_id def settrace(func: _TF) -> None: ... def setprofile(func: Optional[_PF]) -> None: ... def stack_size(size: int = ...) -> int: ... if sys.version_info >= (3,): TIMEOUT_MAX: float class ThreadError(Exception): ... class local(object): def __getattribute__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __delattr__(self, name: str) -> None: ... class Thread: name: str ident: Optional[int] daemon: bool if sys.version_info >= (3,): def __init__(self, group: None = ..., target: Optional[Callable[..., Any]] = ..., name: Optional[str] = ..., args: Iterable[Any] = ..., kwargs: Mapping[str, Any] = ..., *, daemon: Optional[bool] = ...) -> None: ... else: def __init__(self, group: None = ..., target: Optional[Callable[..., Any]] = ..., name: Optional[str] = ..., args: Iterable[Any] = ..., kwargs: Mapping[str, Any] = ...) -> None: ... def start(self) -> None: ... def run(self) -> None: ... def join(self, timeout: Optional[float] = ...) -> None: ... def getName(self) -> str: ... def setName(self, name: str) -> None: ... 
if sys.version_info >= (3, 8): @property def native_id(self) -> Optional[int]: ... # only available on some platforms def is_alive(self) -> bool: ... def isAlive(self) -> bool: ... def isDaemon(self) -> bool: ... def setDaemon(self, daemonic: bool) -> None: ... class _DummyThread(Thread): ... class Lock: def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> Optional[bool]: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... def locked(self) -> bool: ... class _RLock: def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> Optional[bool]: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... RLock = _RLock class Condition: def __init__(self, lock: Union[Lock, _RLock, None] = ...) -> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> Optional[bool]: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... def wait(self, timeout: Optional[float] = ...) -> bool: ... if sys.version_info >= (3,): def wait_for(self, predicate: Callable[[], _T], timeout: Optional[float] = ...) -> _T: ... def notify(self, n: int = ...) -> None: ... def notify_all(self) -> None: ... def notifyAll(self) -> None: ... class Semaphore: def __init__(self, value: int = ...) 
-> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> Optional[bool]: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... class BoundedSemaphore: def __init__(self, value: int = ...) -> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> Optional[bool]: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... class Event: def __init__(self) -> None: ... def is_set(self) -> bool: ... if sys.version_info < (3,): def isSet(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... def wait(self, timeout: Optional[float] = ...) -> bool: ... if sys.version_info >= (3, 8): from _thread import _ExceptHookArgs as ExceptHookArgs, ExceptHookArgs as _ExceptHookArgs # don't ask excepthook: Callable[[_ExceptHookArgs], Any] class Timer(Thread): if sys.version_info >= (3,): def __init__(self, interval: float, function: Callable[..., None], args: Optional[Iterable[Any]] = ..., kwargs: Optional[Mapping[str, Any]] = ...) -> None: ... else: def __init__(self, interval: float, function: Callable[..., None], args: Iterable[Any] = ..., kwargs: Mapping[str, Any] = ...) -> None: ... def cancel(self) -> None: ... if sys.version_info >= (3,): class Barrier: parties: int n_waiting: int broken: bool def __init__(self, parties: int, action: Optional[Callable[[], None]] = ..., timeout: Optional[float] = ...) -> None: ... def wait(self, timeout: Optional[float] = ...) -> int: ... def reset(self) -> None: ... def abort(self) -> None: ... 
class BrokenBarrierError(RuntimeError): ... mypy-0.761/mypy/typeshed/stdlib/2and3/time.pyi0000644€tŠÔÚ€2›s®0000000746113576752252025421 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'time' module.""" # See https://docs.python.org/3/library/time.html import sys from typing import Any, NamedTuple, Tuple, Union, Optional if sys.version_info >= (3, 3): from types import SimpleNamespace _TimeTuple = Tuple[int, int, int, int, int, int, int, int, int] if sys.version_info < (3, 3): accept2dyear: bool altzone: int daylight: int timezone: int tzname: Tuple[str, str] if sys.version_info >= (3, 7) and sys.platform != 'win32': CLOCK_BOOTTIME: int # Linux CLOCK_PROF: int # FreeBSD, NetBSD, OpenBSD CLOCK_UPTIME: int # FreeBSD, OpenBSD if sys.version_info >= (3, 3) and sys.platform != 'win32': CLOCK_HIGHRES: int # Solaris only CLOCK_MONOTONIC: int # Unix only CLOCK_MONOTONIC_RAW: int # Linux 2.6.28 or later CLOCK_PROCESS_CPUTIME_ID: int # Unix only CLOCK_REALTIME: int # Unix only CLOCK_THREAD_CPUTIME_ID: int # Unix only if sys.version_info >= (3, 8) and sys.platform == "darwin": CLOCK_UPTIME_RAW: int if sys.version_info >= (3, 3): class _struct_time(NamedTuple): tm_year: int tm_mon: int tm_mday: int tm_hour: int tm_min: int tm_sec: int tm_wday: int tm_yday: int tm_isdst: int tm_zone: str tm_gmtoff: int class struct_time(_struct_time): def __init__( self, o: Union[ Tuple[int, int, int, int, int, int, int, int, int], Tuple[int, int, int, int, int, int, int, int, int, str], Tuple[int, int, int, int, int, int, int, int, int, str, int] ], _arg: Any = ..., ) -> None: ... def __new__( cls, o: Union[ Tuple[int, int, int, int, int, int, int, int, int], Tuple[int, int, int, int, int, int, int, int, int, str], Tuple[int, int, int, int, int, int, int, int, int, str, int] ], _arg: Any = ..., ) -> struct_time: ... 
else: class _struct_time(NamedTuple): tm_year: int tm_mon: int tm_mday: int tm_hour: int tm_min: int tm_sec: int tm_wday: int tm_yday: int tm_isdst: int class struct_time(_struct_time): def __init__(self, o: _TimeTuple, _arg: Any = ...) -> None: ... def __new__(cls, o: _TimeTuple, _arg: Any = ...) -> struct_time: ... def asctime(t: Union[_TimeTuple, struct_time] = ...) -> str: ... if sys.version_info < (3, 8): def clock() -> float: ... def ctime(secs: Optional[float] = ...) -> str: ... def gmtime(secs: Optional[float] = ...) -> struct_time: ... def localtime(secs: Optional[float] = ...) -> struct_time: ... def mktime(t: Union[_TimeTuple, struct_time]) -> float: ... def sleep(secs: float) -> None: ... def strftime(format: str, t: Union[_TimeTuple, struct_time] = ...) -> str: ... def strptime(string: str, format: str = ...) -> struct_time: ... def time() -> float: ... if sys.platform != 'win32': def tzset() -> None: ... # Unix only if sys.version_info >= (3, 3): def get_clock_info(name: str) -> SimpleNamespace: ... def monotonic() -> float: ... def perf_counter() -> float: ... def process_time() -> float: ... if sys.platform != 'win32': def clock_getres(clk_id: int) -> float: ... # Unix only def clock_gettime(clk_id: int) -> float: ... # Unix only def clock_settime(clk_id: int, time: float) -> None: ... # Unix only if sys.version_info >= (3, 7): def clock_gettime_ns(clock_id: int) -> int: ... def clock_settime_ns(clock_id: int, time: int) -> int: ... def monotonic_ns() -> int: ... def perf_counter_ns() -> int: ... def process_time_ns() -> int: ... def time_ns() -> int: ... def thread_time() -> float: ... def thread_time_ns() -> int: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/timeit.pyi0000644€tŠÔÚ€2›s®0000000314213576752252025746 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for timeit (Python 2 and 3) import sys from typing import Any, Callable, Dict, IO, List, Optional, Sequence, Text, Tuple, Union _str = Union[str, Text] _Timer = Callable[[], float] _stmt = Union[_str, Callable[[], Any]] default_timer: _Timer class Timer: if sys.version_info >= (3, 5): def __init__(self, stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., globals: Optional[Dict[str, Any]] = ...) -> None: ... else: def __init__(self, stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ...) -> None: ... def print_exc(self, file: Optional[IO[str]] = ...) -> None: ... def timeit(self, number: int = ...) -> float: ... def repeat(self, repeat: int = ..., number: int = ...) -> List[float]: ... if sys.version_info >= (3, 6): def autorange(self, callback: Optional[Callable[[int, float], Any]] = ...) -> Tuple[int, float]: ... if sys.version_info >= (3, 5): def timeit(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., number: int = ..., globals: Optional[Dict[str, Any]] = ...) -> float: ... def repeat(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., repeat: int = ..., number: int = ..., globals: Optional[Dict[str, Any]] = ...) -> List[float]: ... else: def timeit(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., number: int = ...) -> float: ... def repeat(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., repeat: int = ..., number: int = ...) -> List[float]: ... def main(args: Optional[Sequence[str]]) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/token.pyi0000644€tŠÔÚ€2›s®0000000221113576752252025567 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Dict ENDMARKER: int NAME: int NUMBER: int STRING: int NEWLINE: int INDENT: int DEDENT: int LPAR: int RPAR: int LSQB: int RSQB: int COLON: int COMMA: int SEMI: int PLUS: int MINUS: int STAR: int SLASH: int VBAR: int AMPER: int LESS: int GREATER: int EQUAL: int DOT: int PERCENT: int if sys.version_info < (3,): BACKQUOTE: int LBRACE: int RBRACE: int EQEQUAL: int NOTEQUAL: int LESSEQUAL: int GREATEREQUAL: int TILDE: int CIRCUMFLEX: int LEFTSHIFT: int RIGHTSHIFT: int DOUBLESTAR: int PLUSEQUAL: int MINEQUAL: int STAREQUAL: int SLASHEQUAL: int PERCENTEQUAL: int AMPEREQUAL: int VBAREQUAL: int CIRCUMFLEXEQUAL: int LEFTSHIFTEQUAL: int RIGHTSHIFTEQUAL: int DOUBLESTAREQUAL: int DOUBLESLASH: int DOUBLESLASHEQUAL: int AT: int if sys.version_info >= (3,): RARROW: int ELLIPSIS: int if sys.version_info >= (3, 5): ATEQUAL: int AWAIT: int ASYNC: int OP: int ERRORTOKEN: int N_TOKENS: int NT_OFFSET: int tok_name: Dict[int, str] if sys.version_info >= (3, 7): COMMENT: int NL: int ENCODING: int def ISTERMINAL(x: int) -> bool: ... def ISNONTERMINAL(x: int) -> bool: ... def ISEOF(x: int) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/trace.pyi0000644€tŠÔÚ€2›s®0000000400013576752252025543 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for trace (Python 2 and 3) import os import sys import types from typing import Any, Callable, Mapping, Optional, Sequence, Text, Tuple, TypeVar, Union _T = TypeVar('_T') _localtrace = Callable[[types.FrameType, str, Any], Callable[..., Any]] if sys.version_info >= (3, 6): _Path = Union[Text, os.PathLike] else: _Path = Text class CoverageResults: def update(self, other: CoverageResults) -> None: ... def write_results(self, show_missing: bool = ..., summary: bool = ..., coverdir: Optional[_Path] = ...) -> None: ... 
def write_results_file(self, path: _Path, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: Optional[str] = ...) -> Tuple[int, int]: ... class Trace: def __init__(self, count: int = ..., trace: int = ..., countfuncs: int = ..., countcallers: int = ..., ignoremods: Sequence[str] = ..., ignoredirs: Sequence[str] = ..., infile: Optional[_Path] = ..., outfile: Optional[_Path] = ..., timing: bool = ...) -> None: ... def run(self, cmd: Union[str, types.CodeType]) -> None: ... def runctx(self, cmd: Union[str, types.CodeType], globals: Optional[Mapping[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ...) -> None: ... def runfunc(self, func: Callable[..., _T], *args: Any, **kw: Any) -> _T: ... def file_module_function_of(self, frame: types.FrameType) -> Tuple[str, Optional[str], str]: ... def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: ... def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: ... def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: ... def localtrace_trace_and_count(self, frame: types.FrameType, why: str, arg: Any) -> _localtrace: ... def localtrace_trace(self, frame: types.FrameType, why: str, arg: Any) -> _localtrace: ... def localtrace_count(self, frame: types.FrameType, why: str, arg: Any) -> _localtrace: ... def results(self) -> CoverageResults: ... mypy-0.761/mypy/typeshed/stdlib/2and3/traceback.pyi0000644€tŠÔÚ€2›s®0000001333413576752252026376 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for traceback from typing import Any, Dict, Generator, IO, Iterator, List, Mapping, Optional, Protocol, Tuple, Type, Iterable from types import FrameType, TracebackType import sys _PT = Tuple[str, int, str, Optional[str]] def print_tb(tb: Optional[TracebackType], limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... 
if sys.version_info >= (3,): def print_exception(etype: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[TracebackType], limit: Optional[int] = ..., file: Optional[IO[str]] = ..., chain: bool = ...) -> None: ... def print_exc(limit: Optional[int] = ..., file: Optional[IO[str]] = ..., chain: bool = ...) -> None: ... def print_last(limit: Optional[int] = ..., file: Optional[IO[str]] = ..., chain: bool = ...) -> None: ... else: def print_exception(etype: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[TracebackType], limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... def print_exc(limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... def print_last(limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... def print_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... if sys.version_info >= (3, 5): def extract_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> StackSummary: ... def extract_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ...) -> StackSummary: ... def format_list(extracted_list: List[FrameSummary]) -> List[str]: ... class _Writer(Protocol): def write(self, s: str) -> Any: ... # undocumented def print_list(extracted_list: List[FrameSummary], file: Optional[_Writer] = ...) -> None: ... else: def extract_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[_PT]: ... def extract_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ...) -> List[_PT]: ... def format_list(extracted_list: List[_PT]) -> List[str]: ... def format_exception_only(etype: Optional[Type[BaseException]], value: Optional[BaseException]) -> List[str]: ... if sys.version_info >= (3,): def format_exception(etype: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[TracebackType], limit: Optional[int] = ..., chain: bool = ...) -> List[str]: ... 
def format_exc(limit: Optional[int] = ..., chain: bool = ...) -> str: ... else: def format_exception(etype: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[str]: ... def format_exc(limit: Optional[int] = ...) -> str: ... def format_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[str]: ... def format_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ...) -> List[str]: ... if sys.version_info >= (3, 4): def clear_frames(tb: TracebackType) -> None: ... if sys.version_info >= (3, 5): def walk_stack(f: Optional[FrameType]) -> Iterator[Tuple[FrameType, int]]: ... def walk_tb(tb: Optional[TracebackType]) -> Iterator[Tuple[FrameType, int]]: ... if sys.version_info < (3,): def tb_lineno(tb: TracebackType) -> int: ... if sys.version_info >= (3, 5): class TracebackException: __cause__: TracebackException __context__: TracebackException __suppress_context__: bool stack: StackSummary exc_type: Type[BaseException] filename: str lineno: int text: str offset: int msg: str def __init__(self, exc_type: Type[BaseException], exc_value: BaseException, exc_traceback: TracebackType, *, limit: Optional[int] = ..., lookup_lines: bool = ..., capture_locals: bool = ...) -> None: ... @classmethod def from_exception(cls, exc: BaseException, *, limit: Optional[int] = ..., lookup_lines: bool = ..., capture_locals: bool = ...) -> TracebackException: ... def format(self, *, chain: bool = ...) -> Generator[str, None, None]: ... def format_exception_only(self) -> Generator[str, None, None]: ... class FrameSummary(Iterable[Any]): filename: str lineno: int name: str line: str locals: Optional[Dict[str, str]] def __init__(self, filename: str, lineno: int, name: str, lookup_line: bool = ..., locals: Optional[Mapping[str, str]] = ..., line: Optional[str] = ...) -> None: ... # TODO: more precise typing for __getitem__ and __iter__, # for a namedtuple-like view on (filename, lineno, name, str). 
def __getitem__(self, i: int) -> Any: ... def __iter__(self) -> Iterator[Any]: ... class StackSummary(List[FrameSummary]): @classmethod def extract(cls, frame_gen: Generator[Tuple[FrameType, int], None, None], *, limit: Optional[int] = ..., lookup_lines: bool = ..., capture_locals: bool = ...) -> StackSummary: ... @classmethod def from_list(cls, a_list: List[_PT]) -> StackSummary: ... def format(self) -> List[str]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/tty.pyi0000644€tŠÔÚ€2›s®0000000046113576752252025274 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tty (Python 3.6) from typing import IO, Union _FD = Union[int, IO[str]] # XXX: Undocumented integer constants IFLAG: int OFLAG: int CFLAG: int LFLAG: int ISPEED: int OSPEED: int CC: int def setraw(fd: _FD, when: int = ...) -> None: ... def setcbreak(fd: _FD, when: int = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/turtle.pyi0000644€tŠÔÚ€2›s®0000004362013576752252025777 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, overload, Optional, Union, Dict, Any, Sequence, TypeVar, List, Callable, Text import sys if sys.version_info >= (3,): from tkinter import Canvas, PhotoImage else: # TODO: Replace these aliases once we have Python 2 stubs for the Tkinter module. Canvas = Any PhotoImage = Any # Note: '_Color' is the alias we use for arguments and _AnyColor is the # alias we use for return types. Really, these two aliases should be the # same, but as per the "no union returns" typeshed policy, we'll return # Any instead. _Color = Union[Text, Tuple[float, float, float]] _AnyColor = Any # TODO: Replace this with a TypedDict once it becomes standardized. _PenState = Dict[str, Any] _Speed = Union[str, float] _PolygonCoords = Sequence[Tuple[float, float]] # TODO: Type this more accurately # Vec2D is actually a custom subclass of 'tuple'. Vec2D = Tuple[float, float] class TurtleScreenBase(object): cv: Canvas = ... canvwidth: int = ... canvheight: int = ... xscale: float = ... 
yscale: float = ... def __init__(self, cv: Canvas) -> None: ... if sys.version_info >= (3,): def mainloop(self) -> None: ... def textinput(self, title: str, prompt: str) -> Optional[str]: ... def numinput(self, title: str, prompt: str, default: Optional[float] = ..., minval: Optional[float] = ..., maxval: Optional[float] = ...) -> Optional[float]: ... class Terminator(Exception): ... class TurtleGraphicsError(Exception): ... class Shape(object): def __init__(self, type_: str, data: Union[_PolygonCoords, PhotoImage, None] = ...) -> None: ... def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: Optional[_Color] = ...) -> None: ... class TurtleScreen(TurtleScreenBase): def __init__(self, cv: Canvas, mode: str = ..., colormode: float = ..., delay: int = ...) -> None: ... def clear(self) -> None: ... @overload def mode(self) -> str: ... @overload def mode(self, mode: str) -> None: ... def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: ... def register_shape(self, name: str, shape: Union[_PolygonCoords, Shape, None] = ...) -> None: ... @overload def colormode(self) -> float: ... @overload def colormode(self, cmode: float) -> None: ... def reset(self) -> None: ... def turtles(self) -> List[Turtle]: ... @overload def bgcolor(self) -> _AnyColor: ... @overload def bgcolor(self, color: _Color) -> None: ... @overload def bgcolor(self, r: float, g: float, b: float) -> None: ... @overload def tracer(self) -> int: ... @overload def tracer(self, n: int, delay: Optional[int] = ...) -> None: ... @overload def delay(self) -> int: ... @overload def delay(self, delay: int) -> None: ... def update(self) -> None: ... def window_width(self) -> int: ... def window_height(self) -> int: ... def getcanvas(self) -> Canvas: ... def getshapes(self) -> List[str]: ... def onclick(self, fun: Callable[[float, float], Any], btn: int = ..., add: Optional[Any] = ...) -> None: ... def onkey(self, fun: Callable[[], Any], key: str) -> None: ... 
def listen(self, xdummy: Optional[float] = ..., ydummy: Optional[float] = ...) -> None: ... def ontimer(self, fun: Callable[[], Any], t: int = ...) -> None: ... @overload def bgpic(self) -> str: ... @overload def bgpic(self, picname: str) -> None: ... @overload def screensize(self) -> Tuple[int, int]: ... @overload def screensize(self, canvwidth: int, canvheight: int, bg: Optional[_Color] = ...) -> None: ... onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape if sys.version_info >= (3,): def onkeypress(self, fun: Callable[[], Any], key: Optional[str] = ...) -> None: ... onkeyrelease = onkey class TNavigator(object): START_ORIENTATION: Dict[str, Vec2D] = ... DEFAULT_MODE: str = ... DEFAULT_ANGLEOFFSET: int = ... DEFAULT_ANGLEORIENT: int = ... def __init__(self, mode: str = ...) -> None: ... def reset(self) -> None: ... def degrees(self, fullcircle: float = ...) -> None: ... def radians(self) -> None: ... def forward(self, distance: float) -> None: ... def back(self, distance: float) -> None: ... def right(self, angle: float) -> None: ... def left(self, angle: float) -> None: ... def pos(self) -> Vec2D: ... def xcor(self) -> float: ... def ycor(self) -> float: ... @overload def goto(self, x: Tuple[float, float]) -> None: ... @overload def goto(self, x: float, y: float) -> None: ... def home(self) -> None: ... def setx(self, x: float) -> None: ... def sety(self, y: float) -> None: ... @overload def distance(self, x: Union[TNavigator, Tuple[float, float]]) -> float: ... @overload def distance(self, x: float, y: float) -> float: ... @overload def towards(self, x: Union[TNavigator, Tuple[float, float]]) -> float: ... @overload def towards(self, x: float, y: float) -> float: ... def heading(self) -> float: ... def setheading(self, to_angle: float) -> None: ... def circle(self, radius: float, extent: Optional[float] = ..., steps: Optional[int] = ...) -> None: ... 
fd = forward bk = back backward = back rt = right lt = left position = pos setpos = goto setposition = goto seth = setheading class TPen(object): def __init__(self, resizemode: str = ...) -> None: ... @overload def resizemode(self) -> str: ... @overload def resizemode(self, rmode: str) -> None: ... @overload def pensize(self) -> int: ... @overload def pensize(self, width: int) -> None: ... def penup(self) -> None: ... def pendown(self) -> None: ... def isdown(self) -> bool: ... @overload def speed(self) -> int: ... @overload def speed(self, speed: _Speed) -> None: ... @overload def pencolor(self) -> _AnyColor: ... @overload def pencolor(self, color: _Color) -> None: ... @overload def pencolor(self, r: float, g: float, b: float) -> None: ... @overload def fillcolor(self) -> _AnyColor: ... @overload def fillcolor(self, color: _Color) -> None: ... @overload def fillcolor(self, r: float, g: float, b: float) -> None: ... @overload def color(self) -> Tuple[_AnyColor, _AnyColor]: ... @overload def color(self, color: _Color) -> None: ... @overload def color(self, r: float, g: float, b: float) -> None: ... @overload def color(self, color1: _Color, color2: _Color) -> None: ... def showturtle(self) -> None: ... def hideturtle(self) -> None: ... def isvisible(self) -> bool: ... # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload def pen(self) -> _PenState: ... # type: ignore @overload def pen(self, pen: Optional[_PenState] = ..., *, shown: bool = ..., pendown: bool = ..., pencolor: _Color = ..., fillcolor: _Color = ..., pensize: int = ..., speed: int = ..., resizemode: str = ..., stretchfactor: Tuple[float, float] = ..., outline: int = ..., tilt: float = ...) -> None: ... 
width = pensize up = penup pu = penup pd = pendown down = pendown st = showturtle ht = hideturtle _T = TypeVar('_T') class RawTurtle(TPen, TNavigator): def __init__(self, canvas: Union[Canvas, TurtleScreen, None] = ..., shape: str = ..., undobuffersize: int = ..., visible: bool = ...) -> None: ... def reset(self) -> None: ... def setundobuffer(self, size: Optional[int]) -> None: ... def undobufferentries(self) -> int: ... def clear(self) -> None: ... def clone(self: _T) -> _T: ... @overload def shape(self) -> str: ... @overload def shape(self, name: str) -> None: ... # Unsafely overlaps when no arguments are provided @overload def shapesize(self) -> Tuple[float, float, float]: ... # type: ignore @overload def shapesize(self, stretch_wid: Optional[float] = ..., stretch_len: Optional[float] = ..., outline: Optional[float] = ...) -> None: ... if sys.version_info >= (3,): @overload def shearfactor(self) -> float: ... @overload def shearfactor(self, shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @overload def shapetransform(self) -> Tuple[float, float, float, float]: ... # type: ignore @overload def shapetransform(self, t11: Optional[float] = ..., t12: Optional[float] = ..., t21: Optional[float] = ..., t22: Optional[float] = ...) -> None: ... def get_shapepoly(self) -> Optional[_PolygonCoords]: ... def settiltangle(self, angle: float) -> None: ... @overload def tiltangle(self) -> float: ... @overload def tiltangle(self, angle: float) -> None: ... def tilt(self, angle: float) -> None: ... # Can return either 'int' or Tuple[int, ...] based on if the stamp is # a compound stamp or not. So, as per the "no Union return" policy, # we return Any. def stamp(self) -> Any: ... def clearstamp(self, stampid: Union[int, Tuple[int, ...]]) -> None: ... def clearstamps(self, n: Optional[int] = ...) -> None: ... def filling(self) -> bool: ... def begin_fill(self) -> None: ... def end_fill(self) -> None: ... 
def dot(self, size: Optional[int] = ..., *color: _Color) -> None: ... def write(self, arg: object, move: bool = ..., align: str = ..., font: Tuple[str, int, str] = ...) -> None: ... def begin_poly(self) -> None: ... def end_poly(self) -> None: ... def get_poly(self) -> Optional[_PolygonCoords]: ... def getscreen(self) -> TurtleScreen: ... def getturtle(self: _T) -> _T: ... getpen = getturtle def onclick(self, fun: Callable[[float, float], Any], btn: int = ..., add: Optional[bool] = ...) -> None: ... def onrelease(self, fun: Callable[[float, float], Any], btn: int = ..., add: Optional[bool] = ...) -> None: ... def ondrag(self, fun: Callable[[float, float], Any], btn: int = ..., add: Optional[bool] = ...) -> None: ... def undo(self) -> None: ... turtlesize = shapesize class _Screen(TurtleScreen): def __init__(self) -> None: ... def setup(self, width: int = ..., height: int = ..., startx: int = ..., starty: int = ...) -> None: ... def title(self, titlestring: str) -> None: ... def bye(self) -> None: ... def exitonclick(self) -> None: ... def Screen() -> _Screen: ... class Turtle(RawTurtle): def __init__(self, shape: str = ..., undobuffersize: int = ..., visible: bool = ...) -> None: ... RawPen = RawTurtle Pen = Turtle def write_docstringdict(filename: str) -> None: ... # Note: it's somewhat unfortunate that we have to copy the function signatures. # It would be nice if we could partially reduce the redundancy by doing something # like the following: # # _screen: Screen # clear = _screen.clear # # However, it seems pytype does not support this type of syntax in pyi files. # Functions copied from TurtleScreenBase: # Note: mainloop() was always present in the global scope, but was added to # TurtleScreenBase in Python 3.0 def mainloop() -> None: ... if sys.version_info >= (3,): def textinput(title: str, prompt: str) -> Optional[str]: ... def numinput(title: str, prompt: str, default: Optional[float] = ..., minval: Optional[float] = ..., maxval: Optional[float] = ...) 
-> Optional[float]: ... # Functions copied from TurtleScreen: def clear() -> None: ... @overload def mode() -> str: ... @overload def mode(mode: str) -> None: ... def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: ... def register_shape(name: str, shape: Union[_PolygonCoords, Shape, None] = ...) -> None: ... @overload def colormode() -> float: ... @overload def colormode(cmode: float) -> None: ... def reset() -> None: ... def turtles() -> List[Turtle]: ... @overload def bgcolor() -> _AnyColor: ... @overload def bgcolor(color: _Color) -> None: ... @overload def bgcolor(r: float, g: float, b: float) -> None: ... @overload def tracer() -> int: ... @overload def tracer(n: int, delay: Optional[int] = ...) -> None: ... @overload def delay() -> int: ... @overload def delay(delay: int) -> None: ... def update() -> None: ... def window_width() -> int: ... def window_height() -> int: ... def getcanvas() -> Canvas: ... def getshapes() -> List[str]: ... def onclick(fun: Callable[[float, float], Any], btn: int = ..., add: Optional[Any] = ...) -> None: ... def onkey(fun: Callable[[], Any], key: str) -> None: ... def listen(xdummy: Optional[float] = ..., ydummy: Optional[float] = ...) -> None: ... def ontimer(fun: Callable[[], Any], t: int = ...) -> None: ... @overload def bgpic() -> str: ... @overload def bgpic(picname: str) -> None: ... @overload def screensize() -> Tuple[int, int]: ... @overload def screensize(canvwidth: int, canvheight: int, bg: Optional[_Color] = ...) -> None: ... onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape if sys.version_info >= (3,): def onkeypress(fun: Callable[[], Any], key: Optional[str] = ...) -> None: ... onkeyrelease = onkey # Functions copied from TNavigator: def degrees(fullcircle: float = ...) -> None: ... def radians() -> None: ... def forward(distance: float) -> None: ... def back(distance: float) -> None: ... def right(angle: float) -> None: ... 
def left(angle: float) -> None: ... def pos() -> Vec2D: ... def xcor() -> float: ... def ycor() -> float: ... @overload def goto(x: Tuple[float, float]) -> None: ... @overload def goto(x: float, y: float) -> None: ... def home() -> None: ... def setx(x: float) -> None: ... def sety(y: float) -> None: ... @overload def distance(x: Union[TNavigator, Tuple[float, float]]) -> float: ... @overload def distance(x: float, y: float) -> float: ... @overload def towards(x: Union[TNavigator, Tuple[float, float]]) -> float: ... @overload def towards(x: float, y: float) -> float: ... def heading() -> float: ... def setheading(to_angle: float) -> None: ... def circle(radius: float, extent: Optional[float] = ..., steps: Optional[int] = ...) -> None: ... fd = forward bk = back backward = back rt = right lt = left position = pos setpos = goto setposition = goto seth = setheading # Functions copied from TPen: @overload def resizemode() -> str: ... @overload def resizemode(rmode: str) -> None: ... @overload def pensize() -> int: ... @overload def pensize(width: int) -> None: ... def penup() -> None: ... def pendown() -> None: ... def isdown() -> bool: ... @overload def speed() -> int: ... @overload def speed(speed: _Speed) -> None: ... @overload def pencolor() -> _AnyColor: ... @overload def pencolor(color: _Color) -> None: ... @overload def pencolor(r: float, g: float, b: float) -> None: ... @overload def fillcolor() -> _AnyColor: ... @overload def fillcolor(color: _Color) -> None: ... @overload def fillcolor(r: float, g: float, b: float) -> None: ... @overload def color() -> Tuple[_AnyColor, _AnyColor]: ... @overload def color(color: _Color) -> None: ... @overload def color(r: float, g: float, b: float) -> None: ... @overload def color(color1: _Color, color2: _Color) -> None: ... def showturtle() -> None: ... def hideturtle() -> None: ... def isvisible() -> bool: ... # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload def pen() -> _PenState: ... 
# type: ignore @overload def pen(pen: Optional[_PenState] = ..., *, shown: bool = ..., pendown: bool = ..., pencolor: _Color = ..., fillcolor: _Color = ..., pensize: int = ..., speed: int = ..., resizemode: str = ..., stretchfactor: Tuple[float, float] = ..., outline: int = ..., tilt: float = ...) -> None: ... width = pensize up = penup pu = penup pd = pendown down = pendown st = showturtle ht = hideturtle # Functions copied from RawTurtle: def setundobuffer(size: Optional[int]) -> None: ... def undobufferentries() -> int: ... @overload def shape() -> str: ... @overload def shape(name: str) -> None: ... # Unsafely overlaps when no arguments are provided @overload def shapesize() -> Tuple[float, float, float]: ... # type: ignore @overload def shapesize(stretch_wid: Optional[float] = ..., stretch_len: Optional[float] = ..., outline: Optional[float] = ...) -> None: ... if sys.version_info >= (3,): @overload def shearfactor() -> float: ... @overload def shearfactor(shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @overload def shapetransform() -> Tuple[float, float, float, float]: ... # type: ignore @overload def shapetransform(t11: Optional[float] = ..., t12: Optional[float] = ..., t21: Optional[float] = ..., t22: Optional[float] = ...) -> None: ... def get_shapepoly() -> Optional[_PolygonCoords]: ... def settiltangle(angle: float) -> None: ... @overload def tiltangle() -> float: ... @overload def tiltangle(angle: float) -> None: ... def tilt(angle: float) -> None: ... # Can return either 'int' or Tuple[int, ...] based on if the stamp is # a compound stamp or not. So, as per the "no Union return" policy, # we return Any. def stamp() -> Any: ... def clearstamp(stampid: Union[int, Tuple[int, ...]]) -> None: ... def clearstamps(n: Optional[int] = ...) -> None: ... def filling() -> bool: ... def begin_fill() -> None: ... def end_fill() -> None: ... def dot(size: Optional[int] = ..., *color: _Color) -> None: ... 
def write(arg: object, move: bool = ..., align: str = ..., font: Tuple[str, int, str] = ...) -> None: ... def begin_poly() -> None: ... def end_poly() -> None: ... def get_poly() -> Optional[_PolygonCoords]: ... def getscreen() -> TurtleScreen: ... def getturtle() -> Turtle: ... getpen = getturtle def onrelease(fun: Callable[[float, float], Any], btn: int = ..., add: Optional[Any] = ...) -> None: ... def ondrag(fun: Callable[[float, float], Any], btn: int = ..., add: Optional[Any] = ...) -> None: ... def undo() -> None: ... turtlesize = shapesize # Functions copied from RawTurtle with a few tweaks: def clone() -> Turtle: ... # Extra functions present only in the global scope: done = mainloop mypy-0.761/mypy/typeshed/stdlib/2and3/unicodedata.pyi0000644€tŠÔÚ€2›s®0000000355413576752252026742 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Text, TypeVar, Union ucd_3_2_0: UCD ucnhash_CAPI: Any unidata_version: str _default = TypeVar('_default') def bidirectional(__chr: Text) -> Text: ... def category(__chr: Text) -> Text: ... def combining(__chr: Text) -> int: ... def decimal(__chr: Text, __default: _default = ...) -> Union[int, _default]: ... def decomposition(__chr: Text) -> Text: ... def digit(__chr: Text, __default: _default = ...) -> Union[int, _default]: ... def east_asian_width(__chr: Text) -> Text: ... if sys.version_info >= (3, 8): def is_normalized(__form: str, __unistr: str) -> bool: ... def lookup(__name: Union[Text, bytes]) -> Text: ... def mirrored(__chr: Text) -> int: ... def name(__chr: Text, __default: _default = ...) -> Union[Text, _default]: ... def normalize(__form: Text, __unistr: Text) -> Text: ... def numeric(__chr: Text, __default: _default = ...) -> Union[float, _default]: ... class UCD(object): # The methods below are constructed from the same array in C # (unicodedata_functions) and hence identical to the methods above. unidata_version: str def bidirectional(self, __chr: Text) -> str: ... 
def category(self, __chr: Text) -> str: ... def combining(self, __chr: Text) -> int: ... def decimal(self, __chr: Text, __default: _default = ...) -> Union[int, _default]: ... def decomposition(self, __chr: Text) -> str: ... def digit(self, __chr: Text, __default: _default = ...) -> Union[int, _default]: ... def east_asian_width(self, __chr: Text) -> str: ... def lookup(self, __name: Union[Text, bytes]) -> Text: ... def mirrored(self, __chr: Text) -> int: ... def name(self, __chr: Text, __default: _default = ...) -> Union[Text, _default]: ... def normalize(self, __form: Text, __unistr: Text) -> Text: ... def numeric(self, __chr: Text, __default: _default = ...) -> Union[float, _default]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/uu.pyi0000644€tŠÔÚ€2›s®0000000106213576752252025103 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for uu (Python 2 and 3) import sys from typing import BinaryIO, Union, Optional, Text _File = Union[Text, BinaryIO] class Error(Exception): ... if sys.version_info >= (3, 7): def encode(in_file: _File, out_file: _File, name: Optional[str] = ..., mode: Optional[int] = ..., backtick: bool = ...) -> None: ... else: def encode(in_file: _File, out_file: _File, name: Optional[str] = ..., mode: Optional[int] = ...) -> None: ... def decode(in_file: _File, out_file: Optional[_File] = ..., mode: Optional[int] = ..., quiet: int = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/uuid.pyi0000644€tŠÔÚ€2›s®0000000541613576752252025427 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for uuid import sys from typing import Tuple, Optional, Any, Text # Because UUID has properties called int and bytes we need to rename these temporarily. _Int = int _Bytes = bytes _FieldsType = Tuple[int, int, int, int, int, int] class UUID: def __init__(self, hex: Optional[Text] = ..., bytes: Optional[_Bytes] = ..., bytes_le: Optional[_Bytes] = ..., fields: Optional[_FieldsType] = ..., int: Optional[_Int] = ..., version: Optional[_Int] = ...) -> None: ... 
@property def bytes(self) -> _Bytes: ... @property def bytes_le(self) -> _Bytes: ... @property def clock_seq(self) -> _Int: ... @property def clock_seq_hi_variant(self) -> _Int: ... @property def clock_seq_low(self) -> _Int: ... @property def fields(self) -> _FieldsType: ... @property def hex(self) -> str: ... @property def int(self) -> _Int: ... @property def node(self) -> _Int: ... @property def time(self) -> _Int: ... @property def time_hi_version(self) -> _Int: ... @property def time_low(self) -> _Int: ... @property def time_mid(self) -> _Int: ... @property def urn(self) -> str: ... @property def variant(self) -> str: ... @property def version(self) -> Optional[_Int]: ... def __int__(self) -> _Int: ... if sys.version_info >= (3,): def __eq__(self, other: Any) -> bool: ... def __lt__(self, other: Any) -> bool: ... def __le__(self, other: Any) -> bool: ... def __gt__(self, other: Any) -> bool: ... def __ge__(self, other: Any) -> bool: ... else: def get_bytes(self) -> _Bytes: ... def get_bytes_le(self) -> _Bytes: ... def get_clock_seq(self) -> _Int: ... def get_clock_seq_hi_variant(self) -> _Int: ... def get_clock_seq_low(self) -> _Int: ... def get_fields(self) -> _FieldsType: ... def get_hex(self) -> str: ... def get_node(self) -> _Int: ... def get_time(self) -> _Int: ... def get_time_hi_version(self) -> _Int: ... def get_time_low(self) -> _Int: ... def get_time_mid(self) -> _Int: ... def get_urn(self) -> str: ... def get_variant(self) -> str: ... def get_version(self) -> Optional[_Int]: ... def __cmp__(self, other: Any) -> _Int: ... def getnode() -> int: ... def uuid1(node: Optional[_Int] = ..., clock_seq: Optional[_Int] = ...) -> UUID: ... def uuid3(namespace: UUID, name: str) -> UUID: ... def uuid4() -> UUID: ... def uuid5(namespace: UUID, name: str) -> UUID: ... 
NAMESPACE_DNS: UUID NAMESPACE_URL: UUID NAMESPACE_OID: UUID NAMESPACE_X500: UUID RESERVED_NCS: str RFC_4122: str RESERVED_MICROSOFT: str RESERVED_FUTURE: str mypy-0.761/mypy/typeshed/stdlib/2and3/warnings.pyi0000644€tŠÔÚ€2›s®0000000337213576752252026310 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, NamedTuple, Optional, overload, TextIO, Type from types import ModuleType, TracebackType from typing_extensions import Literal from _warnings import warn as warn, warn_explicit as warn_explicit def showwarning( message: str, category: Type[Warning], filename: str, lineno: int, file: Optional[TextIO] = ..., line: Optional[str] = ... ) -> None: ... def formatwarning(message: str, category: Type[Warning], filename: str, lineno: int, line: Optional[str] = ...) -> str: ... def filterwarnings( action: str, message: str = ..., category: Type[Warning] = ..., module: str = ..., lineno: int = ..., append: bool = ... ) -> None: ... def simplefilter(action: str, category: Type[Warning] = ..., lineno: int = ..., append: bool = ...) -> None: ... def resetwarnings() -> None: ... class _Record(NamedTuple): message: str category: Type[Warning] filename: str lineno: int file: Optional[TextIO] line: Optional[str] class catch_warnings: @overload def __new__(cls, *, record: Literal[False] = ..., module: Optional[ModuleType] = ...) -> _catch_warnings_without_records: ... @overload def __new__(cls, *, record: Literal[True], module: Optional[ModuleType] = ...) -> _catch_warnings_with_records: ... @overload def __new__(cls, *, record: bool, module: Optional[ModuleType] = ...) -> catch_warnings: ... def __enter__(self) -> Optional[List[_Record]]: ... def __exit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType] ) -> None: ... class _catch_warnings_without_records(catch_warnings): def __enter__(self) -> None: ... class _catch_warnings_with_records(catch_warnings): def __enter__(self) -> List[_Record]: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/wave.pyi0000644€tŠÔÚ€2›s®0000000513613576752252025422 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for wave (Python 2 and 3) import sys from typing import ( Any, NamedTuple, NoReturn, Optional, Text, BinaryIO, Union, Tuple ) _File = Union[Text, BinaryIO] class Error(Exception): ... WAVE_FORMAT_PCM: int if sys.version_info < (3, 0): _wave_params = Tuple[int, int, int, int, str, str] else: class _wave_params(NamedTuple): nchannels: int sampwidth: int framerate: int nframes: int comptype: str compname: str class Wave_read: def __init__(self, f: _File) -> None: ... if sys.version_info >= (3, 0): def __enter__(self) -> Wave_read: ... def __exit__(self, *args: Any) -> None: ... def getfp(self) -> Optional[BinaryIO]: ... def rewind(self) -> None: ... def close(self) -> None: ... def tell(self) -> int: ... def getnchannels(self) -> int: ... def getnframes(self) -> int: ... def getsampwidth(self) -> int: ... def getframerate(self) -> int: ... def getcomptype(self) -> str: ... def getcompname(self) -> str: ... def getparams(self) -> _wave_params: ... def getmarkers(self) -> None: ... def getmark(self, id: Any) -> NoReturn: ... def setpos(self, pos: int) -> None: ... def readframes(self, nframes: int) -> bytes: ... class Wave_write: def __init__(self, f: _File) -> None: ... if sys.version_info >= (3, 0): def __enter__(self) -> Wave_write: ... def __exit__(self, *args: Any) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... def setsampwidth(self, sampwidth: int) -> None: ... def getsampwidth(self) -> int: ... def setframerate(self, framerate: float) -> None: ... def getframerate(self) -> int: ... def setnframes(self, nframes: int) -> None: ... def getnframes(self) -> int: ... def setcomptype(self, comptype: str, compname: str) -> None: ... def getcomptype(self) -> str: ... def getcompname(self) -> str: ... def setparams(self, params: _wave_params) -> None: ... 
def getparams(self) -> _wave_params: ... def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ... def getmark(self, id: Any) -> NoReturn: ... def getmarkers(self) -> None: ... def tell(self) -> int: ... # should be any bytes-like object after 3.4, but we don't have a type for that def writeframesraw(self, data: bytes) -> None: ... def writeframes(self, data: bytes) -> None: ... def close(self) -> None: ... # Returns a Wave_read if mode is rb and Wave_write if mode is wb def open(f: _File, mode: Optional[str] = ...) -> Any: ... openfp = open mypy-0.761/mypy/typeshed/stdlib/2and3/weakref.pyi0000644€tŠÔÚ€2›s®0000001030013576752252026071 0ustar jukkaDROPBOX\Domain Users00000000000000import sys import types from typing import ( TypeVar, Generic, Any, Callable, overload, Mapping, Iterator, Tuple, Iterable, Optional, Type, MutableMapping, Union, List, Dict ) from _weakref import ( getweakrefcount as getweakrefcount, getweakrefs as getweakrefs, ref as ref, proxy as proxy, CallableProxyType as CallableProxyType, ProxyType as ProxyType, ReferenceType as ReferenceType) from _weakrefset import WeakSet as WeakSet if sys.version_info < (3, 0): from exceptions import ReferenceError as ReferenceError _S = TypeVar('_S') _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') ProxyTypes: Tuple[Type[Any], ...] if sys.version_info >= (3, 4): class WeakMethod(ref[types.MethodType]): def __new__(cls, meth: types.MethodType, callback: Optional[Callable[[types.MethodType], Any]] = ...) -> WeakMethod: ... def __call__(self) -> Optional[types.MethodType]: ... class WeakValueDictionary(MutableMapping[_KT, _VT]): @overload def __init__(self) -> None: ... @overload def __init__(self, __map: Union[Mapping[_KT, _VT], Iterable[Tuple[_KT, _VT]]], **kwargs: _VT) -> None: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... 
if sys.version_info < (3, 0): def has_key(self, key: object) -> bool: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT]: ... def __str__(self) -> str: ... def copy(self) -> WeakValueDictionary[_KT, _VT]: ... if sys.version_info < (3, 0): def keys(self) -> List[_KT]: ... def values(self) -> List[_VT]: ... def items(self) -> List[Tuple[_KT, _VT]]: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... else: # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... # type: ignore def values(self) -> Iterator[_VT]: ... # type: ignore def items(self) -> Iterator[Tuple[_KT, _VT]]: ... # type: ignore def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ... def valuerefs(self) -> List[KeyedRef[_KT, _VT]]: ... class KeyedRef(ref[_T], Generic[_KT, _T]): key: _KT def __init__(self, ob: _T, callback: Callable[[_T], Any], key: _KT) -> None: ... class WeakKeyDictionary(MutableMapping[_KT, _VT]): @overload def __init__(self) -> None: ... @overload def __init__(self, __map: Union[Mapping[_KT, _VT], Iterable[Tuple[_KT, _VT]]], **kwargs: _VT) -> None: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... if sys.version_info < (3, 0): def has_key(self, key: object) -> bool: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT]: ... def __str__(self) -> str: ... def copy(self) -> WeakKeyDictionary[_KT, _VT]: ... if sys.version_info < (3, 0): def keys(self) -> List[_KT]: ... def values(self) -> List[_VT]: ... def items(self) -> List[Tuple[_KT, _VT]]: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... def iterkeyrefs(self) -> Iterator[ref[_KT]]: ... 
else: # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... # type: ignore def values(self) -> Iterator[_VT]: ... # type: ignore def items(self) -> Iterator[Tuple[_KT, _VT]]: ... # type: ignore def keyrefs(self) -> List[ref[_KT]]: ... if sys.version_info >= (3, 4): class finalize: def __init__(self, obj: _S, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ... def __call__(self, _: Any = ...) -> Optional[_T]: ... def detach(self) -> Optional[Tuple[_S, _T, Tuple[Any, ...], Dict[str, Any]]]: ... def peek(self) -> Optional[Tuple[_S, _T, Tuple[Any, ...], Dict[str, Any]]]: ... alive: bool atexit: bool mypy-0.761/mypy/typeshed/stdlib/2and3/webbrowser.pyi0000644€tŠÔÚ€2›s®0000000612613576752252026641 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Optional, Callable, List, Text, Union, Sequence class Error(Exception): ... if sys.version_info >= (3, 7): def register(name: Text, klass: Optional[Callable[[], BaseBrowser]], instance: BaseBrowser = ..., *, preferred: bool = ...) -> None: ... else: def register(name: Text, klass: Optional[Callable[[], BaseBrowser]], instance: BaseBrowser = ..., update_tryorder: int = ...) -> None: ... def get(using: Optional[Text] = ...) -> BaseBrowser: ... def open(url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... def open_new(url: Text) -> bool: ... def open_new_tab(url: Text) -> bool: ... class BaseBrowser: args: List[str] name: str basename: str def __init__(self, name: Text = ...) -> None: ... def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... def open_new(self, url: Text) -> bool: ... def open_new_tab(self, url: Text) -> bool: ... class GenericBrowser(BaseBrowser): args: List[str] name: str basename: str def __init__(self, name: Union[Text, Sequence[Text]]) -> None: ... def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... 
class BackgroundBrowser(GenericBrowser): def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class UnixBrowser(BaseBrowser): raise_opts: List[str] background: bool redirect_stdout: bool remote_args: List[str] remote_action: str remote_action_newwin: str remote_action_newtab: str def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class Mozilla(UnixBrowser): raise_opts: List[str] remote_args: List[str] remote_action: str remote_action_newwin: str remote_action_newtab: str background: bool class Galeon(UnixBrowser): raise_opts: List[str] remote_args: List[str] remote_action: str remote_action_newwin: str background: bool class Chrome(UnixBrowser): remote_args: List[str] remote_action: str remote_action_newwin: str remote_action_newtab: str background: bool class Opera(UnixBrowser): raise_opts: List[str] remote_args: List[str] remote_action: str remote_action_newwin: str remote_action_newtab: str background: bool class Elinks(UnixBrowser): remote_args: List[str] remote_action: str remote_action_newwin: str remote_action_newtab: str background: bool redirect_stdout: bool class Konqueror(BaseBrowser): def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class Grail(BaseBrowser): def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class WindowsDefault(BaseBrowser): def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class MacOSX(BaseBrowser): name: str def __init__(self, name: Text) -> None: ... def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class MacOSXOSAScript(BaseBrowser): def __init__(self, name: Text) -> None: ... def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/wsgiref/0000755€tŠÔÚ€2›s®0000000000013576752267025404 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/wsgiref/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252027646 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/wsgiref/handlers.pyi0000644€tŠÔÚ€2›s®0000000603413576752252027724 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from abc import abstractmethod from types import TracebackType from typing import Optional, Dict, MutableMapping, Type, Text, Callable, List, Tuple, IO from .headers import Headers from .types import WSGIApplication, WSGIEnvironment, StartResponse, InputStream, ErrorStream from .util import FileWrapper, guess_scheme _exc_info = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] def format_date_time(timestamp: Optional[float]) -> str: ... # undocumented if sys.version_info >= (3, 2): def read_environ() -> Dict[str, str]: ... class BaseHandler: wsgi_version: Tuple[int, int] # undocumented wsgi_multithread: bool wsgi_multiprocess: bool wsgi_run_once: bool origin_server: bool http_version: str server_software: Optional[str] os_environ: MutableMapping[str, str] wsgi_file_wrapper: Optional[Type[FileWrapper]] headers_class: Type[Headers] # undocumented traceback_limit: Optional[int] error_status: str error_headers: List[Tuple[Text, Text]] error_body: bytes def run(self, application: WSGIApplication) -> None: ... def setup_environ(self) -> None: ... def finish_response(self) -> None: ... def get_scheme(self) -> str: ... def set_content_length(self) -> None: ... def cleanup_headers(self) -> None: ... def start_response(self, status: Text, headers: List[Tuple[Text, Text]], exc_info: Optional[_exc_info] = ...) -> Callable[[bytes], None]: ... def send_preamble(self) -> None: ... def write(self, data: bytes) -> None: ... def sendfile(self) -> bool: ... def finish_content(self) -> None: ... 
def close(self) -> None: ... def send_headers(self) -> None: ... def result_is_file(self) -> bool: ... def client_is_modern(self) -> bool: ... def log_exception(self, exc_info: _exc_info) -> None: ... def handle_error(self) -> None: ... def error_output(self, environ: WSGIEnvironment, start_response: StartResponse) -> List[bytes]: ... @abstractmethod def _write(self, data: bytes) -> None: ... @abstractmethod def _flush(self) -> None: ... @abstractmethod def get_stdin(self) -> InputStream: ... @abstractmethod def get_stderr(self) -> ErrorStream: ... @abstractmethod def add_cgi_vars(self) -> None: ... class SimpleHandler(BaseHandler): stdin: InputStream stdout: IO[bytes] stderr: ErrorStream base_env: MutableMapping[str, str] def __init__(self, stdin: InputStream, stdout: IO[bytes], stderr: ErrorStream, environ: MutableMapping[str, str], multithread: bool = ..., multiprocess: bool = ...) -> None: ... def get_stdin(self) -> InputStream: ... def get_stderr(self) -> ErrorStream: ... def add_cgi_vars(self) -> None: ... def _write(self, data: bytes) -> None: ... def _flush(self) -> None: ... class BaseCGIHandler(SimpleHandler): ... class CGIHandler(BaseCGIHandler): def __init__(self) -> None: ... class IISCGIHandler(BaseCGIHandler): def __init__(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/wsgiref/headers.pyi0000644€tŠÔÚ€2›s®0000000234213576752252027535 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import overload, Pattern, Optional, List, Tuple _HeaderList = List[Tuple[str, str]] tspecials: Pattern[str] # undocumented class Headers: if sys.version_info < (3, 5): def __init__(self, headers: _HeaderList) -> None: ... else: def __init__(self, headers: Optional[_HeaderList] = ...) -> None: ... def __len__(self) -> int: ... def __setitem__(self, name: str, val: str) -> None: ... def __delitem__(self, name: str) -> None: ... def __getitem__(self, name: str) -> Optional[str]: ... 
if sys.version_info < (3,): def has_key(self, name: str) -> bool: ... def __contains__(self, name: str) -> bool: ... def get_all(self, name: str) -> List[str]: ... @overload def get(self, name: str, default: str) -> str: ... @overload def get(self, name: str, default: Optional[str] = ...) -> Optional[str]: ... def keys(self) -> List[str]: ... def values(self) -> List[str]: ... def items(self) -> _HeaderList: ... if sys.version_info >= (3,): def __bytes__(self) -> bytes: ... def setdefault(self, name: str, value: str) -> str: ... def add_header(self, _name: str, _value: Optional[str], **_params: Optional[str]) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/wsgiref/simple_server.pyi0000644€tŠÔÚ€2›s®0000000276313576752252031010 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Optional, List, Type, TypeVar, overload from .handlers import SimpleHandler from .types import WSGIApplication, WSGIEnvironment, StartResponse, ErrorStream if sys.version_info < (3,): from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer else: from http.server import HTTPServer, BaseHTTPRequestHandler server_version: str # undocumented sys_version: str # undocumented software_version: str # undocumented class ServerHandler(SimpleHandler): # undocumented server_software: str def close(self) -> None: ... class WSGIServer(HTTPServer): application: Optional[WSGIApplication] base_environ: WSGIEnvironment # only available after call to setup_environ() def setup_environ(self) -> None: ... def get_app(self) -> Optional[WSGIApplication]: ... def set_app(self, application: Optional[WSGIApplication]) -> None: ... class WSGIRequestHandler(BaseHTTPRequestHandler): server_version: str def get_environ(self) -> WSGIEnvironment: ... def get_stderr(self) -> ErrorStream: ... def handle(self) -> None: ... def demo_app(environ: WSGIEnvironment, start_response: StartResponse) -> List[bytes]: ... 
_S = TypeVar("_S", bound=WSGIServer) @overload def make_server(host: str, port: int, app: WSGIApplication, *, handler_class: Type[WSGIRequestHandler] = ...) -> WSGIServer: ... @overload def make_server(host: str, port: int, app: WSGIApplication, server_class: Type[_S], handler_class: Type[WSGIRequestHandler] = ...) -> _S: ... mypy-0.761/mypy/typeshed/stdlib/2and3/wsgiref/types.pyi0000644€tŠÔÚ€2›s®0000000331613576752252027270 0ustar jukkaDROPBOX\Domain Users00000000000000# Type declaration for a WSGI Function # # wsgiref/types.py doesn't exist and neither do the types defined in this # file. They are provided for type checking purposes. # # This means you cannot simply import wsgiref.types in your code. Instead, # use the `TYPE_CHECKING` flag from the typing module: # # from typing import TYPE_CHECKING # # if TYPE_CHECKING: # from wsgiref.types import WSGIApplication # # This import is now only taken into account by the type checker. Consequently, # you need to use 'WSGIApplication' and not simply WSGIApplication when type # hinting your code. Otherwise Python will raise NameErrors. from sys import _OptExcInfo from typing import Callable, Dict, Iterable, List, Any, Text, Protocol, Tuple, Optional class StartResponse(Protocol): def __call__(self, status: str, headers: List[Tuple[str, str]], exc_info: Optional[_OptExcInfo] = ...) -> Callable[[bytes], Any]: ... WSGIEnvironment = Dict[Text, Any] WSGIApplication = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] # WSGI input streams per PEP 3333 class InputStream(Protocol): def read(self, size: int = ...) -> bytes: ... def readline(self, size: int = ...) -> bytes: ... def readlines(self, hint: int = ...) -> List[bytes]: ... def __iter__(self) -> Iterable[bytes]: ... # WSGI error streams per PEP 3333 class ErrorStream(Protocol): def flush(self) -> None: ... def write(self, s: str) -> None: ... def writelines(self, seq: List[str]) -> None: ... class _Readable(Protocol): def read(self, size: int = ...) 
-> bytes: ... # Optional file wrapper in wsgi.file_wrapper class FileWrapper(Protocol): def __call__(self, file: _Readable, block_size: int = ...) -> Iterable[bytes]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/wsgiref/util.pyi0000644€tŠÔÚ€2›s®0000000156613576752252027106 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import IO, Any, Optional from .types import WSGIEnvironment class FileWrapper: filelike: IO[bytes] blksize: int def __init__(self, filelike: IO[bytes], bklsize: int = ...) -> None: ... def __getitem__(self, key: Any) -> bytes: ... def __iter__(self) -> FileWrapper: ... if sys.version_info < (3,): def next(self) -> bytes: ... else: def __next__(self) -> bytes: ... def close(self) -> None: ... # only exists if filelike.close exists def guess_scheme(environ: WSGIEnvironment) -> str: ... def application_uri(environ: WSGIEnvironment) -> str: ... def request_uri(environ: WSGIEnvironment, include_query: bool = ...) -> str: ... def shift_path_info(environ: WSGIEnvironment) -> Optional[str]: ... def setup_testing_defaults(environ: WSGIEnvironment) -> None: ... def is_hop_by_hop(header_name: str) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/2and3/wsgiref/validate.pyi0000644€tŠÔÚ€2›s®0000000350513576752252027715 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Iterable, Iterator, Optional, NoReturn, Callable from wsgiref.types import WSGIApplication, InputStream, ErrorStream class WSGIWarning(Warning): ... def validator(application: WSGIApplication) -> WSGIApplication: ... class InputWrapper: input: InputStream def __init__(self, wsgi_input: InputStream) -> None: ... if sys.version_info < (3,): def read(self, size: int = ...) -> bytes: ... def readline(self) -> bytes: ... else: def read(self, size: int) -> bytes: ... def readline(self, size: int = ...) -> bytes: ... def readlines(self, hint: int = ...) -> bytes: ... def __iter__(self) -> Iterable[bytes]: ... def close(self) -> NoReturn: ... 
class ErrorWrapper: errors: ErrorStream def __init__(self, wsgi_errors: ErrorStream) -> None: ... def write(self, s: str) -> None: ... def flush(self) -> None: ... def writelines(self, seq: Iterable[str]) -> None: ... def close(self) -> NoReturn: ... class WriteWrapper: writer: Callable[[bytes], Any] def __init__(self, wsgi_writer: Callable[[bytes], Any]) -> None: ... def __call__(self, s: bytes) -> None: ... class PartialIteratorWrapper: iterator: Iterator[bytes] def __init__(self, wsgi_iterator: Iterator[bytes]) -> None: ... def __iter__(self) -> IteratorWrapper: ... class IteratorWrapper: original_iterator: Iterator[bytes] iterator: Iterator[bytes] closed: bool check_start_response: Optional[bool] def __init__(self, wsgi_iterator: Iterator[bytes], check_start_response: Optional[bool]) -> None: ... def __iter__(self) -> IteratorWrapper: ... if sys.version_info < (3,): def next(self) -> bytes: ... else: def __next__(self) -> bytes: ... def close(self) -> None: ... def __del__(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/xdrlib.pyi0000644€tŠÔÚ€2›s®0000000446113576752252025744 0ustar jukkaDROPBOX\Domain Users00000000000000# Structs for xdrlib (Python 2 and 3) from typing import Callable, List, Sequence, TypeVar _T = TypeVar('_T') class Error(Exception): msg: str def __init__(self, msg: str) -> None: ... class ConversionError(Error): ... class Packer: def __init__(self) -> None: ... def reset(self) -> None: ... def get_buffer(self) -> bytes: ... def get_buf(self) -> bytes: ... def pack_uint(self, x: int) -> None: ... def pack_int(self, x: int) -> None: ... def pack_enum(self, x: int) -> None: ... def pack_bool(self, x: bool) -> None: ... def pack_uhyper(self, x: int) -> None: ... def pack_hyper(self, x: int) -> None: ... def pack_float(self, x: float) -> None: ... def pack_double(self, x: float) -> None: ... def pack_fstring(self, n: int, s: bytes) -> None: ... def pack_fopaque(self, n: int, s: bytes) -> None: ... 
def pack_string(self, s: bytes) -> None: ... def pack_opaque(self, s: bytes) -> None: ... def pack_bytes(self, s: bytes) -> None: ... def pack_list(self, list: Sequence[_T], pack_item: Callable[[_T], None]) -> None: ... def pack_farray(self, n: int, list: Sequence[_T], pack_item: Callable[[_T], None]) -> None: ... def pack_array(self, list: Sequence[_T], pack_item: Callable[[_T], None]) -> None: ... class Unpacker: def __init__(self, data: bytes) -> None: ... def reset(self, data: bytes) -> None: ... def get_position(self) -> int: ... def set_position(self, position: int) -> None: ... def get_buffer(self) -> bytes: ... def done(self) -> None: ... def unpack_uint(self) -> int: ... def unpack_int(self) -> int: ... def unpack_enum(self) -> int: ... def unpack_bool(self) -> bool: ... def unpack_uhyper(self) -> int: ... def unpack_hyper(self) -> int: ... def unpack_float(self) -> float: ... def unpack_double(self) -> float: ... def unpack_fstring(self, n: int) -> bytes: ... def unpack_fopaque(self, n: int) -> bytes: ... def unpack_string(self) -> bytes: ... def unpack_opaque(self) -> bytes: ... def unpack_bytes(self) -> bytes: ... def unpack_list(self, unpack_item: Callable[[], _T]) -> List[_T]: ... def unpack_farray(self, n: int, unpack_item: Callable[[], _T]) -> List[_T]: ... def unpack_array(self, unpack_item: Callable[[], _T]) -> List[_T]: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/xml/0000755€tŠÔÚ€2›s®0000000000013576752267024536 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/xml/__init__.pyi0000644€tŠÔÚ€2›s®0000000003613576752252027011 0ustar jukkaDROPBOX\Domain Users00000000000000import xml.parsers as parsers mypy-0.761/mypy/typeshed/stdlib/2and3/xml/etree/0000755€tŠÔÚ€2›s®0000000000013576752267025642 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/xml/etree/ElementInclude.pyi0000644€tŠÔÚ€2›s®0000000123113576752252031251 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for xml.etree.ElementInclude (Python 3.4) from typing import Union, Optional, Callable from xml.etree.ElementTree import Element XINCLUDE: str XINCLUDE_INCLUDE: str XINCLUDE_FALLBACK: str class FatalIncludeError(SyntaxError): ... def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str] = ...) -> Union[str, Element]: ... # TODO: loader is of type default_loader ie it takes a callable that has the # same signature as default_loader. But default_loader has a keyword argument # Which can't be represented using Callable... def include(elem: Element, loader: Optional[Callable[..., Union[str, Element]]] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/2and3/xml/etree/ElementPath.pyi0000644€tŠÔÚ€2›s®0000000310313576752252030562 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for xml.etree.ElementPath (Python 3.4) from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional from xml.etree.ElementTree import Element xpath_tokenizer_re: Pattern[str] _token = Tuple[str, str] _next = Callable[[], _token] _callback = Callable[[_SelectorContext, List[Element]], Generator[Element, None, None]] def xpath_tokenizer(pattern: str, namespaces: Optional[Dict[str, str]] = ...) -> Generator[_token, None, None]: ... def get_parent_map(context: _SelectorContext) -> Dict[Element, Element]: ... 
def prepare_child(next: _next, token: _token) -> _callback: ... def prepare_star(next: _next, token: _token) -> _callback: ... def prepare_self(next: _next, token: _token) -> _callback: ... def prepare_descendant(next: _next, token: _token) -> _callback: ... def prepare_parent(next: _next, token: _token) -> _callback: ... def prepare_predicate(next: _next, token: _token) -> _callback: ... ops: Dict[str, Callable[[_next, _token], _callback]] class _SelectorContext: parent_map: Dict[Element, Element] root: Element def __init__(self, root: Element) -> None: ... _T = TypeVar('_T') def iterfind(elem: Element, path: str, namespaces: Optional[Dict[str, str]] = ...) -> List[Element]: ... def find(elem: Element, path: str, namespaces: Optional[Dict[str, str]] = ...) -> Optional[Element]: ... def findall(elem: Element, path: str, namespaces: Optional[Dict[str, str]] = ...) -> List[Element]: ... def findtext(elem: Element, path: str, default: Optional[_T] = ..., namespaces: Optional[Dict[str, str]] = ...) -> Union[_T, str]: ... mypy-0.761/mypy/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi0000644€tŠÔÚ€2›s®0000003045513576752252030577 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for xml.etree.ElementTree from typing import ( Any, Callable, Dict, Generator, IO, ItemsView, Iterable, Iterator, KeysView, List, MutableSequence, Optional, Protocol, Sequence, Text, Tuple, TypeVar, Union, overload, ) import io import sys if sys.version_info < (3,) or sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal VERSION: str class ParseError(SyntaxError): code: int position: Tuple[int, int] def iselement(element: object) -> bool: ... _T = TypeVar('_T') # Type for parser inputs. 
Parser will accept any unicode/str/bytes and coerce, # and this is true in py2 and py3 (even fromstringlist() in python3 can be # called with a heterogeneous list) _parser_input_type = Union[bytes, Text] # Type for individual tag/attr/ns/text values in args to most functions. # In py2, the library accepts str or unicode everywhere and coerces # aggressively. # In py3, bytes is not coerced to str and so use of bytes is probably an error, # so we exclude it. (why? the parser never produces bytes when it parses XML, # so e.g., element.get(b'name') will always return None for parsed XML, even if # there is a 'name' attribute.) _str_argument_type = Union[str, Text] # Type for return values from individual tag/attr/text values if sys.version_info >= (3,): # note: in python3, everything comes out as str, yay: _str_result_type = str else: # in python2, if the tag/attribute/text wasn't decode-able as ascii, it # comes out as a unicode string; otherwise it comes out as str. (see # _fixtext function in the source). Client code knows best: _str_result_type = Any _file_or_filename = Union[str, bytes, int, IO[Any]] if sys.version_info >= (3, 8): class _Writeable(Protocol): def write(self, __s: str) -> Any: ... @overload def canonicalize( xml_data: Optional[_parser_input_type] = ..., *, out: None = ..., from_file: Optional[_file_or_filename] = ..., with_comments: bool = ..., strip_text: bool = ..., rewrite_prefixes: bool = ..., qname_aware_tags: Optional[Iterable[str]] = ..., qname_aware_attrs: Optional[Iterable[str]] = ..., exclude_attrs: Optional[Iterable[str]] = ..., exclude_tags: Optional[Iterable[str]] = ..., ) -> str: ... 
@overload def canonicalize( xml_data: Optional[_parser_input_type] = ..., *, out: _Writeable, from_file: Optional[_file_or_filename] = ..., with_comments: bool = ..., strip_text: bool = ..., rewrite_prefixes: bool = ..., qname_aware_tags: Optional[Iterable[str]] = ..., qname_aware_attrs: Optional[Iterable[str]] = ..., exclude_attrs: Optional[Iterable[str]] = ..., exclude_tags: Optional[Iterable[str]] = ..., ) -> None: ... class Element(MutableSequence[Element]): tag: _str_result_type attrib: Dict[_str_result_type, _str_result_type] text: Optional[_str_result_type] tail: Optional[_str_result_type] def __init__(self, tag: Union[_str_argument_type, Callable[..., Element]], attrib: Dict[_str_argument_type, _str_argument_type] = ..., **extra: _str_argument_type) -> None: ... def append(self, subelement: Element) -> None: ... def clear(self) -> None: ... def copy(self) -> Element: ... def extend(self, elements: Iterable[Element]) -> None: ... def find(self, path: _str_argument_type, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> Optional[Element]: ... def findall(self, path: _str_argument_type, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> List[Element]: ... @overload def findtext(self, path: _str_argument_type, *, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> Optional[_str_result_type]: ... @overload def findtext(self, path: _str_argument_type, default: _T, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> Union[_T, _str_result_type]: ... @overload def get(self, key: _str_argument_type) -> Optional[_str_result_type]: ... @overload def get(self, key: _str_argument_type, default: _T) -> Union[_str_result_type, _T]: ... def getchildren(self) -> List[Element]: ... def getiterator(self, tag: Optional[_str_argument_type] = ...) -> List[Element]: ... if sys.version_info >= (3, 2): def insert(self, index: int, subelement: Element) -> None: ... 
else: def insert(self, index: int, element: Element) -> None: ... def items(self) -> ItemsView[_str_result_type, _str_result_type]: ... def iter(self, tag: Optional[_str_argument_type] = ...) -> Generator[Element, None, None]: ... def iterfind(self, path: _str_argument_type, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> List[Element]: ... def itertext(self) -> Generator[_str_result_type, None, None]: ... def keys(self) -> KeysView[_str_result_type]: ... def makeelement(self, tag: _str_argument_type, attrib: Dict[_str_argument_type, _str_argument_type]) -> Element: ... def remove(self, subelement: Element) -> None: ... def set(self, key: _str_argument_type, value: _str_argument_type) -> None: ... def __bool__(self) -> bool: ... def __delitem__(self, i: Union[int, slice]) -> None: ... @overload def __getitem__(self, i: int) -> Element: ... @overload def __getitem__(self, s: slice) -> MutableSequence[Element]: ... def __len__(self) -> int: ... @overload def __setitem__(self, i: int, o: Element) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[Element]) -> None: ... def SubElement(parent: Element, tag: _str_argument_type, attrib: Dict[_str_argument_type, _str_argument_type] = ..., **extra: _str_argument_type) -> Element: ... def Comment(text: Optional[_str_argument_type] = ...) -> Element: ... def ProcessingInstruction(target: _str_argument_type, text: Optional[_str_argument_type] = ...) -> Element: ... PI: Callable[..., Element] class QName: text: str def __init__(self, text_or_uri: _str_argument_type, tag: Optional[_str_argument_type] = ...) -> None: ... class ElementTree: def __init__(self, element: Optional[Element] = ..., file: Optional[_file_or_filename] = ...) -> None: ... def getroot(self) -> Element: ... def parse(self, source: _file_or_filename, parser: Optional[XMLParser] = ...) -> Element: ... def iter(self, tag: Optional[_str_argument_type] = ...) -> Generator[Element, None, None]: ... 
def getiterator(self, tag: Optional[_str_argument_type] = ...) -> List[Element]: ... def find(self, path: _str_argument_type, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> Optional[Element]: ... @overload def findtext(self, path: _str_argument_type, *, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> Optional[_str_result_type]: ... @overload def findtext(self, path: _str_argument_type, default: _T, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> Union[_T, _str_result_type]: ... def findall(self, path: _str_argument_type, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> List[Element]: ... def iterfind(self, path: _str_argument_type, namespaces: Optional[Dict[_str_argument_type, _str_argument_type]] = ...) -> List[Element]: ... if sys.version_info >= (3, 4): def write(self, file_or_filename: _file_or_filename, encoding: Optional[str] = ..., xml_declaration: Optional[bool] = ..., default_namespace: Optional[_str_argument_type] = ..., method: Optional[str] = ..., *, short_empty_elements: bool = ...) -> None: ... else: def write(self, file_or_filename: _file_or_filename, encoding: Optional[str] = ..., xml_declaration: Optional[bool] = ..., default_namespace: Optional[_str_argument_type] = ..., method: Optional[str] = ...) -> None: ... def write_c14n(self, file: _file_or_filename) -> None: ... def register_namespace(prefix: _str_argument_type, uri: _str_argument_type) -> None: ... if sys.version_info >= (3,): @overload def tostring(element: Element, encoding: None = ..., method: Optional[str] = ..., *, short_empty_elements: bool = ...) -> bytes: ... @overload def tostring(element: Element, encoding: Literal["unicode"], method: Optional[str] = ..., *, short_empty_elements: bool = ...) -> str: ... @overload def tostring(element: Element, encoding: str, method: Optional[str] = ..., *, short_empty_elements: bool = ...) -> Any: ... 
@overload def tostringlist(element: Element, encoding: None = ..., method: Optional[str] = ..., *, short_empty_elements: bool = ...) -> List[bytes]: ... @overload def tostringlist(element: Element, encoding: Literal["unicode"], method: Optional[str] = ..., *, short_empty_elements: bool = ...) -> List[str]: ... @overload def tostringlist(element: Element, encoding: str, method: Optional[str] = ..., *, short_empty_elements: bool = ...) -> List[Any]: ... else: def tostring(element: Element, encoding: Optional[str] = ..., method: Optional[str] = ...) -> bytes: ... def tostringlist(element: Element, encoding: Optional[str] = ..., method: Optional[str] = ...) -> List[bytes]: ... def dump(elem: Element) -> None: ... def parse(source: _file_or_filename, parser: Optional[XMLParser] = ...) -> ElementTree: ... def iterparse(source: _file_or_filename, events: Optional[Sequence[str]] = ..., parser: Optional[XMLParser] = ...) -> Iterator[Tuple[str, Any]]: ... if sys.version_info >= (3, 4): class XMLPullParser: def __init__(self, events: Optional[Sequence[str]] = ..., *, _parser: Optional[XMLParser] = ...) -> None: ... def feed(self, data: bytes) -> None: ... def close(self) -> None: ... def read_events(self) -> Iterator[Tuple[str, Element]]: ... def XML(text: _parser_input_type, parser: Optional[XMLParser] = ...) -> Element: ... def XMLID(text: _parser_input_type, parser: Optional[XMLParser] = ...) -> Tuple[Element, Dict[_str_result_type, Element]]: ... # This is aliased to XML in the source. fromstring = XML def fromstringlist(sequence: Sequence[_parser_input_type], parser: Optional[XMLParser] = ...) -> Element: ... # This type is both not precise enough and too precise. The TreeBuilder # requires the elementfactory to accept tag and attrs in its args and produce # some kind of object that has .text and .tail properties. # I've chosen to constrain the ElementFactory to always produce an Element # because that is how almost everyone will use it. 
# Unfortunately, the type of the factory arguments is dependent on how # TreeBuilder is called by client code (they could pass strs, bytes or whatever); # but we don't want to use a too-broad type, or it would be too hard to write # elementfactories. _ElementFactory = Callable[[Any, Dict[Any, Any]], Element] class TreeBuilder: def __init__(self, element_factory: Optional[_ElementFactory] = ...) -> None: ... def close(self) -> Element: ... def data(self, data: _parser_input_type) -> None: ... def start(self, tag: _parser_input_type, attrs: Dict[_parser_input_type, _parser_input_type]) -> Element: ... def end(self, tag: _parser_input_type) -> Element: ... if sys.version_info >= (3, 8): class C14NWriterTarget: def __init__( self, write: Callable[[str], Any], *, with_comments: bool = ..., strip_text: bool = ..., rewrite_prefixes: bool = ..., qname_aware_tags: Optional[Iterable[str]] = ..., qname_aware_attrs: Optional[Iterable[str]] = ..., exclude_attrs: Optional[Iterable[str]] = ..., exclude_tags: Optional[Iterable[str]] = ..., ) -> None: ... class XMLParser: parser: Any target: TreeBuilder # TODO-what is entity used for??? entity: Any version: str if sys.version_info >= (3, 8): def __init__(self, *, target: Optional[TreeBuilder] = ..., encoding: Optional[str] = ...) -> None: ... else: def __init__(self, html: int = ..., target: Optional[TreeBuilder] = ..., encoding: Optional[str] = ...) -> None: ... def doctype(self, name: str, pubid: str, system: str) -> None: ... def close(self) -> Element: ... def feed(self, data: _parser_input_type) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/2and3/xml/etree/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252030104 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/xml/etree/cElementTree.pyi0000644€tŠÔÚ€2›s®0000000014313576752252030731 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for xml.etree.cElementTree (Python 3.4) from xml.etree.ElementTree import * # noqa: F403 mypy-0.761/mypy/typeshed/stdlib/2and3/xml/parsers/0000755€tŠÔÚ€2›s®0000000000013576752267026215 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/xml/parsers/__init__.pyi0000644€tŠÔÚ€2›s®0000000004213576752252030465 0ustar jukkaDROPBOX\Domain Users00000000000000import xml.parsers.expat as expat mypy-0.761/mypy/typeshed/stdlib/2and3/xml/parsers/expat/0000755€tŠÔÚ€2›s®0000000000013576752267027336 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/xml/parsers/expat/__init__.pyi0000644€tŠÔÚ€2›s®0000000002613576752252031610 0ustar jukkaDROPBOX\Domain Users00000000000000from pyexpat import * mypy-0.761/mypy/typeshed/stdlib/2and3/xml/parsers/expat/errors.pyi0000644€tŠÔÚ€2›s®0000000003513576752252031365 0ustar jukkaDROPBOX\Domain Users00000000000000from pyexpat.errors import * mypy-0.761/mypy/typeshed/stdlib/2and3/xml/parsers/expat/model.pyi0000644€tŠÔÚ€2›s®0000000003413576752252031150 0ustar jukkaDROPBOX\Domain Users00000000000000from pyexpat.model import * mypy-0.761/mypy/typeshed/stdlib/2and3/xml/sax/0000755€tŠÔÚ€2›s®0000000000013576752267025331 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/2and3/xml/sax/__init__.pyi0000644€tŠÔÚ€2›s®0000000255513576752252027614 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, NoReturn, Optional, Text, Union, IO import xml.sax from xml.sax.xmlreader import InputSource, Locator from xml.sax.handler import ContentHandler, ErrorHandler class SAXException(Exception): def __init__(self, msg: str, exception: 
Optional[Exception] = ...) -> None: ... def getMessage(self) -> str: ... def getException(self) -> Exception: ... def __getitem__(self, ix: Any) -> NoReturn: ... class SAXParseException(SAXException): def __init__(self, msg: str, exception: Exception, locator: Locator) -> None: ... def getColumnNumber(self) -> int: ... def getLineNumber(self) -> int: ... def getPublicId(self): ... def getSystemId(self): ... class SAXNotRecognizedException(SAXException): ... class SAXNotSupportedException(SAXException): ... class SAXReaderNotAvailable(SAXNotSupportedException): ... default_parser_list: List[str] def make_parser(parser_list: List[str] = ...) -> xml.sax.xmlreader.XMLReader: ... def parse(source: Union[str, IO[str]], handler: xml.sax.handler.ContentHandler, errorHandler: xml.sax.handler.ErrorHandler = ...) -> None: ... def parseString(string: Union[bytes, Text], handler: xml.sax.handler.ContentHandler, errorHandler: Optional[xml.sax.handler.ErrorHandler] = ...) -> None: ... def _create_parser(parser_name: str) -> xml.sax.xmlreader.XMLReader: ... mypy-0.761/mypy/typeshed/stdlib/2and3/xml/sax/handler.pyi0000644€tŠÔÚ€2›s®0000000255713576752252027474 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any version: Any class ErrorHandler: def error(self, exception): ... def fatalError(self, exception): ... def warning(self, exception): ... class ContentHandler: def __init__(self) -> None: ... def setDocumentLocator(self, locator): ... def startDocument(self): ... def endDocument(self): ... def startPrefixMapping(self, prefix, uri): ... def endPrefixMapping(self, prefix): ... def startElement(self, name, attrs): ... def endElement(self, name): ... def startElementNS(self, name, qname, attrs): ... def endElementNS(self, name, qname): ... def characters(self, content): ... def ignorableWhitespace(self, whitespace): ... def processingInstruction(self, target, data): ... def skippedEntity(self, name): ... 
class DTDHandler: def notationDecl(self, name, publicId, systemId): ... def unparsedEntityDecl(self, name, publicId, systemId, ndata): ... class EntityResolver: def resolveEntity(self, publicId, systemId): ... feature_namespaces: Any feature_namespace_prefixes: Any feature_string_interning: Any feature_validation: Any feature_external_ges: Any feature_external_pes: Any all_features: Any property_lexical_handler: Any property_declaration_handler: Any property_dom_node: Any property_xml_string: Any property_encoding: Any property_interning_dict: Any all_properties: Any mypy-0.761/mypy/typeshed/stdlib/2and3/xml/sax/saxutils.pyi0000644€tŠÔÚ€2›s®0000000446513576752252027733 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Mapping, Text from xml.sax import handler from xml.sax import xmlreader def escape(data: Text, entities: Mapping[Text, Text] = ...) -> Text: ... def unescape(data: Text, entities: Mapping[Text, Text] = ...) -> Text: ... def quoteattr(data: Text, entities: Mapping[Text, Text] = ...) -> Text: ... class XMLGenerator(handler.ContentHandler): if sys.version_info >= (3, 0): def __init__(self, out=..., encoding=..., short_empty_elements: bool = ...) -> None: ... else: def __init__(self, out=..., encoding=...) -> None: ... def startDocument(self): ... def endDocument(self): ... def startPrefixMapping(self, prefix, uri): ... def endPrefixMapping(self, prefix): ... def startElement(self, name, attrs): ... def endElement(self, name): ... def startElementNS(self, name, qname, attrs): ... def endElementNS(self, name, qname): ... def characters(self, content): ... def ignorableWhitespace(self, content): ... def processingInstruction(self, target, data): ... class XMLFilterBase(xmlreader.XMLReader): def __init__(self, parent=...) -> None: ... def error(self, exception): ... def fatalError(self, exception): ... def warning(self, exception): ... def setDocumentLocator(self, locator): ... def startDocument(self): ... def endDocument(self): ... 
def startPrefixMapping(self, prefix, uri): ... def endPrefixMapping(self, prefix): ... def startElement(self, name, attrs): ... def endElement(self, name): ... def startElementNS(self, name, qname, attrs): ... def endElementNS(self, name, qname): ... def characters(self, content): ... def ignorableWhitespace(self, chars): ... def processingInstruction(self, target, data): ... def skippedEntity(self, name): ... def notationDecl(self, name, publicId, systemId): ... def unparsedEntityDecl(self, name, publicId, systemId, ndata): ... def resolveEntity(self, publicId, systemId): ... def parse(self, source): ... def setLocale(self, locale): ... def getFeature(self, name): ... def setFeature(self, name, state): ... def getProperty(self, name): ... def setProperty(self, name, value): ... def getParent(self): ... def setParent(self, parent): ... def prepare_input_source(source, base=...): ... mypy-0.761/mypy/typeshed/stdlib/2and3/xml/sax/xmlreader.pyi0000644€tŠÔÚ€2›s®0000000442713576752252030040 0ustar jukkaDROPBOX\Domain Users00000000000000class XMLReader: def __init__(self) -> None: ... def parse(self, source): ... def getContentHandler(self): ... def setContentHandler(self, handler): ... def getDTDHandler(self): ... def setDTDHandler(self, handler): ... def getEntityResolver(self): ... def setEntityResolver(self, resolver): ... def getErrorHandler(self): ... def setErrorHandler(self, handler): ... def setLocale(self, locale): ... def getFeature(self, name): ... def setFeature(self, name, state): ... def getProperty(self, name): ... def setProperty(self, name, value): ... class IncrementalParser(XMLReader): def __init__(self, bufsize=...) -> None: ... def parse(self, source): ... def feed(self, data): ... def prepareParser(self, source): ... def close(self): ... def reset(self): ... class Locator: def getColumnNumber(self): ... def getLineNumber(self): ... def getPublicId(self): ... def getSystemId(self): ... class InputSource: def __init__(self, system_id=...) 
-> None: ... def setPublicId(self, public_id): ... def getPublicId(self): ... def setSystemId(self, system_id): ... def getSystemId(self): ... def setEncoding(self, encoding): ... def getEncoding(self): ... def setByteStream(self, bytefile): ... def getByteStream(self): ... def setCharacterStream(self, charfile): ... def getCharacterStream(self): ... class AttributesImpl: def __init__(self, attrs) -> None: ... def getLength(self): ... def getType(self, name): ... def getValue(self, name): ... def getValueByQName(self, name): ... def getNameByQName(self, name): ... def getQNameByName(self, name): ... def getNames(self): ... def getQNames(self): ... def __len__(self): ... def __getitem__(self, name): ... def keys(self): ... def has_key(self, name): ... def __contains__(self, name): ... def get(self, name, alternative=...): ... def copy(self): ... def items(self): ... def values(self): ... class AttributesNSImpl(AttributesImpl): def __init__(self, attrs, qnames) -> None: ... def getValueByQName(self, name): ... def getNameByQName(self, name): ... def getQNameByName(self, name): ... def getQNames(self): ... def copy(self): ... mypy-0.761/mypy/typeshed/stdlib/2and3/zipfile.pyi0000644€tŠÔÚ€2›s®0000001141213576752252026114 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for zipfile from typing import Callable, Dict, IO, Iterable, List, Optional, Text, Tuple, Type, Union, Sequence, Pattern from types import TracebackType import io import os import sys if sys.version_info >= (3, 6): _Path = Union[os.PathLike[str], str] else: _Path = Text _SZI = Union[Text, ZipInfo] _DT = Tuple[int, int, int, int, int, int] if sys.version_info >= (3,): class BadZipFile(Exception): ... BadZipfile = BadZipFile else: class BadZipfile(Exception): ... error = BadZipfile class LargeZipFile(Exception): ... class ZipExtFile(io.BufferedIOBase): MAX_N: int = ... MIN_READ_SIZE: int = ... if sys.version_info < (3, 6): PATTERN: Pattern[str] = ... 
if sys.version_info >= (3, 7): MAX_SEEK_READ: int = ... newlines: Optional[List[bytes]] mode: str name: str def __init__( self, fileobj: IO[bytes], mode: str, zipinfo: ZipInfo, decrypter: Optional[Callable[[Sequence[int]], bytes]] = ..., close_fileobj: bool = ..., ) -> None: ... def __repr__(self) -> str: ... def peek(self, n: int = ...) -> bytes: ... def read1(self, n: Optional[int]) -> bytes: ... # type: ignore class ZipFile: filename: Optional[Text] debug: int comment: bytes filelist: List[ZipInfo] fp: IO[bytes] NameToInfo: Dict[Text, ZipInfo] start_dir: int # undocumented if sys.version_info >= (3, 8): def __init__( self, file: Union[_Path, IO[bytes]], mode: Text = ..., compression: int = ..., allowZip64: bool = ..., compresslevel: Optional[int] = ..., *, strict_timestamps: bool = ..., ) -> None: ... elif sys.version_info >= (3, 7): def __init__( self, file: Union[_Path, IO[bytes]], mode: Text = ..., compression: int = ..., allowZip64: bool = ..., compresslevel: Optional[int] = ..., ) -> None: ... else: def __init__( self, file: Union[_Path, IO[bytes]], mode: Text = ..., compression: int = ..., allowZip64: bool = ... ) -> None: ... def __enter__(self) -> ZipFile: ... def __exit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType] ) -> None: ... def close(self) -> None: ... def getinfo(self, name: Text) -> ZipInfo: ... def infolist(self) -> List[ZipInfo]: ... def namelist(self) -> List[Text]: ... def open(self, name: _SZI, mode: Text = ..., pwd: Optional[bytes] = ...) -> IO[bytes]: ... def extract(self, member: _SZI, path: Optional[_SZI] = ..., pwd: bytes = ...) -> str: ... def extractall( self, path: Optional[_Path] = ..., members: Optional[Iterable[Text]] = ..., pwd: Optional[bytes] = ... ) -> None: ... def printdir(self) -> None: ... def setpassword(self, pwd: bytes) -> None: ... def read(self, name: _SZI, pwd: Optional[bytes] = ...) -> bytes: ... def testzip(self) -> Optional[str]: ... 
def write(self, filename: _Path, arcname: Optional[_Path] = ..., compress_type: Optional[int] = ...) -> None: ... if sys.version_info >= (3,): def writestr(self, zinfo_or_arcname: _SZI, data: Union[bytes, str], compress_type: Optional[int] = ...) -> None: ... else: def writestr(self, zinfo_or_arcname: _SZI, bytes: bytes, compress_type: Optional[int] = ...) -> None: ... class PyZipFile(ZipFile): if sys.version_info >= (3,): def __init__( self, file: Union[str, IO[bytes]], mode: str = ..., compression: int = ..., allowZip64: bool = ..., opimize: int = ... ) -> None: ... def writepy(self, pathname: str, basename: str = ..., filterfunc: Optional[Callable[[str], bool]] = ...) -> None: ... else: def writepy(self, pathname: Text, basename: Text = ...) -> None: ... class ZipInfo: filename: Text date_time: _DT compress_type: int comment: bytes extra: bytes create_system: int create_version: int extract_version: int reserved: int flag_bits: int volume: int internal_attr: int external_attr: int header_offset: int CRC: int compress_size: int file_size: int def __init__(self, filename: Optional[Text] = ..., date_time: Optional[_DT] = ...) -> None: ... if sys.version_info >= (3, 6): def is_dir(self) -> bool: ... @classmethod def from_file(cls, filename: _Path, arcname: Optional[_Path] = ...) -> ZipInfo: ... def FileHeader(self, zip64: Optional[bool] = ...) -> bytes: ... def is_zipfile(filename: Union[_Path, IO[bytes]]) -> bool: ... ZIP_STORED: int ZIP_DEFLATED: int if sys.version_info >= (3, 3): ZIP_BZIP2: int ZIP_LZMA: int mypy-0.761/mypy/typeshed/stdlib/2and3/zipimport.pyi0000644€tŠÔÚ€2›s®0000000126113576752252026510 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'zipimport' module.""" from typing import Optional from types import CodeType, ModuleType class ZipImportError(ImportError): ... class zipimporter(object): archive: str prefix: str def __init__(self, archivepath: str) -> None: ... def find_module(self, fullname: str, path: str = ...) 
-> Optional[zipimporter]: ... def get_code(self, fullname: str) -> CodeType: ... def get_data(self, pathname: str) -> str: ... def get_filename(self, fullname: str) -> str: ... def get_source(self, fullname: str) -> Optional[str]: ... def is_package(self, fullname: str) -> bool: ... def load_module(self, fullname: str) -> ModuleType: ... mypy-0.761/mypy/typeshed/stdlib/2and3/zlib.pyi0000644€tŠÔÚ€2›s®0000000316613576752252025421 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for zlib import sys DEFLATED: int DEF_MEM_LEVEL: int MAX_WBITS: int ZLIB_VERSION: str Z_BEST_COMPRESSION: int Z_BEST_SPEED: int Z_DEFAULT_COMPRESSION: int Z_DEFAULT_STRATEGY: int Z_FILTERED: int Z_FINISH: int Z_FULL_FLUSH: int Z_HUFFMAN_ONLY: int Z_NO_FLUSH: int Z_SYNC_FLUSH: int if sys.version_info >= (3,): DEF_BUF_SIZE: int ZLIB_RUNTIME_VERSION: str class error(Exception): ... class _Compress: def compress(self, data: bytes) -> bytes: ... def flush(self, mode: int = ...) -> bytes: ... def copy(self) -> _Compress: ... class _Decompress: unused_data: bytes unconsumed_tail: bytes if sys.version_info >= (3,): eof: bool def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... def flush(self, length: int = ...) -> bytes: ... def copy(self) -> _Decompress: ... def adler32(data: bytes, value: int = ...) -> int: ... def compress(data: bytes, level: int = ...) -> bytes: ... if sys.version_info >= (3,): def compressobj(level: int = ..., method: int = ..., wbits: int = ..., memLevel: int = ..., strategy: int = ..., zdict: bytes = ...) -> _Compress: ... else: def compressobj(level: int = ..., method: int = ..., wbits: int = ..., memlevel: int = ..., strategy: int = ...) -> _Compress: ... def crc32(data: bytes, value: int = ...) -> int: ... def decompress(data: bytes, wbits: int = ..., bufsize: int = ...) -> bytes: ... if sys.version_info >= (3,): def decompressobj(wbits: int = ..., zdict: bytes = ...) -> _Decompress: ... else: def decompressobj(wbits: int = ...) -> _Decompress: ... 
mypy-0.761/mypy/typeshed/stdlib/3/0000755€tŠÔÚ€2›s®0000000000013576752267023171 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/_ast.pyi0000644€tŠÔÚ€2›s®0000001767413576752252024653 0ustar jukkaDROPBOX\Domain Users00000000000000import sys import typing from typing import Any, Optional, ClassVar PyCF_ONLY_AST: int if sys.version_info >= (3, 8): PyCF_TYPE_COMMENTS: int PyCF_ALLOW_TOP_LEVEL_AWAIT: int _identifier = str class AST: _attributes: ClassVar[typing.Tuple[str, ...]] _fields: ClassVar[typing.Tuple[str, ...]] def __init__(self, *args: Any, **kwargs: Any) -> None: ... # TODO: Not all nodes have all of the following attributes lineno: int col_offset: int if sys.version_info >= (3, 8): end_lineno: Optional[int] end_col_offset: Optional[int] type_comment: Optional[str] class mod(AST): ... if sys.version_info >= (3, 8): class type_ignore(AST): ... class TypeIgnore(type_ignore): ... class FunctionType(mod): argtypes: typing.List[expr] returns: expr class Module(mod): body: typing.List[stmt] if sys.version_info >= (3, 7): docstring: Optional[str] if sys.version_info >= (3, 8): type_ignores: typing.List[TypeIgnore] class Interactive(mod): body: typing.List[stmt] class Expression(mod): body: expr class Suite(mod): body: typing.List[stmt] class stmt(AST): ... 
class FunctionDef(stmt): name: _identifier args: arguments body: typing.List[stmt] decorator_list: typing.List[expr] returns: Optional[expr] if sys.version_info >= (3, 7): docstring: Optional[str] class AsyncFunctionDef(stmt): name: _identifier args: arguments body: typing.List[stmt] decorator_list: typing.List[expr] returns: Optional[expr] if sys.version_info >= (3, 7): docstring: Optional[str] class ClassDef(stmt): name: _identifier bases: typing.List[expr] keywords: typing.List[keyword] body: typing.List[stmt] decorator_list: typing.List[expr] if sys.version_info >= (3, 7): docstring: Optional[str] class Return(stmt): value: Optional[expr] class Delete(stmt): targets: typing.List[expr] class Assign(stmt): targets: typing.List[expr] value: expr class AugAssign(stmt): target: expr op: operator value: expr if sys.version_info >= (3, 6): class AnnAssign(stmt): target: expr annotation: expr value: Optional[expr] simple: int class For(stmt): target: expr iter: expr body: typing.List[stmt] orelse: typing.List[stmt] class AsyncFor(stmt): target: expr iter: expr body: typing.List[stmt] orelse: typing.List[stmt] class While(stmt): test: expr body: typing.List[stmt] orelse: typing.List[stmt] class If(stmt): test: expr body: typing.List[stmt] orelse: typing.List[stmt] class With(stmt): items: typing.List[withitem] body: typing.List[stmt] class AsyncWith(stmt): items: typing.List[withitem] body: typing.List[stmt] class Raise(stmt): exc: Optional[expr] cause: Optional[expr] class Try(stmt): body: typing.List[stmt] handlers: typing.List[ExceptHandler] orelse: typing.List[stmt] finalbody: typing.List[stmt] class Assert(stmt): test: expr msg: Optional[expr] class Import(stmt): names: typing.List[alias] class ImportFrom(stmt): module: Optional[_identifier] names: typing.List[alias] level: int class Global(stmt): names: typing.List[_identifier] class Nonlocal(stmt): names: typing.List[_identifier] class Expr(stmt): value: expr class Pass(stmt): ... class Break(stmt): ... 
class Continue(stmt): ... class slice(AST): ... _slice = slice # this lets us type the variable named 'slice' below class Slice(slice): lower: Optional[expr] upper: Optional[expr] step: Optional[expr] class ExtSlice(slice): dims: typing.List[slice] class Index(slice): value: expr class expr(AST): ... class BoolOp(expr): op: boolop values: typing.List[expr] class BinOp(expr): left: expr op: operator right: expr class UnaryOp(expr): op: unaryop operand: expr class Lambda(expr): args: arguments body: expr class IfExp(expr): test: expr body: expr orelse: expr class Dict(expr): keys: typing.List[Optional[expr]] values: typing.List[expr] class Set(expr): elts: typing.List[expr] class ListComp(expr): elt: expr generators: typing.List[comprehension] class SetComp(expr): elt: expr generators: typing.List[comprehension] class DictComp(expr): key: expr value: expr generators: typing.List[comprehension] class GeneratorExp(expr): elt: expr generators: typing.List[comprehension] class Await(expr): value: expr class Yield(expr): value: Optional[expr] class YieldFrom(expr): value: expr class Compare(expr): left: expr ops: typing.List[cmpop] comparators: typing.List[expr] class Call(expr): func: expr args: typing.List[expr] keywords: typing.List[keyword] class Num(expr): # Deprecated in 3.8; use Constant n: complex class Str(expr): # Deprecated in 3.8; use Constant s: str if sys.version_info >= (3, 6): class FormattedValue(expr): value: expr conversion: Optional[int] format_spec: Optional[expr] class JoinedStr(expr): values: typing.List[expr] class Bytes(expr): # Deprecated in 3.8; use Constant s: bytes class NameConstant(expr): value: Any if sys.version_info >= (3, 6): class Constant(expr): value: Any # None, str, bytes, bool, int, float, complex, Ellipsis kind: Optional[str] # Aliases for value, for backwards compatibility s: Any n: complex if sys.version_info >= (3, 8): class NamedExpr(expr): target: expr value: expr class Ellipsis(expr): ... 
class Attribute(expr): value: expr attr: _identifier ctx: expr_context class Subscript(expr): value: expr slice: _slice ctx: expr_context class Starred(expr): value: expr ctx: expr_context class Name(expr): id: _identifier ctx: expr_context class List(expr): elts: typing.List[expr] ctx: expr_context class Tuple(expr): elts: typing.List[expr] ctx: expr_context class expr_context(AST): ... class AugLoad(expr_context): ... class AugStore(expr_context): ... class Del(expr_context): ... class Load(expr_context): ... class Param(expr_context): ... class Store(expr_context): ... class boolop(AST): ... class And(boolop): ... class Or(boolop): ... class operator(AST): ... class Add(operator): ... class BitAnd(operator): ... class BitOr(operator): ... class BitXor(operator): ... class Div(operator): ... class FloorDiv(operator): ... class LShift(operator): ... class Mod(operator): ... class Mult(operator): ... class MatMult(operator): ... class Pow(operator): ... class RShift(operator): ... class Sub(operator): ... class unaryop(AST): ... class Invert(unaryop): ... class Not(unaryop): ... class UAdd(unaryop): ... class USub(unaryop): ... class cmpop(AST): ... class Eq(cmpop): ... class Gt(cmpop): ... class GtE(cmpop): ... class In(cmpop): ... class Is(cmpop): ... class IsNot(cmpop): ... class Lt(cmpop): ... class LtE(cmpop): ... class NotEq(cmpop): ... class NotIn(cmpop): ... class comprehension(AST): target: expr iter: expr ifs: typing.List[expr] if sys.version_info >= (3, 6): is_async: int class excepthandler(AST): ... 
class ExceptHandler(excepthandler): type: Optional[expr] name: Optional[_identifier] body: typing.List[stmt] class arguments(AST): if sys.version_info >= (3, 8): posonlyargs: typing.List[arg] args: typing.List[arg] vararg: Optional[arg] kwonlyargs: typing.List[arg] kw_defaults: typing.List[expr] kwarg: Optional[arg] defaults: typing.List[expr] class arg(AST): arg: _identifier annotation: Optional[expr] class keyword(AST): arg: Optional[_identifier] value: expr class alias(AST): name: _identifier asname: Optional[_identifier] class withitem(AST): context_expr: expr optional_vars: Optional[expr] mypy-0.761/mypy/typeshed/stdlib/3/_compression.pyi0000644€tŠÔÚ€2›s®0000000066413576752252026414 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import io BUFFER_SIZE: Any class BaseStream(io.BufferedIOBase): ... class DecompressReader(io.RawIOBase): def readable(self): ... def __init__(self, fp, decomp_factory, trailing_error=..., **decomp_args): ... def close(self): ... def seekable(self): ... def readinto(self, b): ... def read(self, size: int = ...): ... def seek(self, offset, whence=...): ... def tell(self): ... mypy-0.761/mypy/typeshed/stdlib/3/_dummy_thread.pyi0000644€tŠÔÚ€2›s®0000000144013576752252026526 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, NoReturn, Optional, Tuple TIMEOUT_MAX: int error = RuntimeError def start_new_thread(function: Callable[..., Any], args: Tuple[Any, ...], kwargs: Dict[str, Any] = ...) -> None: ... def exit() -> NoReturn: ... def get_ident() -> int: ... def allocate_lock() -> LockType: ... def stack_size(size: Optional[int] = ...) -> int: ... class LockType(object): locked_status: bool def __init__(self) -> None: ... def acquire(self, waitflag: Optional[bool] = ..., timeout: int = ...) -> bool: ... def __enter__(self, waitflag: Optional[bool] = ..., timeout: int = ...) -> bool: ... def __exit__(self, typ: Any, val: Any, tb: Any) -> None: ... def release(self) -> bool: ... 
def locked(self) -> bool: ... def interrupt_main() -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/_imp.pyi0000644€tŠÔÚ€2›s®0000000131513576752252024632 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _imp (Python 3.6) import types from typing import Any, List from importlib.machinery import ModuleSpec def create_builtin(spec: ModuleSpec) -> types.ModuleType: ... def create_dynamic(spec: ModuleSpec, file: Any = ...) -> None: ... def acquire_lock() -> None: ... def exec_builtin(mod: types.ModuleType) -> int: ... def exec_dynamic(mod: types.ModuleType) -> int: ... def extension_suffixes() -> List[str]: ... def get_frozen_object(name: str) -> types.CodeType: ... def init_frozen(name: str) -> types.ModuleType: ... def is_builtin(name: str) -> int: ... def is_frozen(name: str) -> bool: ... def is_frozen_package(name: str) -> bool: ... def lock_held() -> bool: ... def release_lock() -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/_importlib_modulespec.pyi0000644€tŠÔÚ€2›s®0000000302513576752252030266 0ustar jukkaDROPBOX\Domain Users00000000000000# ModuleSpec, ModuleType, Loader are part of a dependency cycle. # They are officially defined/exported in other places: # # - ModuleType in types # - Loader in importlib.abc # - ModuleSpec in importlib.machinery (3.4 and later only) # # _Loader is the PEP-451-defined interface for a loader type/object. from abc import ABCMeta from typing import Any, Dict, List, Optional, Protocol class _Loader(Protocol): def load_module(self, fullname: str) -> ModuleType: ... class ModuleSpec: def __init__(self, name: str, loader: Optional[Loader], *, origin: Optional[str] = ..., loader_state: Any = ..., is_package: Optional[bool] = ...) -> None: ... 
name: str loader: Optional[_Loader] origin: Optional[str] submodule_search_locations: Optional[List[str]] loader_state: Any cached: Optional[str] parent: Optional[str] has_location: bool class ModuleType: __name__: str __file__: str __dict__: Dict[str, Any] __loader__: Optional[_Loader] __package__: Optional[str] __spec__: Optional[ModuleSpec] def __init__(self, name: str, doc: Optional[str] = ...) -> None: ... class Loader(metaclass=ABCMeta): def load_module(self, fullname: str) -> ModuleType: ... def module_repr(self, module: ModuleType) -> str: ... def create_module(self, spec: ModuleSpec) -> Optional[ModuleType]: ... # Not defined on the actual class for backwards-compatibility reasons, # but expected in new code. def exec_module(self, module: ModuleType) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/_json.pyi0000644€tŠÔÚ€2›s®0000000164413576752252025023 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_json' module.""" from typing import Any, Tuple class make_encoder: sort_keys: Any skipkeys: Any key_separator: Any indent: Any markers: Any default: Any encoder: Any item_separator: Any def __init__(self, markers, default, encoder, indent, key_separator, item_separator, sort_keys, skipkeys, allow_nan) -> None: ... def __call__(self, *args, **kwargs) -> Any: ... class make_scanner: object_hook: Any object_pairs_hook: Any parse_int: Any parse_constant: Any parse_float: Any strict: bool # TODO: 'context' needs the attrs above (ducktype), but not __call__. def __init__(self, context: make_scanner) -> None: ... def __call__(self, string: str, index: int) -> Tuple[Any, int]: ... def encode_basestring_ascii(s: str) -> str: ... def scanstring(string: str, end: int, strict: bool = ...) -> Tuple[str, int]: ... mypy-0.761/mypy/typeshed/stdlib/3/_markupbase.pyi0000644€tŠÔÚ€2›s®0000000040113576752252026172 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple class ParserBase: def __init__(self) -> None: ... 
def error(self, message: str) -> None: ... def reset(self) -> None: ... def getpos(self) -> Tuple[int, int]: ... def unknown_decl(self, data: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/_operator.pyi0000644€tŠÔÚ€2›s®0000000247713576752252025712 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _operator (Python 3.5) from typing import AnyStr # In reality the import is the other way around, but this way we can keep the operator stub in 2and3 from operator import ( truth as truth, contains as contains, indexOf as indexOf, countOf as countOf, is_ as is_, is_not as is_not, index as index, add as add, sub as sub, mul as mul, floordiv as floordiv, truediv as truediv, mod as mod, neg as neg, pos as pos, abs as abs, inv as inv, invert as invert, length_hint as length_hint, lshift as lshift, rshift as rshift, not_ as not_, and_ as and_, xor as xor, or_ as or_, iadd as iadd, isub as isub, imul as imul, ifloordiv as ifloordiv, itruediv as itruediv, imod as imod, ilshift as ilshift, irshift as irshift, iand as iand, ixor as ixor, ior as ior, concat as concat, iconcat as iconcat, getitem as getitem, setitem as setitem, delitem as delitem, pow as pow, ipow as ipow, eq as eq, ne as ne, lt as lt, le as le, gt as gt, ge as ge, itemgetter as itemgetter, attrgetter as attrgetter, methodcaller as methodcaller, matmul as matmul, imatmul as imatmul, ) def _compare_digest(a: AnyStr, b: AnyStr) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/3/_posixsubprocess.pyi0000644€tŠÔÚ€2›s®0000000113013576752252027313 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _posixsubprocess # NOTE: These are incomplete! from typing import Tuple, Sequence, Callable def cloexec_pipe() -> Tuple[int, int]: ... 
def fork_exec(args: Sequence[str], executable_list: Sequence[bytes], close_fds: bool, fds_to_keep: Sequence[int], cwd: str, env_list: Sequence[bytes], p2cread: int, p2cwrite: int, c2pred: int, c2pwrite: int, errread: int, errwrite: int, errpipe_read: int, errpipe_write: int, restore_signals: int, start_new_session: int, preexec_fn: Callable[[], None]) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3/_stat.pyi0000644€tŠÔÚ€2›s®0000000230613576752252025021 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_stat' module.""" SF_APPEND: int SF_ARCHIVED: int SF_IMMUTABLE: int SF_NOUNLINK: int SF_SNAPSHOT: int ST_ATIME: int ST_CTIME: int ST_DEV: int ST_GID: int ST_INO: int ST_MODE: int ST_MTIME: int ST_NLINK: int ST_SIZE: int ST_UID: int S_ENFMT: int S_IEXEC: int S_IFBLK: int S_IFCHR: int S_IFDIR: int S_IFDOOR: int S_IFIFO: int S_IFLNK: int S_IFPORT: int S_IFREG: int S_IFSOCK: int S_IFWHT: int S_IREAD: int S_IRGRP: int S_IROTH: int S_IRUSR: int S_IRWXG: int S_IRWXO: int S_IRWXU: int S_ISGID: int S_ISUID: int S_ISVTX: int S_IWGRP: int S_IWOTH: int S_IWRITE: int S_IWUSR: int S_IXGRP: int S_IXOTH: int S_IXUSR: int UF_APPEND: int UF_COMPRESSED: int UF_HIDDEN: int UF_IMMUTABLE: int UF_NODUMP: int UF_NOUNLINK: int UF_OPAQUE: int def S_IMODE(mode: int) -> int: ... def S_IFMT(mode: int) -> int: ... def S_ISBLK(mode: int) -> bool: ... def S_ISCHR(mode: int) -> bool: ... def S_ISDIR(mode: int) -> bool: ... def S_ISDOOR(mode: int) -> bool: ... def S_ISFIFO(mode: int) -> bool: ... def S_ISLNK(mode: int) -> bool: ... def S_ISPORT(mode: int) -> bool: ... def S_ISREG(mode: int) -> bool: ... def S_ISSOCK(mode: int) -> bool: ... def S_ISWHT(mode: int) -> bool: ... def filemode(mode: int) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/_subprocess.pyi0000644€tŠÔÚ€2›s®0000000237713576752252026246 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _subprocess # NOTE: These are incomplete! 
from typing import Mapping, Any, Tuple CREATE_NEW_CONSOLE: int CREATE_NEW_PROCESS_GROUP: int STD_INPUT_HANDLE: int STD_OUTPUT_HANDLE: int STD_ERROR_HANDLE: int SW_HIDE: int STARTF_USESTDHANDLES: int STARTF_USESHOWWINDOW: int INFINITE: int DUPLICATE_SAME_ACCESS: int WAIT_OBJECT_0: int # TODO not exported by the Python module class Handle: def Close(self) -> None: ... def GetVersion() -> int: ... def GetExitCodeProcess(handle: Handle) -> int: ... def WaitForSingleObject(handle: Handle, timeout: int) -> int: ... def CreateProcess(executable: str, cmd_line: str, proc_attrs, thread_attrs, inherit: int, flags: int, env_mapping: Mapping[str, str], curdir: str, startupinfo: Any) -> Tuple[Any, Handle, int, int]: ... def GetModuleFileName(module: int) -> str: ... def GetCurrentProcess() -> Handle: ... def DuplicateHandle(source_proc: Handle, source: Handle, target_proc: Handle, target: Any, access: int, inherit: int) -> int: ... def CreatePipe(pipe_attrs, size: int) -> Tuple[Handle, Handle]: ... def GetStdHandle(arg: int) -> int: ... def TerminateProcess(handle: Handle, exit_code: int) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/_thread.pyi0000644€tŠÔÚ€2›s®0000000246713576752252025325 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _thread import sys from threading import Thread from types import TracebackType from typing import Any, Callable, Dict, NamedTuple, NoReturn, Optional, Tuple, Type error = RuntimeError def _count() -> int: ... _dangling: Any class LockType: def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... def locked(self) -> bool: ... def __enter__(self) -> bool: ... def __exit__( self, type: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: ... def start_new_thread(function: Callable[..., Any], args: Tuple[Any, ...], kwargs: Dict[str, Any] = ...) -> int: ... def interrupt_main() -> None: ... def exit() -> NoReturn: ... 
def allocate_lock() -> LockType: ... def get_ident() -> int: ... def stack_size(size: int = ...) -> int: ... TIMEOUT_MAX: int if sys.version_info >= (3, 8): def get_native_id() -> int: ... # only available on some platforms class ExceptHookArgs(NamedTuple): exc_type: Type[BaseException] exc_value: Optional[BaseException] exc_traceback: Optional[TracebackType] thread: Optional[Thread] def _ExceptHookArgs(args) -> ExceptHookArgs: ... _excepthook: Callable[[ExceptHookArgs], Any] mypy-0.761/mypy/typeshed/stdlib/3/_threading_local.pyi0000644€tŠÔÚ€2›s®0000000107213576752252027164 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://github.com/python/cpython/blob/master/Lib/_threading_local.py from typing import Any, Dict, Tuple from weakref import ReferenceType localdict = Dict[Any, Any] class _localimpl: key: str dicts: Dict[int, Tuple[ReferenceType[Any], localdict]] def __init__(self) -> None: ... def get_dict(self) -> localdict: ... def create_dict(self) -> localdict: ... class local: def __getattribute__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __delattr__(self, name: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/_tracemalloc.pyi0000644€tŠÔÚ€2›s®0000000114113576752252026330 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_tracemalloc' module.""" # This is an autogenerated file. It serves as a starting point # for a more precise manual annotation of this module. # Feel free to edit the source below, but remove this header when you do. from typing import Any, Tuple def _get_object_traceback(*args, **kwargs) -> Any: ... def _get_traces() -> Any: ... def clear_traces() -> None: ... def get_traceback_limit() -> int: ... def get_traced_memory() -> Tuple[Any, ...]: ... def get_tracemalloc_memory() -> Any: ... def is_tracing() -> bool: ... def start(*args, **kwargs) -> None: ... def stop() -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/_winapi.pyi0000644€tŠÔÚ€2›s®0000001001213576752252025326 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Union, Tuple, Optional, overload, Dict, NoReturn, Sequence if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal CREATE_NEW_CONSOLE: int CREATE_NEW_PROCESS_GROUP: int DUPLICATE_CLOSE_SOURCE: int DUPLICATE_SAME_ACCESS: int ERROR_ALREADY_EXISTS: int ERROR_BROKEN_PIPE: int ERROR_IO_PENDING: int ERROR_MORE_DATA: int ERROR_NETNAME_DELETED: int ERROR_NO_DATA: int ERROR_NO_SYSTEM_RESOURCES: int ERROR_OPERATION_ABORTED: int ERROR_PIPE_BUSY: int ERROR_PIPE_CONNECTED: int ERROR_SEM_TIMEOUT: int FILE_FLAG_FIRST_PIPE_INSTANCE: int FILE_FLAG_OVERLAPPED: int FILE_GENERIC_READ: int FILE_GENERIC_WRITE: int GENERIC_READ: int GENERIC_WRITE: int INFINITE: int NMPWAIT_WAIT_FOREVER: int NULL: int OPEN_EXISTING: int PIPE_ACCESS_DUPLEX: int PIPE_ACCESS_INBOUND: int PIPE_READMODE_MESSAGE: int PIPE_TYPE_MESSAGE: int PIPE_UNLIMITED_INSTANCES: int PIPE_WAIT: int PROCESS_ALL_ACCESS: int PROCESS_DUP_HANDLE: int STARTF_USESHOWWINDOW: int STARTF_USESTDHANDLES: int STD_ERROR_HANDLE: int STD_INPUT_HANDLE: int STD_OUTPUT_HANDLE: int STILL_ACTIVE: int SW_HIDE: int WAIT_ABANDONED_0: int WAIT_OBJECT_0: int WAIT_TIMEOUT: int def CloseHandle(handle: int) -> None: ... @overload def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... @overload def ConnectNamedPipe(handle: int, overlapped: Literal[False] = ...) -> None: ... @overload def ConnectNamedPipe(handle: int, overlapped: bool) -> Any: ... def CreateFile(file_name: str, desired_access: int, share_mode: int, security_attributes: int, creation_disposition: int, flags_and_attributes: int, template_file: int) -> int: ... def CreateJunction(src_path: str, dest_path: str) -> None: ... 
def CreateNamedPipe(name: str, open_mode: int, pipe_mode: int, max_instances: int, out_buffer_size: int, in_buffer_size: int, default_timeout: int, security_attributes: int) -> int: ... def CreatePipe(pipe_attrs: Any, size: int) -> Tuple[int, int]: ... def CreateProcess(application_name: Optional[str], command_line: Optional[str], proc_attrs: Any, thread_attrs: Any, inherit_handles: bool, creation_flags: int, env_mapping: Dict[str, str], cwd: Optional[str], startup_info: Any) -> Tuple[int, int, int, int]: ... def DuplicateHandle(source_process_handle: int, source_handle: int, target_process_handle: int, desired_access: int, inherit_handle: bool, options: int = ...) -> int: ... def ExitProcess(ExitCode: int) -> NoReturn: ... def GetACP() -> int: ... def GetFileType(handle: int) -> int: ... def GetCurrentProcess() -> int: ... def GetExitCodeProcess(process: int) -> int: ... def GetLastError() -> int: ... def GetModuleFileName(module_handle: int) -> str: ... def GetStdHandle(std_handle: int) -> int: ... def GetVersion() -> int: ... def OpenProcess(desired_access: int, inherit_handle: bool, process_id: int) -> int: ... def PeekNamedPipe(handle: int, size: int = ...) -> Union[Tuple[int, int], Tuple[bytes, int, int]]: ... # TODO: once literal types are supported, overload with Literal[True/False] @overload def ReadFile(handle: int, size: int, overlapped: Union[int, bool]) -> Any: ... @overload def ReadFile(handle: int, size: int) -> Tuple[int, int]: ... def SetNamedPipeHandleState(named_pipe: int, mode: Optional[int], max_collection_count: Optional[int], collect_data_timeout: Optional[int]) -> None: ... def TerminateProcess(handle: int, exit_code: int) -> None: ... def WaitForMultipleObjects(handle_seq: Sequence[int], wait_flag: bool, milliseconds: int = ...) -> int: ... def WaitForSingleObject(handle: int, milliseconds: int) -> int: ... def WaitNamedPipe(name: str, timeout: int) -> None: ... 
# TODO: once literal types are supported, overload with Literal[True/False] @overload def WriteFile(handle: int, buffer: bytes, overlapped: Union[int, bool]) -> Any: ... @overload def WriteFile(handle: int, buffer: bytes) -> Tuple[bytes, int]: ... class Overlapped: event: int = ... def GetOverlappedResult(self, wait: bool) -> Tuple[int, int]: ... def cancel(self) -> None: ... def getbuffer(self) -> Optional[bytes]: ... mypy-0.761/mypy/typeshed/stdlib/3/abc.pyi0000644€tŠÔÚ€2›s®0000000114513576752252024434 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Type, TypeVar # Stubs for abc. _T = TypeVar('_T') _FuncT = TypeVar('_FuncT', bound=Callable[..., Any]) # Thesee definitions have special processing in mypy class ABCMeta(type): def register(cls: ABCMeta, subclass: Type[_T]) -> Type[_T]: ... def abstractmethod(callable: _FuncT) -> _FuncT: ... class abstractproperty(property): ... # These two are deprecated and not supported by mypy def abstractstaticmethod(callable: _FuncT) -> _FuncT: ... def abstractclassmethod(callable: _FuncT) -> _FuncT: ... class ABC(metaclass=ABCMeta): ... def get_cache_token() -> object: ... mypy-0.761/mypy/typeshed/stdlib/3/ast.pyi0000644€tŠÔÚ€2›s®0000000443513576752252024503 0ustar jukkaDROPBOX\Domain Users00000000000000import sys # Rename typing to _typing, as not to conflict with typing imported # from _ast below when loaded in an unorthodox way by the Dropbox # internal Bazel integration. import typing as _typing from typing import Any, Iterator, Optional, TypeVar, Union, overload # The same unorthodox Bazel integration causes issues with sys, which # is imported in both modules. unfortunately we can't just rename sys, # since mypy only supports version checks with a sys that is named # sys. from _ast import * # type: ignore if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal class NodeVisitor: def visit(self, node: AST) -> Any: ... 
def generic_visit(self, node: AST) -> Any: ... class NodeTransformer(NodeVisitor): def generic_visit(self, node: AST) -> Optional[AST]: ... _T = TypeVar("_T", bound=AST) if sys.version_info >= (3, 8): @overload def parse( source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: Literal["exec"] = ..., type_comments: bool = ..., feature_version: Union[None, int, _typing.Tuple[int, int]] = ..., ) -> Module: ... @overload def parse( source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ..., type_comments: bool = ..., feature_version: Union[None, int, _typing.Tuple[int, int]] = ..., ) -> AST: ... else: @overload def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: Literal["exec"] = ...) -> Module: ... @overload def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ... def copy_location(new_node: _T, old_node: AST) -> _T: ... def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: _T) -> _T: ... def get_docstring(node: AST, clean: bool = ...) -> str: ... def increment_lineno(node: _T, n: int = ...) -> _T: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[_typing.Tuple[str, Any]]: ... def literal_eval(node_or_string: Union[str, AST]) -> Any: ... def get_source_segment(source: str, node: AST, *, padded: bool = ...) -> Optional[str]: ... def walk(node: AST) -> Iterator[AST]: ... 
mypy-0.761/mypy/typeshed/stdlib/3/asyncio/0000755€tŠÔÚ€2›s®0000000000013576752267024636 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/asyncio/__init__.pyi0000644€tŠÔÚ€2›s®0000001007513576752252027115 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Type from asyncio.coroutines import ( coroutine as coroutine, iscoroutinefunction as iscoroutinefunction, iscoroutine as iscoroutine, ) from asyncio.protocols import ( BaseProtocol as BaseProtocol, Protocol as Protocol, DatagramProtocol as DatagramProtocol, SubprocessProtocol as SubprocessProtocol, ) from asyncio.streams import ( StreamReader as StreamReader, StreamWriter as StreamWriter, StreamReaderProtocol as StreamReaderProtocol, open_connection as open_connection, start_server as start_server, ) from asyncio.subprocess import ( create_subprocess_exec as create_subprocess_exec, create_subprocess_shell as create_subprocess_shell, ) from asyncio.transports import ( BaseTransport as BaseTransport, ReadTransport as ReadTransport, WriteTransport as WriteTransport, Transport as Transport, DatagramTransport as DatagramTransport, SubprocessTransport as SubprocessTransport, ) from asyncio.futures import ( Future as Future, wrap_future as wrap_future, ) from asyncio.tasks import ( FIRST_COMPLETED as FIRST_COMPLETED, FIRST_EXCEPTION as FIRST_EXCEPTION, ALL_COMPLETED as ALL_COMPLETED, as_completed as as_completed, ensure_future as ensure_future, gather as gather, run_coroutine_threadsafe as run_coroutine_threadsafe, shield as shield, sleep as sleep, wait as wait, wait_for as wait_for, Task as Task, ) from asyncio.base_events import ( BaseEventLoop as BaseEventLoop, Server as Server ) from asyncio.events import ( AbstractEventLoopPolicy as AbstractEventLoopPolicy, AbstractEventLoop as AbstractEventLoop, AbstractServer as AbstractServer, Handle as Handle, TimerHandle as TimerHandle, get_event_loop_policy as get_event_loop_policy, set_event_loop_policy as 
set_event_loop_policy, get_event_loop as get_event_loop, set_event_loop as set_event_loop, new_event_loop as new_event_loop, get_child_watcher as get_child_watcher, set_child_watcher as set_child_watcher, ) from asyncio.queues import ( Queue as Queue, PriorityQueue as PriorityQueue, LifoQueue as LifoQueue, QueueFull as QueueFull, QueueEmpty as QueueEmpty, ) from asyncio.locks import ( Lock as Lock, Event as Event, Condition as Condition, Semaphore as Semaphore, BoundedSemaphore as BoundedSemaphore, ) from asyncio.futures import isfuture as isfuture from asyncio.events import ( _set_running_loop as _set_running_loop, _get_running_loop as _get_running_loop, ) if sys.platform == 'win32': from asyncio.windows_events import * else: from asyncio.streams import ( open_unix_connection as open_unix_connection, start_unix_server as start_unix_server, ) DefaultEventLoopPolicy: Type[AbstractEventLoopPolicy] if sys.version_info >= (3, 7): from asyncio.events import ( get_running_loop as get_running_loop, ) from asyncio.tasks import ( all_tasks as all_tasks, create_task as create_task, current_task as current_task, ) from asyncio.runners import ( run as run, ) if sys.platform != 'win32': # This is already imported above on Windows. 
SelectorEventLoop: Type[AbstractEventLoop] # TODO: AbstractChildWatcher (UNIX only) if sys.version_info >= (3, 8): from asyncio.exceptions import ( CancelledError as CancelledError, IncompleteReadError as IncompleteReadError, InvalidStateError as InvalidStateError, LimitOverrunError as LimitOverrunError, SendfileNotAvailableError as SendfileNotAvailableError, TimeoutError as TimeoutError, ) else: from asyncio.events import ( SendfileNotAvailableError as SendfileNotAvailableError ) from asyncio.futures import ( CancelledError as CancelledError, TimeoutError as TimeoutError, InvalidStateError as InvalidStateError, ) from asyncio.streams import ( IncompleteReadError as IncompleteReadError, LimitOverrunError as LimitOverrunError, ) mypy-0.761/mypy/typeshed/stdlib/3/asyncio/base_events.pyi0000644€tŠÔÚ€2›s®0000003163013576752252027654 0ustar jukkaDROPBOX\Domain Users00000000000000import selectors from socket import socket, _Address, _RetAddress import ssl import sys from typing import Any, Awaitable, Callable, Dict, Generator, IO, List, Optional, Sequence, Tuple, TypeVar, Union, overload from abc import ABCMeta from asyncio.futures import Future from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle from asyncio.protocols import BaseProtocol from asyncio.tasks import Task from asyncio.transports import BaseTransport if sys.version_info >= (3, 7): from contextvars import Context _T = TypeVar('_T') _Context = Dict[str, Any] _ExceptionHandler = Callable[[AbstractEventLoop, _Context], Any] _ProtocolFactory = Callable[[], BaseProtocol] _SSLContext = Union[bool, None, ssl.SSLContext] _TransProtPair = Tuple[BaseTransport, BaseProtocol] class Server(AbstractServer): ... class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): def run_forever(self) -> None: ... # Can't use a union, see mypy issue # 1873. @overload def run_until_complete(self, future: Generator[Any, None, _T]) -> _T: ... 
@overload def run_until_complete(self, future: Awaitable[_T]) -> _T: ... def stop(self) -> None: ... def is_running(self) -> bool: ... def is_closed(self) -> bool: ... def close(self) -> None: ... if sys.version_info >= (3, 6): async def shutdown_asyncgens(self) -> None: ... # Methods scheduling callbacks. All these return Handles. if sys.version_info >= (3, 7): def call_soon(self, callback: Callable[..., Any], *args: Any, context: Optional[Context] = ...) -> Handle: ... def call_later( self, delay: float, callback: Callable[..., Any], *args: Any, context: Optional[Context] = ..., ) -> TimerHandle: ... def call_at( self, when: float, callback: Callable[..., Any], *args: Any, context: Optional[Context] = ..., ) -> TimerHandle: ... else: def call_soon(self, callback: Callable[..., Any], *args: Any) -> Handle: ... def call_later(self, delay: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... def call_at(self, when: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... def time(self) -> float: ... # Future methods def create_future(self) -> Future[Any]: ... # Tasks methods if sys.version_info >= (3, 8): def create_task( self, coro: Union[Awaitable[_T], Generator[Any, None, _T]], *, name: Optional[str] = ..., ) -> Task[_T]: ... else: def create_task(self, coro: Union[Awaitable[_T], Generator[Any, None, _T]]) -> Task[_T]: ... def set_task_factory(self, factory: Optional[Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]]]) -> None: ... def get_task_factory(self) -> Optional[Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]]]: ... # Methods for interacting with threads if sys.version_info >= (3, 7): def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any, context: Optional[Context] = ...) -> Handle: ... else: def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any) -> Handle: ... async def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> _T: ... 
def set_default_executor(self, executor: Any) -> None: ... # Network I/O methods returning Futures. # TODO the "Tuple[Any, ...]" should be "Union[Tuple[str, int], Tuple[str, int, int, int]]" but that triggers # https://github.com/python/mypy/issues/2509 async def getaddrinfo(self, host: Optional[str], port: Union[str, int, None], *, family: int = ..., type: int = ..., proto: int = ..., flags: int = ...) -> List[Tuple[int, int, int, str, Tuple[Any, ...]]]: ... async def getnameinfo(self, sockaddr: Tuple[Any, ...], flags: int = ...) -> Tuple[str, int]: ... if sys.version_info >= (3, 8): @overload async def create_connection( self, protocol_factory: _ProtocolFactory, host: str = ..., port: int = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: None = ..., local_addr: Optional[str] = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ..., happy_eyeballs_delay: Optional[float] = ..., interleave: Optional[int] = ..., ) -> _TransProtPair: ... @overload async def create_connection( self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: socket, local_addr: None = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ..., happy_eyeballs_delay: Optional[float] = ..., interleave: Optional[int] = ..., ) -> _TransProtPair: ... elif sys.version_info >= (3, 7): @overload async def create_connection(self, protocol_factory: _ProtocolFactory, host: str = ..., port: int = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: None = ..., local_addr: Optional[str] = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ...) -> _TransProtPair: ... 
@overload async def create_connection(self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: socket, local_addr: None = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ...) -> _TransProtPair: ... else: @overload async def create_connection(self, protocol_factory: _ProtocolFactory, host: str = ..., port: int = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: None = ..., local_addr: Optional[str] = ..., server_hostname: Optional[str] = ...) -> _TransProtPair: ... @overload async def create_connection(self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: socket, local_addr: None = ..., server_hostname: Optional[str] = ...) -> _TransProtPair: ... if sys.version_info >= (3, 7): async def sock_sendfile(self, sock: socket, file: IO[bytes], offset: int = ..., count: Optional[int] = ..., *, fallback: bool = ...) -> int: ... @overload async def create_server(self, protocol_factory: _ProtocolFactory, host: Optional[Union[str, Sequence[str]]] = ..., port: int = ..., *, family: int = ..., flags: int = ..., sock: None = ..., backlog: int = ..., ssl: _SSLContext = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ..., ssl_handshake_timeout: Optional[float] = ..., start_serving: bool = ...) -> Server: ... @overload async def create_server(self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, family: int = ..., flags: int = ..., sock: socket = ..., backlog: int = ..., ssl: _SSLContext = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ..., ssl_handshake_timeout: Optional[float] = ..., start_serving: bool = ...) -> Server: ... 
async def connect_accepted_socket(self, protocol_factory: _ProtocolFactory, sock: socket, *, ssl: _SSLContext = ..., ssl_handshake_timeout: Optional[float] = ...) -> _TransProtPair: ... async def sendfile(self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: Optional[int] = ..., *, fallback: bool = ...) -> int: ... async def start_tls(self, transport: BaseTransport, protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, server_side: bool = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ...) -> BaseTransport: ... else: @overload async def create_server(self, protocol_factory: _ProtocolFactory, host: Optional[Union[str, Sequence[str]]] = ..., port: int = ..., *, family: int = ..., flags: int = ..., sock: None = ..., backlog: int = ..., ssl: _SSLContext = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ...) -> Server: ... @overload async def create_server(self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, family: int = ..., flags: int = ..., sock: socket, backlog: int = ..., ssl: _SSLContext = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ...) -> Server: ... async def connect_accepted_socket(self, protocol_factory: _ProtocolFactory, sock: socket, *, ssl: _SSLContext = ...) -> _TransProtPair: ... async def create_datagram_endpoint(self, protocol_factory: _ProtocolFactory, local_addr: Optional[Tuple[str, int]] = ..., remote_addr: Optional[Tuple[str, int]] = ..., *, family: int = ..., proto: int = ..., flags: int = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ..., allow_broadcast: Optional[bool] = ..., sock: Optional[socket] = ...) -> _TransProtPair: ... # Pipes and subprocesses. async def connect_read_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... async def connect_write_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... 
async def subprocess_shell(self, protocol_factory: _ProtocolFactory, cmd: Union[bytes, str], *, stdin: Any = ..., stdout: Any = ..., stderr: Any = ..., **kwargs: Any) -> _TransProtPair: ... async def subprocess_exec(self, protocol_factory: _ProtocolFactory, *args: Any, stdin: Any = ..., stdout: Any = ..., stderr: Any = ..., **kwargs: Any) -> _TransProtPair: ... def add_reader(self, fd: selectors._FileObject, callback: Callable[..., Any], *args: Any) -> None: ... def remove_reader(self, fd: selectors._FileObject) -> None: ... def add_writer(self, fd: selectors._FileObject, callback: Callable[..., Any], *args: Any) -> None: ... def remove_writer(self, fd: selectors._FileObject) -> None: ... # Completion based I/O methods returning Futures prior to 3.7 if sys.version_info >= (3, 7): async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... async def sock_recv_into(self, sock: socket, buf: bytearray) -> int: ... async def sock_sendall(self, sock: socket, data: bytes) -> None: ... async def sock_connect(self, sock: socket, address: _Address) -> None: ... async def sock_accept(self, sock: socket) -> Tuple[socket, _RetAddress]: ... else: def sock_recv(self, sock: socket, nbytes: int) -> Future[bytes]: ... def sock_sendall(self, sock: socket, data: bytes) -> Future[None]: ... def sock_connect(self, sock: socket, address: _Address) -> Future[None]: ... def sock_accept(self, sock: socket) -> Future[Tuple[socket, _RetAddress]]: ... # Signal handling. def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... def remove_signal_handler(self, sig: int) -> None: ... # Error handlers. def set_exception_handler(self, handler: Optional[_ExceptionHandler]) -> None: ... def get_exception_handler(self) -> Optional[_ExceptionHandler]: ... def default_exception_handler(self, context: _Context) -> None: ... def call_exception_handler(self, context: _Context) -> None: ... # Debug flag management. def get_debug(self) -> bool: ... 
def set_debug(self, enabled: bool) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/constants.pyi0000644€tŠÔÚ€2›s®0000000042613576752252027371 0ustar jukkaDROPBOX\Domain Users00000000000000 import enum LOG_THRESHOLD_FOR_CONNLOST_WRITES: int ACCEPT_RETRY_DELAY: int DEBUG_STACK_DEPTH: int SSL_HANDSHAKE_TIMEOUT: float SENDFILE_FALLBACK_READBUFFER_SIZE: int class _SendfileMode(enum.Enum): UNSUPPORTED: int = ... TRY_NATIVE: int = ... FALLBACK: int = ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/coroutines.pyi0000644€tŠÔÚ€2›s®0000000034213576752252027544 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, TypeVar _F = TypeVar('_F', bound=Callable[..., Any]) def coroutine(func: _F) -> _F: ... def iscoroutinefunction(func: Callable[..., Any]) -> bool: ... def iscoroutine(obj: Any) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/events.pyi0000644€tŠÔÚ€2›s®0000004257113576752252026670 0ustar jukkaDROPBOX\Domain Users00000000000000import selectors from socket import socket, _Address, _RetAddress import ssl import sys from typing import Any, Awaitable, Callable, Dict, Generator, IO, List, Optional, Sequence, Tuple, TypeVar, Union, overload from abc import ABCMeta, abstractmethod from asyncio.futures import Future from asyncio.protocols import BaseProtocol from asyncio.tasks import Task from asyncio.transports import BaseTransport _T = TypeVar('_T') _Context = Dict[str, Any] _ExceptionHandler = Callable[[AbstractEventLoop, _Context], Any] _ProtocolFactory = Callable[[], BaseProtocol] _SSLContext = Union[bool, None, ssl.SSLContext] _TransProtPair = Tuple[BaseTransport, BaseProtocol] class Handle: _cancelled = False _args: List[Any] def __init__(self, callback: Callable[..., Any], args: List[Any], loop: AbstractEventLoop) -> None: ... def __repr__(self) -> str: ... def cancel(self) -> None: ... def _run(self) -> None: ... if sys.version_info >= (3, 7): def cancelled(self) -> bool: ... 
class TimerHandle(Handle): def __init__(self, when: float, callback: Callable[..., Any], args: List[Any], loop: AbstractEventLoop) -> None: ... def __hash__(self) -> int: ... if sys.version_info >= (3, 7): def when(self) -> float: ... class AbstractServer: sockets: Optional[List[socket]] def close(self) -> None: ... if sys.version_info >= (3, 7): async def __aenter__(self: _T) -> _T: ... async def __aexit__(self, *exc: Any) -> None: ... def get_loop(self) -> AbstractEventLoop: ... def is_serving(self) -> bool: ... async def start_serving(self) -> None: ... async def serve_forever(self) -> None: ... async def wait_closed(self) -> None: ... class AbstractEventLoop(metaclass=ABCMeta): slow_callback_duration: float = ... @abstractmethod def run_forever(self) -> None: ... # Can't use a union, see mypy issue # 1873. @overload @abstractmethod def run_until_complete(self, future: Generator[Any, None, _T]) -> _T: ... @overload @abstractmethod def run_until_complete(self, future: Awaitable[_T]) -> _T: ... @abstractmethod def stop(self) -> None: ... @abstractmethod def is_running(self) -> bool: ... @abstractmethod def is_closed(self) -> bool: ... @abstractmethod def close(self) -> None: ... if sys.version_info >= (3, 6): @abstractmethod async def shutdown_asyncgens(self) -> None: ... # Methods scheduling callbacks. All these return Handles. @abstractmethod def call_soon(self, callback: Callable[..., Any], *args: Any) -> Handle: ... @abstractmethod def call_later(self, delay: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... @abstractmethod def call_at(self, when: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... @abstractmethod def time(self) -> float: ... # Future methods @abstractmethod def create_future(self) -> Future[Any]: ... # Tasks methods if sys.version_info >= (3, 8): @abstractmethod def create_task( self, coro: Union[Awaitable[_T], Generator[Any, None, _T]], *, name: Optional[str] = ..., ) -> Task[_T]: ... 
else: @abstractmethod def create_task(self, coro: Union[Awaitable[_T], Generator[Any, None, _T]]) -> Task[_T]: ... @abstractmethod def set_task_factory(self, factory: Optional[Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]]]) -> None: ... @abstractmethod def get_task_factory(self) -> Optional[Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]]]: ... # Methods for interacting with threads @abstractmethod def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any) -> Handle: ... @abstractmethod async def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> _T: ... @abstractmethod def set_default_executor(self, executor: Any) -> None: ... # Network I/O methods returning Futures. @abstractmethod # TODO the "Tuple[Any, ...]" should be "Union[Tuple[str, int], Tuple[str, int, int, int]]" but that triggers # https://github.com/python/mypy/issues/2509 async def getaddrinfo(self, host: Optional[str], port: Union[str, int, None], *, family: int = ..., type: int = ..., proto: int = ..., flags: int = ...) -> List[Tuple[int, int, int, str, Tuple[Any, ...]]]: ... @abstractmethod async def getnameinfo(self, sockaddr: Tuple[Any, ...], flags: int = ...) -> Tuple[str, int]: ... if sys.version_info >= (3, 8): @overload @abstractmethod async def create_connection( self, protocol_factory: _ProtocolFactory, host: str = ..., port: int = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: None = ..., local_addr: Optional[str] = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ..., happy_eyeballs_delay: Optional[float] = ..., interleave: Optional[int] = ..., ) -> _TransProtPair: ... 
@overload @abstractmethod async def create_connection( self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: socket, local_addr: None = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ..., happy_eyeballs_delay: Optional[float] = ..., interleave: Optional[int] = ..., ) -> _TransProtPair: ... elif sys.version_info >= (3, 7): @overload @abstractmethod async def create_connection(self, protocol_factory: _ProtocolFactory, host: str = ..., port: int = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: None = ..., local_addr: Optional[str] = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ...) -> _TransProtPair: ... @overload @abstractmethod async def create_connection(self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: socket, local_addr: None = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ...) -> _TransProtPair: ... else: @overload @abstractmethod async def create_connection(self, protocol_factory: _ProtocolFactory, host: str = ..., port: int = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: None = ..., local_addr: Optional[str] = ..., server_hostname: Optional[str] = ...) -> _TransProtPair: ... @overload @abstractmethod async def create_connection(self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: socket, local_addr: None = ..., server_hostname: Optional[str] = ...) -> _TransProtPair: ... 
if sys.version_info >= (3, 7): @abstractmethod async def sock_sendfile(self, sock: socket, file: IO[bytes], offset: int = ..., count: Optional[int] = ..., *, fallback: bool = ...) -> int: ... @overload @abstractmethod async def create_server(self, protocol_factory: _ProtocolFactory, host: Optional[Union[str, Sequence[str]]] = ..., port: int = ..., *, family: int = ..., flags: int = ..., sock: None = ..., backlog: int = ..., ssl: _SSLContext = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ..., ssl_handshake_timeout: Optional[float] = ..., start_serving: bool = ...) -> AbstractServer: ... @overload @abstractmethod async def create_server(self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, family: int = ..., flags: int = ..., sock: socket = ..., backlog: int = ..., ssl: _SSLContext = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ..., ssl_handshake_timeout: Optional[float] = ..., start_serving: bool = ...) -> AbstractServer: ... @abstractmethod async def create_unix_connection(self, protocol_factory: _ProtocolFactory, path: str, *, ssl: _SSLContext = ..., sock: Optional[socket] = ..., server_hostname: str = ..., ssl_handshake_timeout: Optional[float] = ...) -> _TransProtPair: ... @abstractmethod async def create_unix_server(self, protocol_factory: _ProtocolFactory, path: str, *, sock: Optional[socket] = ..., backlog: int = ..., ssl: _SSLContext = ..., ssl_handshake_timeout: Optional[float] = ..., start_serving: bool = ...) -> AbstractServer: ... @abstractmethod async def connect_accepted_socket(self, protocol_factory: _ProtocolFactory, sock: socket, *, ssl: _SSLContext = ..., ssl_handshake_timeout: Optional[float] = ...) -> _TransProtPair: ... @abstractmethod async def sendfile(self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: Optional[int] = ..., *, fallback: bool = ...) -> int: ... 
@abstractmethod async def start_tls(self, transport: BaseTransport, protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, server_side: bool = ..., server_hostname: Optional[str] = ..., ssl_handshake_timeout: Optional[float] = ...) -> BaseTransport: ... else: @overload @abstractmethod async def create_server(self, protocol_factory: _ProtocolFactory, host: Optional[Union[str, Sequence[str]]] = ..., port: int = ..., *, family: int = ..., flags: int = ..., sock: None = ..., backlog: int = ..., ssl: _SSLContext = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ...) -> AbstractServer: ... @overload @abstractmethod async def create_server(self, protocol_factory: _ProtocolFactory, host: None = ..., port: None = ..., *, family: int = ..., flags: int = ..., sock: socket, backlog: int = ..., ssl: _SSLContext = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ...) -> AbstractServer: ... @abstractmethod async def create_unix_connection(self, protocol_factory: _ProtocolFactory, path: str, *, ssl: _SSLContext = ..., sock: Optional[socket] = ..., server_hostname: str = ...) -> _TransProtPair: ... @abstractmethod async def create_unix_server(self, protocol_factory: _ProtocolFactory, path: str, *, sock: Optional[socket] = ..., backlog: int = ..., ssl: _SSLContext = ...) -> AbstractServer: ... @abstractmethod async def connect_accepted_socket(self, protocol_factory: _ProtocolFactory, sock: socket, *, ssl: _SSLContext = ...) -> _TransProtPair: ... @abstractmethod async def create_datagram_endpoint(self, protocol_factory: _ProtocolFactory, local_addr: Optional[Tuple[str, int]] = ..., remote_addr: Optional[Tuple[str, int]] = ..., *, family: int = ..., proto: int = ..., flags: int = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ..., allow_broadcast: Optional[bool] = ..., sock: Optional[socket] = ...) -> _TransProtPair: ... # Pipes and subprocesses. 
@abstractmethod async def connect_read_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... @abstractmethod async def connect_write_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... @abstractmethod async def subprocess_shell(self, protocol_factory: _ProtocolFactory, cmd: Union[bytes, str], *, stdin: Any = ..., stdout: Any = ..., stderr: Any = ..., **kwargs: Any) -> _TransProtPair: ... @abstractmethod async def subprocess_exec(self, protocol_factory: _ProtocolFactory, *args: Any, stdin: Any = ..., stdout: Any = ..., stderr: Any = ..., **kwargs: Any) -> _TransProtPair: ... @abstractmethod def add_reader(self, fd: selectors._FileObject, callback: Callable[..., Any], *args: Any) -> None: ... @abstractmethod def remove_reader(self, fd: selectors._FileObject) -> None: ... @abstractmethod def add_writer(self, fd: selectors._FileObject, callback: Callable[..., Any], *args: Any) -> None: ... @abstractmethod def remove_writer(self, fd: selectors._FileObject) -> None: ... # Completion based I/O methods returning Futures prior to 3.7 if sys.version_info >= (3, 7): @abstractmethod async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... @abstractmethod async def sock_recv_into(self, sock: socket, buf: bytearray) -> int: ... @abstractmethod async def sock_sendall(self, sock: socket, data: bytes) -> None: ... @abstractmethod async def sock_connect(self, sock: socket, address: _Address) -> None: ... @abstractmethod async def sock_accept(self, sock: socket) -> Tuple[socket, _RetAddress]: ... else: @abstractmethod def sock_recv(self, sock: socket, nbytes: int) -> Future[bytes]: ... @abstractmethod def sock_sendall(self, sock: socket, data: bytes) -> Future[None]: ... @abstractmethod def sock_connect(self, sock: socket, address: _Address) -> Future[None]: ... @abstractmethod def sock_accept(self, sock: socket) -> Future[Tuple[socket, _RetAddress]]: ... # Signal handling. 
@abstractmethod def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... @abstractmethod def remove_signal_handler(self, sig: int) -> None: ... # Error handlers. @abstractmethod def set_exception_handler(self, handler: Optional[_ExceptionHandler]) -> None: ... @abstractmethod def get_exception_handler(self) -> Optional[_ExceptionHandler]: ... @abstractmethod def default_exception_handler(self, context: _Context) -> None: ... @abstractmethod def call_exception_handler(self, context: _Context) -> None: ... # Debug flag management. @abstractmethod def get_debug(self) -> bool: ... @abstractmethod def set_debug(self, enabled: bool) -> None: ... class AbstractEventLoopPolicy(metaclass=ABCMeta): @abstractmethod def get_event_loop(self) -> AbstractEventLoop: ... @abstractmethod def set_event_loop(self, loop: Optional[AbstractEventLoop]) -> None: ... @abstractmethod def new_event_loop(self) -> AbstractEventLoop: ... # Child processes handling (Unix only). @abstractmethod def get_child_watcher(self) -> Any: ... # TODO: unix_events.AbstractChildWatcher @abstractmethod def set_child_watcher(self, watcher: Any) -> None: ... # TODO: unix_events.AbstractChildWatcher class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy, metaclass=ABCMeta): def __init__(self) -> None: ... def get_event_loop(self) -> AbstractEventLoop: ... def set_event_loop(self, loop: Optional[AbstractEventLoop]) -> None: ... def new_event_loop(self) -> AbstractEventLoop: ... def get_event_loop_policy() -> AbstractEventLoopPolicy: ... def set_event_loop_policy(policy: AbstractEventLoopPolicy) -> None: ... def get_event_loop() -> AbstractEventLoop: ... def set_event_loop(loop: Optional[AbstractEventLoop]) -> None: ... def new_event_loop() -> AbstractEventLoop: ... def get_child_watcher() -> Any: ... # TODO: unix_events.AbstractChildWatcher def set_child_watcher(watcher: Any) -> None: ... 
# TODO: unix_events.AbstractChildWatcher def _set_running_loop(loop: Optional[AbstractEventLoop]) -> None: ... def _get_running_loop() -> AbstractEventLoop: ... if sys.version_info >= (3, 7): def get_running_loop() -> AbstractEventLoop: ... if sys.version_info < (3, 8): class SendfileNotAvailableError(RuntimeError): ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/exceptions.pyi0000644€tŠÔÚ€2›s®0000000106213576752252027533 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Optional if sys.version_info >= (3, 8): class CancelledError(BaseException): ... class TimeoutError(Exception): ... class InvalidStateError(Exception): ... class SendfileNotAvailableError(RuntimeError): ... class IncompleteReadError(EOFError): expected: Optional[int] partial: bytes def __init__(self, partial: bytes, expected: Optional[int]) -> None: ... class LimitOverrunError(Exception): consumed: int def __init__(self, message: str, consumed: int) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/futures.pyi0000644€tŠÔÚ€2›s®0000000454313576752252027056 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Union, Callable, TypeVar, Type, List, Iterable, Generator, Awaitable, Optional, Tuple from .events import AbstractEventLoop from concurrent.futures import ( Future as _ConcurrentFuture, Error, ) if sys.version_info < (3, 8): from concurrent.futures import CancelledError as CancelledError from concurrent.futures import TimeoutError as TimeoutError class InvalidStateError(Error): ... if sys.version_info >= (3, 7): from contextvars import Context _T = TypeVar('_T') _S = TypeVar('_S') class _TracebackLogger: exc: BaseException tb: List[str] def __init__(self, exc: Any, loop: AbstractEventLoop) -> None: ... def activate(self) -> None: ... def clear(self) -> None: ... def __del__(self) -> None: ... def isfuture(obj: object) -> bool: ... 
class Future(Awaitable[_T], Iterable[_T]): _state: str _exception: BaseException _blocking = False _log_traceback = False _tb_logger: Type[_TracebackLogger] def __init__(self, *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def __repr__(self) -> str: ... def __del__(self) -> None: ... if sys.version_info >= (3, 7): def get_loop(self) -> AbstractEventLoop: ... def _callbacks(self: _S) -> List[Tuple[Callable[[_S], Any], Context]]: ... def add_done_callback(self: _S, __fn: Callable[[_S], Any], *, context: Optional[Context] = ...) -> None: ... else: @property def _callbacks(self: _S) -> List[Callable[[_S], Any]]: ... def add_done_callback(self: _S, __fn: Callable[[_S], Any]) -> None: ... def cancel(self) -> bool: ... def _schedule_callbacks(self) -> None: ... def cancelled(self) -> bool: ... def done(self) -> bool: ... def result(self) -> _T: ... def exception(self) -> BaseException: ... def remove_done_callback(self: _S, fn: Callable[[_S], Any]) -> int: ... def set_result(self, result: _T) -> None: ... def set_exception(self, exception: Union[type, BaseException]) -> None: ... def _copy_state(self, other: Any) -> None: ... def __iter__(self) -> Generator[Any, None, _T]: ... def __await__(self) -> Generator[Any, None, _T]: ... @property def _loop(self) -> AbstractEventLoop: ... def wrap_future(f: Union[_ConcurrentFuture[_T], Future[_T]], *, loop: Optional[AbstractEventLoop] = ...) -> Future[_T]: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/locks.pyi0000644€tŠÔÚ€2›s®0000000402313576752252026465 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Type, TypeVar, Union, Optional, Awaitable from .events import AbstractEventLoop from .futures import Future from types import TracebackType _T = TypeVar('_T') class _ContextManager: def __init__(self, lock: Union[Lock, Semaphore]) -> None: ... def __enter__(self) -> object: ... def __exit__(self, *args: Any) -> None: ... 
class _ContextManagerMixin(Future[_ContextManager]): # Apparently this exists to *prohibit* use as a context manager. def __enter__(self) -> object: ... def __exit__(self, *args: Any) -> None: ... def __aenter__(self) -> Awaitable[None]: ... def __aexit__(self, exc_type: Optional[Type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType]) -> Awaitable[None]: ... class Lock(_ContextManagerMixin): def __init__(self, *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def locked(self) -> bool: ... async def acquire(self) -> bool: ... def release(self) -> None: ... class Event: def __init__(self, *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... async def wait(self) -> bool: ... class Condition(_ContextManagerMixin): def __init__(self, lock: Optional[Lock] = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def locked(self) -> bool: ... async def acquire(self) -> bool: ... def release(self) -> None: ... async def wait(self) -> bool: ... async def wait_for(self, predicate: Callable[[], _T]) -> _T: ... def notify(self, n: int = ...) -> None: ... def notify_all(self) -> None: ... class Semaphore(_ContextManagerMixin): def __init__(self, value: int = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def locked(self) -> bool: ... async def acquire(self) -> bool: ... def release(self) -> None: ... class BoundedSemaphore(Semaphore): def __init__(self, value: int = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/proactor_events.pyi0000644€tŠÔÚ€2›s®0000001016213576752252030570 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from socket import socket from typing import Any, Mapping, Optional, Union from . 
import base_events, constants, events, futures, streams, transports if sys.version_info >= (3, 7): from os import PathLike _Path = Union[str, PathLike[str]] else: _Path = str if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport): def __init__(self, loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, waiter: Optional[futures.Future[Any]] = ..., extra: Optional[Mapping[Any, Any]] = ..., server: Optional[events.AbstractServer] = ...) -> None: ... def __repr__(self) -> str: ... def __del__(self) -> None: ... def get_write_buffer_size(self) -> int: ... class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport): def __init__(self, loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, waiter: Optional[futures.Future[Any]] = ..., extra: Optional[Mapping[Any, Any]] = ..., server: Optional[events.AbstractServer] = ...) -> None: ... class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): def __init__(self, loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, waiter: Optional[futures.Future[Any]] = ..., extra: Optional[Mapping[Any, Any]] = ..., server: Optional[events.AbstractServer] = ...) -> None: ... class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): def __init__(self, loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, waiter: Optional[futures.Future[Any]] = ..., extra: Optional[Mapping[Any, Any]] = ..., server: Optional[events.AbstractServer] = ...) -> None: ... class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ... 
class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): _sendfile_compatible: constants._SendfileMode = ... def __init__(self, loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, waiter: Optional[futures.Future[Any]] = ..., extra: Optional[Mapping[Any, Any]] = ..., server: Optional[events.AbstractServer] = ...) -> None: ... def _set_extra(self, sock: socket) -> None: ... def can_write_eof(self) -> Literal[True]: ... def write_eof(self) -> None: ... class BaseProactorEventLoop(base_events.BaseEventLoop): def __init__(self, proactor: Any) -> None: ... # The methods below don't actually exist directly, ProactorEventLoops do not implement them. However, they are # needed to satisfy mypy if sys.version_info >= (3, 7): async def create_unix_connection( self, protocol_factory: events._ProtocolFactory, path: _Path, *, ssl: events._SSLContext = ..., sock: Optional[socket] = ..., server_hostname: str = ..., ssl_handshake_timeout: Optional[float] = ..., ) -> events._TransProtPair: ... async def create_unix_server( self, protocol_factory: events._ProtocolFactory, path: _Path, *, sock: Optional[socket] = ..., backlog: int = ..., ssl: events._SSLContext = ..., ssl_handshake_timeout: Optional[float] = ..., start_serving: bool = ..., ) -> events.AbstractServer: ... else: async def create_unix_connection(self, protocol_factory: events._ProtocolFactory, path: str, *, ssl: events._SSLContext = ..., sock: Optional[socket] = ..., server_hostname: str = ...) -> events._TransProtPair: ... async def create_unix_server(self, protocol_factory: events._ProtocolFactory, path: str, *, sock: Optional[socket] = ..., backlog: int = ..., ssl: events._SSLContext = ...) -> events.AbstractServer: ... 
mypy-0.761/mypy/typeshed/stdlib/3/asyncio/protocols.pyi0000644€tŠÔÚ€2›s®0000000203513576752252027377 0ustar jukkaDROPBOX\Domain Users00000000000000from asyncio import transports from typing import Optional, Text, Tuple, Union class BaseProtocol: def connection_made(self, transport: transports.BaseTransport) -> None: ... def connection_lost(self, exc: Optional[Exception]) -> None: ... def pause_writing(self) -> None: ... def resume_writing(self) -> None: ... class Protocol(BaseProtocol): def data_received(self, data: bytes) -> None: ... def eof_received(self) -> Optional[bool]: ... class BufferedProtocol(Protocol): def get_buffer(self, sizehint: int) -> bytearray: ... def buffer_updated(self, nbytes: int) -> None: ... class DatagramProtocol(BaseProtocol): def datagram_received(self, data: Union[bytes, Text], addr: Tuple[str, int]) -> None: ... def error_received(self, exc: Exception) -> None: ... class SubprocessProtocol(BaseProtocol): def pipe_data_received(self, fd: int, data: Union[bytes, Text]) -> None: ... def pipe_connection_lost(self, fd: int, exc: Optional[Exception]) -> None: ... def process_exited(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/queues.pyi0000644€tŠÔÚ€2›s®0000000207213576752252026663 0ustar jukkaDROPBOX\Domain Users00000000000000from asyncio.events import AbstractEventLoop from typing import Generic, TypeVar, Optional class QueueEmpty(Exception): ... class QueueFull(Exception): ... _T = TypeVar('_T') class Queue(Generic[_T]): def __init__(self, maxsize: int = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def _init(self, maxsize: int) -> None: ... def _get(self) -> _T: ... def _put(self, item: _T) -> None: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def _format(self) -> str: ... def _consume_done_getters(self) -> None: ... def _consume_done_putters(self) -> None: ... def qsize(self) -> int: ... @property def maxsize(self) -> int: ... def empty(self) -> bool: ... def full(self) -> bool: ... 
async def put(self, item: _T) -> None: ... def put_nowait(self, item: _T) -> None: ... async def get(self) -> _T: ... def get_nowait(self) -> _T: ... async def join(self) -> bool: ... def task_done(self) -> None: ... class PriorityQueue(Queue[_T]): ... class LifoQueue(Queue[_T]): ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/runners.pyi0000644€tŠÔÚ€2›s®0000000026113576752252027046 0ustar jukkaDROPBOX\Domain Users00000000000000import sys if sys.version_info >= (3, 7): from typing import Awaitable, TypeVar _T = TypeVar('_T') def run(main: Awaitable[_T], *, debug: bool = ...) -> _T: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/selector_events.pyi0000644€tŠÔÚ€2›s®0000000333713576752252030565 0ustar jukkaDROPBOX\Domain Users00000000000000import selectors import sys from socket import socket from typing import Optional, Union from . import base_events, events if sys.version_info >= (3, 7): from os import PathLike _Path = Union[str, PathLike[str]] else: _Path = str class BaseSelectorEventLoop(base_events.BaseEventLoop): def __init__(self, selector: selectors.BaseSelector = ...) -> None: ... if sys.version_info >= (3, 7): async def create_unix_connection( self, protocol_factory: events._ProtocolFactory, path: _Path, *, ssl: events._SSLContext = ..., sock: Optional[socket] = ..., server_hostname: str = ..., ssl_handshake_timeout: Optional[float] = ..., ) -> events._TransProtPair: ... async def create_unix_server( self, protocol_factory: events._ProtocolFactory, path: _Path, *, sock: Optional[socket] = ..., backlog: int = ..., ssl: events._SSLContext = ..., ssl_handshake_timeout: Optional[float] = ..., start_serving: bool = ..., ) -> events.AbstractServer: ... else: async def create_unix_connection(self, protocol_factory: events._ProtocolFactory, path: str, *, ssl: events._SSLContext = ..., sock: Optional[socket] = ..., server_hostname: str = ...) -> events._TransProtPair: ... 
async def create_unix_server(self, protocol_factory: events._ProtocolFactory, path: str, *, sock: Optional[socket] = ..., backlog: int = ..., ssl: events._SSLContext = ...) -> events.AbstractServer: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/streams.pyi0000644€tŠÔÚ€2›s®0000000747313576752252027044 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Union from . import events from . import protocols from . import transports _ClientConnectedCallback = Callable[[StreamReader, StreamWriter], Optional[Awaitable[None]]] if sys.version_info < (3, 8): class IncompleteReadError(EOFError): expected: Optional[int] partial: bytes def __init__(self, partial: bytes, expected: Optional[int]) -> None: ... class LimitOverrunError(Exception): consumed: int def __init__(self, message: str, consumed: int) -> None: ... async def open_connection( host: str = ..., port: Union[int, str] = ..., *, loop: Optional[events.AbstractEventLoop] = ..., limit: int = ..., ssl_handshake_timeout: Optional[float] = ..., **kwds: Any ) -> Tuple[StreamReader, StreamWriter]: ... async def start_server( client_connected_cb: _ClientConnectedCallback, host: Optional[str] = ..., port: Optional[Union[int, str]] = ..., *, loop: Optional[events.AbstractEventLoop] = ..., limit: int = ..., ssl_handshake_timeout: Optional[float] = ..., **kwds: Any ) -> events.AbstractServer: ... if sys.platform != 'win32': if sys.version_info >= (3, 7): from os import PathLike _PathType = Union[str, PathLike[str]] else: _PathType = str async def open_unix_connection( path: _PathType = ..., *, loop: Optional[events.AbstractEventLoop] = ..., limit: int = ..., **kwds: Any ) -> Tuple[StreamReader, StreamWriter]: ... async def start_unix_server( client_connected_cb: _ClientConnectedCallback, path: _PathType = ..., *, loop: Optional[events.AbstractEventLoop] = ..., limit: int = ..., **kwds: Any) -> events.AbstractServer: ... 
class FlowControlMixin(protocols.Protocol): ... class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): def __init__(self, stream_reader: StreamReader, client_connected_cb: _ClientConnectedCallback = ..., loop: Optional[events.AbstractEventLoop] = ...) -> None: ... def connection_made(self, transport: transports.BaseTransport) -> None: ... def connection_lost(self, exc: Optional[Exception]) -> None: ... def data_received(self, data: bytes) -> None: ... def eof_received(self) -> bool: ... class StreamWriter: def __init__(self, transport: transports.BaseTransport, protocol: protocols.BaseProtocol, reader: Optional[StreamReader], loop: events.AbstractEventLoop) -> None: ... @property def transport(self) -> transports.BaseTransport: ... def write(self, data: bytes) -> None: ... def writelines(self, data: Iterable[bytes]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def close(self) -> None: ... if sys.version_info >= (3, 7): def is_closing(self) -> bool: ... async def wait_closed(self) -> None: ... def get_extra_info(self, name: str, default: Any = ...) -> Any: ... async def drain(self) -> None: ... class StreamReader: def __init__(self, limit: int = ..., loop: Optional[events.AbstractEventLoop] = ...) -> None: ... def exception(self) -> Exception: ... def set_exception(self, exc: Exception) -> None: ... def set_transport(self, transport: transports.BaseTransport) -> None: ... def feed_eof(self) -> None: ... def at_eof(self) -> bool: ... def feed_data(self, data: bytes) -> None: ... async def readline(self) -> bytes: ... async def readuntil(self, separator: bytes = ...) -> bytes: ... async def read(self, n: int = ...) -> bytes: ... async def readexactly(self, n: int) -> bytes: ... 
mypy-0.761/mypy/typeshed/stdlib/3/asyncio/subprocess.pyi0000644€tŠÔÚ€2›s®0000000422613576752252027547 0ustar jukkaDROPBOX\Domain Users00000000000000from asyncio import events from asyncio import protocols from asyncio import streams from asyncio import transports from typing import Any, Optional, Text, Tuple, Union, IO PIPE: int STDOUT: int DEVNULL: int class SubprocessStreamProtocol(streams.FlowControlMixin, protocols.SubprocessProtocol): stdin: Optional[streams.StreamWriter] stdout: Optional[streams.StreamReader] stderr: Optional[streams.StreamReader] def __init__(self, limit: int, loop: events.AbstractEventLoop) -> None: ... def connection_made(self, transport: transports.BaseTransport) -> None: ... def pipe_data_received(self, fd: int, data: Union[bytes, Text]) -> None: ... def pipe_connection_lost(self, fd: int, exc: Optional[Exception]) -> None: ... def process_exited(self) -> None: ... class Process: stdin: Optional[streams.StreamWriter] stdout: Optional[streams.StreamReader] stderr: Optional[streams.StreamReader] pid: int def __init__(self, transport: transports.BaseTransport, protocol: protocols.BaseProtocol, loop: events.AbstractEventLoop) -> None: ... @property def returncode(self) -> int: ... async def wait(self) -> int: ... def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... async def communicate(self, input: Optional[bytes] = ...) -> Tuple[bytes, bytes]: ... async def create_subprocess_shell( *Args: Union[str, bytes], # Union used instead of AnyStr due to mypy issue #1236 stdin: Union[int, IO[Any], None] = ..., stdout: Union[int, IO[Any], None] = ..., stderr: Union[int, IO[Any], None] = ..., loop: events.AbstractEventLoop = ..., limit: int = ..., **kwds: Any ) -> Process: ... 
async def create_subprocess_exec( program: Union[str, bytes], # Union used instead of AnyStr due to mypy issue #1236 *args: Any, stdin: Union[int, IO[Any], None] = ..., stdout: Union[int, IO[Any], None] = ..., stderr: Union[int, IO[Any], None] = ..., loop: events.AbstractEventLoop = ..., limit: int = ..., **kwds: Any ) -> Process: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/tasks.pyi0000644€tŠÔÚ€2›s®0000001533513576752252026507 0ustar jukkaDROPBOX\Domain Users00000000000000import concurrent.futures import sys from typing import ( Any, TypeVar, Set, List, TextIO, Union, Tuple, Generic, Generator, Iterable, Awaitable, overload, Iterator, Optional, ) from types import FrameType from .events import AbstractEventLoop from .futures import Future if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal _T = TypeVar('_T') _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _FutureT = Union[Future[_T], Generator[Any, None, _T], Awaitable[_T]] FIRST_EXCEPTION: str FIRST_COMPLETED: str ALL_COMPLETED: str def as_completed(fs: Iterable[_FutureT[_T]], *, loop: Optional[AbstractEventLoop] = ..., timeout: Optional[float] = ...) -> Iterator[Future[_T]]: ... def ensure_future(coro_or_future: _FutureT[_T], *, loop: Optional[AbstractEventLoop] = ...) -> Future[_T]: ... # Prior to Python 3.7 'async' was an alias for 'ensure_future'. # It became a keyword in 3.7. # `gather()` actually returns a list with length equal to the number # of tasks passed; however, Tuple is used similar to the annotation for # zip() because typing does not support variadic type variables. See # typing PR #1550 for discussion. @overload def gather(coro_or_future1: _FutureT[_T1], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: Literal[False] = ...) -> Future[Tuple[_T1]]: ... 
@overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: Literal[False] = ...) -> Future[Tuple[_T1, _T2]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: Literal[False] = ...) -> Future[Tuple[_T1, _T2, _T3]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3], coro_or_future4: _FutureT[_T4], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: Literal[False] = ...) -> Future[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3], coro_or_future4: _FutureT[_T4], coro_or_future5: _FutureT[_T5], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: Literal[False] = ...) -> Future[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def gather(coro_or_future1: _FutureT[Any], coro_or_future2: _FutureT[Any], coro_or_future3: _FutureT[Any], coro_or_future4: _FutureT[Any], coro_or_future5: _FutureT[Any], coro_or_future6: _FutureT[Any], *coros_or_futures: _FutureT[Any], loop: Optional[AbstractEventLoop] = ..., return_exceptions: bool = ...) -> Future[List[Any]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: bool = ...) -> Future[Tuple[Union[_T1, BaseException]]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: bool = ...) -> Future[Tuple[Union[_T1, BaseException], Union[_T2, BaseException]]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: bool = ...) 
-> Future[ Tuple[Union[_T1, BaseException], Union[_T2, BaseException], Union[_T3, BaseException]]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3], coro_or_future4: _FutureT[_T4], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: bool = ...) -> Future[ Tuple[Union[_T1, BaseException], Union[_T2, BaseException], Union[_T3, BaseException], Union[_T4, BaseException]]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3], coro_or_future4: _FutureT[_T4], coro_or_future5: _FutureT[_T5], *, loop: Optional[AbstractEventLoop] = ..., return_exceptions: bool = ...) -> Future[ Tuple[Union[_T1, BaseException], Union[_T2, BaseException], Union[_T3, BaseException], Union[_T4, BaseException], Union[_T5, BaseException]]]: ... def run_coroutine_threadsafe(coro: _FutureT[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... def shield(arg: _FutureT[_T], *, loop: Optional[AbstractEventLoop] = ...) -> Future[_T]: ... def sleep(delay: float, result: _T = ..., loop: Optional[AbstractEventLoop] = ...) -> Future[_T]: ... def wait(fs: Iterable[_FutureT[_T]], *, loop: Optional[AbstractEventLoop] = ..., timeout: Optional[float] = ..., return_when: str = ...) -> Future[Tuple[Set[Future[_T]], Set[Future[_T]]]]: ... def wait_for(fut: _FutureT[_T], timeout: Optional[float], *, loop: Optional[AbstractEventLoop] = ...) -> Future[_T]: ... class Task(Future[_T], Generic[_T]): @classmethod def current_task(cls, loop: Optional[AbstractEventLoop] = ...) -> Task[Any]: ... @classmethod def all_tasks(cls, loop: Optional[AbstractEventLoop] = ...) -> Set[Task[Any]]: ... if sys.version_info >= (3, 8): def __init__( self, coro: Union[Generator[Any, None, _T], Awaitable[_T]], *, loop: AbstractEventLoop = ..., name: Optional[str] = ..., ) -> None: ... 
else: def __init__(self, coro: Union[Generator[Any, None, _T], Awaitable[_T]], *, loop: AbstractEventLoop = ...) -> None: ... def __repr__(self) -> str: ... if sys.version_info >= (3, 8): def get_coro(self) -> Any: ... def get_name(self) -> str: ... def set_name(self, value: object) -> None: ... def get_stack(self, *, limit: int = ...) -> List[FrameType]: ... def print_stack(self, *, limit: int = ..., file: TextIO = ...) -> None: ... def cancel(self) -> bool: ... def _step(self, value: Any = ..., exc: Exception = ...) -> None: ... def _wakeup(self, future: Future[Any]) -> None: ... if sys.version_info >= (3, 7): def all_tasks(loop: Optional[AbstractEventLoop] = ...) -> Set[Task[Any]]: ... if sys.version_info >= (3, 8): def create_task( coro: Union[Generator[Any, None, _T], Awaitable[_T]], *, name: Optional[str] = ..., ) -> Task[Any]: ... else: def create_task(coro: Union[Generator[Any, None, _T], Awaitable[_T]]) -> Task[Any]: ... def current_task(loop: Optional[AbstractEventLoop] = ...) -> Optional[Task[Any]]: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/transports.pyi0000644€tŠÔÚ€2›s®0000000352013576752252027572 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Mapping, List, Optional, Tuple from asyncio.protocols import BaseProtocol from asyncio.events import AbstractEventLoop class BaseTransport: def __init__(self, extra: Mapping[Any, Any] = ...) -> None: ... def get_extra_info(self, name: Any, default: Any = ...) -> Any: ... def is_closing(self) -> bool: ... def close(self) -> None: ... if sys.version_info >= (3, 5): def set_protocol(self, protocol: BaseProtocol) -> None: ... def get_protocol(self) -> BaseProtocol: ... class ReadTransport(BaseTransport): if sys.version_info >= (3, 7): def is_reading(self) -> bool: ... def pause_reading(self) -> None: ... def resume_reading(self) -> None: ... class WriteTransport(BaseTransport): def set_write_buffer_limits( self, high: int = ..., low: int = ... ) -> None: ... 
def get_write_buffer_size(self) -> int: ... def write(self, data: Any) -> None: ... def writelines(self, list_of_data: List[Any]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def abort(self) -> None: ... class Transport(ReadTransport, WriteTransport): ... class DatagramTransport(BaseTransport): def sendto(self, data: Any, addr: Optional[Tuple[str, int]] = ...) -> None: ... def abort(self) -> None: ... class SubprocessTransport(BaseTransport): def get_pid(self) -> int: ... def get_returncode(self) -> int: ... def get_pipe_transport(self, fd: int) -> BaseTransport: ... def send_signal(self, signal: int) -> int: ... def terminate(self) -> None: ... def kill(self) -> None: ... class _FlowControlMixin(Transport): def __init__(self, extra: Optional[Mapping[Any, Any]] = ..., loop: Optional[AbstractEventLoop] = ...) -> None: ... def get_write_buffer_limits(self) -> Tuple[int, int]: ... mypy-0.761/mypy/typeshed/stdlib/3/asyncio/windows_events.pyi0000644€tŠÔÚ€2›s®0000000536413576752252030441 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import Callable, Tuple, List, IO, Any, Optional import socket from . import proactor_events, events, futures, windows_utils, selector_events, streams NULL: int INFINITE: int ERROR_CONNECTION_REFUSED: int ERROR_CONNECTION_ABORTED: int CONNECT_PIPE_INIT_DELAY: float CONNECT_PIPE_MAX_DELAY: float class PipeServer: def __init__(self, address: str) -> None: ... def __del__(self) -> None: ... def closed(self) -> bool: ... def close(self) -> None: ... class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): ... class ProactorEventLoop(proactor_events.BaseProactorEventLoop): def __init__(self, proactor: Optional[IocpProactor] = ...) -> None: ... async def create_pipe_connection(self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str) -> Tuple[proactor_events._ProactorDuplexPipeTransport, streams.StreamReaderProtocol]: ... 
async def start_serving_pipe(self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str) -> List[PipeServer]: ... class IocpProactor: def __init__(self, concurrency: int = ...) -> None: ... def __repr__(self) -> str: ... def __del__(self) -> None: ... def set_loop(self, loop: events.AbstractEventLoop) -> None: ... def select(self, timeout: Optional[int] = ...) -> List[futures.Future[Any]]: ... def recv(self, conn: socket.socket, nbytes: int, flags: int = ...) -> futures.Future[bytes]: ... def recv_into(self, conn: socket.socket, buf: socket._WriteBuffer, flags: int = ...) -> futures.Future[Any]: ... def send(self, conn: socket.socket, buf: socket._WriteBuffer, flags: int = ...) -> futures.Future[Any]: ... def accept(self, listener: socket.socket) -> futures.Future[Any]: ... def connect(self, conn: socket.socket, address: bytes) -> futures.Future[Any]: ... def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... async def connect_pipe(self, address: bytes) -> windows_utils.PipeHandle: ... def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int = ...) -> bool: ... def close(self) -> None: ... SelectorEventLoop = _WindowsSelectorEventLoop class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: events.AbstractEventLoop = ... def get_child_watcher(self) -> Any: ... def set_child_watcher(self, watcher: Any) -> None: ... class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: events.AbstractEventLoop = ... def get_child_watcher(self) -> Any: ... def set_child_watcher(self, watcher: Any) -> None: ... 
DefaultEventLoopPolicy = WindowsSelectorEventLoopPolicy mypy-0.761/mypy/typeshed/stdlib/3/asyncio/windows_utils.pyi0000644€tŠÔÚ€2›s®0000000125013576752252030263 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import Tuple, Callable, Optional from types import TracebackType BUFSIZE: int PIPE: int STDOUT: int def pipe(*, duplex: bool = ..., overlapped: Tuple[bool, bool] = ..., bufsize: int = ...) -> Tuple[int, int]: ... class PipeHandle: def __init__(self, handle: int) -> None: ... def __repr__(self) -> str: ... def __del__(self) -> None: ... def __enter__(self) -> PipeHandle: ... def __exit__(self, t: Optional[type], v: Optional[BaseException], tb: Optional[TracebackType]) -> None: ... @property def handle(self) -> int: ... def fileno(self) -> int: ... def close(self, *, CloseHandle: Callable[[int], None] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/atexit.pyi0000644€tŠÔÚ€2›s®0000000047113576752252025206 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'atexit' module.""" from typing import Any, Callable def _clear() -> None: ... def _ncallbacks() -> int: ... def _run_exitfuncs() -> None: ... def register(func: Callable[..., Any], *args: Any, **kwargs: Any) -> Callable[..., Any]: ... def unregister(func: Callable[..., Any]) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/collections/0000755€tŠÔÚ€2›s®0000000000013576752267025507 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/collections/__init__.pyi0000644€tŠÔÚ€2›s®0000003413713576752252027773 0ustar jukkaDROPBOX\Domain Users00000000000000# These are not exported. import sys import typing from typing import ( TypeVar, Generic, Dict, overload, List, Tuple, Any, Type, Optional, Union ) # These are exported. from . 
import abc from typing import ( AsyncIterable as AsyncIterable, AsyncIterator as AsyncIterator, Awaitable as Awaitable, Callable as Callable, Container as Container, Coroutine as Coroutine, Hashable as Hashable, Iterable as Iterable, Iterator as Iterator, Sized as Sized, Generator as Generator, ByteString as ByteString, Reversible as Reversible, Mapping as Mapping, MappingView as MappingView, ItemsView as ItemsView, KeysView as KeysView, ValuesView as ValuesView, MutableMapping as MutableMapping, Sequence as Sequence, MutableSequence as MutableSequence, MutableSet as MutableSet, AbstractSet as Set, ) if sys.version_info >= (3, 6): from typing import ( Collection as Collection, AsyncGenerator as AsyncGenerator, ) _S = TypeVar('_S') _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') # namedtuple is special-cased in the type checker; the initializer is ignored. if sys.version_info >= (3, 7): def namedtuple( typename: str, field_names: Union[str, Iterable[str]], *, rename: bool = ..., module: Optional[str] = ..., defaults: Optional[Iterable[Any]] = ..., ) -> Type[Tuple[Any, ...]]: ... elif sys.version_info >= (3, 6): def namedtuple( typename: str, field_names: Union[str, Iterable[str]], *, verbose: bool = ..., rename: bool = ..., module: Optional[str] = ..., ) -> Type[Tuple[Any, ...]]: ... else: def namedtuple( typename: str, field_names: Union[str, Iterable[str]], verbose: bool = ..., rename: bool = ..., ) -> Type[Tuple[Any, ...]]: ... class UserDict(MutableMapping[_KT, _VT]): data: Dict[_KT, _VT] def __init__(self, dict: Optional[Mapping[_KT, _VT]] = ..., **kwargs: _VT) -> None: ... def __len__(self) -> int: ... def __getitem__(self, key: _KT) -> _VT: ... def __setitem__(self, key: _KT, item: _VT) -> None: ... def __delitem__(self, key: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def __contains__(self, key: object) -> bool: ... def copy(self: _S) -> _S: ... 
@classmethod def fromkeys(cls: Type[_S], iterable: Iterable[_KT], value: Optional[_VT] = ...) -> _S: ... class UserList(MutableSequence[_T]): data: List[_T] def __init__(self, initlist: Optional[Iterable[_T]] = ...) -> None: ... def __lt__(self, other: object) -> bool: ... def __le__(self, other: object) -> bool: ... def __gt__(self, other: object) -> bool: ... def __ge__(self, other: object) -> bool: ... def __contains__(self, item: object) -> bool: ... def __len__(self) -> int: ... @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, i: slice) -> MutableSequence[_T]: ... @overload def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, i: slice, o: Iterable[_T]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __add__(self: _S, other: Iterable[_T]) -> _S: ... def __iadd__(self: _S, other: Iterable[_T]) -> _S: ... def __mul__(self: _S, n: int) -> _S: ... def __imul__(self: _S, n: int) -> _S: ... def append(self, item: _T) -> None: ... def insert(self, i: int, item: _T) -> None: ... def pop(self, i: int = ...) -> _T: ... def remove(self, item: _T) -> None: ... def clear(self) -> None: ... def copy(self: _S) -> _S: ... def count(self, item: _T) -> int: ... def index(self, item: _T, *args: Any) -> int: ... def reverse(self) -> None: ... def sort(self, *args: Any, **kwds: Any) -> None: ... def extend(self, other: Iterable[_T]) -> None: ... _UserStringT = TypeVar('_UserStringT', bound=UserString) class UserString(Sequence[str]): data: str def __init__(self, seq: object) -> None: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __complex__(self) -> complex: ... def __getnewargs__(self) -> Tuple[str]: ... def __lt__(self, string: Union[str, UserString]) -> bool: ... def __le__(self, string: Union[str, UserString]) -> bool: ... def __gt__(self, string: Union[str, UserString]) -> bool: ... def __ge__(self, string: Union[str, UserString]) -> bool: ... 
def __contains__(self, char: object) -> bool: ... def __len__(self) -> int: ... # It should return a str to implement Sequence correctly, but it doesn't. def __getitem__(self: _UserStringT, i: Union[int, slice]) -> _UserStringT: ... # type: ignore def __add__(self: _UserStringT, other: object) -> _UserStringT: ... def __mul__(self: _UserStringT, n: int) -> _UserStringT: ... def __mod__(self: _UserStringT, args: Any) -> _UserStringT: ... def capitalize(self: _UserStringT) -> _UserStringT: ... def casefold(self: _UserStringT) -> _UserStringT: ... def center(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ... def count(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ... def encode(self: _UserStringT, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> _UserStringT: ... def endswith(self, suffix: Union[str, Tuple[str, ...]], start: int = ..., end: int = ...) -> bool: ... def expandtabs(self: _UserStringT, tabsize: int = ...) -> _UserStringT: ... def find(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ... def format(self, *args: Any, **kwds: Any) -> str: ... def format_map(self, mapping: Mapping[str, Any]) -> str: ... def index(self, sub: str, start: int = ..., end: int = ...) -> int: ... def isalpha(self) -> bool: ... def isalnum(self) -> bool: ... def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... def isidentifier(self) -> bool: ... def islower(self) -> bool: ... def isnumeric(self) -> bool: ... def isprintable(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, seq: Iterable[str]) -> str: ... def ljust(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ... def lower(self: _UserStringT) -> _UserStringT: ... def lstrip(self: _UserStringT, chars: Optional[str] = ...) -> _UserStringT: ... 
@staticmethod @overload def maketrans(x: Union[Dict[int, _T], Dict[str, _T], Dict[Union[str, int], _T]]) -> Dict[int, _T]: ... @staticmethod @overload def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Union[int, None]]: ... def partition(self, sep: str) -> Tuple[str, str, str]: ... def replace(self: _UserStringT, old: Union[str, UserString], new: Union[str, UserString], maxsplit: int = ...) -> _UserStringT: ... def rfind(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ... def rjust(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ... def rpartition(self, sep: str) -> Tuple[str, str, str]: ... def rstrip(self: _UserStringT, chars: Optional[str] = ...) -> _UserStringT: ... def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... def splitlines(self, keepends: bool = ...) -> List[str]: ... def startswith(self, prefix: Union[str, Tuple[str, ...]], start: int = ..., end: int = ...) -> bool: ... def strip(self: _UserStringT, chars: Optional[str] = ...) -> _UserStringT: ... def swapcase(self: _UserStringT) -> _UserStringT: ... def title(self: _UserStringT) -> _UserStringT: ... def translate(self: _UserStringT, *args: Any) -> _UserStringT: ... def upper(self: _UserStringT) -> _UserStringT: ... def zfill(self: _UserStringT, width: int) -> _UserStringT: ... # Technically, deque only derives from MutableSequence in 3.5 (before then, the insert and index # methods did not exist). # But in practice it's not worth losing sleep over. class deque(MutableSequence[_T], Generic[_T]): @property def maxlen(self) -> Optional[int]: ... def __init__(self, iterable: Iterable[_T] = ..., maxlen: Optional[int] = ...) -> None: ... def append(self, x: _T) -> None: ... def appendleft(self, x: _T) -> None: ... def clear(self) -> None: ... 
def copy(self) -> deque[_T]: ... def count(self, x: _T) -> int: ... def extend(self, iterable: Iterable[_T]) -> None: ... def extendleft(self, iterable: Iterable[_T]) -> None: ... def insert(self, i: int, x: _T) -> None: ... def index(self, x: _T, start: int = ..., stop: int = ...) -> int: ... def pop(self, i: int = ...) -> _T: ... def popleft(self) -> _T: ... def remove(self, value: _T) -> None: ... def reverse(self) -> None: ... def rotate(self, n: int) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __hash__(self) -> int: ... # These methods of deque don't really take slices, but we need to # define them as taking a slice to satisfy MutableSequence. @overload def __getitem__(self, index: int) -> _T: ... @overload def __getitem__(self, s: slice) -> MutableSequence[_T]: ... @overload def __setitem__(self, i: int, x: _T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... @overload def __delitem__(self, i: int) -> None: ... @overload def __delitem__(self, s: slice) -> None: ... def __contains__(self, o: object) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... def __iadd__(self: _S, iterable: Iterable[_T]) -> _S: ... def __add__(self, other: deque[_T]) -> deque[_T]: ... def __mul__(self, other: int) -> deque[_T]: ... def __imul__(self, other: int) -> None: ... class Counter(Dict[_T, int], Generic[_T]): @overload def __init__(self, **kwargs: int) -> None: ... @overload def __init__(self, mapping: Mapping[_T, int]) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... def copy(self: _S) -> _S: ... def elements(self) -> Iterator[_T]: ... def most_common(self, n: Optional[int] = ...) -> List[Tuple[_T, int]]: ... @overload def subtract(self, __mapping: Mapping[_T, int]) -> None: ... @overload def subtract(self, iterable: Iterable[_T]) -> None: ... 
# The Iterable[Tuple[...]] argument type is not actually desirable # (the tuples will be added as keys, breaking type safety) but # it's included so that the signature is compatible with # Dict.update. Not sure if we should use '# type: ignore' instead # and omit the type from the union. @overload def update(self, __m: Mapping[_T, int], **kwargs: int) -> None: ... @overload def update(self, __m: Union[Iterable[_T], Iterable[Tuple[_T, int]]], **kwargs: int) -> None: ... @overload def update(self, **kwargs: int) -> None: ... def __add__(self, other: Counter[_T]) -> Counter[_T]: ... def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... def __and__(self, other: Counter[_T]) -> Counter[_T]: ... def __or__(self, other: Counter[_T]) -> Counter[_T]: ... def __pos__(self) -> Counter[_T]: ... def __neg__(self) -> Counter[_T]: ... def __iadd__(self, other: Counter[_T]) -> Counter[_T]: ... def __isub__(self, other: Counter[_T]) -> Counter[_T]: ... def __iand__(self, other: Counter[_T]) -> Counter[_T]: ... def __ior__(self, other: Counter[_T]) -> Counter[_T]: ... class _OrderedDictKeysView(KeysView[_KT], Reversible[_KT]): def __reversed__(self) -> Iterator[_KT]: ... class _OrderedDictItemsView(ItemsView[_KT, _VT], Reversible[Tuple[_KT, _VT]]): def __reversed__(self) -> Iterator[Tuple[_KT, _VT]]: ... class _OrderedDictValuesView(ValuesView[_VT], Reversible[_VT]): def __reversed__(self) -> Iterator[_VT]: ... class OrderedDict(Dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ... def move_to_end(self, key: _KT, last: bool = ...) -> None: ... def copy(self: _S) -> _S: ... def __reversed__(self) -> Iterator[_KT]: ... def keys(self) -> _OrderedDictKeysView[_KT]: ... def items(self) -> _OrderedDictItemsView[_KT, _VT]: ... def values(self) -> _OrderedDictValuesView[_VT]: ... 
class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]): default_factory: Optional[Callable[[], _VT]] @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], **kwargs: _VT) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], map: Mapping[_KT, _VT]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], iterable: Iterable[Tuple[_KT, _VT]]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... def __missing__(self, key: _KT) -> _VT: ... # TODO __reversed__ def copy(self: _S) -> _S: ... class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __init__(self, *maps: Mapping[_KT, _VT]) -> None: ... @property def maps(self) -> List[Mapping[_KT, _VT]]: ... def new_child(self, m: Mapping[_KT, _VT] = ...) -> typing.ChainMap[_KT, _VT]: ... @property def parents(self) -> typing.ChainMap[_KT, _VT]: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... def __getitem__(self, k: _KT) -> _VT: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3/collections/abc.pyi0000644€tŠÔÚ€2›s®0000000166113576752252026755 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for collections.abc (introduced from Python 3.3) # # https://docs.python.org/3.3/whatsnew/3.3.html#collections import sys from . 
import ( AsyncIterable as AsyncIterable, AsyncIterator as AsyncIterator, Awaitable as Awaitable, ByteString as ByteString, Container as Container, Coroutine as Coroutine, Generator as Generator, Hashable as Hashable, Iterable as Iterable, Iterator as Iterator, Sized as Sized, Callable as Callable, Mapping as Mapping, MutableMapping as MutableMapping, Sequence as Sequence, MutableSequence as MutableSequence, Set as Set, MutableSet as MutableSet, MappingView as MappingView, ItemsView as ItemsView, KeysView as KeysView, ValuesView as ValuesView, ) if sys.version_info >= (3, 6): from . import ( Collection as Collection, Reversible as Reversible, AsyncGenerator as AsyncGenerator, ) mypy-0.761/mypy/typeshed/stdlib/3/compileall.pyi0000644€tŠÔÚ€2›s®0000000427513576752252026037 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for compileall (Python 3) import os import sys from typing import Any, Optional, Union, Pattern if sys.version_info < (3, 6): _Path = Union[str, bytes] _SuccessType = bool else: _Path = Union[str, bytes, os.PathLike] _SuccessType = int if sys.version_info >= (3, 7): from py_compile import PycInvalidationMode def compile_dir( dir: _Path, maxlevels: int = ..., ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern[Any]] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ..., workers: int = ..., invalidation_mode: Optional[PycInvalidationMode] = ..., ) -> _SuccessType: ... def compile_file( fullname: _Path, ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern[Any]] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ..., invalidation_mode: Optional[PycInvalidationMode] = ..., ) -> _SuccessType: ... def compile_path( skip_curdir: bool = ..., maxlevels: int = ..., force: bool = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ..., invalidation_mode: Optional[PycInvalidationMode] = ..., ) -> _SuccessType: ... 
else: # rx can be any object with a 'search' method; once we have Protocols we can change the type def compile_dir( dir: _Path, maxlevels: int = ..., ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern[Any]] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ..., workers: int = ..., ) -> _SuccessType: ... def compile_file( fullname: _Path, ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern[Any]] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ..., ) -> _SuccessType: ... def compile_path( skip_curdir: bool = ..., maxlevels: int = ..., force: bool = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ..., ) -> _SuccessType: ... mypy-0.761/mypy/typeshed/stdlib/3/concurrent/0000755€tŠÔÚ€2›s®0000000000013576752267025353 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/concurrent/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252027615 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/concurrent/futures/0000755€tŠÔÚ€2›s®0000000000013576752267027050 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/concurrent/futures/__init__.pyi0000644€tŠÔÚ€2›s®0000000015413576752252031324 0ustar jukkaDROPBOX\Domain Users00000000000000from ._base import * # noqa: F403 from .thread import * # noqa: F403 from .process import * # noqa: F403 mypy-0.761/mypy/typeshed/stdlib/3/concurrent/futures/_base.pyi0000644€tŠÔÚ€2›s®0000000752313576752252030645 0ustar jukkaDROPBOX\Domain Users00000000000000import threading from logging import Logger from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple, Optional, Set, List from types import TracebackType import sys FIRST_COMPLETED: str FIRST_EXCEPTION: str ALL_COMPLETED: str PENDING: str RUNNING: str CANCELLED: str CANCELLED_AND_NOTIFIED: str FINISHED: str LOGGER: Logger class Error(Exception): ... class CancelledError(Error): ... class TimeoutError(Error): ... 
if sys.version_info >= (3, 7): class BrokenExecutor(RuntimeError): ... _T = TypeVar('_T') class Future(Generic[_T]): def __init__(self) -> None: ... def cancel(self) -> bool: ... def cancelled(self) -> bool: ... def running(self) -> bool: ... def done(self) -> bool: ... def add_done_callback(self, fn: Callable[[Future[_T]], Any]) -> None: ... def result(self, timeout: Optional[float] = ...) -> _T: ... def set_running_or_notify_cancel(self) -> bool: ... def set_result(self, result: _T) -> None: ... if sys.version_info >= (3,): def exception(self, timeout: Optional[float] = ...) -> Optional[BaseException]: ... def set_exception(self, exception: Optional[BaseException]) -> None: ... else: def exception(self, timeout: Optional[float] = ...) -> Any: ... def exception_info(self, timeout: Optional[float] = ...) -> Tuple[Any, Optional[TracebackType]]: ... def set_exception(self, exception: Any) -> None: ... def set_exception_info(self, exception: Any, traceback: Optional[TracebackType]) -> None: ... class Executor: def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ... if sys.version_info >= (3, 5): def map(self, func: Callable[..., _T], *iterables: Iterable[Any], timeout: Optional[float] = ..., chunksize: int = ...) -> Iterator[_T]: ... else: def map(self, func: Callable[..., _T], *iterables: Iterable[Any], timeout: Optional[float] = ...,) -> Iterator[_T]: ... def shutdown(self, wait: bool = ...) -> None: ... def __enter__(self: _T) -> _T: ... def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Optional[bool]: ... def as_completed(fs: Iterable[Future[_T]], timeout: Optional[float] = ...) -> Iterator[Future[_T]]: ... def wait(fs: Iterable[Future[_T]], timeout: Optional[float] = ..., return_when: str = ...) -> Tuple[Set[Future[_T]], Set[Future[_T]]]: ... class _Waiter: event: threading.Event finished_futures: List[Future[Any]] def __init__(self) -> None: ... def add_result(self, future: Future[Any]) -> None: ... 
def add_exception(self, future: Future[Any]) -> None: ... def add_cancelled(self, future: Future[Any]) -> None: ... class _AsCompletedWaiter(_Waiter): lock: threading.Lock def __init__(self) -> None: ... def add_result(self, future: Future[Any]) -> None: ... def add_exception(self, future: Future[Any]) -> None: ... def add_cancelled(self, future: Future[Any]) -> None: ... class _FirstCompletedWaiter(_Waiter): def add_result(self, future: Future[Any]) -> None: ... def add_exception(self, future: Future[Any]) -> None: ... def add_cancelled(self, future: Future[Any]) -> None: ... class _AllCompletedWaiter(_Waiter): num_pending_calls: int stop_on_exception: bool lock: threading.Lock def __init__(self, num_pending_calls: int, stop_on_exception: bool) -> None: ... def add_result(self, future: Future[Any]) -> None: ... def add_exception(self, future: Future[Any]) -> None: ... def add_cancelled(self, future: Future[Any]) -> None: ... class _AcquireFutures: futures: Iterable[Future[Any]] def __init__(self, futures: Iterable[Future[Any]]) -> None: ... def __enter__(self) -> None: ... def __exit__(self, *args: Any) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/concurrent/futures/process.pyi0000644€tŠÔÚ€2›s®0000000127313576752252031246 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Optional, Tuple from ._base import Executor import sys EXTRA_QUEUED_CALLS: Any if sys.version_info >= (3,): class BrokenProcessPool(RuntimeError): ... if sys.version_info >= (3, 7): from multiprocessing.context import BaseContext class ProcessPoolExecutor(Executor): def __init__(self, max_workers: Optional[int] = ..., mp_context: Optional[BaseContext] = ..., initializer: Optional[Callable[..., None]] = ..., initargs: Tuple[Any, ...] = ...) -> None: ... else: class ProcessPoolExecutor(Executor): def __init__(self, max_workers: Optional[int] = ...) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/concurrent/futures/thread.pyi0000644€tŠÔÚ€2›s®0000000221213576752252031031 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterable, Mapping, Optional, Tuple, TypeVar, Generic from ._base import Executor, Future import sys if sys.version_info >= (3, 7): from ._base import BrokenExecutor class BrokenThreadPool(BrokenExecutor): ... _S = TypeVar('_S') class ThreadPoolExecutor(Executor): if sys.version_info >= (3, 7): def __init__(self, max_workers: Optional[int] = ..., thread_name_prefix: str = ..., initializer: Optional[Callable[..., None]] = ..., initargs: Tuple[Any, ...] = ...) -> None: ... elif sys.version_info >= (3, 6) or sys.version_info < (3,): def __init__(self, max_workers: Optional[int] = ..., thread_name_prefix: str = ...) -> None: ... else: def __init__(self, max_workers: Optional[int] = ...) -> None: ... class _WorkItem(Generic[_S]): future: Future[_S] fn: Callable[..., _S] args: Iterable[Any] kwargs: Mapping[str, Any] def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... def run(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/configparser.pyi0000644€tŠÔÚ€2›s®0000002026713576752252026377 0ustar jukkaDROPBOX\Domain Users00000000000000# Based on http://docs.python.org/3.5/library/configparser.html and on # reading configparser.py. 
import sys from typing import (AbstractSet, MutableMapping, Mapping, Dict, Sequence, List, Union, Iterable, Iterator, Callable, Any, IO, overload, Optional, Pattern, Type, TypeVar, ClassVar) # Types only used in type comments only from typing import Optional, Tuple # noqa if sys.version_info >= (3, 6): from os import PathLike # Internal type aliases _section = Mapping[str, str] _parser = MutableMapping[str, _section] _converter = Callable[[str], Any] _converters = Dict[str, _converter] _T = TypeVar('_T') if sys.version_info >= (3, 7): _Path = Union[str, bytes, PathLike[str]] elif sys.version_info >= (3, 6): _Path = Union[str, PathLike[str]] else: _Path = str DEFAULTSECT: str MAX_INTERPOLATION_DEPTH: int class Interpolation: def before_get(self, parser: _parser, section: str, option: str, value: str, defaults: _section) -> str: ... def before_set(self, parser: _parser, section: str, option: str, value: str) -> str: ... def before_read(self, parser: _parser, section: str, option: str, value: str) -> str: ... def before_write(self, parser: _parser, section: str, option: str, value: str) -> str: ... class BasicInterpolation(Interpolation): ... class ExtendedInterpolation(Interpolation): ... class LegacyInterpolation(Interpolation): ... class RawConfigParser(_parser): BOOLEAN_STATES: ClassVar[Mapping[str, bool]] = ... # Undocumented def __init__(self, defaults: Optional[_section] = ..., dict_type: Type[Mapping[str, str]] = ..., allow_no_value: bool = ..., *, delimiters: Sequence[str] = ..., comment_prefixes: Sequence[str] = ..., inline_comment_prefixes: Optional[Sequence[str]] = ..., strict: bool = ..., empty_lines_in_values: bool = ..., default_section: str = ..., interpolation: Optional[Interpolation] = ...) -> None: ... def __len__(self) -> int: ... def __getitem__(self, section: str) -> SectionProxy: ... def __setitem__(self, section: str, options: _section) -> None: ... def __delitem__(self, section: str) -> None: ... def __iter__(self) -> Iterator[str]: ... 
def defaults(self) -> _section: ... def sections(self) -> List[str]: ... def add_section(self, section: str) -> None: ... def has_section(self, section: str) -> bool: ... def options(self, section: str) -> List[str]: ... def has_option(self, section: str, option: str) -> bool: ... def read(self, filenames: Union[_Path, Iterable[_Path]], encoding: Optional[str] = ...) -> List[str]: ... def read_file(self, f: Iterable[str], source: Optional[str] = ...) -> None: ... def read_string(self, string: str, source: str = ...) -> None: ... def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = ...) -> None: ... def readfp(self, fp: Iterable[str], filename: Optional[str] = ...) -> None: ... # These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together def getint(self, section: str, option: str, *, raw: bool = ..., vars: Optional[_section] = ..., fallback: int = ...) -> int: ... def getfloat(self, section: str, option: str, *, raw: bool = ..., vars: Optional[_section] = ..., fallback: float = ...) -> float: ... def getboolean(self, section: str, option: str, *, raw: bool = ..., vars: Optional[_section] = ..., fallback: bool = ...) -> bool: ... def _get_conv(self, section: str, option: str, conv: Callable[[str], _T], *, raw: bool = ..., vars: Optional[_section] = ..., fallback: _T = ...) -> _T: ... # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore def get(self, section: str, option: str, *, raw: bool = ..., vars: Optional[_section] = ...) -> str: ... @overload def get(self, section: str, option: str, *, raw: bool = ..., vars: Optional[_section] = ..., fallback: _T) -> Union[str, _T]: ... @overload def items(self, *, raw: bool = ..., vars: Optional[_section] = ...) -> AbstractSet[Tuple[str, SectionProxy]]: ... @overload def items(self, section: str, raw: bool = ..., vars: Optional[_section] = ...) -> List[Tuple[str, str]]: ... 
def set(self, section: str, option: str, value: str) -> None: ... def write(self, fileobject: IO[str], space_around_delimiters: bool = ...) -> None: ... def remove_option(self, section: str, option: str) -> bool: ... def remove_section(self, section: str) -> bool: ... def optionxform(self, option: str) -> str: ... class ConfigParser(RawConfigParser): def __init__(self, defaults: Optional[_section] = ..., dict_type: Type[Mapping[str, str]] = ..., allow_no_value: bool = ..., delimiters: Sequence[str] = ..., comment_prefixes: Sequence[str] = ..., inline_comment_prefixes: Optional[Sequence[str]] = ..., strict: bool = ..., empty_lines_in_values: bool = ..., default_section: str = ..., interpolation: Optional[Interpolation] = ..., converters: _converters = ...) -> None: ... class SafeConfigParser(ConfigParser): ... class SectionProxy(MutableMapping[str, str]): def __init__(self, parser: RawConfigParser, name: str) -> None: ... def __getitem__(self, key: str) -> str: ... def __setitem__(self, key: str, value: str) -> None: ... def __delitem__(self, key: str) -> None: ... def __contains__(self, key: object) -> bool: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[str]: ... @property def parser(self) -> RawConfigParser: ... @property def name(self) -> str: ... def get(self, option: str, fallback: Optional[str] = ..., *, raw: bool = ..., vars: Optional[_section] = ..., **kwargs: Any) -> str: ... # type: ignore # These are partially-applied version of the methods with the same names in # RawConfigParser; the stubs should be kept updated together def getint(self, option: str, *, raw: bool = ..., vars: Optional[_section] = ..., fallback: int = ...) -> int: ... def getfloat(self, option: str, *, raw: bool = ..., vars: Optional[_section] = ..., fallback: float = ...) -> float: ... def getboolean(self, option: str, *, raw: bool = ..., vars: Optional[_section] = ..., fallback: bool = ...) -> bool: ... 
# SectionProxy can have arbitrary attributes when custon converters are used def __getattr__(self, key: str) -> Callable[..., Any]: ... class ConverterMapping(MutableMapping[str, Optional[_converter]]): GETTERCRE: Pattern[Any] def __init__(self, parser: RawConfigParser) -> None: ... def __getitem__(self, key: str) -> _converter: ... def __setitem__(self, key: str, value: Optional[_converter]) -> None: ... def __delitem__(self, key: str) -> None: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... class Error(Exception): ... class NoSectionError(Error): ... class DuplicateSectionError(Error): section: str source: Optional[str] lineno: Optional[int] class DuplicateOptionError(Error): section: str option: str source: Optional[str] lineno: Optional[int] class NoOptionError(Error): section: str option: str class InterpolationError(Error): section: str option: str class InterpolationDepthError(InterpolationError): ... class InterpolationMissingOptionError(InterpolationError): reference: str class InterpolationSyntaxError(InterpolationError): ... class ParsingError(Error): source: str errors: Sequence[Tuple[int, str]] class MissingSectionHeaderError(ParsingError): lineno: int line: str mypy-0.761/mypy/typeshed/stdlib/3/copyreg.pyi0000644€tŠÔÚ€2›s®0000000132613576752252025360 0ustar jukkaDROPBOX\Domain Users00000000000000 from typing import TypeVar, Callable, Union, Tuple, Any, Optional, SupportsInt, Hashable, List _Type = TypeVar("_Type", bound=type) _Reduce = Union[Tuple[Callable[..., _Type], Tuple[Any, ...]], Tuple[Callable[..., _Type], Tuple[Any, ...], Optional[Any]]] __all__: List[str] def pickle(ob_type: _Type, pickle_function: Callable[[_Type], Union[str, _Reduce[_Type]]], constructor_ob: Optional[Callable[[_Reduce[_Type]], _Type]] = ...) -> None: ... def constructor(object: Callable[[_Reduce[_Type]], _Type]) -> None: ... def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: ... 
def remove_extension(module: Hashable, name: Hashable, code: int) -> None: ... def clear_extension_cache() -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/0000755€tŠÔÚ€2›s®0000000000013576752267024260 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/email/__init__.pyi0000644€tŠÔÚ€2›s®0000000142513576752252026536 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email (Python 3.4) from typing import Callable, IO from email.message import Message from email.policy import Policy def message_from_string(s: str, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_bytes(s: bytes, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_file(fp: IO[str], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_binary_file(fp: IO[bytes], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... # Names in __all__ with no definition: # base64mime # charset # encoders # errors # feedparser # generator # header # iterators # message # mime # parser # quoprimime # utils mypy-0.761/mypy/typeshed/stdlib/3/email/charset.pyi0000644€tŠÔÚ€2›s®0000000220413576752252026424 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.charset (Python 3.4) from typing import List, Optional, Iterator, Any QP: int # undocumented BASE64: int # undocumented SHORTEST: int # undocumented class Charset: input_charset: str header_encoding: int body_encoding: int output_charset: Optional[str] input_codec: Optional[str] output_codec: Optional[str] def __init__(self, input_charset: str = ...) -> None: ... def get_body_encoding(self) -> str: ... def get_output_charset(self) -> Optional[str]: ... def header_encode(self, string: str) -> str: ... def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> List[str]: ... def body_encode(self, string: str) -> str: ... def __str__(self) -> str: ... 
def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... def add_charset(charset: str, header_enc: Optional[int] = ..., body_enc: Optional[int] = ..., output_charset: Optional[str] = ...) -> None: ... def add_alias(alias: str, canonical: str) -> None: ... def add_codec(charset: str, codecname: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/contentmanager.pyi0000644€tŠÔÚ€2›s®0000000110413576752252027776 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.contentmanager (Python 3.4) from typing import Any, Callable from email.message import Message class ContentManager: def __init__(self) -> None: ... def get_content(self, msg: Message, *args: Any, **kw: Any) -> Any: ... def set_content(self, msg: Message, obj: Any, *args: Any, **kw: Any) -> Any: ... def add_get_handler(self, key: str, handler: Callable[..., Any]) -> None: ... def add_set_handler(self, typekey: type, handler: Callable[..., Any]) -> None: ... raw_data_manager: ContentManager mypy-0.761/mypy/typeshed/stdlib/3/email/encoders.pyi0000644€tŠÔÚ€2›s®0000000037713576752252026606 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.encoders (Python 3.4) from email.message import Message def encode_base64(msg: Message) -> None: ... def encode_quopri(msg: Message) -> None: ... def encode_7or8bit(msg: Message) -> None: ... def encode_noop(msg: Message) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/errors.pyi0000644€tŠÔÚ€2›s®0000000153713576752252026317 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.errors (Python 3.4) class MessageError(Exception): ... class MessageParseError(MessageError): ... class HeaderParseError(MessageParseError): ... class BoundaryError(MessageParseError): ... class MultipartConversionError(MessageError, TypeError): ... class MessageDefect(ValueError): ... class NoBoundaryInMultipartDefect(MessageDefect): ... class StartBoundaryNotFoundDefect(MessageDefect): ... 
class FirstHeaderLineIsContinuationDefect(MessageDefect): ... class MisplacedEnvelopeHeaderDefect(MessageDefect): ... class MalformedHeaderDefect(MessageDefect): ... class MultipartInvariantViolationDefect(MessageDefect): ... class InvalidBase64PaddingDefect(MessageDefect): ... class InvalidBase64CharactersDefect(MessageDefect): ... class CloseBoundaryNotFoundDefect(MessageDefect): ... class MissingHeaderBodySeparatorDefect(MessageDefect): ... mypy-0.761/mypy/typeshed/stdlib/3/email/feedparser.pyi0000644€tŠÔÚ€2›s®0000000107413576752252027117 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.feedparser (Python 3.4) from typing import Callable from email.message import Message from email.policy import Policy class FeedParser: def __init__(self, _factory: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... def feed(self, data: str) -> None: ... def close(self) -> Message: ... class BytesFeedParser: def __init__(self, _factory: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... def feed(self, data: str) -> None: ... def close(self) -> Message: ... mypy-0.761/mypy/typeshed/stdlib/3/email/generator.pyi0000644€tŠÔÚ€2›s®0000000214613576752252026766 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.generator (Python 3.4) from typing import BinaryIO, TextIO, Optional from email.message import Message from email.policy import Policy class Generator: def clone(self, fp: TextIO) -> Generator: ... def write(self, s: str) -> None: ... def __init__(self, outfp: TextIO, mangle_from_: bool = ..., maxheaderlen: int = ..., *, policy: Policy = ...) -> None: ... def flatten(self, msg: Message, unixfrom: bool = ..., linesep: Optional[str] = ...) -> None: ... class BytesGenerator: def clone(self, fp: BinaryIO) -> BytesGenerator: ... def write(self, s: str) -> None: ... def __init__(self, outfp: BinaryIO, mangle_from_: bool = ..., maxheaderlen: int = ..., *, policy: Policy = ...) -> None: ... 
def flatten(self, msg: Message, unixfrom: bool = ..., linesep: Optional[str] = ...) -> None: ... class DecodedGenerator(Generator): def __init__(self, outfp: TextIO, mangle_from_: bool = ..., maxheaderlen: int = ..., *, fmt: Optional[str] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/header.pyi0000644€tŠÔÚ€2›s®0000000216713576752252026233 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.header (Python 3.4) from typing import Union, Optional, Any, List, Tuple from email.charset import Charset class Header: def __init__(self, s: Union[bytes, str, None] = ..., charset: Union[Charset, str, None] = ..., maxlinelen: Optional[int] = ..., header_name: Optional[str] = ..., continuation_ws: str = ..., errors: str = ...) -> None: ... def append(self, s: Union[bytes, str], charset: Union[Charset, str, None] = ..., errors: str = ...) -> None: ... def encode(self, splitchars: str = ..., maxlinelen: Optional[int] = ..., linesep: str = ...) -> str: ... def __str__(self) -> str: ... def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... def decode_header(header: Union[Header, str]) -> List[Tuple[bytes, Optional[str]]]: ... def make_header(decoded_seq: List[Tuple[bytes, Optional[str]]], maxlinelen: Optional[int] = ..., header_name: Optional[str] = ..., continuation_ws: str = ...) -> Header: ... mypy-0.761/mypy/typeshed/stdlib/3/email/headerregistry.pyi0000644€tŠÔÚ€2›s®0000000564113576752252030024 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.headerregistry (Python 3.4) from datetime import datetime as _datetime from typing import Dict, Tuple, Optional, Any, Union, Mapping from email.errors import MessageDefect from email.policy import Policy class BaseHeader(str): @property def name(self) -> str: ... @property def defects(self) -> Tuple[MessageDefect, ...]: ... @property def max_count(self) -> Optional[int]: ... def __new__(cls, name: str, value: Any) -> BaseHeader: ... 
def init(self, *args: Any, **kw: Any) -> None: ... def fold(self, *, policy: Policy) -> str: ... class UnstructuredHeader: @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class UniqueUnstructuredHeader(UnstructuredHeader): ... class DateHeader: datetime: _datetime @classmethod def parse(cls, string: Union[str, _datetime], kwds: Dict[str, Any]) -> None: ... class UniqueDateHeader(DateHeader): ... class AddressHeader: groups: Tuple[Group, ...] addresses: Tuple[Address, ...] @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class UniqueAddressHeader(AddressHeader): ... class SingleAddressHeader(AddressHeader): @property def address(self) -> Address: ... class UniqueSingleAddressHeader(SingleAddressHeader): ... class MIMEVersionHeader: version: Optional[str] major: Optional[int] minor: Optional[int] @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class ParameterizedMIMEHeader: params: Mapping[str, Any] @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class ContentTypeHeader(ParameterizedMIMEHeader): content_type: str maintype: str subtype: str class ContentDispositionHeader(ParameterizedMIMEHeader): content_disposition: str class ContentTransferEncodingHeader: cte: str @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class HeaderRegistry: def __init__(self, base_class: BaseHeader = ..., default_class: BaseHeader = ..., use_default_map: bool = ...) -> None: ... def map_to_type(self, name: str, cls: BaseHeader) -> None: ... def __getitem__(self, name: str) -> BaseHeader: ... def __call__(self, name: str, value: Any) -> BaseHeader: ... class Address: display_name: str username: str domain: str @property def addr_spec(self) -> str: ... def __init__(self, display_name: str = ..., username: Optional[str] = ..., domain: Optional[str] = ..., addr_spec: Optional[str] = ...) -> None: ... def __str__(self) -> str: ... 
class Group: display_name: Optional[str] addresses: Tuple[Address, ...] def __init__(self, display_name: Optional[str] = ..., addresses: Optional[Tuple[Address, ...]] = ...) -> None: ... def __str__(self) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/email/iterators.pyi0000644€tŠÔÚ€2›s®0000000051713576752252027014 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.iterators (Python 3.4) from typing import Iterator, Optional from email.message import Message def body_line_iterator(msg: Message, decode: bool = ...) -> Iterator[str]: ... def typed_subpart_iterator(msg: Message, maintype: str = ..., subtype: Optional[str] = ...) -> Iterator[str]: ... mypy-0.761/mypy/typeshed/stdlib/3/email/message.pyi0000644€tŠÔÚ€2›s®0000001173413576752252026427 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.message (Python 3.4) from typing import ( List, Optional, Union, Tuple, TypeVar, Generator, Sequence, Iterator, Any ) from email.charset import Charset from email.errors import MessageDefect from email.header import Header from email.policy import Policy from email.contentmanager import ContentManager _T = TypeVar('_T') _PayloadType = Union[List[Message], str, bytes] _CharsetType = Union[Charset, str, None] _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] _ParamType = Union[str, Tuple[Optional[str], Optional[str], str]] _HeaderType = Any class Message: preamble: Optional[str] epilogue: Optional[str] defects: List[MessageDefect] def __str__(self) -> str: ... def is_multipart(self) -> bool: ... def set_unixfrom(self, unixfrom: str) -> None: ... def get_unixfrom(self) -> Optional[str]: ... def attach(self, payload: Message) -> None: ... def get_payload(self, i: int = ..., decode: bool = ...) -> Any: ... # returns Optional[_PayloadType] def set_payload(self, payload: _PayloadType, charset: _CharsetType = ...) -> None: ... def set_charset(self, charset: _CharsetType) -> None: ... def get_charset(self) -> _CharsetType: ... 
def __len__(self) -> int: ... def __contains__(self, name: str) -> bool: ... def __getitem__(self, name: str) -> _HeaderType: ... def __setitem__(self, name: str, val: _HeaderType) -> None: ... def __delitem__(self, name: str) -> None: ... def keys(self) -> List[str]: ... def values(self) -> List[_HeaderType]: ... def items(self) -> List[Tuple[str, _HeaderType]]: ... def get(self, name: str, failobj: _T = ...) -> Union[_HeaderType, _T]: ... def get_all(self, name: str, failobj: _T = ...) -> Union[List[_HeaderType], _T]: ... def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... def replace_header(self, _name: str, _value: _HeaderType) -> None: ... def get_content_type(self) -> str: ... def get_content_maintype(self) -> str: ... def get_content_subtype(self) -> str: ... def get_default_type(self) -> str: ... def set_default_type(self, ctype: str) -> None: ... def get_params(self, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> Union[List[Tuple[str, str]], _T]: ... def get_param(self, param: str, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> Union[_T, _ParamType]: ... def del_param(self, param: str, header: str = ..., requote: bool = ...) -> None: ... def set_type(self, type: str, header: str = ..., requote: bool = ...) -> None: ... def get_filename(self, failobj: _T = ...) -> Union[_T, str]: ... def get_boundary(self, failobj: _T = ...) -> Union[_T, str]: ... def set_boundary(self, boundary: str) -> None: ... def get_content_charset(self, failobj: _T = ...) -> Union[_T, str]: ... def get_charsets(self, failobj: _T = ...) -> Union[_T, List[str]]: ... def walk(self) -> Generator[Message, None, None]: ... def get_content_disposition(self) -> Optional[str]: ... def as_string(self, unixfrom: bool = ..., maxheaderlen: int = ..., policy: Optional[Policy] = ...) -> str: ... def as_bytes(self, unixfrom: bool = ..., policy: Optional[Policy] = ...) -> bytes: ... def __bytes__(self) -> bytes: ... 
def set_param(self, param: str, value: str, header: str = ..., requote: bool = ..., charset: str = ..., language: str = ..., replace: bool = ...) -> None: ... def __init__(self, policy: Policy = ...) -> None: ... class MIMEPart(Message): def get_body(self, preferencelist: Sequence[str] = ...) -> Optional[Message]: ... def iter_attachments(self) -> Iterator[Message]: ... def iter_parts(self) -> Iterator[Message]: ... def get_content(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> Any: ... def set_content(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> None: ... def make_related(self, boundary: Optional[str] = ...) -> None: ... def make_alternative(self, boundary: Optional[str] = ...) -> None: ... def make_mixed(self, boundary: Optional[str] = ...) -> None: ... def add_related(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> None: ... def add_alternative(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> None: ... def add_attachment(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> None: ... def clear(self) -> None: ... def clear_content(self) -> None: ... def is_attachment(self) -> bool: ... class EmailMessage(MIMEPart): ... 
mypy-0.761/mypy/typeshed/stdlib/3/email/mime/0000755€tŠÔÚ€2›s®0000000000013576752267025207 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/email/mime/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252027451 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/email/mime/application.pyi0000644€tŠÔÚ€2›s®0000000070413576752252030230 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.application (Python 3.4) from typing import Callable, Optional, Tuple, Union from email.mime.nonmultipart import MIMENonMultipart _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEApplication(MIMENonMultipart): def __init__(self, _data: Union[str, bytes], _subtype: str = ..., _encoder: Callable[[MIMEApplication], None] = ..., **_params: _ParamsType) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/mime/audio.pyi0000644€tŠÔÚ€2›s®0000000070113576752252027023 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.audio (Python 3.4) from typing import Callable, Optional, Tuple, Union from email.mime.nonmultipart import MIMENonMultipart _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEAudio(MIMENonMultipart): def __init__(self, _audiodata: Union[str, bytes], _subtype: Optional[str] = ..., _encoder: Callable[[MIMEAudio], None] = ..., **_params: _ParamsType) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/mime/base.pyi0000644€tŠÔÚ€2›s®0000000047513576752252026644 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.base (Python 3.4) from typing import Optional, Tuple, Union import email.message _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEBase(email.message.Message): def __init__(self, _maintype: str, _subtype: str, **_params: _ParamsType) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/email/mime/image.pyi0000644€tŠÔÚ€2›s®0000000070113576752252027004 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.image (Python 3.4) from typing import Callable, Optional, Tuple, Union from email.mime.nonmultipart import MIMENonMultipart _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEImage(MIMENonMultipart): def __init__(self, _imagedata: Union[str, bytes], _subtype: Optional[str] = ..., _encoder: Callable[[MIMEImage], None] = ..., **_params: _ParamsType) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/mime/message.pyi0000644€tŠÔÚ€2›s®0000000036213576752252027351 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.message (Python 3.4) from email.message import Message from email.mime.nonmultipart import MIMENonMultipart class MIMEMessage(MIMENonMultipart): def __init__(self, _msg: Message, _subtype: str = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/mime/multipart.pyi0000644€tŠÔÚ€2›s®0000000071213576752252027745 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.multipart (Python 3.4) from typing import Optional, Sequence, Tuple, Union from email.message import Message from email.mime.base import MIMEBase _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEMultipart(MIMEBase): def __init__(self, _subtype: str = ..., boundary: Optional[str] = ..., _subparts: Optional[Sequence[Message]] = ..., **_params: _ParamsType) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/mime/nonmultipart.pyi0000644€tŠÔÚ€2›s®0000000017613576752252030464 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.nonmultipart (Python 3.4) from email.mime.base import MIMEBase class MIMENonMultipart(MIMEBase): ... 
mypy-0.761/mypy/typeshed/stdlib/3/email/mime/text.pyi0000644€tŠÔÚ€2›s®0000000042313576752252026707 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.text (Python 3.4) from typing import Optional from email.mime.nonmultipart import MIMENonMultipart class MIMEText(MIMENonMultipart): def __init__(self, _text: str, _subtype: str = ..., _charset: Optional[str] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/email/parser.pyi0000644€tŠÔÚ€2›s®0000000263313576752252026275 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.parser (Python 3.4) import email.feedparser from email.message import Message from email.policy import Policy from typing import BinaryIO, Callable, TextIO FeedParser = email.feedparser.FeedParser BytesFeedParser = email.feedparser.BytesFeedParser class Parser: def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... def parse(self, fp: TextIO, headersonly: bool = ...) -> Message: ... def parsestr(self, text: str, headersonly: bool = ...) -> Message: ... class HeaderParser(Parser): def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... def parse(self, fp: TextIO, headersonly: bool = ...) -> Message: ... def parsestr(self, text: str, headersonly: bool = ...) -> Message: ... class BytesHeaderParser(BytesParser): def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... def parse(self, fp: BinaryIO, headersonly: bool = ...) -> Message: ... def parsebytes(self, text: bytes, headersonly: bool = ...) -> Message: ... class BytesParser: def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... def parse(self, fp: BinaryIO, headersonly: bool = ...) -> Message: ... def parsebytes(self, text: bytes, headersonly: bool = ...) -> Message: ... 
mypy-0.761/mypy/typeshed/stdlib/3/email/policy.pyi0000644€tŠÔÚ€2›s®0000000441413576752252026277 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.policy (Python 3.4) from abc import abstractmethod from typing import Any, List, Optional, Tuple, Union, Callable from email.message import Message from email.errors import MessageDefect from email.header import Header from email.contentmanager import ContentManager class Policy: max_line_length: Optional[int] linesep: str cte_type: str raise_on_defect: bool mange_from: bool def __init__(self, **kw: Any) -> None: ... def clone(self, **kw: Any) -> Policy: ... def handle_defect(self, obj: Message, defect: MessageDefect) -> None: ... def register_defect(self, obj: Message, defect: MessageDefect) -> None: ... def header_max_count(self, name: str) -> Optional[int]: ... @abstractmethod def header_source_parse(self, sourcelines: List[str]) -> str: ... @abstractmethod def header_store_parse(self, name: str, value: str) -> Tuple[str, str]: ... @abstractmethod def header_fetch_parse(self, name: str, value: str) -> str: ... @abstractmethod def fold(self, name: str, value: str) -> str: ... @abstractmethod def fold_binary(self, name: str, value: str) -> bytes: ... class Compat32(Policy): def header_source_parse(self, sourcelines: List[str]) -> str: ... def header_store_parse(self, name: str, value: str) -> Tuple[str, str]: ... def header_fetch_parse(self, name: str, value: str) -> Union[str, Header]: ... # type: ignore def fold(self, name: str, value: str) -> str: ... def fold_binary(self, name: str, value: str) -> bytes: ... compat32: Compat32 class EmailPolicy(Policy): utf8: bool refold_source: str header_factory: Callable[[str, str], str] content_manager: ContentManager def header_source_parse(self, sourcelines: List[str]) -> str: ... def header_store_parse(self, name: str, value: str) -> Tuple[str, str]: ... def header_fetch_parse(self, name: str, value: str) -> str: ... 
def fold(self, name: str, value: str) -> str: ... def fold_binary(self, name: str, value: str) -> bytes: ... default: EmailPolicy SMTP: EmailPolicy SMTPUTF8: EmailPolicy HTTP: EmailPolicy strict: EmailPolicy mypy-0.761/mypy/typeshed/stdlib/3/email/utils.pyi0000644€tŠÔÚ€2›s®0000000313613576752252026140 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.utils (Python 3.4) from typing import List, Optional, Tuple, Union from email.charset import Charset import datetime _ParamType = Union[str, Tuple[Optional[str], Optional[str], str]] _PDTZ = Tuple[int, int, int, int, int, int, int, int, int, Optional[int]] def quote(str: str) -> str: ... def unquote(str: str) -> str: ... def parseaddr(address: Optional[str]) -> Tuple[str, str]: ... def formataddr(pair: Tuple[Optional[str], str], charset: Union[str, Charset] = ...) -> str: ... def getaddresses(fieldvalues: List[str]) -> List[Tuple[str, str]]: ... def parsedate(date: str) -> Optional[Tuple[int, int, int, int, int, int, int, int, int]]: ... def parsedate_tz(date: str) -> Optional[_PDTZ]: ... def parsedate_to_datetime(date: str) -> datetime.datetime: ... def mktime_tz(tuple: _PDTZ) -> int: ... def formatdate(timeval: Optional[float] = ..., localtime: bool = ..., usegmt: bool = ...) -> str: ... def format_datetime(dt: datetime.datetime, usegmt: bool = ...) -> str: ... def localtime(dt: Optional[datetime.datetime] = ...) -> datetime.datetime: ... def make_msgid(idstring: Optional[str] = ..., domain: Optional[str] = ...) -> str: ... def decode_rfc2231(s: str) -> Tuple[Optional[str], Optional[str], str]: ... def encode_rfc2231(s: str, charset: Optional[str] = ..., language: Optional[str] = ...) -> str: ... def collapse_rfc2231_value(value: _ParamType, errors: str = ..., fallback_charset: str = ...) -> str: ... def decode_params( params: List[Tuple[str, str]] ) -> List[Tuple[str, _ParamType]]: ... 
mypy-0.761/mypy/typeshed/stdlib/3/encodings/0000755€tŠÔÚ€2›s®0000000000013576752267025142 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/encodings/__init__.pyi0000644€tŠÔÚ€2›s®0000000011313576752252027411 0ustar jukkaDROPBOX\Domain Users00000000000000import codecs def search_function(encoding: str) -> codecs.CodecInfo: ... mypy-0.761/mypy/typeshed/stdlib/3/encodings/utf_8.pyi0000644€tŠÔÚ€2›s®0000000107513576752252026707 0ustar jukkaDROPBOX\Domain Users00000000000000import codecs from typing import Text, Tuple class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input: Text, final: bool = ...) -> bytes: ... class IncrementalDecoder(codecs.BufferedIncrementalDecoder): def _buffer_decode(self, input: bytes, errors: str, final: bool) -> Tuple[Text, int]: ... class StreamWriter(codecs.StreamWriter): ... class StreamReader(codecs.StreamReader): ... def getregentry() -> codecs.CodecInfo: ... def encode(input: Text, errors: Text = ...) -> bytes: ... def decode(input: bytes, errors: Text = ...) -> Text: ... mypy-0.761/mypy/typeshed/stdlib/3/enum.pyi0000644€tŠÔÚ€2›s®0000000546313576752252024662 0ustar jukkaDROPBOX\Domain Users00000000000000# NB: third_party/2/enum.pyi and stdlib/3.4/enum.pyi must remain consistent! import sys from typing import Any, Dict, Iterator, List, Mapping, Type, TypeVar, Union from abc import ABCMeta _T = TypeVar('_T') _S = TypeVar('_S', bound=Type[Enum]) # Note: EnumMeta actually subclasses type directly, not ABCMeta. # This is a temporary workaround to allow multiple creation of enums with builtins # such as str as mixins, which due to the handling of ABCs of builtin types, cause # spurious inconsistent metaclass structure. See #1595. # Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself class EnumMeta(ABCMeta): def __iter__(self: Type[_T]) -> Iterator[_T]: ... def __reversed__(self: Type[_T]) -> Iterator[_T]: ... 
def __contains__(self: Type[_T], member: object) -> bool: ... def __getitem__(self: Type[_T], name: str) -> _T: ... @property def __members__(self: Type[_T]) -> Mapping[str, _T]: ... def __len__(self) -> int: ... class Enum(metaclass=EnumMeta): name: str value: Any _name_: str _value_: Any _member_names_: List[str] # undocumented _member_map_: Dict[str, Enum] # undocumented _value2member_map_: Dict[int, Enum] # undocumented if sys.version_info >= (3, 7): _ignore_: Union[str, List[str]] if sys.version_info >= (3, 6): _order_: str @classmethod def _missing_(cls, value: object) -> Any: ... @staticmethod def _generate_next_value_(name: str, start: int, count: int, last_values: List[Any]) -> Any: ... def __new__(cls: Type[_T], value: object) -> _T: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __dir__(self) -> List[str]: ... def __format__(self, format_spec: str) -> str: ... def __hash__(self) -> Any: ... def __reduce_ex__(self, proto: object) -> Any: ... class IntEnum(int, Enum): value: int def unique(enumeration: _S) -> _S: ... if sys.version_info >= (3, 6): _auto_null: Any # subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto() class auto(IntFlag): value: Any class Flag(Enum): def __contains__(self: _T, other: _T) -> bool: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __bool__(self) -> bool: ... def __or__(self: _T, other: _T) -> _T: ... def __and__(self: _T, other: _T) -> _T: ... def __xor__(self: _T, other: _T) -> _T: ... def __invert__(self: _T) -> _T: ... class IntFlag(int, Flag): def __or__(self: _T, other: Union[int, _T]) -> _T: ... def __and__(self: _T, other: Union[int, _T]) -> _T: ... def __xor__(self: _T, other: Union[int, _T]) -> _T: ... 
__ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ mypy-0.761/mypy/typeshed/stdlib/3/faulthandler.pyi0000644€tŠÔÚ€2›s®0000000124713576752252026363 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Union, Protocol class _HasFileno(Protocol): def fileno(self) -> int: ... _File = Union[_HasFileno, int] def cancel_dump_traceback_later() -> None: ... def disable() -> None: ... def dump_traceback(file: _File = ..., all_threads: bool = ...) -> None: ... def dump_traceback_later(timeout: float, repeat: bool = ..., file: _File = ..., exit: bool = ...) -> None: ... def enable(file: _File = ..., all_threads: bool = ...) -> None: ... def is_enabled() -> bool: ... if sys.platform != "win32": def register(signum: int, file: _File = ..., all_threads: bool = ..., chain: bool = ...) -> None: ... def unregister(signum: int) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/fcntl.pyi0000644€tŠÔÚ€2›s®0000000337413576752252025023 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for fcntl from io import IOBase from typing import Any, IO, Union FASYNC: int FD_CLOEXEC: int DN_ACCESS: int DN_ATTRIB: int DN_CREATE: int DN_DELETE: int DN_MODIFY: int DN_MULTISHOT: int DN_RENAME: int F_DUPFD: int F_DUPFD_CLOEXEC: int F_FULLFSYNC: int F_EXLCK: int F_GETFD: int F_GETFL: int F_GETLEASE: int F_GETLK: int F_GETLK64: int F_GETOWN: int F_NOCACHE: int F_GETSIG: int F_NOTIFY: int F_RDLCK: int F_SETFD: int F_SETFL: int F_SETLEASE: int F_SETLK: int F_SETLK64: int F_SETLKW: int F_SETLKW64: int F_SETOWN: int F_SETSIG: int F_SHLCK: int F_UNLCK: int F_WRLCK: int I_ATMARK: int I_CANPUT: int I_CKBAND: int I_FDINSERT: int I_FIND: int I_FLUSH: int I_FLUSHBAND: int I_GETBAND: int I_GETCLTIME: int I_GETSIG: int I_GRDOPT: int I_GWROPT: int I_LINK: int I_LIST: int I_LOOK: int I_NREAD: int I_PEEK: int I_PLINK: int I_POP: int I_PUNLINK: int I_PUSH: int I_RECVFD: int I_SENDFD: int I_SETCLTIME: int I_SETSIG: int I_SRDOPT: int I_STR: int I_SWROPT: int I_UNLINK: int LOCK_EX: int LOCK_MAND: 
int LOCK_NB: int LOCK_READ: int LOCK_RW: int LOCK_SH: int LOCK_UN: int LOCK_WRITE: int _AnyFile = Union[int, IO[Any], IOBase] # TODO All these return either int or bytes depending on the value of # cmd (not on the type of arg). def fcntl(fd: _AnyFile, cmd: int, arg: Union[int, bytes] = ...) -> Any: ... # TODO This function accepts any object supporting a buffer interface, # as arg, is there a better way to express this than bytes? def ioctl(fd: _AnyFile, request: int, arg: Union[int, bytes] = ..., mutate_flag: bool = ...) -> Any: ... def flock(fd: _AnyFile, operation: int) -> None: ... def lockf(fd: _AnyFile, cmd: int, len: int = ..., start: int = ..., whence: int = ...) -> Any: ... mypy-0.761/mypy/typeshed/stdlib/3/fnmatch.pyi0000644€tŠÔÚ€2›s®0000000055613576752252025334 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for fnmatch # Based on http://docs.python.org/3.2/library/fnmatch.html and # python-lib/fnmatch.py from typing import Iterable, List, AnyStr def fnmatch(name: AnyStr, pat: AnyStr) -> bool: ... def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: ... def filter(names: Iterable[AnyStr], pat: AnyStr) -> List[AnyStr]: ... def translate(pat: str) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/functools.pyi0000644€tŠÔÚ€2›s®0000000675013576752252025732 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Generic, Dict, Iterable, Mapping, Optional, Sequence, Tuple, Type, TypeVar, NamedTuple, Union, overload _AnyCallable = Callable[..., Any] _T = TypeVar("_T") _S = TypeVar("_S") @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... @overload def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ... class _CacheInfo(NamedTuple): hits: int misses: int maxsize: int currsize: int class _lru_cache_wrapper(Generic[_T]): __wrapped__: Callable[..., _T] def __call__(self, *args: Any, **kwargs: Any) -> _T: ... def cache_info(self) -> _CacheInfo: ... 
def cache_clear(self) -> None: ... if sys.version_info >= (3, 8): @overload def lru_cache(maxsize: Optional[int] = ..., typed: bool = ...) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... @overload def lru_cache(maxsize: Callable[..., _T], typed: bool = ...) -> _lru_cache_wrapper[_T]: ... else: def lru_cache(maxsize: Optional[int] = ..., typed: bool = ...) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... WRAPPER_ASSIGNMENTS: Sequence[str] WRAPPER_UPDATES: Sequence[str] def update_wrapper(wrapper: _AnyCallable, wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> _AnyCallable: ... def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_AnyCallable], _AnyCallable]: ... def total_ordering(cls: type) -> type: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], Any]: ... class partial(Generic[_T]): func: Callable[..., _T] args: Tuple[Any, ...] keywords: Dict[str, Any] def __init__(self, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> _T: ... # With protocols, this could change into a generic protocol that defines __get__ and returns _T _Descriptor = Any class partialmethod(Generic[_T]): func: Union[Callable[..., _T], _Descriptor] args: Tuple[Any, ...] keywords: Dict[str, Any] @overload def __init__(self, func: Callable[..., _T], *args: Any, **keywords: Any) -> None: ... @overload def __init__(self, func: _Descriptor, *args: Any, **keywords: Any) -> None: ... def __get__(self, obj: Any, cls: Type[Any]) -> Callable[..., _T]: ... @property def __isabstractmethod__(self) -> bool: ... class _SingleDispatchCallable(Generic[_T]): registry: Mapping[Any, Callable[..., _T]] def dispatch(self, cls: Any) -> Callable[..., _T]: ... @overload def register(self, cls: Any) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... 
@overload def register(self, cls: Any, func: Callable[..., _T]) -> Callable[..., _T]: ... def _clear_cache(self) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> _T: ... def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... if sys.version_info >= (3, 8): class cached_property(Generic[_S, _T]): func: Callable[[_S], _T] attrname: Optional[str] def __init__(self, func: Callable[[_S], _T]) -> None: ... @overload def __get__(self, instance: None, owner: Optional[Type[_S]] = ...) -> cached_property[_S, _T]: ... @overload def __get__(self, instance: _S, owner: Optional[Type[_S]] = ...) -> _T: ... def __set_name__(self, owner: Type[_S], name: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/gc.pyi0000644€tŠÔÚ€2›s®0000000167013576752252024303 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for gc import sys from typing import Any, Dict, List, Optional, Tuple DEBUG_COLLECTABLE: int DEBUG_LEAK: int DEBUG_SAVEALL: int DEBUG_STATS: int DEBUG_UNCOLLECTABLE: int callbacks: List[Any] garbage: List[Any] def collect(generations: int = ...) -> int: ... def disable() -> None: ... def enable() -> None: ... def get_count() -> Tuple[int, int, int]: ... def get_debug() -> int: ... if sys.version_info >= (3, 8): def get_objects(generation: Optional[int] = ...) -> List[Any]: ... else: def get_objects() -> List[Any]: ... def get_referents(*objs: Any) -> List[Any]: ... def get_referrers(*objs: Any) -> List[Any]: ... def get_stats() -> List[Dict[str, Any]]: ... def get_threshold() -> Tuple[int, int, int]: ... def is_tracked(obj: Any) -> bool: ... def isenabled() -> bool: ... def set_debug(flags: int) -> None: ... def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ...) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/getopt.pyi0000644€tŠÔÚ€2›s®0000000065713576752252025220 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for getopt # Based on http://docs.python.org/3.2/library/getopt.html from typing import List, Tuple def getopt(args: List[str], shortopts: str, longopts: List[str] = ...) -> Tuple[List[Tuple[str, str]], List[str]]: ... def gnu_getopt(args: List[str], shortopts: str, longopts: List[str] = ...) -> Tuple[List[Tuple[str, str]], List[str]]: ... class GetoptError(Exception): msg: str opt: str error = GetoptError mypy-0.761/mypy/typeshed/stdlib/3/getpass.pyi0000644€tŠÔÚ€2›s®0000000031313576752252025351 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for getpass from typing import Optional, TextIO def getpass(prompt: str = ..., stream: Optional[TextIO] = ...) -> str: ... def getuser() -> str: ... class GetPassWarning(UserWarning): ... mypy-0.761/mypy/typeshed/stdlib/3/gettext.pyi0000644€tŠÔÚ€2›s®0000000577313576752252025406 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import overload, Any, Container, IO, Iterable, Optional, Type, TypeVar if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal class NullTranslations: def __init__(self, fp: IO[str] = ...) -> None: ... def _parse(self, fp: IO[str]) -> None: ... def add_fallback(self, fallback: NullTranslations) -> None: ... def gettext(self, message: str) -> str: ... def lgettext(self, message: str) -> str: ... def ngettext(self, singular: str, plural: str, n: int) -> str: ... def lngettext(self, singular: str, plural: str, n: int) -> str: ... def pgettext(self, context: str, message: str) -> str: ... def npgettext(self, context: str, msgid1: str, msgid2: str, n: int) -> str: ... def info(self) -> Any: ... def charset(self) -> Any: ... def output_charset(self) -> Any: ... def set_output_charset(self, charset: str) -> None: ... def install(self, names: Optional[Container[str]] = ...) -> None: ... 
class GNUTranslations(NullTranslations): LE_MAGIC: int BE_MAGIC: int def find(domain: str, localedir: Optional[str] = ..., languages: Optional[Iterable[str]] = ..., all: bool = ...) -> Any: ... _T = TypeVar('_T') @overload def translation(domain: str, localedir: Optional[str] = ..., languages: Optional[Iterable[str]] = ..., class_: None = ..., fallback: bool = ..., codeset: Optional[str] = ...) -> NullTranslations: ... @overload def translation(domain: str, localedir: Optional[str] = ..., languages: Optional[Iterable[str]] = ..., class_: Type[_T] = ..., fallback: Literal[False] = ..., codeset: Optional[str] = ...) -> _T: ... @overload def translation(domain: str, localedir: Optional[str] = ..., languages: Optional[Iterable[str]] = ..., class_: Type[_T] = ..., fallback: Literal[True] = ..., codeset: Optional[str] = ...) -> Any: ... def install(domain: str, localedir: Optional[str] = ..., codeset: Optional[str] = ..., names: Optional[Container[str]] = ...) -> None: ... def textdomain(domain: Optional[str] = ...) -> str: ... def bindtextdomain(domain: str, localedir: Optional[str] = ...) -> str: ... def bind_textdomain_codeset(domain: str, codeset: Optional[str] = ...) -> str: ... def dgettext(domain: str, message: str) -> str: ... def ldgettext(domain: str, message: str) -> str: ... def dngettext(domain: str, singular: str, plural: str, n: int) -> str: ... def ldngettext(domain: str, singular: str, plural: str, n: int) -> str: ... def gettext(message: str) -> str: ... def lgettext(message: str) -> str: ... def ngettext(singular: str, plural: str, n: int) -> str: ... def lngettext(singular: str, plural: str, n: int) -> str: ... def pgettext(context: str, message: str) -> str: ... def dpgettext(domain: str, context: str, message: str) -> str: ... def npgettext(context: str, msgid1: str, msgid2: str, n: int) -> str: ... def dnpgettext(domain: str, context: str, msgid1: str, msgid2: str, n: int) -> str: ... 
Catalog = translation mypy-0.761/mypy/typeshed/stdlib/3/glob.pyi0000644€tŠÔÚ€2›s®0000000120013576752252024622 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for glob # Based on http://docs.python.org/3/library/glob.html from typing import List, Iterator, AnyStr, Union import sys if sys.version_info >= (3, 6): def glob0(dirname: AnyStr, pattern: AnyStr) -> List[AnyStr]: ... else: def glob0(dirname: AnyStr, basename: AnyStr) -> List[AnyStr]: ... def glob1(dirname: AnyStr, pattern: AnyStr) -> List[AnyStr]: ... def glob(pathname: AnyStr, *, recursive: bool = ...) -> List[AnyStr]: ... def iglob(pathname: AnyStr, *, recursive: bool = ...) -> Iterator[AnyStr]: ... def escape(pathname: AnyStr) -> AnyStr: ... def has_magic(s: Union[str, bytes]) -> bool: ... # undocumented mypy-0.761/mypy/typeshed/stdlib/3/gzip.pyi0000644€tŠÔÚ€2›s®0000000400413576752252024655 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Optional from os.path import _PathType import _compression import sys import zlib def open(filename, mode: str = ..., compresslevel: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> IO[Any]: ... class _PaddedFile: file: IO[bytes] def __init__(self, f: IO[bytes], prepend: bytes = ...) -> None: ... def read(self, size: int) -> bytes: ... def prepend(self, prepend: bytes = ...) -> None: ... def seek(self, off: int) -> int: ... def seekable(self) -> bool: ... class GzipFile(_compression.BaseStream): myfileobj: Optional[IO[bytes]] mode: str name: str compress: zlib._Compress fileobj: IO[bytes] def __init__(self, filename: Optional[_PathType] = ..., mode: Optional[str] = ..., compresslevel: int = ..., fileobj: Optional[IO[bytes]] = ..., mtime: Optional[float] = ...) -> None: ... @property def filename(self) -> str: ... @property def mtime(self) -> Optional[int]: ... crc: int def write(self, data: bytes) -> int: ... def read(self, size: Optional[int] = ...) -> bytes: ... 
def read1(self, size: int = ...) -> bytes: ... def peek(self, n: int) -> bytes: ... @property def closed(self) -> bool: ... def close(self) -> None: ... def flush(self, zlib_mode: int = ...) -> None: ... def fileno(self) -> int: ... def rewind(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def seekable(self) -> bool: ... def seek(self, offset: int, whence: int = ...) -> int: ... def readline(self, size: int = ...) -> bytes: ... class _GzipReader(_compression.DecompressReader): def __init__(self, fp: IO[bytes]) -> None: ... def read(self, size: int = ...) -> bytes: ... if sys.version_info >= (3, 8): def compress(data, compresslevel: int = ..., *, mtime: Optional[float] = ...) -> bytes: ... else: def compress(data, compresslevel: int = ...) -> bytes: ... def decompress(data: bytes) -> bytes: ... mypy-0.761/mypy/typeshed/stdlib/3/hashlib.pyi0000644€tŠÔÚ€2›s®0000000446113576752252025325 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for hashlib import sys from typing import AbstractSet, Optional, Union _DataType = Union[bytes, bytearray, memoryview] class _Hash(object): digest_size: int block_size: int # [Python documentation note] Changed in version 3.4: The name attribute has # been present in CPython since its inception, but until Python 3.4 was not # formally specified, so may not exist on some platforms name: str def __init__(self, data: _DataType = ...) -> None: ... def copy(self) -> _Hash: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... def update(self, arg: _DataType) -> None: ... def md5(arg: _DataType = ...) -> _Hash: ... def sha1(arg: _DataType = ...) -> _Hash: ... def sha224(arg: _DataType = ...) -> _Hash: ... def sha256(arg: _DataType = ...) -> _Hash: ... def sha384(arg: _DataType = ...) -> _Hash: ... def sha512(arg: _DataType = ...) -> _Hash: ... def new(name: str, data: _DataType = ...) -> _Hash: ... 
algorithms_guaranteed: AbstractSet[str] algorithms_available: AbstractSet[str] def pbkdf2_hmac(hash_name: str, password: _DataType, salt: _DataType, iterations: int, dklen: Optional[int] = ...) -> bytes: ... if sys.version_info >= (3, 6): class _VarLenHash(object): digest_size: int block_size: int name: str def __init__(self, data: _DataType = ...) -> None: ... def copy(self) -> _VarLenHash: ... def digest(self, length: int) -> bytes: ... def hexdigest(self, length: int) -> str: ... def update(self, arg: _DataType) -> None: ... sha3_224 = _Hash sha3_256 = _Hash sha3_384 = _Hash sha3_512 = _Hash shake_128 = _VarLenHash shake_256 = _VarLenHash def scrypt(password: _DataType, *, salt: _DataType, n: int, r: int, p: int, maxmem: int = ..., dklen: int = ...) -> bytes: ... class _BlakeHash(_Hash): MAX_DIGEST_SIZE: int MAX_KEY_SIZE: int PERSON_SIZE: int SALT_SIZE: int def __init__(self, data: _DataType = ..., digest_size: int = ..., key: _DataType = ..., salt: _DataType = ..., person: _DataType = ..., fanout: int = ..., depth: int = ..., leaf_size: int = ..., node_offset: int = ..., node_depth: int = ..., inner_size: int = ..., last_node: bool = ...) -> None: ... blake2b = _BlakeHash blake2s = _BlakeHash mypy-0.761/mypy/typeshed/stdlib/3/heapq.pyi0000644€tŠÔÚ€2›s®0000000134113576752252025003 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for heapq # Based on http://docs.python.org/3.2/library/heapq.html from typing import TypeVar, List, Iterable, Any, Callable, Optional _T = TypeVar('_T') def heappush(heap: List[_T], item: _T) -> None: ... def heappop(heap: List[_T]) -> _T: ... def heappushpop(heap: List[_T], item: _T) -> _T: ... def heapify(x: List[_T]) -> None: ... def heapreplace(heap: List[_T], item: _T) -> _T: ... def merge(*iterables: Iterable[_T], key: Callable[[_T], Any] = ..., reverse: bool = ...) -> Iterable[_T]: ... def nlargest(n: int, iterable: Iterable[_T], key: Optional[Callable[[_T], Any]] = ...) -> List[_T]: ... 
def nsmallest(n: int, iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> List[_T]: ... mypy-0.761/mypy/typeshed/stdlib/3/html/0000755€tŠÔÚ€2›s®0000000000013576752267024135 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/html/__init__.pyi0000644€tŠÔÚ€2›s®0000000017213576752252026411 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr def escape(s: AnyStr, quote: bool = ...) -> AnyStr: ... def unescape(s: AnyStr) -> AnyStr: ... mypy-0.761/mypy/typeshed/stdlib/3/html/entities.pyi0000644€tŠÔÚ€2›s®0000000021013576752252026467 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict name2codepoint: Dict[str, int] html5: Dict[str, str] codepoint2name: Dict[int, str] entitydefs: Dict[str, str] mypy-0.761/mypy/typeshed/stdlib/3/html/parser.pyi0000644€tŠÔÚ€2›s®0000000201413576752252026143 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Optional, Tuple from _markupbase import ParserBase class HTMLParser(ParserBase): def __init__(self, *, convert_charrefs: bool = ...) -> None: ... def feed(self, feed: str) -> None: ... def close(self) -> None: ... def reset(self) -> None: ... def getpos(self) -> Tuple[int, int]: ... def get_starttag_text(self) -> Optional[str]: ... def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None: ... def handle_endtag(self, tag: str) -> None: ... def handle_startendtag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None: ... def handle_data(self, data: str) -> None: ... def handle_entityref(self, name: str) -> None: ... def handle_charref(self, name: str) -> None: ... def handle_comment(self, data: str) -> None: ... def handle_decl(self, decl: str) -> None: ... def handle_pi(self, data: str) -> None: ... def unknown_decl(self, data: str) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/http/0000755€tŠÔÚ€2›s®0000000000013576752267024150 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/http/__init__.pyi0000644€tŠÔÚ€2›s®0000000323113576752252026423 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from enum import IntEnum class HTTPStatus(IntEnum): @property def phrase(self) -> str: ... @property def description(self) -> str: ... CONTINUE: int SWITCHING_PROTOCOLS: int PROCESSING: int OK: int CREATED: int ACCEPTED: int NON_AUTHORITATIVE_INFORMATION: int NO_CONTENT: int RESET_CONTENT: int PARTIAL_CONTENT: int MULTI_STATUS: int ALREADY_REPORTED: int IM_USED: int MULTIPLE_CHOICES: int MOVED_PERMANENTLY: int FOUND: int SEE_OTHER: int NOT_MODIFIED: int USE_PROXY: int TEMPORARY_REDIRECT: int PERMANENT_REDIRECT: int BAD_REQUEST: int UNAUTHORIZED: int PAYMENT_REQUIRED: int FORBIDDEN: int NOT_FOUND: int METHOD_NOT_ALLOWED: int NOT_ACCEPTABLE: int PROXY_AUTHENTICATION_REQUIRED: int REQUEST_TIMEOUT: int CONFLICT: int GONE: int LENGTH_REQUIRED: int PRECONDITION_FAILED: int REQUEST_ENTITY_TOO_LARGE: int REQUEST_URI_TOO_LONG: int UNSUPPORTED_MEDIA_TYPE: int REQUESTED_RANGE_NOT_SATISFIABLE: int EXPECTATION_FAILED: int UNPROCESSABLE_ENTITY: int LOCKED: int FAILED_DEPENDENCY: int UPGRADE_REQUIRED: int PRECONDITION_REQUIRED: int TOO_MANY_REQUESTS: int REQUEST_HEADER_FIELDS_TOO_LARGE: int INTERNAL_SERVER_ERROR: int NOT_IMPLEMENTED: int BAD_GATEWAY: int SERVICE_UNAVAILABLE: int GATEWAY_TIMEOUT: int HTTP_VERSION_NOT_SUPPORTED: int VARIANT_ALSO_NEGOTIATES: int INSUFFICIENT_STORAGE: int LOOP_DETECTED: int NOT_EXTENDED: int NETWORK_AUTHENTICATION_REQUIRED: int if sys.version_info >= (3, 7): MISDIRECTED_REQUEST: int mypy-0.761/mypy/typeshed/stdlib/3/http/client.pyi0000644€tŠÔÚ€2›s®0000001417513576752252026153 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, Dict, IO, Iterable, List, Iterator, Mapping, Optional, Protocol, Tuple, Type, TypeVar, Union, overload, BinaryIO, ) import 
email.message import io from socket import socket import sys import ssl import types _DataType = Union[bytes, IO[Any], Iterable[bytes], str] _T = TypeVar('_T') HTTP_PORT: int HTTPS_PORT: int CONTINUE: int SWITCHING_PROTOCOLS: int PROCESSING: int OK: int CREATED: int ACCEPTED: int NON_AUTHORITATIVE_INFORMATION: int NO_CONTENT: int RESET_CONTENT: int PARTIAL_CONTENT: int MULTI_STATUS: int IM_USED: int MULTIPLE_CHOICES: int MOVED_PERMANENTLY: int FOUND: int SEE_OTHER: int NOT_MODIFIED: int USE_PROXY: int TEMPORARY_REDIRECT: int BAD_REQUEST: int UNAUTHORIZED: int PAYMENT_REQUIRED: int FORBIDDEN: int NOT_FOUND: int METHOD_NOT_ALLOWED: int NOT_ACCEPTABLE: int PROXY_AUTHENTICATION_REQUIRED: int REQUEST_TIMEOUT: int CONFLICT: int GONE: int LENGTH_REQUIRED: int PRECONDITION_FAILED: int REQUEST_ENTITY_TOO_LARGE: int REQUEST_URI_TOO_LONG: int UNSUPPORTED_MEDIA_TYPE: int REQUESTED_RANGE_NOT_SATISFIABLE: int EXPECTATION_FAILED: int UNPROCESSABLE_ENTITY: int LOCKED: int FAILED_DEPENDENCY: int UPGRADE_REQUIRED: int PRECONDITION_REQUIRED: int TOO_MANY_REQUESTS: int REQUEST_HEADER_FIELDS_TOO_LARGE: int INTERNAL_SERVER_ERROR: int NOT_IMPLEMENTED: int BAD_GATEWAY: int SERVICE_UNAVAILABLE: int GATEWAY_TIMEOUT: int HTTP_VERSION_NOT_SUPPORTED: int INSUFFICIENT_STORAGE: int NOT_EXTENDED: int NETWORK_AUTHENTICATION_REQUIRED: int responses: Dict[int, str] class HTTPMessage(email.message.Message): ... class HTTPResponse(io.BufferedIOBase, BinaryIO): msg: HTTPMessage headers: HTTPMessage version: int debuglevel: int closed: bool status: int reason: str def __init__(self, sock: socket, debuglevel: int = ..., method: Optional[str] = ..., url: Optional[str] = ...) -> None: ... def read(self, amt: Optional[int] = ...) -> bytes: ... @overload def getheader(self, name: str) -> Optional[str]: ... @overload def getheader(self, name: str, default: _T) -> Union[str, _T]: ... def getheaders(self) -> List[Tuple[str, str]]: ... def fileno(self) -> int: ... def isclosed(self) -> bool: ... 
def __iter__(self) -> Iterator[bytes]: ... def __enter__(self) -> HTTPResponse: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[types.TracebackType]) -> Optional[bool]: ... def info(self) -> email.message.Message: ... def geturl(self) -> str: ... def getcode(self) -> int: ... def begin(self) -> None: ... # This is an API stub only for the class below, not a class itself. # urllib.request uses it for a parameter. class _HTTPConnectionProtocol(Protocol): if sys.version_info >= (3, 7): def __call__( self, host: str, port: Optional[int] = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ..., blocksize: int = ..., ) -> HTTPConnection: ... else: def __call__( self, host: str, port: Optional[int] = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ..., ) -> HTTPConnection: ... class HTTPConnection: timeout: float host: str port: int sock: Any if sys.version_info >= (3, 7): def __init__( self, host: str, port: Optional[int] = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ..., blocksize: int = ... ) -> None: ... else: def __init__( self, host: str, port: Optional[int] = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ... ) -> None: ... if sys.version_info >= (3, 6): def request(self, method: str, url: str, body: Optional[_DataType] = ..., headers: Mapping[str, str] = ..., *, encode_chunked: bool = ...) -> None: ... else: def request(self, method: str, url: str, body: Optional[_DataType] = ..., headers: Mapping[str, str] = ...) -> None: ... def getresponse(self) -> HTTPResponse: ... def set_debuglevel(self, level: int) -> None: ... def set_tunnel(self, host: str, port: Optional[int] = ..., headers: Optional[Mapping[str, str]] = ...) -> None: ... def connect(self) -> None: ... def close(self) -> None: ... def putrequest(self, method: str, url: str, skip_host: bool = ..., skip_accept_encoding: bool = ...) -> None: ... 
def putheader(self, header: str, *argument: str) -> None: ... if sys.version_info >= (3, 6): def endheaders(self, message_body: Optional[_DataType] = ..., *, encode_chunked: bool = ...) -> None: ... else: def endheaders(self, message_body: Optional[_DataType] = ...) -> None: ... def send(self, data: _DataType) -> None: ... class HTTPSConnection(HTTPConnection): def __init__(self, host: str, port: Optional[int] = ..., key_file: Optional[str] = ..., cert_file: Optional[str] = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ..., *, context: Optional[ssl.SSLContext] = ..., check_hostname: Optional[bool] = ...) -> None: ... class HTTPException(Exception): ... error = HTTPException class NotConnected(HTTPException): ... class InvalidURL(HTTPException): ... class UnknownProtocol(HTTPException): ... class UnknownTransferEncoding(HTTPException): ... class UnimplementedFileMode(HTTPException): ... class IncompleteRead(HTTPException): ... class ImproperConnectionState(HTTPException): ... class CannotSendRequest(ImproperConnectionState): ... class CannotSendHeader(ImproperConnectionState): ... class ResponseNotReady(ImproperConnectionState): ... class BadStatusLine(HTTPException): ... class LineTooLong(HTTPException): ... class RemoteDisconnected(ConnectionResetError, BadStatusLine): ... mypy-0.761/mypy/typeshed/stdlib/3/http/cookiejar.pyi0000644€tŠÔÚ€2›s®0000001143013576752252026632 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Iterable, Iterator, Optional, Sequence, Tuple, TypeVar, Union, overload from http.client import HTTPResponse from urllib.request import Request _T = TypeVar('_T') class LoadError(OSError): ... class CookieJar(Iterable[Cookie]): def __init__(self, policy: Optional[CookiePolicy] = ...) -> None: ... def add_cookie_header(self, request: Request) -> None: ... def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ... def set_policy(self, policy: CookiePolicy) -> None: ... 
def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: ... def set_cookie(self, cookie: Cookie) -> None: ... def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: ... def clear(self, domain: str = ..., path: str = ..., name: str = ...) -> None: ... def clear_session_cookies(self) -> None: ... def __iter__(self) -> Iterator[Cookie]: ... def __len__(self) -> int: ... class FileCookieJar(CookieJar): filename: str delayload: bool def __init__(self, filename: str = ..., delayload: bool = ..., policy: Optional[CookiePolicy] = ...) -> None: ... def save(self, filename: Optional[str] = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... def load(self, filename: Optional[str] = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... def revert(self, filename: Optional[str] = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... class MozillaCookieJar(FileCookieJar): ... class LWPCookieJar(FileCookieJar): def as_lwp_str(self, ignore_discard: bool = ..., ignore_expires: bool = ...) -> str: ... # undocumented class CookiePolicy: netscape: bool rfc2965: bool hide_cookie2: bool def set_ok(self, cookie: Cookie, request: Request) -> bool: ... def return_ok(self, cookie: Cookie, request: Request) -> bool: ... def domain_return_ok(self, domain: str, request: Request) -> bool: ... def path_return_ok(self, path: str, request: Request) -> bool: ... 
class DefaultCookiePolicy(CookiePolicy): rfc2109_as_netscape: bool strict_domain: bool strict_rfc2965_unverifiable: bool strict_ns_unverifiable: bool strict_ns_domain: int strict_ns_set_initial_dollar: bool strict_ns_set_path: bool DomainStrictNoDots: int DomainStrictNonDomain: int DomainRFC2965Match: int DomainLiberal: int DomainStrict: int def __init__(self, blocked_domains: Optional[Sequence[str]] = ..., allowed_domains: Optional[Sequence[str]] = ..., netscape: bool = ..., rfc2965: bool = ..., rfc2109_as_netscape: Optional[bool] = ..., hide_cookie2: bool = ..., strict_domain: bool = ..., strict_rfc2965_unverifiable: bool = ..., strict_ns_unverifiable: bool = ..., strict_ns_domain: int = ..., strict_ns_set_initial_dollar: bool = ..., strict_ns_set_path: bool = ...) -> None: ... def blocked_domains(self) -> Tuple[str, ...]: ... def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: ... def is_blocked(self, domain: str) -> bool: ... def allowed_domains(self) -> Optional[Tuple[str, ...]]: ... def set_allowed_domains(self, allowed_domains: Optional[Sequence[str]]) -> None: ... def is_not_allowed(self, domain: str) -> bool: ... class Cookie: version: Optional[int] name: str value: Optional[str] port: Optional[str] path: str secure: bool expires: Optional[int] discard: bool comment: Optional[str] comment_url: Optional[str] rfc2109: bool port_specified: bool domain: str # undocumented domain_specified: bool domain_initial_dot: bool def __init__(self, version: Optional[int], name: str, value: Optional[str], # undocumented port: Optional[str], port_specified: bool, domain: str, domain_specified: bool, domain_initial_dot: bool, path: str, path_specified: bool, secure: bool, expires: Optional[int], discard: bool, comment: Optional[str], comment_url: Optional[str], rest: Dict[str, str], rfc2109: bool = ...) -> None: ... def has_nonstandard_attr(self, name: str) -> bool: ... @overload def get_nonstandard_attr(self, name: str) -> Optional[str]: ... 
@overload def get_nonstandard_attr(self, name: str, default: _T = ...) -> Union[str, _T]: ... def set_nonstandard_attr(self, name: str, value: str) -> None: ... def is_expired(self, now: int = ...) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/3/http/cookies.pyi0000644€tŠÔÚ€2›s®0000000240013576752252026315 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for http.cookies (Python 3.5) from typing import Generic, Dict, List, Mapping, Optional, TypeVar, Union, Any _DataType = Union[str, Mapping[str, Union[str, Morsel[Any]]]] _T = TypeVar('_T') class CookieError(Exception): ... class Morsel(Dict[str, Any], Generic[_T]): value: str coded_value: _T key: str def set(self, key: str, val: str, coded_val: _T) -> None: ... def isReservedKey(self, K: str) -> bool: ... def output(self, attrs: Optional[List[str]] = ..., header: str = ...) -> str: ... def js_output(self, attrs: Optional[List[str]] = ...) -> str: ... def OutputString(self, attrs: Optional[List[str]] = ...) -> str: ... class BaseCookie(Dict[str, Morsel[_T]], Generic[_T]): def __init__(self, input: Optional[_DataType] = ...) -> None: ... def value_decode(self, val: str) -> _T: ... def value_encode(self, val: _T) -> str: ... def output(self, attrs: Optional[List[str]] = ..., header: str = ..., sep: str = ...) -> str: ... def js_output(self, attrs: Optional[List[str]] = ...) -> str: ... def load(self, rawdata: _DataType) -> None: ... def __setitem__(self, key: str, value: Union[str, Morsel[_T]]) -> None: ... class SimpleCookie(BaseCookie[_T], Generic[_T]): ... 
mypy-0.761/mypy/typeshed/stdlib/3/http/server.pyi0000644€tŠÔÚ€2›s®0000000615013576752252026175 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for http.server (Python 3.4) import sys from typing import Any, BinaryIO, Callable, ClassVar, Dict, List, Mapping, Optional, Sequence, Tuple, Union import socketserver import email.message if sys.version_info >= (3, 7): from builtins import _PathLike class HTTPServer(socketserver.TCPServer): server_name: str server_port: int def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: Callable[..., BaseHTTPRequestHandler]) -> None: ... if sys.version_info >= (3, 7): class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer): daemon_threads: bool # undocumented class BaseHTTPRequestHandler: client_address: Tuple[str, int] server: socketserver.BaseServer close_connection: bool requestline: str command: str path: str request_version: str headers: email.message.Message rfile: BinaryIO wfile: BinaryIO server_version: str sys_version: str error_message_format: str error_content_type: str protocol_version: str MessageClass: type responses: Mapping[int, Tuple[str, str]] weekdayname: ClassVar[Sequence[str]] = ... # Undocumented monthname: ClassVar[Sequence[Optional[str]]] = ... # Undocumented def __init__(self, request: bytes, client_address: Tuple[str, int], server: socketserver.BaseServer) -> None: ... def handle(self) -> None: ... def handle_one_request(self) -> None: ... def handle_expect_100(self) -> bool: ... def send_error(self, code: int, message: Optional[str] = ..., explain: Optional[str] = ...) -> None: ... def send_response(self, code: int, message: Optional[str] = ...) -> None: ... def send_header(self, keyword: str, value: str) -> None: ... def send_response_only(self, code: int, message: Optional[str] = ...) -> None: ... def end_headers(self) -> None: ... def flush_headers(self) -> None: ... def log_request(self, code: Union[int, str] = ..., size: Union[int, str] = ...) -> None: ... 
def log_error(self, format: str, *args: Any) -> None: ... def log_message(self, format: str, *args: Any) -> None: ... def version_string(self) -> str: ... def date_time_string(self, timestamp: Optional[int] = ...) -> str: ... def log_date_time_string(self) -> str: ... def address_string(self) -> str: ... def parse_request(self) -> bool: ... # Undocumented class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): extensions_map: Dict[str, str] if sys.version_info >= (3, 7): def __init__(self, request: bytes, client_address: Tuple[str, int], server: socketserver.BaseServer, directory: Optional[Union[str, _PathLike[str]]]) -> None: ... else: def __init__(self, request: bytes, client_address: Tuple[str, int], server: socketserver.BaseServer) -> None: ... def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): cgi_directories: List[str] def do_POST(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/imp.pyi0000644€tŠÔÚ€2›s®0000000413213576752252024473 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for imp (Python 3.6) import os import sys import types from typing import Any, IO, List, Optional, Tuple, TypeVar, Union from _imp import (lock_held as lock_held, acquire_lock as acquire_lock, release_lock as release_lock, get_frozen_object as get_frozen_object, is_frozen_package as is_frozen_package, init_frozen as init_frozen, is_builtin as is_builtin, is_frozen as is_frozen) from _imp import create_dynamic as create_dynamic _T = TypeVar('_T') if sys.version_info >= (3, 6): _Path = Union[str, os.PathLike[str]] else: _Path = str SEARCH_ERROR: int PY_SOURCE: int PY_COMPILED: int C_EXTENSION: int PY_RESOURCE: int PKG_DIRECTORY: int C_BUILTIN: int PY_FROZEN: int PY_CODERESOURCE: int IMP_HOOK: int def new_module(name: str) -> types.ModuleType: ... def get_magic() -> bytes: ... def get_tag() -> str: ... def cache_from_source(path: _Path, debug_override: Optional[bool] = ...) -> str: ... 
def source_from_cache(path: _Path) -> str: ... def get_suffixes() -> List[Tuple[str, str, int]]: ... class NullImporter: def __init__(self, path: _Path) -> None: ... def find_module(self, fullname: Any) -> None: ... # PathLike doesn't work for the pathname argument here def load_source(name: str, pathname: str, file: Optional[IO[Any]] = ...) -> types.ModuleType: ... def load_compiled(name: str, pathname: str, file: Optional[IO[Any]] = ...) -> types.ModuleType: ... def load_package(name: str, path: _Path) -> types.ModuleType: ... def load_module(name: str, file: IO[Any], filename: str, details: Tuple[str, str, int]) -> types.ModuleType: ... if sys.version_info >= (3, 6): def find_module(name: str, path: Union[None, List[str], List[os.PathLike[str]], List[_Path]] = ...) -> Tuple[IO[Any], str, Tuple[str, str, int]]: ... else: def find_module(name: str, path: Optional[List[str]] = ...) -> Tuple[IO[Any], str, Tuple[str, str, int]]: ... def reload(module: types.ModuleType) -> types.ModuleType: ... def init_builtin(name: str) -> Optional[types.ModuleType]: ... def load_dynamic(name: str, path: str, file: Optional[IO[Any]] = ...) -> types.ModuleType: ... mypy-0.761/mypy/typeshed/stdlib/3/importlib/0000755€tŠÔÚ€2›s®0000000000013576752267025172 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/importlib/__init__.pyi0000644€tŠÔÚ€2›s®0000000112513576752252027445 0ustar jukkaDROPBOX\Domain Users00000000000000from importlib.abc import Loader import types from typing import Any, Mapping, Optional, Sequence def __import__(name: str, globals: Optional[Mapping[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ..., fromlist: Sequence[str] = ..., level: int = ...) -> types.ModuleType: ... def import_module(name: str, package: Optional[str] = ...) -> types.ModuleType: ... def find_loader(name: str, path: Optional[str] = ...) -> Optional[Loader]: ... def invalidate_caches() -> None: ... def reload(module: types.ModuleType) -> types.ModuleType: ... 
mypy-0.761/mypy/typeshed/stdlib/3/importlib/abc.pyi0000644€tŠÔÚ€2›s®0000000670213576752252026441 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod import os import sys import types from typing import Any, IO, Iterator, Mapping, Optional, Sequence, Tuple, Union # Loader is exported from this module, but for circular import reasons # exists in its own stub file (with ModuleSpec and ModuleType). from _importlib_modulespec import Loader as Loader # Exported from _importlib_modulespec import ModuleSpec _Path = Union[bytes, str] class Finder(metaclass=ABCMeta): ... # Technically this class defines the following method, but its subclasses # in this module violate its signature. Since this class is deprecated, it's # easier to simply ignore that this method exists. # @abstractmethod # def find_module(self, fullname: str, # path: Optional[Sequence[_Path]] = ...) -> Optional[Loader]: ... class ResourceLoader(Loader): @abstractmethod def get_data(self, path: _Path) -> bytes: ... class InspectLoader(Loader): def is_package(self, fullname: str) -> bool: ... def get_code(self, fullname: str) -> Optional[types.CodeType]: ... def load_module(self, fullname: str) -> types.ModuleType: ... @abstractmethod def get_source(self, fullname: str) -> Optional[str]: ... def exec_module(self, module: types.ModuleType) -> None: ... @staticmethod def source_to_code(data: Union[bytes, str], path: str = ...) -> types.CodeType: ... class ExecutionLoader(InspectLoader): @abstractmethod def get_filename(self, fullname: str) -> _Path: ... def get_code(self, fullname: str) -> Optional[types.CodeType]: ... class SourceLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): def path_mtime(self, path: _Path) -> float: ... def set_data(self, path: _Path, data: bytes) -> None: ... def get_source(self, fullname: str) -> Optional[str]: ... def path_stats(self, path: _Path) -> Mapping[str, Any]: ... 
class MetaPathFinder(Finder): def find_module(self, fullname: str, path: Optional[Sequence[_Path]]) -> Optional[Loader]: ... def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, path: Optional[Sequence[_Path]], target: Optional[types.ModuleType] = ... ) -> Optional[ModuleSpec]: ... class PathEntryFinder(Finder): def find_module(self, fullname: str) -> Optional[Loader]: ... def find_loader( self, fullname: str ) -> Tuple[Optional[Loader], Sequence[_Path]]: ... def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, target: Optional[types.ModuleType] = ... ) -> Optional[ModuleSpec]: ... class FileLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): name: str path: _Path def __init__(self, fullname: str, path: _Path) -> None: ... def get_data(self, path: _Path) -> bytes: ... def get_filename(self, fullname: str) -> _Path: ... if sys.version_info >= (3, 7): _PathLike = Union[bytes, str, os.PathLike[Any]] class ResourceReader(metaclass=ABCMeta): @abstractmethod def open_resource(self, resource: _PathLike) -> IO[bytes]: ... @abstractmethod def resource_path(self, resource: _PathLike) -> str: ... @abstractmethod def is_resource(self, name: str) -> bool: ... @abstractmethod def contents(self) -> Iterator[str]: ... mypy-0.761/mypy/typeshed/stdlib/3/importlib/machinery.pyi0000644€tŠÔÚ€2›s®0000000754013576752252027674 0ustar jukkaDROPBOX\Domain Users00000000000000import importlib.abc import types from typing import Any, Callable, List, Optional, Sequence, Tuple # ModuleSpec is exported from this module, but for circular import # reasons exists in its own stub file (with Loader and ModuleType). 
from _importlib_modulespec import ModuleSpec as ModuleSpec # Exported class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod def find_module( cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]] ) -> Optional[importlib.abc.Loader]: ... @classmethod def find_spec(cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]], target: Optional[types.ModuleType] = ...) -> Optional[ModuleSpec]: ... # InspectLoader @classmethod def is_package(cls, fullname: str) -> bool: ... @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... @classmethod def get_code(cls, fullname: str) -> None: ... @classmethod def get_source(cls, fullname: str) -> None: ... # Loader @staticmethod def module_repr(module: types.ModuleType) -> str: ... @classmethod def create_module(cls, spec: ModuleSpec) -> Optional[types.ModuleType]: ... @classmethod def exec_module(cls, module: types.ModuleType) -> None: ... class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod def find_module( cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]] ) -> Optional[importlib.abc.Loader]: ... @classmethod def find_spec(cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]], target: Optional[types.ModuleType] = ...) -> Optional[ModuleSpec]: ... # InspectLoader @classmethod def is_package(cls, fullname: str) -> bool: ... @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... @classmethod def get_code(cls, fullname: str) -> None: ... @classmethod def get_source(cls, fullname: str) -> None: ... # Loader @staticmethod def module_repr(module: types.ModuleType) -> str: ... @classmethod def create_module(cls, spec: ModuleSpec) -> Optional[types.ModuleType]: ... @staticmethod def exec_module(module: types.ModuleType) -> None: ... 
class WindowsRegistryFinder(importlib.abc.MetaPathFinder): @classmethod def find_module( cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]] ) -> Optional[importlib.abc.Loader]: ... @classmethod def find_spec(cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]], target: Optional[types.ModuleType] = ...) -> Optional[ModuleSpec]: ... class PathFinder(importlib.abc.MetaPathFinder): ... SOURCE_SUFFIXES: List[str] DEBUG_BYTECODE_SUFFIXES: List[str] OPTIMIZED_BYTECODE_SUFFIXES: List[str] BYTECODE_SUFFIXES: List[str] EXTENSION_SUFFIXES: List[str] def all_suffixes() -> List[str]: ... class FileFinder(importlib.abc.PathEntryFinder): path: str def __init__( self, path: str, *loader_details: Tuple[importlib.abc.Loader, List[str]] ) -> None: ... @classmethod def path_hook( cls, *loader_details: Tuple[importlib.abc.Loader, List[str]] ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... class SourceFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... class SourcelessFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... class ExtensionFileLoader(importlib.abc.ExecutionLoader): def get_filename(self, fullname: str) -> importlib.abc._Path: ... def get_source(self, fullname: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/importlib/metadata.pyi0000644€tŠÔÚ€2›s®0000000726713576752252027503 0ustar jukkaDROPBOX\Domain Users00000000000000import abc import os import pathlib import sys from email.message import Message from importlib.abc import MetaPathFinder from pathlib import Path from typing import Any, Dict, Iterable, List, NamedTuple, Optional, Tuple, Union, overload if sys.version_info >= (3, 8): class PackageNotFoundError(ModuleNotFoundError): ... class EntryPointBase(NamedTuple): name: str value: str class EntryPoint(EntryPointBase): def load(self) -> Any: ... # Callable[[], Any] or an importable module @property def extras(self) -> List[str]: ... 
class PackagePath(pathlib.PurePosixPath): def read_text(self, encoding: str = ...) -> str: ... def read_binary(self) -> bytes: ... def locate(self) -> os.PathLike[str]: ... # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: hash: Optional[FileHash] size: Optional[int] dist: Distribution class FileHash: mode: str value: str def __init__(self, spec: str) -> None: ... class Distribution: @abc.abstractmethod def read_text(self, filename: str) -> Optional[str]: ... @abc.abstractmethod def locate_file(self, path: Union[os.PathLike[str], str]) -> os.PathLike[str]: ... @classmethod def from_name(cls, name: str) -> Distribution: ... @overload @classmethod def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: ... @overload @classmethod def discover( cls, *, context: None = ..., name: Optional[str] = ..., path: List[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... @staticmethod def at(path: Union[str, os.PathLike[str]]) -> PathDistribution: ... @property def metadata(self) -> Message: ... @property def version(self) -> str: ... @property def entry_points(self) -> List[EntryPoint]: ... @property def files(self) -> Optional[List[PackagePath]]: ... @property def requires(self) -> Optional[List[str]]: ... class DistributionFinder(MetaPathFinder): class Context: name: Optional[str] def __init__(self, *, name: Optional[str] = ..., path: List[str] = ..., **kwargs: Any) -> None: ... @property def path(self) -> List[str]: ... @property def pattern(self) -> str: ... @abc.abstractmethod def find_distributions(self, context: Context = ...) -> Iterable[Distribution]: ... class MetadataPathFinder(DistributionFinder): @classmethod def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... class PathDistribution(Distribution): def __init__(self, path: Path) -> None: ... def read_text(self, filename: Union[str, os.PathLike[str]]) -> str: ... 
def locate_file(self, path: Union[str, os.PathLike[str]]) -> os.PathLike[str]: ... def distribution(distribution_name: str) -> Distribution: ... @overload def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... @overload def distributions( *, context: None = ..., name: Optional[str] = ..., path: List[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... def metadata(distribution_name: str) -> Message: ... def version(distribution_name: str) -> str: ... def entry_points() -> Dict[str, Tuple[EntryPoint, ...]]: ... def files(distribution_name: str) -> Optional[List[PackagePath]]: ... def requires(distribution_name: str) -> Optional[List[str]]: ... mypy-0.761/mypy/typeshed/stdlib/3/importlib/resources.pyi0000644€tŠÔÚ€2›s®0000000176113576752252027726 0ustar jukkaDROPBOX\Domain Users00000000000000import sys # This is a >=3.7 module, so we conditionally include its source. if sys.version_info >= (3, 7): import os from pathlib import Path from types import ModuleType from typing import ContextManager, Iterator, Union, BinaryIO, TextIO Package = Union[str, ModuleType] Resource = Union[str, os.PathLike] def open_binary(package: Package, resource: Resource) -> BinaryIO: ... def open_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> TextIO: ... def read_binary(package: Package, resource: Resource) -> bytes: ... def read_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> str: ... def path(package: Package, resource: Resource) -> ContextManager[Path]: ... def is_resource(package: Package, name: str) -> bool: ... def contents(package: Package) -> Iterator[str]: ... 
mypy-0.761/mypy/typeshed/stdlib/3/importlib/util.pyi0000644€tŠÔÚ€2›s®0000000351713576752252026672 0ustar jukkaDROPBOX\Domain Users00000000000000import importlib.abc import importlib.machinery import sys import types from typing import Any, Callable, List, Optional, Union def module_for_loader( fxn: Callable[..., types.ModuleType] ) -> Callable[..., types.ModuleType]: ... def set_loader( fxn: Callable[..., types.ModuleType] ) -> Callable[..., types.ModuleType]: ... def set_package( fxn: Callable[..., types.ModuleType] ) -> Callable[..., types.ModuleType]: ... def resolve_name(name: str, package: str) -> str: ... MAGIC_NUMBER: bytes def cache_from_source(path: str, debug_override: Optional[bool] = ..., *, optimization: Optional[Any] = ...) -> str: ... def source_from_cache(path: str) -> str: ... def decode_source(source_bytes: bytes) -> str: ... def find_spec( name: str, package: Optional[str] = ... ) -> Optional[importlib.machinery.ModuleSpec]: ... def spec_from_loader( name: str, loader: Optional[importlib.abc.Loader], *, origin: Optional[str] = ..., loader_state: Optional[Any] = ..., is_package: Optional[bool] = ... ) -> importlib.machinery.ModuleSpec: ... if sys.version_info >= (3, 6): import os _Path = Union[str, bytes, os.PathLike] else: _Path = str def spec_from_file_location( name: str, location: _Path, *, loader: Optional[importlib.abc.Loader] = ..., submodule_search_locations: Optional[List[str]] = ... ) -> importlib.machinery.ModuleSpec: ... def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: ... class LazyLoader(importlib.abc.Loader): def __init__(self, loader: importlib.abc.Loader) -> None: ... @classmethod def factory(cls, loader: importlib.abc.Loader) -> Callable[..., LazyLoader]: ... def create_module(self, spec: importlib.machinery.ModuleSpec) -> Optional[types.ModuleType]: ... def exec_module(self, module: types.ModuleType) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/inspect.pyi0000644€tŠÔÚ€2›s®0000002344613576752252025364 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import (AbstractSet, Any, Callable, Dict, Generator, List, Mapping, NamedTuple, Optional, Sequence, Tuple, Type, Union, ) from types import (CodeType, FrameType, FunctionType, MethodType, ModuleType, TracebackType, ) from collections import OrderedDict # # Types and members # class EndOfBlock(Exception): ... class BlockFinder: indent: int islambda: bool started: bool passline: bool indecorator: bool decoratorhasargs: bool last: int def tokeneater(self, type: int, token: str, srow_scol: Tuple[int, int], erow_ecol: Tuple[int, int], line: str) -> None: ... CO_OPTIMIZED: int CO_NEWLOCALS: int CO_VARARGS: int CO_VARKEYWORDS: int CO_NESTED: int CO_GENERATOR: int CO_NOFREE: int CO_COROUTINE: int CO_ITERABLE_COROUTINE: int if sys.version_info >= (3, 6): CO_ASYNC_GENERATOR: int TPFLAGS_IS_ABSTRACT: int if sys.version_info < (3, 6): class ModuleInfo(NamedTuple): name: str suffix: str mode: str module_type: int def getmoduleinfo(path: str) -> Optional[ModuleInfo]: ... def getmembers(object: object, predicate: Optional[Callable[[Any], bool]] = ..., ) -> List[Tuple[str, Any]]: ... def getmodulename(path: str) -> Optional[str]: ... def ismodule(object: object) -> bool: ... def isclass(object: object) -> bool: ... def ismethod(object: object) -> bool: ... def isfunction(object: object) -> bool: ... def isgeneratorfunction(object: object) -> bool: ... def isgenerator(object: object) -> bool: ... def iscoroutinefunction(object: object) -> bool: ... def iscoroutine(object: object) -> bool: ... def isawaitable(object: object) -> bool: ... if sys.version_info >= (3, 6): def isasyncgenfunction(object: object) -> bool: ... def isasyncgen(object: object) -> bool: ... def istraceback(object: object) -> bool: ... def isframe(object: object) -> bool: ... def iscode(object: object) -> bool: ... 
def isbuiltin(object: object) -> bool: ... def isroutine(object: object) -> bool: ... def isabstract(object: object) -> bool: ... def ismethoddescriptor(object: object) -> bool: ... def isdatadescriptor(object: object) -> bool: ... def isgetsetdescriptor(object: object) -> bool: ... def ismemberdescriptor(object: object) -> bool: ... # # Retrieving source code # _SourceObjectType = Union[ModuleType, Type[Any], MethodType, FunctionType, TracebackType, FrameType, CodeType, Callable[..., Any]] def findsource(object: _SourceObjectType) -> Tuple[List[str], int]: ... def getabsfile(object: _SourceObjectType) -> str: ... def getblock(lines: Sequence[str]) -> Sequence[str]: ... def getdoc(object: object) -> Optional[str]: ... def getcomments(object: object) -> Optional[str]: ... def getfile(object: _SourceObjectType) -> str: ... def getmodule(object: object) -> Optional[ModuleType]: ... def getsourcefile(object: _SourceObjectType) -> Optional[str]: ... def getsourcelines(object: _SourceObjectType) -> Tuple[List[str], int]: ... def getsource(object: _SourceObjectType) -> str: ... def cleandoc(doc: str) -> str: ... def indentsize(line: str) -> int: ... # # Introspecting callables with the Signature object # def signature(callable: Callable[..., Any], *, follow_wrapped: bool = ...) -> Signature: ... class Signature: def __init__(self, parameters: Optional[Sequence[Parameter]] = ..., *, return_annotation: Any = ...) -> None: ... # TODO: can we be more specific here? empty: object = ... parameters: Mapping[str, Parameter] # TODO: can we be more specific here? return_annotation: Any def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: ... def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: ... def replace(self, *, parameters: Optional[Sequence[Parameter]] = ..., return_annotation: Any = ...) -> Signature: ... @classmethod def from_callable(cls, obj: Callable[..., Any], *, follow_wrapped: bool = ...) -> Signature: ... 
# The name is the same as the enum's name in CPython class _ParameterKind: ... class Parameter: def __init__(self, name: str, kind: _ParameterKind, *, default: Any = ..., annotation: Any = ...) -> None: ... empty: Any = ... name: str default: Any annotation: Any kind: _ParameterKind POSITIONAL_ONLY: _ParameterKind = ... POSITIONAL_OR_KEYWORD: _ParameterKind = ... VAR_POSITIONAL: _ParameterKind = ... KEYWORD_ONLY: _ParameterKind = ... VAR_KEYWORD: _ParameterKind = ... def replace(self, *, name: Optional[str] = ..., kind: Optional[_ParameterKind] = ..., default: Any = ..., annotation: Any = ...) -> Parameter: ... class BoundArguments: arguments: OrderedDict[str, Any] args: Tuple[Any, ...] kwargs: Dict[str, Any] signature: Signature def apply_defaults(self) -> None: ... # # Classes and functions # # TODO: The actual return type should be List[_ClassTreeItem] but mypy doesn't # seem to be supporting this at the moment: # _ClassTreeItem = Union[List[_ClassTreeItem], Tuple[type, Tuple[type, ...]]] def getclasstree(classes: List[type], unique: bool = ...) -> Any: ... class ArgSpec(NamedTuple): args: List[str] varargs: str keywords: str defaults: Tuple[Any, ...] class Arguments(NamedTuple): args: List[str] varargs: Optional[str] varkw: Optional[str] def getargs(co: CodeType) -> Arguments: ... def getargspec(func: object) -> ArgSpec: ... class FullArgSpec(NamedTuple): args: List[str] varargs: Optional[str] varkw: Optional[str] defaults: Optional[Tuple[Any, ...]] kwonlyargs: List[str] kwonlydefaults: Optional[Dict[str, Any]] annotations: Dict[str, Any] def getfullargspec(func: object) -> FullArgSpec: ... class ArgInfo(NamedTuple): args: List[str] varargs: Optional[str] keywords: Optional[str] locals: Dict[str, Any] def getargvalues(frame: FrameType) -> ArgInfo: ... def formatannotation(annotation: object, base_module: Optional[str] = ...) -> str: ... def formatannotationrelativeto(object: object) -> Callable[[object], str]: ... 
def formatargspec(args: List[str], varargs: Optional[str] = ..., varkw: Optional[str] = ..., defaults: Optional[Tuple[Any, ...]] = ..., kwonlyargs: Optional[List[str]] = ..., kwonlydefaults: Optional[Dict[str, Any]] = ..., annotations: Dict[str, Any] = ..., formatarg: Callable[[str], str] = ..., formatvarargs: Callable[[str], str] = ..., formatvarkw: Callable[[str], str] = ..., formatvalue: Callable[[Any], str] = ..., formatreturns: Callable[[Any], str] = ..., formatannotations: Callable[[Any], str] = ..., ) -> str: ... def formatargvalues(args: List[str], varargs: Optional[str] = ..., varkw: Optional[str] = ..., locals: Optional[Dict[str, Any]] = ..., formatarg: Optional[Callable[[str], str]] = ..., formatvarargs: Optional[Callable[[str], str]] = ..., formatvarkw: Optional[Callable[[str], str]] = ..., formatvalue: Optional[Callable[[Any], str]] = ..., ) -> str: ... def getmro(cls: type) -> Tuple[type, ...]: ... def getcallargs(func: Callable[..., Any], *args: Any, **kwds: Any) -> Dict[str, Any]: ... class ClosureVars(NamedTuple): nonlocals: Mapping[str, Any] globals: Mapping[str, Any] builtins: Mapping[str, Any] unbound: AbstractSet[str] def getclosurevars(func: Callable[..., Any]) -> ClosureVars: ... def unwrap(func: Callable[..., Any], *, stop: Optional[Callable[[Any], Any]] = ...) -> Any: ... # # The interpreter stack # class Traceback(NamedTuple): filename: str lineno: int function: str code_context: Optional[List[str]] index: Optional[int] # type: ignore class FrameInfo(NamedTuple): frame: FrameType filename: str lineno: int function: str code_context: Optional[List[str]] index: Optional[int] # type: ignore def getframeinfo(frame: Union[FrameType, TracebackType], context: int = ...) -> Traceback: ... def getouterframes(frame: Any, context: int = ...) -> List[FrameInfo]: ... def getinnerframes(traceback: TracebackType, context: int = ...) -> List[FrameInfo]: ... def getlineno(frame: FrameType) -> int: ... def currentframe() -> Optional[FrameType]: ... 
def stack(context: int = ...) -> List[FrameInfo]: ... def trace(context: int = ...) -> List[FrameInfo]: ... # # Fetching attributes statically # def getattr_static(obj: object, attr: str, default: Optional[Any] = ...) -> Any: ... # # Current State of Generators and Coroutines # # TODO In the next two blocks of code, can we be more specific regarding the # type of the "enums"? GEN_CREATED: str GEN_RUNNING: str GEN_SUSPENDED: str GEN_CLOSED: str def getgeneratorstate(generator: Generator[Any, Any, Any]) -> str: ... CORO_CREATED: str CORO_RUNNING: str CORO_SUSPENDED: str CORO_CLOSED: str # TODO can we be more specific than "object"? def getcoroutinestate(coroutine: object) -> str: ... def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> Dict[str, Any]: ... # TODO can we be more specific than "object"? def getcoroutinelocals(coroutine: object) -> Dict[str, Any]: ... class Attribute(NamedTuple): name: str kind: str defining_class: type object: object def classify_class_attrs(cls: type) -> List[Attribute]: ... mypy-0.761/mypy/typeshed/stdlib/3/io.pyi0000644€tŠÔÚ€2›s®0000002042213576752252024315 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( List, BinaryIO, TextIO, Iterator, Union, Optional, Callable, Tuple, Type, Any, IO, Iterable ) import builtins import codecs from mmap import mmap from types import TracebackType from typing import TypeVar _bytearray_like = Union[bytearray, mmap] DEFAULT_BUFFER_SIZE: int SEEK_SET: int SEEK_CUR: int SEEK_END: int _T = TypeVar('_T', bound=IOBase) open = builtins.open BlockingIOError = builtins.BlockingIOError class UnsupportedOperation(OSError, ValueError): ... class IOBase: def __iter__(self) -> Iterator[bytes]: ... def __next__(self) -> bytes: ... def __enter__(self: _T) -> _T: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> Optional[bool]: ... def close(self) -> None: ... def fileno(self) -> int: ... 
def flush(self) -> None: ... def isatty(self) -> bool: ... def readable(self) -> bool: ... def readlines(self, hint: int = ...) -> List[bytes]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... def writelines(self, lines: Iterable[Union[bytes, bytearray]]) -> None: ... def readline(self, size: int = ...) -> bytes: ... def __del__(self) -> None: ... @property def closed(self) -> bool: ... def _checkClosed(self, msg: Optional[str] = ...) -> None: ... # undocumented class RawIOBase(IOBase): def readall(self) -> bytes: ... def readinto(self, b: bytearray) -> Optional[int]: ... def write(self, b: Union[bytes, bytearray]) -> Optional[int]: ... def read(self, size: int = ...) -> Optional[bytes]: ... class BufferedIOBase(IOBase): raw: RawIOBase # This is not part of the BufferedIOBase API and may not exist on some implementations. def detach(self) -> RawIOBase: ... def readinto(self, b: _bytearray_like) -> int: ... def write(self, b: Union[bytes, bytearray]) -> int: ... def readinto1(self, b: _bytearray_like) -> int: ... def read(self, size: Optional[int] = ...) -> bytes: ... def read1(self, size: int = ...) -> bytes: ... class FileIO(RawIOBase): mode: str name: Union[int, str] def __init__( self, file: Union[str, bytes, int], mode: str = ..., closefd: bool = ..., opener: Optional[Callable[[Union[int, str], str], int]] = ... ) -> None: ... # TODO should extend from BufferedIOBase class BytesIO(BinaryIO): def __init__(self, initial_bytes: bytes = ...) -> None: ... # BytesIO does not contain a "name" field. This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any def getvalue(self) -> bytes: ... def getbuffer(self) -> memoryview: ... # copied from IOBase def __iter__(self) -> Iterator[bytes]: ... def __next__(self) -> bytes: ... 
def __enter__(self) -> BytesIO: ... def __exit__(self, t: Optional[Type[BaseException]] = ..., value: Optional[BaseException] = ..., traceback: Optional[TracebackType] = ...) -> Optional[bool]: ... def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def readable(self) -> bool: ... def readlines(self, hint: int = ...) -> List[bytes]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... # TODO should be the next line instead # def writelines(self, lines: List[Union[bytes, bytearray]]) -> None: ... def writelines(self, lines: Any) -> None: ... def readline(self, size: int = ...) -> bytes: ... def __del__(self) -> None: ... closed: bool # copied from BufferedIOBase def detach(self) -> RawIOBase: ... def readinto(self, b: _bytearray_like) -> int: ... def write(self, b: Union[bytes, bytearray]) -> int: ... def readinto1(self, b: _bytearray_like) -> int: ... def read(self, size: Optional[int] = ...) -> bytes: ... def read1(self, size: int = ...) -> bytes: ... class BufferedReader(BufferedIOBase): def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def peek(self, size: int = ...) -> bytes: ... class BufferedWriter(BufferedIOBase): def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def flush(self) -> None: ... def write(self, b: Union[bytes, bytearray]) -> int: ... class BufferedRandom(BufferedReader, BufferedWriter): def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... class BufferedRWPair(BufferedIOBase): def __init__(self, reader: RawIOBase, writer: RawIOBase, buffer_size: int = ...) -> None: ... 
class TextIOBase(IOBase): encoding: str errors: Optional[str] newlines: Union[str, Tuple[str, ...], None] def __iter__(self) -> Iterator[str]: ... # type: ignore def __next__(self) -> str: ... # type: ignore def detach(self) -> IOBase: ... def write(self, s: str) -> int: ... def writelines(self, lines: List[str]) -> None: ... # type: ignore def readline(self, size: int = ...) -> str: ... # type: ignore def readlines(self, hint: int = ...) -> List[str]: ... # type: ignore def read(self, size: Optional[int] = ...) -> str: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... # TODO should extend from TextIOBase class TextIOWrapper(TextIO): line_buffering: bool # TODO uncomment after fixing mypy about using write_through # def __init__(self, buffer: IO[bytes], encoding: str = ..., # errors: Optional[str] = ..., newline: Optional[str] = ..., # line_buffering: bool = ..., write_through: bool = ...) \ # -> None: ... def __init__( self, buffer: IO[bytes], encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ..., line_buffering: bool = ..., write_through: bool = ... ) -> None: ... # copied from IOBase def __exit__(self, t: Optional[Type[BaseException]] = ..., value: Optional[BaseException] = ..., traceback: Optional[TracebackType] = ...) -> Optional[bool]: ... def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def readable(self) -> bool: ... def readlines(self, hint: int = ...) -> List[str]: ... def seekable(self) -> bool: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... # TODO should be the next line instead # def writelines(self, lines: List[str]) -> None: ... def writelines(self, lines: Any) -> None: ... def __del__(self) -> None: ... closed: bool # copied from TextIOBase encoding: str errors: Optional[str] newlines: Union[str, Tuple[str, ...], None] def __iter__(self) -> Iterator[str]: ... 
def __next__(self) -> str: ... def __enter__(self) -> TextIO: ... def detach(self) -> IOBase: ... def write(self, s: str) -> int: ... def readline(self, size: int = ...) -> str: ... def read(self, size: Optional[int] = ...) -> str: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... class StringIO(TextIOWrapper): def __init__(self, initial_value: str = ..., newline: Optional[str] = ...) -> None: ... # StringIO does not contain a "name" field. This workaround is necessary # to allow StringIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any def getvalue(self) -> str: ... def __enter__(self) -> StringIO: ... class IncrementalNewlineDecoder(codecs.IncrementalDecoder): def __init__(self, decoder: Optional[codecs.IncrementalDecoder], translate: bool, errors: str = ...) -> None: ... def decode(self, input: Union[bytes, str], final: bool = ...) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/ipaddress.pyi0000644€tŠÔÚ€2›s®0000001177713576752252025701 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import (Any, Container, Generic, Iterable, Iterator, Optional, overload, SupportsInt, Text, Tuple, TypeVar) # Undocumented length constants IPV4LENGTH: int IPV6LENGTH: int _A = TypeVar("_A", IPv4Address, IPv6Address) _N = TypeVar("_N", IPv4Network, IPv6Network) _T = TypeVar("_T") def ip_address(address: object) -> Any: ... # morally Union[IPv4Address, IPv6Address] def ip_network(address: object, strict: bool = ...) -> Any: ... # morally Union[IPv4Network, IPv6Network] def ip_interface(address: object) -> Any: ... # morally Union[IPv4Interface, IPv6Interface] class _IPAddressBase: def __eq__(self, other: Any) -> bool: ... def __ge__(self: _T, other: _T) -> bool: ... def __gt__(self: _T, other: _T) -> bool: ... def __le__(self: _T, other: _T) -> bool: ... def __lt__(self: _T, other: _T) -> bool: ... def __ne__(self, other: Any) -> bool: ... @property def compressed(self) -> Text: ... 
@property def exploded(self) -> Text: ... @property def reverse_pointer(self) -> Text: ... @property def version(self) -> int: ... class _BaseAddress(_IPAddressBase, SupportsInt): def __init__(self, address: object) -> None: ... def __add__(self: _T, other: int) -> _T: ... def __hash__(self) -> int: ... def __int__(self) -> int: ... def __sub__(self: _T, other: int) -> _T: ... @property def is_global(self) -> bool: ... @property def is_link_local(self) -> bool: ... @property def is_loopback(self) -> bool: ... @property def is_multicast(self) -> bool: ... @property def is_private(self) -> bool: ... @property def is_reserved(self) -> bool: ... @property def is_unspecified(self) -> bool: ... @property def max_prefixlen(self) -> int: ... @property def packed(self) -> bytes: ... class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): network_address: _A netmask: _A def __init__(self, address: object, strict: bool = ...) -> None: ... def __contains__(self, other: Any) -> bool: ... def __getitem__(self, n: int) -> _A: ... def __iter__(self) -> Iterator[_A]: ... def address_exclude(self: _T, other: _T) -> Iterator[_T]: ... @property def broadcast_address(self) -> _A: ... def compare_networks(self: _T, other: _T) -> int: ... def hosts(self) -> Iterator[_A]: ... @property def is_global(self) -> bool: ... @property def is_link_local(self) -> bool: ... @property def is_loopback(self) -> bool: ... @property def is_multicast(self) -> bool: ... @property def is_private(self) -> bool: ... @property def is_reserved(self) -> bool: ... @property def is_unspecified(self) -> bool: ... @property def max_prefixlen(self) -> int: ... @property def num_addresses(self) -> int: ... def overlaps(self: _T, other: _T) -> bool: ... @property def prefixlen(self) -> int: ... def subnets(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> Iterator[_T]: ... def supernet(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> _T: ... 
@property def with_hostmask(self) -> Text: ... @property def with_netmask(self) -> Text: ... @property def with_prefixlen(self) -> Text: ... @property def hostmask(self) -> _A: ... class _BaseInterface(_BaseAddress, Generic[_A, _N]): hostmask: _A netmask: _A network: _N @property def ip(self) -> _A: ... @property def with_hostmask(self) -> Text: ... @property def with_netmask(self) -> Text: ... @property def with_prefixlen(self) -> Text: ... class IPv4Address(_BaseAddress): ... class IPv4Network(_BaseNetwork[IPv4Address]): ... class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): ... class IPv6Address(_BaseAddress): @property def ipv4_mapped(self) -> Optional[IPv4Address]: ... @property def is_site_local(self) -> bool: ... @property def sixtofour(self) -> Optional[IPv4Address]: ... @property def teredo(self) -> Optional[Tuple[IPv4Address, IPv4Address]]: ... class IPv6Network(_BaseNetwork[IPv6Address]): @property def is_site_local(self) -> bool: ... class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): ... def v4_int_to_packed(address: int) -> bytes: ... def v6_int_to_packed(address: int) -> bytes: ... @overload def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ... @overload def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ... def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ... @overload def get_mixed_type_key(obj: _A) -> Tuple[int, _A]: ... @overload def get_mixed_type_key(obj: IPv4Network) -> Tuple[int, IPv4Address, IPv4Address]: ... @overload def get_mixed_type_key(obj: IPv6Network) -> Tuple[int, IPv6Address, IPv6Address]: ... class AddressValueError(ValueError): ... class NetmaskValueError(ValueError): ... 
mypy-0.761/mypy/typeshed/stdlib/3/itertools.pyi0000644€tŠÔÚ€2›s®0000001041613576752252025734 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for itertools # Based on http://docs.python.org/3.2/library/itertools.html from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple, Generic, Optional) import sys _T = TypeVar('_T') _S = TypeVar('_S') _N = TypeVar('_N', int, float) Predicate = Callable[[_T], object] def count(start: _N = ..., step: _N = ...) -> Iterator[_N]: ... # more general types? def cycle(iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def repeat(object: _T) -> Iterator[_T]: ... @overload def repeat(object: _T, times: int) -> Iterator[_T]: ... if sys.version_info >= (3, 8): @overload def accumulate(iterable: Iterable[_T], func: Callable[[_T, _T], _T] = ...) -> Iterator[_T]: ... @overload def accumulate(iterable: Iterable[_T], func: Callable[[_S, _T], _S], initial: Optional[_S]) -> Iterator[_S]: ... else: def accumulate(iterable: Iterable[_T], func: Callable[[_T, _T], _T] = ...) -> Iterator[_T]: ... class chain(Iterator[_T], Generic[_T]): def __init__(self, *iterables: Iterable[_T]) -> None: ... def __next__(self) -> _T: ... def __iter__(self) -> Iterator[_T]: ... @staticmethod def from_iterable(iterable: Iterable[Iterable[_S]]) -> Iterator[_S]: ... def compress(data: Iterable[_T], selectors: Iterable[Any]) -> Iterator[_T]: ... def dropwhile(predicate: Predicate[_T], iterable: Iterable[_T]) -> Iterator[_T]: ... def filterfalse(predicate: Optional[Predicate[_T]], iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def groupby(iterable: Iterable[_T], key: None = ...) -> Iterator[Tuple[_T, Iterator[_T]]]: ... @overload def groupby(iterable: Iterable[_T], key: Callable[[_T], _S]) -> Iterator[Tuple[_S, Iterator[_T]]]: ... @overload def islice(iterable: Iterable[_T], stop: Optional[int]) -> Iterator[_T]: ... @overload def islice(iterable: Iterable[_T], start: Optional[int], stop: Optional[int], step: Optional[int] = ...) 
-> Iterator[_T]: ... def starmap(func: Callable[..., _S], iterable: Iterable[Iterable[Any]]) -> Iterator[_S]: ... def takewhile(predicate: Predicate[_T], iterable: Iterable[_T]) -> Iterator[_T]: ... def tee(iterable: Iterable[_T], n: int = ...) -> Tuple[Iterator[_T], ...]: ... def zip_longest(*p: Iterable[Any], fillvalue: Any = ...) -> Iterator[Any]: ... _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _T6 = TypeVar('_T6') @overload def product(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], iter6: Iterable[_T6]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def product(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], iter7: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ... @overload def product(*iterables: Iterable[Any], repeat: int = ...) -> Iterator[Tuple[Any, ...]]: ... def permutations(iterable: Iterable[_T], r: Optional[int] = ...) -> Iterator[Tuple[_T, ...]]: ... def combinations(iterable: Iterable[_T], r: int) -> Iterator[Tuple[_T, ...]]: ... def combinations_with_replacement(iterable: Iterable[_T], r: int) -> Iterator[Tuple[_T, ...]]: ... 
mypy-0.761/mypy/typeshed/stdlib/3/json/0000755€tŠÔÚ€2›s®0000000000013576752267024142 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/json/__init__.pyi0000644€tŠÔÚ€2›s®0000000433213576752252026420 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, IO, Optional, Tuple, Callable, Dict, List, Union, Protocol, Type from .decoder import JSONDecoder as JSONDecoder from .encoder import JSONEncoder as JSONEncoder from .decoder import JSONDecodeError as JSONDecodeError def dumps(obj: Any, skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., cls: Optional[Type[JSONEncoder]] = ..., indent: Union[None, int, str] = ..., separators: Optional[Tuple[str, str]] = ..., default: Optional[Callable[[Any], Any]] = ..., sort_keys: bool = ..., **kwds: Any) -> str: ... def dump(obj: Any, fp: IO[str], skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., cls: Optional[Type[JSONEncoder]] = ..., indent: Union[None, int, str] = ..., separators: Optional[Tuple[str, str]] = ..., default: Optional[Callable[[Any], Any]] = ..., sort_keys: bool = ..., **kwds: Any) -> None: ... if sys.version_info >= (3, 6): _LoadsString = Union[str, bytes, bytearray] else: _LoadsString = str def loads(s: _LoadsString, encoding: Any = ..., # ignored and deprecated cls: Optional[Type[JSONDecoder]] = ..., object_hook: Optional[Callable[[Dict[Any, Any]], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ..., **kwds: Any) -> Any: ... class _Reader(Protocol): def read(self) -> _LoadsString: ... 
def load(fp: _Reader, cls: Optional[Type[JSONDecoder]] = ..., object_hook: Optional[Callable[[Dict[Any, Any]], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ..., **kwds: Any) -> Any: ... mypy-0.761/mypy/typeshed/stdlib/3/json/decoder.pyi0000644€tŠÔÚ€2›s®0000000203413576752252026263 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, List, Optional, Tuple class JSONDecodeError(ValueError): msg: str doc: str pos: int lineno: int colno: int def __init__(self, msg: str, doc: str, pos: int) -> None: ... class JSONDecoder: object_hook: Callable[[Dict[str, Any]], Any] parse_float: Callable[[str], Any] parse_int: Callable[[str], Any] parse_constant = ... # Callable[[str], Any] strict: bool object_pairs_hook: Callable[[List[Tuple[str, Any]]], Any] def __init__(self, object_hook: Optional[Callable[[Dict[str, Any]], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., strict: bool = ..., object_pairs_hook: Optional[Callable[[List[Tuple[str, Any]]], Any]] = ...) -> None: ... def decode(self, s: str) -> Any: ... def raw_decode(self, s: str, idx: int = ...) -> Tuple[Any, int]: ... 
mypy-0.761/mypy/typeshed/stdlib/3/json/encoder.pyi0000644€tŠÔÚ€2›s®0000000134613576752252026302 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterator, Optional, Tuple class JSONEncoder: item_separator: str key_separator: str skipkeys: bool ensure_ascii: bool check_circular: bool allow_nan: bool sort_keys: bool indent: int def __init__(self, skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., sort_keys: bool = ..., indent: Optional[int] = ..., separators: Optional[Tuple[str, str]] = ..., default: Optional[Callable[..., Any]] = ...) -> None: ... def default(self, o: Any) -> Any: ... def encode(self, o: Any) -> str: ... def iterencode(self, o: Any, _one_shot: bool = ...) -> Iterator[str]: ... mypy-0.761/mypy/typeshed/stdlib/3/lzma.pyi0000644€tŠÔÚ€2›s®0000000631113576752252024652 0ustar jukkaDROPBOX\Domain Users00000000000000import io import sys from typing import Any, IO, Mapping, Optional, Sequence, Union if sys.version_info >= (3, 6): from os import PathLike _PathOrFile = Union[str, bytes, IO[Any], PathLike[Any]] else: _PathOrFile = Union[str, bytes, IO[Any]] _FilterChain = Sequence[Mapping[str, Any]] FORMAT_AUTO: int FORMAT_XZ: int FORMAT_ALONE: int FORMAT_RAW: int CHECK_NONE: int CHECK_CRC32: int CHECK_CRC64: int CHECK_SHA256: int CHECK_ID_MAX: int CHECK_UNKNOWN: int FILTER_LZMA1: int FILTER_LZMA2: int FILTER_DELTA: int FILTER_X86: int FILTER_IA64: int FILTER_ARM: int FILTER_ARMTHUMB: int FILTER_SPARC: int FILTER_POWERPC: int MF_HC3: int MF_HC4: int MF_BT2: int MF_BT3: int MF_BT4: int MODE_FAST: int MODE_NORMAL: int PRESET_DEFAULT: int PRESET_EXTREME: int # from _lzma.c class LZMADecompressor(object): def __init__(self, format: Optional[int] = ..., memlimit: Optional[int] = ..., filters: Optional[_FilterChain] = ...) -> None: ... def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... @property def check(self) -> int: ... @property def eof(self) -> bool: ... 
@property def unused_data(self) -> bytes: ... @property def needs_input(self) -> bool: ... # from _lzma.c class LZMACompressor(object): def __init__(self, format: Optional[int] = ..., check: int = ..., preset: Optional[int] = ..., filters: Optional[_FilterChain] = ...) -> None: ... def compress(self, data: bytes) -> bytes: ... def flush(self) -> bytes: ... class LZMAError(Exception): ... class LZMAFile(io.BufferedIOBase, IO[bytes]): # type: ignore # python/mypy#5027 def __init__(self, filename: Optional[_PathOrFile] = ..., mode: str = ..., *, format: Optional[int] = ..., check: int = ..., preset: Optional[int] = ..., filters: Optional[_FilterChain] = ...) -> None: ... def close(self) -> None: ... @property def closed(self) -> bool: ... def fileno(self) -> int: ... def seekable(self) -> bool: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def peek(self, size: int = ...) -> bytes: ... def read(self, size: Optional[int] = ...) -> bytes: ... def read1(self, size: int = ...) -> bytes: ... def readline(self, size: int = ...) -> bytes: ... def write(self, data: bytes) -> int: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def open(filename: _PathOrFile, mode: str = ..., *, format: Optional[int] = ..., check: int = ..., preset: Optional[int] = ..., filters: Optional[_FilterChain] = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> IO[Any]: ... def compress(data: bytes, format: int = ..., check: int = ..., preset: Optional[int] = ..., filters: Optional[_FilterChain] = ...) -> bytes: ... def decompress(data: bytes, format: int = ..., memlimit: Optional[int] = ..., filters: Optional[_FilterChain] = ...) -> bytes: ... def is_check_supported(check: int) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/3/msvcrt.pyi0000644€tŠÔÚ€2›s®0000000022413576752252025222 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for msvcrt # NOTE: These are incomplete! 
def get_osfhandle(file: int) -> int: ... def open_osfhandle(handle: int, flags: int) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/0000755€tŠÔÚ€2›s®0000000000013576752267026420 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/__init__.pyi0000644€tŠÔÚ€2›s®0000000677013576752252030706 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for multiprocessing import sys from typing import Any, Callable, Iterable, Mapping, Optional, List, Union, Sequence, Tuple, Type, overload from ctypes import _CData from logging import Logger from multiprocessing import connection, pool, spawn, synchronize from multiprocessing.context import ( AuthenticationError as AuthenticationError, BaseContext, BufferTooShort as BufferTooShort, Process as Process, ProcessError as ProcessError, TimeoutError as TimeoutError, ) from multiprocessing.managers import SyncManager from multiprocessing.process import active_children as active_children, current_process as current_process from multiprocessing.queues import Queue as Queue, SimpleQueue as SimpleQueue, JoinableQueue as JoinableQueue from multiprocessing.spawn import freeze_support as freeze_support from multiprocessing.spawn import set_executable as set_executable if sys.version_info >= (3, 8): from multiprocessing.process import parent_process as parent_process # N.B. The functions below are generated at runtime by partially applying # multiprocessing.context.BaseContext's methods, so the two signatures should # be identical (modulo self). # Sychronization primitives _LockLike = Union[synchronize.Lock, synchronize.RLock] def Barrier(parties: int, action: Optional[Callable[..., Any]] = ..., timeout: Optional[float] = ...) -> synchronize.Barrier: ... def BoundedSemaphore(value: int = ...) -> synchronize.BoundedSemaphore: ... def Condition(lock: Optional[_LockLike] = ...) -> synchronize.Condition: ... def Event(lock: Optional[_LockLike] = ...) -> synchronize.Event: ... 
def Lock() -> synchronize.Lock: ... def RLock() -> synchronize.RLock: ... def Semaphore(value: int = ...) -> synchronize.Semaphore: ... def Pipe(duplex: bool = ...) -> Tuple[connection.Connection, connection.Connection]: ... def Pool(processes: Optional[int] = ..., initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ..., maxtasksperchild: Optional[int] = ...) -> pool.Pool: ... class Array(): value: Any = ... def __init__(self, typecode_or_type: Union[str, Type[_CData]], size_or_initializer: Union[int, Sequence[Any]], *, lock: Union[bool, _LockLike] = ...) -> None: ... def acquire(self) -> bool: ... def release(self) -> bool: ... def get_lock(self) -> _LockLike: ... def get_obj(self) -> Any: ... @overload def __getitem__(self, key: int) -> Any: ... @overload def __getitem__(self, key: slice) -> List[Any]: ... def __getslice__(self, start: int, stop: int) -> Any: ... def __setitem__(self, key: int, value: Any) -> None: ... class Value(): value: Any = ... def __init__(self, typecode_or_type: Union[str, Type[_CData]], *args: Any, lock: Union[bool, _LockLike] = ...) -> None: ... def get_lock(self) -> _LockLike: ... def get_obj(self) -> Any: ... def acquire(self) -> bool: ... def release(self) -> bool: ... # ----- multiprocessing function stubs ----- def allow_connection_pickling() -> None: ... def cpu_count() -> int: ... def get_logger() -> Logger: ... def log_to_stderr(level: Optional[Union[str, int]] = ...) -> Logger: ... def Manager() -> SyncManager: ... def set_forkserver_preload(module_names: List[str]) -> None: ... def get_all_start_methods() -> List[str]: ... def get_context(method: Optional[str] = ...) -> BaseContext: ... def get_start_method(allow_none: bool = ...) -> Optional[str]: ... def set_start_method(method: str, force: Optional[bool] = ...) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/connection.pyi0000644€tŠÔÚ€2›s®0000000405513576752252031300 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Iterable, List, Optional, Tuple, Type, Union import socket import sys import types # https://docs.python.org/3/library/multiprocessing.html#address-formats _Address = Union[str, Tuple[str, int]] class _ConnectionBase: @property def closed(self) -> bool: ... # undocumented @property def readable(self) -> bool: ... # undocumented @property def writable(self) -> bool: ... # undocumented def fileno(self) -> int: ... def close(self) -> None: ... def send_bytes(self, buf: bytes, offset: int = ..., size: Optional[int] = ...) -> None: ... def send(self, obj: Any) -> None: ... def recv_bytes(self, maxlength: Optional[int] = ...) -> bytes: ... def recv_bytes_into(self, buf: Any, offset: int = ...) -> int: ... def recv(self) -> Any: ... def poll(self, timeout: Optional[float] = ...) -> bool: ... class Connection(_ConnectionBase): ... if sys.platform == "win32": class PipeConnection(_ConnectionBase): ... class Listener: def __init__(self, address: Optional[_Address] = ..., family: Optional[str] = ..., backlog: int = ..., authkey: Optional[bytes] = ...) -> None: ... def accept(self) -> Connection: ... def close(self) -> None: ... @property def address(self) -> _Address: ... @property def last_accepted(self) -> Optional[_Address]: ... def __enter__(self) -> Listener: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], exc_tb: Optional[types.TracebackType]) -> None: ... def deliver_challenge(connection: Connection, authkey: bytes) -> None: ... def answer_challenge(connection: Connection, authkey: bytes) -> None: ... def wait(object_list: Iterable[Union[Connection, socket.socket, int]], timeout: Optional[float] = ...) -> List[Union[Connection, socket.socket, int]]: ... 
def Client(address: _Address, family: Optional[str] = ..., authkey: Optional[bytes] = ...) -> Connection: ... def Pipe(duplex: bool = ...) -> Tuple[Connection, Connection]: ... mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/context.pyi0000644€tŠÔÚ€2›s®0000001436113576752252030626 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for multiprocessing.context from logging import Logger import multiprocessing from multiprocessing import synchronize from multiprocessing import queues from multiprocessing.process import BaseProcess import sys from typing import Any, Callable, Iterable, Optional, List, Mapping, Sequence, Type, Union _LockLike = Union[synchronize.Lock, synchronize.RLock] class ProcessError(Exception): ... class BufferTooShort(ProcessError): ... class TimeoutError(ProcessError): ... class AuthenticationError(ProcessError): ... class BaseContext(object): ProcessError: Type[Exception] BufferTooShort: Type[Exception] TimeoutError: Type[Exception] AuthenticationError: Type[Exception] # N.B. The methods below are applied at runtime to generate # multiprocessing.*, so the signatures should be identical (modulo self). @staticmethod def current_process() -> BaseProcess: ... if sys.version_info >= (3, 8): @staticmethod def parent_process() -> Optional[BaseProcess]: ... @staticmethod def active_children() -> List[BaseProcess]: ... def cpu_count(self) -> int: ... # TODO: change return to SyncManager once a stub exists in multiprocessing.managers def Manager(self) -> Any: ... # TODO: change return to Pipe once a stub exists in multiprocessing.connection def Pipe(self, duplex: bool = ...) -> Any: ... def Barrier(self, parties: int, action: Optional[Callable[..., Any]] = ..., timeout: Optional[float] = ...) -> synchronize.Barrier: ... def BoundedSemaphore(self, value: int = ...) -> synchronize.BoundedSemaphore: ... def Condition(self, lock: Optional[_LockLike] = ...) -> synchronize.Condition: ... def Event(self, lock: Optional[_LockLike] = ...) 
-> synchronize.Event: ... def Lock(self) -> synchronize.Lock: ... def RLock(self) -> synchronize.RLock: ... def Semaphore(self, value: int = ...) -> synchronize.Semaphore: ... def Queue(self, maxsize: int = ...) -> queues.Queue[Any]: ... def JoinableQueue(self, maxsize: int = ...) -> queues.JoinableQueue[Any]: ... def SimpleQueue(self) -> queues.SimpleQueue[Any]: ... def Pool( self, processes: Optional[int] = ..., initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ..., maxtasksperchild: Optional[int] = ... ) -> multiprocessing.pool.Pool: ... # TODO: typecode_or_type param is a ctype with a base class of _SimpleCData or array.typecode Need to figure out # how to handle the ctype # TODO: change return to RawValue once a stub exists in multiprocessing.sharedctypes def RawValue(self, typecode_or_type: Any, *args: Any) -> Any: ... # TODO: typecode_or_type param is a ctype with a base class of _SimpleCData or array.typecode Need to figure out # how to handle the ctype # TODO: change return to RawArray once a stub exists in multiprocessing.sharedctypes def RawArray(self, typecode_or_type: Any, size_or_initializer: Union[int, Sequence[Any]]) -> Any: ... # TODO: typecode_or_type param is a ctype with a base class of _SimpleCData or array.typecode Need to figure out # how to handle the ctype # TODO: change return to Value once a stub exists in multiprocessing.sharedctypes def Value( self, typecode_or_type: Any, *args: Any, lock: bool = ... ) -> Any: ... # TODO: typecode_or_type param is a ctype with a base class of _SimpleCData or array.typecode Need to figure out # how to handle the ctype # TODO: change return to Array once a stub exists in multiprocessing.sharedctypes def Array( self, typecode_or_type: Any, size_or_initializer: Union[int, Sequence[Any]], *, lock: bool = ... ) -> Any: ... def freeze_support(self) -> None: ... def get_logger(self) -> Logger: ... def log_to_stderr(self, level: Optional[str] = ...) -> Logger: ... 
def allow_connection_pickling(self) -> None: ... def set_executable(self, executable: str) -> None: ... def set_forkserver_preload(self, module_names: List[str]) -> None: ... def get_context(self, method: Optional[str] = ...) -> BaseContext: ... def get_start_method(self, allow_none: bool = ...) -> str: ... def set_start_method(self, method: Optional[str], force: bool = ...) -> None: ... @property def reducer(self) -> str: ... @reducer.setter def reducer(self, reduction: str) -> None: ... def _check_available(self) -> None: ... class Process(BaseProcess): _start_method: Optional[str] @staticmethod def _Popen(process_obj: BaseProcess) -> DefaultContext: ... class DefaultContext(BaseContext): Process: Type[multiprocessing.Process] def __init__(self, context: BaseContext) -> None: ... def get_context(self, method: Optional[str] = ...) -> BaseContext: ... def set_start_method(self, method: Optional[str], force: bool = ...) -> None: ... def get_start_method(self, allow_none: bool = ...) -> str: ... def get_all_start_methods(self) -> List[str]: ... if sys.platform != 'win32': class ForkProcess(BaseProcess): _start_method: str @staticmethod def _Popen(process_obj: BaseProcess) -> Any: ... class SpawnProcess(BaseProcess): _start_method: str @staticmethod def _Popen(process_obj: BaseProcess) -> SpawnProcess: ... class ForkServerProcess(BaseProcess): _start_method: str @staticmethod def _Popen(process_obj: BaseProcess) -> Any: ... class ForkContext(BaseContext): _name: str Process: Type[ForkProcess] class SpawnContext(BaseContext): _name: str Process: Type[SpawnProcess] class ForkServerContext(BaseContext): _name: str Process: Type[ForkServerProcess] else: class SpawnProcess(BaseProcess): _start_method: str @staticmethod def _Popen(process_obj: BaseProcess) -> Any: ... class SpawnContext(BaseContext): _name: str Process: Type[SpawnProcess] def _force_start_method(method: str) -> None: ... def get_spawning_popen() -> Optional[Any]: ... 
def set_spawning_popen(popen: Any) -> None: ... def assert_spawning(obj: Any) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/dummy/0000755€tŠÔÚ€2›s®0000000000013576752267027553 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/dummy/__init__.pyi0000644€tŠÔÚ€2›s®0000000221513576752252032027 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, List import array import threading import weakref from queue import Queue as Queue JoinableQueue = Queue Barrier = threading.Barrier BoundedSemaphore = threading.BoundedSemaphore Condition = threading.Condition Event = threading.Event Lock = threading.Lock RLock = threading.RLock Semaphore = threading.Semaphore class DummyProcess(threading.Thread): _children: weakref.WeakKeyDictionary[Any, Any] _parent: threading.Thread _pid: None _start_called: int exitcode: Optional[int] def __init__(self, group=..., target=..., name=..., args=..., kwargs=...) -> None: ... Process = DummyProcess class Namespace(object): def __init__(self, **kwds) -> None: ... class Value(object): _typecode: Any _value: Any value: Any def __init__(self, typecode, value, lock=...) -> None: ... def Array(typecode, sequence, lock=...) -> array.array[Any]: ... def Manager() -> Any: ... def Pool(processes=..., initializer=..., initargs=...) -> Any: ... def active_children() -> List[Any]: ... def current_process() -> threading.Thread: ... def freeze_support() -> None: ... def shutdown() -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/dummy/connection.pyi0000644€tŠÔÚ€2›s®0000000211113576752252032422 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Optional, Tuple, TypeVar from queue import Queue families: List[None] _TConnection = TypeVar('_TConnection', bound=Connection) _TListener = TypeVar('_TListener', bound=Listener) class Connection(object): _in: Any _out: Any recv: Any recv_bytes: Any send: Any send_bytes: Any def __enter__(self: _TConnection) -> _TConnection: ... def __exit__(self, exc_type, exc_value, exc_tb) -> None: ... def __init__(self, _in, _out) -> None: ... def close(self) -> None: ... def poll(self, timeout: float = ...) -> bool: ... class Listener(object): _backlog_queue: Optional[Queue[Any]] @property def address(self) -> Optional[Queue[Any]]: ... def __enter__(self: _TListener) -> _TListener: ... def __exit__(self, exc_type, exc_value, exc_tb) -> None: ... def __init__(self, address=..., family=..., backlog=...) -> None: ... def accept(self) -> Connection: ... def close(self) -> None: ... def Client(address) -> Connection: ... def Pipe(duplex: bool = ...) -> Tuple[Connection, Connection]: ... mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/managers.pyi0000644€tŠÔÚ€2›s®0000000553113576752252030736 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for multiprocessing.managers # NOTE: These are incomplete! import queue import sys import threading from typing import ( Any, Callable, ContextManager, Dict, Iterable, Generic, List, Mapping, Optional, Sequence, Tuple, TypeVar, Union, ) from .context import BaseContext if sys.version_info >= (3, 8): from .shared_memory import ShareableList, SharedMemory, _SLT _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') class Namespace: ... _Namespace = Namespace class BaseProxy: ... class ValueProxy(BaseProxy, Generic[_T]): def get(self) -> _T: ... def set(self, value: _T) -> None: ... 
value: _T # Returned by BaseManager.get_server() class Server: address: Any def serve_forever(self) -> None: ... class BaseManager(ContextManager[BaseManager]): def __init__( self, address: Optional[Any] = ..., authkey: Optional[bytes] = ..., serializer: str = ..., ctx: Optional[BaseContext] = ..., ) -> None: ... def get_server(self) -> Server: ... def connect(self) -> None: ... def start(self, initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ...) -> None: ... def shutdown(self) -> None: ... # only available after start() was called def join(self, timeout: Optional[float] = ...) -> None: ... # undocumented @property def address(self) -> Any: ... @classmethod def register(cls, typeid: str, callable: Optional[Callable[..., Any]] = ..., proxytype: Any = ..., exposed: Optional[Sequence[str]] = ..., method_to_typeid: Optional[Mapping[str, str]] = ..., create_method: bool = ...) -> None: ... class SyncManager(BaseManager, ContextManager[SyncManager]): def BoundedSemaphore(self, value: Any = ...) -> threading.BoundedSemaphore: ... def Condition(self, lock: Any = ...) -> threading.Condition: ... def Event(self) -> threading.Event: ... def Lock(self) -> threading.Lock: ... def Namespace(self) -> _Namespace: ... def Queue(self, maxsize: int = ...) -> queue.Queue[Any]: ... def RLock(self) -> threading.RLock: ... def Semaphore(self, value: Any = ...) -> threading.Semaphore: ... def Array(self, typecode: Any, sequence: Sequence[_T]) -> Sequence[_T]: ... def Value(self, typecode: Any, value: _T) -> ValueProxy[_T]: ... def dict(self, sequence: Mapping[_KT, _VT] = ...) -> Dict[_KT, _VT]: ... def list(self, sequence: Sequence[_T] = ...) -> List[_T]: ... class RemoteError(Exception): ... if sys.version_info >= (3, 8): class SharedMemoryServer(Server): ... class SharedMemoryManager(BaseManager): def get_server(self) -> SharedMemoryServer: ... def SharedMemory(self, size: int) -> SharedMemory: ... 
def ShareableList(self, sequence: Optional[Iterable[_SLT]]) -> ShareableList[_SLT]: ... mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/pool.pyi0000644€tŠÔÚ€2›s®0000000657413576752252030122 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, ContextManager, Iterable, Mapping, Optional, List, TypeVar, Generic, Iterator _PT = TypeVar('_PT', bound=Pool) _S = TypeVar('_S') _T = TypeVar('_T') class ApplyResult(Generic[_T]): def get(self, timeout: Optional[float] = ...) -> _T: ... def wait(self, timeout: Optional[float] = ...) -> None: ... def ready(self) -> bool: ... def successful(self) -> bool: ... # alias created during issue #17805 AsyncResult = ApplyResult class MapResult(ApplyResult[List[_T]]): ... class IMapIterator(Iterator[_T]): def __iter__(self: _S) -> _S: ... def next(self, timeout: Optional[float] = ...) -> _T: ... def __next__(self, timeout: Optional[float] = ...) -> _T: ... class IMapUnorderedIterator(IMapIterator[_T]): ... class Pool(ContextManager[Pool]): def __init__(self, processes: Optional[int] = ..., initializer: Optional[Callable[..., None]] = ..., initargs: Iterable[Any] = ..., maxtasksperchild: Optional[int] = ..., context: Optional[Any] = ...) -> None: ... def apply(self, func: Callable[..., _T], args: Iterable[Any] = ..., kwds: Mapping[str, Any] = ...) -> _T: ... def apply_async(self, func: Callable[..., _T], args: Iterable[Any] = ..., kwds: Mapping[str, Any] = ..., callback: Optional[Callable[[_T], None]] = ..., error_callback: Optional[Callable[[BaseException], None]] = ...) -> AsyncResult[_T]: ... def map(self, func: Callable[[_S], _T], iterable: Iterable[_S] = ..., chunksize: Optional[int] = ...) -> List[_T]: ... def map_async(self, func: Callable[[_S], _T], iterable: Iterable[_S] = ..., chunksize: Optional[int] = ..., callback: Optional[Callable[[_T], None]] = ..., error_callback: Optional[Callable[[BaseException], None]] = ...) -> MapResult[_T]: ... 
def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S] = ..., chunksize: Optional[int] = ...) -> IMapIterator[_T]: ... def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S] = ..., chunksize: Optional[int] = ...) -> IMapIterator[_T]: ... def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]] = ..., chunksize: Optional[int] = ...) -> List[_T]: ... def starmap_async(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]] = ..., chunksize: Optional[int] = ..., callback: Optional[Callable[[_T], None]] = ..., error_callback: Optional[Callable[[BaseException], None]] = ...) -> AsyncResult[List[_T]]: ... def close(self) -> None: ... def terminate(self) -> None: ... def join(self) -> None: ... def __enter__(self: _PT) -> _PT: ... class ThreadPool(Pool, ContextManager[ThreadPool]): def __init__(self, processes: Optional[int] = ..., initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ...) -> None: ... # undocumented RUN: int CLOSE: int TERMINATE: int mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/process.pyi0000644€tŠÔÚ€2›s®0000000216613576752252030620 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, List, Mapping, Optional, Tuple class BaseProcess: name: str daemon: bool authkey: bytes def __init__( self, group: None = ..., target: Optional[Callable[..., Any]] = ..., name: Optional[str] = ..., args: Tuple[Any, ...] = ..., kwargs: Mapping[str, Any] = ..., *, daemon: Optional[bool] = ..., ) -> None: ... def run(self) -> None: ... def start(self) -> None: ... def terminate(self) -> None: ... if sys.version_info >= (3, 7): def kill(self) -> None: ... def close(self) -> None: ... def join(self, timeout: Optional[float] = ...) -> None: ... def is_alive(self) -> bool: ... @property def exitcode(self) -> Optional[int]: ... @property def ident(self) -> Optional[int]: ... @property def pid(self) -> Optional[int]: ... @property def sentinel(self) -> int: ... 
def current_process() -> BaseProcess: ... def active_children() -> List[BaseProcess]: ... if sys.version_info >= (3, 8): def parent_process() -> Optional[BaseProcess]: ... mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/queues.pyi0000644€tŠÔÚ€2›s®0000000212513576752252030444 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Generic, Optional, TypeVar import queue _T = TypeVar('_T') class Queue(queue.Queue[_T]): # FIXME: `ctx` is a circular dependency and it's not actually optional. # It's marked as such to be able to use the generic Queue in __init__.pyi. def __init__(self, maxsize: int = ..., *, ctx: Any = ...) -> None: ... def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ... def put(self, obj: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ... def qsize(self) -> int: ... def empty(self) -> bool: ... def full(self) -> bool: ... def put_nowait(self, item: _T) -> None: ... def get_nowait(self) -> _T: ... def close(self) -> None: ... def join_thread(self) -> None: ... def cancel_join_thread(self) -> None: ... class JoinableQueue(Queue[_T]): def task_done(self) -> None: ... def join(self) -> None: ... class SimpleQueue(Generic[_T]): def __init__(self, *, ctx: Any = ...) -> None: ... def empty(self) -> bool: ... def get(self) -> _T: ... def put(self, item: _T) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/shared_memory.pyi0000644€tŠÔÚ€2›s®0000000214113576752252031771 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Generic, Iterable, Optional, Tuple, TypeVar _S = TypeVar("_S") _SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) if sys.version_info >= (3, 8): class SharedMemory: def __init__(self, name: Optional[str] = ..., create: bool = ..., size: int = ...) -> None: ... @property def buf(self) -> memoryview: ... @property def name(self) -> str: ... @property def size(self) -> int: ... def close(self) -> None: ... def unlink(self) -> None: ... 
class ShareableList(Generic[_SLT]): shm: SharedMemory def __init__(self, sequence: Optional[Iterable[_SLT]] = ..., *, name: Optional[str] = ...) -> None: ... def __getitem__(self, position: int) -> _SLT: ... def __setitem__(self, position: int, value: _SLT) -> None: ... def __reduce__(self: _S) -> Tuple[_S, Tuple[_SLT, ...]]: ... def __len__(self) -> int: ... @property def format(self) -> str: ... def count(self, value: _SLT) -> int: ... def index(self, value: _SLT) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/spawn.pyi0000644€tŠÔÚ€2›s®0000000125713576752252030272 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, List, Mapping, Optional, Sequence from types import ModuleType WINEXE: bool WINSERVICE: bool def set_executable(exe: str) -> None: ... def get_executable() -> str: ... def is_forking(argv: Sequence[str]) -> bool: ... def freeze_support() -> None: ... def get_command_line(**kwds: Any) -> List[str]: ... def spawn_main(pipe_handle: int, parent_pid: Optional[int] = ..., tracker_fd: Optional[int] = ...) -> None: ... # undocumented def _main(fd: int) -> Any: ... def get_preparation_data(name: str) -> Dict[str, Any]: ... old_main_modules: List[ModuleType] def prepare(data: Mapping[str, Any]) -> None: ... def import_main_path(main_path: str) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/multiprocessing/synchronize.pyi0000644€tŠÔÚ€2›s®0000000403013576752252031505 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, ContextManager, Optional, Union from multiprocessing.context import BaseContext import threading import sys _LockLike = Union[Lock, RLock] class Barrier(threading.Barrier): def __init__(self, parties: int, action: Optional[Callable[..., Any]] = ..., timeout: Optional[float] = ..., * ctx: BaseContext) -> None: ... class BoundedSemaphore(Semaphore): def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... 
class Condition(ContextManager[bool]): def __init__(self, lock: Optional[_LockLike] = ..., *, ctx: BaseContext) -> None: ... if sys.version_info >= (3, 7): def notify(self, n: int = ...) -> None: ... else: def notify(self) -> None: ... def notify_all(self) -> None: ... def wait(self, timeout: Optional[float] = ...) -> bool: ... def wait_for(self, predicate: Callable[[], bool], timeout: Optional[float] = ...) -> bool: ... def acquire(self, block: bool = ..., timeout: Optional[float] = ...) -> bool: ... def release(self) -> None: ... class Event(ContextManager[bool]): def __init__(self, lock: Optional[_LockLike] = ..., *, ctx: BaseContext) -> None: ... def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... def wait(self, timeout: Optional[float] = ...) -> bool: ... class Lock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... class RLock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... class Semaphore(SemLock): def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... # Not part of public API class SemLock(ContextManager[bool]): def acquire(self, block: bool = ..., timeout: Optional[float] = ...) -> bool: ... def release(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/nntplib.pyi0000644€tŠÔÚ€2›s®0000001021013576752252025346 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for nntplib (Python 3) import datetime import socket import ssl from typing import Any, Dict, IO, Iterable, List, NamedTuple, Optional, Tuple, TypeVar, Union _SelfT = TypeVar('_SelfT', bound=_NNTPBase) _File = Union[IO[bytes], bytes, str, None] class NNTPError(Exception): response: str class NNTPReplyError(NNTPError): ... class NNTPTemporaryError(NNTPError): ... class NNTPPermanentError(NNTPError): ... class NNTPProtocolError(NNTPError): ... class NNTPDataError(NNTPError): ... 
NNTP_PORT: int NNTP_SSL_PORT: int class GroupInfo(NamedTuple): group: str last: str first: str flag: str class ArticleInfo(NamedTuple): number: int message_id: str lines: List[bytes] def decode_header(header_str: str) -> str: ... class _NNTPBase: encoding: str errors: str host: str file: IO[bytes] debugging: int welcome: str readermode_afterauth: bool tls_on: bool authenticated: bool nntp_implementation: str nntp_version: int def __init__(self, file: IO[bytes], host: str, readermode: Optional[bool] = ..., timeout: float = ...) -> None: ... def __enter__(self: _SelfT) -> _SelfT: ... def __exit__(self, *args: Any) -> None: ... def getwelcome(self) -> str: ... def getcapabilities(self) -> Dict[str, List[str]]: ... def set_debuglevel(self, level: int) -> None: ... def debug(self, level: int) -> None: ... def capabilities(self) -> Tuple[str, Dict[str, List[str]]]: ... def newgroups(self, date: Union[datetime.date, datetime.datetime], *, file: _File = ...) -> Tuple[str, List[str]]: ... def newnews(self, group: str, date: Union[datetime.date, datetime.datetime], *, file: _File = ...) -> Tuple[str, List[str]]: ... def list(self, group_pattern: Optional[str] = ..., *, file: _File = ...) -> Tuple[str, List[str]]: ... def description(self, group: str) -> str: ... def descriptions(self, group_pattern: str) -> Tuple[str, Dict[str, str]]: ... def group(self, name: str) -> Tuple[str, int, int, int, str]: ... def help(self, *, file: _File = ...) -> Tuple[str, List[str]]: ... def stat(self, message_spec: Any = ...) -> Tuple[str, int, str]: ... def next(self) -> Tuple[str, int, str]: ... def last(self) -> Tuple[str, int, str]: ... def head(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ... def body(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ... def article(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ... def slave(self) -> str: ... 
def xhdr(self, hdr: str, str: Any, *, file: _File = ...) -> Tuple[str, List[str]]: ... def xover(self, start: int, end: int, *, file: _File = ...) -> Tuple[str, List[Tuple[int, Dict[str, str]]]]: ... def over(self, message_spec: Union[None, str, List[Any], Tuple[Any, ...]], *, file: _File = ...) -> Tuple[str, List[Tuple[int, Dict[str, str]]]]: ... def xgtitle(self, group: str, *, file: _File = ...) -> Tuple[str, List[Tuple[str, str]]]: ... def xpath(self, id: Any) -> Tuple[str, str]: ... def date(self) -> Tuple[str, datetime.datetime]: ... def post(self, data: Union[bytes, Iterable[bytes]]) -> str: ... def ihave(self, message_id: Any, data: Union[bytes, Iterable[bytes]]) -> str: ... def quit(self) -> str: ... def login(self, user: Optional[str] = ..., password: Optional[str] = ..., usenetrc: bool = ...) -> None: ... def starttls(self, ssl_context: Optional[ssl.SSLContext] = ...) -> None: ... class NNTP(_NNTPBase): port: int sock: socket.socket def __init__(self, host: str, port: int = ..., user: Optional[str] = ..., password: Optional[str] = ..., readermode: Optional[bool] = ..., usenetrc: bool = ..., timeout: float = ...) -> None: ... class NNTP_SSL(_NNTPBase): sock: socket.socket def __init__(self, host: str, port: int = ..., user: Optional[str] = ..., password: Optional[str] = ..., ssl_context: Optional[ssl.SSLContext] = ..., readermode: Optional[bool] = ..., usenetrc: bool = ..., timeout: float = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/nturl2path.pyi0000644€tŠÔÚ€2›s®0000000011413576752252026005 0ustar jukkaDROPBOX\Domain Users00000000000000def url2pathname(url: str) -> str: ... def pathname2url(p: str) -> str: ... 
mypy-0.761/mypy/typeshed/stdlib/3/os/0000755€tŠÔÚ€2›s®0000000000013576752267023612 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/os/__init__.pyi0000644€tŠÔÚ€2›s®0000006300513576752252026072 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os # Ron Murawski from io import TextIOWrapper as _TextIOWrapper from posix import listdir as listdir, times_result import sys from typing import ( Mapping, MutableMapping, Dict, List, Any, Tuple, Iterable, Iterator, NoReturn, overload, Union, AnyStr, Optional, Generic, Set, Callable, Text, Sequence, NamedTuple, TypeVar, ContextManager ) # Re-exported names from other modules. from builtins import OSError as error from . import path as path _T = TypeVar('_T') # ----- os variables ----- supports_bytes_environ: bool supports_dir_fd: Set[Callable[..., Any]] supports_fd: Set[Callable[..., Any]] supports_effective_ids: Set[Callable[..., Any]] supports_follow_symlinks: Set[Callable[..., Any]] if sys.platform != 'win32': # Unix only PRIO_PROCESS: int PRIO_PGRP: int PRIO_USER: int F_LOCK: int F_TLOCK: int F_ULOCK: int F_TEST: int POSIX_FADV_NORMAL: int POSIX_FADV_SEQUENTIAL: int POSIX_FADV_RANDOM: int POSIX_FADV_NOREUSE: int POSIX_FADV_WILLNEED: int POSIX_FADV_DONTNEED: int SF_NODISKIO: int SF_MNOWAIT: int SF_SYNC: int XATTR_SIZE_MAX: int # Linux only XATTR_CREATE: int # Linux only XATTR_REPLACE: int # Linux only P_PID: int P_PGID: int P_ALL: int WEXITED: int WSTOPPED: int WNOWAIT: int CLD_EXITED: int CLD_DUMPED: int CLD_TRAPPED: int CLD_CONTINUED: int SCHED_OTHER: int # some flavors of Unix SCHED_BATCH: int # some flavors of Unix SCHED_IDLE: int # some flavors of Unix SCHED_SPORADIC: int # some flavors of Unix SCHED_FIFO: int # some flavors of Unix SCHED_RR: int # some flavors of Unix SCHED_RESET_ON_FORK: int # some flavors of Unix RTLD_LAZY: int RTLD_NOW: int RTLD_GLOBAL: int RTLD_LOCAL: int RTLD_NODELETE: int RTLD_NOLOAD: int RTLD_DEEPBIND: int SEEK_SET: int SEEK_CUR: int SEEK_END: int if 
sys.platform != 'win32': SEEK_DATA: int # some flavors of Unix SEEK_HOLE: int # some flavors of Unix O_RDONLY: int O_WRONLY: int O_RDWR: int O_APPEND: int O_CREAT: int O_EXCL: int O_TRUNC: int # We don't use sys.platform for O_* flags to denote platform-dependent APIs because some codes, # including tests for mypy, use a more finer way than sys.platform before using these APIs # See https://github.com/python/typeshed/pull/2286 for discussions O_DSYNC: int # Unix only O_RSYNC: int # Unix only O_SYNC: int # Unix only O_NDELAY: int # Unix only O_NONBLOCK: int # Unix only O_NOCTTY: int # Unix only O_CLOEXEC: int # Unix only O_SHLOCK: int # Unix only O_EXLOCK: int # Unix only O_BINARY: int # Windows only O_NOINHERIT: int # Windows only O_SHORT_LIVED: int # Windows only O_TEMPORARY: int # Windows only O_RANDOM: int # Windows only O_SEQUENTIAL: int # Windows only O_TEXT: int # Windows only O_ASYNC: int # Gnu extension if in C library O_DIRECT: int # Gnu extension if in C library O_DIRECTORY: int # Gnu extension if in C library O_NOFOLLOW: int # Gnu extension if in C library O_NOATIME: int # Gnu extension if in C library O_PATH: int # Gnu extension if in C library O_TMPFILE: int # Gnu extension if in C library O_LARGEFILE: int # Gnu extension if in C library curdir: str pardir: str sep: str if sys.platform == 'win32': altsep: str else: altsep: Optional[str] extsep: str pathsep: str defpath: str linesep: str devnull: str name: str F_OK: int R_OK: int W_OK: int X_OK: int class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): def copy(self) -> Dict[AnyStr, AnyStr]: ... def __delitem__(self, key: AnyStr) -> None: ... def __getitem__(self, key: AnyStr) -> AnyStr: ... def __setitem__(self, key: AnyStr, value: AnyStr) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... def __len__(self) -> int: ... 
environ: _Environ[str] environb: _Environ[bytes] if sys.platform != 'win32': confstr_names: Dict[str, int] pathconf_names: Dict[str, int] sysconf_names: Dict[str, int] EX_OK: int EX_USAGE: int EX_DATAERR: int EX_NOINPUT: int EX_NOUSER: int EX_NOHOST: int EX_UNAVAILABLE: int EX_SOFTWARE: int EX_OSERR: int EX_OSFILE: int EX_CANTCREAT: int EX_IOERR: int EX_TEMPFAIL: int EX_PROTOCOL: int EX_NOPERM: int EX_CONFIG: int EX_NOTFOUND: int P_NOWAIT: int P_NOWAITO: int P_WAIT: int if sys.platform == 'win32': P_DETACH: int P_OVERLAY: int # wait()/waitpid() options if sys.platform != 'win32': WNOHANG: int # Unix only WCONTINUED: int # some Unix systems WUNTRACED: int # Unix only TMP_MAX: int # Undocumented, but used by tempfile # ----- os classes (structures) ----- class stat_result: # For backward compatibility, the return value of stat() is also # accessible as a tuple of at least 10 integers giving the most important # (and portable) members of the stat structure, in the order st_mode, # st_ino, st_dev, st_nlink, st_uid, st_gid, st_size, st_atime, st_mtime, # st_ctime. More items may be added at the end by some implementations. st_mode: int # protection bits, st_ino: int # inode number, st_dev: int # device, st_nlink: int # number of hard links, st_uid: int # user id of owner, st_gid: int # group id of owner, st_size: int # size of file, in bytes, st_atime: float # time of most recent access, st_mtime: float # time of most recent content modification, st_ctime: float # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) st_atime_ns: int # time of most recent access, in nanoseconds st_mtime_ns: int # time of most recent content modification in nanoseconds st_ctime_ns: int # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds if sys.version_info >= (3, 8) and sys.platform == "win32": st_reparse_tag: int def __getitem__(self, i: int) -> int: ... 
# not documented def __init__(self, tuple: Tuple[int, ...]) -> None: ... # On some Unix systems (such as Linux), the following attributes may also # be available: st_blocks: int # number of blocks allocated for file st_blksize: int # filesystem blocksize st_rdev: int # type of device if an inode device st_flags: int # user defined flags for file # On other Unix systems (such as FreeBSD), the following attributes may be # available (but may be only filled out if root tries to use them): st_gen: int # file generation number st_birthtime: int # time of file creation # On Mac OS systems, the following attributes may also be available: st_rsize: int st_creator: int st_type: int if sys.version_info >= (3, 6): from builtins import _PathLike as PathLike # See comment in builtins _PathType = path._PathType _FdOrPathType = Union[int, _PathType] if sys.version_info >= (3, 6): class DirEntry(PathLike[AnyStr]): # This is what the scandir interator yields # The constructor is hidden name: AnyStr path: AnyStr def inode(self) -> int: ... def is_dir(self, *, follow_symlinks: bool = ...) -> bool: ... def is_file(self, *, follow_symlinks: bool = ...) -> bool: ... def is_symlink(self) -> bool: ... def stat(self, *, follow_symlinks: bool = ...) -> stat_result: ... def __fspath__(self) -> AnyStr: ... else: class DirEntry(Generic[AnyStr]): # This is what the scandir interator yields # The constructor is hidden name: AnyStr path: AnyStr def inode(self) -> int: ... def is_dir(self, *, follow_symlinks: bool = ...) -> bool: ... def is_file(self, *, follow_symlinks: bool = ...) -> bool: ... def is_symlink(self) -> bool: ... def stat(self, *, follow_symlinks: bool = ...) -> stat_result: ... 
if sys.platform != 'win32': class statvfs_result: # Unix only f_bsize: int f_frsize: int f_blocks: int f_bfree: int f_bavail: int f_files: int f_ffree: int f_favail: int f_flag: int f_namemax: int # ----- os function stubs ----- if sys.version_info >= (3, 6): def fsencode(filename: Union[str, bytes, PathLike[Any]]) -> bytes: ... else: def fsencode(filename: Union[str, bytes]) -> bytes: ... if sys.version_info >= (3, 6): def fsdecode(filename: Union[str, bytes, PathLike[Any]]) -> str: ... else: def fsdecode(filename: Union[str, bytes]) -> str: ... if sys.version_info >= (3, 6): @overload def fspath(path: str) -> str: ... @overload def fspath(path: bytes) -> bytes: ... @overload def fspath(path: PathLike[Any]) -> Any: ... def get_exec_path(env: Optional[Mapping[str, str]] = ...) -> List[str]: ... # NOTE: get_exec_path(): returns List[bytes] when env not None def getlogin() -> str: ... def getpid() -> int: ... def getppid() -> int: ... def strerror(code: int) -> str: ... def umask(mask: int) -> int: ... if sys.platform != 'win32': # Unix only def ctermid() -> str: ... def getegid() -> int: ... def geteuid() -> int: ... def getgid() -> int: ... def getgrouplist(user: str, gid: int) -> List[int]: ... def getgroups() -> List[int]: ... # Unix only, behaves differently on Mac def initgroups(username: str, gid: int) -> None: ... def getpgid(pid: int) -> int: ... def getpgrp() -> int: ... def getpriority(which: int, who: int) -> int: ... def setpriority(which: int, who: int, priority: int) -> None: ... def getresuid() -> Tuple[int, int, int]: ... def getresgid() -> Tuple[int, int, int]: ... def getuid() -> int: ... def setegid(egid: int) -> None: ... def seteuid(euid: int) -> None: ... def setgid(gid: int) -> None: ... def setgroups(groups: Sequence[int]) -> None: ... def setpgrp() -> None: ... def setpgid(pid: int, pgrp: int) -> None: ... def setregid(rgid: int, egid: int) -> None: ... def setresgid(rgid: int, egid: int, sgid: int) -> None: ... 
def setresuid(ruid: int, euid: int, suid: int) -> None: ... def setreuid(ruid: int, euid: int) -> None: ... def getsid(pid: int) -> int: ... def setsid() -> None: ... def setuid(uid: int) -> None: ... from posix import uname_result def uname() -> uname_result: ... @overload def getenv(key: Text) -> Optional[str]: ... @overload def getenv(key: Text, default: _T) -> Union[str, _T]: ... def getenvb(key: bytes, default: bytes = ...) -> bytes: ... def putenv(key: Union[bytes, Text], value: Union[bytes, Text]) -> None: ... def unsetenv(key: Union[bytes, Text]) -> None: ... # Return IO or TextIO def fdopen(fd: int, mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: str = ..., newline: str = ..., closefd: bool = ...) -> Any: ... def close(fd: int) -> None: ... def closerange(fd_low: int, fd_high: int) -> None: ... def device_encoding(fd: int) -> Optional[str]: ... def dup(fd: int) -> int: ... if sys.version_info >= (3, 7): def dup2(fd: int, fd2: int, inheritable: bool = ...) -> int: ... else: def dup2(fd: int, fd2: int, inheritable: bool = ...) -> None: ... def fstat(fd: int) -> stat_result: ... def fsync(fd: int) -> None: ... def lseek(fd: int, pos: int, how: int) -> int: ... def open(file: _PathType, flags: int, mode: int = ..., *, dir_fd: Optional[int] = ...) -> int: ... def pipe() -> Tuple[int, int]: ... def read(fd: int, n: int) -> bytes: ... if sys.platform != 'win32': # Unix only def fchmod(fd: int, mode: int) -> None: ... def fchown(fd: int, uid: int, gid: int) -> None: ... def fdatasync(fd: int) -> None: ... # Unix only, not Mac def fpathconf(fd: int, name: Union[str, int]) -> int: ... def fstatvfs(fd: int) -> statvfs_result: ... def ftruncate(fd: int, length: int) -> None: ... def get_blocking(fd: int) -> bool: ... def set_blocking(fd: int, blocking: bool) -> None: ... def isatty(fd: int) -> bool: ... def lockf(__fd: int, __cmd: int, __length: int) -> None: ... def openpty() -> Tuple[int, int]: ... 
# some flavors of Unix def pipe2(flags: int) -> Tuple[int, int]: ... # some flavors of Unix def posix_fallocate(fd: int, offset: int, length: int) -> None: ... def posix_fadvise(fd: int, offset: int, length: int, advice: int) -> None: ... def pread(fd: int, buffersize: int, offset: int) -> bytes: ... def pwrite(fd: int, string: bytes, offset: int) -> int: ... @overload def sendfile(__out_fd: int, __in_fd: int, offset: Optional[int], count: int) -> int: ... @overload def sendfile(__out_fd: int, __in_fd: int, offset: int, count: int, headers: Sequence[bytes] = ..., trailers: Sequence[bytes] = ..., flags: int = ...) -> int: ... # FreeBSD and Mac OS X only def readv(fd: int, buffers: Sequence[bytearray]) -> int: ... def writev(fd: int, buffers: Sequence[bytes]) -> int: ... class terminal_size(Tuple[int, int]): columns: int lines: int def get_terminal_size(fd: int = ...) -> terminal_size: ... def get_inheritable(fd: int) -> bool: ... def set_inheritable(fd: int, inheritable: bool) -> None: ... if sys.platform != 'win32': # Unix only def tcgetpgrp(fd: int) -> int: ... def tcsetpgrp(fd: int, pg: int) -> None: ... def ttyname(fd: int) -> str: ... def write(fd: int, string: bytes) -> int: ... def access( path: _FdOrPathType, mode: int, *, dir_fd: Optional[int] = ..., effective_ids: bool = ..., follow_symlinks: bool = ..., ) -> bool: ... def chdir(path: _FdOrPathType) -> None: ... def fchdir(fd: int) -> None: ... def getcwd() -> str: ... def getcwdb() -> bytes: ... def chmod(path: _FdOrPathType, mode: int, *, dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> None: ... if sys.platform != 'win32': def chflags(path: _PathType, flags: int, follow_symlinks: bool = ...) -> None: ... # some flavors of Unix def chown(path: _FdOrPathType, uid: int, gid: int, *, dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> None: ... # Unix only if sys.platform != 'win32': # Unix only def chroot(path: _PathType) -> None: ... 
def lchflags(path: _PathType, flags: int) -> None: ... def lchmod(path: _PathType, mode: int) -> None: ... def lchown(path: _PathType, uid: int, gid: int) -> None: ... def link( src: _PathType, link_name: _PathType, *, src_dir_fd: Optional[int] = ..., dst_dir_fd: Optional[int] = ..., follow_symlinks: bool = ..., ) -> None: ... def lstat(path: _PathType, *, dir_fd: Optional[int] = ...) -> stat_result: ... def mkdir(path: _PathType, mode: int = ..., *, dir_fd: Optional[int] = ...) -> None: ... if sys.platform != 'win32': def mkfifo(path: _PathType, mode: int = ..., *, dir_fd: Optional[int] = ...) -> None: ... # Unix only def makedirs(name: _PathType, mode: int = ..., exist_ok: bool = ...) -> None: ... def mknod(path: _PathType, mode: int = ..., device: int = ..., *, dir_fd: Optional[int] = ...) -> None: ... def major(device: int) -> int: ... def minor(device: int) -> int: ... def makedev(major: int, minor: int) -> int: ... if sys.platform != 'win32': def pathconf(path: _FdOrPathType, name: Union[str, int]) -> int: ... # Unix only if sys.version_info >= (3, 6): def readlink(path: Union[AnyStr, PathLike[AnyStr]], *, dir_fd: Optional[int] = ...) -> AnyStr: ... else: def readlink(path: AnyStr, *, dir_fd: Optional[int] = ...) -> AnyStr: ... def remove(path: _PathType, *, dir_fd: Optional[int] = ...) -> None: ... def removedirs(name: _PathType) -> None: ... def rename(src: _PathType, dst: _PathType, *, src_dir_fd: Optional[int] = ..., dst_dir_fd: Optional[int] = ...) -> None: ... def renames(old: _PathType, new: _PathType) -> None: ... def replace(src: _PathType, dst: _PathType, *, src_dir_fd: Optional[int] = ..., dst_dir_fd: Optional[int] = ...) -> None: ... def rmdir(path: _PathType, *, dir_fd: Optional[int] = ...) -> None: ... if sys.version_info >= (3, 7): class _ScandirIterator(Iterator[DirEntry[AnyStr]], ContextManager[_ScandirIterator[AnyStr]]): def __next__(self) -> DirEntry[AnyStr]: ... def close(self) -> None: ... 
@overload def scandir() -> _ScandirIterator[str]: ... @overload def scandir(path: int) -> _ScandirIterator[str]: ... @overload def scandir(path: Union[AnyStr, PathLike[AnyStr]]) -> _ScandirIterator[AnyStr]: ... elif sys.version_info >= (3, 6): class _ScandirIterator(Iterator[DirEntry[AnyStr]], ContextManager[_ScandirIterator[AnyStr]]): def __next__(self) -> DirEntry[AnyStr]: ... def close(self) -> None: ... @overload def scandir() -> _ScandirIterator[str]: ... @overload def scandir(path: Union[AnyStr, PathLike[AnyStr]]) -> _ScandirIterator[AnyStr]: ... else: @overload def scandir() -> Iterator[DirEntry[str]]: ... @overload def scandir(path: AnyStr) -> Iterator[DirEntry[AnyStr]]: ... def stat(path: _FdOrPathType, *, dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> stat_result: ... if sys.version_info < (3, 7): @overload def stat_float_times() -> bool: ... @overload def stat_float_times(__newvalue: bool) -> None: ... if sys.platform != 'win32': def statvfs(path: _FdOrPathType) -> statvfs_result: ... # Unix only def symlink( source: _PathType, link_name: _PathType, target_is_directory: bool = ..., *, dir_fd: Optional[int] = ..., ) -> None: ... if sys.platform != 'win32': def sync() -> None: ... # Unix only def truncate(path: _FdOrPathType, length: int) -> None: ... # Unix only up to version 3.4 def unlink(path: _PathType, *, dir_fd: Optional[int] = ...) -> None: ... def utime( path: _FdOrPathType, times: Optional[Union[Tuple[int, int], Tuple[float, float]]] = ..., *, ns: Tuple[int, int] = ..., dir_fd: Optional[int] = ..., follow_symlinks: bool = ..., ) -> None: ... _OnError = Callable[[OSError], Any] if sys.version_info >= (3, 6): def walk(top: Union[AnyStr, PathLike[AnyStr]], topdown: bool = ..., onerror: Optional[_OnError] = ..., followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr], List[AnyStr]]]: ... else: def walk(top: AnyStr, topdown: bool = ..., onerror: Optional[_OnError] = ..., followlinks: bool = ...) 
-> Iterator[Tuple[AnyStr, List[AnyStr], List[AnyStr]]]: ... if sys.platform != 'win32': if sys.version_info >= (3, 7): @overload def fwalk(top: Union[str, PathLike[str]] = ..., topdown: bool = ..., onerror: Optional[_OnError] = ..., *, follow_symlinks: bool = ..., dir_fd: Optional[int] = ...) -> Iterator[Tuple[str, List[str], List[str], int]]: ... @overload def fwalk(top: bytes, topdown: bool = ..., onerror: Optional[_OnError] = ..., *, follow_symlinks: bool = ..., dir_fd: Optional[int] = ...) -> Iterator[Tuple[bytes, List[bytes], List[bytes], int]]: ... elif sys.version_info >= (3, 6): def fwalk(top: Union[str, PathLike[str]] = ..., topdown: bool = ..., onerror: Optional[_OnError] = ..., *, follow_symlinks: bool = ..., dir_fd: Optional[int] = ...) -> Iterator[Tuple[str, List[str], List[str], int]]: ... else: def fwalk(top: str = ..., topdown: bool = ..., onerror: Optional[_OnError] = ..., *, follow_symlinks: bool = ..., dir_fd: Optional[int] = ...) -> Iterator[Tuple[str, List[str], List[str], int]]: ... def getxattr(path: _FdOrPathType, attribute: _PathType, *, follow_symlinks: bool = ...) -> bytes: ... # Linux only def listxattr(path: _FdOrPathType, *, follow_symlinks: bool = ...) -> List[str]: ... # Linux only def removexattr(path: _FdOrPathType, attribute: _PathType, *, follow_symlinks: bool = ...) -> None: ... # Linux only def setxattr(path: _FdOrPathType, attribute: _PathType, value: bytes, flags: int = ..., *, follow_symlinks: bool = ...) -> None: ... # Linux only def abort() -> NoReturn: ... # These are defined as execl(file, *args) but the first *arg is mandatory. def execl(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ... def execlp(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ... # These are: execle(file, *args, env) but env is pulled from the last element of the args. def execle(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ... 
def execlpe(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ... # The docs say `args: tuple or list of strings` # The implementation enforces tuple or list so we can't use Sequence. _ExecVArgs = Union[Tuple[Union[bytes, Text], ...], List[bytes], List[Text], List[Union[bytes, Text]]] _ExecEnv = Union[Mapping[bytes, Union[bytes, str]], Mapping[str, Union[bytes, str]]] def execv(path: _PathType, args: _ExecVArgs) -> NoReturn: ... def execve(path: _FdOrPathType, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... def execvp(file: _PathType, args: _ExecVArgs) -> NoReturn: ... def execvpe(file: _PathType, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... def _exit(n: int) -> NoReturn: ... def kill(pid: int, sig: int) -> None: ... if sys.platform != 'win32': # Unix only def fork() -> int: ... def forkpty() -> Tuple[int, int]: ... # some flavors of Unix def killpg(pgid: int, sig: int) -> None: ... def nice(increment: int) -> int: ... def plock(op: int) -> None: ... # ???op is int? class _wrap_close(_TextIOWrapper): def close(self) -> Optional[int]: ... # type: ignore def popen(command: str, mode: str = ..., buffering: int = ...) -> _wrap_close: ... def spawnl(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ... def spawnle(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Any) -> int: ... # Imprecise sig def spawnv(mode: int, path: _PathType, args: List[Union[bytes, Text]]) -> int: ... def spawnve(mode: int, path: _PathType, args: List[Union[bytes, Text]], env: _ExecEnv) -> int: ... def system(command: _PathType) -> int: ... def times() -> times_result: ... def waitpid(pid: int, options: int) -> Tuple[int, int]: ... if sys.platform == 'win32': def startfile(path: _PathType, operation: Optional[str] = ...) -> None: ... else: # Unix only def spawnlp(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ... 
def spawnlpe(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Any) -> int: ... # Imprecise signature def spawnvp(mode: int, file: _PathType, args: List[Union[bytes, Text]]) -> int: ... def spawnvpe(mode: int, file: _PathType, args: List[Union[bytes, Text]], env: _ExecEnv) -> int: ... def wait() -> Tuple[int, int]: ... # Unix only from posix import waitid_result def waitid(idtype: int, ident: int, options: int) -> waitid_result: ... def wait3(options: int) -> Tuple[int, int, Any]: ... def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... def WCOREDUMP(status: int) -> bool: ... def WIFCONTINUED(status: int) -> bool: ... def WIFSTOPPED(status: int) -> bool: ... def WIFSIGNALED(status: int) -> bool: ... def WIFEXITED(status: int) -> bool: ... def WEXITSTATUS(status: int) -> int: ... def WSTOPSIG(status: int) -> int: ... def WTERMSIG(status: int) -> int: ... if sys.platform != 'win32': from posix import sched_param def sched_get_priority_min(policy: int) -> int: ... # some flavors of Unix def sched_get_priority_max(policy: int) -> int: ... # some flavors of Unix def sched_setscheduler(pid: int, policy: int, param: sched_param) -> None: ... # some flavors of Unix def sched_getscheduler(pid: int) -> int: ... # some flavors of Unix def sched_setparam(pid: int, param: sched_param) -> None: ... # some flavors of Unix def sched_getparam(pid: int) -> sched_param: ... # some flavors of Unix def sched_rr_get_interval(pid: int) -> float: ... # some flavors of Unix def sched_yield() -> None: ... # some flavors of Unix def sched_setaffinity(pid: int, mask: Iterable[int]) -> None: ... # some flavors of Unix def sched_getaffinity(pid: int) -> Set[int]: ... # some flavors of Unix def cpu_count() -> Optional[int]: ... if sys.platform != 'win32': # Unix only def confstr(name: Union[str, int]) -> Optional[str]: ... def getloadavg() -> Tuple[float, float, float]: ... def sysconf(name: Union[str, int]) -> int: ... 
if sys.version_info >= (3, 6): def getrandom(size: int, flags: int = ...) -> bytes: ... def urandom(size: int) -> bytes: ... else: def urandom(n: int) -> bytes: ... if sys.version_info >= (3, 7): def register_at_fork(func: Callable[..., object], when: str) -> None: ... if sys.version_info >= (3, 8): if sys.platform == "win32": class _AddedDllDirectory: path: Optional[str] def close(self) -> None: ... def __enter__(self: _T) -> _T: ... def __exit__(self, *args: Any) -> None: ... def add_dll_directory(path: str) -> _AddedDllDirectory: ... if sys.platform == "linux": MFD_CLOEXEC: int MFD_ALLOW_SEALING: int MFD_HUGETLB: int MFD_HUGE_SHIFT: int MFD_HUGE_MASK: int MFD_HUGE_64KB: int MFD_HUGE_512KB: int MFD_HUGE_1MB: int MFD_HUGE_2MB: int MFD_HUGE_8MB: int MFD_HUGE_16MB: int MFD_HUGE_32MB: int MFD_HUGE_256MB: int MFD_HUGE_512MB: int MFD_HUGE_1GB: int MFD_HUGE_2GB: int MFD_HUGE_16GB: int def memfd_create(name: str, flags: int = ...) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3/os/path.pyi0000644€tŠÔÚ€2›s®0000001412413576752252025265 0ustar jukkaDROPBOX\Domain Users00000000000000# NB: path.pyi and stdlib/2 and stdlib/3 must remain consistent! # Stubs for os.path # Ron Murawski import os import sys from typing import overload, List, Any, AnyStr, Sequence, Tuple, TypeVar, Union, Text, Callable, Optional _T = TypeVar('_T') if sys.version_info >= (3, 6): from builtins import _PathLike _PathType = Union[bytes, Text, _PathLike] _StrPath = Union[Text, _PathLike[Text]] _BytesPath = Union[bytes, _PathLike[bytes]] else: _PathType = Union[bytes, Text] _StrPath = Text _BytesPath = bytes # ----- os.path variables ----- supports_unicode_filenames: bool # aliases (also in os) curdir: str pardir: str sep: str if sys.platform == 'win32': altsep: str else: altsep: Optional[str] extsep: str pathsep: str defpath: str devnull: str # ----- os.path function stubs ----- if sys.version_info >= (3, 6): # Overloads are necessary to work around python/mypy#3644. 
@overload def abspath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def abspath(path: AnyStr) -> AnyStr: ... @overload def basename(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def basename(path: AnyStr) -> AnyStr: ... @overload def dirname(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def dirname(path: AnyStr) -> AnyStr: ... @overload def expanduser(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expanduser(path: AnyStr) -> AnyStr: ... @overload def expandvars(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def expandvars(path: AnyStr) -> AnyStr: ... @overload def normcase(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normcase(path: AnyStr) -> AnyStr: ... @overload def normpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': @overload def realpath(path: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(path: AnyStr) -> AnyStr: ... else: @overload def realpath(filename: _PathLike[AnyStr]) -> AnyStr: ... @overload def realpath(filename: AnyStr) -> AnyStr: ... else: def abspath(path: AnyStr) -> AnyStr: ... def basename(path: AnyStr) -> AnyStr: ... def dirname(path: AnyStr) -> AnyStr: ... def expanduser(path: AnyStr) -> AnyStr: ... def expandvars(path: AnyStr) -> AnyStr: ... def normcase(path: AnyStr) -> AnyStr: ... def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': def realpath(path: AnyStr) -> AnyStr: ... else: def realpath(filename: AnyStr) -> AnyStr: ... if sys.version_info >= (3, 6): # In reality it returns str for sequences of _StrPath and bytes for sequences # of _BytesPath, but mypy does not accept such a signature. def commonpath(paths: Sequence[_PathType]) -> Any: ... elif sys.version_info >= (3, 5): def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ... # NOTE: Empty lists results in '' (str) regardless of contained type. 
# Also, in Python 2 mixed sequences of Text and bytes results in either Text or bytes # So, fall back to Any def commonprefix(list: Sequence[_PathType]) -> Any: ... if sys.version_info >= (3, 3): def exists(path: Union[_PathType, int]) -> bool: ... else: def exists(path: _PathType) -> bool: ... def lexists(path: _PathType) -> bool: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(path: _PathType) -> float: ... def getmtime(path: _PathType) -> float: ... def getctime(path: _PathType) -> float: ... def getsize(path: _PathType) -> int: ... def isabs(path: _PathType) -> bool: ... def isfile(path: _PathType) -> bool: ... def isdir(path: _PathType) -> bool: ... def islink(path: _PathType) -> bool: ... def ismount(path: _PathType) -> bool: ... if sys.version_info < (3, 0): # Make sure signatures are disjunct, and allow combinations of bytes and unicode. # (Since Python 2 allows that, too) # Note that e.g. os.path.join("a", "b", "c", "d", u"e") will still result in # a type error. @overload def join(__p1: bytes, *p: bytes) -> bytes: ... @overload def join(__p1: bytes, __p2: bytes, __p3: bytes, __p4: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: bytes, __p3: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: Text, *p: _PathType) -> Text: ... @overload def join(__p1: Text, *p: _PathType) -> Text: ... elif sys.version_info >= (3, 6): # Mypy complains that the signatures overlap (same for relpath below), but things seem to behave correctly anyway. @overload def join(path: _StrPath, *paths: _StrPath) -> Text: ... @overload def join(path: _BytesPath, *paths: _BytesPath) -> bytes: ... else: def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ... @overload def relpath(path: _BytesPath, start: Optional[_BytesPath] = ...) -> bytes: ... @overload def relpath(path: _StrPath, start: Optional[_StrPath] = ...) -> Text: ... def samefile(path1: _PathType, path2: _PathType) -> bool: ... 
def sameopenfile(fp1: int, fp2: int) -> bool: ... def samestat(stat1: os.stat_result, stat2: os.stat_result) -> bool: ... if sys.version_info >= (3, 6): @overload def split(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: _PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... @overload def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... else: def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... if sys.platform == 'win32': def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # deprecated if sys.version_info < (3,): def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/pathlib.pyi0000644€tŠÔÚ€2›s®0000001260413576752252025334 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Generator, IO, Optional, Sequence, Tuple, Type, TypeVar, Union, List from types import TracebackType import os import sys _P = TypeVar('_P', bound=PurePath) if sys.version_info >= (3, 6): _PurePathBase = os.PathLike[str] else: _PurePathBase = object class PurePath(_PurePathBase): parts: Tuple[str, ...] drive: str root: str anchor: str name: str suffix: str suffixes: List[str] stem: str if sys.version_info < (3, 5): def __init__(self, *pathsegments: str) -> None: ... elif sys.version_info < (3, 6): def __new__(cls: Type[_P], *args: Union[str, PurePath]) -> _P: ... else: def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]]) -> _P: ... def __hash__(self) -> int: ... def __lt__(self, other: PurePath) -> bool: ... def __le__(self, other: PurePath) -> bool: ... def __gt__(self, other: PurePath) -> bool: ... 
def __ge__(self, other: PurePath) -> bool: ... if sys.version_info < (3, 6): def __truediv__(self: _P, key: Union[str, PurePath]) -> _P: ... def __rtruediv__(self: _P, key: Union[str, PurePath]) -> _P: ... else: def __truediv__(self: _P, key: Union[str, os.PathLike[str]]) -> _P: ... def __rtruediv__(self: _P, key: Union[str, os.PathLike[str]]) -> _P: ... if sys.version_info < (3,): def __div__(self: _P, key: Union[str, PurePath]) -> _P: ... def __bytes__(self) -> bytes: ... def as_posix(self) -> str: ... def as_uri(self) -> str: ... def is_absolute(self) -> bool: ... def is_reserved(self) -> bool: ... def match(self, path_pattern: str) -> bool: ... if sys.version_info < (3, 6): def relative_to(self: _P, *other: Union[str, PurePath]) -> _P: ... else: def relative_to(self: _P, *other: Union[str, os.PathLike[str]]) -> _P: ... def with_name(self: _P, name: str) -> _P: ... def with_suffix(self: _P, suffix: str) -> _P: ... if sys.version_info < (3, 6): def joinpath(self: _P, *other: Union[str, PurePath]) -> _P: ... else: def joinpath(self: _P, *other: Union[str, os.PathLike[str]]) -> _P: ... @property def parents(self: _P) -> Sequence[_P]: ... @property def parent(self: _P) -> _P: ... class PurePosixPath(PurePath): ... class PureWindowsPath(PurePath): ... class Path(PurePath): def __enter__(self) -> Path: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Optional[bool]: ... @classmethod def cwd(cls: Type[_P]) -> _P: ... def stat(self) -> os.stat_result: ... def chmod(self, mode: int) -> None: ... def exists(self) -> bool: ... def glob(self, pattern: str) -> Generator[Path, None, None]: ... def group(self) -> str: ... def is_dir(self) -> bool: ... def is_file(self) -> bool: ... def is_symlink(self) -> bool: ... def is_socket(self) -> bool: ... def is_fifo(self) -> bool: ... def is_block_device(self) -> bool: ... def is_char_device(self) -> bool: ... 
def iterdir(self) -> Generator[Path, None, None]: ... def lchmod(self, mode: int) -> None: ... def lstat(self) -> os.stat_result: ... if sys.version_info < (3, 5): def mkdir(self, mode: int = ..., parents: bool = ...) -> None: ... else: def mkdir(self, mode: int = ..., parents: bool = ..., exist_ok: bool = ...) -> None: ... def open(self, mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> IO[Any]: ... def owner(self) -> str: ... def rename(self, target: Union[str, PurePath]) -> None: ... def replace(self, target: Union[str, PurePath]) -> None: ... if sys.version_info < (3, 6): def resolve(self: _P) -> _P: ... else: def resolve(self: _P, strict: bool = ...) -> _P: ... def rglob(self, pattern: str) -> Generator[Path, None, None]: ... def rmdir(self) -> None: ... def symlink_to(self, target: Union[str, Path], target_is_directory: bool = ...) -> None: ... def touch(self, mode: int = ..., exist_ok: bool = ...) -> None: ... def unlink(self) -> None: ... if sys.version_info >= (3, 5): @classmethod def home(cls: Type[_P]) -> _P: ... if sys.version_info < (3, 6): def __new__(cls: Type[_P], *args: Union[str, PurePath], **kwargs: Any) -> _P: ... else: def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]], **kwargs: Any) -> _P: ... def absolute(self: _P) -> _P: ... def expanduser(self: _P) -> _P: ... def read_bytes(self) -> bytes: ... def read_text(self, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> str: ... def samefile(self, other_path: Union[str, bytes, int, Path]) -> bool: ... def write_bytes(self, data: bytes) -> int: ... def write_text(self, data: str, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> int: ... if sys.version_info >= (3, 8): def link_to(self, target: Union[str, bytes, os.PathLike[str]]) -> None: ... class PosixPath(Path, PurePosixPath): ... class WindowsPath(Path, PureWindowsPath): ... 
mypy-0.761/mypy/typeshed/stdlib/3/pipes.pyi0000644€tŠÔÚ€2›s®0000000112313576752252025023 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pipes # Based on http://docs.python.org/3.5/library/pipes.html import os class Template: def __init__(self) -> None: ... def reset(self) -> None: ... def clone(self) -> Template: ... def debug(self, flag: bool) -> None: ... def append(self, cmd: str, kind: str) -> None: ... def prepend(self, cmd: str, kind: str) -> None: ... def open(self, file: str, rw: str) -> os._wrap_close: ... def copy(self, file: str, rw: str) -> os._wrap_close: ... # Not documented, but widely used. # Documented as shlex.quote since 3.3. def quote(s: str) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/platform.pyi0000644€tŠÔÚ€2›s®0000000346513576752252025542 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for platform (Python 3.5) from os import devnull as DEV_NULL from typing import Tuple, NamedTuple def libc_ver(executable: str = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> Tuple[str, str]: ... def linux_distribution(distname: str = ..., version: str = ..., id: str = ..., supported_dists: Tuple[str, ...] = ..., full_distribution_name: bool = ...) -> Tuple[str, str, str]: ... def dist(distname: str = ..., version: str = ..., id: str = ..., supported_dists: Tuple[str, ...] = ...) -> Tuple[str, str, str]: ... def win32_ver(release: str = ..., version: str = ..., csd: str = ..., ptype: str = ...) -> Tuple[str, str, str, str]: ... def mac_ver(release: str = ..., versioninfo: Tuple[str, str, str] = ..., machine: str = ...) -> Tuple[str, Tuple[str, str, str], str]: ... def java_ver(release: str = ..., vendor: str = ..., vminfo: Tuple[str, str, str] = ..., osinfo: Tuple[str, str, str] = ...) -> Tuple[str, str, Tuple[str, str, str], Tuple[str, str, str]]: ... def system_alias(system: str, release: str, version: str) -> Tuple[str, str, str]: ... def architecture(executable: str = ..., bits: str = ..., linkage: str = ...) 
-> Tuple[str, str]: ... class uname_result(NamedTuple): system: str node: str release: str version: str machine: str processor: str def uname() -> uname_result: ... def system() -> str: ... def node() -> str: ... def release() -> str: ... def version() -> str: ... def machine() -> str: ... def processor() -> str: ... def python_implementation() -> str: ... def python_version() -> str: ... def python_version_tuple() -> Tuple[str, str, str]: ... def python_branch() -> str: ... def python_revision() -> str: ... def python_build() -> Tuple[str, str]: ... def python_compiler() -> str: ... def platform(aliased: bool = ..., terse: bool = ...) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/posix.pyi0000644€tŠÔÚ€2›s®0000000427213576752252025055 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for posix # NOTE: These are incomplete! import sys from typing import List, NamedTuple, Optional, overload from os import stat_result as stat_result if sys.version_info >= (3, 6): from builtins import _PathLike # See comment in builtins class uname_result(NamedTuple): sysname: str nodename: str release: str version: str machine: str class times_result(NamedTuple): user: float system: float children_user: float children_system: float elapsed: float class waitid_result(NamedTuple): si_pid: int si_uid: int si_signo: int si_status: int si_code: int class sched_param(NamedTuple): sched_priority: int EX_CANTCREAT: int EX_CONFIG: int EX_DATAERR: int EX_IOERR: int EX_NOHOST: int EX_NOINPUT: int EX_NOPERM: int EX_NOTFOUND: int EX_NOUSER: int EX_OK: int EX_OSERR: int EX_OSFILE: int EX_PROTOCOL: int EX_SOFTWARE: int EX_TEMPFAIL: int EX_UNAVAILABLE: int EX_USAGE: int F_OK: int R_OK: int W_OK: int X_OK: int if sys.version_info >= (3, 6): GRND_NONBLOCK: int GRND_RANDOM: int NGROUPS_MAX: int O_APPEND: int O_ACCMODE: int O_ASYNC: int O_CREAT: int O_DIRECT: int O_DIRECTORY: int O_DSYNC: int O_EXCL: int O_LARGEFILE: int O_NDELAY: int O_NOATIME: int O_NOCTTY: int O_NOFOLLOW: int O_NONBLOCK: int 
O_RDONLY: int O_RDWR: int O_RSYNC: int O_SYNC: int O_TRUNC: int O_WRONLY: int ST_APPEND: int ST_MANDLOCK: int ST_NOATIME: int ST_NODEV: int ST_NODIRATIME: int ST_NOEXEC: int ST_NOSUID: int ST_RDONLY: int ST_RELATIME: int ST_SYNCHRONOUS: int ST_WRITE: int TMP_MAX: int WCONTINUED: int WCOREDUMP: int WEXITSTATUS: int WIFCONTINUED: int WIFEXITED: int WIFSIGNALED: int WIFSTOPPED: int WNOHANG: int WSTOPSIG: int WTERMSIG: int WUNTRACED: int if sys.version_info >= (3, 6): @overload def listdir(path: Optional[str] = ...) -> List[str]: ... @overload def listdir(path: bytes) -> List[bytes]: ... @overload def listdir(path: int) -> List[str]: ... @overload def listdir(path: _PathLike[str]) -> List[str]: ... else: @overload def listdir(path: Optional[str] = ...) -> List[str]: ... @overload def listdir(path: bytes) -> List[bytes]: ... @overload def listdir(path: int) -> List[str]: ... mypy-0.761/mypy/typeshed/stdlib/3/queue.pyi0000644€tŠÔÚ€2›s®0000000320613576752252025033 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for queue # NOTE: These are incomplete! from threading import Condition, Lock from typing import Any, Deque, TypeVar, Generic, Optional import sys _T = TypeVar('_T') class Empty(Exception): ... class Full(Exception): ... class Queue(Generic[_T]): maxsize: int mutex: Lock # undocumented not_empty: Condition # undocumented not_full: Condition # undocumented all_tasks_done: Condition # undocumented unfinished_tasks: int # undocumented queue: Deque[Any] # undocumented def __init__(self, maxsize: int = ...) -> None: ... def _init(self, maxsize: int) -> None: ... def empty(self) -> bool: ... def full(self) -> bool: ... def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ... def get_nowait(self) -> _T: ... def _get(self) -> _T: ... def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ... def put_nowait(self, item: _T) -> None: ... def _put(self, item: _T) -> None: ... def join(self) -> None: ... 
def qsize(self) -> int: ... def _qsize(self) -> int: ... def task_done(self) -> None: ... class PriorityQueue(Queue[_T]): ... class LifoQueue(Queue[_T]): ... if sys.version_info >= (3, 7): class SimpleQueue(Generic[_T]): def __init__(self) -> None: ... def empty(self) -> bool: ... def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ... def get_nowait(self) -> _T: ... def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ... def put_nowait(self, item: _T) -> None: ... def qsize(self) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3/random.pyi0000644€tŠÔÚ€2›s®0000000663313576752252025176 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for random # Ron Murawski # Updated by Jukka Lehtosalo # based on http://docs.python.org/3.2/library/random.html # ----- random classes ----- import _random import sys from typing import Any, TypeVar, Sequence, List, Callable, AbstractSet, Union, Optional, Tuple _T = TypeVar('_T') class Random(_random.Random): def __init__(self, x: Any = ...) -> None: ... def seed(self, a: Any = ..., version: int = ...) -> None: ... def getstate(self) -> Tuple[Any, ...]: ... def setstate(self, state: Tuple[Any, ...]) -> None: ... def getrandbits(self, k: int) -> int: ... def randrange(self, start: int, stop: Union[int, None] = ..., step: int = ...) -> int: ... def randint(self, a: int, b: int) -> int: ... def choice(self, seq: Sequence[_T]) -> _T: ... if sys.version_info >= (3, 6): def choices(self, population: Sequence[_T], weights: Optional[Sequence[float]] = ..., *, cum_weights: Optional[Sequence[float]] = ..., k: int = ...) -> List[_T]: ... def shuffle(self, x: List[Any], random: Union[Callable[[], float], None] = ...) -> None: ... def sample(self, population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ... def random(self) -> float: ... def uniform(self, a: float, b: float) -> float: ... def triangular(self, low: float = ..., high: float = ..., mode: float = ...) -> float: ... 
def betavariate(self, alpha: float, beta: float) -> float: ... def expovariate(self, lambd: float) -> float: ... def gammavariate(self, alpha: float, beta: float) -> float: ... def gauss(self, mu: float, sigma: float) -> float: ... def lognormvariate(self, mu: float, sigma: float) -> float: ... def normalvariate(self, mu: float, sigma: float) -> float: ... def vonmisesvariate(self, mu: float, kappa: float) -> float: ... def paretovariate(self, alpha: float) -> float: ... def weibullvariate(self, alpha: float, beta: float) -> float: ... # SystemRandom is not implemented for all OS's; good on Windows & Linux class SystemRandom(Random): ... # ----- random function stubs ----- def seed(a: Any = ..., version: int = ...) -> None: ... def getstate() -> object: ... def setstate(state: object) -> None: ... def getrandbits(k: int) -> int: ... def randrange(start: int, stop: Union[None, int] = ..., step: int = ...) -> int: ... def randint(a: int, b: int) -> int: ... def choice(seq: Sequence[_T]) -> _T: ... if sys.version_info >= (3, 6): def choices(population: Sequence[_T], weights: Optional[Sequence[float]] = ..., *, cum_weights: Optional[Sequence[float]] = ..., k: int = ...) -> List[_T]: ... def shuffle(x: List[Any], random: Union[Callable[[], float], None] = ...) -> None: ... def sample(population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ... def random() -> float: ... def uniform(a: float, b: float) -> float: ... def triangular(low: float = ..., high: float = ..., mode: float = ...) -> float: ... def betavariate(alpha: float, beta: float) -> float: ... def expovariate(lambd: float) -> float: ... def gammavariate(alpha: float, beta: float) -> float: ... def gauss(mu: float, sigma: float) -> float: ... def lognormvariate(mu: float, sigma: float) -> float: ... def normalvariate(mu: float, sigma: float) -> float: ... def vonmisesvariate(mu: float, kappa: float) -> float: ... def paretovariate(alpha: float) -> float: ... 
def weibullvariate(alpha: float, beta: float) -> float: ... mypy-0.761/mypy/typeshed/stdlib/3/re.pyi0000644€tŠÔÚ€2›s®0000001200613576752252024313 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for re # Ron Murawski # 'bytes' support added by Jukka Lehtosalo # based on: http://docs.python.org/3.2/library/re.html # and http://hg.python.org/cpython/file/618ea5612e83/Lib/re.py import sys from typing import ( List, Iterator, overload, Callable, Tuple, AnyStr, Any, Optional, Union ) # ----- re variables and constants ----- if sys.version_info >= (3, 7): from typing import Pattern as Pattern, Match as Match else: from typing import Pattern, Match if sys.version_info >= (3, 6): import enum class RegexFlag(enum.IntFlag): A: int ASCII: int DEBUG: int I: int IGNORECASE: int L: int LOCALE: int M: int MULTILINE: int S: int DOTALL: int X: int VERBOSE: int U: int UNICODE: int T: int TEMPLATE: int A = RegexFlag.A ASCII = RegexFlag.ASCII DEBUG = RegexFlag.DEBUG I = RegexFlag.I IGNORECASE = RegexFlag.IGNORECASE L = RegexFlag.L LOCALE = RegexFlag.LOCALE M = RegexFlag.M MULTILINE = RegexFlag.MULTILINE S = RegexFlag.S DOTALL = RegexFlag.DOTALL X = RegexFlag.X VERBOSE = RegexFlag.VERBOSE U = RegexFlag.U UNICODE = RegexFlag.UNICODE T = RegexFlag.T TEMPLATE = RegexFlag.TEMPLATE _FlagsType = Union[int, RegexFlag] else: A: int ASCII: int DEBUG: int I: int IGNORECASE: int L: int LOCALE: int M: int MULTILINE: int S: int DOTALL: int X: int VERBOSE: int U: int UNICODE: int T: int TEMPLATE: int _FlagsType = int if sys.version_info < (3, 7): # undocumented _pattern_type: type class error(Exception): ... @overload def compile(pattern: AnyStr, flags: _FlagsType = ...) -> Pattern[AnyStr]: ... @overload def compile(pattern: Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... @overload def search(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Optional[Match[AnyStr]]: ... @overload def search(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) 
-> Optional[Match[AnyStr]]: ... @overload def match(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Optional[Match[AnyStr]]: ... @overload def match(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Optional[Match[AnyStr]]: ... # New in Python 3.4 @overload def fullmatch(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Optional[Match[AnyStr]]: ... @overload def fullmatch(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Optional[Match[AnyStr]]: ... @overload def split(pattern: AnyStr, string: AnyStr, maxsplit: int = ..., flags: _FlagsType = ...) -> List[AnyStr]: ... @overload def split(pattern: Pattern[AnyStr], string: AnyStr, maxsplit: int = ..., flags: _FlagsType = ...) -> List[AnyStr]: ... @overload def findall(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> List[Any]: ... @overload def findall(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> List[Any]: ... # Return an iterator yielding match objects over all non-overlapping matches # for the RE pattern in string. The string is scanned left-to-right, and # matches are returned in the order found. Empty matches are included in the # result unless they touch the beginning of another match. @overload def finditer(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Iterator[Match[AnyStr]]: ... @overload def finditer(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Iterator[Match[AnyStr]]: ... @overload def sub(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ... @overload def sub(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ... @overload def sub(pattern: Pattern[AnyStr], repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ... 
@overload def sub(pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ... @overload def subn(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: Pattern[AnyStr], repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ... def escape(string: AnyStr) -> AnyStr: ... def purge() -> None: ... def template(pattern: Union[AnyStr, Pattern[AnyStr]], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... mypy-0.761/mypy/typeshed/stdlib/3/reprlib.pyi0000644€tŠÔÚ€2›s®0000000235313576752252025350 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for reprlib (Python 3) from array import array from typing import Any, Callable, Deque, Dict, FrozenSet, List, Set, Tuple _ReprFunc = Callable[[Any], str] def recursive_repr(fillvalue: str = ...) -> Callable[[_ReprFunc], _ReprFunc]: ... class Repr: maxlevel: int maxdict: int maxlist: int maxtuple: int maxset: int maxfrozenset: int maxdeque: int maxarray: int maxlong: int maxstring: int maxother: int def __init__(self) -> None: ... def repr(self, x: Any) -> str: ... def repr1(self, x: Any, level: int) -> str: ... def repr_tuple(self, x: Tuple[Any, ...], level: int) -> str: ... def repr_list(self, x: List[Any], level: int) -> str: ... def repr_array(self, x: array[Any], level: int) -> str: ... def repr_set(self, x: Set[Any], level: int) -> str: ... def repr_frozenset(self, x: FrozenSet[Any], level: int) -> str: ... def repr_deque(self, x: Deque[Any], level: int) -> str: ... 
def repr_dict(self, x: Dict[Any, Any], level: int) -> str: ... def repr_str(self, x: str, level: int) -> str: ... def repr_int(self, x: int, level: int) -> str: ... def repr_instance(self, x: Any, level: int) -> str: ... aRepr: Repr def repr(x: object) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/resource.pyi0000644€tŠÔÚ€2›s®0000000222013576752252025531 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for resource # NOTE: These are incomplete! from typing import Tuple, Optional, NamedTuple RLIMIT_AS: int RLIMIT_CORE: int RLIMIT_CPU: int RLIMIT_DATA: int RLIMIT_FSIZE: int RLIMIT_MEMLOCK: int RLIMIT_MSGQUEUE: int RLIMIT_NICE: int RLIMIT_NOFILE: int RLIMIT_NPROC: int RLIMIT_OFILE: int RLIMIT_RSS: int RLIMIT_RTPRIO: int RLIMIT_RTTIME: int RLIMIT_SIGPENDING: int RLIMIT_STACK: int RLIM_INFINITY: int RUSAGE_CHILDREN: int RUSAGE_SELF: int RUSAGE_THREAD: int class _RUsage(NamedTuple): ru_utime: float ru_stime: float ru_maxrss: int ru_ixrss: int ru_idrss: int ru_isrss: int ru_minflt: int ru_majflt: int ru_nswap: int ru_inblock: int ru_oublock: int ru_msgsnd: int ru_msgrcv: int ru_nsignals: int ru_nvcsw: int ru_nivcsw: int def getpagesize() -> int: ... def getrlimit(resource: int) -> Tuple[int, int]: ... def getrusage(who: int) -> _RUsage: ... def prlimit(pid: int, resource: int, limits: Optional[Tuple[int, int]]) -> Tuple[int, int]: ... def setrlimit(resource: int, limits: Tuple[int, int]) -> None: ... # NOTE: This is an alias of OSError in Python 3.3. class error(Exception): ... mypy-0.761/mypy/typeshed/stdlib/3/runpy.pyi0000644€tŠÔÚ€2›s®0000000126613576752252025070 0ustar jukkaDROPBOX\Domain Users00000000000000from types import ModuleType from typing import Dict, Optional, Any class _TempModule: mod_name: str = ... module: ModuleType = ... def __init__(self, mod_name): ... def __enter__(self): ... def __exit__(self, *args): ... class _ModifiedArgv0: value: Any = ... def __init__(self, value): ... def __enter__(self): ... def __exit__(self, *args): ... 
def run_module(mod_name: str, init_globals: Optional[Dict[str, Any]] = ..., run_name: Optional[str] = ..., alter_sys: bool = ...): ... def run_path(path_name: str, init_globals: Optional[Dict[str, Any]] = ..., run_name: str = ...): ... mypy-0.761/mypy/typeshed/stdlib/3/selectors.pyi0000644€tŠÔÚ€2›s®0000000703613576752252025717 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for selector # See https://docs.python.org/3/library/selectors.html from abc import ABCMeta, abstractmethod from typing import Any, List, Mapping, NamedTuple, Optional, Protocol, Tuple, Union class _HasFileno(Protocol): def fileno(self) -> int: ... # Type aliases added mainly to preserve some context _FileObject = Union[int, _HasFileno] _FileDescriptor = int _EventMask = int EVENT_READ: _EventMask EVENT_WRITE: _EventMask class SelectorKey(NamedTuple): fileobj: _FileObject fd: _FileDescriptor events: _EventMask data: Any class BaseSelector(metaclass=ABCMeta): @abstractmethod def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... @abstractmethod def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def modify(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... @abstractmethod def select(self, timeout: Optional[float] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def close(self) -> None: ... def get_key(self, fileobj: _FileObject) -> SelectorKey: ... @abstractmethod def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... def __enter__(self) -> BaseSelector: ... def __exit__(self, *args: Any) -> None: ... class SelectSelector(BaseSelector): def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[float] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... 
class PollSelector(BaseSelector): def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[float] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... class EpollSelector(BaseSelector): def fileno(self) -> int: ... def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[float] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... class DevpollSelector(BaseSelector): def fileno(self) -> int: ... def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[float] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... class KqueueSelector(BaseSelector): def fileno(self) -> int: ... def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[float] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... class DefaultSelector(BaseSelector): def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[float] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... 
mypy-0.761/mypy/typeshed/stdlib/3/shelve.pyi0000644€tŠÔÚ€2›s®0000000305213576752252025174 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterator, Optional, Tuple import collections class Shelf(collections.MutableMapping[Any, Any]): def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... def __contains__(self, key: Any) -> bool: ... # key should be str, but it would conflict with superclass's type signature def get(self, key: str, default: Any = ...) -> Any: ... def __getitem__(self, key: str) -> Any: ... def __setitem__(self, key: str, value: Any) -> None: ... def __delitem__(self, key: str) -> None: ... def __enter__(self) -> Shelf: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... def close(self) -> None: ... def __del__(self) -> None: ... def sync(self) -> None: ... class BsdDbShelf(Shelf): def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ... def set_location(self, key: Any) -> Tuple[str, Any]: ... def next(self) -> Tuple[str, Any]: ... def previous(self) -> Tuple[str, Any]: ... def first(self) -> Tuple[str, Any]: ... def last(self) -> Tuple[str, Any]: ... class DbfilenameShelf(Shelf): def __init__(self, filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> None: ... def open(filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> DbfilenameShelf: ... mypy-0.761/mypy/typeshed/stdlib/3/shlex.pyi0000644€tŠÔÚ€2›s®0000000302013576752252025024 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple, Any, TextIO, Union, Optional, Iterable, TypeVar import sys def split(s: str, comments: bool = ..., posix: bool = ...) -> List[str]: ... 
if sys.version_info >= (3, 8): def join(split_command: Iterable[str]) -> str: ... def quote(s: str) -> str: ... _SLT = TypeVar('_SLT', bound=shlex) class shlex(Iterable[str]): commenters: str wordchars: str whitespace: str escape: str quotes: str escapedquotes: str whitespace_split: bool infile: str instream: TextIO source: str debug: int lineno: int token: str eof: str if sys.version_info >= (3, 6): punctuation_chars: str if sys.version_info >= (3, 6): def __init__(self, instream: Union[str, TextIO] = ..., infile: Optional[str] = ..., posix: bool = ..., punctuation_chars: Union[bool, str] = ...) -> None: ... else: def __init__(self, instream: Union[str, TextIO] = ..., infile: Optional[str] = ..., posix: bool = ...) -> None: ... def get_token(self) -> str: ... def push_token(self, tok: str) -> None: ... def read_token(self) -> str: ... def sourcehook(self, filename: str) -> Tuple[str, TextIO]: ... # TODO argument types def push_source(self, newstream: Any, newfile: Any = ...) -> None: ... def pop_source(self) -> None: ... def error_leader(self, infile: str = ..., lineno: int = ...) -> None: ... def __iter__(self: _SLT) -> _SLT: ... def __next__(self) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/signal.pyi0000644€tŠÔÚ€2›s®0000000645113576752252025171 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'signal' module.""" from enum import IntEnum from typing import Any, Callable, Tuple, Union, Optional, Iterable, Set from types import FrameType class ItimerError(IOError): ... 
ITIMER_PROF: int ITIMER_REAL: int ITIMER_VIRTUAL: int NSIG: int class Signals(IntEnum): SIGABRT: int SIGALRM: int SIGBREAK: int # Windows SIGBUS: int SIGCHLD: int SIGCLD: int SIGCONT: int SIGEMT: int SIGFPE: int SIGHUP: int SIGILL: int SIGINFO: int SIGINT: int SIGIO: int SIGIOT: int SIGKILL: int SIGPIPE: int SIGPOLL: int SIGPROF: int SIGPWR: int SIGQUIT: int SIGRTMAX: int SIGRTMIN: int SIGSEGV: int SIGSTOP: int SIGSYS: int SIGTERM: int SIGTRAP: int SIGTSTP: int SIGTTIN: int SIGTTOU: int SIGURG: int SIGUSR1: int SIGUSR2: int SIGVTALRM: int SIGWINCH: int SIGXCPU: int SIGXFSZ: int class Handlers(IntEnum): SIG_DFL: int SIG_IGN: int SIG_DFL = Handlers.SIG_DFL SIG_IGN = Handlers.SIG_IGN class Sigmasks(IntEnum): SIG_BLOCK: int SIG_UNBLOCK: int SIG_SETMASK: int SIG_BLOCK = Sigmasks.SIG_BLOCK SIG_UNBLOCK = Sigmasks.SIG_UNBLOCK SIG_SETMASK = Sigmasks.SIG_SETMASK _SIGNUM = Union[int, Signals] _HANDLER = Union[Callable[[Signals, FrameType], None], int, Handlers, None] SIGABRT: Signals SIGALRM: Signals SIGBREAK: Signals # Windows SIGBUS: Signals SIGCHLD: Signals SIGCLD: Signals SIGCONT: Signals SIGEMT: Signals SIGFPE: Signals SIGHUP: Signals SIGILL: Signals SIGINFO: Signals SIGINT: Signals SIGIO: Signals SIGIOT: Signals SIGKILL: Signals SIGPIPE: Signals SIGPOLL: Signals SIGPROF: Signals SIGPWR: Signals SIGQUIT: Signals SIGRTMAX: Signals SIGRTMIN: Signals SIGSEGV: Signals SIGSTOP: Signals SIGSYS: Signals SIGTERM: Signals SIGTRAP: Signals SIGTSTP: Signals SIGTTIN: Signals SIGTTOU: Signals SIGURG: Signals SIGUSR1: Signals SIGUSR2: Signals SIGVTALRM: Signals SIGWINCH: Signals SIGXCPU: Signals SIGXFSZ: Signals # Windows CTRL_C_EVENT: int CTRL_BREAK_EVENT: int class struct_siginfo(Tuple[int, int, int, int, int, int, int]): def __init__(self, sequence: Iterable[int]) -> None: ... @property def si_signo(self) -> int: ... @property def si_code(self) -> int: ... @property def si_errno(self) -> int: ... @property def si_pid(self) -> int: ... @property def si_uid(self) -> int: ... 
@property def si_status(self) -> int: ... @property def si_band(self) -> int: ... def alarm(time: int) -> int: ... def default_int_handler(signum: int, frame: FrameType) -> None: ... def getitimer(which: int) -> Tuple[float, float]: ... def getsignal(signalnum: _SIGNUM) -> _HANDLER: ... def pause() -> None: ... def pthread_kill(thread_id: int, signum: int) -> None: ... def pthread_sigmask(how: int, mask: Iterable[int]) -> Set[_SIGNUM]: ... def set_wakeup_fd(fd: int) -> int: ... def setitimer(which: int, seconds: float, interval: float = ...) -> Tuple[float, float]: ... def siginterrupt(signalnum: int, flag: bool) -> None: ... def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: ... def sigpending() -> Any: ... def sigtimedwait(sigset: Iterable[int], timeout: float) -> Optional[struct_siginfo]: ... def sigwait(sigset: Iterable[int]) -> _SIGNUM: ... def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo: ... mypy-0.761/mypy/typeshed/stdlib/3/smtplib.pyi0000644€tŠÔÚ€2›s®0000001274413576752252025370 0ustar jukkaDROPBOX\Domain Users00000000000000from email.message import Message as _Message from socket import socket from ssl import SSLContext from types import TracebackType from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, Pattern, Type, Protocol, overload _Reply = Tuple[int, bytes] _SendErrs = Dict[str, _Reply] # Should match source_address for socket.create_connection _SourceAddress = Tuple[Union[bytearray, bytes, str], int] SMTP_PORT: int SMTP_SSL_PORT: int CRLF: str bCRLF: bytes OLDSTYLE_AUTH: Pattern[str] class SMTPException(OSError): ... class SMTPNotSupportedError(SMTPException): ... class SMTPServerDisconnected(SMTPException): ... class SMTPResponseException(SMTPException): smtp_code: int smtp_error: Union[bytes, str] args: Union[Tuple[int, Union[bytes, str]], Tuple[int, bytes, str]] def __init__(self, code: int, msg: Union[bytes, str]) -> None: ... 
class SMTPSenderRefused(SMTPResponseException): smtp_code: int smtp_error: bytes sender: str args: Tuple[int, bytes, str] def __init__(self, code: int, msg: bytes, sender: str) -> None: ... class SMTPRecipientsRefused(SMTPException): recipients: _SendErrs args: Tuple[_SendErrs] def __init__(self, recipients: _SendErrs) -> None: ... class SMTPDataError(SMTPResponseException): ... class SMTPConnectError(SMTPResponseException): ... class SMTPHeloError(SMTPResponseException): ... class SMTPAuthenticationError(SMTPResponseException): ... def quoteaddr(addrstring: str) -> str: ... def quotedata(data: str) -> str: ... class _AuthObject(Protocol): @overload def __call__(self, challenge: None = ...) -> Optional[str]: ... @overload def __call__(self, challenge: bytes) -> str: ... class SMTP: debuglevel: int = ... sock: Optional[socket] = ... # Type of file should match what socket.makefile() returns file: Optional[Any] = ... helo_resp: Optional[bytes] = ... ehlo_msg: str = ... ehlo_resp: Optional[bytes] = ... does_esmtp: bool = ... default_port: int = ... timeout: float esmtp_features: Dict[str, str] command_encoding: str source_address: Optional[_SourceAddress] local_hostname: str def __init__(self, host: str = ..., port: int = ..., local_hostname: Optional[str] = ..., timeout: float = ..., source_address: Optional[_SourceAddress] = ...) -> None: ... def __enter__(self) -> SMTP: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], tb: Optional[TracebackType]) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... def connect(self, host: str = ..., port: int = ..., source_address: Optional[_SourceAddress] = ...) -> _Reply: ... def send(self, s: Union[bytes, str]) -> None: ... def putcmd(self, cmd: str, args: str = ...) -> None: ... def getreply(self) -> _Reply: ... def docmd(self, cmd: str, args: str = ...) -> _Reply: ... def helo(self, name: str = ...) -> _Reply: ... def ehlo(self, name: str = ...) -> _Reply: ... 
def has_extn(self, opt: str) -> bool: ... def help(self, args: str = ...) -> bytes: ... def rset(self) -> _Reply: ... def noop(self) -> _Reply: ... def mail(self, sender: str, options: Sequence[str] = ...) -> _Reply: ... def rcpt(self, recip: str, options: Sequence[str] = ...) -> _Reply: ... def data(self, msg: Union[bytes, str]) -> _Reply: ... def verify(self, address: str) -> _Reply: ... vrfy = verify def expn(self, address: str) -> _Reply: ... def ehlo_or_helo_if_needed(self) -> None: ... user: str password: str def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = ...) -> _Reply: ... @overload def auth_cram_md5(self, challenge: None = ...) -> None: ... @overload def auth_cram_md5(self, challenge: bytes) -> str: ... def auth_plain(self, challenge: Optional[bytes] = ...) -> str: ... def auth_login(self, challenge: Optional[bytes] = ...) -> str: ... def login(self, user: str, password: str, *, initial_response_ok: bool = ...) -> _Reply: ... def starttls(self, keyfile: Optional[str] = ..., certfile: Optional[str] = ..., context: Optional[SSLContext] = ...) -> _Reply: ... def sendmail(self, from_addr: str, to_addrs: Union[str, Sequence[str]], msg: Union[bytes, str], mail_options: Sequence[str] = ..., rcpt_options: List[str] = ...) -> _SendErrs: ... def send_message(self, msg: _Message, from_addr: Optional[str] = ..., to_addrs: Optional[Union[str, Sequence[str]]] = ..., mail_options: List[str] = ..., rcpt_options: Sequence[str] = ...) -> _SendErrs: ... def close(self) -> None: ... def quit(self) -> _Reply: ... class SMTP_SSL(SMTP): default_port: int = ... keyfile: Optional[str] certfile: Optional[str] context: SSLContext def __init__(self, host: str = ..., port: int = ..., local_hostname: Optional[str] = ..., keyfile: Optional[str] = ..., certfile: Optional[str] = ..., timeout: float = ..., source_address: Optional[_SourceAddress] = ..., context: Optional[SSLContext] = ...) -> None: ... 
LMTP_PORT: int class LMTP(SMTP): def __init__(self, host: str = ..., port: int = ..., local_hostname: Optional[str] = ..., source_address: Optional[_SourceAddress] = ...) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/socketserver.pyi0000644€tŠÔÚ€2›s®0000000755613576752252026442 0ustar jukkaDROPBOX\Domain Users00000000000000# NB: SocketServer.pyi and socketserver.pyi must remain consistent! # Stubs for socketserver from typing import Any, BinaryIO, Callable, Optional, Tuple, Type, Text, Union from socket import SocketType import sys import types class BaseServer: address_family: int RequestHandlerClass: Callable[..., BaseRequestHandler] server_address: Tuple[str, int] socket: SocketType allow_reuse_address: bool request_queue_size: int socket_type: int timeout: Optional[float] def __init__(self, server_address: Any, RequestHandlerClass: Callable[..., BaseRequestHandler]) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... def serve_forever(self, poll_interval: float = ...) -> None: ... def shutdown(self) -> None: ... def server_close(self) -> None: ... def finish_request(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def get_request(self) -> None: ... def handle_error(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def handle_timeout(self) -> None: ... def process_request(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def server_activate(self) -> None: ... def server_bind(self) -> None: ... def verify_request(self, request: bytes, client_address: Tuple[str, int]) -> bool: ... if sys.version_info >= (3, 6): def __enter__(self) -> BaseServer: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[types.TracebackType]) -> None: ... if sys.version_info >= (3, 3): def service_actions(self) -> None: ... 
class TCPServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: Callable[..., BaseRequestHandler], bind_and_activate: bool = ...) -> None: ... class UDPServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: Callable[..., BaseRequestHandler], bind_and_activate: bool = ...) -> None: ... if sys.platform != 'win32': class UnixStreamServer(BaseServer): def __init__(self, server_address: Union[Text, bytes], RequestHandlerClass: Callable[..., BaseRequestHandler], bind_and_activate: bool = ...) -> None: ... class UnixDatagramServer(BaseServer): def __init__(self, server_address: Union[Text, bytes], RequestHandlerClass: Callable[..., BaseRequestHandler], bind_and_activate: bool = ...) -> None: ... class ForkingMixIn: ... class ThreadingMixIn: ... class ForkingTCPServer(ForkingMixIn, TCPServer): ... class ForkingUDPServer(ForkingMixIn, UDPServer): ... class ThreadingTCPServer(ThreadingMixIn, TCPServer): ... class ThreadingUDPServer(ThreadingMixIn, UDPServer): ... if sys.platform != 'win32': class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): ... class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... class BaseRequestHandler: # Those are technically of types, respectively: # * Union[SocketType, Tuple[bytes, SocketType]] # * Union[Tuple[str, int], str] # But there are some concerns that having unions here would cause # too much inconvenience to people using it (see # https://github.com/python/typeshed/pull/384#issuecomment-234649696) request: Any client_address: Any server: BaseServer def setup(self) -> None: ... def handle(self) -> None: ... def finish(self) -> None: ... 
class StreamRequestHandler(BaseRequestHandler): rfile: BinaryIO wfile: BinaryIO class DatagramRequestHandler(BaseRequestHandler): rfile: BinaryIO wfile: BinaryIO mypy-0.761/mypy/typeshed/stdlib/3/spwd.pyi0000644€tŠÔÚ€2›s®0000000046613576752252024671 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, NamedTuple class struct_spwd(NamedTuple): sp_namp: str sp_pwdp: str sp_lstchg: int sp_min: int sp_max: int sp_warn: int sp_inact: int sp_expire: int sp_flag: int def getspall() -> List[struct_spwd]: ... def getspnam(name: str) -> struct_spwd: ... mypy-0.761/mypy/typeshed/stdlib/3/sre_constants.pyi0000644€tŠÔÚ€2›s®0000000651613576752252026603 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://github.com/python/cpython/blob/master/Lib/sre_constants.py from typing import Any, Dict, List, Optional, Union MAGIC: int class error(Exception): msg: str pattern: Optional[Union[str, bytes]] pos: Optional[int] lineno: int colno: int def __init__(self, msg: str, pattern: Union[str, bytes] = ..., pos: int = ...) -> None: ... class _NamedIntConstant(int): name: Any def __new__(cls, value: int, name: str): ... 
MAXREPEAT: _NamedIntConstant OPCODES: List[_NamedIntConstant] ATCODES: List[_NamedIntConstant] CHCODES: List[_NamedIntConstant] OP_IGNORE: Dict[_NamedIntConstant, _NamedIntConstant] AT_MULTILINE: Dict[_NamedIntConstant, _NamedIntConstant] AT_LOCALE: Dict[_NamedIntConstant, _NamedIntConstant] AT_UNICODE: Dict[_NamedIntConstant, _NamedIntConstant] CH_LOCALE: Dict[_NamedIntConstant, _NamedIntConstant] CH_UNICODE: Dict[_NamedIntConstant, _NamedIntConstant] SRE_FLAG_TEMPLATE: int SRE_FLAG_IGNORECASE: int SRE_FLAG_LOCALE: int SRE_FLAG_MULTILINE: int SRE_FLAG_DOTALL: int SRE_FLAG_UNICODE: int SRE_FLAG_VERBOSE: int SRE_FLAG_DEBUG: int SRE_FLAG_ASCII: int SRE_INFO_PREFIX: int SRE_INFO_LITERAL: int SRE_INFO_CHARSET: int # Stubgen above; manually defined constants below (dynamic at runtime) # from OPCODES FAILURE: _NamedIntConstant SUCCESS: _NamedIntConstant ANY: _NamedIntConstant ANY_ALL: _NamedIntConstant ASSERT: _NamedIntConstant ASSERT_NOT: _NamedIntConstant AT: _NamedIntConstant BRANCH: _NamedIntConstant CALL: _NamedIntConstant CATEGORY: _NamedIntConstant CHARSET: _NamedIntConstant BIGCHARSET: _NamedIntConstant GROUPREF: _NamedIntConstant GROUPREF_EXISTS: _NamedIntConstant GROUPREF_IGNORE: _NamedIntConstant IN: _NamedIntConstant IN_IGNORE: _NamedIntConstant INFO: _NamedIntConstant JUMP: _NamedIntConstant LITERAL: _NamedIntConstant LITERAL_IGNORE: _NamedIntConstant MARK: _NamedIntConstant MAX_UNTIL: _NamedIntConstant MIN_UNTIL: _NamedIntConstant NOT_LITERAL: _NamedIntConstant NOT_LITERAL_IGNORE: _NamedIntConstant NEGATE: _NamedIntConstant RANGE: _NamedIntConstant REPEAT: _NamedIntConstant REPEAT_ONE: _NamedIntConstant SUBPATTERN: _NamedIntConstant MIN_REPEAT_ONE: _NamedIntConstant RANGE_IGNORE: _NamedIntConstant MIN_REPEAT: _NamedIntConstant MAX_REPEAT: _NamedIntConstant # from ATCODES AT_BEGINNING: _NamedIntConstant AT_BEGINNING_LINE: _NamedIntConstant AT_BEGINNING_STRING: _NamedIntConstant AT_BOUNDARY: _NamedIntConstant AT_NON_BOUNDARY: _NamedIntConstant AT_END: 
_NamedIntConstant AT_END_LINE: _NamedIntConstant AT_END_STRING: _NamedIntConstant AT_LOC_BOUNDARY: _NamedIntConstant AT_LOC_NON_BOUNDARY: _NamedIntConstant AT_UNI_BOUNDARY: _NamedIntConstant AT_UNI_NON_BOUNDARY: _NamedIntConstant # from CHCODES CATEGORY_DIGIT: _NamedIntConstant CATEGORY_NOT_DIGIT: _NamedIntConstant CATEGORY_SPACE: _NamedIntConstant CATEGORY_NOT_SPACE: _NamedIntConstant CATEGORY_WORD: _NamedIntConstant CATEGORY_NOT_WORD: _NamedIntConstant CATEGORY_LINEBREAK: _NamedIntConstant CATEGORY_NOT_LINEBREAK: _NamedIntConstant CATEGORY_LOC_WORD: _NamedIntConstant CATEGORY_LOC_NOT_WORD: _NamedIntConstant CATEGORY_UNI_DIGIT: _NamedIntConstant CATEGORY_UNI_NOT_DIGIT: _NamedIntConstant CATEGORY_UNI_SPACE: _NamedIntConstant CATEGORY_UNI_NOT_SPACE: _NamedIntConstant CATEGORY_UNI_WORD: _NamedIntConstant CATEGORY_UNI_NOT_WORD: _NamedIntConstant CATEGORY_UNI_LINEBREAK: _NamedIntConstant CATEGORY_UNI_NOT_LINEBREAK: _NamedIntConstant mypy-0.761/mypy/typeshed/stdlib/3/sre_parse.pyi0000644€tŠÔÚ€2›s®0000000653313576752252025700 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://github.com/python/cpython/blob/master/Lib/sre_parse.py from typing import ( Any, Dict, FrozenSet, Iterable, List, Match, Optional, Pattern as _Pattern, Tuple, Union ) import sys from sre_constants import _NamedIntConstant as NIC, error as _Error SPECIAL_CHARS: str REPEAT_CHARS: str DIGITS: FrozenSet[str] OCTDIGITS: FrozenSet[str] HEXDIGITS: FrozenSet[str] ASCIILETTERS: FrozenSet[str] WHITESPACE: FrozenSet[str] ESCAPES: Dict[str, Tuple[NIC, int]] CATEGORIES: Dict[str, Union[Tuple[NIC, NIC], Tuple[NIC, List[Tuple[NIC, NIC]]]]] FLAGS: Dict[str, int] GLOBAL_FLAGS: int class Verbose(Exception): ... class _State: flags: int groupdict: Dict[str, int] groupwidths: List[Optional[int]] lookbehindgroups: Optional[int] def __init__(self) -> None: ... @property def groups(self) -> int: ... def opengroup(self, name: str = ...) -> int: ... def closegroup(self, gid: int, p: SubPattern) -> None: ... 
def checkgroup(self, gid: int) -> bool: ... def checklookbehindgroup(self, gid: int, source: Tokenizer) -> None: ... if sys.version_info >= (3, 8): State = _State else: Pattern = _State _OpSubpatternType = Tuple[Optional[int], int, int, SubPattern] _OpGroupRefExistsType = Tuple[int, SubPattern, SubPattern] _OpInType = List[Tuple[NIC, int]] _OpBranchType = Tuple[None, List[SubPattern]] _AvType = Union[_OpInType, _OpBranchType, Iterable[SubPattern], _OpGroupRefExistsType, _OpSubpatternType] _CodeType = Tuple[NIC, _AvType] class SubPattern: data: List[_CodeType] width: Optional[int] if sys.version_info >= (3, 8): state: State def __init__(self, state: State, data: List[_CodeType] = ...) -> None: ... else: pattern: Pattern def __init__(self, pattern: Pattern, data: List[_CodeType] = ...) -> None: ... def dump(self, level: int = ...) -> None: ... def __len__(self) -> int: ... def __delitem__(self, index: Union[int, slice]) -> None: ... def __getitem__(self, index: Union[int, slice]) -> Union[SubPattern, _CodeType]: ... def __setitem__(self, index: Union[int, slice], code: _CodeType) -> None: ... def insert(self, index: int, code: _CodeType) -> None: ... def append(self, code: _CodeType) -> None: ... def getwidth(self) -> int: ... class Tokenizer: istext: bool string: Any decoded_string: str index: int next: Optional[str] def __init__(self, string: Any) -> None: ... def match(self, char: str) -> bool: ... def get(self) -> Optional[str]: ... def getwhile(self, n: int, charset: Iterable[str]) -> str: ... def getuntil(self, terminator: str) -> str: ... @property def pos(self) -> int: ... def tell(self) -> int: ... def seek(self, index: int) -> None: ... def error(self, msg: str, offset: int = ...) -> _Error: ... def fix_flags(src: Union[str, bytes], flag: int) -> int: ... _TemplateType = Tuple[List[Tuple[int, int]], List[str]] if sys.version_info >= (3, 8): def parse(str: str, flags: int = ..., state: State = ...) -> SubPattern: ... 
def parse_template(source: str, state: _Pattern[Any]) -> _TemplateType: ... else: def parse(str: str, flags: int = ..., pattern: Pattern = ...) -> SubPattern: ... def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ... def expand_template(template: _TemplateType, match: Match[Any]) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/stat.pyi0000644€tŠÔÚ€2›s®0000000353513576752252024667 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for stat # Based on http://docs.python.org/3.2/library/stat.html import sys def S_ISDIR(mode: int) -> bool: ... def S_ISCHR(mode: int) -> bool: ... def S_ISBLK(mode: int) -> bool: ... def S_ISREG(mode: int) -> bool: ... def S_ISFIFO(mode: int) -> bool: ... def S_ISLNK(mode: int) -> bool: ... def S_ISSOCK(mode: int) -> bool: ... def S_IMODE(mode: int) -> int: ... def S_IFMT(mode: int) -> int: ... def filemode(mode: int) -> str: ... ST_MODE: int ST_INO: int ST_DEV: int ST_NLINK: int ST_UID: int ST_GID: int ST_SIZE: int ST_ATIME: int ST_MTIME: int ST_CTIME: int S_IFSOCK: int S_IFLNK: int S_IFREG: int S_IFBLK: int S_IFDIR: int S_IFCHR: int S_IFIFO: int S_ISUID: int S_ISGID: int S_ISVTX: int S_IRWXU: int S_IRUSR: int S_IWUSR: int S_IXUSR: int S_IRWXG: int S_IRGRP: int S_IWGRP: int S_IXGRP: int S_IRWXO: int S_IROTH: int S_IWOTH: int S_IXOTH: int S_ENFMT: int S_IREAD: int S_IWRITE: int S_IEXEC: int UF_NODUMP: int UF_IMMUTABLE: int UF_APPEND: int UF_OPAQUE: int UF_NOUNLINK: int if sys.platform == 'darwin': UF_COMPRESSED: int # OS X 10.6+ only UF_HIDDEN: int # OX X 10.5+ only SF_ARCHIVED: int SF_IMMUTABLE: int SF_APPEND: int SF_NOUNLINK: int SF_SNAPSHOT: int FILE_ATTRIBUTE_ARCHIVE: int FILE_ATTRIBUTE_COMPRESSED: int FILE_ATTRIBUTE_DEVICE: int FILE_ATTRIBUTE_DIRECTORY: int FILE_ATTRIBUTE_ENCRYPTED: int FILE_ATTRIBUTE_HIDDEN: int FILE_ATTRIBUTE_INTEGRITY_STREAM: int FILE_ATTRIBUTE_NORMAL: int FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: int FILE_ATTRIBUTE_NO_SCRUB_DATA: int FILE_ATTRIBUTE_OFFLINE: int FILE_ATTRIBUTE_READONLY: int 
FILE_ATTRIBUTE_REPARSE_POINT: int FILE_ATTRIBUTE_SPARSE_FILE: int FILE_ATTRIBUTE_SYSTEM: int FILE_ATTRIBUTE_TEMPORARY: int FILE_ATTRIBUTE_VIRTUAL: int if sys.platform == "win32" and sys.version_info >= (3, 8): IO_REPARSE_TAG_SYMLINK: int IO_REPARSE_TAG_MOUNT_POINT: int IO_REPARSE_TAG_APPEXECLINK: int mypy-0.761/mypy/typeshed/stdlib/3/statistics.pyi0000644€tŠÔÚ€2›s®0000000551213576752252026103 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for statistics from decimal import Decimal from fractions import Fraction import sys from typing import Any, Iterable, List, Optional, SupportsFloat, Type, TypeVar, Union _T = TypeVar("_T") # Most functions in this module accept homogeneous collections of one of these types _Number = TypeVar('_Number', float, Decimal, Fraction) class StatisticsError(ValueError): ... if sys.version_info >= (3, 8): def fmean(data: Iterable[SupportsFloat]) -> float: ... def geometric_mean(data: Iterable[SupportsFloat]) -> float: ... def mean(data: Iterable[_Number]) -> _Number: ... if sys.version_info >= (3, 6): def harmonic_mean(data: Iterable[_Number]) -> _Number: ... def median(data: Iterable[_Number]) -> _Number: ... def median_low(data: Iterable[_Number]) -> _Number: ... def median_high(data: Iterable[_Number]) -> _Number: ... def median_grouped(data: Iterable[_Number]) -> _Number: ... def mode(data: Iterable[_Number]) -> _Number: ... if sys.version_info >= (3, 8): def multimode(data: Iterable[_T]) -> List[_T]: ... def pstdev(data: Iterable[_Number], mu: Optional[_Number] = ...) -> _Number: ... def pvariance(data: Iterable[_Number], mu: Optional[_Number] = ...) -> _Number: ... if sys.version_info >= (3, 8): def quantiles(data: Iterable[_Number], *, n: int = ..., method: str = ...) -> List[_Number]: ... def stdev(data: Iterable[_Number], xbar: Optional[_Number] = ...) -> _Number: ... def variance(data: Iterable[_Number], xbar: Optional[_Number] = ...) -> _Number: ... 
if sys.version_info >= (3, 8): class NormalDist: def __init__(self, mu: float = ..., sigma: float = ...) -> None: ... @property def mean(self) -> float: ... @property def median(self) -> float: ... @property def mode(self) -> float: ... @property def stdev(self) -> float: ... @property def variance(self) -> float: ... @classmethod def from_samples(cls: Type[_T], data: Iterable[SupportsFloat]) -> _T: ... def samples(self, n: int, *, seed: Optional[Any]) -> List[float]: ... def pdf(self, x: float) -> float: ... def cdf(self, x: float) -> float: ... def inv_cdf(self, p: float) -> float: ... def overlap(self, other: NormalDist) -> float: ... def quantiles(self, n: int = ...) -> List[float]: ... def __add__(self, x2: Union[float, NormalDist]) -> NormalDist: ... def __sub__(self, x2: Union[float, NormalDist]) -> NormalDist: ... def __mul__(self, x2: float) -> NormalDist: ... def __truediv__(self, x2: float) -> NormalDist: ... def __pos__(self) -> NormalDist: ... def __neg__(self) -> NormalDist: ... __radd__ = __add__ def __rsub__(self, x2: Union[float, NormalDist]) -> NormalDist: ... __rmul__ = __mul__ def __hash__(self) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3/string.pyi0000644€tŠÔÚ€2›s®0000000304313576752252025214 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for string # Based on http://docs.python.org/3.2/library/string.html from typing import Mapping, Sequence, Any, Optional, Union, Tuple, Iterable ascii_letters: str ascii_lowercase: str ascii_uppercase: str digits: str hexdigits: str octdigits: str punctuation: str printable: str whitespace: str def capwords(s: str, sep: str = ...) -> str: ... class Template: template: str def __init__(self, template: str) -> None: ... def substitute(self, mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... def safe_substitute(self, mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... # TODO(MichalPokorny): This is probably badly and/or loosely typed. 
class Formatter: def format(self, format_string: str, *args: Any, **kwargs: Any) -> str: ... def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... def parse(self, format_string: str) -> Iterable[Tuple[str, Optional[str], Optional[str], Optional[str]]]: ... def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def get_value(self, key: Union[int, str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def check_unused_args(self, used_args: Sequence[Union[int, str]], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... def format_field(self, value: Any, format_spec: str) -> Any: ... def convert_field(self, value: Any, conversion: str) -> Any: ... mypy-0.761/mypy/typeshed/stdlib/3/subprocess.pyi0000644€tŠÔÚ€2›s®0000013250213576752252026101 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for subprocess # Based on http://docs.python.org/3.6/library/subprocess.html import sys from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Optional, Union, Type, Text, Generic, TypeVar, AnyStr, overload from types import TracebackType if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal # We prefer to annotate inputs to methods (eg subprocess.check_call) with these # union types. # For outputs we use laborious literal based overloads to try to determine # which specific return types to use, and prefer to fall back to Any when # this does not work, so the caller does not have to use an assertion to confirm # which type. 
# # For example: # # try: # x = subprocess.check_output(["ls", "-l"]) # reveal_type(x) # bytes, based on the overloads # except TimeoutError as e: # reveal_type(e.cmd) # Any, but morally is _CMD _FILE = Union[None, int, IO[Any]] _TXT = Union[bytes, Text] if sys.version_info >= (3, 6): from builtins import _PathLike _PATH = Union[bytes, Text, _PathLike] else: _PATH = Union[bytes, Text] # Python 3.6 does't support _CMD being a single PathLike. # See: https://bugs.python.org/issue31961 _CMD = Union[_TXT, Sequence[_PATH]] _ENV = Union[Mapping[bytes, _TXT], Mapping[Text, _TXT]] _S = TypeVar('_S') _T = TypeVar('_T') class CompletedProcess(Generic[_T]): # morally: _CMD args: Any returncode: int # These are really both Optional, but requiring checks would be tedious # and writing all the overloads would be horrific. stdout: _T stderr: _T def __init__(self, args: _CMD, returncode: int, stdout: Optional[_T] = ..., stderr: Optional[_T] = ...) -> None: ... def check_returncode(self) -> None: ... if sys.version_info >= (3, 7): # Nearly the same args as for 3.6, except for capture_output and text @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, capture_output: bool = ..., check: bool = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., input: Optional[str] = ..., text: Literal[True], timeout: Optional[float] = ..., ) -> CompletedProcess[str]: ... 
@overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, capture_output: bool = ..., check: bool = ..., encoding: str, errors: Optional[str] = ..., input: Optional[str] = ..., text: Optional[bool] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, capture_output: bool = ..., check: bool = ..., encoding: Optional[str] = ..., errors: str, input: Optional[str] = ..., text: Optional[bool] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[str]: ... 
@overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., *, universal_newlines: Literal[True], startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., # where the *real* keyword only args start capture_output: bool = ..., check: bool = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., input: Optional[str] = ..., text: Optional[bool] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: Literal[False] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, capture_output: bool = ..., check: bool = ..., encoding: None = ..., errors: None = ..., input: Optional[bytes] = ..., text: Literal[None, False] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[bytes]: ... 
@overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, capture_output: bool = ..., check: bool = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., input: Optional[_TXT] = ..., text: Optional[bool] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[Any]: ... elif sys.version_info >= (3, 6): # Nearly same args as Popen.__init__ except for timeout, input, and check @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, check: bool = ..., encoding: str, errors: Optional[str] = ..., input: Optional[str] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, check: bool = ..., encoding: Optional[str] = ..., errors: str, input: Optional[str] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[str]: ... 
@overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., *, universal_newlines: Literal[True], startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., # where the *real* keyword only args start check: bool = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., input: Optional[str] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: Literal[False] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, check: bool = ..., encoding: None = ..., errors: None = ..., input: Optional[bytes] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[bytes]: ... @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, check: bool = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., input: Optional[_TXT] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[Any]: ... 
else: # Nearly same args as Popen.__init__ except for timeout, input, and check @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., *, universal_newlines: Literal[True], startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., # where the *real* keyword only args start check: bool = ..., input: Optional[str] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: Literal[False] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, check: bool = ..., input: Optional[bytes] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[bytes]: ... @overload def run( args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, check: bool = ..., input: Optional[_TXT] = ..., timeout: Optional[float] = ..., ) -> CompletedProcess[Any]: ... 
# Same args as Popen.__init__ def call(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., timeout: Optional[float] = ...) -> int: ... # Same args as Popen.__init__ def check_call(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., timeout: Optional[float] = ...) -> int: ... if sys.version_info >= (3, 7): # 3.7 added text @overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: Optional[float] = ..., input: _TXT = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., text: Literal[True], ) -> str: ... 
@overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: Optional[float] = ..., input: _TXT = ..., encoding: str, errors: Optional[str] = ..., text: Optional[bool] = ..., ) -> str: ... @overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: Optional[float] = ..., input: _TXT = ..., encoding: Optional[str] = ..., errors: str, text: Optional[bool] = ..., ) -> str: ... @overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., *, universal_newlines: Literal[True], startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., # where the real keyword only ones start timeout: Optional[float] = ..., input: _TXT = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., text: Optional[bool] = ..., ) -> str: ... 
@overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: Literal[False] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: Optional[float] = ..., input: _TXT = ..., encoding: None = ..., errors: None = ..., text: Literal[None, False] = ..., ) -> bytes: ... @overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: Optional[float] = ..., input: _TXT = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., text: Optional[bool] = ..., ) -> Any: ... # morally: -> _TXT elif sys.version_info >= (3, 6): # 3.6 added encoding and errors @overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: Optional[float] = ..., input: _TXT = ..., encoding: str, errors: Optional[str] = ..., ) -> str: ... 
@overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: Optional[float] = ..., input: _TXT = ..., encoding: Optional[str] = ..., errors: str, ) -> str: ... @overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, universal_newlines: Literal[True], timeout: Optional[float] = ..., input: _TXT = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., ) -> str: ... @overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: Literal[False] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: Optional[float] = ..., input: _TXT = ..., encoding: None = ..., errors: None = ..., ) -> bytes: ... 
@overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: Optional[float] = ..., input: _TXT = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., ) -> Any: ... # morally: -> _TXT else: @overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., timeout: Optional[float] = ..., input: _TXT = ..., *, universal_newlines: Literal[True], ) -> str: ... @overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: Literal[False] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., timeout: Optional[float] = ..., input: _TXT = ..., ) -> bytes: ... 
@overload def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., timeout: Optional[float] = ..., input: _TXT = ..., ) -> Any: ... # morally: -> _TXT PIPE: int STDOUT: int DEVNULL: int class SubprocessError(Exception): ... class TimeoutExpired(SubprocessError): def __init__(self, cmd: _CMD, timeout: float, output: Optional[_TXT] = ..., stderr: Optional[_TXT] = ...) -> None: ... # morally: _CMD cmd: Any timeout: float # morally: Optional[_TXT] output: Any stdout: Any stderr: Any class CalledProcessError(Exception): returncode: int # morally: _CMD cmd: Any # morally: Optional[_TXT] output: Any # morally: Optional[_TXT] stdout: Any stderr: Any def __init__(self, returncode: int, cmd: _CMD, output: Optional[_TXT] = ..., stderr: Optional[_TXT] = ...) -> None: ... class Popen(Generic[AnyStr]): args: _CMD stdin: IO[AnyStr] stdout: IO[AnyStr] stderr: IO[AnyStr] pid: int returncode: int # Technically it is wrong that Popen provides __new__ instead of __init__ # but this shouldn't come up hopefully? if sys.version_info >= (3, 7): # text is added in 3.7 @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, text: Optional[bool] = ..., encoding: str, errors: Optional[str] = ...) 
-> Popen[str]: ... @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, text: Optional[bool] = ..., encoding: Optional[str] = ..., errors: str) -> Popen[str]: ... @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., *, universal_newlines: Literal[True], startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., # where the *real* keyword only args start text: Optional[bool] = ..., encoding: Optional[str] = ..., errors: Optional[str] = ...) -> Popen[str]: ... @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, text: Literal[True], encoding: Optional[str] = ..., errors: Optional[str] = ...) -> Popen[str]: ... 
@overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: Literal[False] = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, text: Literal[None, False] = ..., encoding: None = ..., errors: None = ...) -> Popen[bytes]: ... @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, text: Optional[bool] = ..., encoding: Optional[str] = ..., errors: Optional[str] = ...) -> Popen[Any]: ... elif sys.version_info >= (3, 6): @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, encoding: str, errors: Optional[str] = ...) -> Popen[str]: ... 
@overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, encoding: Optional[str] = ..., errors: str) -> Popen[str]: ... @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., *, universal_newlines: Literal[True], startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., # where the *real* keyword only args start encoding: Optional[str] = ..., errors: Optional[str] = ...) -> Popen[str]: ... @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: Literal[False] = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, encoding: None = ..., errors: None = ...) -> Popen[bytes]: ... 
@overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> Popen[Any]: ... else: @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., *, universal_newlines: Literal[True], startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ...) -> Popen[str]: ... @overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., *, universal_newlines: Literal[False] = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ...) -> Popen[bytes]: ... 
@overload def __new__(cls, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ...) -> Popen[Any]: ... def poll(self) -> int: ... def wait(self, timeout: Optional[float] = ...) -> int: ... # Return str/bytes def communicate(self, input: Optional[AnyStr] = ..., timeout: Optional[float] = ..., # morally this should be optional ) -> Tuple[AnyStr, AnyStr]: ... def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def __enter__(self: _S) -> _S: ... def __exit__(self, type: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType]) -> None: ... # The result really is always a str. def getstatusoutput(cmd: _TXT) -> Tuple[int, str]: ... def getoutput(cmd: _TXT) -> str: ... def list2cmdline(seq: Sequence[str]) -> str: ... # undocumented if sys.platform == 'win32': class STARTUPINFO: if sys.version_info >= (3, 7): def __init__(self, *, dwFlags: int = ..., hStdInput: Optional[Any] = ..., hStdOutput: Optional[Any] = ..., hStdError: Optional[Any] = ..., wShowWindow: int = ..., lpAttributeList: Optional[Mapping[str, Any]] = ...) -> None: ... 
dwFlags: int hStdInput: Optional[Any] hStdOutput: Optional[Any] hStdError: Optional[Any] wShowWindow: int if sys.version_info >= (3, 7): lpAttributeList: Mapping[str, Any] STD_INPUT_HANDLE: Any STD_OUTPUT_HANDLE: Any STD_ERROR_HANDLE: Any SW_HIDE: int STARTF_USESTDHANDLES: int STARTF_USESHOWWINDOW: int CREATE_NEW_CONSOLE: int CREATE_NEW_PROCESS_GROUP: int if sys.version_info >= (3, 7): ABOVE_NORMAL_PRIORITY_CLASS: int BELOW_NORMAL_PRIORITY_CLASS: int HIGH_PRIORITY_CLASS: int IDLE_PRIORITY_CLASS: int NORMAL_PRIORITY_CLASS: int REALTIME_PRIORITY_CLASS: int CREATE_NO_WINDOW: int DETACHED_PROCESS: int CREATE_DEFAULT_ERROR_MODE: int CREATE_BREAKAWAY_FROM_JOB: int mypy-0.761/mypy/typeshed/stdlib/3/symbol.pyi0000644€tŠÔÚ€2›s®0000000266413576752252025223 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for symbol (Python 3) import sys from typing import Dict single_input: int file_input: int eval_input: int decorator: int decorators: int decorated: int async_funcdef: int funcdef: int parameters: int typedargslist: int tfpdef: int varargslist: int vfpdef: int stmt: int simple_stmt: int small_stmt: int expr_stmt: int if sys.version_info >= (3, 6): annassign: int testlist_star_expr: int augassign: int del_stmt: int pass_stmt: int flow_stmt: int break_stmt: int continue_stmt: int return_stmt: int yield_stmt: int raise_stmt: int import_stmt: int import_name: int import_from: int import_as_name: int dotted_as_name: int import_as_names: int dotted_as_names: int dotted_name: int global_stmt: int nonlocal_stmt: int assert_stmt: int compound_stmt: int async_stmt: int if_stmt: int while_stmt: int for_stmt: int try_stmt: int with_stmt: int with_item: int except_clause: int suite: int test: int test_nocond: int lambdef: int lambdef_nocond: int or_test: int and_test: int not_test: int comparison: int comp_op: int star_expr: int expr: int xor_expr: int and_expr: int shift_expr: int arith_expr: int term: int factor: int power: int atom_expr: int atom: int testlist_comp: int trailer: 
int subscriptlist: int subscript: int sliceop: int exprlist: int testlist: int dictorsetmaker: int classdef: int arglist: int argument: int comp_iter: int comp_for: int comp_if: int encoding_decl: int yield_expr: int yield_arg: int sym_name: Dict[int, str] mypy-0.761/mypy/typeshed/stdlib/3/sys.pyi0000644€tŠÔÚ€2›s®0000001347513576752252024536 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sys # Ron Murawski # based on http://docs.python.org/3.2/library/sys.html from typing import ( List, NoReturn, Sequence, Any, Dict, Tuple, TextIO, overload, Optional, Union, TypeVar, Callable, Type ) import sys from types import FrameType, ModuleType, TracebackType from importlib.abc import MetaPathFinder _T = TypeVar('_T') # The following type alias are stub-only and do not exist during runtime _ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] _OptExcInfo = Union[_ExcInfo, Tuple[None, None, None]] # ----- sys variables ----- abiflags: str argv: List[str] base_exec_prefix: str base_prefix: str byteorder: str builtin_module_names: Sequence[str] # actually a tuple of strings copyright: str # dllhandle = 0 # Windows only dont_write_bytecode: bool __displayhook__: Any # contains the original value of displayhook __excepthook__: Any # contains the original value of excepthook exec_prefix: str executable: str float_repr_style: str hexversion: int last_type: Optional[Type[BaseException]] last_value: Optional[BaseException] last_traceback: Optional[TracebackType] maxsize: int maxunicode: int meta_path: List[MetaPathFinder] modules: Dict[str, ModuleType] path: List[str] path_hooks: List[Any] # TODO precise type; function, path to finder path_importer_cache: Dict[str, Any] # TODO precise type platform: str prefix: str if sys.version_info >= (3, 8): pycache_prefix: Optional[str] ps1: str ps2: str stdin: TextIO stdout: TextIO stderr: TextIO __stdin__: TextIO __stdout__: TextIO __stderr__: TextIO tracebacklimit: int version: str api_version: int warnoptions: Any # 
Each entry is a tuple of the form (action, message, category, module, # lineno) # winver = '' # Windows only _xoptions: Dict[Any, Any] flags: _flags class _flags: debug: int division_warning: int inspect: int interactive: int optimize: int dont_write_bytecode: int no_user_site: int no_site: int ignore_environment: int verbose: int bytes_warning: int quiet: int hash_randomization: int if sys.version_info >= (3, 7): dev_mode: int utf8_mode: int float_info: _float_info class _float_info: epsilon: float # DBL_EPSILON dig: int # DBL_DIG mant_dig: int # DBL_MANT_DIG max: float # DBL_MAX max_exp: int # DBL_MAX_EXP max_10_exp: int # DBL_MAX_10_EXP min: float # DBL_MIN min_exp: int # DBL_MIN_EXP min_10_exp: int # DBL_MIN_10_EXP radix: int # FLT_RADIX rounds: int # FLT_ROUNDS hash_info: _hash_info class _hash_info: width: int modulus: int inf: int nan: int imag: int implementation: _implementation class _implementation: name: str version: _version_info hexversion: int cache_tag: str int_info: _int_info class _int_info: bits_per_digit: int sizeof_digit: int class _version_info(Tuple[int, int, int, str, int]): major: int minor: int micro: int releaselevel: str serial: int version_info: _version_info def call_tracing(fn: Callable[..., _T], args: Any) -> _T: ... def _clear_type_cache() -> None: ... def _current_frames() -> Dict[int, Any]: ... def _debugmallocstats() -> None: ... def displayhook(value: Optional[int]) -> None: ... def excepthook(type_: Type[BaseException], value: BaseException, traceback: TracebackType) -> None: ... def exc_info() -> _OptExcInfo: ... # sys.exit() accepts an optional argument of anything printable def exit(arg: object = ...) -> NoReturn: ... def getcheckinterval() -> int: ... # deprecated def getdefaultencoding() -> str: ... if sys.platform != 'win32': # Unix only def getdlopenflags() -> int: ... def getfilesystemencoding() -> str: ... def getrefcount(arg: Any) -> int: ... def getrecursionlimit() -> int: ... 
@overload def getsizeof(obj: object) -> int: ... @overload def getsizeof(obj: object, default: int) -> int: ... def getswitchinterval() -> float: ... @overload def _getframe() -> FrameType: ... @overload def _getframe(depth: int) -> FrameType: ... _ProfileFunc = Callable[[FrameType, str, Any], Any] def getprofile() -> Optional[_ProfileFunc]: ... def setprofile(profilefunc: Optional[_ProfileFunc]) -> None: ... _TraceFunc = Callable[[FrameType, str, Any], Optional[Callable[[FrameType, str, Any], Any]]] def gettrace() -> Optional[_TraceFunc]: ... def settrace(tracefunc: Optional[_TraceFunc]) -> None: ... class _WinVersion(Tuple[int, int, int, int, str, int, int, int, int, Tuple[int, int, int]]): major: int minor: int build: int platform: int service_pack: str service_pack_minor: int service_pack_major: int suite_mast: int product_type: int platform_version: Tuple[int, int, int] def getwindowsversion() -> _WinVersion: ... # Windows only def intern(string: str) -> str: ... def is_finalizing() -> bool: ... if sys.version_info >= (3, 7): __breakpointhook__: Any # contains the original value of breakpointhook def breakpointhook(*args: Any, **kwargs: Any) -> Any: ... def setcheckinterval(interval: int) -> None: ... # deprecated def setdlopenflags(n: int) -> None: ... # Linux only def setrecursionlimit(limit: int) -> None: ... def setswitchinterval(interval: float) -> None: ... def settscdump(on_flag: bool) -> None: ... def gettotalrefcount() -> int: ... # Debug builds only if sys.version_info >= (3, 8): # not exported by sys class UnraisableHookArgs: exc_type: Type[BaseException] exc_value: Optional[BaseException] exc_traceback: Optional[TracebackType] err_msg: Optional[str] object: Optional[object] unraisablehook: Callable[[UnraisableHookArgs], Any] def addaudithook(hook: Callable[[str, Tuple[Any, ...]], Any]) -> None: ... def audit(__event: str, *args: Any) -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/tempfile.pyi0000644€tŠÔÚ€2›s®0000001430313576752252025514 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tempfile # Ron Murawski # based on http://docs.python.org/3.3/library/tempfile.html import os import sys from types import TracebackType from typing import Any, AnyStr, Generic, IO, Iterable, Iterator, List, Optional, overload, Tuple, Type, TypeVar, Union if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal # global variables TMP_MAX: int tempdir: Optional[str] template: str _S = TypeVar("_S") _T = TypeVar("_T") # for pytype, define typevar in same file as alias if sys.version_info >= (3, 6): _DirT = Union[_T, os.PathLike[_T]] else: _DirT = Union[_T] @overload def TemporaryFile( mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"], buffering: int = ..., encoding: Optional[str] = ..., newline: Optional[str] = ..., suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ..., ) -> IO[str]: ... @overload def TemporaryFile( mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] = ..., buffering: int = ..., encoding: Optional[str] = ..., newline: Optional[str] = ..., suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ..., ) -> IO[bytes]: ... @overload def TemporaryFile( mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., newline: Optional[str] = ..., suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ..., ) -> IO[Any]: ... 
@overload def NamedTemporaryFile( mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"], buffering: int = ..., encoding: Optional[str] = ..., newline: Optional[str] = ..., suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ..., delete: bool = ..., ) -> IO[str]: ... @overload def NamedTemporaryFile( mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] = ..., buffering: int = ..., encoding: Optional[str] = ..., newline: Optional[str] = ..., suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ..., delete: bool = ..., ) -> IO[bytes]: ... @overload def NamedTemporaryFile( mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., newline: Optional[str] = ..., suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ..., delete: bool = ..., ) -> IO[Any]: ... # It does not actually derive from IO[AnyStr], but it does implement the # protocol. class SpooledTemporaryFile(IO[AnyStr]): # bytes needs to go first, as default mode is to open as bytes @overload def __init__( self: SpooledTemporaryFile[bytes], max_size: int = ..., mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] = ..., buffering: int = ..., encoding: Optional[str] = ..., newline: Optional[str] = ..., suffix: Optional[str] = ..., prefix: Optional[str] = ..., dir: Optional[str] = ..., ) -> None: ... @overload def __init__( self: SpooledTemporaryFile[str], max_size: int = ..., mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"] = ..., buffering: int = ..., encoding: Optional[str] = ..., newline: Optional[str] = ..., suffix: Optional[str] = ..., prefix: Optional[str] = ..., dir: Optional[str] = ..., ) -> None: ... 
@overload def __init__( self, max_size: int = ..., mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., newline: Optional[str] = ..., suffix: Optional[str] = ..., prefix: Optional[str] = ..., dir: Optional[str] = ..., ) -> None: ... def rollover(self) -> None: ... def __enter__(self: _S) -> _S: ... def __exit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType] ) -> Optional[bool]: ... # These methods are copied from the abstract methods of IO, because # SpooledTemporaryFile implements IO. # See also https://github.com/python/typeshed/pull/2452#issuecomment-420657918. def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def read(self, n: int = ...) -> AnyStr: ... def readable(self) -> bool: ... def readline(self, limit: int = ...) -> AnyStr: ... def readlines(self, hint: int = ...) -> List[AnyStr]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... def write(self, s: AnyStr) -> int: ... def writelines(self, lines: Iterable[AnyStr]) -> None: ... def __next__(self) -> AnyStr: ... def __iter__(self) -> Iterator[AnyStr]: ... class TemporaryDirectory(Generic[AnyStr]): name: str def __init__( self, suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ... ) -> None: ... def cleanup(self) -> None: ... def __enter__(self) -> AnyStr: ... def __exit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType] ) -> None: ... def mkstemp( suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ..., text: bool = ... ) -> Tuple[int, AnyStr]: ... @overload def mkdtemp() -> str: ... 
@overload def mkdtemp(suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ...) -> AnyStr: ... def mktemp(suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ...) -> AnyStr: ... def gettempdirb() -> bytes: ... def gettempprefixb() -> bytes: ... def gettempdir() -> str: ... def gettempprefix() -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/textwrap.pyi0000644€tŠÔÚ€2›s®0000000660113576752252025567 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Callable, List, Optional, Dict, Pattern class TextWrapper: width: int = ... initial_indent: str = ... subsequent_indent: str = ... expand_tabs: bool = ... replace_whitespace: bool = ... fix_sentence_endings: bool = ... drop_whitespace: bool = ... break_long_words: bool = ... break_on_hyphens: bool = ... tabsize: int = ... max_lines: Optional[int] = ... placeholder: str = ... # Attributes not present in documentation sentence_end_re: Pattern[str] = ... wordsep_re: Pattern[str] = ... wordsep_simple_re: Pattern[str] = ... whitespace_trans: str = ... unicode_whitespace_trans: Dict[int, int] = ... uspace: int = ... x: int = ... def __init__( self, width: int = ..., initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., drop_whitespace: bool = ..., break_on_hyphens: bool = ..., tabsize: int = ..., *, max_lines: Optional[int] = ..., placeholder: str = ...) -> None: ... # Private methods *are* part of the documented API for subclasses. def _munge_whitespace(self, text: str) -> str: ... def _split(self, text: str) -> List[str]: ... def _fix_sentence_endings(self, chunks: List[str]) -> None: ... def _handle_long_word(self, reversed_chunks: List[str], cur_line: List[str], cur_len: int, width: int) -> None: ... def _wrap_chunks(self, chunks: List[str]) -> List[str]: ... 
def _split_chunks(self, text: str) -> List[str]: ... def wrap(self, text: str) -> List[str]: ... def fill(self, text: str) -> str: ... def wrap( text: str = ..., width: int = ..., *, initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., tabsize: int = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., break_on_hyphens: bool = ..., drop_whitespace: bool = ..., max_lines: int = ..., placeholder: str = ... ) -> List[str]: ... def fill( text: str, width: int = ..., *, initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., tabsize: int = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., break_on_hyphens: bool = ..., drop_whitespace: bool = ..., max_lines: int = ..., placeholder: str = ... ) -> str: ... def shorten( text: str, width: int, *, initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., tabsize: int = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., break_on_hyphens: bool = ..., drop_whitespace: bool = ..., # Omit `max_lines: int = None`, it is forced to 1 here. placeholder: str = ... ) -> str: ... def dedent(text: str) -> str: ... def indent(text: str, prefix: str, predicate: Callable[[str], bool] = ...) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3/tkinter/0000755€tŠÔÚ€2›s®0000000000013576752267024651 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/tkinter/__init__.pyi0000644€tŠÔÚ€2›s®0000006215613576752252027137 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from types import TracebackType from typing import Any, Optional, Dict, Callable, Tuple, Type, Union from tkinter.constants import * # noqa: F403 TclError: Any wantobjects: Any TkVersion: Any TclVersion: Any READABLE: Any WRITABLE: Any EXCEPTION: Any class Event: ... def NoDefaultRoot(): ... 
class Variable: def __init__(self, master: Optional[Any] = ..., value: Optional[Any] = ..., name: Optional[Any] = ...): ... def __del__(self): ... def set(self, value): ... initialize: Any def get(self): ... def trace_variable(self, mode, callback): ... trace: Any def trace_vdelete(self, mode, cbname): ... def trace_vinfo(self): ... def __eq__(self, other): ... class StringVar(Variable): def __init__(self, master: Optional[Any] = ..., value: Optional[Any] = ..., name: Optional[Any] = ...): ... def get(self): ... class IntVar(Variable): def __init__(self, master: Optional[Any] = ..., value: Optional[Any] = ..., name: Optional[Any] = ...): ... def get(self): ... class DoubleVar(Variable): def __init__(self, master: Optional[Any] = ..., value: Optional[Any] = ..., name: Optional[Any] = ...): ... def get(self): ... class BooleanVar(Variable): def __init__(self, master: Optional[Any] = ..., value: Optional[Any] = ..., name: Optional[Any] = ...): ... def set(self, value): ... initialize: Any def get(self): ... def mainloop(n: int = ...): ... getint: Any getdouble: Any def getboolean(s): ... class Misc: def destroy(self): ... def deletecommand(self, name): ... def tk_strictMotif(self, boolean: Optional[Any] = ...): ... def tk_bisque(self): ... def tk_setPalette(self, *args, **kw): ... def tk_menuBar(self, *args): ... def wait_variable(self, name: str = ...): ... waitvar: Any def wait_window(self, window: Optional[Any] = ...): ... def wait_visibility(self, window: Optional[Any] = ...): ... def setvar(self, name: str = ..., value: str = ...): ... def getvar(self, name: str = ...): ... def getint(self, s): ... def getdouble(self, s): ... def getboolean(self, s): ... def focus_set(self): ... focus: Any def focus_force(self): ... def focus_get(self): ... def focus_displayof(self): ... def focus_lastfor(self): ... def tk_focusFollowsMouse(self): ... def tk_focusNext(self): ... def tk_focusPrev(self): ... def after(self, ms, func: Optional[Any] = ..., *args): ... 
def after_idle(self, func, *args): ... def after_cancel(self, id): ... def bell(self, displayof: int = ...): ... def clipboard_get(self, **kw): ... def clipboard_clear(self, **kw): ... def clipboard_append(self, string, **kw): ... def grab_current(self): ... def grab_release(self): ... def grab_set(self): ... def grab_set_global(self): ... def grab_status(self): ... def option_add(self, pattern, value, priority: Optional[Any] = ...): ... def option_clear(self): ... def option_get(self, name, className): ... def option_readfile(self, fileName, priority: Optional[Any] = ...): ... def selection_clear(self, **kw): ... def selection_get(self, **kw): ... def selection_handle(self, command, **kw): ... def selection_own(self, **kw): ... def selection_own_get(self, **kw): ... def send(self, interp, cmd, *args): ... def lower(self, belowThis: Optional[Any] = ...): ... def tkraise(self, aboveThis: Optional[Any] = ...): ... lift: Any def winfo_atom(self, name, displayof: int = ...): ... def winfo_atomname(self, id, displayof: int = ...): ... def winfo_cells(self): ... def winfo_children(self): ... def winfo_class(self): ... def winfo_colormapfull(self): ... def winfo_containing(self, rootX, rootY, displayof: int = ...): ... def winfo_depth(self): ... def winfo_exists(self): ... def winfo_fpixels(self, number): ... def winfo_geometry(self): ... def winfo_height(self): ... def winfo_id(self): ... def winfo_interps(self, displayof: int = ...): ... def winfo_ismapped(self): ... def winfo_manager(self): ... def winfo_name(self): ... def winfo_parent(self): ... def winfo_pathname(self, id, displayof: int = ...): ... def winfo_pixels(self, number): ... def winfo_pointerx(self): ... def winfo_pointerxy(self): ... def winfo_pointery(self): ... def winfo_reqheight(self): ... def winfo_reqwidth(self): ... def winfo_rgb(self, color): ... def winfo_rootx(self): ... def winfo_rooty(self): ... def winfo_screen(self): ... def winfo_screencells(self): ... def winfo_screendepth(self): ... 
def winfo_screenheight(self): ... def winfo_screenmmheight(self): ... def winfo_screenmmwidth(self): ... def winfo_screenvisual(self): ... def winfo_screenwidth(self): ... def winfo_server(self): ... def winfo_toplevel(self): ... def winfo_viewable(self): ... def winfo_visual(self): ... def winfo_visualid(self): ... def winfo_visualsavailable(self, includeids: int = ...): ... def winfo_vrootheight(self): ... def winfo_vrootwidth(self): ... def winfo_vrootx(self): ... def winfo_vrooty(self): ... def winfo_width(self): ... def winfo_x(self): ... def winfo_y(self): ... def update(self): ... def update_idletasks(self): ... def bindtags(self, tagList: Optional[Any] = ...): ... def bind(self, sequence: Optional[Any] = ..., func: Optional[Any] = ..., add: Optional[Any] = ...): ... def unbind(self, sequence, funcid: Optional[Any] = ...): ... def bind_all(self, sequence: Optional[Any] = ..., func: Optional[Any] = ..., add: Optional[Any] = ...): ... def unbind_all(self, sequence): ... def bind_class(self, className, sequence: Optional[Any] = ..., func: Optional[Any] = ..., add: Optional[Any] = ...): ... def unbind_class(self, className, sequence): ... def mainloop(self, n: int = ...): ... def quit(self): ... def nametowidget(self, name): ... register: Any def configure(self, cnf: Optional[Any] = ..., **kw): ... config: Any def cget(self, key): ... __getitem__: Any def __setitem__(self, key, value): ... def keys(self): ... def pack_propagate(self, flag=...): ... propagate: Any def pack_slaves(self): ... slaves: Any def place_slaves(self): ... def grid_anchor(self, anchor: Optional[Any] = ...): ... anchor: Any def grid_bbox(self, column: Optional[Any] = ..., row: Optional[Any] = ..., col2: Optional[Any] = ..., row2: Optional[Any] = ...): ... bbox: Any def grid_columnconfigure(self, index, cnf=..., **kw): ... columnconfigure: Any def grid_location(self, x, y): ... def grid_propagate(self, flag=...): ... def grid_rowconfigure(self, index, cnf=..., **kw): ... 
rowconfigure: Any def grid_size(self): ... size: Any def grid_slaves(self, row: Optional[Any] = ..., column: Optional[Any] = ...): ... def event_add(self, virtual, *sequences): ... def event_delete(self, virtual, *sequences): ... def event_generate(self, sequence, **kw): ... def event_info(self, virtual: Optional[Any] = ...): ... def image_names(self): ... def image_types(self): ... class CallWrapper: func: Any subst: Any widget: Any def __init__(self, func, subst, widget): ... def __call__(self, *args): ... class XView: def xview(self, *args): ... def xview_moveto(self, fraction): ... def xview_scroll(self, number, what): ... class YView: def yview(self, *args): ... def yview_moveto(self, fraction): ... def yview_scroll(self, number, what): ... class Wm: def wm_aspect(self, minNumer: Optional[Any] = ..., minDenom: Optional[Any] = ..., maxNumer: Optional[Any] = ..., maxDenom: Optional[Any] = ...): ... aspect: Any def wm_attributes(self, *args): ... attributes: Any def wm_client(self, name: Optional[Any] = ...): ... client: Any def wm_colormapwindows(self, *wlist): ... colormapwindows: Any def wm_command(self, value: Optional[Any] = ...): ... command: Any def wm_deiconify(self): ... deiconify: Any def wm_focusmodel(self, model: Optional[Any] = ...): ... focusmodel: Any def wm_forget(self, window): ... forget: Any def wm_frame(self): ... frame: Any def wm_geometry(self, newGeometry: Optional[Any] = ...): ... geometry: Any def wm_grid(self, baseWidth: Optional[Any] = ..., baseHeight: Optional[Any] = ..., widthInc: Optional[Any] = ..., heightInc: Optional[Any] = ...): ... grid: Any def wm_group(self, pathName: Optional[Any] = ...): ... group: Any def wm_iconbitmap(self, bitmap: Optional[Any] = ..., default: Optional[Any] = ...): ... iconbitmap: Any def wm_iconify(self): ... iconify: Any def wm_iconmask(self, bitmap: Optional[Any] = ...): ... iconmask: Any def wm_iconname(self, newName: Optional[Any] = ...): ... 
iconname: Any def wm_iconphoto(self, default: bool = ..., *args): ... iconphoto: Any def wm_iconposition(self, x: Optional[Any] = ..., y: Optional[Any] = ...): ... iconposition: Any def wm_iconwindow(self, pathName: Optional[Any] = ...): ... iconwindow: Any def wm_manage(self, widget): ... manage: Any def wm_maxsize(self, width: Optional[Any] = ..., height: Optional[Any] = ...): ... maxsize: Any def wm_minsize(self, width: Optional[Any] = ..., height: Optional[Any] = ...): ... minsize: Any def wm_overrideredirect(self, boolean: Optional[Any] = ...): ... overrideredirect: Any def wm_positionfrom(self, who: Optional[Any] = ...): ... positionfrom: Any def wm_protocol(self, name: Optional[Any] = ..., func: Optional[Any] = ...): ... protocol: Any def wm_resizable(self, width: Optional[Any] = ..., height: Optional[Any] = ...): ... resizable: Any def wm_sizefrom(self, who: Optional[Any] = ...): ... sizefrom: Any def wm_state(self, newstate: Optional[Any] = ...): ... state: Any def wm_title(self, string: Optional[Any] = ...): ... title: Any def wm_transient(self, master: Optional[Any] = ...): ... transient: Any def wm_withdraw(self): ... withdraw: Any class Tk(Misc, Wm): master: Optional[Any] children: Dict[str, Any] tk: Any def __init__(self, screenName: Optional[str] = ..., baseName: Optional[str] = ..., className: str = ..., useTk: bool = ..., sync: bool = ..., use: Optional[str] = ...) -> None: ... def loadtk(self) -> None: ... def destroy(self) -> None: ... def readprofile(self, baseName: str, className: str) -> None: ... report_callback_exception: Callable[[Type[BaseException], BaseException, TracebackType], Any] def __getattr__(self, attr: str) -> Any: ... def Tcl(screenName: Optional[Any] = ..., baseName: Optional[Any] = ..., className: str = ..., useTk: bool = ...): ... class Pack: def pack_configure(self, cnf=..., **kw): ... pack: Any def pack_forget(self): ... forget: Any def pack_info(self): ... 
info: Any propagate: Any slaves: Any class Place: def place_configure(self, cnf=..., **kw): ... place: Any def place_forget(self): ... forget: Any def place_info(self): ... info: Any slaves: Any class Grid: def grid_configure(self, cnf=..., **kw): ... grid: Any bbox: Any columnconfigure: Any def grid_forget(self): ... forget: Any def grid_remove(self): ... def grid_info(self): ... info: Any location: Any propagate: Any rowconfigure: Any size: Any slaves: Any class BaseWidget(Misc): widgetName: Any def __init__(self, master, widgetName, cnf=..., kw=..., extra=...): ... def destroy(self): ... class Widget(BaseWidget, Pack, Place, Grid): ... class Toplevel(BaseWidget, Wm): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... class Button(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def flash(self): ... def invoke(self): ... class Canvas(Widget, XView, YView): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def addtag(self, *args): ... def addtag_above(self, newtag, tagOrId): ... def addtag_all(self, newtag): ... def addtag_below(self, newtag, tagOrId): ... def addtag_closest(self, newtag, x, y, halo: Optional[Any] = ..., start: Optional[Any] = ...): ... def addtag_enclosed(self, newtag, x1, y1, x2, y2): ... def addtag_overlapping(self, newtag, x1, y1, x2, y2): ... def addtag_withtag(self, newtag, tagOrId): ... def bbox(self, *args): ... def tag_unbind(self, tagOrId, sequence, funcid: Optional[Any] = ...): ... def tag_bind(self, tagOrId, sequence: Optional[Any] = ..., func: Optional[Any] = ..., add: Optional[Any] = ...): ... def canvasx(self, screenx, gridspacing: Optional[Any] = ...): ... def canvasy(self, screeny, gridspacing: Optional[Any] = ...): ... def coords(self, *args): ... def create_arc(self, *args, **kw): ... def create_bitmap(self, *args, **kw): ... def create_image(self, *args, **kw): ... def create_line(self, *args, **kw): ... def create_oval(self, *args, **kw): ... 
def create_polygon(self, *args, **kw): ... def create_rectangle(self, *args, **kw): ... def create_text(self, *args, **kw): ... def create_window(self, *args, **kw): ... def dchars(self, *args): ... def delete(self, *args): ... def dtag(self, *args): ... def find(self, *args): ... def find_above(self, tagOrId): ... def find_all(self): ... def find_below(self, tagOrId): ... def find_closest(self, x, y, halo: Optional[Any] = ..., start: Optional[Any] = ...): ... def find_enclosed(self, x1, y1, x2, y2): ... def find_overlapping(self, x1, y1, x2, y2): ... def find_withtag(self, tagOrId): ... def focus(self, *args): ... def gettags(self, *args): ... def icursor(self, *args): ... def index(self, *args): ... def insert(self, *args): ... def itemcget(self, tagOrId, option): ... def itemconfigure(self, tagOrId, cnf: Optional[Any] = ..., **kw): ... itemconfig: Any def tag_lower(self, *args): ... lower: Any def move(self, *args): ... if sys.version_info >= (3, 8): def moveto(self, tagOrId: Union[int, str], x: str = ..., y: str = ...) -> None: ... def postscript(self, cnf=..., **kw): ... def tag_raise(self, *args): ... lift: Any def scale(self, *args): ... def scan_mark(self, x, y): ... def scan_dragto(self, x, y, gain: int = ...): ... def select_adjust(self, tagOrId, index): ... def select_clear(self): ... def select_from(self, tagOrId, index): ... def select_item(self): ... def select_to(self, tagOrId, index): ... def type(self, tagOrId): ... class Checkbutton(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def deselect(self): ... def flash(self): ... def invoke(self): ... def select(self): ... def toggle(self): ... class Entry(Widget, XView): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def delete(self, first, last: Optional[Any] = ...): ... def get(self): ... def icursor(self, index): ... def index(self, index): ... def insert(self, index, string): ... def scan_mark(self, x): ... def scan_dragto(self, x): ... 
def selection_adjust(self, index): ... select_adjust: Any def selection_clear(self): ... select_clear: Any def selection_from(self, index): ... select_from: Any def selection_present(self): ... select_present: Any def selection_range(self, start, end): ... select_range: Any def selection_to(self, index): ... select_to: Any class Frame(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... class Label(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... class Listbox(Widget, XView, YView): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def activate(self, index): ... def bbox(self, index): ... def curselection(self): ... def delete(self, first, last: Optional[Any] = ...): ... def get(self, first, last: Optional[Any] = ...): ... def index(self, index): ... def insert(self, index, *elements): ... def nearest(self, y): ... def scan_mark(self, x, y): ... def scan_dragto(self, x, y): ... def see(self, index): ... def selection_anchor(self, index): ... select_anchor: Any def selection_clear(self, first, last: Optional[Any] = ...): ... # type: ignore select_clear: Any def selection_includes(self, index): ... select_includes: Any def selection_set(self, first, last: Optional[Any] = ...): ... select_set: Any def size(self): ... def itemcget(self, index, option): ... def itemconfigure(self, index, cnf: Optional[Any] = ..., **kw): ... itemconfig: Any class Menu(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def tk_popup(self, x, y, entry: str = ...): ... def tk_bindForTraversal(self): ... def activate(self, index): ... def add(self, itemType, cnf=..., **kw): ... def add_cascade(self, cnf=..., **kw): ... def add_checkbutton(self, cnf=..., **kw): ... def add_command(self, cnf=..., **kw): ... def add_radiobutton(self, cnf=..., **kw): ... def add_separator(self, cnf=..., **kw): ... def insert(self, index, itemType, cnf=..., **kw): ... def insert_cascade(self, index, cnf=..., **kw): ... 
def insert_checkbutton(self, index, cnf=..., **kw): ... def insert_command(self, index, cnf=..., **kw): ... def insert_radiobutton(self, index, cnf=..., **kw): ... def insert_separator(self, index, cnf=..., **kw): ... def delete(self, index1, index2: Optional[Any] = ...): ... def entrycget(self, index, option): ... def entryconfigure(self, index, cnf: Optional[Any] = ..., **kw): ... entryconfig: Any def index(self, index): ... def invoke(self, index): ... def post(self, x, y): ... def type(self, index): ... def unpost(self): ... def xposition(self, index): ... def yposition(self, index): ... class Menubutton(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... class Message(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... class Radiobutton(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def deselect(self): ... def flash(self): ... def invoke(self): ... def select(self): ... class Scale(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def get(self): ... def set(self, value): ... def coords(self, value: Optional[Any] = ...): ... def identify(self, x, y): ... class Scrollbar(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def activate(self, index: Optional[Any] = ...): ... def delta(self, deltax, deltay): ... def fraction(self, x, y): ... def identify(self, x, y): ... def get(self): ... def set(self, first, last): ... class Text(Widget, XView, YView): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def bbox(self, index): ... def compare(self, index1, op, index2): ... def count(self, index1, index2, *args): ... def debug(self, boolean: Optional[Any] = ...): ... def delete(self, index1, index2: Optional[Any] = ...): ... def dlineinfo(self, index): ... def dump(self, index1, index2: Optional[Any] = ..., command: Optional[Any] = ..., **kw): ... def edit(self, *args): ... 
def edit_modified(self, arg: Optional[Any] = ...): ... def edit_redo(self): ... def edit_reset(self): ... def edit_separator(self): ... def edit_undo(self): ... def get(self, index1, index2: Optional[Any] = ...): ... def image_cget(self, index, option): ... def image_configure(self, index, cnf: Optional[Any] = ..., **kw): ... def image_create(self, index, cnf=..., **kw): ... def image_names(self): ... def index(self, index): ... def insert(self, index, chars, *args): ... def mark_gravity(self, markName, direction: Optional[Any] = ...): ... def mark_names(self): ... def mark_set(self, markName, index): ... def mark_unset(self, *markNames): ... def mark_next(self, index): ... def mark_previous(self, index): ... def peer_create(self, newPathName, cnf=..., **kw): ... def peer_names(self): ... def replace(self, index1, index2, chars, *args): ... def scan_mark(self, x, y): ... def scan_dragto(self, x, y): ... def search(self, pattern, index, stopindex: Optional[Any] = ..., forwards: Optional[Any] = ..., backwards: Optional[Any] = ..., exact: Optional[Any] = ..., regexp: Optional[Any] = ..., nocase: Optional[Any] = ..., count: Optional[Any] = ..., elide: Optional[Any] = ...): ... def see(self, index): ... def tag_add(self, tagName, index1, *args): ... def tag_unbind(self, tagName, sequence, funcid: Optional[Any] = ...): ... def tag_bind(self, tagName, sequence, func, add: Optional[Any] = ...): ... def tag_cget(self, tagName, option): ... def tag_configure(self, tagName, cnf: Optional[Any] = ..., **kw): ... tag_config: Any def tag_delete(self, *tagNames): ... def tag_lower(self, tagName, belowThis: Optional[Any] = ...): ... def tag_names(self, index: Optional[Any] = ...): ... def tag_nextrange(self, tagName, index1, index2: Optional[Any] = ...): ... def tag_prevrange(self, tagName, index1, index2: Optional[Any] = ...): ... def tag_raise(self, tagName, aboveThis: Optional[Any] = ...): ... def tag_ranges(self, tagName): ... 
def tag_remove(self, tagName, index1, index2: Optional[Any] = ...): ... def window_cget(self, index, option): ... def window_configure(self, index, cnf: Optional[Any] = ..., **kw): ... window_config: Any def window_create(self, index, cnf=..., **kw): ... def window_names(self): ... def yview_pickplace(self, *what): ... class _setit: def __init__(self, var, value, callback: Optional[Any] = ...): ... def __call__(self, *args): ... class OptionMenu(Menubutton): widgetName: Any menuname: Any def __init__(self, master, variable, value, *values, **kwargs): ... def __getitem__(self, name): ... def destroy(self): ... class Image: name: Any tk: Any def __init__(self, imgtype, name: Optional[Any] = ..., cnf=..., master: Optional[Any] = ..., **kw): ... def __del__(self): ... def __setitem__(self, key, value): ... def __getitem__(self, key): ... def configure(self, **kw): ... config: Any def height(self): ... def type(self): ... def width(self): ... class PhotoImage(Image): def __init__(self, name: Optional[Any] = ..., cnf=..., master: Optional[Any] = ..., **kw): ... def blank(self): ... def cget(self, option): ... def __getitem__(self, key): ... def copy(self): ... def zoom(self, x, y: str = ...): ... def subsample(self, x, y: str = ...): ... def get(self, x, y): ... def put(self, data, to: Optional[Any] = ...): ... def write(self, filename, format: Optional[Any] = ..., from_coords: Optional[Any] = ...): ... if sys.version_info >= (3, 8): def transparency_get(self, x: int, y: int) -> bool: ... def transparency_set(self, x: int, y: int, boolean: bool) -> None: ... class BitmapImage(Image): def __init__(self, name: Optional[Any] = ..., cnf=..., master: Optional[Any] = ..., **kw): ... def image_names(): ... def image_types(): ... class Spinbox(Widget, XView): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def bbox(self, index): ... def delete(self, first, last: Optional[Any] = ...): ... def get(self): ... def icursor(self, index): ... 
def identify(self, x, y): ... def index(self, index): ... def insert(self, index, s): ... def invoke(self, element): ... def scan(self, *args): ... def scan_mark(self, x): ... def scan_dragto(self, x): ... def selection(self, *args: Any) -> Tuple[int, ...]: ... def selection_adjust(self, index): ... def selection_clear(self): ... def selection_element(self, element: Optional[Any] = ...): ... if sys.version_info >= (3, 8): def selection_from(self, index: int) -> None: ... def selection_present(self) -> None: ... def selection_range(self, start: int, end: int) -> None: ... def selection_to(self, index: int) -> None: ... class LabelFrame(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... class PanedWindow(Widget): def __init__(self, master: Optional[Any] = ..., cnf=..., **kw): ... def add(self, child, **kw): ... def remove(self, child): ... forget: Any def identify(self, x, y): ... def proxy(self, *args): ... def proxy_coord(self): ... def proxy_forget(self): ... def proxy_place(self, x, y): ... def sash(self, *args): ... def sash_coord(self, index): ... def sash_mark(self, index): ... def sash_place(self, index, x, y): ... def panecget(self, child, option): ... def paneconfigure(self, tagOrId, cnf: Optional[Any] = ..., **kw): ... paneconfig: Any def panes(self): ... mypy-0.761/mypy/typeshed/stdlib/3/tkinter/commondialog.pyi0000644€tŠÔÚ€2›s®0000000042513576752252030037 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Mapping, Optional class Dialog: command: Optional[Any] = ... master: Optional[Any] = ... options: Mapping[str, Any] = ... def __init__(self, master: Optional[Any] = ..., **options) -> None: ... def show(self, **options) -> Any: ... 
mypy-0.761/mypy/typeshed/stdlib/3/tkinter/constants.pyi0000644€tŠÔÚ€2›s®0000000156013576752252027404 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any NO: Any YES: Any TRUE: Any FALSE: Any ON: Any OFF: Any N: Any S: Any W: Any E: Any NW: Any SW: Any NE: Any SE: Any NS: Any EW: Any NSEW: Any CENTER: Any NONE: Any X: Any Y: Any BOTH: Any LEFT: Any TOP: Any RIGHT: Any BOTTOM: Any RAISED: Any SUNKEN: Any FLAT: Any RIDGE: Any GROOVE: Any SOLID: Any HORIZONTAL: Any VERTICAL: Any NUMERIC: Any CHAR: Any WORD: Any BASELINE: Any INSIDE: Any OUTSIDE: Any SEL: Any SEL_FIRST: Any SEL_LAST: Any END: Any INSERT: Any CURRENT: Any ANCHOR: Any ALL: Any NORMAL: Any DISABLED: Any ACTIVE: Any HIDDEN: Any CASCADE: Any CHECKBUTTON: Any COMMAND: Any RADIOBUTTON: Any SEPARATOR: Any SINGLE: Any BROWSE: Any MULTIPLE: Any EXTENDED: Any DOTBOX: Any UNDERLINE: Any PIESLICE: Any CHORD: Any ARC: Any FIRST: Any LAST: Any BUTT: Any PROJECTING: Any ROUND: Any BEVEL: Any MITER: Any MOVETO: Any SCROLL: Any UNITS: Any PAGES: Any mypy-0.761/mypy/typeshed/stdlib/3/tkinter/dialog.pyi0000644€tŠÔÚ€2›s®0000000044313576752252026626 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Mapping, Optional from tkinter import Widget DIALOG_ICON: str class Dialog(Widget): widgetName: str = ... num: int = ... def __init__(self, master: Optional[Any] = ..., cnf: Mapping[str, Any] = ..., **kw) -> None: ... def destroy(self) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/tkinter/filedialog.pyi0000644€tŠÔÚ€2›s®0000000427113576752252027471 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Optional, Tuple from tkinter import Button, commondialog, Entry, Frame, Listbox, Scrollbar, Toplevel dialogstates: Dict[Any, Tuple[Any, Any]] class FileDialog: title: str = ... master: Any = ... directory: Optional[Any] = ... top: Toplevel = ... botframe: Frame = ... selection: Entry = ... filter: Entry = ... midframe: Entry = ... filesbar: Scrollbar = ... 
files: Listbox = ... dirsbar: Scrollbar = ... dirs: Listbox = ... ok_button: Button = ... filter_button: Button = ... cancel_button: Button = ... def __init__(self, master, title: Optional[Any] = ...) -> None: ... # title is usually a str or None, but e.g. int doesn't raise en exception either how: Optional[Any] = ... def go(self, dir_or_file: Any = ..., pattern: str = ..., default: str = ..., key: Optional[Any] = ...): ... def quit(self, how: Optional[Any] = ...) -> None: ... def dirs_double_event(self, event) -> None: ... def dirs_select_event(self, event) -> None: ... def files_double_event(self, event) -> None: ... def files_select_event(self, event) -> None: ... def ok_event(self, event) -> None: ... def ok_command(self) -> None: ... def filter_command(self, event: Optional[Any] = ...) -> None: ... def get_filter(self): ... def get_selection(self): ... def cancel_command(self, event: Optional[Any] = ...) -> None: ... def set_filter(self, dir, pat) -> None: ... def set_selection(self, file) -> None: ... class LoadFileDialog(FileDialog): title: str = ... def ok_command(self) -> None: ... class SaveFileDialog(FileDialog): title: str = ... def ok_command(self): ... class _Dialog(commondialog.Dialog): ... class Open(_Dialog): command: str = ... class SaveAs(_Dialog): command: str = ... class Directory(commondialog.Dialog): command: str = ... def askopenfilename(**options): ... def asksaveasfilename(**options): ... def askopenfilenames(**options): ... def askopenfile(mode: str = ..., **options): ... def askopenfiles(mode: str = ..., **options): ... def asksaveasfile(mode: str = ..., **options): ... def askdirectory(**options): ... def test() -> None: ... 
mypy-0.761/mypy/typeshed/stdlib/3/tkinter/messagebox.pyi0000644€tŠÔÚ€2›s®0000000217613576752252027531 0ustar jukkaDROPBOX\Domain Users00000000000000from tkinter.commondialog import Dialog from typing import Any, Optional ERROR: str INFO: str QUESTION: str WARNING: str ABORTRETRYIGNORE: str OK: str OKCANCEL: str RETRYCANCEL: str YESNO: str YESNOCANCEL: str ABORT: str RETRY: str IGNORE: str CANCEL: str YES: str NO: str class Message(Dialog): command: str = ... def showinfo(title: Optional[str] = ..., message: Optional[str] = ..., **options: Any) -> str: ... def showwarning(title: Optional[str] = ..., message: Optional[str] = ..., **options: Any) -> str: ... def showerror(title: Optional[str] = ..., message: Optional[str] = ..., **options: Any) -> str: ... def askquestion(title: Optional[str] = ..., message: Optional[str] = ..., **options: Any) -> str: ... def askokcancel(title: Optional[str] = ..., message: Optional[str] = ..., **options: Any) -> bool: ... def askyesno(title: Optional[str] = ..., message: Optional[str] = ..., **options: Any) -> bool: ... def askyesnocancel(title: Optional[str] = ..., message: Optional[str] = ..., **options: Any) -> Optional[bool]: ... def askretrycancel(title: Optional[str] = ..., message: Optional[str] = ..., **options: Any) -> bool: ... mypy-0.761/mypy/typeshed/stdlib/3/tkinter/ttk.pyi0000644€tŠÔÚ€2›s®0000001432313576752252026173 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, List, Optional import tkinter def tclobjs_to_py(adict): ... def setup_master(master: Optional[Any] = ...): ... class Style: master: Any tk: Any def __init__(self, master: Optional[Any] = ...): ... def configure(self, style, query_opt: Optional[Any] = ..., **kw): ... def map(self, style, query_opt: Optional[Any] = ..., **kw): ... def lookup(self, style, option, state: Optional[Any] = ..., default: Optional[Any] = ...): ... def layout(self, style, layoutspec: Optional[Any] = ...): ... 
def element_create(self, elementname, etype, *args, **kw): ... def element_names(self): ... def element_options(self, elementname): ... def theme_create(self, themename, parent: Optional[Any] = ..., settings: Optional[Any] = ...): ... def theme_settings(self, themename, settings): ... def theme_names(self): ... def theme_use(self, themename: Optional[Any] = ...): ... class Widget(tkinter.Widget): def __init__(self, master, widgetname, kw: Optional[Any] = ...): ... def identify(self, x, y): ... def instate(self, statespec, callback: Optional[Any] = ..., *args, **kw): ... def state(self, statespec: Optional[Any] = ...): ... class Button(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... def invoke(self): ... class Checkbutton(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... def invoke(self): ... class Entry(Widget, tkinter.Entry): def __init__(self, master: Optional[Any] = ..., widget: Optional[Any] = ..., **kw): ... def bbox(self, index): ... def identify(self, x, y): ... def validate(self): ... class Combobox(Entry): def __init__(self, master: Optional[Any] = ..., **kw): ... def current(self, newindex: Optional[Any] = ...): ... def set(self, value): ... class Frame(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... class Label(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... class Labelframe(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... LabelFrame: Any class Menubutton(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... class Notebook(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... def add(self, child, **kw): ... def forget(self, tab_id): ... def hide(self, tab_id): ... def identify(self, x, y): ... def index(self, tab_id): ... def insert(self, pos, child, **kw): ... def select(self, tab_id: Optional[Any] = ...): ... def tab(self, tab_id, option: Optional[Any] = ..., **kw): ... def tabs(self): ... def enable_traversal(self): ... 
class Panedwindow(Widget, tkinter.PanedWindow): def __init__(self, master: Optional[Any] = ..., **kw): ... forget: Any def insert(self, pos, child, **kw): ... def pane(self, pane, option: Optional[Any] = ..., **kw): ... def sashpos(self, index, newpos: Optional[Any] = ...): ... PanedWindow: Any class Progressbar(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... def start(self, interval: Optional[Any] = ...): ... def step(self, amount: Optional[Any] = ...): ... def stop(self): ... class Radiobutton(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... def invoke(self): ... class Scale(Widget, tkinter.Scale): def __init__(self, master: Optional[Any] = ..., **kw): ... def configure(self, cnf: Optional[Any] = ..., **kw): ... def get(self, x: Optional[Any] = ..., y: Optional[Any] = ...): ... class Scrollbar(Widget, tkinter.Scrollbar): def __init__(self, master: Optional[Any] = ..., **kw): ... class Separator(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... class Sizegrip(Widget): def __init__(self, master: Optional[Any] = ..., **kw): ... if sys.version_info >= (3, 7): class Spinbox(Entry): def __init__(self, master: Any = ..., **kw: Any) -> None: ... def set(self, value: Any) -> None: ... class Treeview(Widget, tkinter.XView, tkinter.YView): def __init__(self, master: Optional[Any] = ..., **kw): ... def bbox(self, item, column: Optional[Any] = ...): ... def get_children(self, item: Optional[Any] = ...): ... def set_children(self, item, *newchildren): ... def column(self, column, option: Optional[Any] = ..., **kw): ... def delete(self, *items): ... def detach(self, *items): ... def exists(self, item): ... def focus(self, item: Optional[Any] = ...): ... def heading(self, column, option: Optional[Any] = ..., **kw): ... def identify(self, component, x, y): ... def identify_row(self, y): ... def identify_column(self, x): ... def identify_region(self, x, y): ... def identify_element(self, x, y): ... def index(self, item): ... 
def insert(self, parent, index, iid: Optional[Any] = ..., **kw): ... def item(self, item, option: Optional[Any] = ..., **kw): ... def move(self, item, parent, index): ... reattach: Any def next(self, item): ... def parent(self, item): ... def prev(self, item): ... def see(self, item): ... if sys.version_info >= (3, 8): def selection(self) -> List[Any]: ... else: def selection(self, selop: Optional[Any] = ..., items: Optional[Any] = ...) -> List[Any]: ... def selection_set(self, items): ... def selection_add(self, items): ... def selection_remove(self, items): ... def selection_toggle(self, items): ... def set(self, item, column: Optional[Any] = ..., value: Optional[Any] = ...): ... def tag_bind(self, tagname, sequence: Optional[Any] = ..., callback: Optional[Any] = ...): ... def tag_configure(self, tagname, option: Optional[Any] = ..., **kw): ... def tag_has(self, tagname, item: Optional[Any] = ...): ... class LabeledScale(Frame): label: Any scale: Any def __init__(self, master: Optional[Any] = ..., variable: Optional[Any] = ..., from_: int = ..., to: int = ..., **kw): ... def destroy(self): ... value: Any class OptionMenu(Menubutton): def __init__(self, master, variable, default: Optional[Any] = ..., *values, **kwargs): ... def __getitem__(self, item): ... def set_menu(self, default: Optional[Any] = ..., *values): ... def destroy(self): ... mypy-0.761/mypy/typeshed/stdlib/3/tokenize.pyi0000644€tŠÔÚ€2›s®0000000460713576752252025545 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Generator, Iterable, List, NamedTuple, Optional, Union, Sequence, TextIO, Tuple from builtins import open as _builtin_open import sys from token import * # noqa: F403 if sys.version_info < (3, 7): COMMENT: int NL: int ENCODING: int _Position = Tuple[int, int] class _TokenInfo(NamedTuple): type: int string: str start: _Position end: _Position line: str class TokenInfo(_TokenInfo): @property def exact_type(self) -> int: ... 
# Backwards compatible tokens can be sequences of a shorter length too _Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]] class TokenError(Exception): ... class StopTokenizing(Exception): ... class Untokenizer: tokens: List[str] prev_row: int prev_col: int encoding: Optional[str] def __init__(self) -> None: ... def add_whitespace(self, start: _Position) -> None: ... def untokenize(self, iterable: Iterable[_Token]) -> str: ... def compat(self, token: Sequence[Union[int, str]], iterable: Iterable[_Token]) -> None: ... def untokenize(iterable: Iterable[_Token]) -> Any: ... def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ... def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ... def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... # undocumented if sys.version_info >= (3, 6): from os import PathLike def open(filename: Union[str, bytes, int, PathLike[Any]]) -> TextIO: ... else: def open(filename: Union[str, bytes, int]) -> TextIO: ... 
# Names in __all__ with no definition: # AMPER # AMPEREQUAL # ASYNC # AT # ATEQUAL # AWAIT # CIRCUMFLEX # CIRCUMFLEXEQUAL # COLON # COMMA # DEDENT # DOT # DOUBLESLASH # DOUBLESLASHEQUAL # DOUBLESTAR # DOUBLESTAREQUAL # ELLIPSIS # ENDMARKER # EQEQUAL # EQUAL # ERRORTOKEN # GREATER # GREATEREQUAL # INDENT # ISEOF # ISNONTERMINAL # ISTERMINAL # LBRACE # LEFTSHIFT # LEFTSHIFTEQUAL # LESS # LESSEQUAL # LPAR # LSQB # MINEQUAL # MINUS # NAME # NEWLINE # NOTEQUAL # NT_OFFSET # NUMBER # N_TOKENS # OP # PERCENT # PERCENTEQUAL # PLUS # PLUSEQUAL # RARROW # RBRACE # RIGHTSHIFT # RIGHTSHIFTEQUAL # RPAR # RSQB # SEMI # SLASH # SLASHEQUAL # STAR # STAREQUAL # STRING # TILDE # VBAR # VBAREQUAL # tok_name mypy-0.761/mypy/typeshed/stdlib/3/tracemalloc.pyi0000644€tŠÔÚ€2›s®0000000414013576752252026173 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tracemalloc (Python 3.4+) import sys from typing import List, Optional, Sequence, Tuple, Union, overload def clear_traces() -> None: ... def get_object_traceback(obj: object) -> Optional[Traceback]: ... def get_traceback_limit() -> int: ... def get_traced_memory() -> Tuple[int, int]: ... def get_tracemalloc_memory() -> int: ... def is_tracing() -> bool: ... def start(nframe: int = ...) -> None: ... def stop() -> None: ... def take_snapshot() -> Snapshot: ... if sys.version_info >= (3, 6): class DomainFilter: inclusive: bool domain: int def __init__(self, inclusive: bool, domain: int) -> None: ... class Filter: if sys.version_info >= (3, 6): domain: Optional[int] inclusive: bool lineno: Optional[int] filename_pattern: str all_frames: bool def __init__(self, inclusive: bool, filename_pattern: str, lineno: Optional[int] = ..., all_frames: bool = ..., domain: Optional[int] = ...) -> None: ... class Frame: filename: str lineno: int class Snapshot: def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = ...) -> List[StatisticDiff]: ... def dump(self, filename: str) -> None: ... 
if sys.version_info >= (3, 6): def filter_traces(self, filters: Sequence[Union[DomainFilter, Filter]]) -> Snapshot: ... else: def filter_traces(self, filters: Sequence[Filter]) -> Snapshot: ... @classmethod def load(cls, filename: str) -> Snapshot: ... def statistics(self, key_type: str, cumulative: bool = ...) -> List[Statistic]: ... traceback_limit: int traces: Sequence[Trace] class Statistic: count: int size: int traceback: Traceback class StatisticDiff: count: int count_diff: int size: int size_diff: int traceback: Traceback class Trace: size: int traceback: Traceback class Traceback(Sequence[Frame]): def format(self, limit: Optional[int] = ...) -> List[str]: ... @overload def __getitem__(self, i: int) -> Frame: ... @overload def __getitem__(self, s: slice) -> Sequence[Frame]: ... def __len__(self) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3/types.pyi0000644€tŠÔÚ€2›s®0000002354313576752252025061 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for types # Note, all classes "defined" here require special handling. # TODO parts of this should be conditional on version import sys from typing import ( Any, Awaitable, Callable, Dict, Generic, Iterator, Mapping, Optional, Tuple, TypeVar, Union, overload, Type, Iterable ) # ModuleType is exported from this module, but for circular import # reasons exists in its own stub file (with ModuleSpec and Loader). 
from _importlib_modulespec import ModuleType as ModuleType # Exported _T = TypeVar('_T') _T_co = TypeVar('_T_co', covariant=True) _T_contra = TypeVar('_T_contra', contravariant=True) _KT = TypeVar('_KT') _VT = TypeVar('_VT') class _Cell: cell_contents: Any class FunctionType: __closure__: Optional[Tuple[_Cell, ...]] __code__: CodeType __defaults__: Optional[Tuple[Any, ...]] __dict__: Dict[str, Any] __globals__: Dict[str, Any] __name__: str __qualname__: str __annotations__: Dict[str, Any] __kwdefaults__: Dict[str, Any] def __init__(self, code: CodeType, globals: Dict[str, Any], name: Optional[str] = ..., argdefs: Optional[Tuple[object, ...]] = ..., closure: Optional[Tuple[_Cell, ...]] = ...) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __get__(self, obj: Optional[object], type: Optional[type]) -> MethodType: ... LambdaType = FunctionType class CodeType: """Create a code object. Not for the faint of heart.""" co_argcount: int if sys.version_info >= (3, 8): co_posonlyargcount: int co_kwonlyargcount: int co_nlocals: int co_stacksize: int co_flags: int co_code: bytes co_consts: Tuple[Any, ...] co_names: Tuple[str, ...] co_varnames: Tuple[str, ...] co_filename: str co_name: str co_firstlineno: int co_lnotab: bytes co_freevars: Tuple[str, ...] co_cellvars: Tuple[str, ...] if sys.version_info >= (3, 8): def __init__( self, argcount: int, posonlyargcount: int, kwonlyargcount: int, nlocals: int, stacksize: int, flags: int, codestring: bytes, constants: Tuple[Any, ...], names: Tuple[str, ...], varnames: Tuple[str, ...], filename: str, name: str, firstlineno: int, lnotab: bytes, freevars: Tuple[str, ...] = ..., cellvars: Tuple[str, ...] = ..., ) -> None: ... else: def __init__( self, argcount: int, kwonlyargcount: int, nlocals: int, stacksize: int, flags: int, codestring: bytes, constants: Tuple[Any, ...], names: Tuple[str, ...], varnames: Tuple[str, ...], filename: str, name: str, firstlineno: int, lnotab: bytes, freevars: Tuple[str, ...] 
= ..., cellvars: Tuple[str, ...] = ..., ) -> None: ... if sys.version_info >= (3, 8): def replace( self, *, co_argcount: int = ..., co_posonlyargcount: int = ..., co_kwonlyargcount: int = ..., co_nlocals: int = ..., co_stacksize: int = ..., co_flags: int = ..., co_firstlineno: int = ..., co_code: bytes = ..., co_consts: Tuple[Any, ...] = ..., co_names: Tuple[str, ...] = ..., co_varnames: Tuple[str, ...] = ..., co_freevars: Tuple[str, ...] = ..., co_cellvars: Tuple[str, ...] = ..., co_filename: str = ..., co_name: str = ..., co_lnotab: bytes = ..., ) -> CodeType: ... class MappingProxyType(Mapping[_KT, _VT], Generic[_KT, _VT]): def __init__(self, mapping: Mapping[_KT, _VT]) -> None: ... def __getitem__(self, k: _KT) -> _VT: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... def copy(self) -> Mapping[_KT, _VT]: ... class SimpleNamespace: def __init__(self, **kwargs: Any) -> None: ... def __getattribute__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __delattr__(self, name: str) -> None: ... class GeneratorType: gi_code: CodeType gi_frame: FrameType gi_running: bool gi_yieldfrom: Optional[GeneratorType] def __iter__(self) -> GeneratorType: ... def __next__(self) -> Any: ... def close(self) -> None: ... def send(self, arg: Any) -> Any: ... @overload def throw(self, val: BaseException) -> Any: ... @overload def throw(self, typ: type, val: BaseException = ..., tb: TracebackType = ...) -> Any: ... if sys.version_info >= (3, 6): class AsyncGeneratorType(Generic[_T_co, _T_contra]): ag_await: Optional[Awaitable[Any]] ag_frame: FrameType ag_running: bool ag_code: CodeType def __aiter__(self) -> Awaitable[AsyncGeneratorType[_T_co, _T_contra]]: ... def __anext__(self) -> Awaitable[_T_co]: ... def asend(self, val: _T_contra) -> Awaitable[_T_co]: ... @overload def athrow(self, val: BaseException) -> Awaitable[_T_co]: ... 
@overload def athrow(self, typ: Type[BaseException], val: BaseException, tb: TracebackType = ...) -> Awaitable[_T_co]: ... def aclose(self) -> Awaitable[None]: ... class CoroutineType: cr_await: Optional[Any] cr_code: CodeType cr_frame: FrameType cr_running: bool def close(self) -> None: ... def send(self, arg: Any) -> Any: ... @overload def throw(self, val: BaseException) -> Any: ... @overload def throw(self, typ: type, val: BaseException = ..., tb: TracebackType = ...) -> Any: ... class _StaticFunctionType: """Fictional type to correct the type of MethodType.__func__. FunctionType is a descriptor, so mypy follows the descriptor protocol and converts MethodType.__func__ back to MethodType (the return type of FunctionType.__get__). But this is actually a special case; MethodType is implemented in C and its attribute access doesn't go through __getattribute__. By wrapping FunctionType in _StaticFunctionType, we get the right result; similar to wrapping a function in staticmethod() at runtime to prevent it being bound as a method. """ def __get__(self, obj: Optional[object], type: Optional[type]) -> FunctionType: ... class MethodType: __func__: _StaticFunctionType __self__: object __name__: str __qualname__: str def __init__(self, func: Callable[..., Any], obj: object) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... class BuiltinFunctionType: __self__: Union[object, ModuleType] __name__: str __qualname__: str def __call__(self, *args: Any, **kwargs: Any) -> Any: ... BuiltinMethodType = BuiltinFunctionType if sys.version_info >= (3, 7): class WrapperDescriptorType: __name__: str __qualname__: str __objclass__: type def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __get__(self, obj: Any, type: type = ...) -> Any: ... class MethodWrapperType: __self__: object __name__: str __qualname__: str __objclass__: type def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __eq__(self, other: Any) -> bool: ... 
def __ne__(self, other: Any) -> bool: ... class MethodDescriptorType: __name__: str __qualname__: str __objclass__: type def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __get__(self, obj: Any, type: type = ...) -> Any: ... class ClassMethodDescriptorType: __name__: str __qualname__: str __objclass__: type def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __get__(self, obj: Any, type: type = ...) -> Any: ... class TracebackType: if sys.version_info >= (3, 7): def __init__(self, tb_next: Optional[TracebackType], tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> None: ... tb_next: Optional[TracebackType] else: @property def tb_next(self) -> Optional[TracebackType]: ... # the rest are read-only even in 3.7 @property def tb_frame(self) -> FrameType: ... @property def tb_lasti(self) -> int: ... @property def tb_lineno(self) -> int: ... class FrameType: f_back: FrameType f_builtins: Dict[str, Any] f_code: CodeType f_globals: Dict[str, Any] f_lasti: int f_lineno: int f_locals: Dict[str, Any] f_trace: Callable[[], None] if sys.version_info >= (3, 7): f_trace_lines: bool f_trace_opcodes: bool def clear(self) -> None: ... class GetSetDescriptorType: __name__: str __objclass__: type def __get__(self, obj: Any, type: type = ...) -> Any: ... def __set__(self, obj: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... class MemberDescriptorType: __name__: str __objclass__: type def __get__(self, obj: Any, type: type = ...) -> Any: ... def __set__(self, obj: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... if sys.version_info >= (3, 7): def new_class(name: str, bases: Iterable[object] = ..., kwds: Dict[str, Any] = ..., exec_body: Callable[[Dict[str, Any]], None] = ...) -> type: ... def resolve_bases(bases: Iterable[object]) -> Tuple[Any, ...]: ... else: def new_class(name: str, bases: Tuple[type, ...] = ..., kwds: Dict[str, Any] = ..., exec_body: Callable[[Dict[str, Any]], None] = ...) -> type: ... 
def prepare_class(name: str, bases: Tuple[type, ...] = ..., kwds: Dict[str, Any] = ...) -> Tuple[type, Dict[str, Any], Dict[str, Any]]: ... # Actually a different type, but `property` is special and we want that too. DynamicClassAttribute = property def coroutine(f: Callable[..., Any]) -> CoroutineType: ... mypy-0.761/mypy/typeshed/stdlib/3/typing.pyi0000644€tŠÔÚ€2›s®0000005370713576752252025234 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for typing import sys from abc import abstractmethod, ABCMeta from types import CodeType, FrameType, TracebackType import collections # Needed by aliases like DefaultDict, see mypy issue 2986 # Definitions of special type checking related constructs. Their definitions # are not used, so their value does not matter. overload = object() Any = object() TypeVar = object() _promote = object() class _SpecialForm: def __getitem__(self, typeargs: Any) -> Any: ... Tuple: _SpecialForm = ... Generic: _SpecialForm = ... Protocol: _SpecialForm = ... Callable: _SpecialForm = ... Type: _SpecialForm = ... ClassVar: _SpecialForm = ... if sys.version_info >= (3, 8): Final: _SpecialForm = ... _F = TypeVar('_F', bound=Callable[..., Any]) def final(f: _F) -> _F: ... Literal: _SpecialForm = ... # TypedDict is a (non-subscriptable) special form. TypedDict: object class GenericMeta(type): ... # Return type that indicates a function does not return. # This type is equivalent to the None type, but the no-op Union is necessary to # distinguish the None type from the None value. NoReturn = Union[None] # These type variables are used by the container types. _T = TypeVar('_T') _S = TypeVar('_S') _KT = TypeVar('_KT') # Key type. _VT = TypeVar('_VT') # Value type. _T_co = TypeVar('_T_co', covariant=True) # Any type covariant containers. _V_co = TypeVar('_V_co', covariant=True) # Any type covariant containers. _KT_co = TypeVar('_KT_co', covariant=True) # Key type covariant containers. 
_VT_co = TypeVar('_VT_co', covariant=True) # Value type covariant containers. _T_contra = TypeVar('_T_contra', contravariant=True) # Ditto contravariant. _TC = TypeVar('_TC', bound=Type[object]) _C = TypeVar("_C", bound=Callable[..., Any]) no_type_check = object() def no_type_check_decorator(decorator: _C) -> _C: ... # Type aliases and type constructors class TypeAlias: # Class for defining generic aliases for library types. def __init__(self, target_type: type) -> None: ... def __getitem__(self, typeargs: Any) -> Any: ... Union = TypeAlias(object) Optional = TypeAlias(object) List = TypeAlias(object) Dict = TypeAlias(object) DefaultDict = TypeAlias(object) Set = TypeAlias(object) FrozenSet = TypeAlias(object) Counter = TypeAlias(object) Deque = TypeAlias(object) ChainMap = TypeAlias(object) if sys.version_info >= (3, 7): OrderedDict = TypeAlias(object) # Predefined type variables. AnyStr = TypeVar('AnyStr', str, bytes) # Abstract base classes. def runtime_checkable(cls: _TC) -> _TC: ... @runtime_checkable class SupportsInt(Protocol, metaclass=ABCMeta): @abstractmethod def __int__(self) -> int: ... @runtime_checkable class SupportsFloat(Protocol, metaclass=ABCMeta): @abstractmethod def __float__(self) -> float: ... @runtime_checkable class SupportsComplex(Protocol, metaclass=ABCMeta): @abstractmethod def __complex__(self) -> complex: ... @runtime_checkable class SupportsBytes(Protocol, metaclass=ABCMeta): @abstractmethod def __bytes__(self) -> bytes: ... if sys.version_info >= (3, 8): @runtime_checkable class SupportsIndex(Protocol, metaclass=ABCMeta): @abstractmethod def __index__(self) -> int: ... @runtime_checkable class SupportsAbs(Protocol[_T_co]): @abstractmethod def __abs__(self) -> _T_co: ... @runtime_checkable class SupportsRound(Protocol[_T_co]): @overload @abstractmethod def __round__(self) -> int: ... @overload @abstractmethod def __round__(self, ndigits: int) -> _T_co: ... 
@runtime_checkable class Reversible(Protocol[_T_co]): @abstractmethod def __reversed__(self) -> Iterator[_T_co]: ... @runtime_checkable class Sized(Protocol, metaclass=ABCMeta): @abstractmethod def __len__(self) -> int: ... @runtime_checkable class Hashable(Protocol, metaclass=ABCMeta): # TODO: This is special, in that a subclass of a hashable class may not be hashable # (for example, list vs. object). It's not obvious how to represent this. This class # is currently mostly useless for static checking. @abstractmethod def __hash__(self) -> int: ... @runtime_checkable class Iterable(Protocol[_T_co]): @abstractmethod def __iter__(self) -> Iterator[_T_co]: ... @runtime_checkable class Iterator(Iterable[_T_co], Protocol[_T_co]): @abstractmethod def __next__(self) -> _T_co: ... def __iter__(self) -> Iterator[_T_co]: ... class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]): @abstractmethod def __next__(self) -> _T_co: ... @abstractmethod def send(self, value: _T_contra) -> _T_co: ... @abstractmethod def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ..., tb: Optional[TracebackType] = ...) -> _T_co: ... @abstractmethod def close(self) -> None: ... @abstractmethod def __iter__(self) -> Generator[_T_co, _T_contra, _V_co]: ... @property def gi_code(self) -> CodeType: ... @property def gi_frame(self) -> FrameType: ... @property def gi_running(self) -> bool: ... @property def gi_yieldfrom(self) -> Optional[Generator[Any, Any, Any]]: ... @runtime_checkable class Awaitable(Protocol[_T_co]): @abstractmethod def __await__(self) -> Generator[Any, None, _T_co]: ... class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]): @property def cr_await(self) -> Optional[Any]: ... @property def cr_code(self) -> CodeType: ... @property def cr_frame(self) -> FrameType: ... @property def cr_running(self) -> bool: ... @abstractmethod def send(self, value: _T_contra) -> _T_co: ... 
@abstractmethod def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ..., tb: Optional[TracebackType] = ...) -> _T_co: ... @abstractmethod def close(self) -> None: ... # NOTE: This type does not exist in typing.py or PEP 484. # The parameters correspond to Generator, but the 4th is the original type. class AwaitableGenerator(Awaitable[_V_co], Generator[_T_co, _T_contra, _V_co], Generic[_T_co, _T_contra, _V_co, _S], metaclass=ABCMeta): ... @runtime_checkable class AsyncIterable(Protocol[_T_co]): @abstractmethod def __aiter__(self) -> AsyncIterator[_T_co]: ... @runtime_checkable class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): @abstractmethod def __anext__(self) -> Awaitable[_T_co]: ... def __aiter__(self) -> AsyncIterator[_T_co]: ... if sys.version_info >= (3, 6): class AsyncGenerator(AsyncIterator[_T_co], Generic[_T_co, _T_contra]): @abstractmethod def __anext__(self) -> Awaitable[_T_co]: ... @abstractmethod def asend(self, value: _T_contra) -> Awaitable[_T_co]: ... @abstractmethod def athrow(self, typ: Type[BaseException], val: Optional[BaseException] = ..., tb: Any = ...) -> Awaitable[_T_co]: ... @abstractmethod def aclose(self) -> Awaitable[None]: ... @abstractmethod def __aiter__(self) -> AsyncGenerator[_T_co, _T_contra]: ... @property def ag_await(self) -> Any: ... @property def ag_code(self) -> CodeType: ... @property def ag_frame(self) -> FrameType: ... @property def ag_running(self) -> bool: ... @runtime_checkable class Container(Protocol[_T_co]): @abstractmethod def __contains__(self, __x: object) -> bool: ... if sys.version_info >= (3, 6): @runtime_checkable class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): # Implement Sized (but don't have it as a base class). @abstractmethod def __len__(self) -> int: ... _Collection = Collection else: @runtime_checkable class _Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): # Implement Sized (but don't have it as a base class). 
@abstractmethod def __len__(self) -> int: ... class Sequence(_Collection[_T_co], Reversible[_T_co], Generic[_T_co]): @overload @abstractmethod def __getitem__(self, i: int) -> _T_co: ... @overload @abstractmethod def __getitem__(self, s: slice) -> Sequence[_T_co]: ... # Mixin methods def index(self, x: Any, start: int = ..., end: int = ...) -> int: ... def count(self, x: Any) -> int: ... def __contains__(self, x: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... def __reversed__(self) -> Iterator[_T_co]: ... class MutableSequence(Sequence[_T], Generic[_T]): @abstractmethod def insert(self, index: int, object: _T) -> None: ... @overload @abstractmethod def __getitem__(self, i: int) -> _T: ... @overload @abstractmethod def __getitem__(self, s: slice) -> MutableSequence[_T]: ... @overload @abstractmethod def __setitem__(self, i: int, o: _T) -> None: ... @overload @abstractmethod def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... @overload @abstractmethod def __delitem__(self, i: int) -> None: ... @overload @abstractmethod def __delitem__(self, i: slice) -> None: ... # Mixin methods def append(self, object: _T) -> None: ... def clear(self) -> None: ... def extend(self, iterable: Iterable[_T]) -> None: ... def reverse(self) -> None: ... def pop(self, index: int = ...) -> _T: ... def remove(self, object: _T) -> None: ... def __iadd__(self, x: Iterable[_T]) -> MutableSequence[_T]: ... class AbstractSet(_Collection[_T_co], Generic[_T_co]): @abstractmethod def __contains__(self, x: object) -> bool: ... # Mixin methods def __le__(self, s: AbstractSet[Any]) -> bool: ... def __lt__(self, s: AbstractSet[Any]) -> bool: ... def __gt__(self, s: AbstractSet[Any]) -> bool: ... def __ge__(self, s: AbstractSet[Any]) -> bool: ... def __and__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ... def __or__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ... def __sub__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ... 
def __xor__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ... def isdisjoint(self, s: Iterable[Any]) -> bool: ... class MutableSet(AbstractSet[_T], Generic[_T]): @abstractmethod def add(self, x: _T) -> None: ... @abstractmethod def discard(self, x: _T) -> None: ... # Mixin methods def clear(self) -> None: ... def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... def __ior__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ... def __iand__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... def __ixor__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ... def __isub__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... class MappingView: def __len__(self) -> int: ... class ItemsView(MappingView, AbstractSet[Tuple[_KT_co, _VT_co]], Generic[_KT_co, _VT_co]): def __and__(self, o: Iterable[Any]) -> Set[Tuple[_KT_co, _VT_co]]: ... def __rand__(self, o: Iterable[_T]) -> Set[_T]: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... if sys.version_info >= (3, 8): def __reversed__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... def __or__(self, o: Iterable[_T]) -> Set[Union[Tuple[_KT_co, _VT_co], _T]]: ... def __ror__(self, o: Iterable[_T]) -> Set[Union[Tuple[_KT_co, _VT_co], _T]]: ... def __sub__(self, o: Iterable[Any]) -> Set[Tuple[_KT_co, _VT_co]]: ... def __rsub__(self, o: Iterable[_T]) -> Set[_T]: ... def __xor__(self, o: Iterable[_T]) -> Set[Union[Tuple[_KT_co, _VT_co], _T]]: ... def __rxor__(self, o: Iterable[_T]) -> Set[Union[Tuple[_KT_co, _VT_co], _T]]: ... class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]): def __and__(self, o: Iterable[Any]) -> Set[_KT_co]: ... def __rand__(self, o: Iterable[_T]) -> Set[_T]: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT_co]: ... if sys.version_info >= (3, 8): def __reversed__(self) -> Iterator[_KT_co]: ... def __or__(self, o: Iterable[_T]) -> Set[Union[_KT_co, _T]]: ... 
def __ror__(self, o: Iterable[_T]) -> Set[Union[_KT_co, _T]]: ... def __sub__(self, o: Iterable[Any]) -> Set[_KT_co]: ... def __rsub__(self, o: Iterable[_T]) -> Set[_T]: ... def __xor__(self, o: Iterable[_T]) -> Set[Union[_KT_co, _T]]: ... def __rxor__(self, o: Iterable[_T]) -> Set[Union[_KT_co, _T]]: ... class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_VT_co]: ... if sys.version_info >= (3, 8): def __reversed__(self) -> Iterator[_VT_co]: ... @runtime_checkable class ContextManager(Protocol[_T_co]): def __enter__(self) -> _T_co: ... def __exit__(self, __exc_type: Optional[Type[BaseException]], __exc_value: Optional[BaseException], __traceback: Optional[TracebackType]) -> Optional[bool]: ... @runtime_checkable class AsyncContextManager(Protocol[_T_co]): def __aenter__(self) -> Awaitable[_T_co]: ... def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Awaitable[Optional[bool]]: ... class Mapping(_Collection[_KT], Generic[_KT, _VT_co]): # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https: //github.com/python/typing/pull/273. @abstractmethod def __getitem__(self, k: _KT) -> _VT_co: ... # Mixin methods @overload def get(self, k: _KT) -> Optional[_VT_co]: ... @overload def get(self, k: _KT, default: Union[_VT_co, _T]) -> Union[_VT_co, _T]: ... def items(self) -> AbstractSet[Tuple[_KT, _VT_co]]: ... def keys(self) -> AbstractSet[_KT]: ... def values(self) -> ValuesView[_VT_co]: ... def __contains__(self, o: object) -> bool: ... class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): @abstractmethod def __setitem__(self, k: _KT, v: _VT) -> None: ... @abstractmethod def __delitem__(self, v: _KT) -> None: ... def clear(self) -> None: ... @overload def pop(self, k: _KT) -> _VT: ... @overload def pop(self, k: _KT, default: Union[_VT, _T] = ...) 
-> Union[_VT, _T]: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... # 'update' used to take a Union, but using overloading is better. # The second overloaded type here is a bit too general, because # Mapping[Tuple[_KT, _VT], W] is a subclass of Iterable[Tuple[_KT, _VT]], # but will always have the behavior of the first overloaded type # at runtime, leading to keys of a mix of types _KT and Tuple[_KT, _VT]. # We don't currently have any way of forcing all Mappings to use # the first overload, but by using overloading rather than a Union, # mypy will commit to using the first overload when the argument is # known to be a Mapping with unknown type parameters, which is closer # to the behavior we want. See mypy issue #1430. @overload def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... Text = str TYPE_CHECKING = True class IO(Iterator[AnyStr], Generic[AnyStr]): # TODO detach # TODO use abstract properties @property def mode(self) -> str: ... @property def name(self) -> str: ... @abstractmethod def close(self) -> None: ... @property def closed(self) -> bool: ... @abstractmethod def fileno(self) -> int: ... @abstractmethod def flush(self) -> None: ... @abstractmethod def isatty(self) -> bool: ... # TODO what if n is None? @abstractmethod def read(self, n: int = ...) -> AnyStr: ... @abstractmethod def readable(self) -> bool: ... @abstractmethod def readline(self, limit: int = ...) -> AnyStr: ... @abstractmethod def readlines(self, hint: int = ...) -> list[AnyStr]: ... @abstractmethod def seek(self, offset: int, whence: int = ...) -> int: ... @abstractmethod def seekable(self) -> bool: ... @abstractmethod def tell(self) -> int: ... @abstractmethod def truncate(self, size: Optional[int] = ...) -> int: ... @abstractmethod def writable(self) -> bool: ... 
# TODO buffer objects @abstractmethod def write(self, s: AnyStr) -> int: ... @abstractmethod def writelines(self, lines: Iterable[AnyStr]) -> None: ... @abstractmethod def __next__(self) -> AnyStr: ... @abstractmethod def __iter__(self) -> Iterator[AnyStr]: ... @abstractmethod def __enter__(self) -> IO[AnyStr]: ... @abstractmethod def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType]) -> Optional[bool]: ... class BinaryIO(IO[bytes]): # TODO readinto # TODO read1? # TODO peek? @overload @abstractmethod def write(self, s: bytearray) -> int: ... @overload @abstractmethod def write(self, s: bytes) -> int: ... @abstractmethod def __enter__(self) -> BinaryIO: ... class TextIO(IO[str]): # TODO use abstractproperty @property def buffer(self) -> BinaryIO: ... @property def encoding(self) -> str: ... @property def errors(self) -> Optional[str]: ... @property def line_buffering(self) -> int: ... # int on PyPy, bool on CPython @property def newlines(self) -> Any: ... # None, str or tuple @abstractmethod def __enter__(self) -> TextIO: ... class ByteString(Sequence[int], metaclass=ABCMeta): ... class Match(Generic[AnyStr]): pos: int endpos: int lastindex: Optional[int] lastgroup: Optional[AnyStr] string: AnyStr # The regular expression object whose match() or search() method produced # this match instance. re: Pattern[AnyStr] def expand(self, template: AnyStr) -> AnyStr: ... # TODO: The return for a group may be None, except if __group is 0 or not given. @overload def group(self, __group: Union[str, int] = ...) -> AnyStr: ... @overload def group( self, __group1: Union[str, int], __group2: Union[str, int], *groups: Union[str, int], ) -> Tuple[AnyStr, ...]: ... def groups(self, default: AnyStr = ...) -> Sequence[AnyStr]: ... def groupdict(self, default: AnyStr = ...) -> dict[str, AnyStr]: ... def start(self, group: Union[int, str] = ...) -> int: ... def end(self, group: Union[int, str] = ...) -> int: ... 
def span(self, group: Union[int, str] = ...) -> Tuple[int, int]: ... @property def regs(self) -> Tuple[Tuple[int, int], ...]: ... # undocumented if sys.version_info >= (3, 6): def __getitem__(self, g: Union[int, str]) -> AnyStr: ... class Pattern(Generic[AnyStr]): flags: int groupindex: Mapping[str, int] groups: int pattern: AnyStr def search(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Optional[Match[AnyStr]]: ... def match(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Optional[Match[AnyStr]]: ... # New in Python 3.4 def fullmatch(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Optional[Match[AnyStr]]: ... def split(self, string: AnyStr, maxsplit: int = ...) -> list[AnyStr]: ... def findall(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> list[Any]: ... def finditer(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Iterator[Match[AnyStr]]: ... @overload def sub(self, repl: AnyStr, string: AnyStr, count: int = ...) -> AnyStr: ... @overload def sub(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ...) -> AnyStr: ... @overload def subn(self, repl: AnyStr, string: AnyStr, count: int = ...) -> Tuple[AnyStr, int]: ... @overload def subn(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ...) -> Tuple[AnyStr, int]: ... # Functions def get_type_hints( obj: Callable[..., Any], globalns: Optional[Dict[str, Any]] = ..., localns: Optional[Dict[str, Any]] = ..., ) -> Dict[str, Any]: ... if sys.version_info >= (3, 8): def get_origin(tp: Any) -> Optional[Any]: ... def get_args(tp: Any) -> Tuple[Any, ...]: ... @overload def cast(tp: Type[_T], obj: Any) -> _T: ... @overload def cast(tp: str, obj: Any) -> Any: ... # Type constructors # NamedTuple is special-cased in the type checker class NamedTuple(Tuple[Any, ...]): _field_types: collections.OrderedDict[str, Type[Any]] _field_defaults: Dict[str, Any] = ... _fields: Tuple[str, ...] 
_source: str def __init__(self, typename: str, fields: Iterable[Tuple[str, Any]] = ..., **kwargs: Any) -> None: ... @classmethod def _make(cls: Type[_T], iterable: Iterable[Any]) -> _T: ... if sys.version_info >= (3, 8): def _asdict(self) -> Dict[str, Any]: ... else: def _asdict(self) -> collections.OrderedDict[str, Any]: ... def _replace(self: _T, **kwargs: Any) -> _T: ... # Internal mypy fallback type for all typed dicts (does not exist at runtime) class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def copy(self: _T) -> _T: ... # Using NoReturn so that only calls using mypy plugin hook that specialize the signature # can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: _T = ...) -> object: ... def update(self: _T, __m: _T) -> None: ... def __delitem__(self, k: NoReturn) -> None: ... def NewType(name: str, tp: Type[_T]) -> Type[_T]: ... # This itself is only available during type checking def type_check_only(func_or_cls: _C) -> _C: ... mypy-0.761/mypy/typeshed/stdlib/3/unittest/0000755€tŠÔÚ€2›s®0000000000013576752267025050 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/unittest/__init__.pyi0000644€tŠÔÚ€2›s®0000000201213576752252027317 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for unittest from typing import Iterable, List, Optional, Type, Union from types import ModuleType from unittest.async_case import * from unittest.case import * from unittest.loader import * from unittest.result import * from unittest.runner import * from unittest.signals import * from unittest.suite import * # not really documented class TestProgram: result: TestResult def runTests(self) -> None: ... 
# undocumented def main(module: Union[None, str, ModuleType] = ..., defaultTest: Union[str, Iterable[str], None] = ..., argv: Optional[List[str]] = ..., testRunner: Union[Type[TestRunner], TestRunner, None] = ..., testLoader: TestLoader = ..., exit: bool = ..., verbosity: int = ..., failfast: Optional[bool] = ..., catchbreak: Optional[bool] = ..., buffer: Optional[bool] = ..., warnings: Optional[str] = ...) -> TestProgram: ... def load_tests(loader: TestLoader, tests: TestSuite, pattern: Optional[str]) -> TestSuite: ... mypy-0.761/mypy/typeshed/stdlib/3/unittest/async_case.pyi0000644€tŠÔÚ€2›s®0000000056313576752252027701 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Awaitable, Callable from .case import TestCase if sys.version_info >= (3, 8): class IsolatedAsyncioTestCase(TestCase): async def asyncSetUp(self) -> None: ... async def asyncTearDown(self) -> None: ... def addAsyncCleanup(self, __func: Callable[..., Awaitable[Any]], *args: Any, **kwargs: Any) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/unittest/case.pyi0000644€tŠÔÚ€2›s®0000003131213576752252026500 0ustar jukkaDROPBOX\Domain Users00000000000000import datetime import logging import sys import unittest.result from types import TracebackType from typing import ( Any, AnyStr, Callable, Container, ContextManager, Dict, FrozenSet, Generic, Iterable, List, Mapping, NoReturn, Optional, overload, Pattern, Sequence, Set, Tuple, Type, TypeVar, Union, ) _E = TypeVar('_E', bound=BaseException) _FT = TypeVar('_FT', bound=Callable[..., Any]) if sys.version_info >= (3, 8): def addModuleCleanup(__function: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... def doModuleCleanups() -> None: ... def expectedFailure(func: _FT) -> _FT: ... def skip(reason: str) -> Callable[[_FT], _FT]: ... def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ... def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ... 
class SkipTest(Exception): def __init__(self, reason: str) -> None: ... class TestCase: failureException: Type[BaseException] longMessage: bool maxDiff: Optional[int] # undocumented _testMethodName: str # undocumented _testMethodDoc: str def __init__(self, methodName: str = ...) -> None: ... def setUp(self) -> None: ... def tearDown(self) -> None: ... @classmethod def setUpClass(cls) -> None: ... @classmethod def tearDownClass(cls) -> None: ... def run(self, result: Optional[unittest.result.TestResult] = ...) -> Optional[unittest.result.TestResult]: ... def __call__(self, result: Optional[unittest.result.TestResult] = ...) -> Optional[unittest.result.TestResult]: ... def skipTest(self, reason: Any) -> None: ... def subTest(self, msg: Any = ..., **params: Any) -> ContextManager[None]: ... def debug(self) -> None: ... def _addSkip( self, result: unittest.result.TestResult, test_case: unittest.case.TestCase, reason: str ) -> None: ... def assertEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertNotEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertTrue(self, expr: Any, msg: Any = ...) -> None: ... def assertFalse(self, expr: Any, msg: Any = ...) -> None: ... def assertIs(self, expr1: Any, expr2: Any, msg: Any = ...) -> None: ... def assertIsNot(self, expr1: Any, expr2: Any, msg: Any = ...) -> None: ... def assertIsNone(self, obj: Any, msg: Any = ...) -> None: ... def assertIsNotNone(self, obj: Any, msg: Any = ...) -> None: ... def assertIn(self, member: Any, container: Union[Iterable[Any], Container[Any]], msg: Any = ...) -> None: ... def assertNotIn(self, member: Any, container: Union[Iterable[Any], Container[Any]], msg: Any = ...) -> None: ... def assertIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]], msg: Any = ...) -> None: ... def assertNotIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]], msg: Any = ...) -> None: ... def assertGreater(self, a: Any, b: Any, msg: Any = ...) -> None: ... 
def assertGreaterEqual(self, a: Any, b: Any, msg: Any = ...) -> None: ... def assertLess(self, a: Any, b: Any, msg: Any = ...) -> None: ... def assertLessEqual(self, a: Any, b: Any, msg: Any = ...) -> None: ... @overload def assertRaises(self, # type: ignore expected_exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertRaises(self, expected_exception: Union[Type[_E], Tuple[Type[_E], ...]], msg: Any = ...) -> _AssertRaisesContext[_E]: ... @overload def assertRaisesRegex(self, # type: ignore expected_exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], expected_regex: Union[str, bytes, Pattern[str], Pattern[bytes]], callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertRaisesRegex(self, expected_exception: Union[Type[_E], Tuple[Type[_E], ...]], expected_regex: Union[str, bytes, Pattern[str], Pattern[bytes]], msg: Any = ...) -> _AssertRaisesContext[_E]: ... @overload def assertWarns(self, # type: ignore expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]], callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertWarns(self, expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]], msg: Any = ...) -> _AssertWarnsContext: ... @overload def assertWarnsRegex(self, # type: ignore expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]], expected_regex: Union[str, bytes, Pattern[str], Pattern[bytes]], callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertWarnsRegex(self, expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]], expected_regex: Union[str, bytes, Pattern[str], Pattern[bytes]], msg: Any = ...) -> _AssertWarnsContext: ... def assertLogs( self, logger: Optional[Union[str, logging.Logger]] = ..., level: Union[int, str, None] = ... ) -> _AssertLogsContext: ... 
@overload def assertAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ...) -> None: ... @overload def assertAlmostEqual(self, first: datetime.datetime, second: datetime.datetime, places: int = ..., msg: Any = ..., delta: datetime.timedelta = ...) -> None: ... @overload def assertNotAlmostEqual(self, first: float, second: float, *, msg: Any = ...) -> None: ... @overload def assertNotAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... @overload def assertNotAlmostEqual(self, first: float, second: float, *, msg: Any = ..., delta: float = ...) -> None: ... @overload def assertNotAlmostEqual(self, first: datetime.datetime, second: datetime.datetime, places: int = ..., msg: Any = ..., delta: datetime.timedelta = ...) -> None: ... def assertRegex(self, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: Any = ...) -> None: ... def assertNotRegex(self, text: AnyStr, unexpected_regex: Union[AnyStr, Pattern[AnyStr]], msg: Any = ...) -> None: ... def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = ...) -> None: ... def addTypeEqualityFunc(self, typeobj: Type[Any], function: Callable[..., None]) -> None: ... def assertMultiLineEqual(self, first: str, second: str, msg: Any = ...) -> None: ... def assertSequenceEqual(self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = ..., seq_type: Type[Sequence[Any]] = ...) -> None: ... def assertListEqual(self, list1: List[Any], list2: List[Any], msg: Any = ...) -> None: ... def assertTupleEqual(self, tuple1: Tuple[Any, ...], tuple2: Tuple[Any, ...], msg: Any = ...) -> None: ... def assertSetEqual(self, set1: Union[Set[Any], FrozenSet[Any]], set2: Union[Set[Any], FrozenSet[Any]], msg: Any = ...) -> None: ... def assertDictEqual(self, d1: Dict[Any, Any], d2: Dict[Any, Any], msg: Any = ...) -> None: ... def fail(self, msg: Any = ...) -> NoReturn: ... def countTestCases(self) -> int: ... 
def defaultTestResult(self) -> unittest.result.TestResult: ... def id(self) -> str: ... def shortDescription(self) -> Optional[str]: ... def addCleanup(self, function: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... def doCleanups(self) -> None: ... if sys.version_info >= (3, 8): @classmethod def addClassCleanup(cls, __function: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @classmethod def doClassCleanups(cls) -> None: ... def _formatMessage(self, msg: Optional[str], standardMsg: str) -> str: ... # undocumented def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: ... # undocumented # below is deprecated def failUnlessEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertEquals(self, first: Any, second: Any, msg: Any = ...) -> None: ... def failIfEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertNotEquals(self, first: Any, second: Any, msg: Any = ...) -> None: ... def failUnless(self, expr: bool, msg: Any = ...) -> None: ... def assert_(self, expr: bool, msg: Any = ...) -> None: ... def failIf(self, expr: bool, msg: Any = ...) -> None: ... @overload def failUnlessRaises(self, # type: ignore exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], callable: Callable[..., Any] = ..., *args: Any, **kwargs: Any) -> None: ... @overload def failUnlessRaises(self, exception: Union[Type[_E], Tuple[Type[_E], ...]], msg: Any = ...) -> _AssertRaisesContext[_E]: ... def failUnlessAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... def assertAlmostEquals(self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ...) -> None: ... def failIfAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... def assertNotAlmostEquals(self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ...) -> None: ... 
def assertRegexpMatches(self, text: AnyStr, regex: Union[AnyStr, Pattern[AnyStr]], msg: Any = ...) -> None: ... @overload def assertRaisesRegexp(self, # type: ignore exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], expected_regex: Union[str, bytes, Pattern[str], Pattern[bytes]], callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertRaisesRegexp(self, exception: Union[Type[_E], Tuple[Type[_E], ...]], expected_regex: Union[str, bytes, Pattern[str], Pattern[bytes]], msg: Any = ...) -> _AssertRaisesContext[_E]: ... def assertDictContainsSubset(self, expected: Mapping[Any, Any], actual: Mapping[Any, Any], msg: object = ...) -> None: ... class FunctionTestCase(TestCase): def __init__(self, testFunc: Callable[[], None], setUp: Optional[Callable[[], None]] = ..., tearDown: Optional[Callable[[], None]] = ..., description: Optional[str] = ...) -> None: ... class _AssertRaisesContext(Generic[_E]): exception: _E def __enter__(self) -> _AssertRaisesContext[_E]: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> bool: ... class _AssertWarnsContext: warning: Warning filename: str lineno: int def __enter__(self) -> _AssertWarnsContext: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> None: ... class _AssertLogsContext: records: List[logging.LogRecord] output: List[str] def __enter__(self) -> _AssertLogsContext: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> Optional[bool]: ... 
mypy-0.761/mypy/typeshed/stdlib/3/unittest/loader.pyi0000644€tŠÔÚ€2›s®0000000230613576752252027034 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest.case import unittest.suite import unittest.result from types import ModuleType from typing import Any, Callable, List, Optional, Sequence, Type class TestLoader: errors: List[Type[BaseException]] testMethodPrefix: str sortTestMethodsUsing: Callable[[str, str], bool] suiteClass: Callable[[List[unittest.case.TestCase]], unittest.suite.TestSuite] def loadTestsFromTestCase(self, testCaseClass: Type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... def loadTestsFromModule(self, module: ModuleType, *, pattern: Any = ...) -> unittest.suite.TestSuite: ... def loadTestsFromName(self, name: str, module: Optional[ModuleType] = ...) -> unittest.suite.TestSuite: ... def loadTestsFromNames(self, names: Sequence[str], module: Optional[ModuleType] = ...) -> unittest.suite.TestSuite: ... def getTestCaseNames(self, testCaseClass: Type[unittest.case.TestCase]) -> Sequence[str]: ... def discover(self, start_dir: str, pattern: str = ..., top_level_dir: Optional[str] = ...) -> unittest.suite.TestSuite: ... defaultTestLoader: TestLoader mypy-0.761/mypy/typeshed/stdlib/3/unittest/mock.pyi0000644€tŠÔÚ€2›s®0000001227013576752252026520 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for mock import sys from typing import Any, List, Optional, Text, Tuple, Type, TypeVar _T = TypeVar("_T") FILTER_DIR: Any class _slotted: ... class _SentinelObject: name: Any def __init__(self, name: Any) -> None: ... class _Sentinel: def __init__(self) -> None: ... def __getattr__(self, name: str) -> Any: ... sentinel: Any DEFAULT: Any class _CallList(List[_T]): def __contains__(self, value: Any) -> bool: ... class _MockIter: obj: Any def __init__(self, obj: Any) -> None: ... def __iter__(self) -> Any: ... def __next__(self) -> Any: ... class Base: def __init__(self, *args: Any, **kwargs: Any) -> None: ... 
# TODO: Defining this and other mock classes as classes in this stub causes # many false positives with mypy and production code. See if we can # improve mypy somehow and use a class with an "Any" base class. NonCallableMock = Any class CallableMixin(Base): side_effect: Any def __init__(self, spec: Optional[Any] = ..., side_effect: Optional[Any] = ..., return_value: Any = ..., wraps: Optional[Any] = ..., name: Optional[Any] = ..., spec_set: Optional[Any] = ..., parent: Optional[Any] = ..., _spec_state: Optional[Any] = ..., _new_name: Any = ..., _new_parent: Optional[Any] = ..., **kwargs: Any) -> None: ... def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ... Mock = Any class _patch: attribute_name: Any getter: Any attribute: Any new: Any new_callable: Any spec: Any create: bool has_local: Any spec_set: Any autospec: Any kwargs: Any additional_patchers: Any def __init__(self, getter: Any, attribute: Any, new: Any, spec: Any, create: Any, spec_set: Any, autospec: Any, new_callable: Any, kwargs: Any) -> None: ... def copy(self) -> Any: ... def __call__(self, func: Any) -> Any: ... def decorate_class(self, klass: Any) -> Any: ... def decorate_callable(self, func: Any) -> Any: ... def get_original(self) -> Any: ... target: Any temp_original: Any is_local: Any def __enter__(self) -> Any: ... def __exit__(self, *exc_info: Any) -> Any: ... def start(self) -> Any: ... def stop(self) -> Any: ... class _patch_dict: in_dict: Any values: Any clear: Any def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ... def __call__(self, f: Any) -> Any: ... def decorate_class(self, klass: Any) -> Any: ... def __enter__(self) -> Any: ... def __exit__(self, *args: Any) -> Any: ... 
start: Any stop: Any class _patcher: TEST_PREFIX: str dict: Type[_patch_dict] def __call__(self, target: Any, new: Optional[Any] = ..., spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> _patch: ... def object(self, target: Any, attribute: Text, new: Optional[Any] = ..., spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> _patch: ... def multiple(self, target: Any, spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> _patch: ... def stopall(self) -> None: ... patch: _patcher class MagicMixin: def __init__(self, *args: Any, **kw: Any) -> None: ... NonCallableMagicMock = Any MagicMock = Any if sys.version_info >= (3, 8): AsyncMock = Any class MagicProxy: name: Any parent: Any def __init__(self, name: Any, parent: Any) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def create_mock(self) -> Any: ... def __get__(self, obj: Any, _type: Optional[Any] = ...) -> Any: ... class _ANY: def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... ANY: Any class _Call(Tuple[Any, ...]): def __new__(cls, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...) -> Any: ... name: Any parent: Any from_kall: Any def __init__(self, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...) -> None: ... def __eq__(self, other: Any) -> bool: ... __ne__: Any def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __getattr__(self, attr: Any) -> Any: ... def count(self, *args: Any, **kwargs: Any) -> Any: ... def index(self, *args: Any, **kwargs: Any) -> Any: ... def call_list(self) -> Any: ... 
call: Any def create_autospec(spec: Any, spec_set: Any = ..., instance: Any = ..., _parent: Optional[Any] = ..., _name: Optional[Any] = ..., **kwargs: Any) -> Any: ... class _SpecState: spec: Any ids: Any spec_set: Any parent: Any instance: Any name: Any def __init__(self, spec: Any, spec_set: Any = ..., parent: Optional[Any] = ..., name: Optional[Any] = ..., ids: Optional[Any] = ..., instance: Any = ...) -> None: ... def mock_open(mock: Optional[Any] = ..., read_data: Any = ...) -> Any: ... PropertyMock = Any if sys.version_info >= (3, 7): def seal(mock: Any) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/unittest/result.pyi0000644€tŠÔÚ€2›s®0000000312113576752252027100 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Optional, Tuple, Type from types import TracebackType import unittest.case _SysExcInfoType = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] class TestResult: errors: List[Tuple[unittest.case.TestCase, str]] failures: List[Tuple[unittest.case.TestCase, str]] skipped: List[Tuple[unittest.case.TestCase, str]] expectedFailures: List[Tuple[unittest.case.TestCase, str]] unexpectedSuccesses: List[unittest.case.TestCase] shouldStop: bool testsRun: int buffer: bool failfast: bool tb_locals: bool def wasSuccessful(self) -> bool: ... def stop(self) -> None: ... def startTest(self, test: unittest.case.TestCase) -> None: ... def stopTest(self, test: unittest.case.TestCase) -> None: ... def startTestRun(self) -> None: ... def stopTestRun(self) -> None: ... def addError(self, test: unittest.case.TestCase, err: _SysExcInfoType) -> None: ... def addFailure(self, test: unittest.case.TestCase, err: _SysExcInfoType) -> None: ... def addSuccess(self, test: unittest.case.TestCase) -> None: ... def addSkip(self, test: unittest.case.TestCase, reason: str) -> None: ... def addExpectedFailure(self, test: unittest.case.TestCase, err: _SysExcInfoType) -> None: ... 
def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: ... def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, outcome: Optional[_SysExcInfoType]) -> None: ... mypy-0.761/mypy/typeshed/stdlib/3/unittest/runner.pyi0000644€tŠÔÚ€2›s®0000000227213576752252027101 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Callable, Optional, TextIO, Tuple, Type, Union import unittest.case import unittest.result import unittest.suite _ResultClassType = Callable[[TextIO, bool, int], unittest.result.TestResult] class TextTestResult(unittest.result.TestResult): separator1: str separator2: str def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ... def getDescription(self, test: unittest.case.TestCase) -> str: ... def printErrors(self) -> None: ... def printErrorList(self, flavour: str, errors: Tuple[unittest.case.TestCase, str]) -> None: ... class TestRunner: def run(self, test: Union[unittest.suite.TestSuite, unittest.case.TestCase]) -> unittest.result.TestResult: ... class TextTestRunner(TestRunner): def __init__( self, stream: Optional[TextIO] = ..., descriptions: bool = ..., verbosity: int = ..., failfast: bool = ..., buffer: bool = ..., resultclass: Optional[_ResultClassType] = ..., warnings: Optional[Type[Warning]] = ..., *, tb_locals: bool = ..., ) -> None: ... def _makeResult(self) -> unittest.result.TestResult: ... mypy-0.761/mypy/typeshed/stdlib/3/unittest/signals.pyi0000644€tŠÔÚ€2›s®0000000060413576752252027225 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, overload, TypeVar import unittest.result _F = TypeVar('_F', bound=Callable[..., Any]) def installHandler() -> None: ... def registerResult(result: unittest.result.TestResult) -> None: ... def removeResult(result: unittest.result.TestResult) -> bool: ... @overload def removeHandler() -> None: ... @overload def removeHandler(function: _F) -> _F: ... 
mypy-0.761/mypy/typeshed/stdlib/3/unittest/suite.pyi0000644€tŠÔÚ€2›s®0000000142713576752252026722 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, Iterator, List, Union import unittest.case import unittest.result _TestType = Union[unittest.case.TestCase, TestSuite] class BaseTestSuite(Iterable[_TestType]): _tests: List[unittest.case.TestCase] _removed_tests: int def __init__(self, tests: Iterable[_TestType] = ...) -> None: ... def __call__(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... def addTest(self, test: _TestType) -> None: ... def addTests(self, tests: Iterable[_TestType]) -> None: ... def run(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... def debug(self) -> None: ... def countTestCases(self) -> int: ... def __iter__(self) -> Iterator[_TestType]: ... class TestSuite(BaseTestSuite): ... mypy-0.761/mypy/typeshed/stdlib/3/unittest/util.pyi0000644€tŠÔÚ€2›s®0000000163113576752252026543 0ustar jukkaDROPBOX\Domain Users00000000000000# undocumented from typing import Any, List, Sequence, Tuple, TypeVar _T = TypeVar('_T') _Mismatch = Tuple[_T, _T, int] _MAX_LENGTH: int _PLACEHOLDER_LEN: int _MIN_BEGIN_LEN: int _MIN_END_LEN: int _MIN_COMMON_LEN: int _MIN_DIFF_LEN: int def _shorten(s: str, prefixlen: int, suffixlen: int) -> str: ... def _common_shorten_repr(*args: str) -> Tuple[str]: ... def safe_repr(obj: object, short: bool = ...) -> str: ... def strclass(cls: type) -> str: ... def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> Tuple[List[_T], List[_T]]: ... def unorderable_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> Tuple[List[_T], List[_T]]: ... def three_way_cmp(x: Any, y: Any) -> int: ... def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> List[_Mismatch[_T]]: ... def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> List[_Mismatch[_T]]: ... 
mypy-0.761/mypy/typeshed/stdlib/3/urllib/0000755€tŠÔÚ€2›s®0000000000013576752267024462 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/urllib/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252026724 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3/urllib/error.pyi0000644€tŠÔÚ€2›s®0000000047513576752252026336 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Union from urllib.response import addinfourl # Stubs for urllib.error class URLError(IOError): reason: Union[str, BaseException] class HTTPError(URLError, addinfourl): code: int def __init__(self, url, code, msg, hdrs, fp) -> None: ... class ContentTooShortError(URLError): ... mypy-0.761/mypy/typeshed/stdlib/3/urllib/parse.pyi0000644€tŠÔÚ€2›s®0000001105313576752252026311 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for urllib.parse from typing import Any, List, Dict, Tuple, AnyStr, Generic, overload, Sequence, Mapping, Union, NamedTuple, Callable, Optional _Str = Union[bytes, str] uses_relative: List[str] uses_netloc: List[str] uses_params: List[str] non_hierarchical: List[str] uses_query: List[str] uses_fragment: List[str] scheme_chars: str MAX_CACHE_SIZE: int class _ResultMixinBase(Generic[AnyStr]): def geturl(self) -> AnyStr: ... class _ResultMixinStr(_ResultMixinBase[str]): def encode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinBytes: ... class _ResultMixinBytes(_ResultMixinBase[str]): def decode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinStr: ... class _NetlocResultMixinBase(Generic[AnyStr]): username: Optional[AnyStr] password: Optional[AnyStr] hostname: Optional[AnyStr] port: Optional[int] class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): ... class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): ... 
class _DefragResultBase(Tuple[Any, ...], Generic[AnyStr]): url: AnyStr fragment: AnyStr class _SplitResultBase(NamedTuple): scheme: str netloc: str path: str query: str fragment: str class _SplitResultBytesBase(NamedTuple): scheme: bytes netloc: bytes path: bytes query: bytes fragment: bytes class _ParseResultBase(NamedTuple): scheme: str netloc: str path: str params: str query: str fragment: str class _ParseResultBytesBase(NamedTuple): scheme: bytes netloc: bytes path: bytes params: bytes query: bytes fragment: bytes # Structured result objects for string data class DefragResult(_DefragResultBase[str], _ResultMixinStr): ... class SplitResult(_SplitResultBase, _NetlocResultMixinStr): ... class ParseResult(_ParseResultBase, _NetlocResultMixinStr): ... # Structured result objects for bytes data class DefragResultBytes(_DefragResultBase[bytes], _ResultMixinBytes): ... class SplitResultBytes(_SplitResultBytesBase, _NetlocResultMixinBytes): ... class ParseResultBytes(_ParseResultBytesBase, _NetlocResultMixinBytes): ... def parse_qs(qs: Optional[AnyStr], keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> Dict[AnyStr, List[AnyStr]]: ... def parse_qsl(qs: Optional[AnyStr], keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> List[Tuple[AnyStr, AnyStr]]: ... @overload def quote(string: str, safe: _Str = ..., encoding: str = ..., errors: str = ...) -> str: ... @overload def quote(string: bytes, safe: _Str = ...) -> str: ... def quote_from_bytes(bs: bytes, safe: _Str = ...) -> str: ... @overload def quote_plus(string: str, safe: _Str = ..., encoding: str = ..., errors: str = ...) -> str: ... @overload def quote_plus(string: bytes, safe: _Str = ...) -> str: ... def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ... def unquote_to_bytes(string: _Str) -> bytes: ... def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: ... 
@overload def urldefrag(url: str) -> DefragResult: ... @overload def urldefrag(url: Optional[bytes]) -> DefragResultBytes: ... def urlencode( query: Union[Mapping[Any, Any], Mapping[Any, Sequence[Any]], Sequence[Tuple[Any, Any]], Sequence[Tuple[Any, Sequence[Any]]]], doseq: bool = ..., safe: AnyStr = ..., encoding: str = ..., errors: str = ..., quote_via: Callable[[str, AnyStr, str, str], str] = ..., ) -> str: ... def urljoin(base: AnyStr, url: Optional[AnyStr], allow_fragments: bool = ...) -> AnyStr: ... @overload def urlparse(url: str, scheme: Optional[str] = ..., allow_fragments: bool = ...) -> ParseResult: ... @overload def urlparse(url: Optional[bytes], scheme: Optional[bytes] = ..., allow_fragments: bool = ...) -> ParseResultBytes: ... @overload def urlsplit(url: str, scheme: Optional[str] = ..., allow_fragments: bool = ...) -> SplitResult: ... @overload def urlsplit(url: Optional[bytes], scheme: Optional[bytes] = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... @overload def urlunparse( components: Tuple[Optional[AnyStr], Optional[AnyStr], Optional[AnyStr], Optional[AnyStr], Optional[AnyStr], Optional[AnyStr]] ) -> AnyStr: ... @overload def urlunparse(components: Sequence[Optional[AnyStr]]) -> AnyStr: ... @overload def urlunsplit(components: Tuple[Optional[AnyStr], Optional[AnyStr], Optional[AnyStr], Optional[AnyStr], Optional[AnyStr]]) -> AnyStr: ... @overload def urlunsplit(components: Sequence[Optional[AnyStr]]) -> AnyStr: ... 
mypy-0.761/mypy/typeshed/stdlib/3/urllib/request.pyi0000644€tŠÔÚ€2›s®0000002342413576752252026674 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for urllib.request (Python 3.4) from typing import ( Any, Callable, ClassVar, Dict, List, IO, Mapping, Optional, Sequence, Tuple, TypeVar, Union, overload, NoReturn, ) from http.client import HTTPResponse, HTTPMessage, _HTTPConnectionProtocol from http.cookiejar import CookieJar from email.message import Message from urllib.response import addinfourl import ssl import sys import os _T = TypeVar('_T') _UrlopenRet = Any class _HTTPResponse(HTTPResponse): url: str msg: str # type: ignore def urlopen( url: Union[str, Request], data: Optional[bytes] = ..., timeout: Optional[float] = ..., *, cafile: Optional[str] = ..., capath: Optional[str] = ..., cadefault: bool = ..., context: Optional[ssl.SSLContext] = ... ) -> _UrlopenRet: ... def install_opener(opener: OpenerDirector) -> None: ... def build_opener( *handlers: Union[BaseHandler, Callable[[], BaseHandler]] ) -> OpenerDirector: ... def url2pathname(path: str) -> str: ... def pathname2url(path: str) -> str: ... def getproxies() -> Dict[str, str]: ... def parse_http_list(s: str) -> List[str]: ... def parse_keqv_list(l: List[str]) -> Dict[str, str]: ... def proxy_bypass(host: str) -> Any: ... # Undocumented class Request: @property def full_url(self) -> str: ... @full_url.setter def full_url(self, value: str) -> None: ... @full_url.deleter def full_url(self) -> None: ... type: str host: str origin_req_host: str selector: str data: Optional[bytes] headers: Dict[str, str] unverifiable: bool method: Optional[str] def __init__(self, url: str, data: Optional[bytes] = ..., headers: Dict[str, str] = ..., origin_req_host: Optional[str] = ..., unverifiable: bool = ..., method: Optional[str] = ...) -> None: ... def get_method(self) -> str: ... def add_header(self, key: str, val: str) -> None: ... def add_unredirected_header(self, key: str, val: str) -> None: ... 
def has_header(self, header_name: str) -> bool: ... def remove_header(self, header_name: str) -> None: ... def get_full_url(self) -> str: ... def set_proxy(self, host: str, type: str) -> None: ... @overload def get_header(self, header_name: str) -> Optional[str]: ... @overload def get_header(self, header_name: str, default: _T) -> Union[str, _T]: ... def header_items(self) -> List[Tuple[str, str]]: ... class OpenerDirector: addheaders: List[Tuple[str, str]] def add_handler(self, handler: BaseHandler) -> None: ... def open(self, url: Union[str, Request], data: Optional[bytes] = ..., timeout: Optional[float] = ...) -> _UrlopenRet: ... def error(self, proto: str, *args: Any) -> _UrlopenRet: ... class BaseHandler: handler_order: ClassVar[int] parent: OpenerDirector def add_parent(self, parent: OpenerDirector) -> None: ... def close(self) -> None: ... def http_error_nnn(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> _UrlopenRet: ... class HTTPDefaultErrorHandler(BaseHandler): ... class HTTPRedirectHandler(BaseHandler): def redirect_request(self, req: Request, fp: IO[str], code: int, msg: str, hdrs: Mapping[str, str], newurl: str) -> Optional[Request]: ... def http_error_301(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... def http_error_302(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... def http_error_303(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... def http_error_307(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class HTTPCookieProcessor(BaseHandler): cookiejar: CookieJar def __init__(self, cookiejar: Optional[CookieJar] = ...) -> None: ... class ProxyHandler(BaseHandler): def __init__(self, proxies: Optional[Dict[str, str]] = ...) -> None: ... 
# TODO add a method for every (common) proxy protocol class HTTPPasswordMgr: def add_password(self, realm: str, uri: Union[str, Sequence[str]], user: str, passwd: str) -> None: ... def find_user_password(self, realm: str, authuri: str) -> Tuple[Optional[str], Optional[str]]: ... class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): def add_password(self, realm: str, uri: Union[str, Sequence[str]], user: str, passwd: str) -> None: ... def find_user_password(self, realm: str, authuri: str) -> Tuple[Optional[str], Optional[str]]: ... class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm): def add_password( self, realm: str, uri: Union[str, Sequence[str]], user: str, passwd: str, is_authenticated: bool = ..., ) -> None: ... def update_authenticated(self, uri: Union[str, Sequence[str]], is_authenticated: bool = ...) -> None: ... def is_authenticated(self, authuri: str) -> bool: ... class AbstractBasicAuthHandler: def __init__(self, password_mgr: Optional[HTTPPasswordMgr] = ...) -> None: ... def http_error_auth_reqed(self, authreq: str, host: str, req: Request, headers: Mapping[str, str]) -> None: ... class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): def http_error_401(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): def http_error_407(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class AbstractDigestAuthHandler: def __init__(self, passwd: Optional[HTTPPasswordMgr] = ...) -> None: ... def reset_retry_count(self) -> None: ... def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: Mapping[str, str]) -> None: ... def retry_http_digest_auth(self, req: Request, auth: str) -> Optional[_UrlopenRet]: ... def get_cnonce(self, nonce: str) -> str: ... def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... 
def get_algorithm_impls(self, algorithm: str) -> Tuple[Callable[[str], str], Callable[[str, str], str]]: ... def get_entity_digest(self, data: Optional[bytes], chal: Mapping[str, str]) -> Optional[str]: ... class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): def http_error_401(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): def http_error_407(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class AbstractHTTPHandler(BaseHandler): # undocumented def __init__(self, debuglevel: int = ...) -> None: ... def set_http_debuglevel(self, level: int) -> None: ... def do_request_(self, request: Request) -> Request: ... def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: ... class HTTPHandler(AbstractHTTPHandler): def http_open(self, req: Request) -> HTTPResponse: ... def http_request(self, request: Request) -> Request: ... # undocumented class HTTPSHandler(AbstractHTTPHandler): def __init__(self, debuglevel: int = ..., context: Optional[ssl.SSLContext] = ..., check_hostname: Optional[bool] = ...) -> None: ... def https_open(self, req: Request) -> HTTPResponse: ... def https_request(self, request: Request) -> Request: ... # undocumented class FileHandler(BaseHandler): def file_open(self, req: Request) -> addinfourl: ... class DataHandler(BaseHandler): def data_open(self, req: Request) -> addinfourl: ... class FTPHandler(BaseHandler): def ftp_open(self, req: Request) -> addinfourl: ... class CacheFTPHandler(FTPHandler): def setTimeout(self, t: float) -> None: ... def setMaxConns(self, m: int) -> None: ... class UnknownHandler(BaseHandler): def unknown_open(self, req: Request) -> NoReturn: ... class HTTPErrorProcessor(BaseHandler): def http_response(self, request, response) -> _UrlopenRet: ... 
def https_response(self, request, response) -> _UrlopenRet: ... if sys.version_info >= (3, 6): def urlretrieve(url: str, filename: Optional[Union[str, os.PathLike[Any]]] = ..., reporthook: Optional[Callable[[int, int, int], None]] = ..., data: Optional[bytes] = ...) -> Tuple[str, HTTPMessage]: ... else: def urlretrieve(url: str, filename: Optional[str] = ..., reporthook: Optional[Callable[[int, int, int], None]] = ..., data: Optional[bytes] = ...) -> Tuple[str, HTTPMessage]: ... def urlcleanup() -> None: ... class URLopener: version: ClassVar[str] def __init__(self, proxies: Optional[Dict[str, str]] = ..., **x509: str) -> None: ... def open(self, fullurl: str, data: Optional[bytes] = ...) -> _UrlopenRet: ... def open_unknown(self, fullurl: str, data: Optional[bytes] = ...) -> _UrlopenRet: ... def retrieve(self, url: str, filename: Optional[str] = ..., reporthook: Optional[Callable[[int, int, int], None]] = ..., data: Optional[bytes] = ...) -> Tuple[str, Optional[Message]]: ... class FancyURLopener(URLopener): def prompt_user_passwd(self, host: str, realm: str) -> Tuple[str, str]: ... mypy-0.761/mypy/typeshed/stdlib/3/urllib/response.pyi0000644€tŠÔÚ€2›s®0000000322213576752252027034 0ustar jukkaDROPBOX\Domain Users00000000000000# private module, we only expose what's needed from typing import BinaryIO, Iterable, List, Mapping, Optional, Type, TypeVar from email.message import Message from types import TracebackType _AIUT = TypeVar("_AIUT", bound=addbase) class addbase(BinaryIO): def __enter__(self: _AIUT) -> _AIUT: ... def __exit__(self, type: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType]) -> None: ... def __iter__(self: _AIUT) -> _AIUT: ... def __next__(self) -> bytes: ... def close(self) -> None: ... # These methods don't actually exist, but the class inherits at runtime from # tempfile._TemporaryFileWrapper, which uses __getattr__ to delegate to the # underlying file object. 
To satisfy the BinaryIO interface, we pretend that this # class has these additional methods. def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def read(self, n: int = ...) -> bytes: ... def readable(self) -> bool: ... def readline(self, limit: int = ...) -> bytes: ... def readlines(self, hint: int = ...) -> List[bytes]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... def write(self, s: bytes) -> int: ... def writelines(self, lines: Iterable[bytes]) -> None: ... class addinfo(addbase): headers: Message def info(self) -> Message: ... class addinfourl(addinfo): url: str code: int def geturl(self) -> str: ... def getcode(self) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3/urllib/robotparser.pyi0000644€tŠÔÚ€2›s®0000000126713576752252027547 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for urllib.robotparser (Python 3.4) from typing import Iterable, NamedTuple, Optional import sys class _RequestRate(NamedTuple): requests: int seconds: int class RobotFileParser: def __init__(self, url: str = ...) -> None: ... def set_url(self, url: str) -> None: ... def read(self) -> None: ... def parse(self, lines: Iterable[str]) -> None: ... def can_fetch(self, user_agent: str, url: str) -> bool: ... def mtime(self) -> int: ... def modified(self) -> None: ... if sys.version_info >= (3, 6): def crawl_delay(self, useragent: str) -> Optional[str]: ... def request_rate(self, useragent: str) -> Optional[_RequestRate]: ... mypy-0.761/mypy/typeshed/stdlib/3/zipapp.pyi0000644€tŠÔÚ€2›s®0000000122713576752252025213 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for zipapp (Python 3.5+) from pathlib import Path import sys from typing import BinaryIO, Callable, Optional, Union _Path = Union[str, Path, BinaryIO] class ZipAppError(Exception): ... 
if sys.version_info >= (3, 7): def create_archive(source: _Path, target: Optional[_Path] = ..., interpreter: Optional[str] = ..., main: Optional[str] = ..., filter: Optional[Callable[[Path], bool]] = ..., compressed: bool = ...) -> None: ... else: def create_archive(source: _Path, target: Optional[_Path] = ..., interpreter: Optional[str] = ..., main: Optional[str] = ...) -> None: ... def get_interpreter(archive: _Path) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3.6/0000755€tŠÔÚ€2›s®0000000000013576752267023335 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3.6/secrets.pyi0000644€tŠÔÚ€2›s®0000000076513576752252025532 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for secrets (Python 3.6) from typing import Optional, Sequence, TypeVar from hmac import compare_digest as compare_digest from random import SystemRandom as SystemRandom _T = TypeVar('_T') def randbelow(exclusive_upper_bound: int) -> int: ... def randbits(k: int) -> int: ... def choice(seq: Sequence[_T]) -> _T: ... def token_bytes(nbytes: Optional[int] = ...) -> bytes: ... def token_hex(nbytes: Optional[int] = ...) -> str: ... def token_urlsafe(nbytes: Optional[int] = ...) -> str: ... mypy-0.761/mypy/typeshed/stdlib/3.7/0000755€tŠÔÚ€2›s®0000000000013576752267023336 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/stdlib/3.7/contextvars.pyi0000644€tŠÔÚ€2›s®0000000214313576752252026433 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, ClassVar, Generic, Iterator, Mapping, TypeVar _T = TypeVar('_T') class ContextVar(Generic[_T]): def __init__(self, name: str, *, default: _T = ...) -> None: ... @property def name(self) -> str: ... def get(self, default: _T = ...) -> _T: ... def set(self, value: _T) -> Token[_T]: ... def reset(self, token: Token[_T]) -> None: ... class Token(Generic[_T]): @property def var(self) -> ContextVar[_T]: ... @property def old_value(self) -> Any: ... 
# returns either _T or MISSING, but that's hard to express MISSING: ClassVar[object] def copy_context() -> Context: ... # It doesn't make sense to make this generic, because for most Contexts each ContextVar will have # a different value. class Context(Mapping[ContextVar[Any], Any]): def __init__(self) -> None: ... def run(self, callable: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: ... def copy(self) -> Context: ... def __getitem__(self, key: ContextVar[Any]) -> Any: ... def __iter__(self) -> Iterator[ContextVar[Any]]: ... def __len__(self) -> int: ... mypy-0.761/mypy/typeshed/stdlib/3.7/dataclasses.pyi0000644€tŠÔÚ€2›s®0000000450313576752252026344 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, Any, Callable, Dict, Generic, Iterable, List, Mapping, Optional, Tuple, Type, TypeVar, Union _T = TypeVar('_T') class _MISSING_TYPE: ... MISSING: _MISSING_TYPE @overload def asdict(obj: Any) -> Dict[str, Any]: ... @overload def asdict(obj: Any, *, dict_factory: Callable[[List[Tuple[str, Any]]], _T]) -> _T: ... @overload def astuple(obj: Any) -> Tuple[Any, ...]: ... @overload def astuple(obj: Any, *, tuple_factory: Callable[[List[Any]], _T]) -> _T: ... @overload def dataclass(_cls: Type[_T]) -> Type[_T]: ... @overload def dataclass(*, init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., unsafe_hash: bool = ..., frozen: bool = ...) -> Callable[[Type[_T]], Type[_T]]: ... class Field(Generic[_T]): name: str type: Type[_T] default: _T default_factory: Callable[[], _T] repr: bool hash: Optional[bool] init: bool compare: bool metadata: Mapping[str, Any] # NOTE: Actual return type is 'Field[_T]', but we want to help type checkers # to understand the magic that happens at runtime. @overload # `default` and `default_factory` are optional and mutually exclusive. def field(*, default: _T, init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> _T: ... 
@overload def field(*, default_factory: Callable[[], _T], init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> _T: ... @overload def field(*, init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> Any: ... def fields(class_or_instance: Any) -> Tuple[Field[Any], ...]: ... def is_dataclass(obj: Any) -> bool: ... class FrozenInstanceError(AttributeError): ... class InitVar(Generic[_T]): ... def make_dataclass(cls_name: str, fields: Iterable[Union[str, Tuple[str, type], Tuple[str, type, Field[Any]]]], *, bases: Tuple[type, ...] = ..., namespace: Optional[Dict[str, Any]] = ..., init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., hash: bool = ..., frozen: bool = ...): ... def replace(obj: _T, **changes: Any) -> _T: ... mypy-0.761/mypy/typeshed/tests/0000755€tŠÔÚ€2›s®0000000000013576752267022710 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/tests/check_consistent.py0000755€tŠÔÚ€2›s®0000000452513576752252026613 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 # For various reasons we need the contents of certain files to be # duplicated in two places, for example stdlib/2and3/builtins.pyi and # stdlib/2/__builtin__.pyi must be identical. In the past we used # symlinks but that doesn't always work on Windows, so now you must # manually update both files, and this test verifies that they are # identical. The list below indicates which sets of files must match. 
import os import filecmp consistent_files = [ {'stdlib/2and3/builtins.pyi', 'stdlib/2/__builtin__.pyi'}, {'stdlib/2/SocketServer.pyi', 'stdlib/3/socketserver.pyi'}, {'stdlib/2/os2emxpath.pyi', 'stdlib/2and3/posixpath.pyi', 'stdlib/2and3/ntpath.pyi', 'stdlib/2/os/path.pyi', 'stdlib/3/os/path.pyi'}, {'stdlib/3/enum.pyi', 'third_party/2/enum.pyi'}, {'stdlib/3/unittest/mock.pyi', 'third_party/2and3/mock.pyi'}, {'stdlib/3/concurrent/__init__.pyi', 'third_party/2/concurrent/__init__.pyi'}, {'stdlib/3/concurrent/futures/__init__.pyi', 'third_party/2/concurrent/futures/__init__.pyi'}, {'stdlib/3/concurrent/futures/_base.pyi', 'third_party/2/concurrent/futures/_base.pyi'}, {'stdlib/3/concurrent/futures/thread.pyi', 'third_party/2/concurrent/futures/thread.pyi'}, {'stdlib/3/concurrent/futures/process.pyi', 'third_party/2/concurrent/futures/process.pyi'}, {'stdlib/3.7/dataclasses.pyi', 'third_party/3/dataclasses.pyi'}, {'stdlib/3/pathlib.pyi', 'third_party/2/pathlib2.pyi'}, {'stdlib/3.7/contextvars.pyi', 'third_party/3/contextvars.pyi'}, {'stdlib/3/ipaddress.pyi', 'third_party/2/ipaddress.pyi'}, {'stdlib/2/copy_reg.pyi', 'stdlib/3/copyreg.pyi'}, ] def main(): files = [os.path.join(root, file) for root, dir, files in os.walk('.') for file in files] no_symlink = 'You cannot use symlinks in typeshed, please copy {} to its link.' for file in files: _, ext = os.path.splitext(file) if ext == '.pyi' and os.path.islink(file): raise ValueError(no_symlink.format(file)) for file1, *others in consistent_files: f1 = os.path.join(os.getcwd(), file1) for file2 in others: f2 = os.path.join(os.getcwd(), file2) if not filecmp.cmp(f1, f2): raise ValueError('File {f1} does not match file {f2}. 
Please copy it to {f2}'.format(f1=file1, f2=file2)) if __name__ == '__main__': main() mypy-0.761/mypy/typeshed/tests/mypy_selftest.py0000755€tŠÔÚ€2›s®0000000225413576752252026171 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Script to run mypy's test suite against this version of typeshed.""" from pathlib import Path import shutil import subprocess import sys import tempfile if __name__ == '__main__': with tempfile.TemporaryDirectory() as tempdir: dirpath = Path(tempdir) subprocess.run(['python2.7', '-m', 'pip', 'install', '--user', 'typing'], check=True) subprocess.run(['git', 'clone', '--depth', '1', 'git://github.com/python/mypy', str(dirpath / 'mypy')], check=True) subprocess.run([sys.executable, '-m', 'pip', 'install', '-U', '-r', str(dirpath / 'mypy/test-requirements.txt')], check=True) shutil.copytree('stdlib', str(dirpath / 'mypy/mypy/typeshed/stdlib')) shutil.copytree('third_party', str(dirpath / 'mypy/mypy/typeshed/third_party')) try: subprocess.run(['pytest', '-n12'], cwd=str(dirpath / 'mypy'), check=True) except subprocess.CalledProcessError as e: print('mypy tests failed', file=sys.stderr) sys.exit(e.returncode) else: print('mypy tests succeeded', file=sys.stderr) sys.exit(0) mypy-0.761/mypy/typeshed/tests/mypy_test.py0000755€tŠÔÚ€2›s®0000001317713576752252025325 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Test runner for typeshed. Depends on mypy being installed. Approach: 1. Parse sys.argv 2. Compute appropriate arguments for mypy 3. Stuff those arguments into sys.argv 4. Run mypy.main('') 5. Repeat steps 2-4 for other mypy runs (e.g. --py2) """ import os import re import sys import argparse parser = argparse.ArgumentParser(description="Test runner for typeshed. 
" "Patterns are unanchored regexps on the full path.") parser.add_argument('-v', '--verbose', action='count', default=0, help="More output") parser.add_argument('-n', '--dry-run', action='store_true', help="Don't actually run mypy") parser.add_argument('-x', '--exclude', type=str, nargs='*', help="Exclude pattern") parser.add_argument('-p', '--python-version', type=str, nargs='*', help="These versions only (major[.minor])") parser.add_argument('--platform', help="Run mypy for a certain OS platform (defaults to sys.platform)") parser.add_argument('--warn-unused-ignores', action='store_true', help="Run mypy with --warn-unused-ignores " "(hint: only get rid of warnings that are " "unused for all platforms and Python versions)") parser.add_argument('filter', type=str, nargs='*', help="Include pattern (default all)") def log(args, *varargs): if args.verbose >= 2: print(*varargs) def match(fn, args, blacklist): if blacklist.match(fn): log(args, fn, 'exluded by blacklist') return False if not args.filter and not args.exclude: log(args, fn, 'accept by default') return True if args.exclude: for f in args.exclude: if re.search(f, fn): log(args, fn, 'excluded by pattern', f) return False if args.filter: for f in args.filter: if re.search(f, fn): log(args, fn, 'accepted by pattern', f) return True if args.filter: log(args, fn, 'rejected (no pattern matches)') return False log(args, fn, 'accepted (no exclude pattern matches)') return True def libpath(major, minor): versions = ['%d.%d' % (major, minor) for minor in reversed(range(minor + 1))] versions.append(str(major)) versions.append('2and3') paths = [] for v in versions: for top in ['stdlib', 'third_party']: p = os.path.join(top, v) if os.path.isdir(p): paths.append(p) return paths def main(): args = parser.parse_args() with open(os.path.join(os.path.dirname(__file__), "mypy_blacklist.txt")) as f: blacklist = re.compile("(%s)$" % "|".join( re.findall(r"^\s*([^\s#]+)\s*(?:#.*)?$", f.read(), flags=re.M))) try: from mypy.main 
import main as mypy_main except ImportError: print("Cannot import mypy. Did you install it?") sys.exit(1) versions = [(3, 8), (3, 7), (3, 6), (3, 5), (2, 7)] if args.python_version: versions = [v for v in versions if any(('%d.%d' % v).startswith(av) for av in args.python_version)] if not versions: print("--- no versions selected ---") sys.exit(1) code = 0 runs = 0 for major, minor in versions: roots = libpath(major, minor) files = [] seen = {'__builtin__', 'builtins', 'typing'} # Always ignore these. for root in roots: names = os.listdir(root) for name in names: full = os.path.join(root, name) mod, ext = os.path.splitext(name) if mod in seen or mod.startswith('.'): continue if ext in ['.pyi', '.py']: if match(full, args, blacklist): seen.add(mod) files.append(full) elif (os.path.isfile(os.path.join(full, '__init__.pyi')) or os.path.isfile(os.path.join(full, '__init__.py'))): for r, ds, fs in os.walk(full): ds.sort() fs.sort() for f in fs: m, x = os.path.splitext(f) if x in ['.pyi', '.py']: fn = os.path.join(r, f) if match(fn, args, blacklist): seen.add(mod) files.append(fn) if files: runs += 1 flags = ['--python-version', '%d.%d' % (major, minor)] flags.append('--strict-optional') flags.append('--no-site-packages') flags.append('--show-traceback') flags.append('--no-implicit-optional') flags.append('--disallow-any-generics') if args.warn_unused_ignores: flags.append('--warn-unused-ignores') if args.platform: flags.extend(['--platform', args.platform]) sys.argv = ['mypy'] + flags + files if args.verbose: print("running", ' '.join(sys.argv)) else: print("running mypy", ' '.join(flags), "# with", len(files), "files") try: if not args.dry_run: mypy_main('', sys.stdout, sys.stderr) except SystemExit as err: code = max(code, err.code) if code: print("--- exit status", code, "---") sys.exit(code) if not runs: print("--- nothing to do; exit 1 ---") sys.exit(1) if __name__ == '__main__': main() 
def main() -> None:
    """Entry point: parse options, validate the environment, then test all stubs."""
    options = create_parser().parse_args()
    root = options.typeshed_location or os.getcwd()
    subdirs = [os.path.join(root, name) for name in TYPESHED_SUBDIRS]
    # Fail fast before collecting files: the subdirs and both interpreters
    # must be usable.
    check_subdirs_discoverable(subdirs)
    check_python_exes_runnable(python27_exe_arg=options.python27_exe,
                               python36_exe_arg=options.python36_exe)
    targets = determine_files_to_test(typeshed_location=root, subdir_paths=subdirs)
    run_all_tests(
        files_to_test=targets,
        typeshed_location=root,
        python27_exe=options.python27_exe,
        python36_exe=options.python36_exe,
        print_stderr=options.print_stderr,
        dry_run=options.dry_run,
    )
class PathMatcher:
    """Matches paths against an alternation of regex patterns anchored at the end."""

    def __init__(self, patterns: Sequence[str]) -> None:
        # With no patterns there is nothing to match; search() then always
        # reports a miss.
        if patterns:
            self.matcher = re.compile(r"({})$".format("|".join(patterns)))
        else:
            self.matcher = None

    def search(self, path: str) -> Optional[Match[str]]:
        """Return the match at the end of *path*, or None when nothing matches."""
        if self.matcher is None:
            return None
        return self.matcher.search(path)


def load_blacklist(typeshed_location: str) -> List[str]:
    """Read tests/pytype_blacklist.txt and return its non-comment entries.

    Each line holds one path; '#' starts a comment and blank lines are
    ignored.
    """
    path = os.path.join(typeshed_location, "tests", "pytype_blacklist.txt")
    entry_re = re.compile(r"^\s*([^\s#]+)\s*(?:#.*)?$")
    with open(path) as fh:
        candidates = (entry_re.match(line) for line in fh)
        return [m.group(1) for m in candidates if m]
".".join(_get_relative(filename).split(os.path.sep)[2:]).replace(".pyi", "").replace(".__init__", "") def can_run(exe: str, *, args: List[str]) -> bool: try: subprocess.run([exe] + args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except OSError: return False else: return True def _is_version(path: str, version: str) -> bool: return any("{}{}{}".format(d, os.path.sep, version) in path for d in TYPESHED_SUBDIRS) def check_subdirs_discoverable(subdir_paths: List[str]) -> None: for p in subdir_paths: if not os.path.isdir(p): raise SystemExit("Cannot find typeshed subdir at {} (specify parent dir via --typeshed-location)".format(p)) def check_python_exes_runnable(*, python27_exe_arg: str, python36_exe_arg: str) -> None: for exe, version_str in zip([python27_exe_arg, python36_exe_arg], ["27", "36"]): if can_run(exe, args=["--version"]): continue formatted_version = ".".join(list(version_str)) script_arg = "--python{}-exe".format(version_str) raise SystemExit( "Cannot run Python {version}. (point to a valid executable via {arg})".format( version=formatted_version, arg=script_arg ) ) def determine_files_to_test(*, typeshed_location: str, subdir_paths: Sequence[str]) -> List[Tuple[str, int]]: """Determine all files to test, checking if it's in the blacklist and which Python versions to use. 
def run_all_tests(
    *, files_to_test: Sequence[Tuple[str, int]], typeshed_location: str,
    python27_exe: str, python36_exe: str, print_stderr: bool, dry_run: bool
) -> None:
    """Run pytype over every (file, python-version) pair and report a summary.

    Raises SystemExit at the end when any stub produced an error.
    """
    bad = []       # (relative path, last stacktrace line) per failing stub
    errors = 0
    total_tests = len(files_to_test)
    print("Testing files with pytype...")
    for i, (f, version) in enumerate(files_to_test):
        # On --dry-run pytype is not invoked and stderr stays None.
        stderr = (
            run_pytype(
                filename=f,
                python_version="2.7" if version == 2 else "3.6",
                python_exe=python27_exe if version == 2 else python36_exe,
                typeshed_location=typeshed_location,
            )
            if not dry_run
            else None
        )
        if stderr:
            if print_stderr:
                print(stderr)
            errors += 1
            # Keep only the final traceback line for the per-file summary.
            stacktrace_final_line = stderr.rstrip().rsplit("\n", 1)[-1]
            bad.append((_get_relative(f), stacktrace_final_line))

        runs = i + 1
        # Progress line every 25 files.
        if runs % 25 == 0:
            print("  {:3d}/{:d} with {:3d} errors".format(runs, total_tests, errors))

    print("Ran pytype with {:d} pyis, got {:d} errors.".format(total_tests, errors))
    for f, err in bad:
        print("{}: {}".format(f, err))
    if errors:
        raise SystemExit("\nRun again with --print-stderr to get the full stacktrace.")


if __name__ == "__main__":
    main()
# Stubs for OpenSSL.crypto (Python 2)
#
# Type stubs only: every body is `...`; no runtime behavior is defined here.
# `unicode` below is the Python 2 text type.

from typing import Any, Callable, Iterable, List, Optional, Set, Text, Tuple, Union
from cryptography.hazmat.primitives.asymmetric import dsa, rsa
from datetime import datetime

# Serialization formats and key-type constants.
FILETYPE_PEM: int
FILETYPE_ASN1: int
FILETYPE_TEXT: int
TYPE_RSA: int
TYPE_DSA: int

class Error(Exception): ...

# Any of the cryptography-package RSA/DSA key objects.
_Key = Union[rsa.RSAPublicKey, rsa.RSAPrivateKey, dsa.DSAPublicKey, dsa.DSAPrivateKey]

class PKey:
    def __init__(self) -> None: ...
    def to_cryptography_key(self) -> _Key: ...
    @classmethod
    def from_cryptography_key(cls, crypto_key: _Key): ...
    def generate_key(self, type: int, bits: int) -> None: ...
    def check(self) -> bool: ...
    def type(self) -> int: ...
    def bits(self) -> int: ...

class _EllipticCurve:
    name: Text

def get_elliptic_curves() -> Set[_EllipticCurve]: ...
def get_elliptic_curve(name: str) -> _EllipticCurve: ...

class X509Name:
    def __init__(self, name: X509Name) -> None: ...
    # Long-form X.509 name attributes followed by their short-form aliases.
    countryName: Union[str, unicode]
    stateOrProvinceName: Union[str, unicode]
    localityName: Union[str, unicode]
    organizationName: Union[str, unicode]
    organizationalUnitName: Union[str, unicode]
    commonName: Union[str, unicode]
    emailAddress: Union[str, unicode]
    C: Union[str, unicode]
    ST: Union[str, unicode]
    L: Union[str, unicode]
    O: Union[str, unicode]
    OU: Union[str, unicode]
    CN: Union[str, unicode]
    def hash(self) -> int: ...
    def der(self) -> bytes: ...
    def get_components(self) -> List[Tuple[str, str]]: ...

class X509Extension:
    def __init__(self, type_name: bytes, critical: bool, value: bytes,
                 subject: Optional[X509] = ..., issuer: Optional[X509] = ...) -> None: ...
    def get_critical(self) -> bool: ...
    def get_short_name(self) -> str: ...
    def get_data(self) -> str: ...

class X509Req:
    def __init__(self) -> None: ...
    def set_pubkey(self, pkey: PKey) -> None: ...
    def get_pubkey(self) -> PKey: ...
    def set_version(self, version: int) -> None: ...
    def get_version(self) -> int: ...
    def get_subject(self) -> X509Name: ...
    def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ...
    def get_extensions(self) -> List[X509Extension]: ...
    def sign(self, pkey: PKey, digest: str) -> None: ...
    def verify(self, pkey: PKey) -> bool: ...

class X509:
    def __init__(self) -> None: ...
    def set_version(self, version: int) -> None: ...
    def get_version(self) -> int: ...
    def get_pubkey(self) -> PKey: ...
    def set_pubkey(self, pkey: PKey) -> None: ...
    def sign(self, pkey: PKey, digest: str) -> None: ...
    def get_signature_algorithm(self) -> str: ...
    def digest(self, digest_name: str) -> str: ...
    def subject_name_hash(self) -> str: ...
    def set_serial_number(self, serial: int) -> None: ...
    def get_serial_number(self) -> int: ...
    def gmtime_adj_notAfter(self, amount: int) -> None: ...
    def gmtime_adj_notBefore(self, amount: int) -> None: ...
    def has_expired(self) -> bool: ...
    def get_notBefore(self) -> str: ...
    def set_notBefore(self, when: str) -> None: ...
    def get_notAfter(self) -> str: ...
    def set_notAfter(self, when: str) -> None: ...
    def get_issuer(self) -> X509Name: ...
    def set_issuer(self, issuer: X509Name) -> None: ...
    def get_subject(self) -> X509Name: ...
    def set_subject(self, subject: X509Name) -> None: ...
    def get_extension_count(self) -> int: ...
    def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ...
    def get_extension(self, index: int) -> X509Extension: ...

class X509StoreFlags:
    # Bit-flag constants accepted by X509Store.set_flags().
    CRL_CHECK: int
    CRL_CHECK_ALL: int
    IGNORE_CRITICAL: int
    X509_STRICT: int
    ALLOW_PROXY_CERTS: int
    POLICY_CHECK: int
    EXPLICIT_POLICY: int
    INHIBIT_MAP: int
    NOTIFY_POLICY: int
    CHECK_SS_SIGNATURE: int
    CB_ISSUER_CHECK: int

class X509Store:
    def __init__(self) -> None: ...
    def add_cert(self, cert: X509) -> None: ...
    def add_crl(self, crl: CRL) -> None: ...
    def set_flags(self, flags: int) -> None: ...
    def set_time(self, vfy_time: datetime) -> None: ...

class X509StoreContextError(Exception):
    certificate: X509
    def __init__(self, message: str, certificate: X509) -> None: ...

class X509StoreContext:
    def __init__(self, store: X509Store, certificate: X509) -> None: ...
    def set_store(self, store: X509Store) -> None: ...
    def verify_certificate(self) -> None: ...

def load_certificate(type: int, buffer: Union[str, unicode]) -> X509: ...
def dump_certificate(type: int, cert: X509) -> bytes: ...
def dump_publickey(type: int, pkey: PKey) -> bytes: ...
def dump_privatekey(type: int, pkey: PKey, cipher: Optional[str] = ...,
                    passphrase: Optional[Union[str, Callable[[int], int]]] = ...) -> bytes: ...

class Revoked:
    def __init__(self) -> None: ...
    def set_serial(self, hex_str: str) -> None: ...
    def get_serial(self) -> str: ...
    def set_reason(self, reason: str) -> None: ...
    def get_reason(self) -> str: ...
    def all_reasons(self) -> List[str]: ...
    def set_rev_date(self, when: str) -> None: ...
    def get_rev_date(self) -> str: ...

class CRL:
    def __init__(self) -> None: ...
    def get_revoked(self) -> Tuple[Revoked, ...]: ...
    def add_revoked(self, revoked: Revoked) -> None: ...
    def get_issuer(self) -> X509Name: ...
    def set_version(self, version: int) -> None: ...
    def set_lastUpdate(self, when: str) -> None: ...
    def set_nextUpdate(self, when: str) -> None: ...
    def sign(self, issuer_cert: X509, issuer_key: PKey, digest: str) -> None: ...
    def export(self, cert: X509, key: PKey, type: int = ..., days: int = ...,
               digest: str = ...) -> bytes: ...

class PKCS7:
    def type_is_signed(self) -> bool: ...
    def type_is_enveloped(self) -> bool: ...
    def type_is_signedAndEnveloped(self) -> bool: ...
    def type_is_data(self) -> bool: ...
    def get_type_name(self) -> str: ...

class PKCS12:
    def __init__(self) -> None: ...
    def get_certificate(self) -> X509: ...
    def set_certificate(self, cert: X509) -> None: ...
    def get_privatekey(self) -> PKey: ...
    def set_privatekey(self, pkey: PKey) -> None: ...
    def get_ca_certificates(self) -> Tuple[X509, ...]: ...
    def set_ca_certificates(self, cacerts: Iterable[X509]) -> None: ...
    def set_friendlyname(self, name: bytes) -> None: ...
    def get_friendlyname(self) -> bytes: ...
    def export(self, passphrase: Optional[str] = ..., iter: int = ..., maciter: int = ...): ...

class NetscapeSPKI:
    def __init__(self) -> None: ...
    def sign(self, pkey: PKey, digest: str) -> None: ...
    def verify(self, key: PKey) -> bool: ...
    def b64_encode(self) -> str: ...
    def get_pubkey(self) -> PKey: ...
    def set_pubkey(self, pkey: PKey) -> None: ...

def load_publickey(type: int, buffer: Union[str, unicode]) -> PKey: ...
def load_privatekey(type: int, buffer: bytes,
                    passphrase: Optional[Union[str, Callable[[int], int]]] = ...): ...
def dump_certificate_request(type: int, req: X509Req): ...
def load_certificate_request(type, buffer: Union[str, unicode]) -> X509Req: ...
def sign(pkey: PKey, data: Union[str, unicode], digest: str) -> bytes: ...
def verify(cert: X509, signature: bytes, data: Union[str, unicode], digest: str) -> None: ...
def dump_crl(type: int, crl: CRL) -> bytes: ...
def load_crl(type: int, buffer: Union[str, unicode]) -> CRL: ...
def load_pkcs7_data(type: int, buffer: Union[str, unicode]) -> PKCS7: ...
def load_pkcs12(buffer: Union[str, unicode],
                passphrase: Optional[Union[str, Callable[[int], int]]] = ...) -> PKCS12: ...
# Stubs for concurrent.futures._base (Python 2 backport).
# Mirrors the Python 3 stdlib API; version branches cover the differences.
import threading
from logging import Logger
from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple, Optional, Set, List
from types import TracebackType
import sys

# Constants for wait()'s return_when argument and internal future states.
FIRST_COMPLETED: str
FIRST_EXCEPTION: str
ALL_COMPLETED: str
PENDING: str
RUNNING: str
CANCELLED: str
CANCELLED_AND_NOTIFIED: str
FINISHED: str
LOGGER: Logger

class Error(Exception): ...
class CancelledError(Error): ...
class TimeoutError(Error): ...

if sys.version_info >= (3, 7):
    class BrokenExecutor(RuntimeError): ...

_T = TypeVar('_T')

class Future(Generic[_T]):
    def __init__(self) -> None: ...
    def cancel(self) -> bool: ...
    def cancelled(self) -> bool: ...
    def running(self) -> bool: ...
    def done(self) -> bool: ...
    def add_done_callback(self, fn: Callable[[Future[_T]], Any]) -> None: ...
    def result(self, timeout: Optional[float] = ...) -> _T: ...
    def set_running_or_notify_cancel(self) -> bool: ...
    def set_result(self, result: _T) -> None: ...

    if sys.version_info >= (3,):
        def exception(self, timeout: Optional[float] = ...) -> Optional[BaseException]: ...
        def set_exception(self, exception: Optional[BaseException]) -> None: ...
    else:
        # Python 2 exposes the (exception, traceback) pair separately.
        def exception(self, timeout: Optional[float] = ...) -> Any: ...
        def exception_info(self, timeout: Optional[float] = ...) -> Tuple[Any, Optional[TracebackType]]: ...
        def set_exception(self, exception: Any) -> None: ...
        def set_exception_info(self, exception: Any, traceback: Optional[TracebackType]) -> None: ...

class Executor:
    def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
    if sys.version_info >= (3, 5):
        # chunksize was added in 3.5.
        def map(self, func: Callable[..., _T], *iterables: Iterable[Any],
                timeout: Optional[float] = ..., chunksize: int = ...) -> Iterator[_T]: ...
    else:
        def map(self, func: Callable[..., _T], *iterables: Iterable[Any],
                timeout: Optional[float] = ...,) -> Iterator[_T]: ...
    def shutdown(self, wait: bool = ...) -> None: ...
    def __enter__(self: _T) -> _T: ...
    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Optional[bool]: ...

def as_completed(fs: Iterable[Future[_T]], timeout: Optional[float] = ...) -> Iterator[Future[_T]]: ...

def wait(fs: Iterable[Future[_T]], timeout: Optional[float] = ...,
         return_when: str = ...) -> Tuple[Set[Future[_T]], Set[Future[_T]]]: ...

# Internal waiter helpers (undocumented implementation detail of wait()).
class _Waiter:
    event: threading.Event
    finished_futures: List[Future[Any]]
    def __init__(self) -> None: ...
    def add_result(self, future: Future[Any]) -> None: ...
    def add_exception(self, future: Future[Any]) -> None: ...
    def add_cancelled(self, future: Future[Any]) -> None: ...

class _AsCompletedWaiter(_Waiter):
    lock: threading.Lock
    def __init__(self) -> None: ...
    def add_result(self, future: Future[Any]) -> None: ...
    def add_exception(self, future: Future[Any]) -> None: ...
    def add_cancelled(self, future: Future[Any]) -> None: ...

class _FirstCompletedWaiter(_Waiter):
    def add_result(self, future: Future[Any]) -> None: ...
    def add_exception(self, future: Future[Any]) -> None: ...
    def add_cancelled(self, future: Future[Any]) -> None: ...

class _AllCompletedWaiter(_Waiter):
    num_pending_calls: int
    stop_on_exception: bool
    lock: threading.Lock
    def __init__(self, num_pending_calls: int, stop_on_exception: bool) -> None: ...
    def add_result(self, future: Future[Any]) -> None: ...
    def add_exception(self, future: Future[Any]) -> None: ...
    def add_cancelled(self, future: Future[Any]) -> None: ...

class _AcquireFutures:
    futures: Iterable[Future[Any]]
    def __init__(self, futures: Iterable[Future[Any]]) -> None: ...
    def __enter__(self) -> None: ...
    def __exit__(self, *args: Any) -> None: ...


# --- Stubs for concurrent.futures.process (Python 2 backport) ---
from typing import Any, Callable, Optional, Tuple
from ._base import Executor
import sys

EXTRA_QUEUED_CALLS: Any

if sys.version_info >= (3,):
    class BrokenProcessPool(RuntimeError): ...

if sys.version_info >= (3, 7):
    from multiprocessing.context import BaseContext
    class ProcessPoolExecutor(Executor):
        # mp_context/initializer/initargs were added in 3.7.
        def __init__(self, max_workers: Optional[int] = ...,
                     mp_context: Optional[BaseContext] = ...,
                     initializer: Optional[Callable[..., None]] = ...,
                     initargs: Tuple[Any, ...] = ...) -> None: ...
else:
    class ProcessPoolExecutor(Executor):
        def __init__(self, max_workers: Optional[int] = ...) -> None: ...
# Internal unit of work queued by ThreadPoolExecutor (undocumented).
class _WorkItem(Generic[_S]):
    future: Future[_S]
    fn: Callable[..., _S]
    args: Iterable[Any]
    kwargs: Mapping[str, Any]
    def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any],
                 kwargs: Mapping[str, Any]) -> None: ...
    def run(self) -> None: ...


# --- Stubs for the enum34 backport (third_party/2/enum.pyi) ---
# NB: third_party/2/enum.pyi and stdlib/3.4/enum.pyi must remain consistent!
import sys
from typing import Any, Dict, Iterator, List, Mapping, Type, TypeVar, Union
from abc import ABCMeta

_T = TypeVar('_T')
_S = TypeVar('_S', bound=Type[Enum])

# Note: EnumMeta actually subclasses type directly, not ABCMeta.
# This is a temporary workaround to allow multiple creation of enums with builtins
# such as str as mixins, which due to the handling of ABCs of builtin types, cause
# spurious inconsistent metaclass structure. See #1595.
# Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself
class EnumMeta(ABCMeta):
    def __iter__(self: Type[_T]) -> Iterator[_T]: ...
    def __reversed__(self: Type[_T]) -> Iterator[_T]: ...
    def __contains__(self: Type[_T], member: object) -> bool: ...
    def __getitem__(self: Type[_T], name: str) -> _T: ...
    @property
    def __members__(self: Type[_T]) -> Mapping[str, _T]: ...
    def __len__(self) -> int: ...
class IntEnum(int, Enum):
    value: int

def unique(enumeration: _S) -> _S: ...

if sys.version_info >= (3, 6):
    _auto_null: Any
    # subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto()
    class auto(IntFlag):
        value: Any

class Flag(Enum):
    def __contains__(self: _T, other: _T) -> bool: ...
    def __repr__(self) -> str: ...
    def __str__(self) -> str: ...
    def __bool__(self) -> bool: ...
    # Bitwise combination operators return the same Flag subtype.
    def __or__(self: _T, other: _T) -> _T: ...
    def __and__(self: _T, other: _T) -> _T: ...
    def __xor__(self: _T, other: _T) -> _T: ...
    def __invert__(self: _T) -> _T: ...

class IntFlag(int, Flag):
    # Unlike Flag, IntFlag also accepts plain ints on either side.
    def __or__(self: _T, other: Union[int, _T]) -> _T: ...
    def __and__(self: _T, other: Union[int, _T]) -> _T: ...
    def __xor__(self: _T, other: Union[int, _T]) -> _T: ...
    __ror__ = __or__
    __rand__ = __and__
    __rxor__ = __xor__
# Generated thrift client stub for the fb303 FacebookService.
# Each RPC `x` has a blocking wrapper x(), a request half send_x(),
# and a response half recv_x().  reinitialize/shutdown have no recv_
# counterpart — presumably oneway calls; confirm against the thrift IDL.
class Client(Iface):
    def __init__(self, iprot, oprot=...) -> None: ...
    def getName(self): ...
    def send_getName(self): ...
    def recv_getName(self): ...
    def getVersion(self): ...
    def send_getVersion(self): ...
    def recv_getVersion(self): ...
    def getStatus(self): ...
    def send_getStatus(self): ...
    def recv_getStatus(self): ...
    def getStatusDetails(self): ...
    def send_getStatusDetails(self): ...
    def recv_getStatusDetails(self): ...
    def getCounters(self): ...
    def send_getCounters(self): ...
    def recv_getCounters(self): ...
    def getCounter(self, key): ...
    def send_getCounter(self, key): ...
    def recv_getCounter(self): ...
    def setOption(self, key, value): ...
    def send_setOption(self, key, value): ...
    def recv_setOption(self): ...
    def getOption(self, key): ...
    def send_getOption(self, key): ...
    def recv_getOption(self): ...
    def getOptions(self): ...
    def send_getOptions(self): ...
    def recv_getOptions(self): ...
    def getCpuProfile(self, profileDurationInSec): ...
    def send_getCpuProfile(self, profileDurationInSec): ...
    def recv_getCpuProfile(self): ...
    def aliveSince(self): ...
    def send_aliveSince(self): ...
    def recv_aliveSince(self): ...
    def reinitialize(self): ...
    def send_reinitialize(self): ...
    def shutdown(self): ...
    def send_shutdown(self): ...
# Generated thrift argument/result structs.  Every *_args class carries the
# request fields; every *_result class carries an optional `success` payload.
# All follow the same serialization protocol: read/write/validate plus
# equality operators.
class getName_args:
    thrift_spec: Any
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getName_result:
    thrift_spec: Any
    success: Any
    def __init__(self, success=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getVersion_args:
    thrift_spec: Any
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getVersion_result:
    thrift_spec: Any
    success: Any
    def __init__(self, success=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getStatus_args:
    thrift_spec: Any
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getStatus_result:
    thrift_spec: Any
    success: Any
    def __init__(self, success=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getStatusDetails_args:
    thrift_spec: Any
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getStatusDetails_result:
    thrift_spec: Any
    success: Any
    def __init__(self, success=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getCounters_args:
    thrift_spec: Any
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
# Continuation of the generated thrift structs (see pattern note above the
# getName_args family).  Request structs with parameters expose them as
# attributes with matching __init__ keywords.
class getCounters_result:
    thrift_spec: Any
    success: Any
    def __init__(self, success=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getCounter_args:
    thrift_spec: Any
    key: Any
    def __init__(self, key=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getCounter_result:
    thrift_spec: Any
    success: Any
    def __init__(self, success=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class setOption_args:
    thrift_spec: Any
    key: Any
    value: Any
    def __init__(self, key=..., value=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

# setOption returns void, so its result struct has no `success` field.
class setOption_result:
    thrift_spec: Any
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getOption_args:
    thrift_spec: Any
    key: Any
    def __init__(self, key=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getOption_result:
    thrift_spec: Any
    success: Any
    def __init__(self, success=...) -> None: ...
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class getOptions_args:
    thrift_spec: Any
    def read(self, iprot): ...
    def write(self, oprot): ...
    def validate(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
def __ne__(self, other): ... class getCpuProfile_args: thrift_spec: Any profileDurationInSec: Any def __init__(self, profileDurationInSec=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getCpuProfile_result: thrift_spec: Any success: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class aliveSince_args: thrift_spec: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class aliveSince_result: thrift_spec: Any success: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class reinitialize_args: thrift_spec: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class shutdown_args: thrift_spec: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... mypy-0.761/mypy/typeshed/third_party/2/fb303/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252027317 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/gflags.pyi0000644€tŠÔÚ€2›s®0000002451113576752252026223 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Iterable, IO, List, Optional, Sequence, Union from types import ModuleType class Error(Exception): ... FlagsError = Error class DuplicateFlag(FlagsError): ... class CantOpenFlagFileError(FlagsError): ... class DuplicateFlagCannotPropagateNoneToSwig(DuplicateFlag): ... 
class DuplicateFlagError(DuplicateFlag): def __init__(self, flagname: str, flag_values: FlagValues, other_flag_values: FlagValues = ...) -> None: ... class IllegalFlagValueError(FlagsError): ... IllegalFlagValue = IllegalFlagValueError class UnrecognizedFlag(FlagsError): ... class UnrecognizedFlagError(UnrecognizedFlag): def __init__(self, flagname: str, flagvalue: str = ...) -> None: ... def get_help_width() -> int: ... GetHelpWidth = get_help_width def CutCommonSpacePrefix(text) -> str: ... def text_wrap(text: str, length: int = ..., indent: str = ..., firstline_indent: str = ..., tabs: str = ...) -> str: ... TextWrap = text_wrap def doc_to_help(doc: str) -> str: ... DocToHelp = doc_to_help class FlagValues: def __init__(self) -> None: ... def UseGnuGetOpt(self, use_gnu_getopt: bool = ...) -> None: ... def is_gnu_getopt(self) -> bool: ... IsGnuGetOpt = is_gnu_getopt # TODO dict type def FlagDict(self) -> Dict[Any, Any]: ... def flags_by_module_dict(self) -> Dict[str, List[Flag]]: ... FlagsByModuleDict = flags_by_module_dict def flags_by_module_id_dict(self) -> Dict[int, List[Flag]]: ... FlagsByModuleIdDict = flags_by_module_id_dict def key_flags_by_module_dict(self) -> Dict[str, List[Flag]]: ... KeyFlagsByModuleDict = key_flags_by_module_dict def find_module_defining_flag(self, flagname: str, default: str = ...) -> str: ... FindModuleDefiningFlag = find_module_defining_flag def find_module_id_defining_flag(self, flagname: str, default: int = ...) -> int: ... FindModuleIdDefiningFlag = find_module_id_defining_flag def append_flag_values(self, flag_values: FlagValues) -> None: ... AppendFlagValues = append_flag_values def remove_flag_values(self, flag_values: FlagValues) -> None: ... RemoveFlagValues = remove_flag_values def __setitem__(self, name: str, flag: Flag) -> None: ... def __getitem__(self, name: str) -> Flag: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any): ... 
def __delattr__(self, flag_name: str) -> None: ... def set_default(self, name: str, value: Any) -> None: ... SetDefault = set_default def __contains__(self, name: str) -> bool: ... has_key = __contains__ def __iter__(self) -> Iterable[str]: ... def __call__(self, argv: List[str], known_only: bool = ...) -> List[str]: ... def reset(self) -> None: ... Reset = reset def RegisteredFlags(self) -> List[str]: ... def flag_values_dict(self) -> Dict[str, Any]: ... FlagValuesDict = flag_values_dict def __str__(self) -> str: ... def GetHelp(self, prefix: str = ...) -> str: ... def module_help(self, module: Union[ModuleType, str]) -> str: ... ModuleHelp = module_help def main_module_help(self) -> str: ... MainModuleHelp = main_module_help def get(self, name: str, default: Any) -> Any: ... def ShortestUniquePrefixes(self, fl: Dict[str, Flag]) -> Dict[str, str]: ... def ExtractFilename(self, flagfile_str: str) -> str: ... def read_flags_from_files(self, argv: List[str], force_gnu: bool = ...) -> List[str]: ... ReadFlagsFromFiles = read_flags_from_files def flags_into_string(self) -> str: ... FlagsIntoString = flags_into_string def append_flags_into_file(self, filename: str) -> None: ... AppendFlagsIntoFile = append_flags_into_file def write_help_in_xml_format(self, outfile: IO[str] = ...) -> None: ... WriteHelpInXMLFormat = write_help_in_xml_format # TODO validator: gflags_validators.Validator def AddValidator(self, validator: Any) -> None: ... FLAGS: FlagValues class Flag: name: str default: Any default_as_str: str value: Any help: str short_name: str boolean = False present = False parser: ArgumentParser serializer: ArgumentSerializer allow_override = False def __init__(self, parser: ArgumentParser, serializer: ArgumentSerializer, name: str, default: Optional[str], help_string: str, short_name: str = ..., boolean: bool = ..., allow_override: bool = ...) -> None: ... def Parse(self, argument: Any) -> Any: ... def Unparse(self) -> None: ... def Serialize(self) -> str: ... 
def SetDefault(self, value: Any) -> None: ... def Type(self) -> str: ... def WriteInfoInXMLFormat(self, outfile: IO[str], module_name: str, is_key: bool = ..., indent: str = ...) -> None: ... class ArgumentParser(object): syntactic_help: str # TODO what is this def parse(self, argument: Any) -> Any: ... Parser = parse def flag_type(self) -> str: ... Type = flag_type def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ... class ArgumentSerializer: def Serialize(self, value: Any) -> unicode: ... class ListSerializer(ArgumentSerializer): def __init__(self, list_sep: str) -> None: ... def Serialize(self, value: List[Any]) -> str: ... def register_validator(flag_name: str, checker: Callable[[Any], bool], message: str = ..., flag_values: FlagValues = ...) -> None: ... RegisterValidator = register_validator def mark_flag_as_required(flag_name: str, flag_values: FlagValues = ...) -> None: ... MarkFlagAsRequired = mark_flag_as_required def DEFINE(parser: ArgumentParser, name: str, default: Any, help: str, flag_values: FlagValues = ..., serializer: ArgumentSerializer = ..., **args: Any) -> None: ... def DEFINE_flag(flag: Flag, flag_values: FlagValues = ...) -> None: ... def declare_key_flag(flag_name: str, flag_values: FlagValues = ...) -> None: ... DECLARE_key_flag = declare_key_flag def adopt_module_key_flags(module: ModuleType, flag_values: FlagValues = ...) -> None: ... ADOPT_module_key_flags = adopt_module_key_flags def DEFINE_string(name: str, default: Optional[str], help: str, flag_values: FlagValues = ..., **args: Any): ... class BooleanParser(ArgumentParser): def Convert(self, argument: Any) -> bool: ... def Parse(self, argument: Any) -> bool: ... class BooleanFlag(Flag): def __init__(self, name: str, default: Optional[bool], help: str, short_name=..., **args: Any) -> None: ... def DEFINE_boolean(name: str, default: Optional[bool], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... 
DEFINE_bool = DEFINE_boolean class HelpFlag(BooleanFlag): def __init__(self) -> None: ... def Parse(self, arg: Any) -> None: ... class HelpXMLFlag(BooleanFlag): def __init__(self) -> None: ... def Parse(self, arg: Any) -> None: ... class HelpshortFlag(BooleanFlag): def __init__(self) -> None: ... def Parse(self, arg: Any) -> None: ... class NumericParser(ArgumentParser): def IsOutsideBounds(self, val: float) -> bool: ... def Parse(self, argument: Any) -> float: ... def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ... def Convert(self, argument: Any) -> Any: ... class FloatParser(NumericParser): number_article: str number_name: str syntactic_help: str def __init__(self, lower_bound: float = ..., upper_bound: float = ...) -> None: ... def Convert(self, argument: Any) -> float: ... def DEFINE_float(name: str, default: Optional[float], help: str, lower_bound: float = ..., upper_bound: float = ..., flag_values: FlagValues = ..., **args: Any) -> None: ... class IntegerParser(NumericParser): number_article: str number_name: str syntactic_help: str def __init__(self, lower_bound: int = ..., upper_bound: int = ...) -> None: ... def Convert(self, argument: Any) -> int: ... def DEFINE_integer(name: str, default: Optional[int], help: str, lower_bound: int = ..., upper_bound: int = ..., flag_values: FlagValues = ..., **args: Any) -> None: ... class EnumParser(ArgumentParser): def __init__(self, enum_values: List[str]) -> None: ... def Parse(self, argument: Any) -> Any: ... class EnumFlag(Flag): def __init__(self, name: str, default: Optional[str], help: str, enum_values: List[str], short_name: str, **args: Any) -> None: ... def DEFINE_enum(name: str, default: Optional[str], enum_values: List[str], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... class BaseListParser(ArgumentParser): def __init__(self, token: str = ..., name: str = ...) -> None: ... def Parse(self, argument: Any) -> List[Any]: ... 
class ListParser(BaseListParser): def __init__(self) -> None: ... def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str): ... class WhitespaceSeparatedListParser(BaseListParser): def __init__(self) -> None: ... def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str): ... def DEFINE_list(name: str, default: Optional[List[str]], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... def DEFINE_spaceseplist(name: str, default: Optional[List[str]], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... class MultiFlag(Flag): def __init__(self, *args: Any, **kwargs: Any) -> None: ... def Parse(self, arguments: Any) -> None: ... def Serialize(self) -> str: ... def DEFINE_multi_string(name: str, default: Optional[Union[str, List[str]]], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... DEFINE_multistring = DEFINE_multi_string def DEFINE_multi_integer(name: str, default: Optional[Union[int, List[int]]], help: str, lower_bound: int = ..., upper_bound: int = ..., flag_values: FlagValues = ..., **args: Any) -> None: ... DEFINE_multi_int = DEFINE_multi_integer def DEFINE_multi_float(name: str, default: Optional[Union[float, List[float]]], help: str, lower_bound: float = ..., upper_bound: float = ..., flag_values: FlagValues = ..., **args: Any) -> None: ... def DEFINE_multi_enum(name: str, default: Optional[Union[Sequence[str], str]], enum_values: Sequence[str], help: str, flag_values: FlagValues = ..., case_sensitive: bool = ..., **args: Any): ... 
mypy-0.761/mypy/typeshed/third_party/2/ipaddress.pyi0000644€tŠÔÚ€2›s®0000001177713576752252026750 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import (Any, Container, Generic, Iterable, Iterator, Optional, overload, SupportsInt, Text, Tuple, TypeVar) # Undocumented length constants IPV4LENGTH: int IPV6LENGTH: int _A = TypeVar("_A", IPv4Address, IPv6Address) _N = TypeVar("_N", IPv4Network, IPv6Network) _T = TypeVar("_T") def ip_address(address: object) -> Any: ... # morally Union[IPv4Address, IPv6Address] def ip_network(address: object, strict: bool = ...) -> Any: ... # morally Union[IPv4Network, IPv6Network] def ip_interface(address: object) -> Any: ... # morally Union[IPv4Interface, IPv6Interface] class _IPAddressBase: def __eq__(self, other: Any) -> bool: ... def __ge__(self: _T, other: _T) -> bool: ... def __gt__(self: _T, other: _T) -> bool: ... def __le__(self: _T, other: _T) -> bool: ... def __lt__(self: _T, other: _T) -> bool: ... def __ne__(self, other: Any) -> bool: ... @property def compressed(self) -> Text: ... @property def exploded(self) -> Text: ... @property def reverse_pointer(self) -> Text: ... @property def version(self) -> int: ... class _BaseAddress(_IPAddressBase, SupportsInt): def __init__(self, address: object) -> None: ... def __add__(self: _T, other: int) -> _T: ... def __hash__(self) -> int: ... def __int__(self) -> int: ... def __sub__(self: _T, other: int) -> _T: ... @property def is_global(self) -> bool: ... @property def is_link_local(self) -> bool: ... @property def is_loopback(self) -> bool: ... @property def is_multicast(self) -> bool: ... @property def is_private(self) -> bool: ... @property def is_reserved(self) -> bool: ... @property def is_unspecified(self) -> bool: ... @property def max_prefixlen(self) -> int: ... @property def packed(self) -> bytes: ... 
class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): network_address: _A netmask: _A def __init__(self, address: object, strict: bool = ...) -> None: ... def __contains__(self, other: Any) -> bool: ... def __getitem__(self, n: int) -> _A: ... def __iter__(self) -> Iterator[_A]: ... def address_exclude(self: _T, other: _T) -> Iterator[_T]: ... @property def broadcast_address(self) -> _A: ... def compare_networks(self: _T, other: _T) -> int: ... def hosts(self) -> Iterator[_A]: ... @property def is_global(self) -> bool: ... @property def is_link_local(self) -> bool: ... @property def is_loopback(self) -> bool: ... @property def is_multicast(self) -> bool: ... @property def is_private(self) -> bool: ... @property def is_reserved(self) -> bool: ... @property def is_unspecified(self) -> bool: ... @property def max_prefixlen(self) -> int: ... @property def num_addresses(self) -> int: ... def overlaps(self: _T, other: _T) -> bool: ... @property def prefixlen(self) -> int: ... def subnets(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> Iterator[_T]: ... def supernet(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> _T: ... @property def with_hostmask(self) -> Text: ... @property def with_netmask(self) -> Text: ... @property def with_prefixlen(self) -> Text: ... @property def hostmask(self) -> _A: ... class _BaseInterface(_BaseAddress, Generic[_A, _N]): hostmask: _A netmask: _A network: _N @property def ip(self) -> _A: ... @property def with_hostmask(self) -> Text: ... @property def with_netmask(self) -> Text: ... @property def with_prefixlen(self) -> Text: ... class IPv4Address(_BaseAddress): ... class IPv4Network(_BaseNetwork[IPv4Address]): ... class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): ... class IPv6Address(_BaseAddress): @property def ipv4_mapped(self) -> Optional[IPv4Address]: ... @property def is_site_local(self) -> bool: ... 
@property def sixtofour(self) -> Optional[IPv4Address]: ... @property def teredo(self) -> Optional[Tuple[IPv4Address, IPv4Address]]: ... class IPv6Network(_BaseNetwork[IPv6Address]): @property def is_site_local(self) -> bool: ... class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): ... def v4_int_to_packed(address: int) -> bytes: ... def v6_int_to_packed(address: int) -> bytes: ... @overload def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ... @overload def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ... def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ... @overload def get_mixed_type_key(obj: _A) -> Tuple[int, _A]: ... @overload def get_mixed_type_key(obj: IPv4Network) -> Tuple[int, IPv4Address, IPv4Address]: ... @overload def get_mixed_type_key(obj: IPv6Network) -> Tuple[int, IPv6Address, IPv6Address]: ... class AddressValueError(ValueError): ... class NetmaskValueError(ValueError): ... 
mypy-0.761/mypy/typeshed/third_party/2/kazoo/0000755€tŠÔÚ€2›s®0000000000013576752267025363 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/kazoo/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252027625 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/kazoo/client.pyi0000644€tŠÔÚ€2›s®0000000635613576752252027370 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any string_types: Any bytes_types: Any LOST_STATES: Any ENVI_VERSION: Any ENVI_VERSION_KEY: Any log: Any class KazooClient: logger: Any handler: Any auth_data: Any default_acl: Any randomize_hosts: Any hosts: Any chroot: Any state: Any state_listeners: Any read_only: Any retry: Any Barrier: Any Counter: Any DoubleBarrier: Any ChildrenWatch: Any DataWatch: Any Election: Any NonBlockingLease: Any MultiNonBlockingLease: Any Lock: Any Party: Any Queue: Any LockingQueue: Any SetPartitioner: Any Semaphore: Any ShallowParty: Any def __init__(self, hosts=..., timeout=..., client_id=..., handler=..., default_acl=..., auth_data=..., read_only=..., randomize_hosts=..., connection_retry=..., command_retry=..., logger=..., **kwargs) -> None: ... @property def client_state(self): ... @property def client_id(self): ... @property def connected(self): ... def set_hosts(self, hosts, randomize_hosts=...): ... def add_listener(self, listener): ... def remove_listener(self, listener): ... def start(self, timeout=...): ... def start_async(self): ... def stop(self): ... def restart(self): ... def close(self): ... def command(self, cmd=...): ... def server_version(self, retries=...): ... def add_auth(self, scheme, credential): ... def add_auth_async(self, scheme, credential): ... def unchroot(self, path): ... def sync_async(self, path): ... def sync(self, path): ... def create(self, path, value=..., acl=..., ephemeral=..., sequence=..., makepath=...): ... 
def create_async(self, path, value=..., acl=..., ephemeral=..., sequence=..., makepath=...): ... def ensure_path(self, path, acl=...): ... def ensure_path_async(self, path, acl=...): ... def exists(self, path, watch=...): ... def exists_async(self, path, watch=...): ... def get(self, path, watch=...): ... def get_async(self, path, watch=...): ... def get_children(self, path, watch=..., include_data=...): ... def get_children_async(self, path, watch=..., include_data=...): ... def get_acls(self, path): ... def get_acls_async(self, path): ... def set_acls(self, path, acls, version=...): ... def set_acls_async(self, path, acls, version=...): ... def set(self, path, value, version=...): ... def set_async(self, path, value, version=...): ... def transaction(self): ... def delete(self, path, version=..., recursive=...): ... def delete_async(self, path, version=...): ... def reconfig(self, joining, leaving, new_members, from_config=...): ... def reconfig_async(self, joining, leaving, new_members, from_config): ... class TransactionRequest: client: Any operations: Any committed: Any def __init__(self, client) -> None: ... def create(self, path, value=..., acl=..., ephemeral=..., sequence=...): ... def delete(self, path, version=...): ... def set_data(self, path, value, version=...): ... def check(self, path, version): ... def commit_async(self): ... def commit(self): ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, exc_tb): ... class KazooState: ... mypy-0.761/mypy/typeshed/third_party/2/kazoo/exceptions.pyi0000644€tŠÔÚ€2›s®0000000400613576752252030261 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class KazooException(Exception): ... class ZookeeperError(KazooException): ... class CancelledError(KazooException): ... class ConfigurationError(KazooException): ... class ZookeeperStoppedError(KazooException): ... class ConnectionDropped(KazooException): ... class LockTimeout(KazooException): ... 
class WriterNotClosedException(KazooException): ... EXCEPTIONS: Any class RolledBackError(ZookeeperError): ... class SystemZookeeperError(ZookeeperError): ... class RuntimeInconsistency(ZookeeperError): ... class DataInconsistency(ZookeeperError): ... class ConnectionLoss(ZookeeperError): ... class MarshallingError(ZookeeperError): ... class UnimplementedError(ZookeeperError): ... class OperationTimeoutError(ZookeeperError): ... class BadArgumentsError(ZookeeperError): ... class NewConfigNoQuorumError(ZookeeperError): ... class ReconfigInProcessError(ZookeeperError): ... class APIError(ZookeeperError): ... class NoNodeError(ZookeeperError): ... class NoAuthError(ZookeeperError): ... class BadVersionError(ZookeeperError): ... class NoChildrenForEphemeralsError(ZookeeperError): ... class NodeExistsError(ZookeeperError): ... class NotEmptyError(ZookeeperError): ... class SessionExpiredError(ZookeeperError): ... class InvalidCallbackError(ZookeeperError): ... class InvalidACLError(ZookeeperError): ... class AuthFailedError(ZookeeperError): ... class SessionMovedError(ZookeeperError): ... class NotReadOnlyCallError(ZookeeperError): ... class ConnectionClosedError(SessionExpiredError): ... 
ConnectionLossException: Any MarshallingErrorException: Any SystemErrorException: Any RuntimeInconsistencyException: Any DataInconsistencyException: Any UnimplementedException: Any OperationTimeoutException: Any BadArgumentsException: Any ApiErrorException: Any NoNodeException: Any NoAuthException: Any BadVersionException: Any NoChildrenForEphemeralsException: Any NodeExistsException: Any InvalidACLException: Any AuthFailedException: Any NotEmptyException: Any SessionExpiredException: Any InvalidCallbackException: Any mypy-0.761/mypy/typeshed/third_party/2/kazoo/recipe/0000755€tŠÔÚ€2›s®0000000000013576752267026632 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/kazoo/recipe/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031074 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/kazoo/recipe/watchers.pyi0000644€tŠÔÚ€2›s®0000000104713576752252031171 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any log: Any class DataWatch: def __init__(self, client, path, func=..., *args, **kwargs) -> None: ... def __call__(self, func): ... class ChildrenWatch: def __init__(self, client, path, func=..., allow_session_lost=..., send_event=...) -> None: ... def __call__(self, func): ... class PatientChildrenWatch: client: Any path: Any children: Any time_boundary: Any children_changed: Any def __init__(self, client, path, time_boundary=...) -> None: ... asy: Any def start(self): ... mypy-0.761/mypy/typeshed/third_party/2/pathlib2.pyi0000644€tŠÔÚ€2›s®0000001260413576752252026465 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Generator, IO, Optional, Sequence, Tuple, Type, TypeVar, Union, List from types import TracebackType import os import sys _P = TypeVar('_P', bound=PurePath) if sys.version_info >= (3, 6): _PurePathBase = os.PathLike[str] else: _PurePathBase = object class PurePath(_PurePathBase): parts: Tuple[str, ...] 
drive: str root: str anchor: str name: str suffix: str suffixes: List[str] stem: str if sys.version_info < (3, 5): def __init__(self, *pathsegments: str) -> None: ... elif sys.version_info < (3, 6): def __new__(cls: Type[_P], *args: Union[str, PurePath]) -> _P: ... else: def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]]) -> _P: ... def __hash__(self) -> int: ... def __lt__(self, other: PurePath) -> bool: ... def __le__(self, other: PurePath) -> bool: ... def __gt__(self, other: PurePath) -> bool: ... def __ge__(self, other: PurePath) -> bool: ... if sys.version_info < (3, 6): def __truediv__(self: _P, key: Union[str, PurePath]) -> _P: ... def __rtruediv__(self: _P, key: Union[str, PurePath]) -> _P: ... else: def __truediv__(self: _P, key: Union[str, os.PathLike[str]]) -> _P: ... def __rtruediv__(self: _P, key: Union[str, os.PathLike[str]]) -> _P: ... if sys.version_info < (3,): def __div__(self: _P, key: Union[str, PurePath]) -> _P: ... def __bytes__(self) -> bytes: ... def as_posix(self) -> str: ... def as_uri(self) -> str: ... def is_absolute(self) -> bool: ... def is_reserved(self) -> bool: ... def match(self, path_pattern: str) -> bool: ... if sys.version_info < (3, 6): def relative_to(self: _P, *other: Union[str, PurePath]) -> _P: ... else: def relative_to(self: _P, *other: Union[str, os.PathLike[str]]) -> _P: ... def with_name(self: _P, name: str) -> _P: ... def with_suffix(self: _P, suffix: str) -> _P: ... if sys.version_info < (3, 6): def joinpath(self: _P, *other: Union[str, PurePath]) -> _P: ... else: def joinpath(self: _P, *other: Union[str, os.PathLike[str]]) -> _P: ... @property def parents(self: _P) -> Sequence[_P]: ... @property def parent(self: _P) -> _P: ... class PurePosixPath(PurePath): ... class PureWindowsPath(PurePath): ... class Path(PurePath): def __enter__(self) -> Path: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Optional[bool]: ... 
@classmethod def cwd(cls: Type[_P]) -> _P: ... def stat(self) -> os.stat_result: ... def chmod(self, mode: int) -> None: ... def exists(self) -> bool: ... def glob(self, pattern: str) -> Generator[Path, None, None]: ... def group(self) -> str: ... def is_dir(self) -> bool: ... def is_file(self) -> bool: ... def is_symlink(self) -> bool: ... def is_socket(self) -> bool: ... def is_fifo(self) -> bool: ... def is_block_device(self) -> bool: ... def is_char_device(self) -> bool: ... def iterdir(self) -> Generator[Path, None, None]: ... def lchmod(self, mode: int) -> None: ... def lstat(self) -> os.stat_result: ... if sys.version_info < (3, 5): def mkdir(self, mode: int = ..., parents: bool = ...) -> None: ... else: def mkdir(self, mode: int = ..., parents: bool = ..., exist_ok: bool = ...) -> None: ... def open(self, mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> IO[Any]: ... def owner(self) -> str: ... def rename(self, target: Union[str, PurePath]) -> None: ... def replace(self, target: Union[str, PurePath]) -> None: ... if sys.version_info < (3, 6): def resolve(self: _P) -> _P: ... else: def resolve(self: _P, strict: bool = ...) -> _P: ... def rglob(self, pattern: str) -> Generator[Path, None, None]: ... def rmdir(self) -> None: ... def symlink_to(self, target: Union[str, Path], target_is_directory: bool = ...) -> None: ... def touch(self, mode: int = ..., exist_ok: bool = ...) -> None: ... def unlink(self) -> None: ... if sys.version_info >= (3, 5): @classmethod def home(cls: Type[_P]) -> _P: ... if sys.version_info < (3, 6): def __new__(cls: Type[_P], *args: Union[str, PurePath], **kwargs: Any) -> _P: ... else: def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]], **kwargs: Any) -> _P: ... def absolute(self: _P) -> _P: ... def expanduser(self: _P) -> _P: ... def read_bytes(self) -> bytes: ... def read_text(self, encoding: Optional[str] = ..., errors: Optional[str] = ...) 
-> str: ... def samefile(self, other_path: Union[str, bytes, int, Path]) -> bool: ... def write_bytes(self, data: bytes) -> int: ... def write_text(self, data: str, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> int: ... if sys.version_info >= (3, 8): def link_to(self, target: Union[str, bytes, os.PathLike[str]]) -> None: ... class PosixPath(Path, PurePosixPath): ... class WindowsPath(Path, PureWindowsPath): ... mypy-0.761/mypy/typeshed/third_party/2/pymssql.pyi0000644€tŠÔÚ€2›s®0000000352413576752252026471 0ustar jukkaDROPBOX\Domain Users00000000000000from datetime import datetime, date, time from typing import Any, Dict, Tuple, Iterable, List, Optional, Union, Sequence Scalar = Union[int, float, str, datetime, date, time] Result = Union[Tuple[Scalar, ...], Dict[str, Scalar]] class Connection(object): def __init__(self, user, password, host, database, timeout, login_timeout, charset, as_dict) -> None: ... def autocommit(self, status: bool) -> None: ... def close(self) -> None: ... def commit(self) -> None: ... def cursor(self) -> Cursor: ... def rollback(self) -> None: ... class Cursor(object): def __init__(self) -> None: ... def __iter__(self): ... def __next__(self) -> Any: ... def callproc(self, procname: str, **kwargs) -> None: ... def close(self) -> None: ... def execute(self, stmt: str, params: Optional[Union[Scalar, Tuple[Scalar, ...], Dict[str, Scalar]]]) -> None: ... def executemany(self, stmt: str, params: Optional[Sequence[Tuple[Scalar, ...]]]) -> None: ... def fetchall(self) -> List[Result]: ... def fetchmany(self, size: Optional[int]) -> List[Result]: ... def fetchone(self) -> Result: ... 
def connect(server: Optional[str], user: Optional[str], password: Optional[str], database: Optional[str], timeout: Optional[int], login_timeout: Optional[int], charset: Optional[str], as_dict: Optional[bool], host: Optional[str], appname: Optional[str], port: Optional[str], conn_properties: Optional[Union[str, Sequence[str]]], autocommit: Optional[bool], tds_version: Optional[str]) -> Connection: ... def get_max_connections() -> int: ... def set_max_connections(n: int) -> None: ... mypy-0.761/mypy/typeshed/third_party/2/routes/0000755€tŠÔÚ€2›s®0000000000013576752267025561 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/routes/__init__.pyi0000644€tŠÔÚ€2›s®0000000057113576752252030040 0ustar jukkaDROPBOX\Domain Users00000000000000from . import mapper from . import util class _RequestConfig: def __getattr__(self, name): ... def __setattr__(self, name, value): ... def __delattr__(self, name): ... def load_wsgi_environ(self, environ): ... def request_config(original=...): ... Mapper = mapper.Mapper redirect_to = util.redirect_to url_for = util.url_for URLGenerator = util.URLGenerator mypy-0.761/mypy/typeshed/third_party/2/routes/mapper.pyi0000644€tŠÔÚ€2›s®0000000436613576752252027573 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any COLLECTION_ACTIONS: Any MEMBER_ACTIONS: Any def strip_slashes(name): ... class SubMapperParent: def submapper(self, **kargs): ... def collection(self, collection_name, resource_name, path_prefix=..., member_prefix=..., controller=..., collection_actions=..., member_actions=..., member_options=..., **kwargs): ... class SubMapper(SubMapperParent): kwargs: Any obj: Any collection_name: Any member: Any resource_name: Any formatted: Any def __init__(self, obj, resource_name=..., collection_name=..., actions=..., formatted=..., **kwargs) -> None: ... def connect(self, *args, **kwargs): ... def link(self, rel=..., name=..., action=..., method=..., formatted=..., **kwargs): ... 
def new(self, **kwargs): ... def edit(self, **kwargs): ... def action(self, name=..., action=..., method=..., formatted=..., **kwargs): ... def index(self, name=..., **kwargs): ... def show(self, name=..., **kwargs): ... def create(self, **kwargs): ... def update(self, **kwargs): ... def delete(self, **kwargs): ... def add_actions(self, actions): ... def __enter__(self): ... def __exit__(self, type, value, tb): ... class Mapper(SubMapperParent): matchlist: Any maxkeys: Any minkeys: Any urlcache: Any prefix: Any req_data: Any directory: Any always_scan: Any controller_scan: Any debug: Any append_slash: Any sub_domains: Any sub_domains_ignore: Any domain_match: Any explicit: Any encoding: Any decode_errors: Any hardcode_names: Any minimization: Any create_regs_lock: Any def __init__(self, controller_scan=..., directory=..., always_scan=..., register=..., explicit=...) -> None: ... environ: Any def extend(self, routes, path_prefix=...): ... def make_route(self, *args, **kargs): ... def connect(self, *args, **kargs): ... def create_regs(self, *args, **kwargs): ... def match(self, url=..., environ=...): ... def routematch(self, url=..., environ=...): ... obj: Any def generate(self, *args, **kargs): ... def resource(self, member_name, collection_name, **kwargs): ... def redirect(self, match_path, destination_path, *args, **kwargs): ... mypy-0.761/mypy/typeshed/third_party/2/routes/util.pyi0000644€tŠÔÚ€2›s®0000000110013576752252027243 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class RoutesException(Exception): ... class MatchException(RoutesException): ... class GenerationException(RoutesException): ... def url_for(*args, **kargs): ... class URLGenerator: mapper: Any environ: Any def __init__(self, mapper, environ) -> None: ... def __call__(self, *args, **kargs): ... def current(self, *args, **kwargs): ... def redirect_to(*args, **kargs): ... def cache_hostinfo(environ): ... def controller_scan(directory=...): ... 
def as_unicode(value, encoding, errors=...): ... def ascii_characters(string): ... mypy-0.761/mypy/typeshed/third_party/2/scribe/0000755€tŠÔÚ€2›s®0000000000013576752267025507 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/scribe/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252027751 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/scribe/scribe.pyi0000644€tŠÔÚ€2›s®0000000225713576752252027501 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import fb303.FacebookService from .ttypes import * # noqa: F403 from thrift.Thrift import TProcessor # type: ignore # We don't have thrift stubs in typeshed class Iface(fb303.FacebookService.Iface): def Log(self, messages): ... class Client(fb303.FacebookService.Client, Iface): def __init__(self, iprot, oprot=...) -> None: ... def Log(self, messages): ... def send_Log(self, messages): ... def recv_Log(self): ... class Processor(fb303.FacebookService.Processor, Iface, TProcessor): def __init__(self, handler) -> None: ... def process(self, iprot, oprot): ... def process_Log(self, seqid, iprot, oprot): ... class Log_args: thrift_spec: Any messages: Any def __init__(self, messages=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class Log_result: thrift_spec: Any success: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... mypy-0.761/mypy/typeshed/third_party/2/scribe/ttypes.pyi0000644€tŠÔÚ€2›s®0000000057713576752252027565 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any fastbinary: Any class ResultCode: OK: Any TRY_LATER: Any class LogEntry: thrift_spec: Any category: Any message: Any def __init__(self, category=..., message=...) -> None: ... def read(self, iprot): ... 
def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... mypy-0.761/mypy/typeshed/third_party/2/six/0000755€tŠÔÚ€2›s®0000000000013576752267025043 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/six/__init__.pyi0000644€tŠÔÚ€2›s®0000000721213576752252027321 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six (Python 2.7) from __future__ import print_function import types from typing import ( Any, AnyStr, Callable, Dict, Iterable, Mapping, NoReturn, Optional, Pattern, Text, Tuple, Type, TypeVar, Union, overload, ValuesView, KeysView, ItemsView, ) import typing import unittest # Exports from __builtin__ import unichr as unichr from StringIO import StringIO as StringIO, StringIO as BytesIO from functools import wraps as wraps from . import moves _T = TypeVar('_T') _K = TypeVar('_K') _V = TypeVar('_V') # TODO make constant, then move this stub to 2and3 # https://github.com/python/typeshed/issues/17 PY2 = True PY3 = False PY34 = False string_types = (str, unicode) integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str MAXSIZE: int # def add_move # def remove_move def advance_iterator(it: typing.Iterator[_T]) -> _T: ... next = advance_iterator def callable(obj: object) -> bool: ... def get_unbound_function(unbound: types.MethodType) -> types.FunctionType: ... def create_bound_method(func: types.FunctionType, obj: object) -> types.MethodType: ... def create_unbound_method(func: types.FunctionType, cls: Union[type, types.ClassType]) -> types.MethodType: ... class Iterator: def next(self) -> Any: ... def get_method_function(meth: types.MethodType) -> types.FunctionType: ... def get_method_self(meth: types.MethodType) -> Optional[object]: ... def get_function_closure(fun: types.FunctionType) -> Optional[Tuple[types._Cell, ...]]: ... def get_function_code(fun: types.FunctionType) -> types.CodeType: ... 
def get_function_defaults(fun: types.FunctionType) -> Optional[Tuple[Any, ...]]: ... def get_function_globals(fun: types.FunctionType) -> Dict[str, Any]: ... def iterkeys(d: Mapping[_K, _V]) -> typing.Iterator[_K]: ... def itervalues(d: Mapping[_K, _V]) -> typing.Iterator[_V]: ... def iteritems(d: Mapping[_K, _V]) -> typing.Iterator[Tuple[_K, _V]]: ... # def iterlists def viewkeys(d: Mapping[_K, _V]) -> KeysView[_K]: ... def viewvalues(d: Mapping[_K, _V]) -> ValuesView[_V]: ... def viewitems(d: Mapping[_K, _V]) -> ItemsView[_K, _V]: ... def b(s: str) -> binary_type: ... def u(s: str) -> text_type: ... int2byte = chr def byte2int(bs: binary_type) -> int: ... def indexbytes(buf: binary_type, i: int) -> int: ... def iterbytes(buf: binary_type) -> typing.Iterator[int]: ... def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: str = ...) -> None: ... @overload def assertRaisesRegex(self: unittest.TestCase, msg: str = ...) -> Any: ... @overload def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ... def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: str = ...) -> None: ... def reraise(tp: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] = ...) -> NoReturn: ... def exec_(_code_: Union[unicode, types.CodeType], _globs_: Dict[str, Any] = ..., _locs_: Dict[str, Any] = ...): ... def raise_from(value: Union[BaseException, Type[BaseException]], from_value: Optional[BaseException]) -> NoReturn: ... print_ = print def with_metaclass(meta: type, *bases: type) -> type: ... def add_metaclass(metaclass: type) -> Callable[[_T], _T]: ... def ensure_binary(s: Union[bytes, Text], encoding: str = ..., errors: str = ...) -> bytes: ... def ensure_str(s: Union[bytes, Text], encoding: str = ..., errors: str = ...) -> str: ... 
def ensure_text(s: Union[bytes, Text], encoding: str = ..., errors: str = ...) -> Text: ... def python_2_unicode_compatible(klass: _T) -> _T: ... mypy-0.761/mypy/typeshed/third_party/2/six/moves/0000755€tŠÔÚ€2›s®0000000000013576752267026174 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/six/moves/BaseHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000003513576752252031450 0ustar jukkaDROPBOX\Domain Users00000000000000from BaseHTTPServer import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/CGIHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000003413576752252031177 0ustar jukkaDROPBOX\Domain Users00000000000000from CGIHTTPServer import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/SimpleHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000003713576752252032031 0ustar jukkaDROPBOX\Domain Users00000000000000from SimpleHTTPServer import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/__init__.pyi0000644€tŠÔÚ€2›s®0000000451713576752252030457 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves # # Note: Commented out items means they weren't implemented at the time. # Uncomment them when the modules have been added to the typeshed. from cStringIO import StringIO as cStringIO from itertools import ifilter as filter from itertools import ifilterfalse as filterfalse from __builtin__ import raw_input as input from __builtin__ import intern as intern from itertools import imap as map from os import getcwdu as getcwd from os import getcwd as getcwdb from __builtin__ import xrange as range from __builtin__ import reload as reload_module from __builtin__ import reduce as reduce from pipes import quote as shlex_quote from StringIO import StringIO as StringIO from UserDict import UserDict as UserDict from UserList import UserList as UserList from UserString import UserString as UserString from __builtin__ import xrange as xrange from itertools import izip as zip from itertools import izip_longest as zip_longest import __builtin__ as builtins from . 
import configparser # import copy_reg as copyreg # import gdbm as dbm_gnu from . import _dummy_thread from . import http_cookiejar from . import http_cookies from . import html_entities from . import html_parser from . import http_client # import email.MIMEMultipart as email_mime_multipart # import email.MIMENonMultipart as email_mime_nonmultipart from . import email_mime_text # import email.MIMEBase as email_mime_base from . import BaseHTTPServer from . import CGIHTTPServer from . import SimpleHTTPServer from . import cPickle from . import queue from . import reprlib from . import socketserver from . import _thread # import Tkinter as tkinter # import Dialog as tkinter_dialog # import FileDialog as tkinter_filedialog # import ScrolledText as tkinter_scrolledtext # import SimpleDialog as tkinter_simpledialog # import Tix as tkinter_tix # import ttk as tkinter_ttk # import Tkconstants as tkinter_constants # import Tkdnd as tkinter_dnd # import tkColorChooser as tkinter_colorchooser # import tkCommonDialog as tkinter_commondialog # import tkFileDialog as tkinter_tkfiledialog # import tkFont as tkinter_font # import tkMessageBox as tkinter_messagebox # import tkSimpleDialog as tkinter_tksimpledialog from . import urllib_parse from . import urllib_error from . import urllib from . import urllib_robotparser from . 
import xmlrpc_client # import SimpleXMLRPCServer as xmlrpc_server mypy-0.761/mypy/typeshed/third_party/2/six/moves/_dummy_thread.pyi0000644€tŠÔÚ€2›s®0000000003313576752252031526 0ustar jukkaDROPBOX\Domain Users00000000000000from dummy_thread import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/_thread.pyi0000644€tŠÔÚ€2›s®0000000002513576752252030314 0ustar jukkaDROPBOX\Domain Users00000000000000from thread import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/cPickle.pyi0000644€tŠÔÚ€2›s®0000000002613576752252030261 0ustar jukkaDROPBOX\Domain Users00000000000000from cPickle import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/configparser.pyi0000644€tŠÔÚ€2›s®0000000003313576752252031367 0ustar jukkaDROPBOX\Domain Users00000000000000from ConfigParser import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/email_mime_base.pyi0000644€tŠÔÚ€2›s®0000000003613576752252032000 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.base import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/email_mime_multipart.pyi0000644€tŠÔÚ€2›s®0000000004313576752252033105 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.multipart import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/email_mime_nonmultipart.pyi0000644€tŠÔÚ€2›s®0000000004613576752252033623 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.nonmultipart import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/email_mime_text.pyi0000644€tŠÔÚ€2›s®0000000003513576752252032051 0ustar jukkaDROPBOX\Domain Users00000000000000from email.MIMEText import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/html_entities.pyi0000644€tŠÔÚ€2›s®0000000003513576752252031557 0ustar jukkaDROPBOX\Domain Users00000000000000from htmlentitydefs import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/html_parser.pyi0000644€tŠÔÚ€2›s®0000000003113576752252031223 0ustar jukkaDROPBOX\Domain Users00000000000000from HTMLParser import * 
mypy-0.761/mypy/typeshed/third_party/2/six/moves/http_client.pyi0000644€tŠÔÚ€2›s®0000000002613576752252031224 0ustar jukkaDROPBOX\Domain Users00000000000000from httplib import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/http_cookiejar.pyi0000644€tŠÔÚ€2›s®0000000003013576752252031707 0ustar jukkaDROPBOX\Domain Users00000000000000from cookielib import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/http_cookies.pyi0000644€tŠÔÚ€2›s®0000000002513576752252031401 0ustar jukkaDROPBOX\Domain Users00000000000000from Cookie import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/queue.pyi0000644€tŠÔÚ€2›s®0000000002413576752252030031 0ustar jukkaDROPBOX\Domain Users00000000000000from Queue import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/reprlib.pyi0000644€tŠÔÚ€2›s®0000000002313576752252030343 0ustar jukkaDROPBOX\Domain Users00000000000000from repr import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/socketserver.pyi0000644€tŠÔÚ€2›s®0000000003313576752252031424 0ustar jukkaDROPBOX\Domain Users00000000000000from SocketServer import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib/0000755€tŠÔÚ€2›s®0000000000013576752267027465 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib/__init__.pyi0000644€tŠÔÚ€2›s®0000000033113576752252031736 0ustar jukkaDROPBOX\Domain Users00000000000000import six.moves.urllib.error as error import six.moves.urllib.parse as parse import six.moves.urllib.request as request import six.moves.urllib.response as response import six.moves.urllib.robotparser as robotparser mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib/error.pyi0000644€tŠÔÚ€2›s®0000000022413576752252031331 0ustar jukkaDROPBOX\Domain Users00000000000000from urllib2 import URLError as URLError from urllib2 import HTTPError as HTTPError from urllib import ContentTooShortError as ContentTooShortError 
mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib/parse.pyi0000644€tŠÔÚ€2›s®0000000203713576752252031316 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.parse from urlparse import ParseResult as ParseResult from urlparse import SplitResult as SplitResult from urlparse import parse_qs as parse_qs from urlparse import parse_qsl as parse_qsl from urlparse import urldefrag as urldefrag from urlparse import urljoin as urljoin from urlparse import urlparse as urlparse from urlparse import urlsplit as urlsplit from urlparse import urlunparse as urlunparse from urlparse import urlunsplit as urlunsplit from urllib import quote as quote from urllib import quote_plus as quote_plus from urllib import unquote as unquote from urllib import unquote_plus as unquote_plus from urllib import urlencode as urlencode from urllib import splitquery as splitquery from urllib import splittag as splittag from urllib import splituser as splituser from urlparse import uses_fragment as uses_fragment from urlparse import uses_netloc as uses_netloc from urlparse import uses_params as uses_params from urlparse import uses_query as uses_query from urlparse import uses_relative as uses_relative mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib/request.pyi0000644€tŠÔÚ€2›s®0000000365013576752252031676 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.request from urllib2 import urlopen as urlopen from urllib2 import install_opener as install_opener from urllib2 import build_opener as build_opener from urllib import pathname2url as pathname2url from urllib import url2pathname as url2pathname from urllib import getproxies as getproxies from urllib2 import Request as Request from urllib2 import OpenerDirector as OpenerDirector from urllib2 import HTTPDefaultErrorHandler as HTTPDefaultErrorHandler from urllib2 import HTTPRedirectHandler as HTTPRedirectHandler from urllib2 import HTTPCookieProcessor as HTTPCookieProcessor from urllib2 import 
ProxyHandler as ProxyHandler from urllib2 import BaseHandler as BaseHandler from urllib2 import HTTPPasswordMgr as HTTPPasswordMgr from urllib2 import HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm from urllib2 import AbstractBasicAuthHandler as AbstractBasicAuthHandler from urllib2 import HTTPBasicAuthHandler as HTTPBasicAuthHandler from urllib2 import ProxyBasicAuthHandler as ProxyBasicAuthHandler from urllib2 import AbstractDigestAuthHandler as AbstractDigestAuthHandler from urllib2 import HTTPDigestAuthHandler as HTTPDigestAuthHandler from urllib2 import ProxyDigestAuthHandler as ProxyDigestAuthHandler from urllib2 import HTTPHandler as HTTPHandler from urllib2 import HTTPSHandler as HTTPSHandler from urllib2 import FileHandler as FileHandler from urllib2 import FTPHandler as FTPHandler from urllib2 import CacheFTPHandler as CacheFTPHandler from urllib2 import UnknownHandler as UnknownHandler from urllib2 import HTTPErrorProcessor as HTTPErrorProcessor from urllib import urlretrieve as urlretrieve from urllib import urlcleanup as urlcleanup from urllib import URLopener as URLopener from urllib import FancyURLopener as FancyURLopener from urllib import proxy_bypass as proxy_bypass from urllib2 import parse_http_list as parse_http_list from urllib2 import parse_keqv_list as parse_keqv_list mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib/response.pyi0000644€tŠÔÚ€2›s®0000000031613576752252032040 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.response from urllib import addbase as addbase from urllib import addclosehook as addclosehook from urllib import addinfo as addinfo from urllib import addinfourl as addinfourl mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib/robotparser.pyi0000644€tŠÔÚ€2›s®0000000007313576752252032544 0ustar jukkaDROPBOX\Domain Users00000000000000from robotparser import RobotFileParser as RobotFileParser 
mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib_error.pyi0000644€tŠÔÚ€2›s®0000000003413576752252031410 0ustar jukkaDROPBOX\Domain Users00000000000000from .urllib.error import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib_parse.pyi0000644€tŠÔÚ€2›s®0000000003413576752252031371 0ustar jukkaDROPBOX\Domain Users00000000000000from .urllib.parse import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib_request.pyi0000644€tŠÔÚ€2›s®0000000003613576752252031751 0ustar jukkaDROPBOX\Domain Users00000000000000from .urllib.request import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib_response.pyi0000644€tŠÔÚ€2›s®0000000003713576752252032120 0ustar jukkaDROPBOX\Domain Users00000000000000from .urllib.response import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/urllib_robotparser.pyi0000644€tŠÔÚ€2›s®0000000003213576752252032617 0ustar jukkaDROPBOX\Domain Users00000000000000from robotparser import * mypy-0.761/mypy/typeshed/third_party/2/six/moves/xmlrpc_client.pyi0000644€tŠÔÚ€2›s®0000000003013576752252031545 0ustar jukkaDROPBOX\Domain Users00000000000000from xmlrpclib import * mypy-0.761/mypy/typeshed/third_party/2/tornado/0000755€tŠÔÚ€2›s®0000000000013576752267025706 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/tornado/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252030150 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2/tornado/concurrent.pyi0000644€tŠÔÚ€2›s®0000000177013576752252030612 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any futures: Any class ReturnValueIgnoredError(Exception): ... class _TracebackLogger: exc_info: Any formatted_tb: Any def __init__(self, exc_info) -> None: ... def activate(self): ... def clear(self): ... def __del__(self): ... class Future: def __init__(self) -> None: ... def cancel(self): ... def cancelled(self): ... def running(self): ... def done(self): ... def result(self, timeout=...): ... 
def exception(self, timeout=...): ... def add_done_callback(self, fn): ... def set_result(self, result): ... def set_exception(self, exception): ... def exc_info(self): ... def set_exc_info(self, exc_info): ... def __del__(self): ... TracebackFuture: Any FUTURES: Any def is_future(x): ... class DummyExecutor: def submit(self, fn, *args, **kwargs): ... def shutdown(self, wait=...): ... dummy_executor: Any def run_on_executor(*args, **kwargs): ... def return_future(f): ... def chain_future(a, b): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/gen.pyi0000644€tŠÔÚ€2›s®0000000534113576752252027177 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, NamedTuple, Tuple singledispatch: Any class KeyReuseError(Exception): ... class UnknownKeyError(Exception): ... class LeakedCallbackError(Exception): ... class BadYieldError(Exception): ... class ReturnValueIgnoredError(Exception): ... class TimeoutError(Exception): ... def engine(func): ... def coroutine(func, replace_callback=...): ... class Return(Exception): value: Any def __init__(self, value=...) -> None: ... class WaitIterator: current_index: Any def __init__(self, *args, **kwargs) -> None: ... def done(self): ... def next(self): ... class YieldPoint: def start(self, runner): ... def is_ready(self): ... def get_result(self): ... class Callback(YieldPoint): key: Any def __init__(self, key) -> None: ... runner: Any def start(self, runner): ... def is_ready(self): ... def get_result(self): ... class Wait(YieldPoint): key: Any def __init__(self, key) -> None: ... runner: Any def start(self, runner): ... def is_ready(self): ... def get_result(self): ... class WaitAll(YieldPoint): keys: Any def __init__(self, keys) -> None: ... runner: Any def start(self, runner): ... def is_ready(self): ... def get_result(self): ... def Task(func, *args, **kwargs): ... class YieldFuture(YieldPoint): future: Any io_loop: Any def __init__(self, future, io_loop=...) -> None: ... 
runner: Any key: Any result_fn: Any def start(self, runner): ... def is_ready(self): ... def get_result(self): ... class Multi(YieldPoint): keys: Any children: Any unfinished_children: Any quiet_exceptions: Any def __init__(self, children, quiet_exceptions=...) -> None: ... def start(self, runner): ... def is_ready(self): ... def get_result(self): ... def multi_future(children, quiet_exceptions=...): ... def maybe_future(x): ... def with_timeout(timeout, future, io_loop=..., quiet_exceptions=...): ... def sleep(duration): ... moment: Any class Runner: gen: Any result_future: Any future: Any yield_point: Any pending_callbacks: Any results: Any running: Any finished: Any had_exception: Any io_loop: Any stack_context_deactivate: Any def __init__(self, gen, result_future, first_yielded) -> None: ... def register_callback(self, key): ... def is_ready(self, key): ... def set_result(self, key, result): ... def pop_result(self, key): ... def run(self): ... def handle_yield(self, yielded): ... def result_callback(self, key): ... def handle_exception(self, typ, value, tb): ... class Arguments(NamedTuple): args: Tuple[str, ...] kwargs: Dict[str, Any] def convert_yielded(yielded): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/httpclient.pyi0000644€tŠÔÚ€2›s®0000000573113576752252030607 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado.util import Configurable class HTTPClient: def __init__(self, async_client_class=..., **kwargs) -> None: ... def __del__(self): ... def close(self): ... def fetch(self, request, **kwargs): ... class AsyncHTTPClient(Configurable): @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def __new__(cls, io_loop=..., force_instance=..., **kwargs): ... io_loop: Any defaults: Any def initialize(self, io_loop, defaults=...): ... def close(self): ... def fetch(self, request, callback=..., raise_error=..., **kwargs): ... def fetch_impl(self, request, callback): ... 
@classmethod def configure(cls, impl, **kwargs): ... class HTTPRequest: proxy_host: Any proxy_port: Any proxy_username: Any proxy_password: Any url: Any method: Any body_producer: Any auth_username: Any auth_password: Any auth_mode: Any connect_timeout: Any request_timeout: Any follow_redirects: Any max_redirects: Any user_agent: Any decompress_response: Any network_interface: Any streaming_callback: Any header_callback: Any prepare_curl_callback: Any allow_nonstandard_methods: Any validate_cert: Any ca_certs: Any allow_ipv6: Any client_key: Any client_cert: Any ssl_options: Any expect_100_continue: Any start_time: Any def __init__(self, url, method=..., headers=..., body=..., auth_username=..., auth_password=..., auth_mode=..., connect_timeout=..., request_timeout=..., if_modified_since=..., follow_redirects=..., max_redirects=..., user_agent=..., use_gzip=..., network_interface=..., streaming_callback=..., header_callback=..., prepare_curl_callback=..., proxy_host=..., proxy_port=..., proxy_username=..., proxy_password=..., allow_nonstandard_methods=..., validate_cert=..., ca_certs=..., allow_ipv6=..., client_key=..., client_cert=..., body_producer=..., expect_100_continue=..., decompress_response=..., ssl_options=...) -> None: ... @property def headers(self): ... @headers.setter def headers(self, value): ... @property def body(self): ... @body.setter def body(self, value): ... class HTTPResponse: request: Any code: Any reason: Any headers: Any buffer: Any effective_url: Any error: Any request_time: Any time_info: Any def __init__(self, request, code, headers=..., buffer=..., effective_url=..., error=..., request_time=..., time_info=..., reason=...) -> None: ... body: Any def rethrow(self): ... class HTTPError(Exception): code: Any response: Any def __init__(self, code, message=..., response=...) -> None: ... class _RequestProxy: request: Any defaults: Any def __init__(self, request, defaults) -> None: ... def __getattr__(self, name): ... def main(): ... 
mypy-0.761/mypy/typeshed/third_party/2/tornado/httpserver.pyi0000644€tŠÔÚ€2›s®0000000277713576752252030646 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado import httputil from tornado.tcpserver import TCPServer from tornado.util import Configurable class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate): def __init__(self, *args, **kwargs) -> None: ... request_callback: Any no_keep_alive: Any xheaders: Any protocol: Any conn_params: Any def initialize(self, request_callback, no_keep_alive=..., io_loop=..., xheaders=..., ssl_options=..., protocol=..., decompress_request=..., chunk_size=..., max_header_size=..., idle_connection_timeout=..., body_timeout=..., max_body_size=..., max_buffer_size=...): ... @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def close_all_connections(self): ... def handle_stream(self, stream, address): ... def start_request(self, server_conn, request_conn): ... def on_close(self, server_conn): ... class _HTTPRequestContext: address: Any protocol: Any address_family: Any remote_ip: Any def __init__(self, stream, address, protocol) -> None: ... class _ServerRequestAdapter(httputil.HTTPMessageDelegate): server: Any connection: Any request: Any delegate: Any def __init__(self, server, server_conn, request_conn) -> None: ... def headers_received(self, start_line, headers): ... def data_received(self, chunk): ... def finish(self): ... def on_connection_close(self): ... HTTPRequest: Any mypy-0.761/mypy/typeshed/third_party/2/tornado/httputil.pyi0000644€tŠÔÚ€2›s®0000000517113576752252030304 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, NamedTuple from tornado.util import ObjectDict class SSLError(Exception): ... class _NormalizedHeaderCache(Dict[Any, Any]): size: Any queue: Any def __init__(self, size) -> None: ... def __missing__(self, key): ... 
class HTTPHeaders(Dict[Any, Any]): def __init__(self, *args, **kwargs) -> None: ... def add(self, name, value): ... def get_list(self, name): ... def get_all(self): ... def parse_line(self, line): ... @classmethod def parse(cls, headers): ... def __setitem__(self, name, value): ... def __getitem__(self, name): ... def __delitem__(self, name): ... def __contains__(self, name): ... def get(self, name, default=...): ... def update(self, *args, **kwargs): ... def copy(self): ... __copy__: Any def __deepcopy__(self, memo_dict): ... class HTTPServerRequest: method: Any uri: Any version: Any headers: Any body: Any remote_ip: Any protocol: Any host: Any files: Any connection: Any arguments: Any query_arguments: Any body_arguments: Any def __init__(self, method=..., uri=..., version=..., headers=..., body=..., host=..., files=..., connection=..., start_line=...) -> None: ... def supports_http_1_1(self): ... @property def cookies(self): ... def write(self, chunk, callback=...): ... def finish(self): ... def full_url(self): ... def request_time(self): ... def get_ssl_certificate(self, binary_form=...): ... class HTTPInputError(Exception): ... class HTTPOutputError(Exception): ... class HTTPServerConnectionDelegate: def start_request(self, server_conn, request_conn): ... def on_close(self, server_conn): ... class HTTPMessageDelegate: def headers_received(self, start_line, headers): ... def data_received(self, chunk): ... def finish(self): ... def on_connection_close(self): ... class HTTPConnection: def write_headers(self, start_line, headers, chunk=..., callback=...): ... def write(self, chunk, callback=...): ... def finish(self): ... def url_concat(url, args): ... class HTTPFile(ObjectDict): ... def parse_body_arguments(content_type, body, arguments, files, headers=...): ... def parse_multipart_form_data(boundary, data, arguments, files): ... def format_timestamp(ts): ... 
class RequestStartLine(NamedTuple): method: str path: str version: str def parse_request_start_line(line): ... class ResponseStartLine(NamedTuple): version: str code: str reason: str def parse_response_start_line(line): ... def doctests(): ... def split_host_and_port(netloc): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/ioloop.pyi0000644€tŠÔÚ€2›s®0000000535513576752252027734 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado.util import Configurable signal: Any class TimeoutError(Exception): ... class IOLoop(Configurable): NONE: Any READ: Any WRITE: Any ERROR: Any @staticmethod def instance(): ... @staticmethod def initialized(): ... def install(self): ... @staticmethod def clear_instance(): ... @staticmethod def current(instance=...): ... def make_current(self): ... @staticmethod def clear_current(): ... @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def initialize(self, make_current=...): ... def close(self, all_fds=...): ... def add_handler(self, fd, handler, events): ... def update_handler(self, fd, events): ... def remove_handler(self, fd): ... def set_blocking_signal_threshold(self, seconds, action): ... def set_blocking_log_threshold(self, seconds): ... def log_stack(self, signal, frame): ... def start(self): ... def stop(self): ... def run_sync(self, func, timeout=...): ... def time(self): ... def add_timeout(self, deadline, callback, *args, **kwargs): ... def call_later(self, delay, callback, *args, **kwargs): ... def call_at(self, when, callback, *args, **kwargs): ... def remove_timeout(self, timeout): ... def add_callback(self, callback, *args, **kwargs): ... def add_callback_from_signal(self, callback, *args, **kwargs): ... def spawn_callback(self, callback, *args, **kwargs): ... def add_future(self, future, callback): ... def handle_callback_exception(self, callback): ... def split_fd(self, fd): ... def close_fd(self, fd): ... 
class PollIOLoop(IOLoop): time_func: Any def initialize(self, impl, time_func=..., **kwargs): ... def close(self, all_fds=...): ... def add_handler(self, fd, handler, events): ... def update_handler(self, fd, events): ... def remove_handler(self, fd): ... def set_blocking_signal_threshold(self, seconds, action): ... def start(self): ... def stop(self): ... def time(self): ... def call_at(self, deadline, callback, *args, **kwargs): ... def remove_timeout(self, timeout): ... def add_callback(self, callback, *args, **kwargs): ... def add_callback_from_signal(self, callback, *args, **kwargs): ... class _Timeout: deadline: Any callback: Any tiebreaker: Any def __init__(self, deadline, callback, io_loop) -> None: ... def __lt__(self, other): ... def __le__(self, other): ... class PeriodicCallback: callback: Any callback_time: Any io_loop: Any def __init__(self, callback, callback_time, io_loop=...) -> None: ... def start(self): ... def stop(self): ... def is_running(self): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/locks.pyi0000644€tŠÔÚ€2›s®0000000237713576752252027547 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class _TimeoutGarbageCollector: def __init__(self): ... class Condition(_TimeoutGarbageCollector): io_loop: Any def __init__(self): ... def wait(self, timeout: Optional[Any] = ...): ... def notify(self, n: int = ...): ... def notify_all(self): ... class Event: def __init__(self): ... def is_set(self): ... def set(self): ... def clear(self): ... def wait(self, timeout: Optional[Any] = ...): ... class _ReleasingContextManager: def __init__(self, obj): ... def __enter__(self): ... def __exit__(self, exc_type, exc_val, exc_tb): ... class Semaphore(_TimeoutGarbageCollector): def __init__(self, value: int = ...): ... def release(self): ... def acquire(self, timeout: Optional[Any] = ...): ... def __enter__(self): ... __exit__: Any def __aenter__(self): ... def __aexit__(self, typ, value, tb): ... 
class BoundedSemaphore(Semaphore): def __init__(self, value: int = ...): ... def release(self): ... class Lock: def __init__(self): ... def acquire(self, timeout: Optional[Any] = ...): ... def release(self): ... def __enter__(self): ... __exit__: Any def __aenter__(self): ... def __aexit__(self, typ, value, tb): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/netutil.pyi0000644€tŠÔÚ€2›s®0000000250513576752252030111 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado.util import Configurable ssl: Any certifi: Any xrange: Any ssl_match_hostname: Any SSLCertificateError: Any def bind_sockets(port, address=..., family=..., backlog=..., flags=...): ... def bind_unix_socket(file, mode=..., backlog=...): ... def add_accept_handler(sock, callback, io_loop=...): ... def is_valid_ip(ip): ... class Resolver(Configurable): @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def resolve(self, host, port, family=..., callback=...): ... def close(self): ... class ExecutorResolver(Resolver): io_loop: Any executor: Any close_executor: Any def initialize(self, io_loop=..., executor=..., close_executor=...): ... def close(self): ... def resolve(self, host, port, family=...): ... class BlockingResolver(ExecutorResolver): def initialize(self, io_loop=...): ... class ThreadedResolver(ExecutorResolver): def initialize(self, io_loop=..., num_threads=...): ... class OverrideResolver(Resolver): resolver: Any mapping: Any def initialize(self, resolver, mapping): ... def close(self): ... def resolve(self, host, port, *args, **kwargs): ... def ssl_options_to_context(ssl_options): ... def ssl_wrap_socket(socket, ssl_options, server_hostname=..., **kwargs): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/process.pyi0000644€tŠÔÚ€2›s®0000000122613576752252030102 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional long = int CalledProcessError: Any def cpu_count() -> int: ... 
def fork_processes(num_processes, max_restarts: int = ...) -> Optional[int]: ... def task_id() -> int: ... class Subprocess: STREAM: Any = ... io_loop: Any = ... stdin: Any = ... stdout: Any = ... stderr: Any = ... proc: Any = ... returncode: Any = ... def __init__(self, *args, **kwargs) -> None: ... def set_exit_callback(self, callback): ... def wait_for_exit(self, raise_error: bool = ...): ... @classmethod def initialize(cls, io_loop: Optional[Any] = ...): ... @classmethod def uninitialize(cls): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/tcpserver.pyi0000644€tŠÔÚ€2›s®0000000105413576752252030440 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any ssl: Any class TCPServer: io_loop: Any ssl_options: Any max_buffer_size: Any read_chunk_size: Any def __init__(self, io_loop=..., ssl_options=..., max_buffer_size=..., read_chunk_size=...) -> None: ... def listen(self, port, address=...): ... def add_sockets(self, sockets): ... def add_socket(self, socket): ... def bind(self, port, address=..., family=..., backlog=...): ... def start(self, num_processes=...): ... def stop(self): ... def handle_stream(self, stream, address): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/testing.pyi0000644€tŠÔÚ€2›s®0000000351113576752252030100 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Generator, Optional, overload import unittest import logging AsyncHTTPClient: Any gen: Any HTTPServer: Any IOLoop: Any netutil: Any SimpleAsyncHTTPClient: Any def get_unused_port(): ... def bind_unused_port(): ... class AsyncTestCase(unittest.TestCase): def __init__(self, *args, **kwargs): ... io_loop: Any def setUp(self): ... def tearDown(self): ... def get_new_ioloop(self): ... def run(self, result: Optional[Any] = ...): ... def stop(self, _arg: Optional[Any] = ..., **kwargs): ... def wait(self, condition: Optional[Any] = ..., timeout: float = ...): ... 
class AsyncHTTPTestCase(AsyncTestCase): http_client: Any http_server: Any def setUp(self): ... def get_http_client(self): ... def get_http_server(self): ... def get_app(self): ... def fetch(self, path, **kwargs): ... def get_httpserver_options(self): ... def get_http_port(self): ... def get_protocol(self): ... def get_url(self, path): ... def tearDown(self): ... class AsyncHTTPSTestCase(AsyncHTTPTestCase): def get_http_client(self): ... def get_httpserver_options(self): ... def get_ssl_options(self): ... def get_protocol(self): ... @overload def gen_test(*, timeout: Optional[float] = ...) -> Callable[[Callable[..., Generator[Any, Any, Any]]], Callable[..., None]]: ... @overload def gen_test(func: Callable[..., Generator[Any, Any, Any]]) -> Callable[..., None]: ... class LogTrapTestCase(unittest.TestCase): def run(self, result: Optional[Any] = ...): ... class ExpectLog(logging.Filter): logger: Any regex: Any required: Any matched: Any def __init__(self, logger, regex, required: bool = ...): ... def filter(self, record): ... def __enter__(self): ... def __exit__(self, typ, value, tb): ... def main(**kwargs): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/util.pyi0000644€tŠÔÚ€2›s®0000000206013576752252027376 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict xrange: Any class ObjectDict(Dict[Any, Any]): def __getattr__(self, name): ... def __setattr__(self, name, value): ... class GzipDecompressor: decompressobj: Any def __init__(self) -> None: ... def decompress(self, value, max_length=...): ... @property def unconsumed_tail(self): ... def flush(self): ... unicode_type: Any basestring_type: Any def import_object(name): ... bytes_type: Any def errno_from_exception(e): ... class Configurable: def __new__(cls, *args, **kwargs): ... @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def initialize(self): ... @classmethod def configure(cls, impl, **kwargs): ... 
@classmethod def configured_class(cls): ... class ArgReplacer: name: Any arg_pos: Any def __init__(self, func, name) -> None: ... def get_old_value(self, args, kwargs, default=...): ... def replace(self, new_value, args, kwargs): ... def timedelta_to_seconds(td): ... def doctests(): ... mypy-0.761/mypy/typeshed/third_party/2/tornado/web.pyi0000644€tŠÔÚ€2›s®0000002100513576752252027176 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado import httputil MIN_SUPPORTED_SIGNED_VALUE_VERSION: Any MAX_SUPPORTED_SIGNED_VALUE_VERSION: Any DEFAULT_SIGNED_VALUE_VERSION: Any DEFAULT_SIGNED_VALUE_MIN_VERSION: Any class RequestHandler: SUPPORTED_METHODS: Any application: Any request: Any path_args: Any path_kwargs: Any ui: Any def __init__(self, application, request, **kwargs) -> None: ... def initialize(self): ... @property def settings(self): ... def head(self, *args, **kwargs): ... def get(self, *args, **kwargs): ... def post(self, *args, **kwargs): ... def delete(self, *args, **kwargs): ... def patch(self, *args, **kwargs): ... def put(self, *args, **kwargs): ... def options(self, *args, **kwargs): ... def prepare(self): ... def on_finish(self): ... def on_connection_close(self): ... def clear(self): ... def set_default_headers(self): ... def set_status(self, status_code, reason=...): ... def get_status(self): ... def set_header(self, name, value): ... def add_header(self, name, value): ... def clear_header(self, name): ... def get_argument(self, name, default=..., strip=...): ... def get_arguments(self, name, strip=...): ... def get_body_argument(self, name, default=..., strip=...): ... def get_body_arguments(self, name, strip=...): ... def get_query_argument(self, name, default=..., strip=...): ... def get_query_arguments(self, name, strip=...): ... def decode_argument(self, value, name=...): ... @property def cookies(self): ... def get_cookie(self, name, default=...): ... 
def set_cookie(self, name, value, domain=..., expires=..., path=..., expires_days=..., **kwargs): ... def clear_cookie(self, name, path=..., domain=...): ... def clear_all_cookies(self, path=..., domain=...): ... def set_secure_cookie(self, name, value, expires_days=..., version=..., **kwargs): ... def create_signed_value(self, name, value, version=...): ... def get_secure_cookie(self, name, value=..., max_age_days=..., min_version=...): ... def get_secure_cookie_key_version(self, name, value=...): ... def redirect(self, url, permanent=..., status=...): ... def write(self, chunk): ... def render(self, template_name, **kwargs): ... def render_string(self, template_name, **kwargs): ... def get_template_namespace(self): ... def create_template_loader(self, template_path): ... def flush(self, include_footers=..., callback=...): ... def finish(self, chunk=...): ... def send_error(self, status_code=..., **kwargs): ... def write_error(self, status_code, **kwargs): ... @property def locale(self): ... @locale.setter def locale(self, value): ... def get_user_locale(self): ... def get_browser_locale(self, default=...): ... @property def current_user(self): ... @current_user.setter def current_user(self, value): ... def get_current_user(self): ... def get_login_url(self): ... def get_template_path(self): ... @property def xsrf_token(self): ... def check_xsrf_cookie(self): ... def xsrf_form_html(self): ... def static_url(self, path, include_host=..., **kwargs): ... def require_setting(self, name, feature=...): ... def reverse_url(self, name, *args): ... def compute_etag(self): ... def set_etag_header(self): ... def check_etag_header(self): ... def data_received(self, chunk): ... def log_exception(self, typ, value, tb): ... def asynchronous(method): ... def stream_request_body(cls): ... def removeslash(method): ... def addslash(method): ... 
class Application(httputil.HTTPServerConnectionDelegate): transforms: Any handlers: Any named_handlers: Any default_host: Any settings: Any ui_modules: Any ui_methods: Any def __init__(self, handlers=..., default_host=..., transforms=..., **settings) -> None: ... def listen(self, port, address=..., **kwargs): ... def add_handlers(self, host_pattern, host_handlers): ... def add_transform(self, transform_class): ... def start_request(self, server_conn, request_conn): ... def __call__(self, request): ... def reverse_url(self, name, *args): ... def log_request(self, handler): ... class _RequestDispatcher(httputil.HTTPMessageDelegate): application: Any connection: Any request: Any chunks: Any handler_class: Any handler_kwargs: Any path_args: Any path_kwargs: Any def __init__(self, application, connection) -> None: ... def headers_received(self, start_line, headers): ... stream_request_body: Any def set_request(self, request): ... def data_received(self, data): ... def finish(self): ... def on_connection_close(self): ... handler: Any def execute(self): ... class HTTPError(Exception): status_code: Any log_message: Any args: Any reason: Any def __init__(self, status_code, log_message=..., *args, **kwargs) -> None: ... class Finish(Exception): ... class MissingArgumentError(HTTPError): arg_name: Any def __init__(self, arg_name) -> None: ... class ErrorHandler(RequestHandler): def initialize(self, status_code): ... def prepare(self): ... def check_xsrf_cookie(self): ... class RedirectHandler(RequestHandler): def initialize(self, url, permanent=...): ... def get(self): ... class StaticFileHandler(RequestHandler): CACHE_MAX_AGE: Any root: Any default_filename: Any def initialize(self, path, default_filename=...): ... @classmethod def reset(cls): ... def head(self, path): ... path: Any absolute_path: Any modified: Any def get(self, path, include_body=...): ... def compute_etag(self): ... def set_headers(self): ... def should_return_304(self): ... 
@classmethod def get_absolute_path(cls, root, path): ... def validate_absolute_path(self, root, absolute_path): ... @classmethod def get_content(cls, abspath, start=..., end=...): ... @classmethod def get_content_version(cls, abspath): ... def get_content_size(self): ... def get_modified_time(self): ... def get_content_type(self): ... def set_extra_headers(self, path): ... def get_cache_time(self, path, modified, mime_type): ... @classmethod def make_static_url(cls, settings, path, include_version=...): ... def parse_url_path(self, url_path): ... @classmethod def get_version(cls, settings, path): ... class FallbackHandler(RequestHandler): fallback: Any def initialize(self, fallback): ... def prepare(self): ... class OutputTransform: def __init__(self, request) -> None: ... def transform_first_chunk(self, status_code, headers, chunk, finishing): ... def transform_chunk(self, chunk, finishing): ... class GZipContentEncoding(OutputTransform): CONTENT_TYPES: Any MIN_LENGTH: Any def __init__(self, request) -> None: ... def transform_first_chunk(self, status_code, headers, chunk, finishing): ... def transform_chunk(self, chunk, finishing): ... def authenticated(method): ... class UIModule: handler: Any request: Any ui: Any locale: Any def __init__(self, handler) -> None: ... @property def current_user(self): ... def render(self, *args, **kwargs): ... def embedded_javascript(self): ... def javascript_files(self): ... def embedded_css(self): ... def css_files(self): ... def html_head(self): ... def html_body(self): ... def render_string(self, path, **kwargs): ... class _linkify(UIModule): def render(self, text, **kwargs): ... class _xsrf_form_html(UIModule): def render(self): ... class TemplateModule(UIModule): def __init__(self, handler) -> None: ... def render(self, path, **kwargs): ... def embedded_javascript(self): ... def javascript_files(self): ... def embedded_css(self): ... def css_files(self): ... def html_head(self): ... def html_body(self): ... 
class _UIModuleNamespace: handler: Any ui_modules: Any def __init__(self, handler, ui_modules) -> None: ... def __getitem__(self, key): ... def __getattr__(self, key): ... class URLSpec: regex: Any handler_class: Any kwargs: Any name: Any def __init__(self, pattern, handler, kwargs=..., name=...) -> None: ... def reverse(self, *args): ... url: Any def create_signed_value(secret, name, value, version=..., clock=..., key_version=...): ... def decode_signed_value(secret, name, value, max_age_days=..., clock=..., min_version=...): ... def get_signature_key_version(value): ... mypy-0.761/mypy/typeshed/third_party/2and3/0000755€tŠÔÚ€2›s®0000000000013576752267025006 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/0000755€tŠÔÚ€2›s®0000000000013576752267026266 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/0000755€tŠÔÚ€2›s®0000000000013576752267027500 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/AES.pyi0000644€tŠÔÚ€2›s®0000000061513576752252030627 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__: str class AESCipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> AESCipher: ... MODE_ECB: int MODE_CBC: int MODE_CFB: int MODE_PGP: int MODE_OFB: int MODE_CTR: int MODE_OPENPGP: int block_size: int key_size: int mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/ARC2.pyi0000644€tŠÔÚ€2›s®0000000061513576752252030706 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__: str class RC2Cipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> RC2Cipher: ... 
MODE_ECB: int MODE_CBC: int MODE_CFB: int MODE_PGP: int MODE_OFB: int MODE_CTR: int MODE_OPENPGP: int block_size: int key_size: int mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/ARC4.pyi0000644€tŠÔÚ€2›s®0000000056013576752252030707 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text __revision__: str class ARC4Cipher: block_size: int key_size: int def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def encrypt(self, plaintext): ... def decrypt(self, ciphertext): ... def new(key: Union[bytes, Text], *args, **kwargs) -> ARC4Cipher: ... block_size: int key_size: int mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/Blowfish.pyi0000644€tŠÔÚ€2›s®0000000062713576752252031777 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__: str class BlowfishCipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> BlowfishCipher: ... MODE_ECB: int MODE_CBC: int MODE_CFB: int MODE_PGP: int MODE_OFB: int MODE_CTR: int MODE_OPENPGP: int block_size: int key_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/CAST.pyi0000644€tŠÔÚ€2›s®0000000062513576752252030752 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__: str class CAST128Cipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> CAST128Cipher: ... 
MODE_ECB: int MODE_CBC: int MODE_CFB: int MODE_PGP: int MODE_OFB: int MODE_CTR: int MODE_OPENPGP: int block_size: int key_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/DES.pyi0000644€tŠÔÚ€2›s®0000000061513576752252030632 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__: str class DESCipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> DESCipher: ... MODE_ECB: int MODE_CBC: int MODE_CFB: int MODE_PGP: int MODE_OFB: int MODE_CTR: int MODE_OPENPGP: int block_size: int key_size: int mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/DES3.pyi0000644€tŠÔÚ€2›s®0000000062013576752252030711 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__: str class DES3Cipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> DES3Cipher: ... MODE_ECB: int MODE_CBC: int MODE_CFB: int MODE_PGP: int MODE_OFB: int MODE_CTR: int MODE_OPENPGP: int block_size: int key_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/PKCS1_OAEP.pyi0000644€tŠÔÚ€2›s®0000000077013576752252031646 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Union, Text from Crypto.PublicKey.RSA import _RSAobj class PKCS1OAEP_Cipher: def __init__(self, key: _RSAobj, hashAlgo: Any, mgfunc: Any, label: Any) -> None: ... def can_encrypt(self): ... def can_decrypt(self): ... def encrypt(self, message: Union[bytes, Text]) -> bytes: ... def decrypt(self, ct: bytes) -> bytes: ... def new(key: _RSAobj, hashAlgo: Optional[Any] = ..., mgfunc: Optional[Any] = ..., label: Any = ...) -> PKCS1OAEP_Cipher: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/PKCS1_v1_5.pyi0000644€tŠÔÚ€2›s®0000000063613576752252031675 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from Crypto.PublicKey.RSA import _RSAobj class PKCS115_Cipher: def __init__(self, key: _RSAobj) -> None: ... def can_encrypt(self) -> bool: ... def can_decrypt(self) -> bool: ... rf: Any def encrypt(self, message: Union[bytes, Text]) -> bytes: ... def decrypt(self, ct: bytes, sentinel: Any) -> bytes: ... def new(key: _RSAobj) -> PKCS115_Cipher: ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/XOR.pyi0000644€tŠÔÚ€2›s®0000000063413576752252030670 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text __revision__: str class XORCipher: block_size: int key_size: int def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def encrypt(self, plaintext: Union[bytes, Text]) -> bytes: ... def decrypt(self, ciphertext: bytes) -> bytes: ... def new(key: Union[bytes, Text], *args, **kwargs) -> XORCipher: ... block_size: int key_size: int mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/__init__.pyi0000644€tŠÔÚ€2›s®0000000021613576752252031753 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # AES # ARC2 # ARC4 # Blowfish # CAST # DES # DES3 # PKCS1_OAEP # PKCS1_v1_5 # XOR mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Cipher/blockalgo.pyi0000644€tŠÔÚ€2›s®0000000064113576752252032153 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text MODE_ECB: int MODE_CBC: int MODE_CFB: int MODE_PGP: int MODE_OFB: int MODE_CTR: int MODE_OPENPGP: int class BlockAlgo: mode: int block_size: int IV: Any def __init__(self, factory: Any, key: Union[bytes, Text], *args, **kwargs) -> None: ... def encrypt(self, plaintext: Union[bytes, Text]) -> bytes: ... def decrypt(self, ciphertext: bytes) -> bytes: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/0000755€tŠÔÚ€2›s®0000000000013576752267027151 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/HMAC.pyi0000644€tŠÔÚ€2›s®0000000064413576752252030402 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional digest_size: Any class HMAC: digest_size: Any digestmod: Any outer: Any inner: Any def __init__(self, key, msg: Optional[Any] = ..., digestmod: Optional[Any] = ...) -> None: ... def update(self, msg): ... def copy(self): ... def digest(self): ... def hexdigest(self): ... def new(key, msg: Optional[Any] = ..., digestmod: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/MD2.pyi0000644€tŠÔÚ€2›s®0000000050713576752252030252 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class MD2Hash(HashAlgo): oid: Any digest_size: int block_size: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/MD4.pyi0000644€tŠÔÚ€2›s®0000000050713576752252030254 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class MD4Hash(HashAlgo): oid: Any digest_size: int block_size: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/MD5.pyi0000644€tŠÔÚ€2›s®0000000050713576752252030255 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class MD5Hash(HashAlgo): oid: Any digest_size: int block_size: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... 
def new(data: Optional[Any] = ...): ... digest_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/RIPEMD.pyi0000644€tŠÔÚ€2›s®0000000051513576752252030647 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class RIPEMD160Hash(HashAlgo): oid: Any digest_size: int block_size: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/SHA.pyi0000644€tŠÔÚ€2›s®0000000051013576752252030275 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA1Hash(HashAlgo): oid: Any digest_size: int block_size: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/SHA224.pyi0000644€tŠÔÚ€2›s®0000000051213576752252030527 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA224Hash(HashAlgo): oid: Any digest_size: int block_size: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/SHA256.pyi0000644€tŠÔÚ€2›s®0000000051213576752252030534 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA256Hash(HashAlgo): oid: Any digest_size: int block_size: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... 
digest_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/SHA384.pyi0000644€tŠÔÚ€2›s®0000000051213576752252030536 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA384Hash(HashAlgo): oid: Any digest_size: int block_size: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/SHA512.pyi0000644€tŠÔÚ€2›s®0000000051213576752252030527 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA512Hash(HashAlgo): oid: Any digest_size: int block_size: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/__init__.pyi0000644€tŠÔÚ€2›s®0000000020713576752252031424 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # HMAC # MD2 # MD4 # MD5 # RIPEMD # SHA # SHA224 # SHA256 # SHA384 # SHA512 mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Hash/hashalgo.pyi0000644€tŠÔÚ€2›s®0000000051013576752252031450 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class HashAlgo: digest_size: Any block_size: Any def __init__(self, hashFactory, data: Optional[Any] = ...) -> None: ... def update(self, data): ... def digest(self): ... def hexdigest(self): ... def copy(self): ... def new(self, data: Optional[Any] = ...): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Protocol/0000755€tŠÔÚ€2›s®0000000000013576752267030067 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Protocol/AllOrNothing.pyi0000644€tŠÔÚ€2›s®0000000040613576752252033144 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional __revision__: str def isInt(x): ... class AllOrNothing: def __init__(self, ciphermodule, mode: Optional[Any] = ..., IV: Optional[Any] = ...) -> None: ... def digest(self, text): ... def undigest(self, blocks): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Protocol/Chaffing.pyi0000644€tŠÔÚ€2›s®0000000022013576752252032303 0ustar jukkaDROPBOX\Domain Users00000000000000__revision__: str class Chaff: def __init__(self, factor: float = ..., blocksper: int = ...) -> None: ... def chaff(self, blocks): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Protocol/KDF.pyi0000644€tŠÔÚ€2›s®0000000041713576752252031212 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash import SHA as SHA1 __revision__: str def PBKDF1(password, salt, dkLen, count: int = ..., hashAlgo: Optional[Any] = ...): ... def PBKDF2(password, salt, dkLen: int = ..., count: int = ..., prf: Optional[Any] = ...): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Protocol/__init__.pyi0000644€tŠÔÚ€2›s®0000000011513576752252032340 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # AllOrNothing # Chaffing # KDF mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/PublicKey/0000755€tŠÔÚ€2›s®0000000000013576752267030155 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/PublicKey/DSA.pyi0000644€tŠÔÚ€2›s®0000000134513576752252031304 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from .pubkey import pubkey class _DSAobj(pubkey): keydata: Any implementation: Any key: Any def __init__(self, implementation, key) -> None: ... def __getattr__(self, attrname): ... def sign(self, M, K): ... def verify(self, M, signature): ... def has_private(self): ... def size(self): ... def can_blind(self): ... def can_encrypt(self): ... def can_sign(self): ... def publickey(self): ... class DSAImplementation: error: Any def __init__(self, **kwargs) -> None: ... def generate(self, bits, randfunc: Optional[Any] = ..., progress_func: Optional[Any] = ...): ... def construct(self, tup): ... generate: Any construct: Any error: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/PublicKey/ElGamal.pyi0000644€tŠÔÚ€2›s®0000000102313576752252032170 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.PublicKey.pubkey import pubkey from Crypto.PublicKey.pubkey import * # noqa: F403 class error(Exception): ... def generate(bits, randfunc, progress_func: Optional[Any] = ...): ... def construct(tup): ... class ElGamalobj(pubkey): keydata: Any def encrypt(self, plaintext, K): ... def decrypt(self, ciphertext): ... def sign(self, M, K): ... def verify(self, M, signature): ... def size(self): ... def has_private(self): ... def publickey(self): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/PublicKey/RSA.pyi0000644€tŠÔÚ€2›s®0000000210413576752252031314 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Union, Text from .pubkey import pubkey class _RSAobj(pubkey): keydata: Any implementation: Any key: Any def __init__(self, implementation, key, randfunc: Optional[Any] = ...) -> None: ... def __getattr__(self, attrname): ... def encrypt(self, plaintext, K): ... def decrypt(self, ciphertext): ... def sign(self, M, K): ... def verify(self, M, signature): ... def has_private(self): ... def size(self): ... def can_blind(self): ... def can_encrypt(self): ... def can_sign(self): ... def publickey(self): ... def exportKey(self, format: str = ..., passphrase: Optional[Any] = ..., pkcs: int = ...): ... class RSAImplementation: error: Any def __init__(self, **kwargs) -> None: ... def generate(self, bits, randfunc: Optional[Any] = ..., progress_func: Optional[Any] = ..., e: int = ...): ... def construct(self, tup): ... def importKey(self, externKey: Any, passphrase: Union[None, bytes, Text] = ...) -> _RSAobj: ... generate: Any construct: Any importKey: Any error: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/PublicKey/__init__.pyi0000644€tŠÔÚ€2›s®0000000010313576752252032423 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # DSA # ElGamal # RSA mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/PublicKey/pubkey.pyi0000644€tŠÔÚ€2›s®0000000114313576752252032170 0ustar jukkaDROPBOX\Domain Users00000000000000from Crypto.Util.number import * # noqa: F403 __revision__: str class pubkey: def __init__(self) -> None: ... def encrypt(self, plaintext, K): ... def decrypt(self, ciphertext): ... def sign(self, M, K): ... def verify(self, M, signature): ... def validate(self, M, signature): ... def blind(self, M, B): ... def unblind(self, M, B): ... def can_sign(self): ... def can_encrypt(self): ... def can_blind(self): ... def size(self): ... 
def has_private(self): ... def publickey(self): ... def __eq__(self, other): ... def __ne__(self, other): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/0000755€tŠÔÚ€2›s®0000000000013576752267027506 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/Fortuna/0000755€tŠÔÚ€2›s®0000000000013576752267031124 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/Fortuna/FortunaAccumulator.pyi0000644€tŠÔÚ€2›s®0000000107213576752252035457 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any __revision__: str class FortunaPool: digest_size: Any def __init__(self) -> None: ... def append(self, data): ... def digest(self): ... def hexdigest(self): ... length: int def reset(self): ... def which_pools(r): ... class FortunaAccumulator: min_pool_size: int reseed_interval: float reseed_count: int generator: Any last_reseed: Any pools: Any def __init__(self) -> None: ... def random_data(self, bytes): ... def add_random_event(self, source_number, pool_number, data): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/Fortuna/FortunaGenerator.pyi0000644€tŠÔÚ€2›s®0000000054513576752252035132 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any __revision__: str class AESGenerator: block_size: Any key_size: int max_blocks_per_request: Any counter: Any key: Any block_size_shift: Any blocks_per_key: Any max_bytes_per_request: Any def __init__(self) -> None: ... def reseed(self, seed): ... def pseudo_random_data(self, bytes): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/Fortuna/SHAd256.pyi0000644€tŠÔÚ€2›s®0000000047213576752252032660 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class _SHAd256: digest_size: Any def __init__(self, internal_api_check, sha256_hash_obj) -> None: ... def copy(self): ... def digest(self): ... def hexdigest(self): ... def update(self, data): ... 
digest_size: Any def new(data: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/Fortuna/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252033366 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/OSRNG/0000755€tŠÔÚ€2›s®0000000000013576752267030376 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/OSRNG/__init__.pyi0000644€tŠÔÚ€2›s®0000000002213576752252032644 0ustar jukkaDROPBOX\Domain Users00000000000000__revision__: str mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/OSRNG/fallback.pyi0000644€tŠÔÚ€2›s®0000000016413576752252032653 0ustar jukkaDROPBOX\Domain Users00000000000000from .rng_base import BaseRNG class PythonOSURandomRNG(BaseRNG): name: str def __init__(self) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/OSRNG/posix.pyi0000644€tŠÔÚ€2›s®0000000025613576752252032260 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from .rng_base import BaseRNG class DevURandomRNG(BaseRNG): name: str def __init__(self, devname: Optional[Any] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/OSRNG/rng_base.pyi0000644€tŠÔÚ€2›s®0000000040313576752252032670 0ustar jukkaDROPBOX\Domain Users00000000000000__revision__: str class BaseRNG: closed: bool def __init__(self) -> None: ... def __del__(self): ... def __enter__(self): ... def __exit__(self): ... def close(self): ... def flush(self): ... def read(self, N: int = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/__init__.pyi0000644€tŠÔÚ€2›s®0000000003613576752252031761 0ustar jukkaDROPBOX\Domain Users00000000000000def new(*args, **kwargs): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Random/random.pyi0000644€tŠÔÚ€2›s®0000000066313576752252031510 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class StrongRandom: def __init__(self, rng: Optional[Any] = ..., randfunc: Optional[Any] = ...) -> None: ... def getrandbits(self, k): ... def randrange(self, *args): ... def randint(self, a, b): ... def choice(self, seq): ... def shuffle(self, x): ... def sample(self, population, k): ... getrandbits: Any randrange: Any randint: Any choice: Any shuffle: Any sample: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Signature/0000755€tŠÔÚ€2›s®0000000000013576752267030227 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Signature/PKCS1_PSS.pyi0000644€tŠÔÚ€2›s®0000000043613576752252032315 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class PSS_SigScheme: def __init__(self, key, mgfunc, saltLen) -> None: ... def can_sign(self): ... def sign(self, mhash): ... def verify(self, mhash, S): ... def new(key, mgfunc: Optional[Any] = ..., saltLen: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Signature/PKCS1_v1_5.pyi0000644€tŠÔÚ€2›s®0000000026413576752252032421 0ustar jukkaDROPBOX\Domain Users00000000000000class PKCS115_SigScheme: def __init__(self, key) -> None: ... def can_sign(self): ... def sign(self, mhash): ... def verify(self, mhash, S): ... def new(key): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Signature/__init__.pyi0000644€tŠÔÚ€2›s®0000000010413576752252032476 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # PKCS1_PSS # PKCS1_v1_5 mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Util/0000755€tŠÔÚ€2›s®0000000000013576752267027203 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Util/Counter.pyi0000644€tŠÔÚ€2›s®0000000033013576752252031333 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def new(nbits, prefix: Any = ..., suffix: Any = ..., initial_value: int = ..., overflow: int = ..., little_endian: bool = ..., allow_wraparound: bool = ..., disable_shortcut: bool = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Util/RFC1751.pyi0000644€tŠÔÚ€2›s®0000000017613576752252030654 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any __revision__: str binary: Any def key_to_english(key): ... def english_to_key(s): ... wordlist: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Util/__init__.pyi0000644€tŠÔÚ€2›s®0000000013713576752252031460 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # RFC1751 # asn1 # number # randpool # strxor mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Util/asn1.pyi0000644€tŠÔÚ€2›s®0000000260413576752252030564 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class DerObject: typeTags: Any typeTag: Any payload: Any def __init__(self, ASN1Type: Optional[Any] = ..., payload: Any = ...) -> None: ... def isType(self, ASN1Type): ... def encode(self): ... def decode(self, derEle, noLeftOvers: int = ...): ... class DerInteger(DerObject): value: Any def __init__(self, value: int = ...) -> None: ... payload: Any def encode(self): ... def decode(self, derEle, noLeftOvers: int = ...): ... class DerSequence(DerObject): def __init__(self, startSeq: Optional[Any] = ...) -> None: ... 
def __delitem__(self, n): ... def __getitem__(self, n): ... def __setitem__(self, key, value): ... def __setslice__(self, i, j, sequence): ... def __delslice__(self, i, j): ... def __getslice__(self, i, j): ... def __len__(self): ... def append(self, item): ... def hasInts(self): ... def hasOnlyInts(self): ... payload: Any def encode(self): ... def decode(self, derEle, noLeftOvers: int = ...): ... class DerOctetString(DerObject): payload: Any def __init__(self, value: Any = ...) -> None: ... def decode(self, derEle, noLeftOvers: int = ...): ... class DerNull(DerObject): def __init__(self) -> None: ... class DerObjectId(DerObject): def __init__(self) -> None: ... def decode(self, derEle, noLeftOvers: int = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Util/number.pyi0000644€tŠÔÚ€2›s®0000000144113576752252031210 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from warnings import warn as _warn __revision__: str bignum: Any def size(N): ... def getRandomNumber(N, randfunc: Optional[Any] = ...): ... def getRandomInteger(N, randfunc: Optional[Any] = ...): ... def getRandomRange(a, b, randfunc: Optional[Any] = ...): ... def getRandomNBitInteger(N, randfunc: Optional[Any] = ...): ... def GCD(x, y): ... def inverse(u, v): ... def getPrime(N, randfunc: Optional[Any] = ...): ... def getStrongPrime(N, e: int = ..., false_positive_prob: float = ..., randfunc: Optional[Any] = ...): ... def isPrime(N, false_positive_prob: float = ..., randfunc: Optional[Any] = ...): ... def long_to_bytes(n, blocksize: int = ...): ... def bytes_to_long(s): ... def long2str(n, blocksize: int = ...): ... def str2long(s): ... 
sieve_base: Any mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Util/randpool.pyi0000644€tŠÔÚ€2›s®0000000102513576752252031534 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional __revision__: str class RandomPool: bytes: Any bits: Any entropy: Any def __init__(self, numbytes: int = ..., cipher: Optional[Any] = ..., hash: Optional[Any] = ..., file: Optional[Any] = ...) -> None: ... def get_bytes(self, N): ... def randomize(self, N: int = ...): ... def stir(self, s: str = ...): ... def stir_n(self, N: int = ...): ... def add_event(self, s: str = ...): ... def getBytes(self, N): ... def addEvent(self, event, s: str = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/Util/strxor.pyi0000644€tŠÔÚ€2›s®0000000010413576752252031254 0ustar jukkaDROPBOX\Domain Users00000000000000def strxor(*args, **kwargs): ... def strxor_c(*args, **kwargs): ... mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/__init__.pyi0000644€tŠÔÚ€2›s®0000000015513576752252030543 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # Cipher # Hash # Protocol # PublicKey # Signature # Util mypy-0.761/mypy/typeshed/third_party/2and3/Crypto/pct_warnings.pyi0000644€tŠÔÚ€2›s®0000000063413576752252031504 0ustar jukkaDROPBOX\Domain Users00000000000000class CryptoWarning(Warning): ... class CryptoDeprecationWarning(DeprecationWarning, CryptoWarning): ... class CryptoRuntimeWarning(RuntimeWarning, CryptoWarning): ... class RandomPool_DeprecationWarning(CryptoDeprecationWarning): ... class ClockRewindWarning(CryptoRuntimeWarning): ... class GetRandomNumber_DeprecationWarning(CryptoDeprecationWarning): ... class PowmInsecureWarning(CryptoRuntimeWarning): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/atomicwrites/0000755€tŠÔÚ€2›s®0000000000013576752267027520 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/atomicwrites/__init__.pyi0000644€tŠÔÚ€2›s®0000000154613576752252032002 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, AnyStr, Callable, ContextManager, Generic, IO, Optional, Text, Type, Union def replace_atomic(src: AnyStr, dst: AnyStr) -> None: ... def move_atomic(src: AnyStr, dst: AnyStr) -> None: ... class AtomicWriter(object): def __init__(self, path: Union[Text, bytes], mode: Text = ..., overwrite: bool = ...) -> None: ... def open(self) -> ContextManager[IO[Any]]: ... def _open(self, get_fileobject: Callable[..., IO[AnyStr]]) -> ContextManager[IO[AnyStr]]: ... def get_fileobject(self, dir: Union[None, Text, bytes] = ..., **kwargs: Any) -> IO[Any]: ... def sync(self, f: IO[Any]) -> None: ... def commit(self, f: IO[Any]) -> None: ... def rollback(self, f: IO[Any]) -> None: ... def atomic_write( path: Union[Text, bytes], writer_cls: Type[AtomicWriter] = ..., **cls_kwargs: object, ) -> ContextManager[IO[Any]]: ... mypy-0.761/mypy/typeshed/third_party/2and3/attr/0000755€tŠÔÚ€2›s®0000000000013576752267025760 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/attr/__init__.pyi0000644€tŠÔÚ€2›s®0000001713113576752252030237 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, Callable, Dict, Generic, List, Optional, Sequence, Mapping, Tuple, Type, TypeVar, Union, overload, ) # `import X as X` is required to make these public from . import exceptions as exceptions from . import filters as filters from . import converters as converters from . 
import validators as validators _T = TypeVar("_T") _C = TypeVar("_C", bound=type) _ValidatorType = Callable[[Any, Attribute[_T], _T], Any] _ConverterType = Callable[[Any], _T] _FilterType = Callable[[Attribute[_T], _T], bool] # FIXME: in reality, if multiple validators are passed they must be in a list or tuple, # but those are invariant and so would prevent subtypes of _ValidatorType from working # when passed in a list or tuple. _ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] # _make -- NOTHING: object # NOTE: Factory lies about its return type to make this possible: `x: List[int] = Factory(list)` # Work around mypy issue #4554 in the common case by using an overload. @overload def Factory(factory: Callable[[], _T]) -> _T: ... @overload def Factory( factory: Union[Callable[[Any], _T], Callable[[], _T]], takes_self: bool = ..., ) -> _T: ... class Attribute(Generic[_T]): name: str default: Optional[_T] validator: Optional[_ValidatorType[_T]] repr: bool cmp: bool hash: Optional[bool] init: bool converter: Optional[_ConverterType[_T]] metadata: Dict[Any, Any] type: Optional[Type[_T]] kw_only: bool def __lt__(self, x: Attribute[_T]) -> bool: ... def __le__(self, x: Attribute[_T]) -> bool: ... def __gt__(self, x: Attribute[_T]) -> bool: ... def __ge__(self, x: Attribute[_T]) -> bool: ... # NOTE: We had several choices for the annotation to use for type arg: # 1) Type[_T] # - Pros: Handles simple cases correctly # - Cons: Might produce less informative errors in the case of conflicting TypeVars # e.g. `attr.ib(default='bad', type=int)` # 2) Callable[..., _T] # - Pros: Better error messages than #1 for conflicting TypeVars # - Cons: Terrible error messages for validator checks. # e.g. attr.ib(type=int, validator=validate_str) # -> error: Cannot infer function type argument # 3) type (and do all of the work in the mypy plugin) # - Pros: Simple here, and we could customize the plugin with our own errors. 
# - Cons: Would need to write mypy plugin code to handle all the cases. # We chose option #1. # `attr` lies about its return type to make the following possible: # attr() -> Any # attr(8) -> int # attr(validator=) -> Whatever the callable expects. # This makes this type of assignments possible: # x: int = attr(8) # # This form catches explicit None or no default but with no other arguments returns Any. @overload def attrib( default: None = ..., validator: None = ..., repr: bool = ..., cmp: bool = ..., hash: Optional[bool] = ..., init: bool = ..., convert: None = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: None = ..., converter: None = ..., factory: None = ..., kw_only: bool = ..., ) -> Any: ... # This form catches an explicit None or no default and infers the type from the other arguments. @overload def attrib( default: None = ..., validator: Optional[_ValidatorArgType[_T]] = ..., repr: bool = ..., cmp: bool = ..., hash: Optional[bool] = ..., init: bool = ..., convert: Optional[_ConverterType[_T]] = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: Optional[Type[_T]] = ..., converter: Optional[_ConverterType[_T]] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., ) -> _T: ... # This form catches an explicit default argument. @overload def attrib( default: _T, validator: Optional[_ValidatorArgType[_T]] = ..., repr: bool = ..., cmp: bool = ..., hash: Optional[bool] = ..., init: bool = ..., convert: Optional[_ConverterType[_T]] = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: Optional[Type[_T]] = ..., converter: Optional[_ConverterType[_T]] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., ) -> _T: ... # This form covers type=non-Type: e.g. 
forward references (str), Any @overload def attrib( default: Optional[_T] = ..., validator: Optional[_ValidatorArgType[_T]] = ..., repr: bool = ..., cmp: bool = ..., hash: Optional[bool] = ..., init: bool = ..., convert: Optional[_ConverterType[_T]] = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: object = ..., converter: Optional[_ConverterType[_T]] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., ) -> Any: ... @overload def attrs( maybe_cls: _C, these: Optional[Dict[str, Any]] = ..., repr_ns: Optional[str] = ..., repr: bool = ..., cmp: bool = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., auto_exc: bool = ..., ) -> _C: ... @overload def attrs( maybe_cls: None = ..., these: Optional[Dict[str, Any]] = ..., repr_ns: Optional[str] = ..., repr: bool = ..., cmp: bool = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., auto_exc: bool = ..., ) -> Callable[[_C], _C]: ... # TODO: add support for returning NamedTuple from the mypy plugin class _Fields(Tuple[Attribute[Any], ...]): def __getattr__(self, name: str) -> Attribute[Any]: ... def fields(cls: type) -> _Fields: ... def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... def validate(inst: Any) -> None: ... # TODO: add support for returning a proper attrs class from the mypy plugin # we use Any instead of _CountingAttr so that e.g. `make_class('Foo', [attr.ib()])` is valid def make_class( name: str, attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], bases: Tuple[type, ...] 
= ..., repr_ns: Optional[str] = ..., repr: bool = ..., cmp: bool = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., auto_exc: bool = ..., ) -> type: ... # _funcs -- # TODO: add support for returning TypedDict from the mypy plugin # FIXME: asdict/astuple do not honor their factory args. waiting on one of these: # https://github.com/python/mypy/issues/4236 # https://github.com/python/typing/issues/253 def asdict( inst: Any, recurse: bool = ..., filter: Optional[_FilterType[Any]] = ..., dict_factory: Type[Mapping[Any, Any]] = ..., retain_collection_types: bool = ..., ) -> Dict[str, Any]: ... # TODO: add support for returning NamedTuple from the mypy plugin def astuple( inst: Any, recurse: bool = ..., filter: Optional[_FilterType[Any]] = ..., tuple_factory: Type[Sequence[Any]] = ..., retain_collection_types: bool = ..., ) -> Tuple[Any, ...]: ... def has(cls: type) -> bool: ... def assoc(inst: _T, **changes: Any) -> _T: ... def evolve(inst: _T, **changes: Any) -> _T: ... # _config -- def set_run_validators(run: bool) -> None: ... def get_run_validators() -> bool: ... # aliases -- s = attributes = attrs ib = attr = attrib dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) mypy-0.761/mypy/typeshed/third_party/2and3/attr/converters.pyi0000644€tŠÔÚ€2›s®0000000053713576752252030674 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, Optional, Callable, overload from . import _ConverterType _T = TypeVar("_T") def optional( converter: _ConverterType[_T] ) -> _ConverterType[Optional[_T]]: ... @overload def default_if_none(default: _T) -> _ConverterType[_T]: ... @overload def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType[_T]: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/attr/exceptions.pyi0000644€tŠÔÚ€2›s®0000000040113576752252030651 0ustar jukkaDROPBOX\Domain Users00000000000000class FrozenInstanceError(AttributeError): msg: str = ... class AttrsAttributeNotFoundError(ValueError): ... class NotAnAttrsClassError(ValueError): ... class DefaultAlreadySetError(RuntimeError): ... class UnannotatedAttributeError(RuntimeError): ... mypy-0.761/mypy/typeshed/third_party/2and3/attr/filters.pyi0000644€tŠÔÚ€2›s®0000000032613576752252030146 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Union, Any from . import Attribute, _FilterType def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... mypy-0.761/mypy/typeshed/third_party/2and3/attr/validators.pyi0000644€tŠÔÚ€2›s®0000000160113576752252030643 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Container, List, Union, TypeVar, Type, Any, Optional, Tuple from . import _ValidatorType _T = TypeVar("_T") def instance_of( type: Union[Tuple[Type[_T], ...], Type[_T]] ) -> _ValidatorType[_T]: ... def provides(interface: Any) -> _ValidatorType[Any]: ... def optional( validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] ) -> _ValidatorType[Optional[_T]]: ... def in_(options: Container[_T]) -> _ValidatorType[_T]: ... def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... def deep_iterable( member_validator: _ValidatorType[_T], iterable_validator: Optional[_ValidatorType[_T]], ) -> _ValidatorType[_T]: ... def deep_mapping( key_validator: _ValidatorType[_T], value_validator: _ValidatorType[_T], mapping_validator: Optional[_ValidatorType[_T]], ) -> _ValidatorType[_T]: ... def is_callable() -> _ValidatorType[_T]: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/backports/0000755€tŠÔÚ€2›s®0000000000013576752267026776 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/backports/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031240 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/backports/ssl_match_hostname.pyi0000644€tŠÔÚ€2›s®0000000012113576752252033360 0ustar jukkaDROPBOX\Domain Users00000000000000class CertificateError(ValueError): ... def match_hostname(cert, hostname): ... mypy-0.761/mypy/typeshed/third_party/2and3/backports_abc.pyi0000644€tŠÔÚ€2›s®0000000033413576752252030320 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def mk_gen(): ... def mk_awaitable(): ... def mk_coroutine(): ... Generator: Any Awaitable: Any Coroutine: Any def isawaitable(obj): ... PATCHED: Any def patch(patch_inspect: bool = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/bleach/0000755€tŠÔÚ€2›s®0000000000013576752267026224 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/bleach/__init__.pyi0000644€tŠÔÚ€2›s®0000000155513576752252030506 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Container, Iterable, Optional, Text from bleach.linkifier import DEFAULT_CALLBACKS as DEFAULT_CALLBACKS, Linker as Linker from bleach.sanitizer import ( ALLOWED_ATTRIBUTES as ALLOWED_ATTRIBUTES, ALLOWED_PROTOCOLS as ALLOWED_PROTOCOLS, ALLOWED_STYLES as ALLOWED_STYLES, ALLOWED_TAGS as ALLOWED_TAGS, Cleaner as Cleaner, ) from .linkifier import _Callback __releasedate__: Text __version__: Text VERSION: Any # packaging.version.Version def clean( text: Text, tags: Container[Text] = ..., attributes: Any = ..., styles: Container[Text] = ..., protocols: Container[Text] = ..., strip: bool = ..., strip_comments: bool = ..., ) -> Text: ... 
def linkify( text: Text, callbacks: Iterable[_Callback] = ..., skip_tags: Optional[Container[Text]] = ..., parse_email: bool = ..., ) -> Text: ... mypy-0.761/mypy/typeshed/third_party/2and3/bleach/callbacks.pyi0000644€tŠÔÚ€2›s®0000000031613576752252030660 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import MutableMapping, Any, Text _Attrs = MutableMapping[Any, Text] def nofollow(attrs: _Attrs, new: bool = ...) -> _Attrs: ... def target_blank(attrs: _Attrs, new: bool = ...) -> _Attrs: ... mypy-0.761/mypy/typeshed/third_party/2and3/bleach/linkifier.pyi0000644€tŠÔÚ€2›s®0000000172213576752252030717 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Container, Iterable, List, MutableMapping, Optional, Pattern, Protocol, Text _Attrs = MutableMapping[Any, Text] class _Callback(Protocol): def __call__(self, attrs: _Attrs, new: bool = ...) -> _Attrs: ... DEFAULT_CALLBACKS: List[_Callback] TLDS: List[Text] def build_url_re(tlds: Iterable[Text] = ..., protocols: Iterable[Text] = ...) -> Pattern[Text]: ... URL_RE: Pattern[Text] PROTO_RE: Pattern[Text] EMAIL_RE: Pattern[Text] class Linker(object): def __init__( self, callbacks: Iterable[_Callback] = ..., skip_tags: Optional[Container[Text]] = ..., parse_email: bool = ..., url_re: Pattern[Text] = ..., email_re: Pattern[Text] = ..., recognized_tags: Optional[Container[Text]] = ..., ) -> None: ... def linkify(self, text: Text) -> Text: ... class LinkifyFilter(object): # TODO: derives from html5lib.Filter def __getattr__(self, item: str) -> Any: ... 
# incomplete mypy-0.761/mypy/typeshed/third_party/2and3/bleach/sanitizer.pyi0000644€tŠÔÚ€2›s®0000000220213576752252030745 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Container, Dict, Iterable, List, Optional, Pattern, Text, Type, Union ALLOWED_TAGS: List[Text] ALLOWED_ATTRIBUTES: Dict[Text, List[Text]] ALLOWED_STYLES: List[Text] ALLOWED_PROTOCOLS: List[Text] INVISIBLE_CHARACTERS: Text INVISIBLE_CHARACTERS_RE: Pattern[Text] INVISIBLE_REPLACEMENT_CHAR: Text # A html5lib Filter class _Filter = Any class Cleaner(object): def __init__( self, tags: Container[Text] = ..., attributes: Any = ..., styles: Container[Text] = ..., protocols: Container[Text] = ..., strip: bool = ..., strip_comments: bool = ..., filters: Optional[Iterable[_Filter]] = ..., ) -> None: ... def clean(self, text: Text) -> Text: ... _AttributeFilter = Callable[[Text, Text, Text], bool] _AttributeDict = Dict[Text, Union[Container[Text], _AttributeFilter]] def attribute_filter_factory(attributes: Union[_AttributeFilter, _AttributeDict, Container[Text]]) -> _AttributeFilter: ... class BleachSanitizerFilter(object): # TODO: derives from html5lib.sanitizer.Filter def __getattr__(self, item: str) -> Any: ... # incomplete mypy-0.761/mypy/typeshed/third_party/2and3/bleach/utils.pyi0000644€tŠÔÚ€2›s®0000000043613576752252030104 0ustar jukkaDROPBOX\Domain Users00000000000000from collections import OrderedDict from typing import overload, Mapping, Any, Text @overload def alphabetize_attributes(attrs: None) -> None: ... @overload def alphabetize_attributes(attrs: Mapping[Any, Text]) -> OrderedDict[Any, Text]: ... def force_unicode(text: Text) -> Text: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/boto/0000755€tŠÔÚ€2›s®0000000000013576752267025751 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/boto/__init__.pyi0000644€tŠÔÚ€2›s®0000001551213576752252030231 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text import logging from .s3.connection import S3Connection Version: Any UserAgent: Any config: Any BUCKET_NAME_RE: Any TOO_LONG_DNS_NAME_COMP: Any GENERATION_RE: Any VERSION_RE: Any ENDPOINTS_PATH: Any def init_logging(): ... class NullHandler(logging.Handler): def emit(self, record): ... log: Any perflog: Any def set_file_logger(name, filepath, level: Any = ..., format_string: Optional[Any] = ...): ... def set_stream_logger(name, level: Any = ..., format_string: Optional[Any] = ...): ... def connect_sqs(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_s3(aws_access_key_id: Optional[Text] = ..., aws_secret_access_key: Optional[Text] = ..., **kwargs) -> S3Connection: ... def connect_gs(gs_access_key_id: Optional[Any] = ..., gs_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_ec2(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_elb(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_autoscale(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudwatch(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_sdb(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_fps(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_mturk(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... 
def connect_cloudfront(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_vpc(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_rds(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_rds2(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_emr(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_sns(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_iam(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_route53(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudformation(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_euca(host: Optional[Any] = ..., aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., port: int = ..., path: str = ..., is_secure: bool = ..., **kwargs): ... def connect_glacier(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_ec2_endpoint(url, aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_walrus(host: Optional[Any] = ..., aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., port: int = ..., path: str = ..., is_secure: bool = ..., **kwargs): ... def connect_ses(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_sts(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... 
def connect_ia(ia_access_key_id: Optional[Any] = ..., ia_secret_access_key: Optional[Any] = ..., is_secure: bool = ..., **kwargs): ... def connect_dynamodb(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_swf(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudsearch(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudsearch2(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., sign_request: bool = ..., **kwargs): ... def connect_cloudsearchdomain(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_beanstalk(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_elastictranscoder(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_opsworks(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_redshift(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_support(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudtrail(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_directconnect(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_kinesis(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_logs(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_route53domains(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... 
def connect_cognito_identity(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cognito_sync(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_kms(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_awslambda(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_codedeploy(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_configservice(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudhsm(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_ec2containerservice(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_machinelearning(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def storage_uri(uri_str, default_scheme: str = ..., debug: int = ..., validate: bool = ..., bucket_storage_uri_class: Any = ..., suppress_consec_slashes: bool = ..., is_latest: bool = ...): ... def storage_uri_for_key(key): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/auth.pyi0000644€tŠÔÚ€2›s®0000001005413576752252027427 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from boto.auth_handler import AuthHandler SIGV4_DETECT: Any class HmacKeys: host: Any def __init__(self, host, config, provider) -> None: ... def update_provider(self, provider): ... def algorithm(self): ... def sign_string(self, string_to_sign): ... class AnonAuthHandler(AuthHandler, HmacKeys): capability: Any def __init__(self, host, config, provider) -> None: ... def add_auth(self, http_request, **kwargs): ... 
class HmacAuthV1Handler(AuthHandler, HmacKeys): capability: Any def __init__(self, host, config, provider) -> None: ... def update_provider(self, provider): ... def add_auth(self, http_request, **kwargs): ... class HmacAuthV2Handler(AuthHandler, HmacKeys): capability: Any def __init__(self, host, config, provider) -> None: ... def update_provider(self, provider): ... def add_auth(self, http_request, **kwargs): ... class HmacAuthV3Handler(AuthHandler, HmacKeys): capability: Any def __init__(self, host, config, provider) -> None: ... def add_auth(self, http_request, **kwargs): ... class HmacAuthV3HTTPHandler(AuthHandler, HmacKeys): capability: Any def __init__(self, host, config, provider) -> None: ... def headers_to_sign(self, http_request): ... def canonical_headers(self, headers_to_sign): ... def string_to_sign(self, http_request): ... def add_auth(self, req, **kwargs): ... class HmacAuthV4Handler(AuthHandler, HmacKeys): capability: Any service_name: Any region_name: Any def __init__(self, host, config, provider, service_name: Optional[Any] = ..., region_name: Optional[Any] = ...) -> None: ... def headers_to_sign(self, http_request): ... def host_header(self, host, http_request): ... def query_string(self, http_request): ... def canonical_query_string(self, http_request): ... def canonical_headers(self, headers_to_sign): ... def signed_headers(self, headers_to_sign): ... def canonical_uri(self, http_request): ... def payload(self, http_request): ... def canonical_request(self, http_request): ... def scope(self, http_request): ... def split_host_parts(self, host): ... def determine_region_name(self, host): ... def determine_service_name(self, host): ... def credential_scope(self, http_request): ... def string_to_sign(self, http_request, canonical_request): ... def signature(self, http_request, string_to_sign): ... def add_auth(self, req, **kwargs): ... 
class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler): capability: Any region_name: Any def __init__(self, *args, **kwargs) -> None: ... def clean_region_name(self, region_name): ... def canonical_uri(self, http_request): ... def canonical_query_string(self, http_request): ... def host_header(self, host, http_request): ... def headers_to_sign(self, http_request): ... def determine_region_name(self, host): ... def determine_service_name(self, host): ... def mangle_path_and_params(self, req): ... def payload(self, http_request): ... def add_auth(self, req, **kwargs): ... def presign(self, req, expires, iso_date: Optional[Any] = ...): ... class STSAnonHandler(AuthHandler): capability: Any def add_auth(self, http_request, **kwargs): ... class QuerySignatureHelper(HmacKeys): def add_auth(self, http_request, **kwargs): ... class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler): SignatureVersion: int capability: Any class QuerySignatureV1AuthHandler(QuerySignatureHelper, AuthHandler): SignatureVersion: int capability: Any def __init__(self, *args, **kw) -> None: ... class QuerySignatureV2AuthHandler(QuerySignatureHelper, AuthHandler): SignatureVersion: int capability: Any class POSTPathQSV2AuthHandler(QuerySignatureV2AuthHandler, AuthHandler): capability: Any def add_auth(self, req, **kwargs): ... def get_auth_handler(host, config, provider, requested_capability: Optional[Any] = ...): ... def detect_potential_sigv4(func): ... def detect_potential_s3sigv4(func): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/auth_handler.pyi0000644€tŠÔÚ€2›s®0000000037213576752252031126 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from boto.plugin import Plugin class NotReadyToAuthenticate(Exception): ... class AuthHandler(Plugin): capability: Any def __init__(self, host, config, provider) -> None: ... def add_auth(self, http_request): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/boto/compat.pyi0000644€tŠÔÚ€2›s®0000000047113576752252027753 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any from base64 import encodestring as encodebytes from six.moves import http_client expanduser: Any if sys.version_info >= (3, 0): StandardError = Exception else: from __builtin__ import StandardError as StandardError long_type: Any unquote_str: Any parse_qs_safe: Any mypy-0.761/mypy/typeshed/third_party/2and3/boto/connection.pyi0000644€tŠÔÚ€2›s®0000001215613576752252030632 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Optional, Text from six.moves import http_client HAVE_HTTPS_CONNECTION: bool ON_APP_ENGINE: Any PORTS_BY_SECURITY: Any DEFAULT_CA_CERTS_FILE: Any class HostConnectionPool: queue: Any def __init__(self) -> None: ... def size(self): ... def put(self, conn): ... def get(self): ... def clean(self): ... class ConnectionPool: CLEAN_INTERVAL: float STALE_DURATION: float host_to_pool: Any last_clean_time: float mutex: Any def __init__(self) -> None: ... def size(self): ... def get_http_connection(self, host, port, is_secure): ... def put_http_connection(self, host, port, is_secure, conn): ... def clean(self): ... class HTTPRequest: method: Any protocol: Any host: Any port: Any path: Any auth_path: Any params: Any headers: Any body: Any def __init__(self, method, protocol, host, port, path, auth_path, params, headers, body) -> None: ... def authorize(self, connection, **kwargs): ... class HTTPResponse(http_client.HTTPResponse): def __init__(self, *args, **kwargs) -> None: ... def read(self, amt: Optional[Any] = ...): ... 
class AWSAuthConnection: suppress_consec_slashes: Any num_retries: int is_secure: Any https_validate_certificates: Any ca_certificates_file: Any port: Any http_exceptions: Any http_unretryable_exceptions: Any socket_exception_values: Any https_connection_factory: Any protocol: str host: Any path: Any debug: Any host_header: Any http_connection_kwargs: Any provider: Any auth_service_name: Any request_hook: Any def __init__(self, host, aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., is_secure: bool = ..., port: Optional[Any] = ..., proxy: Optional[Any] = ..., proxy_port: Optional[Any] = ..., proxy_user: Optional[Any] = ..., proxy_pass: Optional[Any] = ..., debug: int = ..., https_connection_factory: Optional[Any] = ..., path: str = ..., provider: str = ..., security_token: Optional[Any] = ..., suppress_consec_slashes: bool = ..., validate_certs: bool = ..., profile_name: Optional[Any] = ...) -> None: ... auth_region_name: Any @property def connection(self): ... @property def aws_access_key_id(self): ... @property def gs_access_key_id(self) -> Any: ... access_key: Any @property def aws_secret_access_key(self): ... @property def gs_secret_access_key(self): ... secret_key: Any @property def profile_name(self): ... def get_path(self, path: str = ...): ... def server_name(self, port: Optional[Any] = ...): ... proxy: Any proxy_port: Any proxy_user: Any proxy_pass: Any no_proxy: Any use_proxy: Any def handle_proxy(self, proxy, proxy_port, proxy_user, proxy_pass): ... def get_http_connection(self, host, port, is_secure): ... def skip_proxy(self, host): ... def new_http_connection(self, host, port, is_secure): ... def put_http_connection(self, host, port, is_secure, connection): ... def proxy_ssl(self, host: Optional[Any] = ..., port: Optional[Any] = ...): ... def prefix_proxy_to_path(self, path, host: Optional[Any] = ...): ... def get_proxy_auth_header(self): ... def get_proxy_url_with_auth(self): ... 
def set_host_header(self, request): ... def set_request_hook(self, hook): ... def build_base_http_request(self, method, path, auth_path, params: Optional[Any] = ..., headers: Optional[Any] = ..., data: str = ..., host: Optional[Any] = ...): ... def make_request(self, method, path, headers: Optional[Any] = ..., data: str = ..., host: Optional[Any] = ..., auth_path: Optional[Any] = ..., sender: Optional[Any] = ..., override_num_retries: Optional[Any] = ..., params: Optional[Any] = ..., retry_handler: Optional[Any] = ...): ... def close(self): ... class AWSQueryConnection(AWSAuthConnection): APIVersion: str ResponseError: Any def __init__(self, aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., is_secure: bool = ..., port: Optional[Any] = ..., proxy: Optional[Any] = ..., proxy_port: Optional[Any] = ..., proxy_user: Optional[Any] = ..., proxy_pass: Optional[Any] = ..., host: Optional[Any] = ..., debug: int = ..., https_connection_factory: Optional[Any] = ..., path: str = ..., security_token: Optional[Any] = ..., validate_certs: bool = ..., profile_name: Optional[Any] = ..., provider: str = ...) -> None: ... def get_utf8_value(self, value): ... def make_request(self, action, params: Optional[Any] = ..., path: str = ..., verb: str = ..., *args, **kwargs): ... # type: ignore # https://github.com/python/mypy/issues/1237 def build_list_params(self, params, items, label): ... def build_complex_list_params(self, params, items, label, names): ... def get_list(self, action, params, markers, path: str = ..., parent: Optional[Any] = ..., verb: str = ...): ... def get_object(self, action, params, cls, path: str = ..., parent: Optional[Any] = ..., verb: str = ...): ... def get_status(self, action, params, path: str = ..., parent: Optional[Any] = ..., verb: str = ...): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/boto/ec2/0000755€tŠÔÚ€2›s®0000000000013576752267026422 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/boto/ec2/__init__.pyi0000644€tŠÔÚ€2›s®0000000025213576752252030675 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any RegionData: Any def regions(**kw_params): ... def connect_to_region(region_name, **kw_params): ... def get_region(region_name, **kw_params): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/elb/0000755€tŠÔÚ€2›s®0000000000013576752267026513 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/boto/elb/__init__.pyi0000644€tŠÔÚ€2›s®0000000465713576752252031003 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from boto.connection import AWSQueryConnection RegionData: Any def regions(): ... def connect_to_region(region_name, **kw_params): ... class ELBConnection(AWSQueryConnection): APIVersion: Any DefaultRegionName: Any DefaultRegionEndpoint: Any region: Any def __init__(self, aws_access_key_id=..., aws_secret_access_key=..., is_secure=..., port=..., proxy=..., proxy_port=..., proxy_user=..., proxy_pass=..., debug=..., https_connection_factory=..., region=..., path=..., security_token=..., validate_certs=..., profile_name=...) -> None: ... def build_list_params(self, params, items, label): ... def get_all_load_balancers(self, load_balancer_names=..., marker=...): ... def create_load_balancer(self, name, zones, listeners=..., subnets=..., security_groups=..., scheme=..., complex_listeners=...): ... def create_load_balancer_listeners(self, name, listeners=..., complex_listeners=...): ... def delete_load_balancer(self, name): ... def delete_load_balancer_listeners(self, name, ports): ... def enable_availability_zones(self, load_balancer_name, zones_to_add): ... def disable_availability_zones(self, load_balancer_name, zones_to_remove): ... 
def modify_lb_attribute(self, load_balancer_name, attribute, value): ... def get_all_lb_attributes(self, load_balancer_name): ... def get_lb_attribute(self, load_balancer_name, attribute): ... def register_instances(self, load_balancer_name, instances): ... def deregister_instances(self, load_balancer_name, instances): ... def describe_instance_health(self, load_balancer_name, instances=...): ... def configure_health_check(self, name, health_check): ... def set_lb_listener_SSL_certificate(self, lb_name, lb_port, ssl_certificate_id): ... def create_app_cookie_stickiness_policy(self, name, lb_name, policy_name): ... def create_lb_cookie_stickiness_policy(self, cookie_expiration_period, lb_name, policy_name): ... def create_lb_policy(self, lb_name, policy_name, policy_type, policy_attributes): ... def delete_lb_policy(self, lb_name, policy_name): ... def set_lb_policies_of_listener(self, lb_name, lb_port, policies): ... def set_lb_policies_of_backend_server(self, lb_name, instance_port, policies): ... def apply_security_groups_to_lb(self, name, security_groups): ... def attach_lb_to_subnets(self, name, subnets): ... def detach_lb_from_subnets(self, name, subnets): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/exception.pyi0000644€tŠÔÚ€2›s®0000001073013576752252030465 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from boto.compat import StandardError class BotoClientError(StandardError): reason: Any def __init__(self, reason, *args) -> None: ... class SDBPersistenceError(StandardError): ... class StoragePermissionsError(BotoClientError): ... class S3PermissionsError(StoragePermissionsError): ... class GSPermissionsError(StoragePermissionsError): ... class BotoServerError(StandardError): status: Any reason: Any body: Any request_id: Any error_code: Any message: str box_usage: Any def __init__(self, status, reason, body: Optional[Any] = ..., *args) -> None: ... def __getattr__(self, name): ... def __setattr__(self, name, value): ... 
def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class ConsoleOutput: parent: Any instance_id: Any timestamp: Any comment: Any output: Any def __init__(self, parent: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class StorageCreateError(BotoServerError): bucket: Any def __init__(self, status, reason, body: Optional[Any] = ...) -> None: ... def endElement(self, name, value, connection): ... class S3CreateError(StorageCreateError): ... class GSCreateError(StorageCreateError): ... class StorageCopyError(BotoServerError): ... class S3CopyError(StorageCopyError): ... class GSCopyError(StorageCopyError): ... class SQSError(BotoServerError): detail: Any type: Any def __init__(self, status, reason, body: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class SQSDecodeError(BotoClientError): message: Any def __init__(self, reason, message) -> None: ... class StorageResponseError(BotoServerError): resource: Any def __init__(self, status, reason, body: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class S3ResponseError(StorageResponseError): ... class GSResponseError(StorageResponseError): ... class EC2ResponseError(BotoServerError): errors: Any def __init__(self, status, reason, body: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... request_id: Any def endElement(self, name, value, connection): ... class JSONResponseError(BotoServerError): status: Any reason: Any body: Any error_message: Any error_code: Any def __init__(self, status, reason, body: Optional[Any] = ..., *args) -> None: ... class DynamoDBResponseError(JSONResponseError): ... class SWFResponseError(JSONResponseError): ... 
class EmrResponseError(BotoServerError): ... class _EC2Error: connection: Any error_code: Any error_message: Any def __init__(self, connection: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class SDBResponseError(BotoServerError): ... class AWSConnectionError(BotoClientError): ... class StorageDataError(BotoClientError): ... class S3DataError(StorageDataError): ... class GSDataError(StorageDataError): ... class InvalidUriError(Exception): message: Any def __init__(self, message) -> None: ... class InvalidAclError(Exception): message: Any def __init__(self, message) -> None: ... class InvalidCorsError(Exception): message: Any def __init__(self, message) -> None: ... class NoAuthHandlerFound(Exception): ... class InvalidLifecycleConfigError(Exception): message: Any def __init__(self, message) -> None: ... class ResumableTransferDisposition: START_OVER: str WAIT_BEFORE_RETRY: str ABORT_CUR_PROCESS: str ABORT: str class ResumableUploadException(Exception): message: Any disposition: Any def __init__(self, message, disposition) -> None: ... class ResumableDownloadException(Exception): message: Any disposition: Any def __init__(self, message, disposition) -> None: ... class TooManyRecordsException(Exception): message: Any def __init__(self, message) -> None: ... class PleaseRetryException(Exception): message: Any response: Any def __init__(self, message, response: Optional[Any] = ...) -> None: ... class InvalidInstanceMetadataError(Exception): MSG: str def __init__(self, msg) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/boto/kms/0000755€tŠÔÚ€2›s®0000000000013576752267026543 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/boto/kms/__init__.pyi0000644€tŠÔÚ€2›s®0000000023413576752252031016 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List import boto.regioninfo def regions() -> List[boto.regioninfo.RegionInfo]: ... def connect_to_region(region_name, **kw_params): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/kms/exceptions.pyi0000644€tŠÔÚ€2›s®0000000147513576752252031450 0ustar jukkaDROPBOX\Domain Users00000000000000from boto.exception import BotoServerError class InvalidGrantTokenException(BotoServerError): ... class DisabledException(BotoServerError): ... class LimitExceededException(BotoServerError): ... class DependencyTimeoutException(BotoServerError): ... class InvalidMarkerException(BotoServerError): ... class AlreadyExistsException(BotoServerError): ... class InvalidCiphertextException(BotoServerError): ... class KeyUnavailableException(BotoServerError): ... class InvalidAliasNameException(BotoServerError): ... class UnsupportedOperationException(BotoServerError): ... class InvalidArnException(BotoServerError): ... class KMSInternalException(BotoServerError): ... class InvalidKeyUsageException(BotoServerError): ... class MalformedPolicyDocumentException(BotoServerError): ... class NotFoundException(BotoServerError): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/kms/layer1.pyi0000644€tŠÔÚ€2›s®0000000705513576752252030464 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, List, Mapping, Optional, Type from boto.connection import AWSQueryConnection class KMSConnection(AWSQueryConnection): APIVersion: str DefaultRegionName: str DefaultRegionEndpoint: str ServiceName: str TargetPrefix: str ResponseError: Type[Exception] region: Any def __init__(self, **kwargs) -> None: ... 
def create_alias(self, alias_name: str, target_key_id: str) -> Optional[Dict[str, Any]]: ... def create_grant(self, key_id: str, grantee_principal: str, retiring_principal: Optional[str] = ..., operations: Optional[List[str]] = ..., constraints: Optional[Dict[str, Dict[str, str]]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def create_key(self, policy: Optional[str] = ..., description: Optional[str] = ..., key_usage: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def decrypt(self, ciphertext_blob: bytes, encryption_context: Optional[Mapping[str, Any]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def delete_alias(self, alias_name: str) -> Optional[Dict[str, Any]]: ... def describe_key(self, key_id: str) -> Optional[Dict[str, Any]]: ... def disable_key(self, key_id: str) -> Optional[Dict[str, Any]]: ... def disable_key_rotation(self, key_id: str) -> Optional[Dict[str, Any]]: ... def enable_key(self, key_id: str) -> Optional[Dict[str, Any]]: ... def enable_key_rotation(self, key_id: str) -> Optional[Dict[str, Any]]: ... def encrypt(self, key_id: str, plaintext: bytes, encryption_context: Optional[Mapping[str, Any]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def generate_data_key(self, key_id: str, encryption_context: Optional[Mapping[str, Any]] = ..., number_of_bytes: Optional[int] = ..., key_spec: Optional[str] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def generate_data_key_without_plaintext(self, key_id: str, encryption_context: Optional[Mapping[str, Any]] = ..., key_spec: Optional[str] = ..., number_of_bytes: Optional[int] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def generate_random(self, number_of_bytes: Optional[int] = ...) -> Optional[Dict[str, Any]]: ... def get_key_policy(self, key_id: str, policy_name: str) -> Optional[Dict[str, Any]]: ... 
def get_key_rotation_status(self, key_id: str) -> Optional[Dict[str, Any]]: ... def list_aliases(self, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def list_grants(self, key_id: str, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def list_key_policies(self, key_id: str, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def list_keys(self, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def put_key_policy(self, key_id: str, policy_name: str, policy: str) -> Optional[Dict[str, Any]]: ... def re_encrypt(self, ciphertext_blob: bytes, destination_key_id: str, source_encryption_context: Optional[Mapping[str, Any]] = ..., destination_encryption_context: Optional[Mapping[str, Any]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def retire_grant(self, grant_token: str) -> Optional[Dict[str, Any]]: ... def revoke_grant(self, key_id: str, grant_id: str) -> Optional[Dict[str, Any]]: ... def update_key_description(self, key_id: str, description: str) -> Optional[Dict[str, Any]]: ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/plugin.pyi0000644€tŠÔÚ€2›s®0000000035313576752252027765 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Plugin: capability: Any @classmethod def is_capable(cls, requested_capability): ... def get_plugin(cls, requested_capability: Optional[Any] = ...): ... def load_plugins(config): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/regioninfo.pyi0000644€tŠÔÚ€2›s®0000000121613576752252030625 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional def load_endpoint_json(path): ... def merge_endpoints(defaults, additions): ... def load_regions(): ... def get_regions(service_name, region_cls: Optional[Any] = ..., connection_cls: Optional[Any] = ...): ... 
class RegionInfo: connection: Any name: Any endpoint: Any connection_cls: Any def __init__(self, connection: Optional[Any] = ..., name: Optional[Any] = ..., endpoint: Optional[Any] = ..., connection_cls: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def connect(self, **kw_params): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/0000755€tŠÔÚ€2›s®0000000000013576752267026276 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/__init__.pyi0000644€tŠÔÚ€2›s®0000000075713576752252030563 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from .connection import S3Connection from boto.connection import AWSAuthConnection from boto.regioninfo import RegionInfo from typing import List, Type, Text class S3RegionInfo(RegionInfo): def connect(self, name: Optional[Text] = ..., endpoint: Optional[str] = ..., connection_cls: Optional[Type[AWSAuthConnection]] = ..., **kw_params) -> S3Connection: ... def regions() -> List[S3RegionInfo]: ... def connect_to_region(region_name: Text, **kw_params): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/acl.pyi0000644€tŠÔÚ€2›s®0000000312613576752252027554 0ustar jukkaDROPBOX\Domain Users00000000000000from .connection import S3Connection from .user import User from typing import Any, Dict, Optional, List, Text, Union CannedACLStrings: List[str] class Policy: parent: Any namespace: Any acl: ACL def __init__(self, parent: Optional[Any] = ...) -> None: ... owner: User def startElement(self, name: Text, attrs: Dict[str, Any], connection: S3Connection) -> Union[None, User, ACL]: ... def endElement(self, name: Text, value: Any, connection: S3Connection) -> None: ... def to_xml(self) -> str: ... class ACL: policy: Policy grants: List[Grant] def __init__(self, policy: Optional[Policy] = ...) -> None: ... def add_grant(self, grant: Grant) -> None: ... 
def add_email_grant(self, permission: Text, email_address: Text) -> None: ... def add_user_grant(self, permission: Text, user_id: Text, display_name: Optional[Text] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name: Text, value: Any, connection: S3Connection) -> None: ... def to_xml(self) -> str: ... class Grant: NameSpace: Text permission: Text id: Text display_name: Text uri: Text email_address: Text type: Text def __init__(self, permission: Optional[Text] = ..., type: Optional[Text] = ..., id: Optional[Text] = ..., display_name: Optional[Text] = ..., uri: Optional[Text] = ..., email_address: Optional[Text] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name: Text, value: Any, connection: S3Connection) -> None: ... def to_xml(self) -> str: ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/bucket.pyi0000644€tŠÔÚ€2›s®0000001753613576752252030304 0ustar jukkaDROPBOX\Domain Users00000000000000from .bucketlistresultset import BucketListResultSet from .connection import S3Connection from .key import Key from typing import Any, Dict, Optional, Text, Type, List class S3WebsiteEndpointTranslate: trans_region: Dict[str, str] @classmethod def translate_region(self, reg: Text) -> str: ... S3Permissions: List[str] class Bucket: LoggingGroup: str BucketPaymentBody: str VersioningBody: str VersionRE: str MFADeleteRE: str name: Text connection: S3Connection key_class: Type[Key] def __init__(self, connection: Optional[S3Connection] = ..., name: Optional[Text] = ..., key_class: Type[Key] = ...) -> None: ... def __iter__(self): ... def __contains__(self, key_name) -> bool: ... def startElement(self, name, attrs, connection): ... creation_date: Any def endElement(self, name, value, connection): ... def set_key_class(self, key_class): ... def lookup(self, key_name, headers: Optional[Dict[Text, Text]] = ...): ... 
def get_key(self, key_name, headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ..., validate: bool = ...) -> Key: ... def list(self, prefix: Text = ..., delimiter: Text = ..., marker: Text = ..., headers: Optional[Dict[Text, Text]] = ..., encoding_type: Optional[Any] = ...) -> BucketListResultSet: ... def list_versions(self, prefix: str = ..., delimiter: str = ..., key_marker: str = ..., version_id_marker: str = ..., headers: Optional[Dict[Text, Text]] = ..., encoding_type: Optional[Text] = ...) -> BucketListResultSet: ... def list_multipart_uploads(self, key_marker: str = ..., upload_id_marker: str = ..., headers: Optional[Dict[Text, Text]] = ..., encoding_type: Optional[Any] = ...): ... def validate_kwarg_names(self, kwargs, names): ... def get_all_keys(self, headers: Optional[Dict[Text, Text]] = ..., **params): ... def get_all_versions(self, headers: Optional[Dict[Text, Text]] = ..., **params): ... def validate_get_all_versions_params(self, params): ... def get_all_multipart_uploads(self, headers: Optional[Dict[Text, Text]] = ..., **params): ... def new_key(self, key_name: Optional[Any] = ...): ... def generate_url(self, expires_in, method: str = ..., headers: Optional[Dict[Text, Text]] = ..., force_http: bool = ..., response_headers: Optional[Dict[Text, Text]] = ..., expires_in_absolute: bool = ...): ... def delete_keys(self, keys, quiet: bool = ..., mfa_token: Optional[Any] = ..., headers: Optional[Dict[Text, Text]] = ...): ... def delete_key(self, key_name, headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ..., mfa_token: Optional[Any] = ...): ... def copy_key(self, new_key_name, src_bucket_name, src_key_name, metadata: Optional[Any] = ..., src_version_id: Optional[Any] = ..., storage_class: str = ..., preserve_acl: bool = ..., encrypt_key: bool = ..., headers: Optional[Dict[Text, Text]] = ..., query_args: Optional[Any] = ...): ... 
def set_canned_acl(self, acl_str, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def get_xml_acl(self, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def set_xml_acl(self, acl_str, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ..., query_args: str = ...): ... def set_acl(self, acl_or_str, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def get_acl(self, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def set_subresource(self, subresource, value, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def get_subresource(self, subresource, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def make_public(self, recursive: bool = ..., headers: Optional[Dict[Text, Text]] = ...): ... def add_email_grant(self, permission, email_address, recursive: bool = ..., headers: Optional[Dict[Text, Text]] = ...): ... def add_user_grant(self, permission, user_id, recursive: bool = ..., headers: Optional[Dict[Text, Text]] = ..., display_name: Optional[Any] = ...): ... def list_grants(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_location(self): ... def set_xml_logging(self, logging_str, headers: Optional[Dict[Text, Text]] = ...): ... def enable_logging(self, target_bucket, target_prefix: str = ..., grants: Optional[Any] = ..., headers: Optional[Dict[Text, Text]] = ...): ... def disable_logging(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_logging_status(self, headers: Optional[Dict[Text, Text]] = ...): ... def set_as_logging_target(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_request_payment(self, headers: Optional[Dict[Text, Text]] = ...): ... 
def set_request_payment(self, payer: str = ..., headers: Optional[Dict[Text, Text]] = ...): ... def configure_versioning(self, versioning, mfa_delete: bool = ..., mfa_token: Optional[Any] = ..., headers: Optional[Dict[Text, Text]] = ...): ... def get_versioning_status(self, headers: Optional[Dict[Text, Text]] = ...): ... def configure_lifecycle(self, lifecycle_config, headers: Optional[Dict[Text, Text]] = ...): ... def get_lifecycle_config(self, headers: Optional[Dict[Text, Text]] = ...): ... def delete_lifecycle_configuration(self, headers: Optional[Dict[Text, Text]] = ...): ... def configure_website(self, suffix: Optional[Any] = ..., error_key: Optional[Any] = ..., redirect_all_requests_to: Optional[Any] = ..., routing_rules: Optional[Any] = ..., headers: Optional[Dict[Text, Text]] = ...): ... def set_website_configuration(self, config, headers: Optional[Dict[Text, Text]] = ...): ... def set_website_configuration_xml(self, xml, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_configuration(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_configuration_obj(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_configuration_with_xml(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_configuration_xml(self, headers: Optional[Dict[Text, Text]] = ...): ... def delete_website_configuration(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_endpoint(self): ... def get_policy(self, headers: Optional[Dict[Text, Text]] = ...): ... def set_policy(self, policy, headers: Optional[Dict[Text, Text]] = ...): ... def delete_policy(self, headers: Optional[Dict[Text, Text]] = ...): ... def set_cors_xml(self, cors_xml, headers: Optional[Dict[Text, Text]] = ...): ... def set_cors(self, cors_config, headers: Optional[Dict[Text, Text]] = ...): ... def get_cors_xml(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_cors(self, headers: Optional[Dict[Text, Text]] = ...): ... 
def delete_cors(self, headers: Optional[Dict[Text, Text]] = ...): ... def initiate_multipart_upload(self, key_name, headers: Optional[Dict[Text, Text]] = ..., reduced_redundancy: bool = ..., metadata: Optional[Any] = ..., encrypt_key: bool = ..., policy: Optional[Any] = ...): ... def complete_multipart_upload(self, key_name, upload_id, xml_body, headers: Optional[Dict[Text, Text]] = ...): ... def cancel_multipart_upload(self, key_name, upload_id, headers: Optional[Dict[Text, Text]] = ...): ... def delete(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_tags(self): ... def get_xml_tags(self): ... def set_xml_tags(self, tag_str, headers: Optional[Dict[Text, Text]] = ..., query_args: str = ...): ... def set_tags(self, tags, headers: Optional[Dict[Text, Text]] = ...): ... def delete_tags(self, headers: Optional[Dict[Text, Text]] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/bucketlistresultset.pyi0000644€tŠÔÚ€2›s®0000000341313576752252033140 0ustar jukkaDROPBOX\Domain Users00000000000000from .bucket import Bucket from .key import Key from typing import Any, Iterable, Iterator, Optional def bucket_lister(bucket, prefix: str = ..., delimiter: str = ..., marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...): ... class BucketListResultSet(Iterable[Key]): bucket: Any prefix: Any delimiter: Any marker: Any headers: Any encoding_type: Any def __init__(self, bucket: Optional[Any] = ..., prefix: str = ..., delimiter: str = ..., marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...) -> None: ... def __iter__(self) -> Iterator[Key]: ... def versioned_bucket_lister(bucket, prefix: str = ..., delimiter: str = ..., key_marker: str = ..., version_id_marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...): ... 
class VersionedBucketListResultSet: bucket: Any prefix: Any delimiter: Any key_marker: Any version_id_marker: Any headers: Any encoding_type: Any def __init__(self, bucket: Optional[Any] = ..., prefix: str = ..., delimiter: str = ..., key_marker: str = ..., version_id_marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...) -> None: ... def __iter__(self) -> Iterator[Key]: ... def multipart_upload_lister(bucket, key_marker: str = ..., upload_id_marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...): ... class MultiPartUploadListResultSet: bucket: Any key_marker: Any upload_id_marker: Any headers: Any encoding_type: Any def __init__(self, bucket: Optional[Any] = ..., key_marker: str = ..., upload_id_marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...) -> None: ... def __iter__(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/bucketlogging.pyi0000644€tŠÔÚ€2›s®0000000062013576752252031635 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class BucketLogging: target: Any prefix: Any grants: Any def __init__(self, target: Optional[Any] = ..., prefix: Optional[Any] = ..., grants: Optional[Any] = ...) -> None: ... def add_grant(self, grant): ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/connection.pyi0000644€tŠÔÚ€2›s®0000001040213576752252031147 0ustar jukkaDROPBOX\Domain Users00000000000000from .bucket import Bucket from typing import Any, Dict, Optional, Text, Type from boto.connection import AWSAuthConnection from boto.exception import BotoClientError def check_lowercase_bucketname(n): ... def assert_case_insensitive(f): ... class _CallingFormat: def get_bucket_server(self, server, bucket): ... def build_url_base(self, connection, protocol, server, bucket, key: str = ...): ... 
def build_host(self, server, bucket): ... def build_auth_path(self, bucket, key: str = ...): ... def build_path_base(self, bucket, key: str = ...): ... class SubdomainCallingFormat(_CallingFormat): def get_bucket_server(self, server, bucket): ... class VHostCallingFormat(_CallingFormat): def get_bucket_server(self, server, bucket): ... class OrdinaryCallingFormat(_CallingFormat): def get_bucket_server(self, server, bucket): ... def build_path_base(self, bucket, key: str = ...): ... class ProtocolIndependentOrdinaryCallingFormat(OrdinaryCallingFormat): def build_url_base(self, connection, protocol, server, bucket, key: str = ...): ... class Location: DEFAULT: str EU: str EUCentral1: str USWest: str USWest2: str SAEast: str APNortheast: str APSoutheast: str APSoutheast2: str CNNorth1: str class NoHostProvided: ... class HostRequiredError(BotoClientError): ... class S3Connection(AWSAuthConnection): DefaultHost: Any DefaultCallingFormat: Any QueryString: str calling_format: Any bucket_class: Type[Bucket] anon: Any def __init__(self, aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., is_secure: bool = ..., port: Optional[Any] = ..., proxy: Optional[Any] = ..., proxy_port: Optional[Any] = ..., proxy_user: Optional[Any] = ..., proxy_pass: Optional[Any] = ..., host: Any = ..., debug: int = ..., https_connection_factory: Optional[Any] = ..., calling_format: Any = ..., path: str = ..., provider: str = ..., bucket_class: Type[Bucket] = ..., security_token: Optional[Any] = ..., suppress_consec_slashes: bool = ..., anon: bool = ..., validate_certs: Optional[Any] = ..., profile_name: Optional[Any] = ...) -> None: ... def __iter__(self): ... def __contains__(self, bucket_name): ... def set_bucket_class(self, bucket_class: Type[Bucket]) -> None: ... def build_post_policy(self, expiration_time, conditions): ... 
def build_post_form_args(self, bucket_name, key, expires_in: int = ..., acl: Optional[Any] = ..., success_action_redirect: Optional[Any] = ..., max_content_length: Optional[Any] = ..., http_method: str = ..., fields: Optional[Any] = ..., conditions: Optional[Any] = ..., storage_class: str = ..., server_side_encryption: Optional[Any] = ...): ... def generate_url_sigv4(self, expires_in, method, bucket: str = ..., key: str = ..., headers: Optional[Dict[Text, Text]] = ..., force_http: bool = ..., response_headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ..., iso_date: Optional[Any] = ...): ... def generate_url(self, expires_in, method, bucket: str = ..., key: str = ..., headers: Optional[Dict[Text, Text]] = ..., query_auth: bool = ..., force_http: bool = ..., response_headers: Optional[Dict[Text, Text]] = ..., expires_in_absolute: bool = ..., version_id: Optional[Any] = ...): ... def get_all_buckets(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_canonical_user_id(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_bucket(self, bucket_name: Text, validate: bool = ..., headers: Optional[Dict[Text, Text]] = ...) -> Bucket: ... def head_bucket(self, bucket_name, headers: Optional[Dict[Text, Text]] = ...): ... def lookup(self, bucket_name, validate: bool = ..., headers: Optional[Dict[Text, Text]] = ...): ... def create_bucket(self, bucket_name, headers: Optional[Dict[Text, Text]] = ..., location: Any = ..., policy: Optional[Any] = ...): ... def delete_bucket(self, bucket, headers: Optional[Dict[Text, Text]] = ...): ... def make_request(self, method, bucket: str = ..., key: str = ..., headers: Optional[Any] = ..., data: str = ..., query_args: Optional[Any] = ..., sender: Optional[Any] = ..., override_num_retries: Optional[Any] = ..., retry_handler: Optional[Any] = ..., *args, **kwargs): ... 
# type: ignore # https://github.com/python/mypy/issues/1237 mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/cors.pyi0000644€tŠÔÚ€2›s®0000000170213576752252027761 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Optional class CORSRule: allowed_method: Any allowed_origin: Any id: Any allowed_header: Any max_age_seconds: Any expose_header: Any def __init__(self, allowed_method: Optional[Any] = ..., allowed_origin: Optional[Any] = ..., id: Optional[Any] = ..., allowed_header: Optional[Any] = ..., max_age_seconds: Optional[Any] = ..., expose_header: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self) -> str: ... class CORSConfiguration(List[CORSRule]): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self) -> str: ... def add_rule(self, allowed_method, allowed_origin, id: Optional[Any] = ..., allowed_header: Optional[Any] = ..., max_age_seconds: Optional[Any] = ..., expose_header: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/deletemarker.pyi0000644€tŠÔÚ€2›s®0000000055613576752252031465 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class DeleteMarker: bucket: Any name: Any version_id: Any is_latest: bool last_modified: Any owner: Any def __init__(self, bucket: Optional[Any] = ..., name: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/key.pyi0000644€tŠÔÚ€2›s®0000002016513576752252027607 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Optional, Text, Union, overload class Key: DefaultContentType: str RestoreBody: str BufferSize: Any base_user_settable_fields: Any base_fields: Any bucket: Any name: str metadata: Any cache_control: Any content_type: Any content_encoding: Any content_disposition: Any content_language: Any filename: Any etag: Any is_latest: bool last_modified: Any owner: Any path: Any resp: Any mode: Any size: Any version_id: Any source_version_id: Any delete_marker: bool encrypted: Any ongoing_restore: Any expiry_date: Any local_hashes: Any def __init__(self, bucket: Optional[Any] = ..., name: Optional[Any] = ...) -> None: ... def __iter__(self): ... @property def provider(self): ... key: Any md5: Any base64md5: Any storage_class: Any def get_md5_from_hexdigest(self, md5_hexdigest): ... def handle_encryption_headers(self, resp): ... def handle_version_headers(self, resp, force: bool = ...): ... def handle_restore_headers(self, response): ... def handle_addl_headers(self, headers): ... def open_read( self, headers: Optional[Dict[Text, Text]] = ..., query_args: str = ..., override_num_retries: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ..., ): ... def open_write(self, headers: Optional[Dict[Text, Text]] = ..., override_num_retries: Optional[Any] = ...): ... def open( self, mode: str = ..., headers: Optional[Dict[Text, Text]] = ..., query_args: Optional[Any] = ..., override_num_retries: Optional[Any] = ..., ): ... closed: bool def close(self, fast: bool = ...): ... def next(self): ... __next__: Any def read(self, size: int = ...): ... def change_storage_class(self, new_storage_class, dst_bucket: Optional[Any] = ..., validate_dst_bucket: bool = ...): ... 
def copy( self, dst_bucket, dst_key, metadata: Optional[Any] = ..., reduced_redundancy: bool = ..., preserve_acl: bool = ..., encrypt_key: bool = ..., validate_dst_bucket: bool = ..., ): ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def exists(self, headers: Optional[Dict[Text, Text]] = ...): ... def delete(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_metadata(self, name): ... def set_metadata(self, name, value): ... def update_metadata(self, d): ... def set_acl(self, acl_str, headers: Optional[Dict[Text, Text]] = ...): ... def get_acl(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_xml_acl(self, headers: Optional[Dict[Text, Text]] = ...): ... def set_xml_acl(self, acl_str, headers: Optional[Dict[Text, Text]] = ...): ... def set_canned_acl(self, acl_str, headers: Optional[Dict[Text, Text]] = ...): ... def get_redirect(self): ... def set_redirect(self, redirect_location, headers: Optional[Dict[Text, Text]] = ...): ... def make_public(self, headers: Optional[Dict[Text, Text]] = ...): ... def generate_url( self, expires_in, method: str = ..., headers: Optional[Dict[Text, Text]] = ..., query_auth: bool = ..., force_http: bool = ..., response_headers: Optional[Dict[Text, Text]] = ..., expires_in_absolute: bool = ..., version_id: Optional[Any] = ..., policy: Optional[Any] = ..., reduced_redundancy: bool = ..., encrypt_key: bool = ..., ): ... def send_file( self, fp, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., query_args: Optional[Any] = ..., chunked_transfer: bool = ..., size: Optional[Any] = ..., ): ... def should_retry(self, response, chunked_transfer: bool = ...): ... def compute_md5(self, fp, size: Optional[Any] = ...): ... 
def set_contents_from_stream( self, fp, headers: Optional[Dict[Text, Text]] = ..., replace: bool = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., policy: Optional[Any] = ..., reduced_redundancy: bool = ..., query_args: Optional[Any] = ..., size: Optional[Any] = ..., ): ... def set_contents_from_file( self, fp, headers: Optional[Dict[Text, Text]] = ..., replace: bool = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., policy: Optional[Any] = ..., md5: Optional[Any] = ..., reduced_redundancy: bool = ..., query_args: Optional[Any] = ..., encrypt_key: bool = ..., size: Optional[Any] = ..., rewind: bool = ..., ): ... def set_contents_from_filename( self, filename, headers: Optional[Dict[Text, Text]] = ..., replace: bool = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., policy: Optional[Any] = ..., md5: Optional[Any] = ..., reduced_redundancy: bool = ..., encrypt_key: bool = ..., ): ... def set_contents_from_string( self, string_data: Union[Text, bytes], headers: Optional[Dict[Text, Text]] = ..., replace: bool = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., policy: Optional[Any] = ..., md5: Optional[Any] = ..., reduced_redundancy: bool = ..., encrypt_key: bool = ..., ) -> None: ... def get_file( self, fp, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., torrent: bool = ..., version_id: Optional[Any] = ..., override_num_retries: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ..., ): ... def get_torrent_file( self, fp, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ... ): ... 
def get_contents_to_file( self, fp, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., torrent: bool = ..., version_id: Optional[Any] = ..., res_download_handler: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ..., ): ... def get_contents_to_filename( self, filename, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., torrent: bool = ..., version_id: Optional[Any] = ..., res_download_handler: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ..., ): ... @overload def get_contents_as_string( self, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., torrent: bool = ..., version_id: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ..., encoding: None = ..., ) -> bytes: ... @overload def get_contents_as_string( self, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., torrent: bool = ..., version_id: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ..., *, encoding: Text, ) -> Text: ... def add_email_grant(self, permission, email_address, headers: Optional[Dict[Text, Text]] = ...): ... def add_user_grant( self, permission, user_id, headers: Optional[Dict[Text, Text]] = ..., display_name: Optional[Any] = ... ): ... def set_remote_metadata(self, metadata_plus, metadata_minus, preserve_acl, headers: Optional[Dict[Text, Text]] = ...): ... def restore(self, days, headers: Optional[Dict[Text, Text]] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/keyfile.pyi0000644€tŠÔÚ€2›s®0000000125413576752252030445 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class KeyFile: key: Any location: int closed: bool softspace: int mode: str encoding: str errors: str newlines: str name: Any def __init__(self, key) -> None: ... def tell(self): ... 
def seek(self, pos, whence: Any = ...): ... def read(self, size): ... def close(self): ... def isatty(self): ... def getkey(self): ... def write(self, buf): ... def fileno(self): ... def flush(self): ... def next(self): ... def readinto(self): ... def readline(self): ... def readlines(self): ... def truncate(self): ... def writelines(self): ... def xreadlines(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/lifecycle.pyi0000644€tŠÔÚ€2›s®0000000352213576752252030754 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Optional class Rule: id: Any prefix: Any status: Any expiration: Any transition: Any def __init__(self, id: Optional[Any] = ..., prefix: Optional[Any] = ..., status: Optional[Any] = ..., expiration: Optional[Any] = ..., transition: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... class Expiration: days: Any date: Any def __init__(self, days: Optional[Any] = ..., date: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... class Transition: days: Any date: Any storage_class: Any def __init__(self, days: Optional[Any] = ..., date: Optional[Any] = ..., storage_class: Optional[Any] = ...) -> None: ... def to_xml(self): ... class Transitions(List[Transition]): transition_properties: int current_transition_property: int temp_days: Any temp_date: Any temp_storage_class: Any def __init__(self) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... def add_transition(self, days: Optional[Any] = ..., date: Optional[Any] = ..., storage_class: Optional[Any] = ...): ... @property def days(self): ... @property def date(self): ... @property def storage_class(self): ... 
class Lifecycle(List[Rule]): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... def add_rule(self, id: Optional[Any] = ..., prefix: str = ..., status: str = ..., expiration: Optional[Any] = ..., transition: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/multidelete.pyi0000644€tŠÔÚ€2›s®0000000176613576752252031342 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Deleted: key: Any version_id: Any delete_marker: Any delete_marker_version_id: Any def __init__(self, key: Optional[Any] = ..., version_id: Optional[Any] = ..., delete_marker: bool = ..., delete_marker_version_id: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class Error: key: Any version_id: Any code: Any message: Any def __init__(self, key: Optional[Any] = ..., version_id: Optional[Any] = ..., code: Optional[Any] = ..., message: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class MultiDeleteResult: bucket: Any deleted: Any errors: Any def __init__(self, bucket: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/multipart.pyi0000644€tŠÔÚ€2›s®0000000345113576752252031037 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class CompleteMultiPartUpload: bucket: Any location: Any bucket_name: Any key_name: Any etag: Any version_id: Any encrypted: Any def __init__(self, bucket: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... 
class Part: bucket: Any part_number: Any last_modified: Any etag: Any size: Any def __init__(self, bucket: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def part_lister(mpupload, part_number_marker: Optional[Any] = ...): ... class MultiPartUpload: bucket: Any bucket_name: Any key_name: Any id: Any initiator: Any owner: Any storage_class: Any initiated: Any part_number_marker: Any next_part_number_marker: Any max_parts: Any is_truncated: bool def __init__(self, bucket: Optional[Any] = ...) -> None: ... def __iter__(self): ... def to_xml(self): ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def get_all_parts(self, max_parts: Optional[Any] = ..., part_number_marker: Optional[Any] = ..., encoding_type: Optional[Any] = ...): ... def upload_part_from_file(self, fp, part_num, headers: Optional[Any] = ..., replace: bool = ..., cb: Optional[Any] = ..., num_cb: int = ..., md5: Optional[Any] = ..., size: Optional[Any] = ...): ... def copy_part_from_key(self, src_bucket_name, src_key_name, part_num, start: Optional[Any] = ..., end: Optional[Any] = ..., src_version_id: Optional[Any] = ..., headers: Optional[Any] = ...): ... def complete_upload(self): ... def cancel_upload(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/prefix.pyi0000644€tŠÔÚ€2›s®0000000050413576752252030307 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Prefix: bucket: Any name: Any def __init__(self, bucket: Optional[Any] = ..., name: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... @property def provider(self): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/tagging.pyi0000644€tŠÔÚ€2›s®0000000135413576752252030436 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Optional class Tag: key: Any value: Any def __init__(self, key: Optional[Any] = ..., value: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... def __eq__(self, other): ... class TagSet(List[Tag]): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def add_tag(self, key, value): ... def to_xml(self): ... class Tags(List[TagSet]): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... def add_tag_set(self, tag_set): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/user.pyi0000644€tŠÔÚ€2›s®0000000055213576752252027773 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class User: type: Any id: Any display_name: Any def __init__(self, parent: Optional[Any] = ..., id: str = ..., display_name: str = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self, element_name: str = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/boto/s3/website.pyi0000644€tŠÔÚ€2›s®0000000467413576752252030470 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Optional def tag(key, value): ... class WebsiteConfiguration: suffix: Any error_key: Any redirect_all_requests_to: Any routing_rules: Any def __init__(self, suffix: Optional[Any] = ..., error_key: Optional[Any] = ..., redirect_all_requests_to: Optional[Any] = ..., routing_rules: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... 
class _XMLKeyValue: translator: Any container: Any def __init__(self, translator, container: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... class RedirectLocation(_XMLKeyValue): TRANSLATOR: Any hostname: Any protocol: Any def __init__(self, hostname: Optional[Any] = ..., protocol: Optional[Any] = ...) -> None: ... def to_xml(self): ... class RoutingRules(List[RoutingRule]): def add_rule(self, rule: RoutingRule) -> RoutingRules: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... class RoutingRule: condition: Any redirect: Any def __init__(self, condition: Optional[Any] = ..., redirect: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... @classmethod def when(cls, key_prefix: Optional[Any] = ..., http_error_code: Optional[Any] = ...): ... def then_redirect(self, hostname: Optional[Any] = ..., protocol: Optional[Any] = ..., replace_key: Optional[Any] = ..., replace_key_prefix: Optional[Any] = ..., http_redirect_code: Optional[Any] = ...): ... class Condition(_XMLKeyValue): TRANSLATOR: Any key_prefix: Any http_error_code: Any def __init__(self, key_prefix: Optional[Any] = ..., http_error_code: Optional[Any] = ...) -> None: ... def to_xml(self): ... class Redirect(_XMLKeyValue): TRANSLATOR: Any hostname: Any protocol: Any replace_key: Any replace_key_prefix: Any http_redirect_code: Any def __init__(self, hostname: Optional[Any] = ..., protocol: Optional[Any] = ..., replace_key: Optional[Any] = ..., replace_key_prefix: Optional[Any] = ..., http_redirect_code: Optional[Any] = ...) -> None: ... def to_xml(self): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/boto/utils.pyi0000644€tŠÔÚ€2›s®0000001407113576752252027631 0ustar jukkaDROPBOX\Domain Users00000000000000import datetime import logging.handlers import subprocess import sys import time import boto.connection from typing import ( Any, Callable, ContextManager, Dict, IO, Iterable, List, Mapping, Optional, Sequence, Tuple, Type, TypeVar, Union, ) _KT = TypeVar('_KT') _VT = TypeVar('_VT') if sys.version_info >= (3,): # TODO move _StringIO definition into boto.compat once stubs exist and rename to StringIO import io _StringIO = io.StringIO from hashlib import _Hash _HashType = _Hash from email.message import Message as _Message else: # TODO move _StringIO definition into boto.compat once stubs exist and rename to StringIO import StringIO _StringIO = StringIO.StringIO[Any] from hashlib import _hash _HashType = _hash # TODO use email.message.Message once stubs exist _Message = Any _Provider = Any # TODO replace this with boto.provider.Provider once stubs exist _LockType = Any # TODO replace this with _thread.LockType once stubs exist JSONDecodeError: Type[ValueError] qsa_of_interest: List[str] def unquote_v(nv: str) -> Union[str, Tuple[str, str]]: ... def canonical_string( method: str, path: str, headers: Mapping[str, Optional[str]], expires: Optional[int] = ..., provider: Optional[_Provider] = ..., ) -> str: ... def merge_meta( headers: Mapping[str, str], metadata: Mapping[str, str], provider: Optional[_Provider] = ..., ) -> Mapping[str, str]: ... def get_aws_metadata( headers: Mapping[str, str], provider: Optional[_Provider] = ..., ) -> Mapping[str, str]: ... def retry_url( url: str, retry_on_404: bool = ..., num_retries: int = ..., timeout: Optional[int] = ..., ) -> str: ... class LazyLoadMetadata(Dict[_KT, _VT]): def __init__( self, url: str, num_retries: int, timeout: Optional[int] = ..., ) -> None: ... 
def get_instance_metadata( version: str = ..., url: str = ..., data: str = ..., timeout: Optional[int] = ..., num_retries: int = ..., ) -> Optional[LazyLoadMetadata[Any, Any]]: ... def get_instance_identity( version: str = ..., url: str = ..., timeout: Optional[int] = ..., num_retries: int = ..., ) -> Optional[Mapping[str, Any]]: ... def get_instance_userdata( version: str = ..., sep: Optional[str] = ..., url: str = ..., timeout: Optional[int] = ..., num_retries: int = ..., ) -> Mapping[str, str]: ... ISO8601: str ISO8601_MS: str RFC1123: str LOCALE_LOCK: _LockType def setlocale(name: Union[str, Tuple[str, str]]) -> ContextManager[str]: ... def get_ts(ts: Optional[time.struct_time] = ...) -> str: ... def parse_ts(ts: str) -> datetime.datetime: ... def find_class(module_name: str, class_name: Optional[str] = ...) -> Optional[Type[Any]]: ... def update_dme(username: str, password: str, dme_id: str, ip_address: str) -> str: ... def fetch_file( uri: str, file: Optional[IO[str]] = ..., username: Optional[str] = ..., password: Optional[str] = ..., ) -> Optional[IO[str]]: ... class ShellCommand: exit_code: int command: subprocess._CMD log_fp: _StringIO wait: bool fail_fast: bool def __init__( self, command: subprocess._CMD, wait: bool = ..., fail_fast: bool = ..., cwd: Optional[subprocess._TXT] = ..., ) -> None: ... process: subprocess.Popen[Any] def run(self, cwd: Optional[subprocess._CMD] = ...) -> Optional[int]: ... def setReadOnly(self, value) -> None: ... def getStatus(self) -> Optional[int]: ... status: Optional[int] def getOutput(self) -> str: ... output: str class AuthSMTPHandler(logging.handlers.SMTPHandler): username: str password: str def __init__( self, mailhost: str, username: str, password: str, fromaddr: str, toaddrs: Sequence[str], subject: str, ) -> None: ... class LRUCache(Dict[_KT, _VT]): class _Item: previous: Optional[LRUCache._Item] next: Optional[LRUCache._Item] key = ... value = ... def __init__(self, key, value) -> None: ... 
_dict: Dict[_KT, LRUCache._Item] capacity: int head: Optional[LRUCache._Item] tail: Optional[LRUCache._Item] def __init__(self, capacity: int) -> None: ... # This exists to work around Password.str's name shadowing the str type _str = str class Password: hashfunc: Callable[[bytes], _HashType] str: Optional[_str] def __init__( self, str: Optional[_str] = ..., hashfunc: Optional[Callable[[bytes], _HashType]] = ..., ) -> None: ... def set(self, value: Union[bytes, _str]) -> None: ... def __eq__(self, other: Any) -> bool: ... def __len__(self) -> int: ... def notify( subject: str, body: Optional[str] = ..., html_body: Optional[Union[Sequence[str], str]] = ..., to_string: Optional[str] = ..., attachments: Optional[Iterable[_Message]] = ..., append_instance_id: bool = ..., ) -> None: ... def get_utf8_value(value: str) -> bytes: ... def mklist(value: Any) -> List[Any]: ... def pythonize_name(name: str) -> str: ... def write_mime_multipart( content: List[Tuple[str, str]], compress: bool = ..., deftype: str = ..., delimiter: str = ..., ) -> str: ... def guess_mime_type(content: str, deftype: str) -> str: ... def compute_md5( fp: IO[Any], buf_size: int = ..., size: Optional[int] = ..., ) -> Tuple[str, str, int]: ... def compute_hash( fp: IO[Any], buf_size: int = ..., size: Optional[int] = ..., hash_algorithm: Any = ..., ) -> Tuple[str, str, int]: ... def find_matching_headers(name: str, headers: Mapping[str, Optional[str]]) -> List[str]: ... def merge_headers_by_name(name: str, headers: Mapping[str, Optional[str]]) -> str: ... class RequestHook: def handle_request_data( self, request: boto.connection.HTTPRequest, response: boto.connection.HTTPResponse, error: bool = ..., ) -> Any: ... def host_is_ipv6(hostname: str) -> bool: ... def parse_host(hostname: str) -> str: ... mypy-0.761/mypy/typeshed/third_party/2and3/certifi.pyi0000644€tŠÔÚ€2›s®0000000003013576752252027141 0ustar jukkaDROPBOX\Domain Users00000000000000def where() -> str: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/characteristic/0000755€tŠÔÚ€2›s®0000000000013576752267027776 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/characteristic/__init__.pyi0000644€tŠÔÚ€2›s®0000000245313576752252032256 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Sequence, Callable, Union, Any, Optional, AnyStr, TypeVar, Type, Dict def with_repr(attrs: Sequence[Union[AnyStr, Attribute]]) -> Callable[..., Any]: ... def with_cmp(attrs: Sequence[Union[AnyStr, Attribute]]) -> Callable[..., Any]: ... def with_init(attrs: Sequence[Union[AnyStr, Attribute]]) -> Callable[..., Any]: ... def immutable(attrs: Sequence[Union[AnyStr, Attribute]]) -> Callable[..., Any]: ... def strip_leading_underscores(attribute_name: AnyStr) -> AnyStr: ... NOTHING = Any _T = TypeVar('_T') def attributes( attrs: Sequence[Union[AnyStr, Attribute]], apply_with_cmp: bool = ..., apply_with_init: bool = ..., apply_with_repr: bool = ..., apply_immutable: bool = ..., store_attributes: Optional[Callable[[type, Attribute], Any]] = ..., **kw: Optional[Dict[Any, Any]]) -> Callable[[Type[_T]], Type[_T]]: ... class Attribute: def __init__( self, name: AnyStr, exclude_from_cmp: bool = ..., exclude_from_init: bool = ..., exclude_from_repr: bool = ..., exclude_from_immutable: bool = ..., default_value: Any = ..., default_factory: Optional[Callable[[None], Any]] = ..., instance_of: Optional[Any] = ..., init_aliaser: Optional[Callable[[AnyStr], AnyStr]] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/click/0000755€tŠÔÚ€2›s®0000000000013576752267026073 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/click/__init__.pyi0000644€tŠÔÚ€2›s®0000000547013576752252030355 0ustar jukkaDROPBOX\Domain Users00000000000000# -*- coding: utf-8 -*- """ click ~~~~~ Click is a simple Python module that wraps the stdlib's optparse to make writing command line scripts fun. 
Unlike other modules, it's based around a simple API that does not come with too much magic and is composable. In case optparse ever gets removed from the stdlib, it will be shipped by this module. :copyright: (c) 2014 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ # Core classes from .core import ( Context as Context, BaseCommand as BaseCommand, Command as Command, MultiCommand as MultiCommand, Group as Group, CommandCollection as CommandCollection, Parameter as Parameter, Option as Option, Argument as Argument, ) # Globals from .globals import get_current_context as get_current_context # Decorators from .decorators import ( pass_context as pass_context, pass_obj as pass_obj, make_pass_decorator as make_pass_decorator, command as command, group as group, argument as argument, option as option, confirmation_option as confirmation_option, password_option as password_option, version_option as version_option, help_option as help_option, ) # Types from .types import ( ParamType as ParamType, File as File, FloatRange as FloatRange, DateTime as DateTime, Path as Path, Choice as Choice, IntRange as IntRange, Tuple as Tuple, STRING as STRING, INT as INT, FLOAT as FLOAT, BOOL as BOOL, UUID as UUID, UNPROCESSED as UNPROCESSED, ) # Utilities from .utils import ( echo as echo, get_binary_stream as get_binary_stream, get_text_stream as get_text_stream, open_file as open_file, format_filename as format_filename, get_app_dir as get_app_dir, get_os_args as get_os_args, ) # Terminal functions from .termui import ( prompt as prompt, confirm as confirm, get_terminal_size as get_terminal_size, echo_via_pager as echo_via_pager, progressbar as progressbar, clear as clear, style as style, unstyle as unstyle, secho as secho, edit as edit, launch as launch, getchar as getchar, pause as pause, ) # Exceptions from .exceptions import ( ClickException as ClickException, UsageError as UsageError, BadParameter as BadParameter, FileError as FileError, Abort as Abort, 
NoSuchOption as NoSuchOption, BadOptionUsage as BadOptionUsage, BadArgumentUsage as BadArgumentUsage, MissingParameter as MissingParameter, ) # Formatting from .formatting import HelpFormatter as HelpFormatter, wrap_text as wrap_text # Parsing from .parser import OptionParser as OptionParser # Controls if click should emit the warning about the use of unicode # literals. disable_unicode_literals_warning: bool __version__: str mypy-0.761/mypy/typeshed/third_party/2and3/click/_termui_impl.pyi0000644€tŠÔÚ€2›s®0000000073413576752252031301 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ContextManager, Iterator, Generic, TypeVar, Optional _T = TypeVar("_T") class ProgressBar(object, Generic[_T]): def update(self, n_steps: int) -> None: ... def finish(self) -> None: ... def __enter__(self) -> ProgressBar[_T]: ... def __exit__(self, exc_type, exc_value, tb) -> None: ... def __iter__(self) -> ProgressBar[_T]: ... def next(self) -> _T: ... def __next__(self) -> _T: ... length: Optional[int] label: str mypy-0.761/mypy/typeshed/third_party/2and3/click/core.pyi0000644€tŠÔÚ€2›s®0000002725013576752252027546 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, Callable, ContextManager, Dict, Generator, Iterable, List, Mapping, NoReturn, Optional, Sequence, Set, Tuple, TypeVar, Union, ) from click.formatting import HelpFormatter from click.parser import OptionParser _CC = TypeVar("_CC", bound=Callable[[], Any]) def invoke_param_callback( callback: Callable[[Context, Parameter, Optional[str]], Any], ctx: Context, param: Parameter, value: Optional[str] ) -> Any: ... def augment_usage_errors( ctx: Context, param: Optional[Parameter] = ... ) -> ContextManager[None]: ... def iter_params_for_processing( invocation_order: Sequence[Parameter], declaration_order: Iterable[Parameter], ) -> Iterable[Parameter]: ... 
class Context: parent: Optional[Context] command: Command info_name: Optional[str] params: Dict[Any, Any] args: List[str] protected_args: List[str] obj: Any default_map: Optional[Mapping[str, Any]] invoked_subcommand: Optional[str] terminal_width: Optional[int] max_content_width: Optional[int] allow_extra_args: bool allow_interspersed_args: bool ignore_unknown_options: bool help_option_names: List[str] token_normalize_func: Optional[Callable[[str], str]] resilient_parsing: bool auto_envvar_prefix: Optional[str] color: Optional[bool] _meta: Dict[str, Any] _close_callbacks: List[Any] _depth: int def __init__( self, command: Command, parent: Optional[Context] = ..., info_name: Optional[str] = ..., obj: Optional[Any] = ..., auto_envvar_prefix: Optional[str] = ..., default_map: Optional[Mapping[str, Any]] = ..., terminal_width: Optional[int] = ..., max_content_width: Optional[int] = ..., resilient_parsing: bool = ..., allow_extra_args: Optional[bool] = ..., allow_interspersed_args: Optional[bool] = ..., ignore_unknown_options: Optional[bool] = ..., help_option_names: Optional[List[str]] = ..., token_normalize_func: Optional[Callable[[str], str]] = ..., color: Optional[bool] = ... ) -> None: ... @property def meta(self) -> Dict[str, Any]: ... @property def command_path(self) -> str: ... def scope(self, cleanup: bool = ...) -> ContextManager[Context]: ... def make_formatter(self) -> HelpFormatter: ... def call_on_close(self, f: _CC) -> _CC: ... def close(self) -> None: ... def find_root(self) -> Context: ... def find_object(self, object_type: type) -> Any: ... def ensure_object(self, object_type: type) -> Any: ... def lookup_default(self, name: str) -> Any: ... def fail(self, message: str) -> NoReturn: ... def abort(self) -> NoReturn: ... def exit(self, code: Union[int, str] = ...) -> NoReturn: ... def get_usage(self) -> str: ... def get_help(self) -> str: ... def invoke(self, callback: Union[Command, Callable[..., Any]], *args, **kwargs) -> Any: ... 
def forward(self, callback: Union[Command, Callable[..., Any]], *args, **kwargs) -> Any: ... class BaseCommand: allow_extra_args: bool allow_interspersed_args: bool ignore_unknown_options: bool name: str context_settings: Dict[Any, Any] def __init__(self, name: str, context_settings: Optional[Dict[Any, Any]] = ...) -> None: ... def get_usage(self, ctx: Context) -> str: ... def get_help(self, ctx: Context) -> str: ... def make_context( self, info_name: str, args: List[str], parent: Optional[Context] = ..., **extra ) -> Context: ... def parse_args(self, ctx: Context, args: List[str]) -> List[str]: ... def invoke(self, ctx: Context) -> Any: ... def main( self, args: Optional[List[str]] = ..., prog_name: Optional[str] = ..., complete_var: Optional[str] = ..., standalone_mode: bool = ..., **extra ) -> Any: ... def __call__(self, *args, **kwargs) -> Any: ... class Command(BaseCommand): callback: Optional[Callable[..., Any]] params: List[Parameter] help: Optional[str] epilog: Optional[str] short_help: Optional[str] options_metavar: str add_help_option: bool hidden: bool deprecated: bool def __init__( self, name: str, context_settings: Optional[Dict[Any, Any]] = ..., callback: Optional[Callable[..., Any]] = ..., params: Optional[List[Parameter]] = ..., help: Optional[str] = ..., epilog: Optional[str] = ..., short_help: Optional[str] = ..., options_metavar: str = ..., add_help_option: bool = ..., hidden: bool = ..., deprecated: bool = ..., ) -> None: ... def get_params(self, ctx: Context) -> List[Parameter]: ... def format_usage( self, ctx: Context, formatter: HelpFormatter ) -> None: ... def collect_usage_pieces(self, ctx: Context) -> List[str]: ... def get_help_option_names(self, ctx: Context) -> Set[str]: ... def get_help_option(self, ctx: Context) -> Optional[Option]: ... def make_parser(self, ctx: Context) -> OptionParser: ... def get_short_help_str(self, limit: int = ...) -> str: ... def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: ... 
def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: ... def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: ... def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: ... _T = TypeVar('_T') _F = TypeVar('_F', bound=Callable[..., Any]) class MultiCommand(Command): no_args_is_help: bool invoke_without_command: bool subcommand_metavar: str chain: bool result_callback: Callable[..., Any] def __init__( self, name: Optional[str] = ..., invoke_without_command: bool = ..., no_args_is_help: Optional[bool] = ..., subcommand_metavar: Optional[str] = ..., chain: bool = ..., result_callback: Optional[Callable[..., Any]] = ..., **attrs ) -> None: ... def resultcallback( self, replace: bool = ... ) -> Callable[[_F], _F]: ... def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: ... def resolve_command( self, ctx: Context, args: List[str] ) -> Tuple[str, Command, List[str]]: ... def get_command(self, ctx: Context, cmd_name: str) -> Optional[Command]: ... def list_commands(self, ctx: Context) -> Iterable[str]: ... class Group(MultiCommand): commands: Dict[str, Command] def __init__( self, name: Optional[str] = ..., commands: Optional[Dict[str, Command]] = ..., **attrs ) -> None: ... def add_command(self, cmd: Command, name: Optional[str] = ...): ... def command(self, *args, **kwargs) -> Callable[[Callable[..., Any]], Command]: ... def group(self, *args, **kwargs) -> Callable[[Callable[..., Any]], Group]: ... class CommandCollection(MultiCommand): sources: List[MultiCommand] def __init__( self, name: Optional[str] = ..., sources: Optional[List[MultiCommand]] = ..., **attrs ) -> None: ... def add_source(self, multi_cmd: MultiCommand) -> None: ... class _ParamType: name: str is_composite: bool envvar_list_splitter: Optional[str] def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> Any: ... def get_metavar(self, param: Parameter) -> str: ... 
def get_missing_message(self, param: Parameter) -> str: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> Any: ... def split_envvar_value(self, rv: str) -> List[str]: ... def fail(self, message: str, param: Optional[Parameter] = ..., ctx: Optional[Context] = ...) -> NoReturn: ... # This type is here to resolve https://github.com/python/mypy/issues/5275 _ConvertibleType = Union[type, _ParamType, Tuple[Union[type, _ParamType], ...], Callable[[str], Any], Callable[[Optional[str]], Any]] class Parameter: param_type_name: str name: str opts: List[str] secondary_opts: List[str] type: _ParamType required: bool callback: Optional[Callable[[Context, Parameter, str], Any]] nargs: int multiple: bool expose_value: bool default: Any is_eager: bool metavar: Optional[str] envvar: Union[str, List[str], None] def __init__( self, param_decls: Optional[List[str]] = ..., type: Optional[_ConvertibleType] = ..., required: bool = ..., default: Optional[Any] = ..., callback: Optional[Callable[[Context, Parameter, str], Any]] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> None: ... @property def human_readable_name(self) -> str: ... def make_metavar(self) -> str: ... def get_default(self, ctx: Context) -> Any: ... def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: ... def consume_value(self, ctx: Context, opts: Dict[str, Any]) -> Any: ... def type_cast_value(self, ctx: Context, value: Any) -> Any: ... def process_value(self, ctx: Context, value: Any) -> Any: ... def value_is_missing(self, value: Any) -> bool: ... def full_process_value(self, ctx: Context, value: Any) -> Any: ... def resolve_envvar_value(self, ctx: Context) -> str: ... def value_from_envvar(self, ctx: Context) -> Union[str, List[str]]: ... 
def handle_parse_result( self, ctx: Context, opts: Dict[str, Any], args: List[str] ) -> Tuple[Any, List[str]]: ... def get_help_record(self, ctx: Context) -> Tuple[str, str]: ... def get_usage_pieces(self, ctx: Context) -> List[str]: ... def get_error_hint(self, ctx: Context) -> str: ... class Option(Parameter): prompt: str # sic confirmation_prompt: bool hide_input: bool is_flag: bool flag_value: Any is_bool_flag: bool count: bool multiple: bool allow_from_autoenv: bool help: Optional[str] hidden: bool show_default: bool show_choices: bool show_envvar: bool def __init__( self, param_decls: Optional[List[str]] = ..., show_default: bool = ..., prompt: Union[bool, str] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: Optional[bool] = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[_ConvertibleType] = ..., help: Optional[str] = ..., hidden: bool = ..., show_choices: bool = ..., show_envvar: bool = ..., **attrs ) -> None: ... def prompt_for_value(self, ctx: Context) -> Any: ... class Argument(Parameter): def __init__( self, param_decls: Optional[List[str]] = ..., required: Optional[bool] = ..., **attrs ) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/click/decorators.pyi0000644€tŠÔÚ€2›s®0000002205013576752252030754 0ustar jukkaDROPBOX\Domain Users00000000000000from distutils.version import Version from typing import Any, Callable, Dict, List, Optional, Tuple, Type, TypeVar, Union, Text, overload from click.core import Command, Group, Argument, Option, Parameter, Context, _ConvertibleType _T = TypeVar('_T') _F = TypeVar('_F', bound=Callable[..., Any]) # Until https://github.com/python/mypy/issues/3924 is fixed you can't do the following: # _Decorator = Callable[[_F], _F] _Callback = Callable[ [Context, Union[Option, Parameter], Any], Any ] def pass_context(_T) -> _T: ... def pass_obj(_T) -> _T: ... def make_pass_decorator( object_type: type, ensure: bool = ... 
) -> Callable[[_T], _T]: ... # NOTE: Decorators below have **attrs converted to concrete constructor # arguments from core.pyi to help with type checking. def command( name: Optional[str] = ..., cls: Optional[Type[Command]] = ..., # Command context_settings: Optional[Dict[Any, Any]] = ..., help: Optional[str] = ..., epilog: Optional[str] = ..., short_help: Optional[str] = ..., options_metavar: str = ..., add_help_option: bool = ..., hidden: bool = ..., deprecated: bool = ..., ) -> Callable[[Callable[..., Any]], Command]: ... # This inherits attrs from Group, MultiCommand and Command. def group( name: Optional[str] = ..., cls: Type[Command] = ..., # Group commands: Optional[Dict[str, Command]] = ..., # MultiCommand invoke_without_command: bool = ..., no_args_is_help: Optional[bool] = ..., subcommand_metavar: Optional[str] = ..., chain: bool = ..., result_callback: Optional[Callable[..., Any]] = ..., # Command help: Optional[str] = ..., epilog: Optional[str] = ..., short_help: Optional[str] = ..., options_metavar: str = ..., add_help_option: bool = ..., hidden: bool = ..., deprecated: bool = ..., # User-defined **kwargs: Any, ) -> Callable[[Callable[..., Any]], Group]: ... def argument( *param_decls: str, cls: Type[Argument] = ..., # Argument required: Optional[bool] = ..., # Parameter type: Optional[_ConvertibleType] = ..., default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ..., autocompletion: Optional[Callable[[Any, List[str], str], List[Union[str, Tuple[str, str]]]]] = ..., ) -> Callable[[_F], _F]: ... 
@overload def option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: Union[bool, Text] = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: Optional[bool] = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[_ConvertibleType] = ..., help: Optional[str] = ..., show_choices: bool = ..., # Parameter default: Optional[Any] = ..., required: bool = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ..., # User-defined **kwargs: Any, ) -> Callable[[_F], _F]: ... @overload def option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: Union[bool, Text] = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: Optional[bool] = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: _T = ..., help: Optional[str] = ..., show_choices: bool = ..., # Parameter default: Optional[Any] = ..., required: bool = ..., callback: Optional[Callable[[Context, Union[Option, Parameter], Union[bool, int, str]], _T]] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ..., # User-defined **kwargs: Any, ) -> Callable[[_F], _F]: ... 
@overload def option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: Union[bool, Text] = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: Optional[bool] = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Type[str] = ..., help: Optional[str] = ..., show_choices: bool = ..., # Parameter default: Optional[Any] = ..., required: bool = ..., callback: Callable[[Context, Union[Option, Parameter], str], Any] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ..., # User-defined **kwargs: Any, ) -> Callable[[_F], _F]: ... @overload def option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: Union[bool, Text] = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: Optional[bool] = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Type[int] = ..., help: Optional[str] = ..., show_choices: bool = ..., # Parameter default: Optional[Any] = ..., required: bool = ..., callback: Callable[[Context, Union[Option, Parameter], int], Any] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ..., # User-defined **kwargs: Any, ) -> Callable[[_F], _F]: ... 
def confirmation_option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: Union[bool, Text] = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: bool = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[_ConvertibleType] = ..., help: str = ..., show_choices: bool = ..., # Parameter default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> Callable[[_F], _F]: ... def password_option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: Union[bool, Text] = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: Optional[bool] = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[_ConvertibleType] = ..., help: Optional[str] = ..., show_choices: bool = ..., # Parameter default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> Callable[[_F], _F]: ... 
def version_option( version: Optional[Union[str, Version]] = ..., *param_decls: str, cls: Type[Option] = ..., # Option prog_name: Optional[str] = ..., message: Optional[str] = ..., show_default: Union[bool, Text] = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: bool = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[_ConvertibleType] = ..., help: str = ..., show_choices: bool = ..., # Parameter default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> Callable[[_F], _F]: ... def help_option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: Union[bool, Text] = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: bool = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[_ConvertibleType] = ..., help: str = ..., show_choices: bool = ..., # Parameter default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> Callable[[_F], _F]: ... mypy-0.761/mypy/typeshed/third_party/2and3/click/exceptions.pyi0000644€tŠÔÚ€2›s®0000000402413576752252030771 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import IO, List, Optional, Any from click.core import Context, Parameter class ClickException(Exception): exit_code: int message: str def __init__(self, message: str) -> None: ... def format_message(self) -> str: ... def show(self, file: Optional[Any] = ...) -> None: ... 
class UsageError(ClickException): ctx: Optional[Context] def __init__(self, message: str, ctx: Optional[Context] = ...) -> None: ... def show(self, file: Optional[IO[Any]] = ...) -> None: ... class BadParameter(UsageError): param: Optional[Parameter] param_hint: Optional[str] def __init__( self, message: str, ctx: Optional[Context] = ..., param: Optional[Parameter] = ..., param_hint: Optional[str] = ... ) -> None: ... class MissingParameter(BadParameter): param_type: str # valid values: 'parameter', 'option', 'argument' def __init__( self, message: Optional[str] = ..., ctx: Optional[Context] = ..., param: Optional[Parameter] = ..., param_hint: Optional[str] = ..., param_type: Optional[str] = ... ) -> None: ... class NoSuchOption(UsageError): option_name: str possibilities: Optional[List[str]] def __init__( self, option_name: str, message: Optional[str] = ..., possibilities: Optional[List[str]] = ..., ctx: Optional[Context] = ... ) -> None: ... class BadOptionUsage(UsageError): def __init__(self, option_name: str, message: str, ctx: Optional[Context] = ...) -> None: ... class BadArgumentUsage(UsageError): def __init__(self, message: str, ctx: Optional[Context] = ...) -> None: ... class FileError(ClickException): ui_filename: str filename: str def __init__(self, filename: str, hint: Optional[str] = ...) -> None: ... class Abort(RuntimeError): ... class Exit(RuntimeError): def __init__(self, code: int = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/click/formatting.pyi0000644€tŠÔÚ€2›s®0000000311713576752252030764 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ContextManager, Generator, Iterable, List, Optional, Tuple FORCED_WIDTH: Optional[int] def measure_table(rows: Iterable[Iterable[str]]) -> Tuple[int, ...]: ... def iter_rows( rows: Iterable[Iterable[str]], col_count: int ) -> Generator[Tuple[str, ...], None, None]: ... 
def wrap_text( text: str, width: int = ..., initial_indent: str = ..., subsequent_indent: str = ..., preserve_paragraphs: bool = ... ) -> str: ... class HelpFormatter: indent_increment: int width: Optional[int] current_indent: int buffer: List[str] def __init__( self, indent_increment: int = ..., width: Optional[int] = ..., max_width: Optional[int] = ..., ) -> None: ... def write(self, string: str) -> None: ... def indent(self) -> None: ... def dedent(self) -> None: ... def write_usage( self, prog: str, args: str = ..., prefix: str = ..., ): ... def write_heading(self, heading: str) -> None: ... def write_paragraph(self) -> None: ... def write_text(self, text: str) -> None: ... def write_dl( self, rows: Iterable[Iterable[str]], col_max: int = ..., col_spacing: int = ..., ) -> None: ... def section(self, name) -> ContextManager[None]: ... def indentation(self) -> ContextManager[None]: ... def getvalue(self) -> str: ... def join_options(options: List[str]) -> Tuple[str, bool]: ... mypy-0.761/mypy/typeshed/third_party/2and3/click/globals.pyi0000644€tŠÔÚ€2›s®0000000045013576752252030232 0ustar jukkaDROPBOX\Domain Users00000000000000from click.core import Context from typing import Optional def get_current_context(silent: bool = ...) -> Context: ... def push_context(ctx: Context) -> None: ... def pop_context() -> None: ... def resolve_color_default(color: Optional[bool] = ...) -> Optional[bool]: ... mypy-0.761/mypy/typeshed/third_party/2and3/click/parser.pyi0000644€tŠÔÚ€2›s®0000000413513576752252030107 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterable, List, Optional, Set, Tuple from click.core import Context def _unpack_args( args: Iterable[str], nargs_spec: Iterable[int] ) -> Tuple[Tuple[Optional[Tuple[str, ...]], ...], List[str]]: ... def split_opt(opt: str) -> Tuple[str, str]: ... def normalize_opt(opt: str, ctx: Context) -> str: ... def split_arg_string(string: str) -> List[str]: ... 
class Option: dest: str action: str nargs: int const: Any obj: Any prefixes: Set[str] _short_opts: List[str] _long_opts: List[str] def __init__( self, opts: Iterable[str], dest: str, action: Optional[str] = ..., nargs: int = ..., const: Optional[Any] = ..., obj: Optional[Any] = ... ) -> None: ... @property def takes_value(self) -> bool: ... def process(self, value: Any, state: ParsingState) -> None: ... class Argument: dest: str nargs: int obj: Any def __init__(self, dest: str, nargs: int = ..., obj: Optional[Any] = ...) -> None: ... def process(self, value: Any, state: ParsingState) -> None: ... class ParsingState: opts: Dict[str, Any] largs: List[str] rargs: List[str] order: List[Any] def __init__(self, rargs: List[str]) -> None: ... class OptionParser: ctx: Optional[Context] allow_interspersed_args: bool ignore_unknown_options: bool _short_opt: Dict[str, Option] _long_opt: Dict[str, Option] _opt_prefixes: Set[str] _args: List[Argument] def __init__(self, ctx: Optional[Context] = ...) -> None: ... def add_option( self, opts: Iterable[str], dest: str, action: Optional[str] = ..., nargs: int = ..., const: Optional[Any] = ..., obj: Optional[Any] = ... ) -> None: ... def add_argument(self, dest: str, nargs: int = ..., obj: Optional[Any] = ...) -> None: ... def parse_args( self, args: List[str] ) -> Tuple[Dict[str, Any], List[str], List[Any]]: ... mypy-0.761/mypy/typeshed/third_party/2and3/click/termui.pyi0000644€tŠÔÚ€2›s®0000000665513576752252030131 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, Callable, Generator, Iterable, IO, List, Optional, Text, overload, Text, Tuple, TypeVar, Union, ) from click.core import _ConvertibleType from click._termui_impl import ProgressBar as _ProgressBar def hidden_prompt_func(prompt: str) -> str: ... def _build_prompt( text: str, suffix: str, show_default: bool = ..., default: Optional[str] = ..., ) -> str: ... 
def prompt( text: str, default: Optional[str] = ..., hide_input: bool = ..., confirmation_prompt: bool = ..., type: Optional[_ConvertibleType] = ..., value_proc: Optional[Callable[[Optional[str]], Any]] = ..., prompt_suffix: str = ..., show_default: bool = ..., err: bool = ..., show_choices: bool = ..., ) -> Any: ... def confirm( text: str, default: bool = ..., abort: bool = ..., prompt_suffix: str = ..., show_default: bool = ..., err: bool = ..., ) -> bool: ... def get_terminal_size() -> Tuple[int, int]: ... def echo_via_pager( text_or_generator: Union[str, Iterable[str], Callable[[], Generator[str, None, None]]], color: Optional[bool] = ..., ) -> None: ... _T = TypeVar('_T') @overload def progressbar( iterable: Iterable[_T], length: Optional[int] = ..., label: Optional[str] = ..., show_eta: bool = ..., show_percent: Optional[bool] = ..., show_pos: bool = ..., item_show_func: Optional[Callable[[_T], str]] = ..., fill_char: str = ..., empty_char: str = ..., bar_template: str = ..., info_sep: str = ..., width: int = ..., file: Optional[IO[Any]] = ..., color: Optional[bool] = ..., ) -> _ProgressBar[_T]: ... @overload def progressbar( iterable: None = ..., length: Optional[int] = ..., label: Optional[str] = ..., show_eta: bool = ..., show_percent: Optional[bool] = ..., show_pos: bool = ..., item_show_func: Optional[Callable[[_T], str]] = ..., fill_char: str = ..., empty_char: str = ..., bar_template: str = ..., info_sep: str = ..., width: int = ..., file: Optional[IO[Any]] = ..., color: Optional[bool] = ..., ) -> _ProgressBar[int]: ... def clear() -> None: ... def style( text: Text, fg: Optional[str] = ..., bg: Optional[str] = ..., bold: Optional[bool] = ..., dim: Optional[bool] = ..., underline: Optional[bool] = ..., blink: Optional[bool] = ..., reverse: Optional[bool] = ..., reset: bool = ..., ) -> str: ... def unstyle(text: Text) -> str: ... # Styling options copied from style() for nicer type checking. 
def secho( message: Optional[str] = ..., file: Optional[IO[Any]] = ..., nl: bool = ..., err: bool = ..., color: Optional[bool] = ..., fg: Optional[str] = ..., bg: Optional[str] = ..., bold: Optional[bool] = ..., dim: Optional[bool] = ..., underline: Optional[bool] = ..., blink: Optional[bool] = ..., reverse: Optional[bool] = ..., reset: bool = ..., ): ... def edit( text: Optional[str] = ..., editor: Optional[str] = ..., env: Optional[str] = ..., require_save: bool = ..., extension: str = ..., filename: Optional[str] = ..., ) -> str: ... def launch(url: str, wait: bool = ..., locate: bool = ...) -> int: ... def getchar(echo: bool = ...) -> Text: ... def pause( info: str = ..., err: bool = ... ) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/click/testing.pyi0000644€tŠÔÚ€2›s®0000000423713576752252030273 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import (IO, Any, BinaryIO, ContextManager, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Union) from .core import BaseCommand clickpkg: Any class EchoingStdin: def __init__(self, input: BinaryIO, output: BinaryIO) -> None: ... def __getattr__(self, x: str) -> Any: ... def read(self, n: int = ...) -> bytes: ... def readline(self, n: int = ...) -> bytes: ... def readlines(self) -> List[bytes]: ... def __iter__(self) -> Iterable[bytes]: ... def make_input_stream(input: Optional[Union[bytes, Text, IO[Any]]], charset: Text) -> BinaryIO: ... class Result: runner: CliRunner exit_code: int exception: Any exc_info: Optional[Any] stdout_bytes: bytes stderr_bytes: bytes def __init__( self, runner: CliRunner, stdout_bytes: bytes, stderr_bytes: bytes, exit_code: int, exception: Any, exc_info: Optional[Any] = ..., ) -> None: ... @property def output(self) -> Text: ... @property def stdout(self) -> Text: ... @property def stderr(self) -> Text: ... 
class CliRunner: charset: str env: Mapping[str, str] echo_stdin: bool mix_stderr: bool def __init__( self, charset: Optional[Text] = ..., env: Optional[Mapping[str, str]] = ..., echo_stdin: bool = ..., mix_stderr: bool = ..., ) -> None: ... def get_default_prog_name(self, cli: BaseCommand) -> str: ... def make_env(self, overrides: Optional[Mapping[str, str]] = ...) -> Dict[str, str]: ... def isolation( self, input: Optional[Union[bytes, Text, IO[Any]]] = ..., env: Optional[Mapping[str, str]] = ..., color: bool = ..., ) -> ContextManager[BinaryIO]: ... def invoke( self, cli: BaseCommand, args: Optional[Union[str, Iterable[str]]] = ..., input: Optional[Union[bytes, Text, IO[Any]]] = ..., env: Optional[Mapping[str, str]] = ..., catch_exceptions: bool = ..., color: bool = ..., **extra: Any, ) -> Result: ... def isolated_filesystem(self) -> ContextManager[str]: ... mypy-0.761/mypy/typeshed/third_party/2and3/click/types.pyi0000644€tŠÔÚ€2›s®0000001257613576752252027767 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Generic, IO, Iterable, List, Optional, TypeVar, Union, Tuple as _PyTuple, Type import datetime import uuid from click.core import Context, Parameter, _ParamType as ParamType, _ConvertibleType class BoolParamType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> bool: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> bool: ... class CompositeParamType(ParamType): arity: int class Choice(ParamType): choices: Iterable[str] def __init__( self, choices: Iterable[str], case_sensitive: bool = ..., ) -> None: ... class DateTime(ParamType): def __init__( self, formats: Optional[List[str]] = ..., ) -> None: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> datetime.datetime: ... 
class FloatParamType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> float: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> float: ... class FloatRange(FloatParamType): def __init__( self, min: Optional[float] = ..., max: Optional[float] = ..., clamp: bool = ..., ) -> None: ... class File(ParamType): def __init__( self, mode: str = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., lazy: Optional[bool] = ..., atomic: Optional[bool] = ..., ) -> None: ... def __call__(self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ...) -> IO[Any]: ... def convert(self, value: str, param: Optional[Parameter], ctx: Optional[Context]) -> IO[Any]: ... def resolve_lazy_flag(self, value: str) -> bool: ... _F = TypeVar('_F') # result of the function _Func = Callable[[Optional[str]], _F] class FuncParamType(ParamType, Generic[_F]): func: _Func[_F] def __init__(self, func: _Func[_F]) -> None: ... def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> _F: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> _F: ... class IntParamType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> int: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> int: ... class IntRange(IntParamType): def __init__( self, min: Optional[int] = ..., max: Optional[int] = ..., clamp: bool = ... ) -> None: ... _PathType = TypeVar('_PathType', str, bytes) class Path(ParamType): def __init__( self, exists: bool = ..., file_okay: bool = ..., dir_okay: bool = ..., writable: bool = ..., readable: bool = ..., resolve_path: bool = ..., allow_dash: bool = ..., path_type: Optional[Type[_PathType]] = ..., ) -> None: ... 
def coerce_path_result(self, rv: Union[str, bytes]) -> _PathType: ... def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> _PathType: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> _PathType: ... class StringParamType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> str: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> str: ... class Tuple(CompositeParamType): types: List[ParamType] def __init__(self, types: Iterable[Any]) -> None: ... def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> Tuple: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> Tuple: ... class UnprocessedParamType(ParamType): ... class UUIDParameterType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> uuid.UUID: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> uuid.UUID: ... def convert_type(ty: Optional[_ConvertibleType], default: Optional[Any] = ...) -> ParamType: ... # parameter type shortcuts BOOL: BoolParamType FLOAT: FloatParamType INT: IntParamType STRING: StringParamType UNPROCESSED: UnprocessedParamType UUID: UUIDParameterType mypy-0.761/mypy/typeshed/third_party/2and3/click/utils.pyi0000644€tŠÔÚ€2›s®0000000357313576752252027760 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, AnyStr, Callable, Generic, Iterator, IO, List, Optional, TypeVar, Union, Text _T = TypeVar('_T') def _posixify(name: str) -> str: ... def safecall(func: _T) -> _T: ... def make_str(value: Any) -> str: ... def make_default_short_help(help: str, max_length: int = ...): ... 
class LazyFile(object): name: str mode: str encoding: Optional[str] errors: str atomic: bool def __init__( self, filename: str, mode: str = ..., encoding: Optional[str] = ..., errors: str = ..., atomic: bool = ... ) -> None: ... def open(self) -> IO[Any]: ... def close(self) -> None: ... def close_intelligently(self) -> None: ... def __enter__(self) -> LazyFile: ... def __exit__(self, exc_type, exc_value, tb): ... def __iter__(self) -> Iterator[Any]: ... class KeepOpenFile(Generic[AnyStr]): _file: IO[AnyStr] def __init__(self, file: IO[AnyStr]) -> None: ... def __enter__(self) -> KeepOpenFile[AnyStr]: ... def __exit__(self, exc_type, exc_value, tb): ... def __iter__(self) -> Iterator[AnyStr]: ... def echo( message: object = ..., file: Optional[IO[Text]] = ..., nl: bool = ..., err: bool = ..., color: Optional[bool] = ..., ) -> None: ... def get_binary_stream(name: str) -> IO[bytes]: ... def get_text_stream( name: str, encoding: Optional[str] = ..., errors: str = ... ) -> IO[str]: ... def open_file( filename: str, mode: str = ..., encoding: Optional[str] = ..., errors: str = ..., lazy: bool = ..., atomic: bool = ... ) -> Any: ... # really Union[IO, LazyFile, KeepOpenFile] def get_os_args() -> List[str]: ... def format_filename(filename: str, shorten: bool = ...) -> str: ... def get_app_dir( app_name: str, roaming: bool = ..., force_posix: bool = ... ) -> str: ... mypy-0.761/mypy/typeshed/third_party/2and3/croniter.pyi0000644€tŠÔÚ€2›s®0000000360013576752252027347 0ustar jukkaDROPBOX\Domain Users00000000000000import datetime from typing import Any, Dict, Iterator, List, Optional, Text, Tuple, Type, TypeVar, Union _RetType = Union[Type[float], Type[datetime.datetime]] _SelfT = TypeVar('_SelfT', bound=croniter) class CroniterError(ValueError): ... class CroniterBadCronError(CroniterError): ... class CroniterBadDateError(CroniterError): ... class CroniterNotAlphaError(CroniterError): ... 
class croniter(Iterator[Any]): MONTHS_IN_YEAR: int RANGES: Tuple[Tuple[int, int], ...] DAYS: Tuple[int, ...] ALPHACONV: Tuple[Dict[str, Any], ...] LOWMAP: Tuple[Dict[int, Any], ...] bad_length: str tzinfo: Optional[datetime.tzinfo] cur: float expanded: List[List[str]] start_time: float dst_start_time: float nth_weekday_of_month: Dict[str, Any] def __init__(self, expr_format: Text, start_time: Optional[Union[float, datetime.datetime]] = ..., ret_type: Optional[_RetType] = ...) -> None: ... # Most return value depend on ret_type, which can be passed in both as a method argument and as # a constructor argument. def get_next(self, ret_type: Optional[_RetType] = ...) -> Any: ... def get_prev(self, ret_type: Optional[_RetType] = ...) -> Any: ... def get_current(self, ret_type: Optional[_RetType] = ...) -> Any: ... def __iter__(self: _SelfT) -> _SelfT: ... def __next__(self, ret_type: Optional[_RetType] = ...) -> Any: ... def next(self, ret_type: Optional[_RetType] = ...) -> Any: ... def all_next(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ... def all_prev(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ... def iter(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ... def is_leap(self, year: int) -> bool: ... @classmethod def expand(cls, expr_format: Text) -> Tuple[List[List[str]], Dict[str, Any]]: ... @classmethod def is_valid(cls, expression: Text) -> bool: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/0000755€tŠÔÚ€2›s®0000000000013576752267027541 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032003 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/exceptions.pyi0000644€tŠÔÚ€2›s®0000000040613576752252032437 0ustar jukkaDROPBOX\Domain Users00000000000000class AlreadyFinalized(Exception): ... class AlreadyUpdated(Exception): ... class InvalidKey(Exception): ... 
class InvalidSignature(Exception): ... class InvalidTag(Exception): ... class NotYetFinalized(Exception): ... class UnsupportedAlgorithm(Exception): ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/fernet.pyi0000644€tŠÔÚ€2›s®0000000120013576752252031532 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Optional class InvalidToken(Exception): ... class Fernet(object): def __init__(self, key: bytes) -> None: ... def decrypt(self, token: bytes, ttl: Optional[int] = ...) -> bytes: ... def encrypt(self, data: bytes) -> bytes: ... def extract_timestamp(self, token: bytes) -> int: ... @classmethod def generate_key(cls) -> bytes: ... class MultiFernet(object): def __init__(self, fernets: List[Fernet]) -> None: ... def decrypt(self, token: bytes, ttl: Optional[int] = ...) -> bytes: ... def encrypt(self, data: bytes) -> bytes: ... def rotate(self, msg: bytes) -> bytes: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/0000755€tŠÔÚ€2›s®0000000000013576752267031025 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/__init__.pyi0000644€tŠÔÚ€2›s®0000000007713576752252033305 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def __getattr__(name: str) -> Any: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/backends/0000755€tŠÔÚ€2›s®0000000000013576752267032577 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/backends/__init__.pyi0000644€tŠÔÚ€2›s®0000000007213576752252035052 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def default_backend() -> Any: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/backends/interfaces.pyi0000644€tŠÔÚ€2›s®0000002005613576752252035442 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from typing import Any, Optional, Union from cryptography.hazmat.primitives.asymmetric.dh import ( DHParameterNumbers, DHParameters, DHPrivateKey, DHPrivateNumbers, DHPublicKey, DHPublicNumbers, ) from cryptography.hazmat.primitives.asymmetric.dsa import ( DSAParameterNumbers, DSAParameters, DSAPrivateKey, DSAPrivateNumbers, DSAPublicKey, DSAPublicNumbers, ) from cryptography.hazmat.primitives.asymmetric.ec import ( EllipticCurve, EllipticCurvePrivateKey, EllipticCurvePrivateNumbers, EllipticCurvePublicKey, EllipticCurvePublicNumbers, EllipticCurveSignatureAlgorithm, ) from cryptography.hazmat.primitives.asymmetric.padding import AsymmetricPadding from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPrivateNumbers, RSAPublicKey, RSAPublicNumbers from cryptography.hazmat.primitives.ciphers import BlockCipherAlgorithm, CipherAlgorithm, CipherContext from cryptography.hazmat.primitives.ciphers.modes import Mode from cryptography.hazmat.primitives.hashes import HashAlgorithm, HashContext from cryptography.x509 import ( Certificate, CertificateBuilder, CertificateRevocationList, CertificateRevocationListBuilder, CertificateSigningRequest, CertificateSigningRequestBuilder, Name, RevokedCertificate, RevokedCertificateBuilder, ) class CipherBackend(metaclass=ABCMeta): @abstractmethod def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool: ... @abstractmethod def create_symmetric_encryption_ctx(self, cipher: CipherAlgorithm, mode: Mode) -> CipherContext: ... @abstractmethod def create_symmetric_decryption_ctx(self, cipher: CipherAlgorithm, mode: Mode) -> CipherContext: ... class CMACBackend(metaclass=ABCMeta): @abstractmethod def cmac_algorithm_supported(self, algorithm: BlockCipherAlgorithm) -> bool: ... 
@abstractmethod def create_cmac_ctx(self, algorithm: BlockCipherAlgorithm) -> Any: ... class DERSerializationBackend(metaclass=ABCMeta): @abstractmethod def load_der_parameters(self, data: bytes) -> Any: ... @abstractmethod def load_der_private_key(self, data: bytes, password: Optional[bytes]) -> Any: ... @abstractmethod def load_der_public_key(self, data: bytes) -> Any: ... class DHBackend(metaclass=ABCMeta): @abstractmethod def dh_parameters_supported(self, p: int, g: int, q: Optional[int]) -> bool: ... @abstractmethod def dh_x942_serialization_supported(self) -> bool: ... @abstractmethod def generate_dh_parameters(self, generator: int, key_size: int) -> DHParameters: ... @abstractmethod def generate_dh_private_key(self, parameters: DHParameters) -> DHPrivateKey: ... @abstractmethod def generate_dh_private_key_and_parameters(self, generator: int, key_size: int) -> DHPrivateKey: ... @abstractmethod def load_dh_parameter_numbers(self, numbers: DHParameterNumbers) -> DHParameters: ... @abstractmethod def load_dh_private_numbers(self, numbers: DHPrivateNumbers) -> DHPrivateKey: ... @abstractmethod def load_dh_public_numbers(self, numbers: DHPublicNumbers) -> DHPublicKey: ... class DSABackend(metaclass=ABCMeta): @abstractmethod def dsa_hash_supported(self, algorithm: HashAlgorithm) -> bool: ... @abstractmethod def dsa_parameters_supported(self, p: int, q: int, g: int) -> bool: ... @abstractmethod def generate_dsa_parameters(self, key_size: int) -> DSAParameters: ... @abstractmethod def generate_dsa_private_key(self, parameters: DSAParameters) -> DSAPrivateKey: ... @abstractmethod def generate_dsa_private_key_and_parameters(self, key_size: int) -> DSAPrivateKey: ... @abstractmethod def load_dsa_parameter_numbers(self, numbers: DSAParameterNumbers) -> DSAParameters: ... @abstractmethod def load_dsa_private_numbers(self, numbers: DSAPrivateNumbers) -> DSAPrivateKey: ... @abstractmethod def load_dsa_public_numbers(self, numbers: DSAPublicNumbers) -> DSAPublicKey: ... 
class EllipticCurveBackend(metaclass=ABCMeta): @abstractmethod def derive_elliptic_curve_private_key(self, private_value: int, curve: EllipticCurve) -> EllipticCurvePrivateKey: ... @abstractmethod def elliptic_curve_signature_algorithm_supported( self, signature_algorithm: EllipticCurveSignatureAlgorithm, curve: EllipticCurve ) -> bool: ... @abstractmethod def elliptic_curve_supported(self, curve: EllipticCurve) -> bool: ... @abstractmethod def generate_elliptic_curve_private_key(self, curve: EllipticCurve) -> EllipticCurvePrivateKey: ... @abstractmethod def load_elliptic_curve_private_numbers(self, numbers: EllipticCurvePrivateNumbers) -> EllipticCurvePrivateKey: ... @abstractmethod def load_elliptic_curve_public_numbers(self, numbers: EllipticCurvePublicNumbers) -> EllipticCurvePublicKey: ... class HMACBackend(metaclass=ABCMeta): @abstractmethod def create_hmac_ctx(self, key: bytes, algorithm: HashAlgorithm) -> HashContext: ... @abstractmethod def cmac_algorithm_supported(self, algorithm: HashAlgorithm) -> bool: ... class HashBackend(metaclass=ABCMeta): @abstractmethod def create_hash_ctx(self, algorithm: HashAlgorithm) -> HashContext: ... @abstractmethod def hash_supported(self, algorithm: HashAlgorithm) -> bool: ... class PBKDF2HMACBackend(metaclass=ABCMeta): @abstractmethod def derive_pbkdf2_hmac( self, algorithm: HashAlgorithm, length: int, salt: bytes, iterations: int, key_material: bytes ) -> bytes: ... @abstractmethod def pbkdf2_hmac_supported(self, algorithm: HashAlgorithm) -> bool: ... class PEMSerializationBackend(metaclass=ABCMeta): @abstractmethod def load_pem_parameters(self, data: bytes) -> Any: ... @abstractmethod def load_pem_private_key(self, data: bytes, password: Optional[bytes]) -> Any: ... @abstractmethod def load_pem_public_key(self, data: bytes) -> Any: ... class RSABackend(metaclass=ABCMeta): @abstractmethod def generate_rsa_parameters_supported(self, public_exponent: int, key_size: int) -> bool: ... 
@abstractmethod def generate_rsa_private_key(self, public_exponent: int, key_size: int) -> RSAPrivateKey: ... @abstractmethod def load_rsa_public_numbers(self, numbers: RSAPublicNumbers) -> RSAPublicKey: ... @abstractmethod def load_rsa_private_numbers(self, numbers: RSAPrivateNumbers) -> RSAPrivateKey: ... @abstractmethod def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool: ... class ScryptBackend(metaclass=ABCMeta): @abstractmethod def derive_scrypt(self, key_material: bytes, salt: bytes, length: int, n: int, r: int, p: int) -> bytes: ... class X509Backend(metaclass=ABCMeta): @abstractmethod def create_x509_certificate( self, builder: CertificateBuilder, private_key: Union[DSAPrivateKey, EllipticCurvePrivateKey, RSAPrivateKey], algorithm: HashAlgorithm, ) -> Certificate: ... @abstractmethod def create_x509_crl( self, builder: CertificateRevocationListBuilder, private_key: Union[DSAPrivateKey, EllipticCurvePrivateKey, RSAPrivateKey], algorithm: HashAlgorithm, ) -> CertificateRevocationList: ... @abstractmethod def create_x509_csr( self, builder: CertificateSigningRequestBuilder, private_key: Union[DSAPrivateKey, EllipticCurvePrivateKey, RSAPrivateKey], algorithm: HashAlgorithm, ) -> CertificateSigningRequest: ... @abstractmethod def create_x509_revoked_certificate(self, builder: RevokedCertificateBuilder) -> RevokedCertificate: ... @abstractmethod def load_der_x509_certificate(self, data: bytes) -> Certificate: ... @abstractmethod def load_der_x509_csr(self, data: bytes) -> CertificateSigningRequest: ... @abstractmethod def load_pem_x509_certificate(self, data: bytes) -> Certificate: ... @abstractmethod def load_pem_x509_csr(self, data: bytes) -> CertificateSigningRequest: ... @abstractmethod def x509_name_bytes(self, name: Name) -> bytes: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/bindings/0000755€tŠÔÚ€2›s®0000000000013576752267032622 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/bindings/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252035064 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/bindings/openssl/0000755€tŠÔÚ€2›s®0000000000013576752267034305 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/bindings/openssl/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252036547 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/bindings/openssl/binding.pyi0000644€tŠÔÚ€2›s®0000000022413576752252036432 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Binding(object): ffi: Optional[Any] lib: Optional[Any] def init_static_locks(self) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/0000755€tŠÔÚ€2›s®0000000000013576752267033220 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/__init__.pyi0000644€tŠÔÚ€2›s®0000000007713576752252035500 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def __getattr__(name: str) -> Any: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/0000755€tŠÔÚ€2›s®0000000000013576752267035375 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/__init__.pyi0000644€tŠÔÚ€2›s®0000000007713576752252037655 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def __getattr__(name: str) -> Any: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/dh.pyi0000644€tŠÔÚ€2›s®0000000467213576752252036516 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from typing import Optional from cryptography.hazmat.backends.interfaces import DHBackend from cryptography.hazmat.primitives.serialization import ( Encoding, KeySerializationEncryption, ParameterFormat, PrivateFormat, PublicFormat, ) class DHParameters(metaclass=ABCMeta): @abstractmethod def generate_private_key(self) -> DHPrivateKey: ... @abstractmethod def parameter_bytes(self, encoding: Encoding, format: ParameterFormat) -> bytes: ... @abstractmethod def parameter_numbers(self) -> DHParameterNumbers: ... DHParametersWithSerialization = DHParameters class DHParameterNumbers(object): @property def p(self) -> int: ... @property def g(self) -> int: ... @property def q(self) -> int: ... def __init__(self, p: int, g: int, q: Optional[int]) -> None: ... def parameters(self, backend: DHBackend) -> DHParameters: ... class DHPrivateKey(metaclass=ABCMeta): key_size: int @abstractmethod def exchange(self, peer_public_key: DHPublicKey) -> bytes: ... @abstractmethod def parameters(self) -> DHParameters: ... @abstractmethod def public_key(self) -> DHPublicKey: ... class DHPrivateKeyWithSerialization(DHPrivateKey): @abstractmethod def private_bytes( self, encoding: Encoding, format: PrivateFormat, encryption_algorithm: KeySerializationEncryption ) -> bytes: ... @abstractmethod def private_numbers(self) -> DHPrivateNumbers: ... class DHPrivateNumbers(object): @property def public_numbers(self) -> DHPublicNumbers: ... @property def x(self) -> int: ... def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: ... def private_key(self, backend: DHBackend) -> DHPrivateKey: ... class DHPublicKey(metaclass=ABCMeta): @property @abstractmethod def key_size(self) -> int: ... @abstractmethod def parameters(self) -> DHParameters: ... 
@abstractmethod def public_bytes(self, encoding: Encoding, format: PublicFormat) -> bytes: ... @abstractmethod def public_numbers(self) -> DHPublicNumbers: ... DHPublicKeyWithSerialization = DHPublicKey class DHPublicNumbers(object): @property def parameter_numbers(self) -> DHParameterNumbers: ... @property def y(self) -> int: ... def __init__(self, y: int, parameter_numbers: DHParameterNumbers) -> None: ... def public_key(self, backend: DHBackend) -> DHPublicKey: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/dsa.pyi0000644€tŠÔÚ€2›s®0000000553713576752252036673 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from typing import Union from cryptography.hazmat.backends.interfaces import DSABackend from cryptography.hazmat.primitives.asymmetric import AsymmetricVerificationContext from cryptography.hazmat.primitives.asymmetric.utils import Prehashed from cryptography.hazmat.primitives.hashes import HashAlgorithm from cryptography.hazmat.primitives.serialization import Encoding, KeySerializationEncryption, PrivateFormat, PublicFormat class DSAParameters(metaclass=ABCMeta): @abstractmethod def generate_private_key(self) -> DSAPrivateKey: ... class DSAParametersWithNumbers(DSAParameters): @abstractmethod def parameter_numbers(self) -> DSAParameterNumbers: ... class DSAParameterNumbers(object): @property def p(self) -> int: ... @property def q(self) -> int: ... @property def g(self) -> int: ... def __init__(self, p: int, q: int, g: int) -> None: ... def parameters(self, backend: DSABackend) -> DSAParameters: ... class DSAPrivateKey(metaclass=ABCMeta): @property @abstractmethod def key_size(self) -> int: ... @abstractmethod def parameters(self) -> DSAParameters: ... @abstractmethod def public_key(self) -> DSAPublicKey: ... @abstractmethod def sign(self, data: bytes, algorithm: Union[HashAlgorithm, Prehashed]) -> bytes: ... 
class DSAPrivateKeyWithSerialization(DSAPrivateKey): @abstractmethod def private_bytes( self, encoding: Encoding, format: PrivateFormat, encryption_algorithm: KeySerializationEncryption ) -> bytes: ... @abstractmethod def private_numbers(self) -> DSAPrivateNumbers: ... class DSAPrivateNumbers(object): @property def x(self) -> int: ... @property def public_numbers(self) -> DSAPublicNumbers: ... def __init__(self, x: int, public_numbers: DSAPublicNumbers) -> None: ... class DSAPublicKey(metaclass=ABCMeta): @property @abstractmethod def key_size(self) -> int: ... @abstractmethod def public_bytes(self, encoding: Encoding, format: PublicFormat) -> bytes: ... @abstractmethod def public_numbers(self) -> DSAPublicNumbers: ... @abstractmethod def verifier(self, signature: bytes, signature_algorithm: Union[HashAlgorithm, Prehashed]) -> AsymmetricVerificationContext: ... @abstractmethod def verify(self, signature: bytes, data: bytes, algorithm: Union[HashAlgorithm, Prehashed]) -> None: ... DSAPublicKeyWithSerialization = DSAPublicKey class DSAPublicNumbers(object): @property def y(self) -> int: ... @property def parameter_numbers(self) -> DSAParameterNumbers: ... def __init__(self, y: int, parameter_numbers: DSAParameterNumbers) -> None: ... def generate_parameters(key_size: int, backend: DSABackend) -> DSAParameters: ... def generate_private_key(key_size: int, backend: DSABackend) -> DSAPrivateKey: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/ec.pyi0000644€tŠÔÚ€2›s®0000001601213576752252036501 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from typing import ClassVar, Union from cryptography.hazmat.backends.interfaces import EllipticCurveBackend from cryptography.hazmat.primitives.asymmetric.utils import Prehashed from cryptography.hazmat.primitives.asymmetric import AsymmetricVerificationContext from cryptography.hazmat.primitives.hashes import HashAlgorithm from cryptography.hazmat.primitives.serialization import Encoding, KeySerializationEncryption, PrivateFormat, PublicFormat from cryptography.x509 import ObjectIdentifier class EllipticCurve(metaclass=ABCMeta): @property @abstractmethod def key_size(self) -> int: ... @property @abstractmethod def name(self) -> str: ... class BrainpoolP256R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class BrainpoolP384R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class BrainpoolP512R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECP192R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECP224R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECP256K1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECP256R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECP384R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECP521R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECT163K1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... 
class SECT163R2(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECT233K1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECT233R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECT283K1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECT283R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECT409K1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECT409R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECT571K1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SECT571R1(EllipticCurve): @property def key_size(self) -> int: ... @property def name(self) -> str: ... class EllipticCurveOID(object): SECP192R1: ClassVar[ObjectIdentifier] SECP224R1: ClassVar[ObjectIdentifier] SECP256K1: ClassVar[ObjectIdentifier] SECP256R1: ClassVar[ObjectIdentifier] SECP384R1: ClassVar[ObjectIdentifier] SECP521R1: ClassVar[ObjectIdentifier] BRAINPOOLP256R1: ClassVar[ObjectIdentifier] BRAINPOOLP384R1: ClassVar[ObjectIdentifier] BRAINPOOLP512R1: ClassVar[ObjectIdentifier] SECT163K1: ClassVar[ObjectIdentifier] SECT163R2: ClassVar[ObjectIdentifier] SECT233K1: ClassVar[ObjectIdentifier] SECT233R1: ClassVar[ObjectIdentifier] SECT283K1: ClassVar[ObjectIdentifier] SECT283R1: ClassVar[ObjectIdentifier] SECT409K1: ClassVar[ObjectIdentifier] SECT409R1: ClassVar[ObjectIdentifier] SECT571K1: ClassVar[ObjectIdentifier] SECT571R1: ClassVar[ObjectIdentifier] class EllipticCurvePrivateKey(metaclass=ABCMeta): @property @abstractmethod def curve(self) -> EllipticCurve: ... @property @abstractmethod def key_size(self) -> int: ... 
@abstractmethod def exchange(self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey) -> bytes: ... @abstractmethod def public_key(self) -> EllipticCurvePublicKey: ... @abstractmethod def sign(self, data: bytes, signature_algorithm: EllipticCurveSignatureAlgorithm) -> bytes: ... class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey): @abstractmethod def private_bytes( self, encoding: Encoding, format: PrivateFormat, encryption_algorithm: KeySerializationEncryption ) -> bytes: ... @abstractmethod def private_numbers(self) -> EllipticCurvePrivateNumbers: ... class EllipticCurvePrivateNumbers(object): @property def private_value(self) -> int: ... @property def public_numbers(self) -> EllipticCurvePublicNumbers: ... def __init__(self, private_value: int, public_numbers: EllipticCurvePublicNumbers) -> None: ... def private_key(self, backend: EllipticCurveBackend) -> EllipticCurvePrivateKey: ... class EllipticCurvePublicKey(metaclass=ABCMeta): @property @abstractmethod def curve(self) -> EllipticCurve: ... @property @abstractmethod def key_size(self) -> int: ... @classmethod def from_encoded_point(cls, curve: EllipticCurve, data: bytes) -> EllipticCurvePublicKey: ... @abstractmethod def public_bytes(self, encoding: Encoding, format: PublicFormat) -> bytes: ... @abstractmethod def public_numbers(self) -> EllipticCurvePublicNumbers: ... @abstractmethod def verifier(self, signature: bytes, signature_algorithm: EllipticCurveSignatureAlgorithm) -> AsymmetricVerificationContext: ... @abstractmethod def verify(self, signature: bytes, data: bytes, signature_algorithm: EllipticCurveSignatureAlgorithm) -> None: ... EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey class EllipticCurvePublicNumbers(object): @property def curve(self) -> EllipticCurve: ... @property def x(self) -> int: ... @property def y(self) -> int: ... def __init__(self, x: int, y: int, curve: EllipticCurve) -> None: ... 
@classmethod def from_encoded_point(cls, curve: EllipticCurve, data: bytes) -> EllipticCurvePublicNumbers: ... def public_key(self, backend: EllipticCurveBackend) -> EllipticCurvePublicKey: ... class EllipticCurveSignatureAlgorithm(metaclass=ABCMeta): @property @abstractmethod def algorithm(self) -> Union[HashAlgorithm, Prehashed]: ... class ECDH(object): ... class ECDSA(EllipticCurveSignatureAlgorithm): def __init__(self, algorithm: HashAlgorithm): ... @property def algorithm(self) -> Union[HashAlgorithm, Prehashed]: ... def derive_private_key(private_value: int, curve: EllipticCurve, backend: EllipticCurveBackend) -> EllipticCurvePrivateKey: ... def generate_private_key(curve: EllipticCurve, backend: EllipticCurveBackend) -> EllipticCurvePrivateKey: ... def get_curve_for_oid(oid: ObjectIdentifier) -> EllipticCurve: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/ed25519.pyi0000644€tŠÔÚ€2›s®0000000173113576752252037112 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from cryptography.hazmat.primitives.serialization import Encoding, KeySerializationEncryption, PrivateFormat, PublicFormat class Ed25519PrivateKey(metaclass=ABCMeta): @classmethod def generate(cls) -> Ed25519PrivateKey: ... @classmethod def from_private_bytes(cls, data: bytes) -> Ed25519PrivateKey: ... @abstractmethod def private_bytes( self, encoding: Encoding, format: PrivateFormat, encryption_algorithm: KeySerializationEncryption ) -> bytes: ... @abstractmethod def public_key(self) -> Ed25519PublicKey: ... @abstractmethod def sign(self, data: bytes) -> bytes: ... class Ed25519PublicKey(metaclass=ABCMeta): @classmethod def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey: ... @abstractmethod def public_bytes(self, encoding: Encoding, format: PublicFormat) -> bytes: ... @abstractmethod def verify(self, signature: bytes, data: bytes) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/ed448.pyi0000644€tŠÔÚ€2›s®0000000171513576752252036746 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from cryptography.hazmat.primitives.serialization import Encoding, KeySerializationEncryption, PrivateFormat, PublicFormat class Ed448PrivateKey(metaclass=ABCMeta): @classmethod def generate(cls) -> Ed448PrivateKey: ... @classmethod def from_private_bytes(cls, data: bytes) -> Ed448PrivateKey: ... @abstractmethod def private_bytes( self, encoding: Encoding, format: PrivateFormat, encryption_algorithm: KeySerializationEncryption ) -> bytes: ... @abstractmethod def public_key(self) -> Ed448PublicKey: ... @abstractmethod def sign(self, data: bytes) -> bytes: ... class Ed448PublicKey(metaclass=ABCMeta): @classmethod def from_public_bytes(cls, data: bytes) -> Ed448PublicKey: ... @abstractmethod def public_bytes(self, encoding: Encoding, format: PublicFormat) -> bytes: ... @abstractmethod def verify(self, signature: bytes, data: bytes) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/padding.pyi0000644€tŠÔÚ€2›s®0000000142313576752252037520 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from typing import ClassVar, Optional, Union from cryptography.hazmat.primitives.hashes import HashAlgorithm class AsymmetricPadding(metaclass=ABCMeta): @property @abstractmethod def name(self) -> str: ... class MGF1(object): def __init__(self, algorithm: HashAlgorithm) -> None: ... class OAEP(AsymmetricPadding): def __init__(self, mgf: MGF1, algorithm: HashAlgorithm, label: Optional[bytes]) -> None: ... @property def name(self) -> str: ... class PKCS1v15(AsymmetricPadding): @property def name(self) -> str: ... class PSS(AsymmetricPadding): MAX_LENGTH: ClassVar[object] def __init__(self, mgf: MGF1, salt_length: Union[int, object]) -> None: ... @property def name(self) -> str: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/rsa.pyi0000644€tŠÔÚ€2›s®0000000620413576752252036701 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from typing import Tuple, Union from cryptography.hazmat.backends.interfaces import RSABackend from cryptography.hazmat.primitives.asymmetric import AsymmetricVerificationContext from cryptography.hazmat.primitives.asymmetric.padding import AsymmetricPadding from cryptography.hazmat.primitives.asymmetric.utils import Prehashed from cryptography.hazmat.primitives.hashes import HashAlgorithm from cryptography.hazmat.primitives.serialization import Encoding, KeySerializationEncryption, PrivateFormat, PublicFormat class RSAPrivateKey(metaclass=ABCMeta): @property @abstractmethod def key_size(self) -> int: ... @abstractmethod def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: ... @abstractmethod def public_key(self) -> RSAPublicKey: ... @abstractmethod def sign(self, data: bytes, padding: AsymmetricPadding, algorithm: Union[HashAlgorithm, Prehashed]) -> bytes: ... class RSAPrivateKeyWithSerialization(RSAPrivateKey): @abstractmethod def private_bytes( self, encoding: Encoding, format: PrivateFormat, encryption_algorithm: KeySerializationEncryption ) -> bytes: ... @abstractmethod def private_numbers(self) -> RSAPrivateNumbers: ... class RSAPublicKey(metaclass=ABCMeta): @property @abstractmethod def key_size(self) -> int: ... @abstractmethod def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: ... @abstractmethod def public_bytes(self, encoding: Encoding, format: PublicFormat) -> bytes: ... @abstractmethod def public_numbers(self) -> RSAPublicNumbers: ... @abstractmethod def verifier(self, signature: bytes, padding: AsymmetricPadding, algorithm: Union[HashAlgorithm, Prehashed]) -> AsymmetricVerificationContext: ... 
@abstractmethod def verify(self, signature: bytes, data: bytes, padding: AsymmetricPadding, algorithm: Union[HashAlgorithm, Prehashed]) -> None: ... RSAPublicKeyWithSerialization = RSAPublicKey def generate_private_key(public_exponent: int, key_size: int, backend: RSABackend) -> RSAPrivateKeyWithSerialization: ... def rsa_crt_iqmp(p: int, q: int) -> int: ... def rsa_crt_dmp1(private_exponent: int, p: int) -> int: ... def rsa_crt_dmq1(private_exponent: int, q: int) -> int: ... def rsa_recover_prime_factors(n: int, e: int, d: int) -> Tuple[int, int]: ... class RSAPrivateNumbers(object): def __init__(self, p: int, q: int, d: int, dmp1: int, dmq1: int, iqmp: int, public_numbers: RSAPublicNumbers) -> None: ... @property def p(self) -> int: ... @property def q(self) -> int: ... @property def d(self) -> int: ... @property def dmp1(self) -> int: ... @property def dmq1(self) -> int: ... @property def iqmp(self) -> int: ... @property def public_numbers(self) -> RSAPublicNumbers: ... def private_key(self, backend) -> RSAPrivateKey: ... class RSAPublicNumbers(object): def __init__(self, e: int, n: int) -> None: ... @property def p(self) -> int: ... @property def q(self) -> int: ... def public_key(self, backend) -> RSAPublicKey: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/utils.pyi0000644€tŠÔÚ€2›s®0000000026213576752252037252 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple def decode_dss_signature(signature: bytes) -> Tuple[int, int]: ... def encode_dss_signature(r: int, s: int) -> bytes: ... class Prehashed(object): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/x25519.pyi0000644€tŠÔÚ€2›s®0000000162713576752252036775 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from cryptography.hazmat.primitives.serialization import Encoding, KeySerializationEncryption, PrivateFormat, PublicFormat class X25519PrivateKey(metaclass=ABCMeta): @classmethod def from_private_bytes(cls, data: bytes) -> X25519PrivateKey: ... @classmethod def generate(cls) -> X25519PrivateKey: ... @abstractmethod def exchange(self, peer_public_key: X25519PublicKey) -> bytes: ... @abstractmethod def private_bytes( self, encoding: Encoding, format: PrivateFormat, encryption_algorithm: KeySerializationEncryption ) -> bytes: ... @abstractmethod def public_key(self) -> X25519PublicKey: ... class X25519PublicKey(metaclass=ABCMeta): @classmethod def from_public_bytes(cls, data: bytes) -> X25519PublicKey: ... @abstractmethod def public_bytes(self, encoding: Encoding, format: PublicFormat) -> bytes: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/x448.pyi0000644€tŠÔÚ€2›s®0000000161113576752252036620 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from cryptography.hazmat.primitives.serialization import Encoding, KeySerializationEncryption, PrivateFormat, PublicFormat class X448PrivateKey(metaclass=ABCMeta): @classmethod def from_private_bytes(cls, data: bytes) -> X448PrivateKey: ... @classmethod def generate(cls) -> X448PrivateKey: ... @abstractmethod def exchange(self, peer_public_key: X448PublicKey) -> bytes: ... @abstractmethod def private_bytes( self, encoding: Encoding, format: PrivateFormat, encryption_algorithm: KeySerializationEncryption ) -> bytes: ... @abstractmethod def public_key(self) -> X448PublicKey: ... class X448PublicKey(metaclass=ABCMeta): @classmethod def from_public_bytes(cls, data: bytes) -> X448PublicKey: ... 
@abstractmethod def public_bytes(self, encoding: Encoding, format: PublicFormat) -> bytes: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/ciphers/0000755€tŠÔÚ€2›s®0000000000013576752267034655 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/ciphers/__init__.pyi0000644€tŠÔÚ€2›s®0000000242213576752252037131 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from cryptography.hazmat.backends.interfaces import CipherBackend from cryptography.hazmat.primitives.ciphers.modes import Mode class AEADCipherContext(metaclass=ABCMeta): @abstractmethod def authenticate_additional_data(self, data: bytes) -> None: ... class AEADDecryptionContext(metaclass=ABCMeta): @abstractmethod def finalize_with_tag(self, tag: bytes) -> bytes: ... class AEADEncryptionContext(metaclass=ABCMeta): @property @abstractmethod def tag(self) -> bytes: ... class BlockCipherAlgorithm(metaclass=ABCMeta): @property @abstractmethod def block_size(self) -> int: ... class Cipher(object): def __init__(self, algorithm: CipherAlgorithm, mode: Mode, backend: CipherBackend) -> None: ... def decryptor(self) -> CipherContext: ... def encryptor(self) -> CipherContext: ... class CipherAlgorithm(metaclass=ABCMeta): @property @abstractmethod def key_size(self) -> int: ... @property @abstractmethod def name(self) -> str: ... class CipherContext(metaclass=ABCMeta): @abstractmethod def finalize(self) -> bytes: ... @abstractmethod def update(self, data: bytes) -> bytes: ... @abstractmethod def update_into(self, data: bytes, buf) -> int: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/ciphers/aead.pyi0000644€tŠÔÚ€2›s®0000000205113576752252036262 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional class AESCCM(object): def __init__(self, key: bytes, tag_length: Optional[int]) -> None: ... 
def decrypt(self, nonce: bytes, data: bytes, associated_data: Optional[bytes]) -> bytes: ... def encrypt(self, nonce: bytes, data: bytes, associated_data: Optional[bytes]) -> bytes: ... @classmethod def generate_key(cls, bit_length: int) -> bytes: ... class AESGCM(object): def __init__(self, key: bytes) -> None: ... def decrypt(self, nonce: bytes, data: bytes, associated_data: Optional[bytes]) -> bytes: ... def encrypt(self, nonce: bytes, data: bytes, associated_data: Optional[bytes]) -> bytes: ... @classmethod def generate_key(cls, bit_length: int) -> bytes: ... class ChaCha20Poly1305(object): def __init__(self, key: bytes) -> None: ... def decrypt(self, nonce: bytes, data: bytes, associated_data: Optional[bytes]) -> bytes: ... def encrypt(self, nonce: bytes, data: bytes, associated_data: Optional[bytes]) -> bytes: ... @classmethod def generate_key(cls) -> bytes: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/ciphers/algorithms.pyi0000644€tŠÔÚ€2›s®0000000427713576752252037555 0ustar jukkaDROPBOX\Domain Users00000000000000from cryptography.hazmat.primitives.ciphers import BlockCipherAlgorithm, CipherAlgorithm from cryptography.hazmat.primitives.ciphers.modes import ModeWithNonce class AES(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @property def block_size(self) -> int: ... @property def key_size(self) -> int: ... @property def name(self) -> str: ... class ARC4(CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @property def key_size(self) -> int: ... @property def name(self) -> str: ... class Blowfish(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @property def block_size(self) -> int: ... @property def key_size(self) -> int: ... @property def name(self) -> str: ... class Camelia(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @property def block_size(self) -> int: ... @property def key_size(self) -> int: ... 
@property def name(self) -> str: ... class CAST5(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @property def block_size(self) -> int: ... @property def key_size(self) -> int: ... @property def name(self) -> str: ... class ChaCha20(CipherAlgorithm, ModeWithNonce): def __init__(self, key: bytes, nonce: bytes) -> None: ... @property def key_size(self) -> int: ... @property def name(self) -> str: ... @property def nonce(self) -> bytes: ... class IDEA(CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @property def key_size(self) -> int: ... @property def name(self) -> str: ... class SEED(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @property def block_size(self) -> int: ... @property def key_size(self) -> int: ... @property def name(self) -> str: ... class TripleDES(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @property def block_size(self) -> int: ... @property def key_size(self) -> int: ... @property def name(self) -> str: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/ciphers/modes.pyi0000644€tŠÔÚ€2›s®0000000602113576752252036500 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from typing import Optional from cryptography.hazmat.primitives.ciphers import CipherAlgorithm class Mode(metaclass=ABCMeta): @property @abstractmethod def name(self) -> str: ... @abstractmethod def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... class ModeWithAuthenticationTag(metaclass=ABCMeta): @property @abstractmethod def tag(self) -> bytes: ... class ModeWithInitializationVector(metaclass=ABCMeta): @property @abstractmethod def initialization_vector(self) -> bytes: ... class ModeWithNonce(metaclass=ABCMeta): @property @abstractmethod def nonce(self) -> bytes: ... class ModeWithTweak(metaclass=ABCMeta): @property @abstractmethod def tweak(self) -> bytes: ... 
class CBC(Mode, ModeWithInitializationVector): def __init__(self, initialization_vector: bytes) -> None: ... @property def initialization_vector(self) -> bytes: ... @property def name(self) -> str: ... def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... class CTR(Mode, ModeWithNonce): def __init__(self, nonce: bytes) -> None: ... @property def name(self) -> str: ... @property def nonce(self) -> bytes: ... def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... class CFB(Mode, ModeWithInitializationVector): def __init__(self, initialization_vector: bytes) -> None: ... @property def initialization_vector(self) -> bytes: ... @property def name(self) -> str: ... def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... class CFB8(Mode, ModeWithInitializationVector): def __init__(self, initialization_vector: bytes) -> None: ... @property def initialization_vector(self) -> bytes: ... @property def name(self) -> str: ... def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... class ECB(Mode): @property def name(self) -> str: ... def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... class GCM(Mode, ModeWithInitializationVector, ModeWithAuthenticationTag): def __init__(self, initialization_vector: bytes, tag: Optional[bytes], min_tag_length: Optional[int]) -> None: ... @property def initialization_vector(self) -> bytes: ... @property def name(self) -> str: ... @property def tag(self) -> bytes: ... def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... class OFB(Mode, ModeWithInitializationVector): def __init__(self, initialization_vector: bytes) -> None: ... @property def initialization_vector(self) -> bytes: ... @property def name(self) -> str: ... def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... class XTS(Mode, ModeWithTweak): def __init__(self, tweak: bytes) -> None: ... @property def name(self) -> str: ... 
@property def tweak(self) -> bytes: ... def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/cmac.pyi0000644€tŠÔÚ€2›s®0000000064013576752252034640 0ustar jukkaDROPBOX\Domain Users00000000000000from cryptography.hazmat.backends.interfaces import CMACBackend from cryptography.hazmat.primitives.ciphers import BlockCipherAlgorithm class CMAC(object): def __init__(self, algorithm: BlockCipherAlgorithm, backend: CMACBackend) -> None: ... def copy(self) -> CMAC: ... def finalize(self) -> bytes: ... def update(self, data: bytes) -> None: ... def verify(self, signature: bytes) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/constant_time.pyi0000644€tŠÔÚ€2›s®0000000005613576752252036605 0ustar jukkaDROPBOX\Domain Users00000000000000def bytes_eq(a: bytes, b: bytes) -> bool: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/hashes.pyi0000644€tŠÔÚ€2›s®0000000242213576752252035210 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod from cryptography.hazmat.backends.interfaces import HashBackend class HashAlgorithm(metaclass=ABCMeta): digest_size: int name: str class HashContext(metaclass=ABCMeta): algorithm: HashAlgorithm @abstractmethod def copy(self) -> HashContext: ... @abstractmethod def finalize(self) -> bytes: ... @abstractmethod def update(self, data: bytes) -> None: ... class BLAKE2b(HashAlgorithm): ... class BLAKE2s(HashAlgorithm): ... class MD5(HashAlgorithm): ... class SHA1(HashAlgorithm): ... class SHA224(HashAlgorithm): ... class SHA256(HashAlgorithm): ... class SHA384(HashAlgorithm): ... class SHA3_224(HashAlgorithm): ... class SHA3_256(HashAlgorithm): ... class SHA3_384(HashAlgorithm): ... class SHA3_512(HashAlgorithm): ... class SHA512(HashAlgorithm): ... class SHA512_224(HashAlgorithm): ... class SHA512_256(HashAlgorithm): ... 
class SHAKE128(HashAlgorithm): def __init__(self, digest_size: int) -> None: ... class SHAKE256(HashAlgorithm): def __init__(self, digest_size: int) -> None: ... class Hash(HashContext): def __init__(self, algorithm: HashAlgorithm, backend: HashBackend): ... def copy(self) -> Hash: ... def finalize(self) -> bytes: ... def update(self, data: bytes) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/hmac.pyi0000644€tŠÔÚ€2›s®0000000063413576752252034650 0ustar jukkaDROPBOX\Domain Users00000000000000from cryptography.hazmat.backends.interfaces import HMACBackend from cryptography.hazmat.primitives.hashes import HashAlgorithm class HMAC(object): def __init__(self, key: bytes, algorithm: HashAlgorithm, backend: HMACBackend) -> None: ... def copy(self) -> HMAC: ... def finalize(self) -> bytes: ... def update(self, msg: bytes) -> None: ... def verify(self, signature: bytes) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/0000755€tŠÔÚ€2›s®0000000000013576752267033764 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/__init__.pyi0000644€tŠÔÚ€2›s®0000000040513576752252036237 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod class KeyDerivationFunction(metaclass=ABCMeta): @abstractmethod def derive(self, key_material: bytes) -> bytes: ... @abstractmethod def verify(self, key_material: bytes, expected_key: bytes) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/concatkdf.pyi0000644€tŠÔÚ€2›s®0000000154013576752252036435 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from cryptography.hazmat.backends.interfaces import HashBackend, HMACBackend from cryptography.hazmat.primitives.hashes import HashAlgorithm from cryptography.hazmat.primitives.kdf import KeyDerivationFunction class ConcatKDFHash(KeyDerivationFunction): def __init__(self, algorithm: HashAlgorithm, length: int, otherinfo: Optional[bytes], backend: HashBackend): ... def derive(self, key_material: bytes) -> bytes: ... def verify(self, key_material: bytes, expected_key: bytes) -> None: ... class ConcatKDFHMAC(KeyDerivationFunction): def __init__( self, algorithm: HashAlgorithm, length: int, salt: Optional[bytes], otherinfo: Optional[bytes], backend: HMACBackend ): ... def derive(self, key_material: bytes) -> bytes: ... def verify(self, key_material: bytes, expected_key: bytes) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/hkdf.pyi0000644€tŠÔÚ€2›s®0000000147513576752252035424 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from cryptography.hazmat.backends.interfaces import HMACBackend from cryptography.hazmat.primitives.hashes import HashAlgorithm from cryptography.hazmat.primitives.kdf import KeyDerivationFunction class HKDF(KeyDerivationFunction): def __init__( self, algorithm: HashAlgorithm, length: int, salt: Optional[bytes], info: Optional[bytes], backend: HMACBackend ): ... def derive(self, key_material: bytes) -> bytes: ... def verify(self, key_material: bytes, expected_key: bytes) -> None: ... class HKDFExpand(KeyDerivationFunction): def __init__(self, algorithm: HashAlgorithm, length: int, info: Optional[bytes], backend: HMACBackend): ... def derive(self, key_material: bytes) -> bytes: ... def verify(self, key_material: bytes, expected_key: bytes) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/kbkdf.pyi0000644€tŠÔÚ€2›s®0000000152313576752252035563 0ustar jukkaDROPBOX\Domain Users00000000000000from enum import Enum from typing import Optional from cryptography.hazmat.backends.interfaces import HMACBackend from cryptography.hazmat.primitives.hashes import HashAlgorithm from cryptography.hazmat.primitives.kdf import KeyDerivationFunction class Mode(Enum): CounterMode: str class CounterLocation(Enum): BeforeFixed: str AfterFixed: str class KBKDFHMAC(KeyDerivationFunction): def __init__( self, algorithm: HashAlgorithm, mode: Mode, length: int, rlen: int, llen: int, location: CounterLocation, label: Optional[bytes], context: Optional[bytes], fixed: Optional[bytes], backend: HMACBackend, ): ... def derive(self, key_material: bytes) -> bytes: ... def verify(self, key_material: bytes, expected_key: bytes) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/pbkdf2.pyi0000644€tŠÔÚ€2›s®0000000076613576752252035662 0ustar jukkaDROPBOX\Domain Users00000000000000from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend from cryptography.hazmat.primitives.hashes import HashAlgorithm from cryptography.hazmat.primitives.kdf import KeyDerivationFunction class PBKDF2HMAC(KeyDerivationFunction): def __init__(self, algorithm: HashAlgorithm, length: int, salt: bytes, iterations: int, backend: PBKDF2HMACBackend): ... def derive(self, key_material: bytes) -> bytes: ... def verify(self, key_material: bytes, expected_key: bytes) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/scrypt.pyi0000644€tŠÔÚ€2›s®0000000062713576752252036032 0ustar jukkaDROPBOX\Domain Users00000000000000from cryptography.hazmat.backends.interfaces import ScryptBackend from cryptography.hazmat.primitives.kdf import KeyDerivationFunction class Scrypt(KeyDerivationFunction): def __init__(self, salt: bytes, length: int, n: int, r: int, p: int, backend: ScryptBackend): ... def derive(self, key_material: bytes) -> bytes: ... def verify(self, key_material: bytes, expected_key: bytes) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/x963kdf.pyi0000644€tŠÔÚ€2›s®0000000100313576752252035671 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from cryptography.hazmat.backends.interfaces import HashBackend from cryptography.hazmat.primitives.hashes import HashAlgorithm from cryptography.hazmat.primitives.kdf import KeyDerivationFunction class X963KDF(KeyDerivationFunction): def __init__(self, algorithm: HashAlgorithm, length: int, sharedinfo: Optional[bytes], backend: HashBackend): ... def derive(self, key_material: bytes) -> bytes: ... def verify(self, key_material: bytes, expected_key: bytes) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/keywrap.pyi0000644€tŠÔÚ€2›s®0000000100613576752252035414 0ustar jukkaDROPBOX\Domain Users00000000000000from cryptography.hazmat.backends.interfaces import CipherBackend def aes_key_wrap(wrapping_key: bytes, key_to_wrap: bytes, backend: CipherBackend) -> bytes: ... def aes_key_wrap_with_padding(wrapping_key: bytes, key_to_wrap: bytes, backend: CipherBackend) -> bytes: ... def aes_key_unwrap(wrapping_key: bytes, wrapped_key: bytes, backend: CipherBackend) -> bytes: ... def aes_key_unwrap_with_padding(wrapping_key: bytes, wrapped_key: bytes, backend: CipherBackend) -> bytes: ... class InvalidUnwrap(Exception): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/padding.pyi0000644€tŠÔÚ€2›s®0000000103413576752252035341 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod class PaddingContext(metaclass=ABCMeta): @abstractmethod def finalize(self) -> bytes: ... @abstractmethod def update(self, data: bytes) -> bytes: ... class ANSIX923(object): def __init__(self, block_size: int) -> None: ... def padder(self) -> PaddingContext: ... def unpadder(self) -> PaddingContext: ... class PKCS7(object): def __init__(self, block_size: int) -> None: ... def padder(self) -> PaddingContext: ... def unpadder(self) -> PaddingContext: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/poly1305.pyi0000644€tŠÔÚ€2›s®0000000077013576752252035235 0ustar jukkaDROPBOX\Domain Users00000000000000from cryptography.hazmat.backends.interfaces import HMACBackend from cryptography.hazmat.primitives.hashes import HashAlgorithm class Poly1305(object): def __init__(self, key: bytes) -> None: ... def finalize(self) -> bytes: ... @classmethod def generate_tag(cls, key: bytes, data: bytes) -> bytes: ... def update(self, data: bytes) -> None: ... def verify(self, tag: bytes) -> None: ... @classmethod def verify_tag(cls, key: bytes, data: bytes, tag: bytes) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/serialization/0000755€tŠÔÚ€2›s®0000000000013576752267036075 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/serialization/__init__.pyi0000644€tŠÔÚ€2›s®0000000175613576752252040362 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta from enum import Enum from typing import Optional def load_pem_private_key(data: bytes, password: Optional[bytes], backend): ... def load_pem_public_key(data: bytes, backend): ... def load_der_private_key(data: bytes, password: Optional[bytes], backend): ... 
def load_der_public_key(data: bytes, backend): ... def load_ssh_public_key(data: bytes, backend): ... class Encoding(Enum): PEM: str DER: str OpenSSH: str Raw: str X962: str class PrivateFormat(Enum): PKCS8: str TraditionalOpenSSL: str Raw: str class PublicFormat(Enum): SubjectPublicKeyInfo: str PKCS1: str OpenSSH: str Raw: str CompressedPoint: str UncompressedPoint: str class ParameterFormat(Enum): PKCS3: str class KeySerializationEncryption(metaclass=ABCMeta): ... class BestAvailableEncryption(KeySerializationEncryption): password: bytes def __init__(self, password: bytes) -> None: ... class NoEncryption(KeySerializationEncryption): ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/serialization/pkcs12.pyi0000644€tŠÔÚ€2›s®0000000016113576752252037713 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional def load_key_and_certificates(data: bytes, password: Optional[bytes], backend): ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/twofactor/0000755€tŠÔÚ€2›s®0000000000013576752267035230 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/twofactor/__init__.pyi0000644€tŠÔÚ€2›s®0000000004313576752252037501 0ustar jukkaDROPBOX\Domain Users00000000000000class InvalidToken(Exception): ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/twofactor/hotp.pyi0000644€tŠÔÚ€2›s®0000000103413576752252036715 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from cryptography.hazmat.backends.interfaces import HMACBackend from cryptography.hazmat.primitives.hashes import HashAlgorithm class HOTP(object): def __init__( self, key: bytes, length: int, algorithm: HashAlgorithm, backend: HMACBackend, enforce_key_length: bool = ... ): ... def generate(self, counter: int) -> bytes: ... def get_provisioning_uri(self, account_name: str, counter: int, issuer: Optional[str]) -> str: ... 
def verify(self, hotp: bytes, counter: int) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/twofactor/totp.pyi0000644€tŠÔÚ€2›s®0000000111113576752252036725 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from cryptography.hazmat.backends.interfaces import HMACBackend from cryptography.hazmat.primitives.hashes import HashAlgorithm class TOTP(object): def __init__( self, key: bytes, length: int, algorithm: HashAlgorithm, time_step: int, backend: HMACBackend, enforce_key_length: bool = ..., ): ... def generate(self, time: int) -> bytes: ... def get_provisioning_uri(self, account_name: str, issuer: Optional[str]) -> str: ... def verify(self, totp: bytes, time: int) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/cryptography/x509.pyi0000644€tŠÔÚ€2›s®0000003210613576752252030765 0ustar jukkaDROPBOX\Domain Users00000000000000import datetime from abc import ABCMeta, abstractmethod from enum import Enum from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network from typing import Any, ClassVar, Generator, List, Optional, Union, Text, Iterable, Sequence, Type from cryptography.hazmat.backends.interfaces import X509Backend from cryptography.hazmat.primitives.asymmetric.dsa import DSAPrivateKey, DSAPublicKey from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey, EllipticCurvePublicKey from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey, Ed448PublicKey from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey, Ed25519PublicKey from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey from cryptography.hazmat.primitives.hashes import HashAlgorithm from cryptography.hazmat.primitives.serialization import Encoding class ObjectIdentifier(object): def __init__(self, dotted_string: str) -> None: ... def dotted_string(self) -> str: ... 
class CRLEntryExtensionOID(object): CERTIFICATE_ISSUER: ClassVar[ObjectIdentifier] CRL_REASON: ClassVar[ObjectIdentifier] INVALIDITY_DATE: ClassVar[ObjectIdentifier] class ExtensionOID(object): AUTHORITY_INFORMATION_ACCESS: ClassVar[ObjectIdentifier] AUTHORITY_KEY_IDENTIFIER: ClassVar[ObjectIdentifier] BASIC_CONSTRAINTS: ClassVar[ObjectIdentifier] CERTIFICATE_POLICIES: ClassVar[ObjectIdentifier] CRL_DISTRIBUTION_POINTS: ClassVar[ObjectIdentifier] CRL_NUMBER: ClassVar[ObjectIdentifier] DELTA_CRL_INDICATOR: ClassVar[ObjectIdentifier] EXTENDED_KEY_USAGE: ClassVar[ObjectIdentifier] FRESHEST_CRL: ClassVar[ObjectIdentifier] INHIBIT_ANY_POLICY: ClassVar[ObjectIdentifier] ISSUER_ALTERNATIVE_NAME: ClassVar[ObjectIdentifier] ISSUING_DISTRIBUTION_POINT: ClassVar[ObjectIdentifier] KEY_USAGE: ClassVar[ObjectIdentifier] NAME_CONSTRAINTS: ClassVar[ObjectIdentifier] OCSP_NO_CHECK: ClassVar[ObjectIdentifier] POLICY_CONSTRAINTS: ClassVar[ObjectIdentifier] POLICY_MAPPINGS: ClassVar[ObjectIdentifier] PRECERT_POISON: ClassVar[ObjectIdentifier] PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ClassVar[ObjectIdentifier] SUBJECT_ALTERNATIVE_NAME: ClassVar[ObjectIdentifier] SUBJECT_DIRECTORY_ATTRIBUTES: ClassVar[ObjectIdentifier] SUBJECT_INFORMATION_ACCESS: ClassVar[ObjectIdentifier] SUBJECT_KEY_IDENTIFIER: ClassVar[ObjectIdentifier] TLS_FEATURE: ClassVar[ObjectIdentifier] class NameOID(object): BUSINESS_CATEGORY: ClassVar[ObjectIdentifier] COMMON_NAME: ClassVar[ObjectIdentifier] COUNTRY_NAME: ClassVar[ObjectIdentifier] DN_QUALIFIER: ClassVar[ObjectIdentifier] DOMAIN_COMPONENT: ClassVar[ObjectIdentifier] EMAIL_ADDRESS: ClassVar[ObjectIdentifier] GENERATION_QUALIFIER: ClassVar[ObjectIdentifier] GIVEN_NAME: ClassVar[ObjectIdentifier] JURISDICTION_COUNTRY_NAME: ClassVar[ObjectIdentifier] JURISDICTION_LOCALITY_NAME: ClassVar[ObjectIdentifier] JURISDICTION_STATE_OR_PROVINCE_NAME: ClassVar[ObjectIdentifier] LOCALITY_NAME: ClassVar[ObjectIdentifier] ORGANIZATIONAL_UNIT_NAME: ClassVar[ObjectIdentifier] 
ORGANIZATION_NAME: ClassVar[ObjectIdentifier] POSTAL_ADDRESS: ClassVar[ObjectIdentifier] POSTAL_CODE: ClassVar[ObjectIdentifier] PSEUDONYM: ClassVar[ObjectIdentifier] SERIAL_NUMBER: ClassVar[ObjectIdentifier] STATE_OR_PROVINCE_NAME: ClassVar[ObjectIdentifier] STREET_ADDRESS: ClassVar[ObjectIdentifier] SURNAME: ClassVar[ObjectIdentifier] TITLE: ClassVar[ObjectIdentifier] USER_ID: ClassVar[ObjectIdentifier] X500_UNIQUE_IDENTIFIER: ClassVar[ObjectIdentifier] class OCSPExtensionOID(object): NONCE: ClassVar[ObjectIdentifier] class SignatureAlgorithmOID(object): DSA_WITH_SHA1: ClassVar[ObjectIdentifier] DSA_WITH_SHA224: ClassVar[ObjectIdentifier] DSA_WITH_SHA256: ClassVar[ObjectIdentifier] ECDSA_WITH_SHA1: ClassVar[ObjectIdentifier] ECDSA_WITH_SHA224: ClassVar[ObjectIdentifier] ECDSA_WITH_SHA256: ClassVar[ObjectIdentifier] ECDSA_WITH_SHA384: ClassVar[ObjectIdentifier] ECDSA_WITH_SHA512: ClassVar[ObjectIdentifier] ED25519: ClassVar[ObjectIdentifier] ED448: ClassVar[ObjectIdentifier] RSASSA_PSS: ClassVar[ObjectIdentifier] RSA_WITH_MD5: ClassVar[ObjectIdentifier] RSA_WITH_SHA1: ClassVar[ObjectIdentifier] RSA_WITH_SHA224: ClassVar[ObjectIdentifier] RSA_WITH_SHA256: ClassVar[ObjectIdentifier] RSA_WITH_SHA384: ClassVar[ObjectIdentifier] RSA_WITH_SHA512: ClassVar[ObjectIdentifier] class NameAttribute(object): oid: ObjectIdentifier value: Text def __init__(self, oid: ObjectIdentifier, value: Text) -> None: ... def rfc4514_string(self) -> str: ... class RelativeDistinguishedName(object): def __init__(self, attributes: List[NameAttribute]) -> None: ... def __iter__(self) -> Generator[NameAttribute, None, None]: ... def get_attributes_for_oid(self, oid: ObjectIdentifier) -> List[NameAttribute]: ... def rfc4514_string(self) -> str: ... class Name(object): rdns: List[RelativeDistinguishedName] def __init__(self, attributes: Sequence[Union[NameAttribute, RelativeDistinguishedName]]) -> None: ... def __iter__(self) -> Generator[NameAttribute, None, None]: ... 
def __len__(self) -> int: ... def get_attributes_for_oid(self, oid: ObjectIdentifier) -> List[NameAttribute]: ... def public_bytes(self, backend: X509Backend) -> bytes: ... def rfc4514_string(self) -> str: ... class Version(Enum): v1: int v3: int class Certificate(metaclass=ABCMeta): extensions: Extensions issuer: Name not_valid_after: datetime.datetime not_valid_before: datetime.datetime serial_number: int signature: bytes signature_algorithm_oid: ObjectIdentifier signature_hash_algorithm: HashAlgorithm tbs_certificate_bytes: bytes subject: Name version: Version @abstractmethod def fingerprint(self, algorithm: HashAlgorithm) -> bytes: ... @abstractmethod def public_bytes(self, encoding: Encoding) -> bytes: ... @abstractmethod def public_key(self) -> Union[DSAPublicKey, Ed25519PublicKey, Ed448PublicKey, EllipticCurvePublicKey, RSAPublicKey]: ... class CertificateBuilder(object): def __init__(self, issuer_name: Optional[Name] = ..., subject_name: Optional[Name] = ..., public_key: Union[DSAPublicKey, Ed25519PublicKey, Ed448PublicKey, EllipticCurvePublicKey, RSAPublicKey, None] = ..., serial_number: Optional[int] = ..., not_valid_before: Optional[datetime.datetime] = ..., not_valid_after: Optional[datetime.datetime] = ..., extensions: Optional[Iterable[ExtensionType]] = ...) -> None: ... def add_extension(self, extension: ExtensionType, critical: bool) -> CertificateBuilder: ... def issuer_name(self, name: Name) -> CertificateBuilder: ... def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder: ... def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder: ... def public_key( self, public_key: Union[DSAPublicKey, Ed25519PublicKey, Ed448PublicKey, EllipticCurvePublicKey, RSAPublicKey] ) -> CertificateBuilder: ... def serial_number(self, serial_number: int) -> CertificateBuilder: ... 
def sign( self, private_key: Union[DSAPrivateKey, Ed25519PrivateKey, Ed448PrivateKey, EllipticCurvePrivateKey, RSAPrivateKey], algorithm: Optional[HashAlgorithm], backend: X509Backend, ) -> Certificate: ... def subject_name(self, name: Name) -> CertificateBuilder: ... class CertificateRevocationList(metaclass=ABCMeta): extensions: Extensions issuer: Name last_update: datetime.datetime next_update: datetime.datetime signature: bytes signature_algorithm_oid: ObjectIdentifier signature_hash_algorithm: HashAlgorithm tbs_certlist_bytes: bytes @abstractmethod def fingerprint(self, algorithm: HashAlgorithm) -> bytes: ... @abstractmethod def get_revoked_certificate_by_serial_number(self, serial_number: int) -> RevokedCertificate: ... @abstractmethod def is_signature_valid( self, public_key: Union[DSAPublicKey, Ed25519PublicKey, Ed448PublicKey, EllipticCurvePublicKey, RSAPublicKey] ) -> bool: ... @abstractmethod def public_bytes(self, encoding: Encoding) -> bytes: ... class CertificateRevocationListBuilder(object): def add_extension(self, extension: ExtensionType, critical: bool) -> CertificateRevocationListBuilder: ... def add_revoked_certificate(self, revoked_certificate: RevokedCertificate) -> CertificateRevocationListBuilder: ... def issuer_name(self, name: Name) -> CertificateRevocationListBuilder: ... def last_update(self, time: datetime.datetime) -> CertificateRevocationListBuilder: ... def next_update(self, time: datetime.datetime) -> CertificateRevocationListBuilder: ... def sign( self, private_key: Union[DSAPrivateKey, Ed25519PrivateKey, Ed448PrivateKey, EllipticCurvePrivateKey, RSAPrivateKey], algorithm: Optional[HashAlgorithm], backend: X509Backend, ) -> CertificateRevocationList: ... 
class CertificateSigningRequest(metaclass=ABCMeta): extensions: Extensions is_signature_valid: bool signature: bytes signature_algorithm_oid: ObjectIdentifier signature_hash_algorithm: HashAlgorithm subject: Name tbs_certrequest_bytes: bytes @abstractmethod def public_bytes(self, encoding: Encoding) -> bytes: ... @abstractmethod def public_key(self) -> Union[DSAPublicKey, Ed25519PublicKey, Ed448PublicKey, EllipticCurvePublicKey, RSAPublicKey]: ... class CertificateSigningRequestBuilder(object): def add_extension(self, extension: ExtensionType, critical: bool) -> CertificateSigningRequestBuilder: ... def subject_name(self, name: Name) -> CertificateSigningRequestBuilder: ... def sign( self, private_key: Union[DSAPrivateKey, Ed25519PrivateKey, Ed448PrivateKey, EllipticCurvePrivateKey, RSAPrivateKey], algorithm: Optional[HashAlgorithm], backend: X509Backend, ) -> CertificateSigningRequest: ... class RevokedCertificate(metaclass=ABCMeta): extensions: Extensions revocation_date: datetime.datetime serial_number: int class RevokedCertificateBuilder(object): def add_extension(self, extension: ExtensionType, critical: bool) -> RevokedCertificateBuilder: ... def build(self, backend: X509Backend) -> RevokedCertificate: ... def revocation_date(self, time: datetime.datetime) -> RevokedCertificateBuilder: ... def serial_number(self, serial_number: int) -> RevokedCertificateBuilder: ... # General Name Classes class GeneralName(metaclass=ABCMeta): value: Any class DirectoryName(GeneralName): value: Name def __init__(self, value: Name) -> None: ... class DNSName(GeneralName): value: Text def __init__(self, value: Text) -> None: ... class IPAddress(GeneralName): value: Union[IPv4Address, IPv6Address, IPv4Network, IPv6Network] def __init__(self, value: Union[IPv4Address, IPv6Address, IPv4Network, IPv6Network]) -> None: ... class OtherName(GeneralName): type_id: ObjectIdentifier value: bytes def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None: ... 
class RegisteredID(GeneralName): value: ObjectIdentifier def __init__(self, value: ObjectIdentifier) -> None: ... class RFC822Name(GeneralName): value: Text def __init__(self, value: Text) -> None: ... class UniformResourceIdentifier(GeneralName): value: Text def __init__(self, value: Text) -> None: ... # X.509 Extensions class Extension(object): critical: bool oid: ExtensionOID value: ExtensionType class ExtensionType(metaclass=ABCMeta): oid: ExtensionOID class Extensions(object): def __init__(self, general_names: List[Extension]) -> None: ... def __iter__(self) -> Generator[Extension, None, None]: ... def get_extension_for_oid(self, oid: ObjectIdentifier) -> Extension: ... def get_extension_for_class(self, extclass: Type[ExtensionType]) -> Extension: ... class IssuerAlternativeName(ExtensionType): def __init__(self, general_names: List[GeneralName]) -> None: ... def __iter__(self) -> Generator[GeneralName, None, None]: ... def get_values_for_type(self, type: Type[GeneralName]) -> List[Any]: ... class SubjectAlternativeName(ExtensionType): def __init__(self, general_names: List[GeneralName]) -> None: ... def __iter__(self) -> Generator[GeneralName, None, None]: ... def get_values_for_type(self, type: Type[GeneralName]) -> List[Any]: ... def load_der_x509_certificate(data: bytes, backend: X509Backend) -> Certificate: ... def load_pem_x509_certificate(data: bytes, backend: X509Backend) -> Certificate: ... def load_der_x509_crl(data: bytes, backend: X509Backend) -> CertificateRevocationList: ... def load_pem_x509_crl(data: bytes, backend: X509Backend) -> CertificateRevocationList: ... def load_der_x509_csr(data: bytes, backend: X509Backend) -> CertificateSigningRequest: ... def load_pem_x509_csr(data: bytes, backend: X509Backend) -> CertificateSigningRequest: ... def __getattr__(name: str) -> Any: ... 
# incomplete mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/0000755€tŠÔÚ€2›s®0000000000013576752267026621 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252031063 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/_common.pyi0000644€tŠÔÚ€2›s®0000000053113576752252030764 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, TypeVar _T = TypeVar("_T") class weekday(object): def __init__(self, weekday: int, n: Optional[int] = ...) -> None: ... def __call__(self: _T, n: int) -> _T: ... def __eq__(self, other) -> bool: ... def __repr__(self) -> str: ... def __hash__(self) -> int: ... weekday: int n: int mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/parser.pyi0000644€tŠÔÚ€2›s®0000000331513576752252030634 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple, Optional, Callable, Union, IO, Any, Dict, Mapping, Text from datetime import datetime, tzinfo _FileOrStr = Union[bytes, Text, IO[str], IO[Any]] class parserinfo(object): JUMP: List[str] WEEKDAYS: List[Tuple[str, str]] MONTHS: List[Tuple[str, str]] HMS: List[Tuple[str, str, str]] AMPM: List[Tuple[str, str]] UTCZONE: List[str] PERTAIN: List[str] TZOFFSET: Dict[str, int] def __init__(self, dayfirst: bool = ..., yearfirst: bool = ...) -> None: ... def jump(self, name: Text) -> bool: ... def weekday(self, name: Text) -> Optional[int]: ... def month(self, name: Text) -> Optional[int]: ... def hms(self, name: Text) -> Optional[int]: ... def ampm(self, name: Text) -> Optional[int]: ... def pertain(self, name: Text) -> bool: ... def utczone(self, name: Text) -> bool: ... def tzoffset(self, name: Text) -> Optional[int]: ... def convertyear(self, year: int) -> int: ... def validate(self, res: datetime) -> bool: ... class parser(object): def __init__(self, info: Optional[parserinfo] = ...) -> None: ... 
def parse(self, timestr: _FileOrStr, default: Optional[datetime] = ..., ignoretz: bool = ..., tzinfos: Optional[Mapping[Text, tzinfo]] = ..., **kwargs: Any) -> datetime: ... def isoparse(dt_str: Union[str, bytes, IO[str], IO[bytes]]) -> datetime: ... DEFAULTPARSER: parser def parse(timestr: _FileOrStr, parserinfo: Optional[parserinfo] = ..., **kwargs: Any) -> datetime: ... class _tzparser: ... DEFAULTTZPARSER: _tzparser class InvalidDatetimeError(ValueError): ... class InvalidDateError(InvalidDatetimeError): ... class InvalidTimeError(InvalidDatetimeError): ... mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/relativedelta.pyi0000644€tŠÔÚ€2›s®0000000640013576752252032163 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, Any, List, Optional, SupportsFloat, TypeVar, Union from datetime import date, datetime, timedelta from ._common import weekday _SelfT = TypeVar('_SelfT', bound=relativedelta) _DateT = TypeVar('_DateT', date, datetime) # Work around attribute and type having the same name. 
_weekday = weekday MO: weekday TU: weekday WE: weekday TH: weekday FR: weekday SA: weekday SU: weekday class relativedelta(object): years: int months: int days: int leapdays: int hours: int minutes: int seconds: int microseconds: int year: Optional[int] month: Optional[int] weekday: Optional[_weekday] day: Optional[int] hour: Optional[int] minute: Optional[int] second: Optional[int] microsecond: Optional[int] def __init__(self, dt1: Optional[date] = ..., dt2: Optional[date] = ..., years: Optional[int] = ..., months: Optional[int] = ..., days: Optional[int] = ..., leapdays: Optional[int] = ..., weeks: Optional[int] = ..., hours: Optional[int] = ..., minutes: Optional[int] = ..., seconds: Optional[int] = ..., microseconds: Optional[int] = ..., year: Optional[int] = ..., month: Optional[int] = ..., day: Optional[int] = ..., weekday: Optional[Union[int, _weekday]] = ..., yearday: Optional[int] = ..., nlyearday: Optional[int] = ..., hour: Optional[int] = ..., minute: Optional[int] = ..., second: Optional[int] = ..., microsecond: Optional[int] = ...) -> None: ... @property def weeks(self) -> int: ... @weeks.setter def weeks(self, value: int) -> None: ... def normalized(self: _SelfT) -> _SelfT: ... # TODO: use Union when mypy will handle it properly in overloaded operator # methods (#2129, #1442, #1264 in mypy) @overload def __add__(self: _SelfT, other: relativedelta) -> _SelfT: ... @overload def __add__(self: _SelfT, other: timedelta) -> _SelfT: ... @overload def __add__(self, other: _DateT) -> _DateT: ... @overload def __radd__(self: _SelfT, other: relativedelta) -> _SelfT: ... @overload def __radd__(self: _SelfT, other: timedelta) -> _SelfT: ... @overload def __radd__(self, other: _DateT) -> _DateT: ... @overload def __rsub__(self: _SelfT, other: relativedelta) -> _SelfT: ... @overload def __rsub__(self: _SelfT, other: timedelta) -> _SelfT: ... @overload def __rsub__(self, other: _DateT) -> _DateT: ... def __sub__(self: _SelfT, other: relativedelta) -> _SelfT: ... 
def __neg__(self: _SelfT) -> _SelfT: ... def __bool__(self) -> bool: ... def __nonzero__(self) -> bool: ... def __mul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __rmul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __eq__(self, other) -> bool: ... def __ne__(self, other: object) -> bool: ... def __div__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __truediv__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __repr__(self) -> str: ... def __abs__(self: _SelfT) -> _SelfT: ... def __hash__(self) -> int: ... mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/rrule.pyi0000644€tŠÔÚ€2›s®0000000644013576752252030473 0ustar jukkaDROPBOX\Domain Users00000000000000from ._common import weekday as weekdaybase from typing import Any, Iterable, Optional, Union import datetime YEARLY: int MONTHLY: int WEEKLY: int DAILY: int HOURLY: int MINUTELY: int SECONDLY: int class weekday(weekdaybase): ... MO: weekday TU: weekday WE: weekday TH: weekday FR: weekday SA: weekday SU: weekday class rrulebase: def __init__(self, cache: bool = ...) -> None: ... def __iter__(self): ... def __getitem__(self, item): ... def __contains__(self, item): ... def count(self): ... def before(self, dt, inc: bool = ...): ... def after(self, dt, inc: bool = ...): ... def xafter(self, dt, count: Optional[Any] = ..., inc: bool = ...): ... def between(self, after, before, inc: bool = ..., count: int = ...): ... 
class rrule(rrulebase): def __init__(self, freq, dtstart: Optional[datetime.date] = ..., interval: int = ..., wkst: Optional[Union[weekday, int]] = ..., count: Optional[int] = ..., until: Optional[Union[datetime.date, int]] = ..., bysetpos: Optional[Union[int, Iterable[int]]] = ..., bymonth: Optional[Union[int, Iterable[int]]] = ..., bymonthday: Optional[Union[int, Iterable[int]]] = ..., byyearday: Optional[Union[int, Iterable[int]]] = ..., byeaster: Optional[Union[int, Iterable[int]]] = ..., byweekno: Optional[Union[int, Iterable[int]]] = ..., byweekday: Optional[Union[int, weekday, Iterable[int], Iterable[weekday]]] = ..., byhour: Optional[Union[int, Iterable[int]]] = ..., byminute: Optional[Union[int, Iterable[int]]] = ..., bysecond: Optional[Union[int, Iterable[int]]] = ..., cache: bool = ...) -> None: ... def replace(self, **kwargs): ... class _iterinfo: rrule: Any = ... def __init__(self, rrule) -> None: ... yearlen: int = ... nextyearlen: int = ... yearordinal: int = ... yearweekday: int = ... mmask: Any = ... mdaymask: Any = ... nmdaymask: Any = ... wdaymask: Any = ... mrange: Any = ... wnomask: Any = ... nwdaymask: Any = ... eastermask: Any = ... lastyear: int = ... lastmonth: int = ... def rebuild(self, year, month): ... def ydayset(self, year, month, day): ... def mdayset(self, year, month, day): ... def wdayset(self, year, month, day): ... def ddayset(self, year, month, day): ... def htimeset(self, hour, minute, second): ... def mtimeset(self, hour, minute, second): ... def stimeset(self, hour, minute, second): ... class rruleset(rrulebase): class _genitem: dt: Any = ... genlist: Any = ... gen: Any = ... def __init__(self, genlist, gen) -> None: ... def __next__(self): ... next: Any = ... def __lt__(self, other): ... def __gt__(self, other): ... def __eq__(self, other): ... def __ne__(self, other): ... def __init__(self, cache: bool = ...) -> None: ... def rrule(self, rrule): ... def rdate(self, rdate): ... def exrule(self, exrule): ... 
def exdate(self, exdate): ... class _rrulestr: def __call__(self, s, **kwargs): ... rrulestr: _rrulestr mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/tz/0000755€tŠÔÚ€2›s®0000000000013576752267027256 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/tz/__init__.pyi0000644€tŠÔÚ€2›s®0000000052413576752252031533 0ustar jukkaDROPBOX\Domain Users00000000000000from .tz import ( tzutc as tzutc, tzoffset as tzoffset, tzlocal as tzlocal, tzfile as tzfile, tzrange as tzrange, tzstr as tzstr, tzical as tzical, gettz as gettz, datetime_exists as datetime_exists, datetime_ambiguous as datetime_ambiguous, resolve_imaginary as resolve_imaginary, ) UTC: tzutc mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/tz/_common.pyi0000644€tŠÔÚ€2›s®0000000146213576752252031425 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from datetime import datetime, tzinfo, timedelta def tzname_in_python2(namefunc): ... def enfold(dt: datetime, fold: int = ...): ... class _DatetimeWithFold(datetime): @property def fold(self): ... class _tzinfo(tzinfo): def is_ambiguous(self, dt: datetime) -> bool: ... def fromutc(self, dt: datetime) -> datetime: ... class tzrangebase(_tzinfo): def __init__(self) -> None: ... def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def dst(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def tzname(self, dt: Optional[datetime]) -> str: ... def fromutc(self, dt: datetime) -> datetime: ... def is_ambiguous(self, dt: datetime) -> bool: ... __hash__: Any def __ne__(self, other): ... 
__reduce__: Any mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/tz/tz.pyi0000644€tŠÔÚ€2›s®0000000753113576752252030436 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Union, IO, Text, Tuple, List import datetime from ._common import tzname_in_python2 as tzname_in_python2, _tzinfo as _tzinfo from ._common import tzrangebase as tzrangebase, enfold as enfold from ..relativedelta import relativedelta _FileObj = Union[str, Text, IO[str], IO[Text]] ZERO: datetime.timedelta EPOCH: datetime.datetime EPOCHORDINAL: int class tzutc(datetime.tzinfo): def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ... def __eq__(self, other): ... __hash__: Any def __ne__(self, other): ... __reduce__: Any class tzoffset(datetime.tzinfo): def __init__(self, name, offset) -> None: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def __eq__(self, other): ... __hash__: Any def __ne__(self, other): ... __reduce__: Any @classmethod def instance(cls, name, offset) -> tzoffset: ... class tzlocal(_tzinfo): def __init__(self) -> None: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ... def __eq__(self, other): ... __hash__: Any def __ne__(self, other): ... 
__reduce__: Any class _ttinfo: def __init__(self) -> None: ... def __eq__(self, other): ... __hash__: Any def __ne__(self, other): ... class tzfile(_tzinfo): def __init__(self, fileobj: _FileObj, filename: Optional[Text] = ...) -> None: ... def is_ambiguous(self, dt: Optional[datetime.datetime], idx: Optional[int] = ...) -> bool: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def __eq__(self, other): ... __hash__: Any def __ne__(self, other): ... def __reduce__(self): ... def __reduce_ex__(self, protocol): ... class tzrange(tzrangebase): hasdst: bool def __init__(self, stdabbr: Text, stdoffset: Union[int, datetime.timedelta, None] = ..., dstabbr: Optional[Text] = ..., dstoffset: Union[int, datetime.timedelta, None] = ..., start: Optional[relativedelta] = ..., end: Optional[relativedelta] = ...) -> None: ... def transitions(self, year: int) -> Tuple[datetime.datetime, datetime.datetime]: ... def __eq__(self, other): ... class tzstr(tzrange): hasdst: bool def __init__(self, s: Union[bytes, _FileObj], posix_offset: bool = ...) -> None: ... @classmethod def instance(cls, name, offset) -> tzoffset: ... class tzical: def __init__(self, fileobj: _FileObj) -> None: ... def keys(self): ... def get(self, tzid: Optional[Any] = ...): ... TZFILES: List[str] TZPATHS: List[str] def datetime_exists(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ... def datetime_ambiguous(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ... def resolve_imaginary(dt: datetime.datetime) -> datetime.datetime: ... class _GetTZ: def __call__(self, name: Optional[Text] = ...) -> Optional[datetime.tzinfo]: ... def nocache(self, name: Optional[Text]) -> Optional[datetime.tzinfo]: ... 
gettz: _GetTZ mypy-0.761/mypy/typeshed/third_party/2and3/dateutil/utils.pyi0000644€tŠÔÚ€2›s®0000000043213576752252030475 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from datetime import datetime, tzinfo, timedelta def default_tzinfo(dt: datetime, tzinfo: tzinfo) -> datetime: ... def today(tzinfo: Optional[tzinfo] = ...) -> datetime: ... def within_delta(dt1: datetime, dt2: datetime, delta: timedelta) -> bool: ... mypy-0.761/mypy/typeshed/third_party/2and3/decorator.pyi0000644€tŠÔÚ€2›s®0000000536713576752252027520 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Dict, Iterator, List, NamedTuple, Optional, Pattern, Text, Tuple, TypeVar _C = TypeVar("_C", bound=Callable[..., Any]) _Func = TypeVar("_Func", bound=Callable[..., Any]) _T = TypeVar("_T") def get_init(cls): ... if sys.version_info >= (3,): from inspect import iscoroutinefunction as iscoroutinefunction from inspect import getfullargspec as getfullargspec else: class FullArgSpec(NamedTuple): args: List[str] varargs: Optional[str] varkw: Optional[str] defaults: Tuple[Any, ...] kwonlyargs: List[str] kwonlydefaults: Dict[str, Any] annotations: Dict[str, Any] def iscoroutinefunction(f: Callable[..., Any]) -> bool: ... def getfullargspec(func: Any) -> FullArgSpec: ... if sys.version_info >= (3, 2): from contextlib import _GeneratorContextManager else: from contextlib import GeneratorContextManager as _GeneratorContextManager DEF: Pattern[str] class FunctionMaker(object): args: List[Text] varargs: Optional[Text] varkw: Optional[Text] defaults: Tuple[Any, ...] 
kwonlyargs: List[Text] kwonlydefaults: Optional[Text] shortsignature: Optional[Text] name: Text doc: Optional[Text] module: Optional[Text] annotations: Dict[Text, Any] signature: Text dict: Dict[Text, Any] def __init__( self, func: Optional[Callable[..., Any]] = ..., name: Optional[Text] = ..., signature: Optional[Text] = ..., defaults: Optional[Tuple[Any, ...]] = ..., doc: Optional[Text] = ..., module: Optional[Text] = ..., funcdict: Optional[Dict[Text, Any]] = ... ) -> None: ... def update(self, func: Any, **kw: Any) -> None: ... def make( self, src_templ: Text, evaldict: Optional[Dict[Text, Any]] = ..., addsource: bool = ..., **attrs: Any ) -> Callable[..., Any]: ... @classmethod def create( cls, obj: Any, body: Text, evaldict: Dict[Text, Any], defaults: Optional[Tuple[Any, ...]] = ..., doc: Optional[Text] = ..., module: Optional[Text] = ..., addsource: bool = ..., **attrs: Any ) -> Callable[..., Any]: ... def decorate(func: _Func, caller: Callable[..., Any], extras: Any = ...) -> _Func: ... def decorator(caller: Callable[..., Any], _func: Optional[Callable[..., Any]] = ...) -> Callable[[Callable[..., Any]], Callable[..., Any]]: ... class ContextManager(_GeneratorContextManager[_T]): def __call__(self, func: _C) -> _C: ... def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., ContextManager[_T]]: ... def dispatch_on(*dispatch_args: Any) -> Callable[[Callable[..., Any]], Callable[..., Any]]: ... mypy-0.761/mypy/typeshed/third_party/2and3/emoji.pyi0000644€tŠÔÚ€2›s®0000000061313576752252026626 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, Pattern, List, Dict, Union, Text _DEFAULT_DELIMITER: str def emojize( string: str, use_aliases: bool = ..., delimiters: Tuple[str, str] = ... ) -> str: ... def demojize( string: str, delimiters: Tuple[str, str] = ... ) -> str: ... def get_emoji_regexp() -> Pattern[Text]: ... def emoji_lis(string: str) -> List[Dict[str, Union[int, str]]]: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/first.pyi0000644€tŠÔÚ€2›s®0000000074213576752252026655 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterable, Optional, overload, TypeVar, Union _T = TypeVar('_T') _S = TypeVar('_S') @overload def first(iterable: Iterable[_T]) -> Optional[_T]: ... @overload def first(iterable: Iterable[_T], default: _S) -> Union[_T, _S]: ... @overload def first(iterable: Iterable[_T], default: _S, key: Optional[Callable[[_T], Any]]) -> Union[_T, _S]: ... @overload def first(iterable: Iterable[_T], *, key: Optional[Callable[[_T], Any]]) -> Optional[_T]: ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/0000755€tŠÔÚ€2›s®0000000000013576752267026106 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/flask/__init__.pyi0000644€tŠÔÚ€2›s®0000000425113576752252030364 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. 
from .app import Flask as Flask from .blueprints import Blueprint as Blueprint from .config import Config as Config from .ctx import after_this_request as after_this_request from .ctx import copy_current_request_context as copy_current_request_context from .ctx import has_app_context as has_app_context from .ctx import has_request_context as has_request_context from .globals import current_app as current_app from .globals import g as g from .globals import request as request from .globals import session as session from .helpers import flash as flash from .helpers import get_flashed_messages as get_flashed_messages from .helpers import get_template_attribute as get_template_attribute from .helpers import make_response as make_response from .helpers import safe_join as safe_join from .helpers import send_file as send_file from .helpers import send_from_directory as send_from_directory from .helpers import stream_with_context as stream_with_context from .helpers import url_for as url_for from .json import jsonify as jsonify from .signals import appcontext_popped as appcontext_popped from .signals import appcontext_pushed as appcontext_pushed from .signals import appcontext_tearing_down as appcontext_tearing_down from .signals import before_render_template as before_render_template from .signals import got_request_exception as got_request_exception from .signals import message_flashed as message_flashed from .signals import request_finished as request_finished from .signals import request_started as request_started from .signals import request_tearing_down as request_tearing_down from .signals import signals_available as signals_available from .signals import template_rendered as template_rendered from .templating import render_template as render_template from .templating import render_template_string as render_template_string from .wrappers import Request as Request from .wrappers import Response as Response from werkzeug.exceptions import abort as abort from 
werkzeug.utils import redirect as redirect from jinja2 import Markup as Markup from jinja2 import escape as escape mypy-0.761/mypy/typeshed/third_party/2and3/flask/app.pyi0000644€tŠÔÚ€2›s®0000002066713576752252027416 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.app (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from .blueprints import Blueprint from .config import Config, ConfigAttribute from .ctx import AppContext, RequestContext, _AppCtxGlobals from .globals import _request_ctx_stack, g, request, session from .helpers import _PackageBoundObject, find_package, get_debug_flag, get_env, get_flashed_messages, get_load_dotenv, locked_cached_property, url_for from .logging import create_logger from .sessions import SecureCookieSessionInterface from .signals import appcontext_tearing_down, got_request_exception, request_finished, request_started, request_tearing_down from .templating import DispatchingJinjaLoader, Environment from .wrappers import Request, Response from .testing import FlaskClient from types import TracebackType from typing import ( Any, Callable, ContextManager, Dict, List, Optional, Type, TypeVar, Union, Text, Tuple, NoReturn, Iterable, ByteString ) from datetime import timedelta def setupmethod(f: Any): ... 
_T = TypeVar('_T') _ExcInfo = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] _StartResponse = Callable[[str, List[Tuple[str, str]], Optional[_ExcInfo]], Callable[[bytes], Any]] _WSGICallable = Callable[[Dict[Text, Any], _StartResponse], Iterable[bytes]] _Status = Union[str, int] _Headers = Union[Dict[Any, Any], List[Tuple[Any, Any]]] _Body = Union[Text, ByteString, Dict[Text, Any], Response, _WSGICallable] _ViewFuncReturnType = Union[ _Body, Tuple[_Body, _Status, _Headers], Tuple[_Body, _Status], Tuple[_Body, _Headers], ] _ViewFunc = Union[ Callable[..., NoReturn], Callable[..., _ViewFuncReturnType], ] _VT = TypeVar('_VT', bound=_ViewFunc) class Flask(_PackageBoundObject): request_class: type = ... response_class: type = ... jinja_environment: type = ... app_ctx_globals_class: type = ... config_class: Type[Config] = ... testing: Any = ... secret_key: Union[Text, bytes, None] = ... session_cookie_name: Any = ... permanent_session_lifetime: timedelta = ... send_file_max_age_default: timedelta = ... use_x_sendfile: Any = ... json_encoder: Any = ... json_decoder: Any = ... jinja_options: Any = ... default_config: Any = ... url_rule_class: type = ... test_client_class: type = ... test_cli_runner_class: type = ... session_interface: Any = ... import_name: str = ... template_folder: str = ... root_path: Optional[Union[str, Text]] = ... static_url_path: Any = ... static_folder: Optional[str] = ... instance_path: Union[str, Text] = ... config: Config = ... view_functions: Any = ... error_handler_spec: Any = ... url_build_error_handlers: Any = ... before_request_funcs: Dict[Optional[str], List[Callable[[], Any]]] = ... before_first_request_funcs: List[Callable[[], None]] = ... after_request_funcs: Dict[Optional[str], List[Callable[[Response], Response]]] = ... teardown_request_funcs: Dict[Optional[str], List[Callable[[Optional[Exception]], Any]]] = ... teardown_appcontext_funcs: List[Callable[[Optional[Exception]], Any]] = ... 
url_value_preprocessors: Any = ... url_default_functions: Any = ... template_context_processors: Any = ... shell_context_processors: Any = ... blueprints: Any = ... extensions: Any = ... url_map: Any = ... subdomain_matching: Any = ... cli: Any = ... def __init__(self, import_name: str, static_url_path: Optional[str] = ..., static_folder: Optional[str] = ..., static_host: Optional[str] = ..., host_matching: bool = ..., subdomain_matching: bool = ..., template_folder: str = ..., instance_path: Optional[str] = ..., instance_relative_config: bool = ..., root_path: Optional[str] = ...) -> None: ... @property def name(self) -> str: ... @property def propagate_exceptions(self) -> bool: ... @property def preserve_context_on_exception(self): ... @property def logger(self): ... @property def jinja_env(self): ... @property def got_first_request(self) -> bool: ... def make_config(self, instance_relative: bool = ...): ... def auto_find_instance_path(self): ... def open_instance_resource(self, resource: Union[str, Text], mode: str = ...): ... templates_auto_reload: Any = ... def create_jinja_environment(self): ... def create_global_jinja_loader(self): ... def select_jinja_autoescape(self, filename: Any): ... def update_template_context(self, context: Any) -> None: ... def make_shell_context(self): ... env: Optional[str] = ... debug: bool = ... def run(self, host: Optional[str] = ..., port: Optional[Union[int, str]] = ..., debug: Optional[bool] = ..., load_dotenv: bool = ..., **options: Any) -> None: ... def test_client(self, use_cookies: bool = ..., **kwargs: Any) -> FlaskClient[Response]: ... def test_cli_runner(self, **kwargs: Any): ... def open_session(self, request: Any): ... def save_session(self, session: Any, response: Any): ... def make_null_session(self): ... def register_blueprint(self, blueprint: Blueprint, **options: Any) -> None: ... def iter_blueprints(self): ... 
def add_url_rule(self, rule: str, endpoint: Optional[str] = ..., view_func: _ViewFunc = ..., provide_automatic_options: Optional[bool] = ..., **options: Any) -> None: ... def route(self, rule: str, **options: Any) -> Callable[[_VT], _VT]: ... def endpoint(self, endpoint: str) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... def errorhandler(self, code_or_exception: Union[int, Type[Exception]]) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... def register_error_handler(self, code_or_exception: Union[int, Type[Exception]], f: Callable[..., Any]) -> None: ... def template_filter(self, name: Optional[Any] = ...): ... def add_template_filter(self, f: Any, name: Optional[Any] = ...) -> None: ... def template_test(self, name: Optional[Any] = ...): ... def add_template_test(self, f: Any, name: Optional[Any] = ...) -> None: ... def template_global(self, name: Optional[Any] = ...): ... def add_template_global(self, f: Any, name: Optional[Any] = ...) -> None: ... def before_request(self, f: Callable[[], _T]) -> Callable[[], _T]: ... def before_first_request(self, f: Callable[[], _T]) -> Callable[[], _T]: ... def after_request(self, f: Callable[[Response], Response]) -> Callable[[Response], Response]: ... def teardown_request(self, f: Callable[[Optional[Exception]], _T]) -> Callable[[Optional[Exception]], _T]: ... def teardown_appcontext(self, f: Callable[[Optional[Exception]], _T]) -> Callable[[Optional[Exception]], _T]: ... def context_processor(self, f: Any): ... def shell_context_processor(self, f: Any): ... def url_value_preprocessor(self, f: Any): ... def url_defaults(self, f: Any): ... def handle_http_exception(self, e: Any): ... def trap_http_exception(self, e: Any): ... def handle_user_exception(self, e: Any): ... def handle_exception(self, e: Any): ... def log_exception(self, exc_info: Any) -> None: ... def raise_routing_exception(self, request: Any) -> None: ... def dispatch_request(self): ... def full_dispatch_request(self): ... 
def finalize_request(self, rv: Any, from_error_handler: bool = ...): ... def try_trigger_before_first_request_functions(self): ... def make_default_options_response(self): ... def should_ignore_error(self, error: Any): ... def make_response(self, rv: Any): ... def create_url_adapter(self, request: Any): ... def inject_url_defaults(self, endpoint: Any, values: Any) -> None: ... def handle_url_build_error(self, error: Any, endpoint: Any, values: Any): ... def preprocess_request(self): ... def process_response(self, response: Any): ... def do_teardown_request(self, exc: Any = ...) -> None: ... def do_teardown_appcontext(self, exc: Any = ...) -> None: ... def app_context(self): ... def request_context(self, environ: Any): ... def test_request_context(self, *args: Any, **kwargs: Any) -> ContextManager[RequestContext]: ... def wsgi_app(self, environ: Any, start_response: Any): ... def __call__(self, environ: Any, start_response: Any): ... # These are not preset at runtime but we add them since monkeypatching this # class is quite common. def __setattr__(self, name: str, value: Any): ... def __getattr__(self, name: str): ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/blueprints.pyi0000644€tŠÔÚ€2›s®0000000656213576752252031023 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.blueprints (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from .helpers import _PackageBoundObject from .app import _ViewFunc from typing import Any, Callable, Optional, Type, TypeVar, Union _T = TypeVar('_T') _VT = TypeVar('_VT', bound=_ViewFunc) class BlueprintSetupState: app: Any = ... blueprint: Any = ... options: Any = ... first_registration: Any = ... subdomain: Any = ... url_prefix: Any = ... url_defaults: Any = ... def __init__(self, blueprint: Any, app: Any, options: Any, first_registration: Any) -> None: ... def add_url_rule(self, rule: str, endpoint: Optional[str] = ..., view_func: _ViewFunc = ..., **options: Any) -> None: ... 
class Blueprint(_PackageBoundObject): warn_on_modifications: bool = ... json_encoder: Any = ... json_decoder: Any = ... import_name: str = ... template_folder: Optional[str] = ... root_path: str = ... name: str = ... url_prefix: Optional[str] = ... subdomain: Optional[str] = ... static_folder: Optional[str] = ... static_url_path: Optional[str] = ... deferred_functions: Any = ... url_values_defaults: Any = ... def __init__(self, name: str, import_name: str, static_folder: Optional[str] = ..., static_url_path: Optional[str] = ..., template_folder: Optional[str] = ..., url_prefix: Optional[str] = ..., subdomain: Optional[str] = ..., url_defaults: Optional[Any] = ..., root_path: Optional[str] = ...) -> None: ... def record(self, func: Any) -> None: ... def record_once(self, func: Any): ... def make_setup_state(self, app: Any, options: Any, first_registration: bool = ...): ... def register(self, app: Any, options: Any, first_registration: bool = ...) -> None: ... def route(self, rule: str, **options: Any) -> Callable[[_VT], _VT]: ... def add_url_rule(self, rule: str, endpoint: Optional[str] = ..., view_func: _ViewFunc = ..., **options: Any) -> None: ... def endpoint(self, endpoint: str) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... def app_template_filter(self, name: Optional[Any] = ...): ... def add_app_template_filter(self, f: Any, name: Optional[Any] = ...) -> None: ... def app_template_test(self, name: Optional[Any] = ...): ... def add_app_template_test(self, f: Any, name: Optional[Any] = ...) -> None: ... def app_template_global(self, name: Optional[Any] = ...): ... def add_app_template_global(self, f: Any, name: Optional[Any] = ...) -> None: ... def before_request(self, f: Any): ... def before_app_request(self, f: Any): ... def before_app_first_request(self, f: Any): ... def after_request(self, f: Any): ... def after_app_request(self, f: Any): ... def teardown_request(self, f: Any): ... def teardown_app_request(self, f: Any): ... 
def context_processor(self, f: Any): ... def app_context_processor(self, f: Any): ... def app_errorhandler(self, code: Any): ... def url_value_preprocessor(self, f: Any): ... def url_defaults(self, f: Any): ... def app_url_value_preprocessor(self, f: Any): ... def app_url_defaults(self, f: Any): ... def errorhandler(self, code_or_exception: Union[int, Type[Exception]]) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... def register_error_handler(self, code_or_exception: Union[int, Type[Exception]], f: Callable[..., Any]) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/cli.pyi0000644€tŠÔÚ€2›s®0000000444613576752252027402 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.cli (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. import click from .globals import current_app from .helpers import get_debug_flag, get_env, get_load_dotenv from typing import Any, Optional class NoAppException(click.UsageError): ... def find_best_app(script_info: Any, module: Any): ... def call_factory(script_info: Any, app_factory: Any, arguments: Any = ...): ... def find_app_by_string(script_info: Any, module: Any, app_name: Any): ... def prepare_import(path: Any): ... def locate_app(script_info: Any, module_name: Any, app_name: Any, raise_if_not_found: bool = ...): ... def get_version(ctx: Any, param: Any, value: Any): ... version_option: Any class DispatchingApp: loader: Any = ... def __init__(self, loader: Any, use_eager_loading: bool = ...) -> None: ... def __call__(self, environ: Any, start_response: Any): ... class ScriptInfo: app_import_path: Any = ... create_app: Any = ... data: Any = ... def __init__(self, app_import_path: Optional[Any] = ..., create_app: Optional[Any] = ...) -> None: ... def load_app(self): ... pass_script_info: Any def with_appcontext(f: Any): ... class AppGroup(click.Group): def command(self, *args: Any, **kwargs: Any): ... def group(self, *args: Any, **kwargs: Any): ... 
class FlaskGroup(AppGroup): create_app: Any = ... load_dotenv: Any = ... def __init__(self, add_default_commands: bool = ..., create_app: Optional[Any] = ..., add_version_option: bool = ..., load_dotenv: bool = ..., **extra: Any) -> None: ... def get_command(self, ctx: Any, name: Any): ... def list_commands(self, ctx: Any): ... def main(self, *args: Any, **kwargs: Any): ... def load_dotenv(path: Optional[Any] = ...): ... def show_server_banner(env: Any, debug: Any, app_import_path: Any, eager_loading: Any): ... class CertParamType(click.ParamType): name: str = ... path_type: Any = ... def __init__(self) -> None: ... def convert(self, value: Any, param: Any, ctx: Any): ... def run_command(info: Any, host: Any, port: Any, reload: Any, debugger: Any, eager_loading: Any, with_threads: Any, cert: Any) -> None: ... def shell_command() -> None: ... def routes_command(sort: Any, all_methods: Any): ... cli: Any def main(as_module: bool = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/config.pyi0000644€tŠÔÚ€2›s®0000000173413576752252030075 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.config (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from typing import Any, Optional, Dict class ConfigAttribute: __name__: Any = ... get_converter: Any = ... def __init__(self, name: Any, get_converter: Optional[Any] = ...) -> None: ... def __get__(self, obj: Any, type: Optional[Any] = ...): ... def __set__(self, obj: Any, value: Any) -> None: ... class Config(Dict[str, Any]): root_path: Any = ... def __init__(self, root_path: Any, defaults: Optional[Any] = ...) -> None: ... def from_envvar(self, variable_name: Any, silent: bool = ...): ... def from_pyfile(self, filename: Any, silent: bool = ...): ... def from_object(self, obj: Any) -> None: ... def from_json(self, filename: Any, silent: bool = ...): ... def from_mapping(self, *mapping: Any, **kwargs: Any): ... 
def get_namespace(self, namespace: Any, lowercase: bool = ..., trim_namespace: bool = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/ctx.pyi0000644€tŠÔÚ€2›s®0000000303313576752252027420 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.ctx (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from .globals import _app_ctx_stack, _request_ctx_stack from .signals import appcontext_popped, appcontext_pushed from typing import Any, Optional class _AppCtxGlobals: def get(self, name: Any, default: Optional[Any] = ...): ... def pop(self, name: Any, default: Any = ...): ... def setdefault(self, name: Any, default: Optional[Any] = ...): ... def __contains__(self, item: Any): ... def __iter__(self): ... def after_this_request(f: Any): ... def copy_current_request_context(f: Any): ... def has_request_context(): ... def has_app_context(): ... class AppContext: app: Any = ... url_adapter: Any = ... g: Any = ... def __init__(self, app: Any) -> None: ... def push(self) -> None: ... def pop(self, exc: Any = ...) -> None: ... def __enter__(self): ... def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... class RequestContext: app: Any = ... request: Any = ... url_adapter: Any = ... flashes: Any = ... session: Any = ... preserved: bool = ... def __init__(self, app: Any, environ: Any, request: Optional[Any] = ...) -> None: ... g: Any = ... def copy(self): ... def match_request(self) -> None: ... def push(self) -> None: ... def pop(self, exc: Any = ...) -> None: ... def auto_pop(self, exc: Any) -> None: ... def __enter__(self): ... def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/debughelpers.pyi0000644€tŠÔÚ€2›s®0000000132713576752252031277 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.debughelpers (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. 
from .app import Flask from .blueprints import Blueprint from .globals import _request_ctx_stack from typing import Any class UnexpectedUnicodeError(AssertionError, UnicodeError): ... class DebugFilesKeyError(KeyError, AssertionError): msg: Any = ... def __init__(self, request: Any, key: Any) -> None: ... class FormDataRoutingRedirect(AssertionError): def __init__(self, request: Any) -> None: ... def attach_enctype_error_multidict(request: Any): ... def explain_template_loading_attempts(app: Any, template: Any, attempts: Any) -> None: ... def explain_ignored_app_run() -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/globals.pyi0000644€tŠÔÚ€2›s®0000000066613576752252030256 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.globals (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from .app import Flask from .wrappers import Request from typing import Any from werkzeug.local import LocalStack class _FlaskLocalProxy(Flask): def _get_current_object(self) -> Flask: ... _request_ctx_stack: LocalStack _app_ctx_stack: LocalStack current_app: _FlaskLocalProxy request: Request session: Any g: Any mypy-0.761/mypy/typeshed/third_party/2and3/flask/helpers.pyi0000644€tŠÔÚ€2›s®0000000407213576752252030270 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.helpers (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from .globals import _app_ctx_stack, _request_ctx_stack, current_app, request, session from .signals import message_flashed from typing import Any, Optional def get_env(): ... def get_debug_flag(): ... def get_load_dotenv(default: bool = ...): ... def stream_with_context(generator_or_function: Any): ... def make_response(*args: Any): ... def url_for(endpoint: Any, **values: Any): ... def get_template_attribute(template_name: Any, attribute: Any): ... def flash(message: Any, category: str = ...) -> None: ... 
def get_flashed_messages(with_categories: bool = ..., category_filter: Any = ...): ... def send_file(filename_or_fp: Any, mimetype: Optional[Any] = ..., as_attachment: bool = ..., attachment_filename: Optional[Any] = ..., add_etags: bool = ..., cache_timeout: Optional[Any] = ..., conditional: bool = ..., last_modified: Optional[Any] = ...): ... def safe_join(directory: Any, *pathnames: Any): ... def send_from_directory(directory: Any, filename: Any, **options: Any): ... def get_root_path(import_name: Any): ... def find_package(import_name: Any): ... class locked_cached_property: __name__: Any = ... __module__: Any = ... __doc__: Any = ... func: Any = ... lock: Any = ... def __init__(self, func: Any, name: Optional[Any] = ..., doc: Optional[Any] = ...) -> None: ... def __get__(self, obj: Any, type: Optional[Any] = ...): ... class _PackageBoundObject: import_name: Any = ... template_folder: Any = ... root_path: Any = ... def __init__(self, import_name: Any, template_folder: Optional[Any] = ..., root_path: Optional[Any] = ...) -> None: ... static_folder: Any = ... static_url_path: Any = ... @property def has_static_folder(self): ... def jinja_loader(self): ... def get_send_file_max_age(self, filename: Any): ... def send_static_file(self, filename: Any): ... def open_resource(self, resource: Any, mode: str = ...): ... def total_seconds(td: Any): ... def is_ip(value: Any): ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/json/0000755€tŠÔÚ€2›s®0000000000013576752267027057 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/flask/json/__init__.pyi0000644€tŠÔÚ€2›s®0000000126113576752252031333 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.json (Python 3.6) import json as _json from typing import Any from jinja2 import Markup class JSONEncoder(_json.JSONEncoder): def default(self, o: Any): ... class JSONDecoder(_json.JSONDecoder): ... def detect_encoding(data: bytes) -> str: ... 
# undocumented def dumps(obj: Any, **kwargs: Any): ... def dump(obj: Any, fp: Any, **kwargs: Any) -> None: ... def loads(s: Any, **kwargs: Any): ... def load(fp: Any, **kwargs: Any): ... def htmlsafe_dumps(obj: Any, **kwargs: Any): ... def htmlsafe_dump(obj: Any, fp: Any, **kwargs: Any) -> None: ... def jsonify(*args: Any, **kwargs: Any): ... def tojson_filter(obj: Any, **kwargs: Any) -> Markup: ... # undocumented mypy-0.761/mypy/typeshed/third_party/2and3/flask/json/tag.pyi0000644€tŠÔÚ€2›s®0000000376513576752252030362 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.json.tag (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from typing import Any, Optional class JSONTag: key: Any = ... serializer: Any = ... def __init__(self, serializer: Any) -> None: ... def check(self, value: Any) -> None: ... def to_json(self, value: Any) -> None: ... def to_python(self, value: Any) -> None: ... def tag(self, value: Any): ... class TagDict(JSONTag): key: str = ... def check(self, value: Any): ... def to_json(self, value: Any): ... def to_python(self, value: Any): ... class PassDict(JSONTag): def check(self, value: Any): ... def to_json(self, value: Any): ... tag: Any = ... class TagTuple(JSONTag): key: str = ... def check(self, value: Any): ... def to_json(self, value: Any): ... def to_python(self, value: Any): ... class PassList(JSONTag): def check(self, value: Any): ... def to_json(self, value: Any): ... tag: Any = ... class TagBytes(JSONTag): key: str = ... def check(self, value: Any): ... def to_json(self, value: Any): ... def to_python(self, value: Any): ... class TagMarkup(JSONTag): key: str = ... def check(self, value: Any): ... def to_json(self, value: Any): ... def to_python(self, value: Any): ... class TagUUID(JSONTag): key: str = ... def check(self, value: Any): ... def to_json(self, value: Any): ... def to_python(self, value: Any): ... class TagDateTime(JSONTag): key: str = ... def check(self, value: Any): ... 
def to_json(self, value: Any): ... def to_python(self, value: Any): ... class TaggedJSONSerializer: default_tags: Any = ... tags: Any = ... order: Any = ... def __init__(self) -> None: ... def register(self, tag_class: Any, force: bool = ..., index: Optional[Any] = ...) -> None: ... def tag(self, value: Any): ... def untag(self, value: Any): ... def dumps(self, value: Any): ... def loads(self, value: Any): ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/logging.pyi0000644€tŠÔÚ€2›s®0000000045113576752252030251 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.logging (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from .globals import request from typing import Any def wsgi_errors_stream(): ... def has_level_handler(logger: Any): ... default_handler: Any def create_logger(app: Any): ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/sessions.pyi0000644€tŠÔÚ€2›s®0000000412713576752252030475 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.sessions (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from abc import ABCMeta from typing import Any, MutableMapping, Optional from werkzeug.datastructures import CallbackDict class SessionMixin(MutableMapping[str, Any], metaclass=ABCMeta): @property def permanent(self): ... @permanent.setter def permanent(self, value: Any) -> None: ... new: bool = ... modified: bool = ... accessed: bool = ... class SecureCookieSession(CallbackDict[str, Any], SessionMixin): modified: bool = ... accessed: bool = ... def __init__(self, initial: Optional[Any] = ...) -> None: ... def __getitem__(self, key: Any): ... def get(self, key: Any, default: Optional[Any] = ...): ... def setdefault(self, key: Any, default: Optional[Any] = ...): ... class NullSession(SecureCookieSession): __setitem__: Any = ... __delitem__: Any = ... clear: Any = ... pop: Any = ... popitem: Any = ... update: Any = ... setdefault: Any = ... 
class SessionInterface: null_session_class: Any = ... pickle_based: bool = ... def make_null_session(self, app: Any): ... def is_null_session(self, obj: Any): ... def get_cookie_domain(self, app: Any): ... def get_cookie_path(self, app: Any): ... def get_cookie_httponly(self, app: Any): ... def get_cookie_secure(self, app: Any): ... def get_cookie_samesite(self, app: Any): ... def get_expiration_time(self, app: Any, session: Any): ... def should_set_cookie(self, app: Any, session: Any): ... def open_session(self, app: Any, request: Any) -> None: ... def save_session(self, app: Any, session: Any, response: Any) -> None: ... session_json_serializer: Any class SecureCookieSessionInterface(SessionInterface): salt: str = ... digest_method: Any = ... key_derivation: str = ... serializer: Any = ... session_class: Any = ... def get_signing_serializer(self, app: Any): ... def open_session(self, app: Any, request: Any): ... def save_session(self, app: Any, session: Any, response: Any): ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/signals.pyi0000644€tŠÔÚ€2›s®0000000150413576752252030263 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.signals (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from typing import Any, Optional signals_available: bool class Namespace: def signal(self, name: Any, doc: Optional[Any] = ...): ... class _FakeSignal: name: Any = ... __doc__: Any = ... def __init__(self, name: Any, doc: Optional[Any] = ...) -> None: ... send: Any = ... connect: Any = ... disconnect: Any = ... has_receivers_for: Any = ... receivers_for: Any = ... temporarily_connected_to: Any = ... connected_to: Any = ... 
template_rendered: Any before_render_template: Any request_started: Any request_finished: Any request_tearing_down: Any got_request_exception: Any appcontext_tearing_down: Any appcontext_pushed: Any appcontext_popped: Any message_flashed: Any mypy-0.761/mypy/typeshed/third_party/2and3/flask/templating.pyi0000644€tŠÔÚ€2›s®0000000152513576752252030772 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.templating (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from .globals import _app_ctx_stack, _request_ctx_stack from .signals import before_render_template, template_rendered from jinja2 import BaseLoader, Environment as BaseEnvironment from typing import Any, Text, Iterable, Union class Environment(BaseEnvironment): app: Any = ... def __init__(self, app: Any, **options: Any) -> None: ... class DispatchingJinjaLoader(BaseLoader): app: Any = ... def __init__(self, app: Any) -> None: ... def get_source(self, environment: Any, template: Any): ... def list_templates(self): ... def render_template( template_name_or_list: Union[Text, Iterable[Text]], **context: Any ) -> Text: ... def render_template_string(source: Text, **context: Any) -> Text: ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/testing.pyi0000644€tŠÔÚ€2›s®0000000273413576752252030306 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.testing (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from click import BaseCommand from click.testing import CliRunner, Result from typing import Any, IO, Iterable, Mapping, Optional, Text, TypeVar, Union from werkzeug.test import Client def make_test_environ_builder(app: Any, path: str = ..., base_url: Optional[Any] = ..., subdomain: Optional[Any] = ..., url_scheme: Optional[Any] = ..., *args: Any, **kwargs: Any): ... # Response type for the client below. 
# By default _R is Tuple[Iterable[Any], Union[Text, int], werkzeug.datastructures.Headers], however # most commonly it is wrapped in a Reponse object. _R = TypeVar('_R') class FlaskClient(Client[_R]): preserve_context: bool = ... environ_base: Any = ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... def session_transaction(self, *args: Any, **kwargs: Any) -> None: ... def __enter__(self): ... def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... class FlaskCliRunner(CliRunner): app: Any = ... def __init__(self, app: Any, **kwargs: Any) -> None: ... def invoke( self, cli: Optional[BaseCommand] = ..., args: Optional[Union[str, Iterable[str]]] = ..., input: Optional[Union[bytes, IO[Any], Text]] = ..., env: Optional[Mapping[str, str]] = ..., catch_exceptions: bool = ..., color: bool = ..., **extra: Any, ) -> Result: ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/views.pyi0000644€tŠÔÚ€2›s®0000000124313576752252027760 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.views (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. from .globals import request from typing import Any http_method_funcs: Any class View: methods: Any = ... provide_automatic_options: Any = ... decorators: Any = ... def dispatch_request(self, *args: Any, **kwargs: Any) -> Any: ... @classmethod def as_view(cls, name: Any, *class_args: Any, **class_kwargs: Any): ... class MethodViewType(type): def __init__(self, name: Any, bases: Any, d: Any) -> None: ... class MethodView(View, metaclass=MethodViewType): def dispatch_request(self, *args: Any, **kwargs: Any) -> Any: ... mypy-0.761/mypy/typeshed/third_party/2and3/flask/wrappers.pyi0000644€tŠÔÚ€2›s®0000000234513576752252030472 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for flask.wrappers (Python 3.6) # # NOTE: This dynamically typed stub was automatically generated by stubgen. 
from typing import Any, Dict, Optional from werkzeug.exceptions import HTTPException from werkzeug.routing import Rule from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase class JSONMixin: @property def is_json(self) -> bool: ... @property def json(self): ... def get_json(self, force: bool = ..., silent: bool = ..., cache: bool = ...): ... def on_json_loading_failed(self, e: Any) -> None: ... class Request(RequestBase, JSONMixin): url_rule: Optional[Rule] = ... view_args: Dict[str, Any] = ... routing_exception: Optional[HTTPException] = ... # Request is making the max_content_length readonly, where it was not the # case in its supertype. # We would require something like https://github.com/python/typing/issues/241 @property def max_content_length(self) -> Optional[int]: ... # type: ignore @property def endpoint(self) -> Optional[str]: ... @property def blueprint(self) -> Optional[str]: ... class Response(ResponseBase, JSONMixin): default_mimetype: str = ... @property def max_cookie_size(self) -> int: ... mypy-0.761/mypy/typeshed/third_party/2and3/geoip2/0000755€tŠÔÚ€2›s®0000000000013576752267026173 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/geoip2/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252030435 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/geoip2/database.pyi0000644€tŠÔÚ€2›s®0000000210613576752252030453 0ustar jukkaDROPBOX\Domain Users00000000000000from types import TracebackType from typing import Optional, Sequence, Text, Type from maxminddb.reader import Metadata from geoip2.models import AnonymousIP, ASN, City, ConnectionType, Country, Domain, Enterprise, ISP _Locales = Optional[Sequence[Text]] class Reader: def __init__(self, filename: Text, locales: _Locales = ..., mode: int = ...) -> None: ... def __enter__(self) -> Reader: ... 
def __exit__(self, exc_type: Optional[Type[BaseException]] = ..., exc_val: Optional[BaseException] = ..., exc_tb: Optional[TracebackType] = ...) -> None: ... def country(self, ip_address: Text) -> Country: ... def city(self, ip_address: Text) -> City: ... def anonymous_ip(self, ip_address: Text) -> AnonymousIP: ... def asn(self, ip_address: Text) -> ASN: ... def connection_type(self, ip_address: Text) -> ConnectionType: ... def domain(self, ip_address: Text) -> Domain: ... def enterprise(self, ip_address: Text) -> Enterprise: ... def isp(self, ip_address: Text) -> ISP: ... def metadata(self) -> Metadata: ... def close(self) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/geoip2/errors.pyi0000644€tŠÔÚ€2›s®0000000076213576752252030231 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Text class GeoIP2Error(RuntimeError): ... class AddressNotFoundError(GeoIP2Error): ... class AuthenticationError(GeoIP2Error): ... class HTTPError(GeoIP2Error): http_status: Optional[int] uri: Optional[Text] def __init__(self, message: Text, http_status: Optional[int] = ..., uri: Optional[Text] = ...) -> None: ... class InvalidRequestError(GeoIP2Error): ... class OutOfQueriesError(GeoIP2Error): ... class PermissionRequiredError(GeoIP2Error): ... mypy-0.761/mypy/typeshed/third_party/2and3/geoip2/mixins.pyi0000644€tŠÔÚ€2›s®0000000017013576752252030215 0ustar jukkaDROPBOX\Domain Users00000000000000class SimpleEquality: def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/geoip2/models.pyi0000644€tŠÔÚ€2›s®0000000351613576752252030200 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Mapping, Optional, Sequence, Text from geoip2 import records from geoip2.mixins import SimpleEquality _Locales = Optional[Sequence[Text]] _RawResponse = Mapping[Text, Mapping[Text, Any]] class Country(SimpleEquality): continent: records.Continent country: records.Country registered_country: records.Country represented_country: records.RepresentedCountry maxmind: records.MaxMind traits: records.Traits raw: _RawResponse def __init__(self, raw_response: _RawResponse, locales: _Locales = ...) -> None: ... class City(Country): city: records.City location: records.Location postal: records.Postal subdivisions: records.Subdivisions def __init__(self, raw_response: _RawResponse, locales: _Locales = ...) -> None: ... class Insights(City): ... class Enterprise(City): ... class SimpleModel(SimpleEquality): ... class AnonymousIP(SimpleModel): is_anonymous: bool is_anonymous_vpn: bool is_hosting_provider: bool is_public_proxy: bool is_tor_exit_node: bool ip_address: Optional[Text] raw: _RawResponse def __init__(self, raw: _RawResponse) -> None: ... class ASN(SimpleModel): autonomous_system_number: Optional[int] autonomous_system_organization: Optional[Text] ip_address: Optional[Text] raw: _RawResponse def __init__(self, raw: _RawResponse) -> None: ... class ConnectionType(SimpleModel): connection_type: Optional[Text] ip_address: Optional[Text] raw: _RawResponse def __init__(self, raw: _RawResponse) -> None: ... class Domain(SimpleModel): domain: Optional[Text] ip_address: Optional[Text] raw: Optional[Text] def __init__(self, raw: _RawResponse) -> None: ... class ISP(ASN): isp: Optional[Text] organization: Optional[Text] def __init__(self, raw: _RawResponse) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/geoip2/records.pyi0000644€tŠÔÚ€2›s®0000000402713576752252030354 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Mapping, Optional, Sequence, Text, Tuple from geoip2.mixins import SimpleEquality _Locales = Optional[Sequence[Text]] _Names = Mapping[Text, Text] class Record(SimpleEquality): def __init__(self, **kwargs: Any) -> None: ... def __setattr__(self, name: Text, value: Any) -> None: ... class PlaceRecord(Record): def __init__(self, locales: _Locales = ..., **kwargs: Any) -> None: ... @property def name(self) -> Text: ... class City(PlaceRecord): confidence: int geoname_id: int names: _Names class Continent(PlaceRecord): code: Text geoname_id: int names: _Names class Country(PlaceRecord): confidence: int geoname_id: int is_in_european_union: bool iso_code: Text names: _Names def __init__(self, locales: _Locales = ..., **kwargs: Any) -> None: ... class RepresentedCountry(Country): type: Text class Location(Record): average_income: int accuracy_radius: int latitude: float longitude: float metro_code: int population_density: int time_zone: Text class MaxMind(Record): queries_remaining: int class Postal(Record): code: Text confidence: int class Subdivision(PlaceRecord): confidence: int geoname_id: int iso_code: Text names: _Names class Subdivisions(Tuple[Subdivision]): def __new__(cls, locales: _Locales, *subdivisions: Subdivision) -> Subdivisions: ... def __init__(self, locales: _Locales, *subdivisions: Subdivision) -> None: ... @property def most_specific(self) -> Subdivision: ... class Traits(Record): autonomous_system_number: int autonomous_system_organization: Text connection_type: Text domain: Text ip_address: Text is_anonymous: bool is_anonymous_proxy: bool is_anonymous_vpn: bool is_hosting_provider: bool is_legitimate_proxy: bool is_public_proxy: bool is_satellite_provider: bool is_tor_exit_node: bool isp: Text organization: Text user_type: Text def __init__(self, **kwargs: Any) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/0000755€tŠÔÚ€2›s®0000000000013576752267026262 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/google/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252030524 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/0000755€tŠÔÚ€2›s®0000000000013576752267030122 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/__init__.pyi0000644€tŠÔÚ€2›s®0000000002313576752252032371 0ustar jukkaDROPBOX\Domain Users00000000000000__version__: bytes mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/any_pb2.pyi0000644€tŠÔÚ€2›s®0000000046713576752252032200 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from google.protobuf.internal import well_known_types from typing import Optional, Text class Any(Message, well_known_types.Any_): type_url: Text value: bytes def __init__(self, type_url: Optional[Text] = ..., value: Optional[bytes] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/any_test_pb2.pyi0000644€tŠÔÚ€2›s®0000000105513576752252033231 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.any_pb2 import Any from google.protobuf.internal.containers import RepeatedCompositeFieldContainer from google.protobuf.message import Message from typing import Iterable, Optional class TestAny(Message): int32_value: int @property def any_value(self) -> Any: ... @property def repeated_any_value(self) -> RepeatedCompositeFieldContainer[Any]: ... def __init__( self, int32_value: Optional[int] = ..., any_value: Optional[Any] = ..., repeated_any_value: Optional[Iterable[Any]] = ... ) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/api_pb2.pyi0000644€tŠÔÚ€2›s®0000000342613576752252032160 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedCompositeFieldContainer from google.protobuf.message import Message from google.protobuf.source_context_pb2 import SourceContext from google.protobuf.type_pb2 import Option, Syntax from typing import Iterable, Optional, Text class Api(Message): name: Text version: Text syntax: Syntax @property def methods(self) -> RepeatedCompositeFieldContainer[Method]: ... @property def options(self) -> RepeatedCompositeFieldContainer[Option]: ... @property def source_context(self) -> SourceContext: ... @property def mixins(self) -> RepeatedCompositeFieldContainer[Mixin]: ... def __init__( self, name: Optional[Text] = ..., methods: Optional[Iterable[Method]] = ..., options: Optional[Iterable[Option]] = ..., version: Optional[Text] = ..., source_context: Optional[SourceContext] = ..., mixins: Optional[Iterable[Mixin]] = ..., syntax: Optional[Syntax] = ..., ) -> None: ... class Method(Message): name: Text request_type_url: Text request_streaming: bool response_type_url: Text response_streaming: bool syntax: Syntax @property def options(self) -> RepeatedCompositeFieldContainer[Option]: ... def __init__( self, name: Optional[Text] = ..., request_type_url: Optional[Text] = ..., request_streaming: Optional[bool] = ..., response_type_url: Optional[Text] = ..., response_streaming: Optional[bool] = ..., options: Optional[Iterable[Option]] = ..., syntax: Optional[Syntax] = ..., ) -> None: ... class Mixin(Message): name: Text root: Text def __init__(self, name: Optional[Text] = ..., root: Optional[Text] = ...) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/compiler/0000755€tŠÔÚ€2›s®0000000000013576752267031734 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/compiler/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252034176 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/compiler/plugin_pb2.pyi0000644€tŠÔÚ€2›s®0000000307113576752252034513 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.descriptor_pb2 import FileDescriptorProto from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message from typing import Iterable, Optional, Text class Version(Message): major: int minor: int patch: int suffix: Text def __init__( self, major: Optional[int] = ..., minor: Optional[int] = ..., patch: Optional[int] = ..., suffix: Optional[Text] = ... ) -> None: ... class CodeGeneratorRequest(Message): file_to_generate: RepeatedScalarFieldContainer[Text] parameter: Text @property def proto_file(self) -> RepeatedCompositeFieldContainer[FileDescriptorProto]: ... @property def compiler_version(self) -> Version: ... def __init__( self, file_to_generate: Optional[Iterable[Text]] = ..., parameter: Optional[Text] = ..., proto_file: Optional[Iterable[FileDescriptorProto]] = ..., compiler_version: Optional[Version] = ..., ) -> None: ... class CodeGeneratorResponse(Message): class File(Message): name: Text insertion_point: Text content: Text def __init__( self, name: Optional[Text] = ..., insertion_point: Optional[Text] = ..., content: Optional[Text] = ... ) -> None: ... error: Text @property def file(self) -> RepeatedCompositeFieldContainer[CodeGeneratorResponse.File]: ... def __init__(self, error: Optional[Text] = ..., file: Optional[Iterable[CodeGeneratorResponse.File]] = ...) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/descriptor.pyi0000644€tŠÔÚ€2›s®0000001576513576752252033033 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .message import Message from .descriptor_pb2 import ( EnumOptions, EnumValueOptions, FieldOptions, FileOptions, MessageOptions, MethodOptions, OneofOptions, ServiceOptions, ) class Error(Exception): ... class TypeTransformationError(Error): ... class DescriptorMetaclass(type): def __instancecheck__(self, obj): ... class DescriptorBase(metaclass=DescriptorMetaclass): has_options: Any def __init__(self, options, options_class_name) -> None: ... def GetOptions(self): ... class _NestedDescriptorBase(DescriptorBase): name: Any full_name: Any file: Any containing_type: Any def __init__( self, options, options_class_name, name, full_name, file, containing_type, serialized_start=..., serialized_end=... ) -> None: ... def GetTopLevelContainingType(self): ... def CopyToProto(self, proto): ... class Descriptor(_NestedDescriptorBase): def __new__( cls, name, full_name, filename, containing_type, fields, nested_types, enum_types, extensions, options=..., is_extendable=..., extension_ranges=..., oneofs=..., file=..., serialized_start=..., serialized_end=..., syntax=..., ): ... fields: Any fields_by_number: Any fields_by_name: Any nested_types: Any nested_types_by_name: Any enum_types: Any enum_types_by_name: Any enum_values_by_name: Any extensions: Any extensions_by_name: Any is_extendable: Any extension_ranges: Any oneofs: Any oneofs_by_name: Any syntax: Any def __init__( self, name, full_name, filename, containing_type, fields, nested_types, enum_types, extensions, options=..., is_extendable=..., extension_ranges=..., oneofs=..., file=..., serialized_start=..., serialized_end=..., syntax=..., ) -> None: ... def EnumValueName(self, enum, value): ... def CopyToProto(self, proto): ... def GetOptions(self) -> MessageOptions: ... 
class FieldDescriptor(DescriptorBase): TYPE_DOUBLE: Any TYPE_FLOAT: Any TYPE_INT64: Any TYPE_UINT64: Any TYPE_INT32: Any TYPE_FIXED64: Any TYPE_FIXED32: Any TYPE_BOOL: Any TYPE_STRING: Any TYPE_GROUP: Any TYPE_MESSAGE: Any TYPE_BYTES: Any TYPE_UINT32: Any TYPE_ENUM: Any TYPE_SFIXED32: Any TYPE_SFIXED64: Any TYPE_SINT32: Any TYPE_SINT64: Any MAX_TYPE: Any CPPTYPE_INT32: Any CPPTYPE_INT64: Any CPPTYPE_UINT32: Any CPPTYPE_UINT64: Any CPPTYPE_DOUBLE: Any CPPTYPE_FLOAT: Any CPPTYPE_BOOL: Any CPPTYPE_ENUM: Any CPPTYPE_STRING: Any CPPTYPE_MESSAGE: Any MAX_CPPTYPE: Any LABEL_OPTIONAL: Any LABEL_REQUIRED: Any LABEL_REPEATED: Any MAX_LABEL: Any MAX_FIELD_NUMBER: Any FIRST_RESERVED_FIELD_NUMBER: Any LAST_RESERVED_FIELD_NUMBER: Any def __new__( cls, name, full_name, index, number, type, cpp_type, label, default_value, message_type, enum_type, containing_type, is_extension, extension_scope, options=..., file=..., has_default_value=..., containing_oneof=..., ): ... name: Any full_name: Any index: Any number: Any type: Any cpp_type: Any label: Any has_default_value: Any default_value: Any containing_type: Any message_type: Any enum_type: Any is_extension: Any extension_scope: Any containing_oneof: Any def __init__( self, name, full_name, index, number, type, cpp_type, label, default_value, message_type, enum_type, containing_type, is_extension, extension_scope, options=..., file=..., has_default_value=..., containing_oneof=..., ) -> None: ... @staticmethod def ProtoTypeToCppProtoType(proto_type): ... def GetOptions(self) -> FieldOptions: ... class EnumDescriptor(_NestedDescriptorBase): def __new__( cls, name, full_name, filename, values, containing_type=..., options=..., file=..., serialized_start=..., serialized_end=..., ): ... values: Any values_by_name: Any values_by_number: Any def __init__( self, name, full_name, filename, values, containing_type=..., options=..., file=..., serialized_start=..., serialized_end=..., ) -> None: ... def CopyToProto(self, proto): ... 
def GetOptions(self) -> EnumOptions: ... class EnumValueDescriptor(DescriptorBase): def __new__(cls, name, index, number, type=..., options=...): ... name: Any index: Any number: Any type: Any def __init__(self, name, index, number, type=..., options=...) -> None: ... def GetOptions(self) -> EnumValueOptions: ... class OneofDescriptor: def __new__(cls, name, full_name, index, containing_type, fields): ... name: Any full_name: Any index: Any containing_type: Any fields: Any def __init__(self, name, full_name, index, containing_type, fields) -> None: ... def GetOptions(self) -> OneofOptions: ... class ServiceDescriptor(_NestedDescriptorBase): index: Any methods: Any methods_by_name: Any def __init__( self, name, full_name, index, methods, options=..., file=..., serialized_start=..., serialized_end=... ) -> None: ... def FindMethodByName(self, name): ... def CopyToProto(self, proto): ... def GetOptions(self) -> ServiceOptions: ... class MethodDescriptor(DescriptorBase): name: Any full_name: Any index: Any containing_service: Any input_type: Any output_type: Any def __init__(self, name, full_name, index, containing_service, input_type, output_type, options=...) -> None: ... def GetOptions(self) -> MethodOptions: ... class FileDescriptor(DescriptorBase): def __new__( cls, name, package, options=..., serialized_pb=..., dependencies=..., public_dependencies=..., syntax=..., pool=... ): ... _options: Any pool: Any message_types_by_name: Any name: Any package: Any syntax: Any serialized_pb: Any enum_types_by_name: Any extensions_by_name: Any services_by_name: Any dependencies: Any public_dependencies: Any def __init__( self, name, package, options=..., serialized_pb=..., dependencies=..., public_dependencies=..., syntax=..., pool=... ) -> None: ... def CopyToProto(self, proto): ... def GetOptions(self) -> FileOptions: ... def MakeDescriptor(desc_proto, package=..., build_file_if_cpp=..., syntax=...): ... def _ParseOptions(message: Message, string: bytes) -> Message: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/descriptor_pb2.pyi0000644€tŠÔÚ€2›s®0000004352513576752252033571 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message from typing import Iterable, List, Optional, Text, Tuple, cast class FileDescriptorSet(Message): @property def file(self) -> RepeatedCompositeFieldContainer[FileDescriptorProto]: ... def __init__(self, file: Optional[Iterable[FileDescriptorProto]] = ...) -> None: ... class FileDescriptorProto(Message): name: Text package: Text dependency: RepeatedScalarFieldContainer[Text] public_dependency: RepeatedScalarFieldContainer[int] weak_dependency: RepeatedScalarFieldContainer[int] syntax: Text @property def message_type(self) -> RepeatedCompositeFieldContainer[DescriptorProto]: ... @property def enum_type(self) -> RepeatedCompositeFieldContainer[EnumDescriptorProto]: ... @property def service(self) -> RepeatedCompositeFieldContainer[ServiceDescriptorProto]: ... @property def extension(self) -> RepeatedCompositeFieldContainer[FieldDescriptorProto]: ... @property def options(self) -> FileOptions: ... @property def source_code_info(self) -> SourceCodeInfo: ... def __init__( self, name: Optional[Text] = ..., package: Optional[Text] = ..., dependency: Optional[Iterable[Text]] = ..., public_dependency: Optional[Iterable[int]] = ..., weak_dependency: Optional[Iterable[int]] = ..., message_type: Optional[Iterable[DescriptorProto]] = ..., enum_type: Optional[Iterable[EnumDescriptorProto]] = ..., service: Optional[Iterable[ServiceDescriptorProto]] = ..., extension: Optional[Iterable[FieldDescriptorProto]] = ..., options: Optional[FileOptions] = ..., source_code_info: Optional[SourceCodeInfo] = ..., syntax: Optional[Text] = ..., ) -> None: ... 
class DescriptorProto(Message): class ExtensionRange(Message): start: int end: int @property def options(self) -> ExtensionRangeOptions: ... def __init__( self, start: Optional[int] = ..., end: Optional[int] = ..., options: Optional[ExtensionRangeOptions] = ... ) -> None: ... class ReservedRange(Message): start: int end: int def __init__(self, start: Optional[int] = ..., end: Optional[int] = ...) -> None: ... name: Text reserved_name: RepeatedScalarFieldContainer[Text] @property def field(self) -> RepeatedCompositeFieldContainer[FieldDescriptorProto]: ... @property def extension(self) -> RepeatedCompositeFieldContainer[FieldDescriptorProto]: ... @property def nested_type(self) -> RepeatedCompositeFieldContainer[DescriptorProto]: ... @property def enum_type(self) -> RepeatedCompositeFieldContainer[EnumDescriptorProto]: ... @property def extension_range(self) -> RepeatedCompositeFieldContainer[DescriptorProto.ExtensionRange]: ... @property def oneof_decl(self) -> RepeatedCompositeFieldContainer[OneofDescriptorProto]: ... @property def options(self) -> MessageOptions: ... @property def reserved_range(self) -> RepeatedCompositeFieldContainer[DescriptorProto.ReservedRange]: ... def __init__( self, name: Optional[Text] = ..., field: Optional[Iterable[FieldDescriptorProto]] = ..., extension: Optional[Iterable[FieldDescriptorProto]] = ..., nested_type: Optional[Iterable[DescriptorProto]] = ..., enum_type: Optional[Iterable[EnumDescriptorProto]] = ..., extension_range: Optional[Iterable[DescriptorProto.ExtensionRange]] = ..., oneof_decl: Optional[Iterable[OneofDescriptorProto]] = ..., options: Optional[MessageOptions] = ..., reserved_range: Optional[Iterable[DescriptorProto.ReservedRange]] = ..., reserved_name: Optional[Iterable[Text]] = ..., ) -> None: ... class ExtensionRangeOptions(Message): @property def uninterpreted_option(self) -> RepeatedCompositeFieldContainer[UninterpretedOption]: ... 
def __init__(self, uninterpreted_option: Optional[Iterable[UninterpretedOption]] = ...) -> None: ... class FieldDescriptorProto(Message): class Type(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> FieldDescriptorProto.Type: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[FieldDescriptorProto.Type]: ... @classmethod def items(cls) -> List[Tuple[bytes, FieldDescriptorProto.Type]]: ... TYPE_DOUBLE: FieldDescriptorProto.Type TYPE_FLOAT: FieldDescriptorProto.Type TYPE_INT64: FieldDescriptorProto.Type TYPE_UINT64: FieldDescriptorProto.Type TYPE_INT32: FieldDescriptorProto.Type TYPE_FIXED64: FieldDescriptorProto.Type TYPE_FIXED32: FieldDescriptorProto.Type TYPE_BOOL: FieldDescriptorProto.Type TYPE_STRING: FieldDescriptorProto.Type TYPE_GROUP: FieldDescriptorProto.Type TYPE_MESSAGE: FieldDescriptorProto.Type TYPE_BYTES: FieldDescriptorProto.Type TYPE_UINT32: FieldDescriptorProto.Type TYPE_ENUM: FieldDescriptorProto.Type TYPE_SFIXED32: FieldDescriptorProto.Type TYPE_SFIXED64: FieldDescriptorProto.Type TYPE_SINT32: FieldDescriptorProto.Type TYPE_SINT64: FieldDescriptorProto.Type class Label(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> FieldDescriptorProto.Label: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[FieldDescriptorProto.Label]: ... @classmethod def items(cls) -> List[Tuple[bytes, FieldDescriptorProto.Label]]: ... LABEL_OPTIONAL: FieldDescriptorProto.Label LABEL_REQUIRED: FieldDescriptorProto.Label LABEL_REPEATED: FieldDescriptorProto.Label name: Text number: int label: FieldDescriptorProto.Label type: FieldDescriptorProto.Type type_name: Text extendee: Text default_value: Text oneof_index: int json_name: Text @property def options(self) -> FieldOptions: ... 
def __init__( self, name: Optional[Text] = ..., number: Optional[int] = ..., label: Optional[FieldDescriptorProto.Label] = ..., type: Optional[FieldDescriptorProto.Type] = ..., type_name: Optional[Text] = ..., extendee: Optional[Text] = ..., default_value: Optional[Text] = ..., oneof_index: Optional[int] = ..., json_name: Optional[Text] = ..., options: Optional[FieldOptions] = ..., ) -> None: ... class OneofDescriptorProto(Message): name: Text @property def options(self) -> OneofOptions: ... def __init__(self, name: Optional[Text] = ..., options: Optional[OneofOptions] = ...) -> None: ... class EnumDescriptorProto(Message): class EnumReservedRange(Message): start: int end: int def __init__(self, start: Optional[int] = ..., end: Optional[int] = ...) -> None: ... name: Text reserved_name: RepeatedScalarFieldContainer[Text] @property def value(self) -> RepeatedCompositeFieldContainer[EnumValueDescriptorProto]: ... @property def options(self) -> EnumOptions: ... @property def reserved_range(self) -> RepeatedCompositeFieldContainer[EnumDescriptorProto.EnumReservedRange]: ... def __init__( self, name: Optional[Text] = ..., value: Optional[Iterable[EnumValueDescriptorProto]] = ..., options: Optional[EnumOptions] = ..., reserved_range: Optional[Iterable[EnumDescriptorProto.EnumReservedRange]] = ..., reserved_name: Optional[Iterable[Text]] = ..., ) -> None: ... class EnumValueDescriptorProto(Message): name: Text number: int @property def options(self) -> EnumValueOptions: ... def __init__( self, name: Optional[Text] = ..., number: Optional[int] = ..., options: Optional[EnumValueOptions] = ... ) -> None: ... class ServiceDescriptorProto(Message): name: Text @property def method(self) -> RepeatedCompositeFieldContainer[MethodDescriptorProto]: ... @property def options(self) -> ServiceOptions: ... def __init__( self, name: Optional[Text] = ..., method: Optional[Iterable[MethodDescriptorProto]] = ..., options: Optional[ServiceOptions] = ..., ) -> None: ... 
class MethodDescriptorProto(Message): name: Text input_type: Text output_type: Text client_streaming: bool server_streaming: bool @property def options(self) -> MethodOptions: ... def __init__( self, name: Optional[Text] = ..., input_type: Optional[Text] = ..., output_type: Optional[Text] = ..., options: Optional[MethodOptions] = ..., client_streaming: Optional[bool] = ..., server_streaming: Optional[bool] = ..., ) -> None: ... class FileOptions(Message): class OptimizeMode(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> FileOptions.OptimizeMode: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[FileOptions.OptimizeMode]: ... @classmethod def items(cls) -> List[Tuple[bytes, FileOptions.OptimizeMode]]: ... SPEED: FileOptions.OptimizeMode CODE_SIZE: FileOptions.OptimizeMode LITE_RUNTIME: FileOptions.OptimizeMode java_package: Text java_outer_classname: Text java_multiple_files: bool java_generate_equals_and_hash: bool java_string_check_utf8: bool optimize_for: FileOptions.OptimizeMode go_package: Text cc_generic_services: bool java_generic_services: bool py_generic_services: bool php_generic_services: bool deprecated: bool cc_enable_arenas: bool objc_class_prefix: Text csharp_namespace: Text swift_prefix: Text php_class_prefix: Text php_namespace: Text @property def uninterpreted_option(self) -> RepeatedCompositeFieldContainer[UninterpretedOption]: ... 
def __init__( self, java_package: Optional[Text] = ..., java_outer_classname: Optional[Text] = ..., java_multiple_files: Optional[bool] = ..., java_generate_equals_and_hash: Optional[bool] = ..., java_string_check_utf8: Optional[bool] = ..., optimize_for: Optional[FileOptions.OptimizeMode] = ..., go_package: Optional[Text] = ..., cc_generic_services: Optional[bool] = ..., java_generic_services: Optional[bool] = ..., py_generic_services: Optional[bool] = ..., php_generic_services: Optional[bool] = ..., deprecated: Optional[bool] = ..., cc_enable_arenas: Optional[bool] = ..., objc_class_prefix: Optional[Text] = ..., csharp_namespace: Optional[Text] = ..., swift_prefix: Optional[Text] = ..., php_class_prefix: Optional[Text] = ..., php_namespace: Optional[Text] = ..., uninterpreted_option: Optional[Iterable[UninterpretedOption]] = ..., ) -> None: ... class MessageOptions(Message): message_set_wire_format: bool no_standard_descriptor_accessor: bool deprecated: bool map_entry: bool @property def uninterpreted_option(self) -> RepeatedCompositeFieldContainer[UninterpretedOption]: ... def __init__( self, message_set_wire_format: Optional[bool] = ..., no_standard_descriptor_accessor: Optional[bool] = ..., deprecated: Optional[bool] = ..., map_entry: Optional[bool] = ..., uninterpreted_option: Optional[Iterable[UninterpretedOption]] = ..., ) -> None: ... class FieldOptions(Message): class CType(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> FieldOptions.CType: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[FieldOptions.CType]: ... @classmethod def items(cls) -> List[Tuple[bytes, FieldOptions.CType]]: ... STRING: FieldOptions.CType CORD: FieldOptions.CType STRING_PIECE: FieldOptions.CType class JSType(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> FieldOptions.JSType: ... @classmethod def keys(cls) -> List[bytes]: ... 
@classmethod def values(cls) -> List[FieldOptions.JSType]: ... @classmethod def items(cls) -> List[Tuple[bytes, FieldOptions.JSType]]: ... JS_NORMAL: FieldOptions.JSType JS_STRING: FieldOptions.JSType JS_NUMBER: FieldOptions.JSType ctype: FieldOptions.CType packed: bool jstype: FieldOptions.JSType lazy: bool deprecated: bool weak: bool @property def uninterpreted_option(self) -> RepeatedCompositeFieldContainer[UninterpretedOption]: ... def __init__( self, ctype: Optional[FieldOptions.CType] = ..., packed: Optional[bool] = ..., jstype: Optional[FieldOptions.JSType] = ..., lazy: Optional[bool] = ..., deprecated: Optional[bool] = ..., weak: Optional[bool] = ..., uninterpreted_option: Optional[Iterable[UninterpretedOption]] = ..., ) -> None: ... class OneofOptions(Message): @property def uninterpreted_option(self) -> RepeatedCompositeFieldContainer[UninterpretedOption]: ... def __init__(self, uninterpreted_option: Optional[Iterable[UninterpretedOption]] = ...) -> None: ... class EnumOptions(Message): allow_alias: bool deprecated: bool @property def uninterpreted_option(self) -> RepeatedCompositeFieldContainer[UninterpretedOption]: ... def __init__( self, allow_alias: Optional[bool] = ..., deprecated: Optional[bool] = ..., uninterpreted_option: Optional[Iterable[UninterpretedOption]] = ..., ) -> None: ... class EnumValueOptions(Message): deprecated: bool @property def uninterpreted_option(self) -> RepeatedCompositeFieldContainer[UninterpretedOption]: ... def __init__( self, deprecated: Optional[bool] = ..., uninterpreted_option: Optional[Iterable[UninterpretedOption]] = ... ) -> None: ... class ServiceOptions(Message): deprecated: bool @property def uninterpreted_option(self) -> RepeatedCompositeFieldContainer[UninterpretedOption]: ... def __init__( self, deprecated: Optional[bool] = ..., uninterpreted_option: Optional[Iterable[UninterpretedOption]] = ... ) -> None: ... 
class MethodOptions(Message): class IdempotencyLevel(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> MethodOptions.IdempotencyLevel: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[MethodOptions.IdempotencyLevel]: ... @classmethod def items(cls) -> List[Tuple[bytes, MethodOptions.IdempotencyLevel]]: ... IDEMPOTENCY_UNKNOWN: MethodOptions.IdempotencyLevel NO_SIDE_EFFECTS: MethodOptions.IdempotencyLevel IDEMPOTENT: MethodOptions.IdempotencyLevel deprecated: bool idempotency_level: MethodOptions.IdempotencyLevel @property def uninterpreted_option(self) -> RepeatedCompositeFieldContainer[UninterpretedOption]: ... def __init__( self, deprecated: Optional[bool] = ..., idempotency_level: Optional[MethodOptions.IdempotencyLevel] = ..., uninterpreted_option: Optional[Iterable[UninterpretedOption]] = ..., ) -> None: ... class UninterpretedOption(Message): class NamePart(Message): name_part: Text is_extension: bool def __init__(self, name_part: Text, is_extension: bool) -> None: ... identifier_value: Text positive_int_value: int negative_int_value: int double_value: float string_value: bytes aggregate_value: Text @property def name(self) -> RepeatedCompositeFieldContainer[UninterpretedOption.NamePart]: ... def __init__( self, name: Optional[Iterable[UninterpretedOption.NamePart]] = ..., identifier_value: Optional[Text] = ..., positive_int_value: Optional[int] = ..., negative_int_value: Optional[int] = ..., double_value: Optional[float] = ..., string_value: Optional[bytes] = ..., aggregate_value: Optional[Text] = ..., ) -> None: ... 
class SourceCodeInfo(Message): class Location(Message): path: RepeatedScalarFieldContainer[int] span: RepeatedScalarFieldContainer[int] leading_comments: Text trailing_comments: Text leading_detached_comments: RepeatedScalarFieldContainer[Text] def __init__( self, path: Optional[Iterable[int]] = ..., span: Optional[Iterable[int]] = ..., leading_comments: Optional[Text] = ..., trailing_comments: Optional[Text] = ..., leading_detached_comments: Optional[Iterable[Text]] = ..., ) -> None: ... @property def location(self) -> RepeatedCompositeFieldContainer[SourceCodeInfo.Location]: ... def __init__(self, location: Optional[Iterable[SourceCodeInfo.Location]] = ...) -> None: ... class GeneratedCodeInfo(Message): class Annotation(Message): path: RepeatedScalarFieldContainer[int] source_file: Text begin: int end: int def __init__( self, path: Optional[Iterable[int]] = ..., source_file: Optional[Text] = ..., begin: Optional[int] = ..., end: Optional[int] = ..., ) -> None: ... @property def annotation(self) -> RepeatedCompositeFieldContainer[GeneratedCodeInfo.Annotation]: ... def __init__(self, annotation: Optional[Iterable[GeneratedCodeInfo.Annotation]] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/descriptor_pool.pyi0000644€tŠÔÚ€2›s®0000000135013576752252034045 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class DescriptorPool: def __new__(cls, descriptor_db: Optional[Any] = ...): ... def __init__(self, descriptor_db: Optional[Any] = ...) -> None: ... def Add(self, file_desc_proto): ... def AddSerializedFile(self, serialized_file_desc_proto): ... def AddDescriptor(self, desc): ... def AddEnumDescriptor(self, enum_desc): ... def AddFileDescriptor(self, file_desc): ... def FindFileByName(self, file_name): ... def FindFileContainingSymbol(self, symbol): ... def FindMessageTypeByName(self, full_name): ... def FindEnumTypeByName(self, full_name): ... def FindFieldByName(self, full_name): ... 
def FindExtensionByName(self, full_name): ... def Default(): ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/duration_pb2.pyi0000644€tŠÔÚ€2›s®0000000046213576752252033231 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from google.protobuf.internal import well_known_types from typing import Optional class Duration(Message, well_known_types.Duration): seconds: int nanos: int def __init__(self, seconds: Optional[int] = ..., nanos: Optional[int] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/empty_pb2.pyi0000644€tŠÔÚ€2›s®0000000015013576752252032534 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message class Empty(Message): def __init__(self,) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/field_mask_pb2.pyi0000644€tŠÔÚ€2›s®0000000061413576752252033501 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedScalarFieldContainer from google.protobuf.internal import well_known_types from google.protobuf.message import Message from typing import Iterable, Optional, Text class FieldMask(Message, well_known_types.FieldMask): paths: RepeatedScalarFieldContainer[Text] def __init__(self, paths: Optional[Iterable[Text]] = ...) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/internal/0000755€tŠÔÚ€2›s®0000000000013576752267031736 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/internal/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252034200 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/internal/containers.pyi0000644€tŠÔÚ€2›s®0000000520113576752252034616 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.descriptor import Descriptor from google.protobuf.internal.message_listener import MessageListener from google.protobuf.message import Message from typing import Sequence, TypeVar, Generic, Any, Iterator, Iterable, Union, Optional, Callable, overload, List _T = TypeVar("_T") class BaseContainer(Sequence[_T]): def __init__(self, message_listener: MessageListener) -> None: ... def __len__(self) -> int: ... def __ne__(self, other: object) -> bool: ... def __hash__(self) -> int: ... def __repr__(self) -> str: ... def sort(self, *, key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> None: ... @overload def __getitem__(self, key: int) -> _T: ... @overload def __getitem__(self, key: slice) -> List[_T]: ... class RepeatedScalarFieldContainer(BaseContainer[_T]): def __init__(self, message_listener: MessageListener, message_descriptor: Descriptor) -> None: ... def append(self, value: _T) -> None: ... def insert(self, key: int, value: _T) -> None: ... def extend(self, elem_seq: Optional[Iterable[_T]]) -> None: ... def MergeFrom(self, other: RepeatedScalarFieldContainer[_T]) -> None: ... def remove(self, elem: _T) -> None: ... def pop(self, key: int = ...) -> _T: ... @overload def __setitem__(self, key: int, value: _T) -> None: ... @overload def __setitem__(self, key: slice, value: Iterable[_T]) -> None: ... def __getslice__(self, start: int, stop: int) -> List[_T]: ... 
def __setslice__(self, start: int, stop: int, values: Iterable[_T]) -> None: ... def __delitem__(self, key: Union[int, slice]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __eq__(self, other: object) -> bool: ... class RepeatedCompositeFieldContainer(BaseContainer[_T]): def __init__(self, message_listener: MessageListener, type_checker: Any) -> None: ... def add(self, **kwargs: Any) -> _T: ... def append(self, value: _T) -> None: ... def insert(self, key: int, value: _T) -> None: ... def extend(self, elem_seq: Iterable[_T]) -> None: ... def MergeFrom(self, other: RepeatedCompositeFieldContainer[_T]) -> None: ... def remove(self, elem: _T) -> None: ... def pop(self, key: int = ...) -> _T: ... def __getslice__(self, start: int, stop: int) -> List[_T]: ... def __delitem__(self, key: Union[int, slice]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __eq__(self, other: object) -> bool: ... # Classes not yet typed class Mapping(Any): ... class MutableMapping(Mapping): ... class ScalarMap(MutableMapping): ... class MessageMap(MutableMapping): ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/internal/decoder.pyi0000644€tŠÔÚ€2›s®0000000153413576752252034063 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def ReadTag(buffer, pos): ... def EnumDecoder(field_number, is_repeated, is_packed, key, new_default): ... Int32Decoder: Any Int64Decoder: Any UInt32Decoder: Any UInt64Decoder: Any SInt32Decoder: Any SInt64Decoder: Any Fixed32Decoder: Any Fixed64Decoder: Any SFixed32Decoder: Any SFixed64Decoder: Any FloatDecoder: Any DoubleDecoder: Any BoolDecoder: Any def StringDecoder(field_number, is_repeated, is_packed, key, new_default): ... def BytesDecoder(field_number, is_repeated, is_packed, key, new_default): ... def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): ... def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): ... 
MESSAGE_SET_ITEM_TAG: Any def MessageSetItemDecoder(extensions_by_number): ... def MapDecoder(field_descriptor, new_default, is_message_map): ... SkipField: Any mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/internal/encoder.pyi0000644€tŠÔÚ€2›s®0000000202513576752252034071 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any Int32Sizer: Any UInt32Sizer: Any SInt32Sizer: Any Fixed32Sizer: Any Fixed64Sizer: Any BoolSizer: Any def StringSizer(field_number, is_repeated, is_packed): ... def BytesSizer(field_number, is_repeated, is_packed): ... def GroupSizer(field_number, is_repeated, is_packed): ... def MessageSizer(field_number, is_repeated, is_packed): ... def MessageSetItemSizer(field_number): ... def MapSizer(field_descriptor): ... def TagBytes(field_number, wire_type): ... Int32Encoder: Any UInt32Encoder: Any SInt32Encoder: Any Fixed32Encoder: Any Fixed64Encoder: Any SFixed32Encoder: Any SFixed64Encoder: Any FloatEncoder: Any DoubleEncoder: Any def BoolEncoder(field_number, is_repeated, is_packed): ... def StringEncoder(field_number, is_repeated, is_packed): ... def BytesEncoder(field_number, is_repeated, is_packed): ... def GroupEncoder(field_number, is_repeated, is_packed): ... def MessageEncoder(field_number, is_repeated, is_packed): ... def MessageSetItemEncoder(field_number): ... def MapEncoder(field_descriptor): ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/internal/enum_type_wrapper.pyi0000644€tŠÔÚ€2›s®0000000054513576752252036224 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Tuple class EnumTypeWrapper(object): def __init__(self, enum_type: Any) -> None: ... def Name(self, number: int) -> bytes: ... def Value(self, name: bytes) -> int: ... def keys(self) -> List[bytes]: ... def values(self) -> List[int]: ... @classmethod def items(cls) -> List[Tuple[bytes, int]]: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/internal/message_listener.pyi0000644€tŠÔÚ€2›s®0000000022413576752252036002 0ustar jukkaDROPBOX\Domain Users00000000000000class MessageListener(object): def Modified(self) -> None: ... class NullMessageListener(MessageListener): def Modified(self) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/internal/well_known_types.pyi0000644€tŠÔÚ€2›s®0000000716013576752252036062 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from datetime import datetime class Error(Exception): ... class ParseError(Error): ... # This is named 'Any' in the original, but that conflicts with typing.Any, # and we really only need this file to mix in. class Any_: type_url: Any = ... value: Any = ... def Pack(self, msg: Any, type_url_prefix: bytes = ..., deterministic: Optional[Any] = ...) -> None: ... def Unpack(self, msg: Any): ... def TypeName(self): ... def Is(self, descriptor: Any): ... class Timestamp: def ToJsonString(self) -> str: ... seconds: Any = ... nanos: Any = ... def FromJsonString(self, value: Any) -> None: ... def GetCurrentTime(self) -> None: ... def ToNanoseconds(self): ... def ToMicroseconds(self): ... def ToMilliseconds(self): ... def ToSeconds(self): ... def FromNanoseconds(self, nanos: Any) -> None: ... def FromMicroseconds(self, micros: Any) -> None: ... def FromMilliseconds(self, millis: Any) -> None: ... def FromSeconds(self, seconds: Any) -> None: ... def ToDatetime(self) -> datetime: ... def FromDatetime(self, dt: datetime) -> None: ... class Duration: def ToJsonString(self) -> str: ... seconds: Any = ... nanos: Any = ... def FromJsonString(self, value: Any) -> None: ... def ToNanoseconds(self): ... def ToMicroseconds(self): ... def ToMilliseconds(self): ... def ToSeconds(self): ... def FromNanoseconds(self, nanos: Any) -> None: ... def FromMicroseconds(self, micros: Any) -> None: ... def FromMilliseconds(self, millis: Any) -> None: ... 
def FromSeconds(self, seconds: Any) -> None: ... def ToTimedelta(self): ... def FromTimedelta(self, td: Any) -> None: ... class FieldMask: def ToJsonString(self) -> str: ... def FromJsonString(self, value: Any) -> None: ... def IsValidForDescriptor(self, message_descriptor: Any): ... def AllFieldsFromDescriptor(self, message_descriptor: Any) -> None: ... def CanonicalFormFromMask(self, mask: Any) -> None: ... def Union(self, mask1: Any, mask2: Any) -> None: ... def Intersect(self, mask1: Any, mask2: Any) -> None: ... def MergeMessage( self, source: Any, destination: Any, replace_message_field: bool = ..., replace_repeated_field: bool = ... ) -> None: ... class _FieldMaskTree: def __init__(self, field_mask: Optional[Any] = ...) -> None: ... def MergeFromFieldMask(self, field_mask: Any) -> None: ... def AddPath(self, path: Any): ... def ToFieldMask(self, field_mask: Any) -> None: ... def IntersectPath(self, path: Any, intersection: Any): ... def AddLeafNodes(self, prefix: Any, node: Any) -> None: ... def MergeMessage(self, source: Any, destination: Any, replace_message: Any, replace_repeated: Any) -> None: ... class Struct: def __getitem__(self, key: Any): ... def __contains__(self, item: Any): ... def __setitem__(self, key: Any, value: Any) -> None: ... def __delitem__(self, key: Any) -> None: ... def __len__(self): ... def __iter__(self): ... def keys(self): ... def values(self): ... def items(self): ... def get_or_create_list(self, key: Any): ... def get_or_create_struct(self, key: Any): ... def update(self, dictionary: Any) -> None: ... class ListValue: def __len__(self): ... def append(self, value: Any) -> None: ... def extend(self, elem_seq: Any) -> None: ... def __getitem__(self, index: Any): ... def __setitem__(self, index: Any, value: Any) -> None: ... def __delitem__(self, key: Any) -> None: ... def items(self) -> None: ... def add_struct(self): ... def add_list(self): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/internal/wire_format.pyi0000644€tŠÔÚ€2›s®0000000302213576752252034766 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any TAG_TYPE_BITS: Any TAG_TYPE_MASK: Any WIRETYPE_VARINT: Any WIRETYPE_FIXED64: Any WIRETYPE_LENGTH_DELIMITED: Any WIRETYPE_START_GROUP: Any WIRETYPE_END_GROUP: Any WIRETYPE_FIXED32: Any INT32_MAX: Any INT32_MIN: Any UINT32_MAX: Any INT64_MAX: Any INT64_MIN: Any UINT64_MAX: Any FORMAT_UINT32_LITTLE_ENDIAN: Any FORMAT_UINT64_LITTLE_ENDIAN: Any FORMAT_FLOAT_LITTLE_ENDIAN: Any FORMAT_DOUBLE_LITTLE_ENDIAN: Any def PackTag(field_number, wire_type): ... def UnpackTag(tag): ... def ZigZagEncode(value): ... def ZigZagDecode(value): ... def Int32ByteSize(field_number, int32): ... def Int32ByteSizeNoTag(int32): ... def Int64ByteSize(field_number, int64): ... def UInt32ByteSize(field_number, uint32): ... def UInt64ByteSize(field_number, uint64): ... def SInt32ByteSize(field_number, int32): ... def SInt64ByteSize(field_number, int64): ... def Fixed32ByteSize(field_number, fixed32): ... def Fixed64ByteSize(field_number, fixed64): ... def SFixed32ByteSize(field_number, sfixed32): ... def SFixed64ByteSize(field_number, sfixed64): ... def FloatByteSize(field_number, flt): ... def DoubleByteSize(field_number, double): ... def BoolByteSize(field_number, b): ... def EnumByteSize(field_number, enum): ... def StringByteSize(field_number, string): ... def BytesByteSize(field_number, b): ... def GroupByteSize(field_number, message): ... def MessageByteSize(field_number, message): ... def MessageSetItemByteSize(field_number, msg): ... def TagByteSize(field_number): ... NON_PACKABLE_TYPES: Any def IsTypePackable(field_type): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/json_format.pyi0000644€tŠÔÚ€2›s®0000000162113576752252033160 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Dict, Text, TypeVar, Union from google.protobuf.message import Message _MessageVar = TypeVar("_MessageVar", bound=Message) class Error(Exception): ... class ParseError(Error): ... class SerializeToJsonError(Error): ... def MessageToJson( message: Message, including_default_value_fields: bool = ..., preserving_proto_field_name: bool = ..., indent: int = ..., sort_keys: bool = ..., use_integers_for_enums: bool = ..., ) -> str: ... def MessageToDict( message: Message, including_default_value_fields: bool = ..., preserving_proto_field_name: bool = ..., use_integers_for_enums: bool = ..., ) -> Dict[Text, Any]: ... def Parse(text: Union[bytes, Text], message: _MessageVar, ignore_unknown_fields: bool = ...) -> _MessageVar: ... def ParseDict(js_dict: Any, message: _MessageVar, ignore_unknown_fields: bool = ...) -> _MessageVar: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/map_proto2_unittest_pb2.pyi0000644€tŠÔÚ€2›s®0000001777213576752252035441 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from google.protobuf.unittest_import_pb2 import ImportEnumForMap from typing import List, Mapping, MutableMapping, Optional, Text, Tuple, cast class Proto2MapEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> Proto2MapEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[Proto2MapEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, Proto2MapEnum]]: ... PROTO2_MAP_ENUM_FOO: Proto2MapEnum PROTO2_MAP_ENUM_BAR: Proto2MapEnum PROTO2_MAP_ENUM_BAZ: Proto2MapEnum class Proto2MapEnumPlusExtra(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> Proto2MapEnumPlusExtra: ... 
@classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[Proto2MapEnumPlusExtra]: ... @classmethod def items(cls) -> List[Tuple[bytes, Proto2MapEnumPlusExtra]]: ... E_PROTO2_MAP_ENUM_FOO: Proto2MapEnumPlusExtra E_PROTO2_MAP_ENUM_BAR: Proto2MapEnumPlusExtra E_PROTO2_MAP_ENUM_BAZ: Proto2MapEnumPlusExtra E_PROTO2_MAP_ENUM_EXTRA: Proto2MapEnumPlusExtra class TestEnumMap(Message): class KnownMapFieldEntry(Message): key: int value: Proto2MapEnum def __init__(self, key: Optional[int] = ..., value: Optional[Proto2MapEnum] = ...) -> None: ... class UnknownMapFieldEntry(Message): key: int value: Proto2MapEnum def __init__(self, key: Optional[int] = ..., value: Optional[Proto2MapEnum] = ...) -> None: ... @property def known_map_field(self) -> MutableMapping[int, Proto2MapEnum]: ... @property def unknown_map_field(self) -> MutableMapping[int, Proto2MapEnum]: ... def __init__( self, known_map_field: Optional[Mapping[int, Proto2MapEnum]] = ..., unknown_map_field: Optional[Mapping[int, Proto2MapEnum]] = ..., ) -> None: ... class TestEnumMapPlusExtra(Message): class KnownMapFieldEntry(Message): key: int value: Proto2MapEnumPlusExtra def __init__(self, key: Optional[int] = ..., value: Optional[Proto2MapEnumPlusExtra] = ...) -> None: ... class UnknownMapFieldEntry(Message): key: int value: Proto2MapEnumPlusExtra def __init__(self, key: Optional[int] = ..., value: Optional[Proto2MapEnumPlusExtra] = ...) -> None: ... @property def known_map_field(self) -> MutableMapping[int, Proto2MapEnumPlusExtra]: ... @property def unknown_map_field(self) -> MutableMapping[int, Proto2MapEnumPlusExtra]: ... def __init__( self, known_map_field: Optional[Mapping[int, Proto2MapEnumPlusExtra]] = ..., unknown_map_field: Optional[Mapping[int, Proto2MapEnumPlusExtra]] = ..., ) -> None: ... class TestImportEnumMap(Message): class ImportEnumAmpEntry(Message): key: int value: ImportEnumForMap def __init__(self, key: Optional[int] = ..., value: Optional[ImportEnumForMap] = ...) 
-> None: ... @property def import_enum_amp(self) -> MutableMapping[int, ImportEnumForMap]: ... def __init__(self, import_enum_amp: Optional[Mapping[int, ImportEnumForMap]] = ...) -> None: ... class TestIntIntMap(Message): class MEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... @property def m(self) -> MutableMapping[int, int]: ... def __init__(self, m: Optional[Mapping[int, int]] = ...) -> None: ... class TestMaps(Message): class MInt32Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MInt64Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MUint32Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MUint64Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MSint32Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MSint64Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MFixed32Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MFixed64Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... 
class MSfixed32Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MSfixed64Entry(Message): key: int @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MBoolEntry(Message): key: bool @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[bool] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... class MStringEntry(Message): key: Text @property def value(self) -> TestIntIntMap: ... def __init__(self, key: Optional[Text] = ..., value: Optional[TestIntIntMap] = ...) -> None: ... @property def m_int32(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_int64(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_uint32(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_uint64(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_sint32(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_sint64(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_fixed32(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_fixed64(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_sfixed32(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_sfixed64(self) -> MutableMapping[int, TestIntIntMap]: ... @property def m_bool(self) -> MutableMapping[bool, TestIntIntMap]: ... @property def m_string(self) -> MutableMapping[Text, TestIntIntMap]: ... 
def __init__( self, m_int32: Optional[Mapping[int, TestIntIntMap]] = ..., m_int64: Optional[Mapping[int, TestIntIntMap]] = ..., m_uint32: Optional[Mapping[int, TestIntIntMap]] = ..., m_uint64: Optional[Mapping[int, TestIntIntMap]] = ..., m_sint32: Optional[Mapping[int, TestIntIntMap]] = ..., m_sint64: Optional[Mapping[int, TestIntIntMap]] = ..., m_fixed32: Optional[Mapping[int, TestIntIntMap]] = ..., m_fixed64: Optional[Mapping[int, TestIntIntMap]] = ..., m_sfixed32: Optional[Mapping[int, TestIntIntMap]] = ..., m_sfixed64: Optional[Mapping[int, TestIntIntMap]] = ..., m_bool: Optional[Mapping[bool, TestIntIntMap]] = ..., m_string: Optional[Mapping[Text, TestIntIntMap]] = ..., ) -> None: ... class TestSubmessageMaps(Message): @property def m(self) -> TestMaps: ... def __init__(self, m: Optional[TestMaps] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/map_unittest_pb2.pyi0000644€tŠÔÚ€2›s®0000004012113576752252034114 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from google.protobuf.unittest_no_arena_pb2 import ForeignMessage from google.protobuf.unittest_pb2 import ForeignMessage as ForeignMessage1, TestAllTypes, TestRequired from typing import List, Mapping, MutableMapping, Optional, Text, Tuple, cast class MapEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> MapEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[MapEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, MapEnum]]: ... MAP_ENUM_FOO: MapEnum MAP_ENUM_BAR: MapEnum MAP_ENUM_BAZ: MapEnum class TestMap(Message): class MapInt32Int32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapInt64Int64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... 
class MapUint32Uint32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapUint64Uint64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSint32Sint32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSint64Sint64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapFixed32Fixed32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapFixed64Fixed64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSfixed32Sfixed32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSfixed64Sfixed64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapInt32FloatEntry(Message): key: int value: float def __init__(self, key: Optional[int] = ..., value: Optional[float] = ...) -> None: ... class MapInt32DoubleEntry(Message): key: int value: float def __init__(self, key: Optional[int] = ..., value: Optional[float] = ...) -> None: ... class MapBoolBoolEntry(Message): key: bool value: bool def __init__(self, key: Optional[bool] = ..., value: Optional[bool] = ...) -> None: ... class MapStringStringEntry(Message): key: Text value: Text def __init__(self, key: Optional[Text] = ..., value: Optional[Text] = ...) -> None: ... class MapInt32BytesEntry(Message): key: int value: bytes def __init__(self, key: Optional[int] = ..., value: Optional[bytes] = ...) -> None: ... 
class MapInt32EnumEntry(Message): key: int value: MapEnum def __init__(self, key: Optional[int] = ..., value: Optional[MapEnum] = ...) -> None: ... class MapInt32ForeignMessageEntry(Message): key: int @property def value(self) -> ForeignMessage1: ... def __init__(self, key: Optional[int] = ..., value: Optional[ForeignMessage1] = ...) -> None: ... class MapStringForeignMessageEntry(Message): key: Text @property def value(self) -> ForeignMessage1: ... def __init__(self, key: Optional[Text] = ..., value: Optional[ForeignMessage1] = ...) -> None: ... class MapInt32AllTypesEntry(Message): key: int @property def value(self) -> TestAllTypes: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestAllTypes] = ...) -> None: ... @property def map_int32_int32(self) -> MutableMapping[int, int]: ... @property def map_int64_int64(self) -> MutableMapping[int, int]: ... @property def map_uint32_uint32(self) -> MutableMapping[int, int]: ... @property def map_uint64_uint64(self) -> MutableMapping[int, int]: ... @property def map_sint32_sint32(self) -> MutableMapping[int, int]: ... @property def map_sint64_sint64(self) -> MutableMapping[int, int]: ... @property def map_fixed32_fixed32(self) -> MutableMapping[int, int]: ... @property def map_fixed64_fixed64(self) -> MutableMapping[int, int]: ... @property def map_sfixed32_sfixed32(self) -> MutableMapping[int, int]: ... @property def map_sfixed64_sfixed64(self) -> MutableMapping[int, int]: ... @property def map_int32_float(self) -> MutableMapping[int, float]: ... @property def map_int32_double(self) -> MutableMapping[int, float]: ... @property def map_bool_bool(self) -> MutableMapping[bool, bool]: ... @property def map_string_string(self) -> MutableMapping[Text, Text]: ... @property def map_int32_bytes(self) -> MutableMapping[int, bytes]: ... @property def map_int32_enum(self) -> MutableMapping[int, MapEnum]: ... @property def map_int32_foreign_message(self) -> MutableMapping[int, ForeignMessage1]: ... 
@property def map_string_foreign_message(self) -> MutableMapping[Text, ForeignMessage1]: ... @property def map_int32_all_types(self) -> MutableMapping[int, TestAllTypes]: ... def __init__( self, map_int32_int32: Optional[Mapping[int, int]] = ..., map_int64_int64: Optional[Mapping[int, int]] = ..., map_uint32_uint32: Optional[Mapping[int, int]] = ..., map_uint64_uint64: Optional[Mapping[int, int]] = ..., map_sint32_sint32: Optional[Mapping[int, int]] = ..., map_sint64_sint64: Optional[Mapping[int, int]] = ..., map_fixed32_fixed32: Optional[Mapping[int, int]] = ..., map_fixed64_fixed64: Optional[Mapping[int, int]] = ..., map_sfixed32_sfixed32: Optional[Mapping[int, int]] = ..., map_sfixed64_sfixed64: Optional[Mapping[int, int]] = ..., map_int32_float: Optional[Mapping[int, float]] = ..., map_int32_double: Optional[Mapping[int, float]] = ..., map_bool_bool: Optional[Mapping[bool, bool]] = ..., map_string_string: Optional[Mapping[Text, Text]] = ..., map_int32_bytes: Optional[Mapping[int, bytes]] = ..., map_int32_enum: Optional[Mapping[int, MapEnum]] = ..., map_int32_foreign_message: Optional[Mapping[int, ForeignMessage1]] = ..., map_string_foreign_message: Optional[Mapping[Text, ForeignMessage1]] = ..., map_int32_all_types: Optional[Mapping[int, TestAllTypes]] = ..., ) -> None: ... class TestMapSubmessage(Message): @property def test_map(self) -> TestMap: ... def __init__(self, test_map: Optional[TestMap] = ...) -> None: ... class TestMessageMap(Message): class MapInt32MessageEntry(Message): key: int @property def value(self) -> TestAllTypes: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestAllTypes] = ...) -> None: ... @property def map_int32_message(self) -> MutableMapping[int, TestAllTypes]: ... def __init__(self, map_int32_message: Optional[Mapping[int, TestAllTypes]] = ...) -> None: ... class TestSameTypeMap(Message): class Map1Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) 
-> None: ... class Map2Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... @property def map1(self) -> MutableMapping[int, int]: ... @property def map2(self) -> MutableMapping[int, int]: ... def __init__(self, map1: Optional[Mapping[int, int]] = ..., map2: Optional[Mapping[int, int]] = ...) -> None: ... class TestRequiredMessageMap(Message): class MapFieldEntry(Message): key: int @property def value(self) -> TestRequired: ... def __init__(self, key: Optional[int] = ..., value: Optional[TestRequired] = ...) -> None: ... @property def map_field(self) -> MutableMapping[int, TestRequired]: ... def __init__(self, map_field: Optional[Mapping[int, TestRequired]] = ...) -> None: ... class TestArenaMap(Message): class MapInt32Int32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapInt64Int64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapUint32Uint32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapUint64Uint64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSint32Sint32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSint64Sint64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapFixed32Fixed32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapFixed64Fixed64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... 
class MapSfixed32Sfixed32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSfixed64Sfixed64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapInt32FloatEntry(Message): key: int value: float def __init__(self, key: Optional[int] = ..., value: Optional[float] = ...) -> None: ... class MapInt32DoubleEntry(Message): key: int value: float def __init__(self, key: Optional[int] = ..., value: Optional[float] = ...) -> None: ... class MapBoolBoolEntry(Message): key: bool value: bool def __init__(self, key: Optional[bool] = ..., value: Optional[bool] = ...) -> None: ... class MapStringStringEntry(Message): key: Text value: Text def __init__(self, key: Optional[Text] = ..., value: Optional[Text] = ...) -> None: ... class MapInt32BytesEntry(Message): key: int value: bytes def __init__(self, key: Optional[int] = ..., value: Optional[bytes] = ...) -> None: ... class MapInt32EnumEntry(Message): key: int value: MapEnum def __init__(self, key: Optional[int] = ..., value: Optional[MapEnum] = ...) -> None: ... class MapInt32ForeignMessageEntry(Message): key: int @property def value(self) -> ForeignMessage1: ... def __init__(self, key: Optional[int] = ..., value: Optional[ForeignMessage1] = ...) -> None: ... class MapInt32ForeignMessageNoArenaEntry(Message): key: int @property def value(self) -> ForeignMessage: ... def __init__(self, key: Optional[int] = ..., value: Optional[ForeignMessage] = ...) -> None: ... @property def map_int32_int32(self) -> MutableMapping[int, int]: ... @property def map_int64_int64(self) -> MutableMapping[int, int]: ... @property def map_uint32_uint32(self) -> MutableMapping[int, int]: ... @property def map_uint64_uint64(self) -> MutableMapping[int, int]: ... @property def map_sint32_sint32(self) -> MutableMapping[int, int]: ... @property def map_sint64_sint64(self) -> MutableMapping[int, int]: ... 
@property def map_fixed32_fixed32(self) -> MutableMapping[int, int]: ... @property def map_fixed64_fixed64(self) -> MutableMapping[int, int]: ... @property def map_sfixed32_sfixed32(self) -> MutableMapping[int, int]: ... @property def map_sfixed64_sfixed64(self) -> MutableMapping[int, int]: ... @property def map_int32_float(self) -> MutableMapping[int, float]: ... @property def map_int32_double(self) -> MutableMapping[int, float]: ... @property def map_bool_bool(self) -> MutableMapping[bool, bool]: ... @property def map_string_string(self) -> MutableMapping[Text, Text]: ... @property def map_int32_bytes(self) -> MutableMapping[int, bytes]: ... @property def map_int32_enum(self) -> MutableMapping[int, MapEnum]: ... @property def map_int32_foreign_message(self) -> MutableMapping[int, ForeignMessage1]: ... @property def map_int32_foreign_message_no_arena(self) -> MutableMapping[int, ForeignMessage]: ... def __init__( self, map_int32_int32: Optional[Mapping[int, int]] = ..., map_int64_int64: Optional[Mapping[int, int]] = ..., map_uint32_uint32: Optional[Mapping[int, int]] = ..., map_uint64_uint64: Optional[Mapping[int, int]] = ..., map_sint32_sint32: Optional[Mapping[int, int]] = ..., map_sint64_sint64: Optional[Mapping[int, int]] = ..., map_fixed32_fixed32: Optional[Mapping[int, int]] = ..., map_fixed64_fixed64: Optional[Mapping[int, int]] = ..., map_sfixed32_sfixed32: Optional[Mapping[int, int]] = ..., map_sfixed64_sfixed64: Optional[Mapping[int, int]] = ..., map_int32_float: Optional[Mapping[int, float]] = ..., map_int32_double: Optional[Mapping[int, float]] = ..., map_bool_bool: Optional[Mapping[bool, bool]] = ..., map_string_string: Optional[Mapping[Text, Text]] = ..., map_int32_bytes: Optional[Mapping[int, bytes]] = ..., map_int32_enum: Optional[Mapping[int, MapEnum]] = ..., map_int32_foreign_message: Optional[Mapping[int, ForeignMessage1]] = ..., map_int32_foreign_message_no_arena: Optional[Mapping[int, ForeignMessage]] = ..., ) -> None: ... 
class MessageContainingEnumCalledType(Message): class Type(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> MessageContainingEnumCalledType.Type: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[MessageContainingEnumCalledType.Type]: ... @classmethod def items(cls) -> List[Tuple[bytes, MessageContainingEnumCalledType.Type]]: ... TYPE_FOO: MessageContainingEnumCalledType.Type class TypeEntry(Message): key: Text @property def value(self) -> MessageContainingEnumCalledType: ... def __init__(self, key: Optional[Text] = ..., value: Optional[MessageContainingEnumCalledType] = ...) -> None: ... @property def type(self) -> MutableMapping[Text, MessageContainingEnumCalledType]: ... def __init__(self, type: Optional[Mapping[Text, MessageContainingEnumCalledType]] = ...) -> None: ... class MessageContainingMapCalledEntry(Message): class EntryEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... @property def entry(self) -> MutableMapping[int, int]: ... def __init__(self, entry: Optional[Mapping[int, int]] = ...) -> None: ... class TestRecursiveMapMessage(Message): class AEntry(Message): key: Text @property def value(self) -> TestRecursiveMapMessage: ... def __init__(self, key: Optional[Text] = ..., value: Optional[TestRecursiveMapMessage] = ...) -> None: ... @property def a(self) -> MutableMapping[Text, TestRecursiveMapMessage]: ... def __init__(self, a: Optional[Mapping[Text, TestRecursiveMapMessage]] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/message.pyi0000644€tŠÔÚ€2›s®0000000404613576752252032267 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Sequence, Optional, Tuple, Type, TypeVar, Union from .descriptor import DescriptorBase, FieldDescriptor class Error(Exception): ... class DecodeError(Error): ... class EncodeError(Error): ... 
class _ExtensionDict: def __getitem__(self, extension_handle: DescriptorBase) -> Any: ... def __setitem__(self, extension_handle: DescriptorBase, value: Any) -> None: ... _T = TypeVar("_T") if sys.version_info < (3,): _Serialized = Union[bytes, buffer, unicode] else: _Serialized = bytes class Message: DESCRIPTOR: Any def __deepcopy__(self, memo=...): ... def __eq__(self, other_msg): ... def __ne__(self, other_msg): ... def MergeFrom(self, other_msg: Message) -> None: ... def CopyFrom(self, other_msg: Message) -> None: ... def Clear(self) -> None: ... def SetInParent(self) -> None: ... def IsInitialized(self) -> bool: ... def MergeFromString(self, serialized: _Serialized) -> int: ... def ParseFromString(self, serialized: _Serialized) -> None: ... def SerializeToString(self, deterministic: bool = ...) -> bytes: ... def SerializePartialToString(self, deterministic: bool = ...) -> bytes: ... def ListFields(self) -> Sequence[Tuple[FieldDescriptor, Any]]: ... def HasExtension(self, extension_handle): ... def ClearExtension(self, extension_handle): ... def ByteSize(self) -> int: ... @classmethod def FromString(cls: Type[_T], s: _Serialized) -> _T: ... @property def Extensions(self) -> _ExtensionDict: ... # Intentionally left out typing on these three methods, because they are # stringly typed and it is not useful to call them on a Message directly. # We prefer more specific typing on individual subclasses of Message # See https://github.com/dropbox/mypy-protobuf/issues/62 for details def HasField(self, field_name: Any) -> bool: ... def ClearField(self, field_name: Any) -> None: ... def WhichOneof(self, oneof_group: Any) -> Any: ... # TODO: check kwargs def __init__(self, **kwargs) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/message_factory.pyi0000644€tŠÔÚ€2›s®0000000100313576752252034004 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterable, Optional, Type from .message import Message from .descriptor import Descriptor from .descriptor_pool import DescriptorPool class MessageFactory: pool: Any def __init__(self, pool: Optional[DescriptorPool] = ...) -> None: ... def GetPrototype(self, descriptor: Descriptor) -> Type[Message]: ... def GetMessages(self, files: Iterable[bytes]) -> Dict[bytes, Type[Message]]: ... def GetMessages(file_protos: Iterable[bytes]) -> Dict[bytes, Type[Message]]: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/reflection.pyi0000644€tŠÔÚ€2›s®0000000034613576752252032774 0ustar jukkaDROPBOX\Domain Users00000000000000class GeneratedProtocolMessageType(type): def __new__(cls, name, bases, dictionary): ... def __init__(self, name, bases, dictionary) -> None: ... def ParseMessage(descriptor, byte_str): ... def MakeClass(descriptor): ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/service.pyi0000644€tŠÔÚ€2›s®0000000253313576752252032302 0ustar jukkaDROPBOX\Domain Users00000000000000from concurrent.futures import Future from typing import Callable, Optional, Text, Type from google.protobuf.descriptor import MethodDescriptor, ServiceDescriptor from google.protobuf.message import Message class RpcException(Exception): ... class Service: @staticmethod def GetDescriptor() -> ServiceDescriptor: ... def CallMethod( self, method_descriptor: MethodDescriptor, rpc_controller: RpcController, request: Message, done: Optional[Callable[[Message], None]], ) -> Optional[Future[Message]]: ... def GetRequestClass(self, method_descriptor: MethodDescriptor) -> Type[Message]: ... def GetResponseClass(self, method_descriptor: MethodDescriptor) -> Type[Message]: ... class RpcController: def Reset(self) -> None: ... def Failed(self) -> bool: ... 
def ErrorText(self) -> Optional[Text]: ... def StartCancel(self) -> None: ... def SetFailed(self, reason: Text) -> None: ... def IsCanceled(self) -> bool: ... def NotifyOnCancel(self, callback: Callable[[], None]) -> None: ... class RpcChannel: def CallMethod( self, method_descriptor: MethodDescriptor, rpc_controller: RpcController, request: Message, response_class: Type[Message], done: Optional[Callable[[Message], None]], ) -> Optional[Future[Message]]: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/source_context_pb2.pyi0000644€tŠÔÚ€2›s®0000000030613576752252034445 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from typing import Optional, Text class SourceContext(Message): file_name: Text def __init__(self, file_name: Optional[Text] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/struct_pb2.pyi0000644€tŠÔÚ€2›s®0000000343413576752252032732 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedCompositeFieldContainer from google.protobuf.internal import well_known_types from google.protobuf.message import Message from typing import Iterable, List, Mapping, MutableMapping, Optional, Text, Tuple, cast class NullValue(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> NullValue: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[NullValue]: ... @classmethod def items(cls) -> List[Tuple[bytes, NullValue]]: ... NULL_VALUE: NullValue class Struct(Message, well_known_types.Struct): class FieldsEntry(Message): key: Text @property def value(self) -> Value: ... def __init__(self, key: Optional[Text] = ..., value: Optional[Value] = ...) -> None: ... @property def fields(self) -> MutableMapping[Text, Value]: ... def __init__(self, fields: Optional[Mapping[Text, Value]] = ...) -> None: ... 
class _Value(Message): null_value: NullValue number_value: float string_value: Text bool_value: bool @property def struct_value(self) -> Struct: ... @property def list_value(self) -> ListValue: ... def __init__( self, null_value: Optional[NullValue] = ..., number_value: Optional[float] = ..., string_value: Optional[Text] = ..., bool_value: Optional[bool] = ..., struct_value: Optional[Struct] = ..., list_value: Optional[ListValue] = ..., ) -> None: ... Value = _Value class ListValue(Message, well_known_types.ListValue): @property def values(self) -> RepeatedCompositeFieldContainer[Value]: ... def __init__(self, values: Optional[Iterable[Value]] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/symbol_database.pyi0000644€tŠÔÚ€2›s®0000000121013576752252033762 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Iterable, Type from .descriptor import EnumDescriptor, FileDescriptor from .message import Message from .message_factory import MessageFactory class SymbolDatabase(MessageFactory): def RegisterMessage(self, message: Type[Message]) -> Type[Message]: ... def RegisterEnumDescriptor(self, enum_descriptor: Type[EnumDescriptor]) -> EnumDescriptor: ... def RegisterFileDescriptor(self, file_descriptor: Type[FileDescriptor]) -> FileDescriptor: ... def GetSymbol(self, symbol: bytes) -> Type[Message]: ... def GetMessages(self, files: Iterable[bytes]) -> Dict[bytes, Type[Message]]: ... def Default(): ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/test_messages_proto2_pb2.pyi0000644€tŠÔÚ€2›s®0000003606513576752252035567 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message import builtins from typing import Iterable, List, Mapping, MutableMapping, Optional, Text, Tuple, cast class ForeignEnumProto2(int): @classmethod def Name(cls, number: int) -> bytes: ... 
@classmethod def Value(cls, name: bytes) -> ForeignEnumProto2: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[ForeignEnumProto2]: ... @classmethod def items(cls) -> List[Tuple[bytes, ForeignEnumProto2]]: ... FOREIGN_FOO: ForeignEnumProto2 FOREIGN_BAR: ForeignEnumProto2 FOREIGN_BAZ: ForeignEnumProto2 class TestAllTypesProto2(Message): class NestedEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestAllTypesProto2.NestedEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestAllTypesProto2.NestedEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, TestAllTypesProto2.NestedEnum]]: ... FOO: TestAllTypesProto2.NestedEnum BAR: TestAllTypesProto2.NestedEnum BAZ: TestAllTypesProto2.NestedEnum NEG: TestAllTypesProto2.NestedEnum class NestedMessage(Message): a: int @property def corecursive(self) -> TestAllTypesProto2: ... def __init__(self, a: Optional[int] = ..., corecursive: Optional[TestAllTypesProto2] = ...) -> None: ... class MapInt32Int32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapInt64Int64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapUint32Uint32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapUint64Uint64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSint32Sint32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSint64Sint64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... 
class MapFixed32Fixed32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapFixed64Fixed64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSfixed32Sfixed32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSfixed64Sfixed64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapInt32FloatEntry(Message): key: int value: float def __init__(self, key: Optional[int] = ..., value: Optional[float] = ...) -> None: ... class MapInt32DoubleEntry(Message): key: int value: float def __init__(self, key: Optional[int] = ..., value: Optional[float] = ...) -> None: ... class MapBoolBoolEntry(Message): key: bool value: bool def __init__(self, key: Optional[bool] = ..., value: Optional[bool] = ...) -> None: ... class MapStringStringEntry(Message): key: Text value: Text def __init__(self, key: Optional[Text] = ..., value: Optional[Text] = ...) -> None: ... class MapStringBytesEntry(Message): key: Text value: bytes def __init__(self, key: Optional[Text] = ..., value: Optional[bytes] = ...) -> None: ... class MapStringNestedMessageEntry(Message): key: Text @property def value(self) -> TestAllTypesProto2.NestedMessage: ... def __init__(self, key: Optional[Text] = ..., value: Optional[TestAllTypesProto2.NestedMessage] = ...) -> None: ... class MapStringForeignMessageEntry(Message): key: Text @property def value(self) -> ForeignMessageProto2: ... def __init__(self, key: Optional[Text] = ..., value: Optional[ForeignMessageProto2] = ...) -> None: ... class MapStringNestedEnumEntry(Message): key: Text value: TestAllTypesProto2.NestedEnum def __init__(self, key: Optional[Text] = ..., value: Optional[TestAllTypesProto2.NestedEnum] = ...) -> None: ... 
class MapStringForeignEnumEntry(Message): key: Text value: ForeignEnumProto2 def __init__(self, key: Optional[Text] = ..., value: Optional[ForeignEnumProto2] = ...) -> None: ... class Data(Message): group_int32: int group_uint32: int def __init__(self, group_int32: Optional[int] = ..., group_uint32: Optional[int] = ...) -> None: ... class MessageSetCorrect(Message): def __init__(self,) -> None: ... class MessageSetCorrectExtension1(Message): bytes: Text def __init__(self, bytes: Optional[Text] = ...) -> None: ... class MessageSetCorrectExtension2(Message): i: int def __init__(self, i: Optional[int] = ...) -> None: ... optional_int32: int optional_int64: int optional_uint32: int optional_uint64: int optional_sint32: int optional_sint64: int optional_fixed32: int optional_fixed64: int optional_sfixed32: int optional_sfixed64: int optional_float: float optional_double: float optional_bool: bool optional_string: Text optional_bytes: bytes optional_nested_enum: TestAllTypesProto2.NestedEnum optional_foreign_enum: ForeignEnumProto2 optional_string_piece: Text optional_cord: Text repeated_int32: RepeatedScalarFieldContainer[int] repeated_int64: RepeatedScalarFieldContainer[int] repeated_uint32: RepeatedScalarFieldContainer[int] repeated_uint64: RepeatedScalarFieldContainer[int] repeated_sint32: RepeatedScalarFieldContainer[int] repeated_sint64: RepeatedScalarFieldContainer[int] repeated_fixed32: RepeatedScalarFieldContainer[int] repeated_fixed64: RepeatedScalarFieldContainer[int] repeated_sfixed32: RepeatedScalarFieldContainer[int] repeated_sfixed64: RepeatedScalarFieldContainer[int] repeated_float: RepeatedScalarFieldContainer[float] repeated_double: RepeatedScalarFieldContainer[float] repeated_bool: RepeatedScalarFieldContainer[bool] repeated_string: RepeatedScalarFieldContainer[Text] repeated_bytes: RepeatedScalarFieldContainer[bytes] repeated_nested_enum: RepeatedScalarFieldContainer[TestAllTypesProto2.NestedEnum] repeated_foreign_enum: 
RepeatedScalarFieldContainer[ForeignEnumProto2] repeated_string_piece: RepeatedScalarFieldContainer[Text] repeated_cord: RepeatedScalarFieldContainer[Text] oneof_uint32: int oneof_string: Text oneof_bytes: bytes oneof_bool: bool oneof_uint64: int oneof_float: float oneof_double: float oneof_enum: TestAllTypesProto2.NestedEnum fieldname1: int field_name2: int _field_name3: int field__name4_: int field0name5: int field_0_name6: int fieldName7: int FieldName8: int field_Name9: int Field_Name10: int FIELD_NAME11: int FIELD_name12: int __field_name13: int __Field_name14: int field__name15: int field__Name16: int field_name17__: int Field_name18__: int @property def optional_nested_message(self) -> TestAllTypesProto2.NestedMessage: ... @property def optional_foreign_message(self) -> ForeignMessageProto2: ... @property def recursive_message(self) -> TestAllTypesProto2: ... @property def repeated_nested_message(self) -> RepeatedCompositeFieldContainer[TestAllTypesProto2.NestedMessage]: ... @property def repeated_foreign_message(self) -> RepeatedCompositeFieldContainer[ForeignMessageProto2]: ... @property def map_int32_int32(self) -> MutableMapping[int, int]: ... @property def map_int64_int64(self) -> MutableMapping[int, int]: ... @property def map_uint32_uint32(self) -> MutableMapping[int, int]: ... @property def map_uint64_uint64(self) -> MutableMapping[int, int]: ... @property def map_sint32_sint32(self) -> MutableMapping[int, int]: ... @property def map_sint64_sint64(self) -> MutableMapping[int, int]: ... @property def map_fixed32_fixed32(self) -> MutableMapping[int, int]: ... @property def map_fixed64_fixed64(self) -> MutableMapping[int, int]: ... @property def map_sfixed32_sfixed32(self) -> MutableMapping[int, int]: ... @property def map_sfixed64_sfixed64(self) -> MutableMapping[int, int]: ... @property def map_int32_float(self) -> MutableMapping[int, float]: ... @property def map_int32_double(self) -> MutableMapping[int, float]: ... 
@property def map_bool_bool(self) -> MutableMapping[bool, bool]: ... @property def map_string_string(self) -> MutableMapping[Text, Text]: ... @property def map_string_bytes(self) -> MutableMapping[Text, bytes]: ... @property def map_string_nested_message(self) -> MutableMapping[Text, TestAllTypesProto2.NestedMessage]: ... @property def map_string_foreign_message(self) -> MutableMapping[Text, ForeignMessageProto2]: ... @property def map_string_nested_enum(self) -> MutableMapping[Text, TestAllTypesProto2.NestedEnum]: ... @property def map_string_foreign_enum(self) -> MutableMapping[Text, ForeignEnumProto2]: ... @property def oneof_nested_message(self) -> TestAllTypesProto2.NestedMessage: ... @property def data(self) -> TestAllTypesProto2.Data: ... def __init__( self, optional_int32: Optional[int] = ..., optional_int64: Optional[int] = ..., optional_uint32: Optional[int] = ..., optional_uint64: Optional[int] = ..., optional_sint32: Optional[int] = ..., optional_sint64: Optional[int] = ..., optional_fixed32: Optional[int] = ..., optional_fixed64: Optional[int] = ..., optional_sfixed32: Optional[int] = ..., optional_sfixed64: Optional[int] = ..., optional_float: Optional[float] = ..., optional_double: Optional[float] = ..., optional_bool: Optional[bool] = ..., optional_string: Optional[Text] = ..., optional_bytes: Optional[bytes] = ..., optional_nested_message: Optional[TestAllTypesProto2.NestedMessage] = ..., optional_foreign_message: Optional[ForeignMessageProto2] = ..., optional_nested_enum: Optional[TestAllTypesProto2.NestedEnum] = ..., optional_foreign_enum: Optional[ForeignEnumProto2] = ..., optional_string_piece: Optional[Text] = ..., optional_cord: Optional[Text] = ..., recursive_message: Optional[TestAllTypesProto2] = ..., repeated_int32: Optional[Iterable[int]] = ..., repeated_int64: Optional[Iterable[int]] = ..., repeated_uint32: Optional[Iterable[int]] = ..., repeated_uint64: Optional[Iterable[int]] = ..., repeated_sint32: Optional[Iterable[int]] = ..., 
repeated_sint64: Optional[Iterable[int]] = ..., repeated_fixed32: Optional[Iterable[int]] = ..., repeated_fixed64: Optional[Iterable[int]] = ..., repeated_sfixed32: Optional[Iterable[int]] = ..., repeated_sfixed64: Optional[Iterable[int]] = ..., repeated_float: Optional[Iterable[float]] = ..., repeated_double: Optional[Iterable[float]] = ..., repeated_bool: Optional[Iterable[bool]] = ..., repeated_string: Optional[Iterable[Text]] = ..., repeated_bytes: Optional[Iterable[bytes]] = ..., repeated_nested_message: Optional[Iterable[TestAllTypesProto2.NestedMessage]] = ..., repeated_foreign_message: Optional[Iterable[ForeignMessageProto2]] = ..., repeated_nested_enum: Optional[Iterable[TestAllTypesProto2.NestedEnum]] = ..., repeated_foreign_enum: Optional[Iterable[ForeignEnumProto2]] = ..., repeated_string_piece: Optional[Iterable[Text]] = ..., repeated_cord: Optional[Iterable[Text]] = ..., map_int32_int32: Optional[Mapping[int, int]] = ..., map_int64_int64: Optional[Mapping[int, int]] = ..., map_uint32_uint32: Optional[Mapping[int, int]] = ..., map_uint64_uint64: Optional[Mapping[int, int]] = ..., map_sint32_sint32: Optional[Mapping[int, int]] = ..., map_sint64_sint64: Optional[Mapping[int, int]] = ..., map_fixed32_fixed32: Optional[Mapping[int, int]] = ..., map_fixed64_fixed64: Optional[Mapping[int, int]] = ..., map_sfixed32_sfixed32: Optional[Mapping[int, int]] = ..., map_sfixed64_sfixed64: Optional[Mapping[int, int]] = ..., map_int32_float: Optional[Mapping[int, float]] = ..., map_int32_double: Optional[Mapping[int, float]] = ..., map_bool_bool: Optional[Mapping[bool, bool]] = ..., map_string_string: Optional[Mapping[Text, Text]] = ..., map_string_bytes: Optional[Mapping[Text, bytes]] = ..., map_string_nested_message: Optional[Mapping[Text, TestAllTypesProto2.NestedMessage]] = ..., map_string_foreign_message: Optional[Mapping[Text, ForeignMessageProto2]] = ..., map_string_nested_enum: Optional[Mapping[Text, TestAllTypesProto2.NestedEnum]] = ..., 
map_string_foreign_enum: Optional[Mapping[Text, ForeignEnumProto2]] = ..., oneof_uint32: Optional[int] = ..., oneof_nested_message: Optional[TestAllTypesProto2.NestedMessage] = ..., oneof_string: Optional[Text] = ..., oneof_bytes: Optional[bytes] = ..., oneof_bool: Optional[bool] = ..., oneof_uint64: Optional[int] = ..., oneof_float: Optional[float] = ..., oneof_double: Optional[float] = ..., oneof_enum: Optional[TestAllTypesProto2.NestedEnum] = ..., data: Optional[TestAllTypesProto2.Data] = ..., fieldname1: Optional[int] = ..., field_name2: Optional[int] = ..., _field_name3: Optional[int] = ..., field__name4_: Optional[int] = ..., field0name5: Optional[int] = ..., field_0_name6: Optional[int] = ..., fieldName7: Optional[int] = ..., FieldName8: Optional[int] = ..., field_Name9: Optional[int] = ..., Field_Name10: Optional[int] = ..., FIELD_NAME11: Optional[int] = ..., FIELD_name12: Optional[int] = ..., __field_name13: Optional[int] = ..., __Field_name14: Optional[int] = ..., field__name15: Optional[int] = ..., field__Name16: Optional[int] = ..., field_name17__: Optional[int] = ..., Field_name18__: Optional[int] = ..., ) -> None: ... class ForeignMessageProto2(Message): c: int def __init__(self, c: Optional[int] = ...) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/test_messages_proto3_pb2.pyi0000644€tŠÔÚ€2›s®0000004567013576752252035572 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.any_pb2 import Any from google.protobuf.duration_pb2 import Duration from google.protobuf.field_mask_pb2 import FieldMask from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message from google.protobuf.struct_pb2 import Struct, Value from google.protobuf.timestamp_pb2 import Timestamp from google.protobuf.wrappers_pb2 import ( BoolValue, BytesValue, DoubleValue, FloatValue, Int32Value, Int64Value, StringValue, UInt32Value, UInt64Value, ) from typing import Iterable, List, Mapping, MutableMapping, Optional, Text, Tuple, cast class ForeignEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> ForeignEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[ForeignEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, ForeignEnum]]: ... FOREIGN_FOO: ForeignEnum FOREIGN_BAR: ForeignEnum FOREIGN_BAZ: ForeignEnum class TestAllTypesProto3(Message): class NestedEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestAllTypesProto3.NestedEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestAllTypesProto3.NestedEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, TestAllTypesProto3.NestedEnum]]: ... FOO: TestAllTypesProto3.NestedEnum BAR: TestAllTypesProto3.NestedEnum BAZ: TestAllTypesProto3.NestedEnum NEG: TestAllTypesProto3.NestedEnum class NestedMessage(Message): a: int @property def corecursive(self) -> TestAllTypesProto3: ... def __init__(self, a: Optional[int] = ..., corecursive: Optional[TestAllTypesProto3] = ...) -> None: ... 
class MapInt32Int32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapInt64Int64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapUint32Uint32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapUint64Uint64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSint32Sint32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSint64Sint64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapFixed32Fixed32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapFixed64Fixed64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSfixed32Sfixed32Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapSfixed64Sfixed64Entry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class MapInt32FloatEntry(Message): key: int value: float def __init__(self, key: Optional[int] = ..., value: Optional[float] = ...) -> None: ... class MapInt32DoubleEntry(Message): key: int value: float def __init__(self, key: Optional[int] = ..., value: Optional[float] = ...) -> None: ... class MapBoolBoolEntry(Message): key: bool value: bool def __init__(self, key: Optional[bool] = ..., value: Optional[bool] = ...) -> None: ... 
class MapStringStringEntry(Message): key: Text value: Text def __init__(self, key: Optional[Text] = ..., value: Optional[Text] = ...) -> None: ... class MapStringBytesEntry(Message): key: Text value: bytes def __init__(self, key: Optional[Text] = ..., value: Optional[bytes] = ...) -> None: ... class MapStringNestedMessageEntry(Message): key: Text @property def value(self) -> TestAllTypesProto3.NestedMessage: ... def __init__(self, key: Optional[Text] = ..., value: Optional[TestAllTypesProto3.NestedMessage] = ...) -> None: ... class MapStringForeignMessageEntry(Message): key: Text @property def value(self) -> ForeignMessage: ... def __init__(self, key: Optional[Text] = ..., value: Optional[ForeignMessage] = ...) -> None: ... class MapStringNestedEnumEntry(Message): key: Text value: TestAllTypesProto3.NestedEnum def __init__(self, key: Optional[Text] = ..., value: Optional[TestAllTypesProto3.NestedEnum] = ...) -> None: ... class MapStringForeignEnumEntry(Message): key: Text value: ForeignEnum def __init__(self, key: Optional[Text] = ..., value: Optional[ForeignEnum] = ...) -> None: ... 
optional_int32: int optional_int64: int optional_uint32: int optional_uint64: int optional_sint32: int optional_sint64: int optional_fixed32: int optional_fixed64: int optional_sfixed32: int optional_sfixed64: int optional_float: float optional_double: float optional_bool: bool optional_string: Text optional_bytes: bytes optional_nested_enum: TestAllTypesProto3.NestedEnum optional_foreign_enum: ForeignEnum optional_string_piece: Text optional_cord: Text repeated_int32: RepeatedScalarFieldContainer[int] repeated_int64: RepeatedScalarFieldContainer[int] repeated_uint32: RepeatedScalarFieldContainer[int] repeated_uint64: RepeatedScalarFieldContainer[int] repeated_sint32: RepeatedScalarFieldContainer[int] repeated_sint64: RepeatedScalarFieldContainer[int] repeated_fixed32: RepeatedScalarFieldContainer[int] repeated_fixed64: RepeatedScalarFieldContainer[int] repeated_sfixed32: RepeatedScalarFieldContainer[int] repeated_sfixed64: RepeatedScalarFieldContainer[int] repeated_float: RepeatedScalarFieldContainer[float] repeated_double: RepeatedScalarFieldContainer[float] repeated_bool: RepeatedScalarFieldContainer[bool] repeated_string: RepeatedScalarFieldContainer[Text] repeated_bytes: RepeatedScalarFieldContainer[bytes] repeated_nested_enum: RepeatedScalarFieldContainer[TestAllTypesProto3.NestedEnum] repeated_foreign_enum: RepeatedScalarFieldContainer[ForeignEnum] repeated_string_piece: RepeatedScalarFieldContainer[Text] repeated_cord: RepeatedScalarFieldContainer[Text] oneof_uint32: int oneof_string: Text oneof_bytes: bytes oneof_bool: bool oneof_uint64: int oneof_float: float oneof_double: float oneof_enum: TestAllTypesProto3.NestedEnum fieldname1: int field_name2: int _field_name3: int field__name4_: int field0name5: int field_0_name6: int fieldName7: int FieldName8: int field_Name9: int Field_Name10: int FIELD_NAME11: int FIELD_name12: int __field_name13: int __Field_name14: int field__name15: int field__Name16: int field_name17__: int Field_name18__: int @property def 
optional_nested_message(self) -> TestAllTypesProto3.NestedMessage: ... @property def optional_foreign_message(self) -> ForeignMessage: ... @property def recursive_message(self) -> TestAllTypesProto3: ... @property def repeated_nested_message(self) -> RepeatedCompositeFieldContainer[TestAllTypesProto3.NestedMessage]: ... @property def repeated_foreign_message(self) -> RepeatedCompositeFieldContainer[ForeignMessage]: ... @property def map_int32_int32(self) -> MutableMapping[int, int]: ... @property def map_int64_int64(self) -> MutableMapping[int, int]: ... @property def map_uint32_uint32(self) -> MutableMapping[int, int]: ... @property def map_uint64_uint64(self) -> MutableMapping[int, int]: ... @property def map_sint32_sint32(self) -> MutableMapping[int, int]: ... @property def map_sint64_sint64(self) -> MutableMapping[int, int]: ... @property def map_fixed32_fixed32(self) -> MutableMapping[int, int]: ... @property def map_fixed64_fixed64(self) -> MutableMapping[int, int]: ... @property def map_sfixed32_sfixed32(self) -> MutableMapping[int, int]: ... @property def map_sfixed64_sfixed64(self) -> MutableMapping[int, int]: ... @property def map_int32_float(self) -> MutableMapping[int, float]: ... @property def map_int32_double(self) -> MutableMapping[int, float]: ... @property def map_bool_bool(self) -> MutableMapping[bool, bool]: ... @property def map_string_string(self) -> MutableMapping[Text, Text]: ... @property def map_string_bytes(self) -> MutableMapping[Text, bytes]: ... @property def map_string_nested_message(self) -> MutableMapping[Text, TestAllTypesProto3.NestedMessage]: ... @property def map_string_foreign_message(self) -> MutableMapping[Text, ForeignMessage]: ... @property def map_string_nested_enum(self) -> MutableMapping[Text, TestAllTypesProto3.NestedEnum]: ... @property def map_string_foreign_enum(self) -> MutableMapping[Text, ForeignEnum]: ... @property def oneof_nested_message(self) -> TestAllTypesProto3.NestedMessage: ... 
@property def optional_bool_wrapper(self) -> BoolValue: ... @property def optional_int32_wrapper(self) -> Int32Value: ... @property def optional_int64_wrapper(self) -> Int64Value: ... @property def optional_uint32_wrapper(self) -> UInt32Value: ... @property def optional_uint64_wrapper(self) -> UInt64Value: ... @property def optional_float_wrapper(self) -> FloatValue: ... @property def optional_double_wrapper(self) -> DoubleValue: ... @property def optional_string_wrapper(self) -> StringValue: ... @property def optional_bytes_wrapper(self) -> BytesValue: ... @property def repeated_bool_wrapper(self) -> RepeatedCompositeFieldContainer[BoolValue]: ... @property def repeated_int32_wrapper(self) -> RepeatedCompositeFieldContainer[Int32Value]: ... @property def repeated_int64_wrapper(self) -> RepeatedCompositeFieldContainer[Int64Value]: ... @property def repeated_uint32_wrapper(self) -> RepeatedCompositeFieldContainer[UInt32Value]: ... @property def repeated_uint64_wrapper(self) -> RepeatedCompositeFieldContainer[UInt64Value]: ... @property def repeated_float_wrapper(self) -> RepeatedCompositeFieldContainer[FloatValue]: ... @property def repeated_double_wrapper(self) -> RepeatedCompositeFieldContainer[DoubleValue]: ... @property def repeated_string_wrapper(self) -> RepeatedCompositeFieldContainer[StringValue]: ... @property def repeated_bytes_wrapper(self) -> RepeatedCompositeFieldContainer[BytesValue]: ... @property def optional_duration(self) -> Duration: ... @property def optional_timestamp(self) -> Timestamp: ... @property def optional_field_mask(self) -> FieldMask: ... @property def optional_struct(self) -> Struct: ... @property def optional_any(self) -> Any: ... @property def optional_value(self) -> Value: ... @property def repeated_duration(self) -> RepeatedCompositeFieldContainer[Duration]: ... @property def repeated_timestamp(self) -> RepeatedCompositeFieldContainer[Timestamp]: ... 
@property def repeated_fieldmask(self) -> RepeatedCompositeFieldContainer[FieldMask]: ... @property def repeated_struct(self) -> RepeatedCompositeFieldContainer[Struct]: ... @property def repeated_any(self) -> RepeatedCompositeFieldContainer[Any]: ... @property def repeated_value(self) -> RepeatedCompositeFieldContainer[Value]: ... def __init__( self, optional_int32: Optional[int] = ..., optional_int64: Optional[int] = ..., optional_uint32: Optional[int] = ..., optional_uint64: Optional[int] = ..., optional_sint32: Optional[int] = ..., optional_sint64: Optional[int] = ..., optional_fixed32: Optional[int] = ..., optional_fixed64: Optional[int] = ..., optional_sfixed32: Optional[int] = ..., optional_sfixed64: Optional[int] = ..., optional_float: Optional[float] = ..., optional_double: Optional[float] = ..., optional_bool: Optional[bool] = ..., optional_string: Optional[Text] = ..., optional_bytes: Optional[bytes] = ..., optional_nested_message: Optional[TestAllTypesProto3.NestedMessage] = ..., optional_foreign_message: Optional[ForeignMessage] = ..., optional_nested_enum: Optional[TestAllTypesProto3.NestedEnum] = ..., optional_foreign_enum: Optional[ForeignEnum] = ..., optional_string_piece: Optional[Text] = ..., optional_cord: Optional[Text] = ..., recursive_message: Optional[TestAllTypesProto3] = ..., repeated_int32: Optional[Iterable[int]] = ..., repeated_int64: Optional[Iterable[int]] = ..., repeated_uint32: Optional[Iterable[int]] = ..., repeated_uint64: Optional[Iterable[int]] = ..., repeated_sint32: Optional[Iterable[int]] = ..., repeated_sint64: Optional[Iterable[int]] = ..., repeated_fixed32: Optional[Iterable[int]] = ..., repeated_fixed64: Optional[Iterable[int]] = ..., repeated_sfixed32: Optional[Iterable[int]] = ..., repeated_sfixed64: Optional[Iterable[int]] = ..., repeated_float: Optional[Iterable[float]] = ..., repeated_double: Optional[Iterable[float]] = ..., repeated_bool: Optional[Iterable[bool]] = ..., repeated_string: Optional[Iterable[Text]] = 
..., repeated_bytes: Optional[Iterable[bytes]] = ..., repeated_nested_message: Optional[Iterable[TestAllTypesProto3.NestedMessage]] = ..., repeated_foreign_message: Optional[Iterable[ForeignMessage]] = ..., repeated_nested_enum: Optional[Iterable[TestAllTypesProto3.NestedEnum]] = ..., repeated_foreign_enum: Optional[Iterable[ForeignEnum]] = ..., repeated_string_piece: Optional[Iterable[Text]] = ..., repeated_cord: Optional[Iterable[Text]] = ..., map_int32_int32: Optional[Mapping[int, int]] = ..., map_int64_int64: Optional[Mapping[int, int]] = ..., map_uint32_uint32: Optional[Mapping[int, int]] = ..., map_uint64_uint64: Optional[Mapping[int, int]] = ..., map_sint32_sint32: Optional[Mapping[int, int]] = ..., map_sint64_sint64: Optional[Mapping[int, int]] = ..., map_fixed32_fixed32: Optional[Mapping[int, int]] = ..., map_fixed64_fixed64: Optional[Mapping[int, int]] = ..., map_sfixed32_sfixed32: Optional[Mapping[int, int]] = ..., map_sfixed64_sfixed64: Optional[Mapping[int, int]] = ..., map_int32_float: Optional[Mapping[int, float]] = ..., map_int32_double: Optional[Mapping[int, float]] = ..., map_bool_bool: Optional[Mapping[bool, bool]] = ..., map_string_string: Optional[Mapping[Text, Text]] = ..., map_string_bytes: Optional[Mapping[Text, bytes]] = ..., map_string_nested_message: Optional[Mapping[Text, TestAllTypesProto3.NestedMessage]] = ..., map_string_foreign_message: Optional[Mapping[Text, ForeignMessage]] = ..., map_string_nested_enum: Optional[Mapping[Text, TestAllTypesProto3.NestedEnum]] = ..., map_string_foreign_enum: Optional[Mapping[Text, ForeignEnum]] = ..., oneof_uint32: Optional[int] = ..., oneof_nested_message: Optional[TestAllTypesProto3.NestedMessage] = ..., oneof_string: Optional[Text] = ..., oneof_bytes: Optional[bytes] = ..., oneof_bool: Optional[bool] = ..., oneof_uint64: Optional[int] = ..., oneof_float: Optional[float] = ..., oneof_double: Optional[float] = ..., oneof_enum: Optional[TestAllTypesProto3.NestedEnum] = ..., optional_bool_wrapper: 
Optional[BoolValue] = ..., optional_int32_wrapper: Optional[Int32Value] = ..., optional_int64_wrapper: Optional[Int64Value] = ..., optional_uint32_wrapper: Optional[UInt32Value] = ..., optional_uint64_wrapper: Optional[UInt64Value] = ..., optional_float_wrapper: Optional[FloatValue] = ..., optional_double_wrapper: Optional[DoubleValue] = ..., optional_string_wrapper: Optional[StringValue] = ..., optional_bytes_wrapper: Optional[BytesValue] = ..., repeated_bool_wrapper: Optional[Iterable[BoolValue]] = ..., repeated_int32_wrapper: Optional[Iterable[Int32Value]] = ..., repeated_int64_wrapper: Optional[Iterable[Int64Value]] = ..., repeated_uint32_wrapper: Optional[Iterable[UInt32Value]] = ..., repeated_uint64_wrapper: Optional[Iterable[UInt64Value]] = ..., repeated_float_wrapper: Optional[Iterable[FloatValue]] = ..., repeated_double_wrapper: Optional[Iterable[DoubleValue]] = ..., repeated_string_wrapper: Optional[Iterable[StringValue]] = ..., repeated_bytes_wrapper: Optional[Iterable[BytesValue]] = ..., optional_duration: Optional[Duration] = ..., optional_timestamp: Optional[Timestamp] = ..., optional_field_mask: Optional[FieldMask] = ..., optional_struct: Optional[Struct] = ..., optional_any: Optional[Any] = ..., optional_value: Optional[Value] = ..., repeated_duration: Optional[Iterable[Duration]] = ..., repeated_timestamp: Optional[Iterable[Timestamp]] = ..., repeated_fieldmask: Optional[Iterable[FieldMask]] = ..., repeated_struct: Optional[Iterable[Struct]] = ..., repeated_any: Optional[Iterable[Any]] = ..., repeated_value: Optional[Iterable[Value]] = ..., fieldname1: Optional[int] = ..., field_name2: Optional[int] = ..., _field_name3: Optional[int] = ..., field__name4_: Optional[int] = ..., field0name5: Optional[int] = ..., field_0_name6: Optional[int] = ..., fieldName7: Optional[int] = ..., FieldName8: Optional[int] = ..., field_Name9: Optional[int] = ..., Field_Name10: Optional[int] = ..., FIELD_NAME11: Optional[int] = ..., FIELD_name12: Optional[int] = ..., 
__field_name13: Optional[int] = ..., __Field_name14: Optional[int] = ..., field__name15: Optional[int] = ..., field__Name16: Optional[int] = ..., field_name17__: Optional[int] = ..., Field_name18__: Optional[int] = ..., ) -> None: ... class ForeignMessage(Message): c: int def __init__(self, c: Optional[int] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/timestamp_pb2.pyi0000644€tŠÔÚ€2›s®0000000046413576752252033411 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from google.protobuf.internal import well_known_types from typing import Optional class Timestamp(Message, well_known_types.Timestamp): seconds: int nanos: int def __init__(self, seconds: Optional[int] = ..., nanos: Optional[int] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/type_pb2.pyi0000644€tŠÔÚ€2›s®0000001111313576752252032360 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.any_pb2 import Any from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message from google.protobuf.source_context_pb2 import SourceContext from typing import Iterable, List, Optional, Text, Tuple, cast class Syntax(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> Syntax: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[Syntax]: ... @classmethod def items(cls) -> List[Tuple[bytes, Syntax]]: ... SYNTAX_PROTO2: Syntax SYNTAX_PROTO3: Syntax class Type(Message): name: Text oneofs: RepeatedScalarFieldContainer[Text] syntax: Syntax @property def fields(self) -> RepeatedCompositeFieldContainer[Field]: ... @property def options(self) -> RepeatedCompositeFieldContainer[Option]: ... @property def source_context(self) -> SourceContext: ... 
def __init__( self, name: Optional[Text] = ..., fields: Optional[Iterable[Field]] = ..., oneofs: Optional[Iterable[Text]] = ..., options: Optional[Iterable[Option]] = ..., source_context: Optional[SourceContext] = ..., syntax: Optional[Syntax] = ..., ) -> None: ... class Field(Message): class Kind(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> Field.Kind: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[Field.Kind]: ... @classmethod def items(cls) -> List[Tuple[bytes, Field.Kind]]: ... TYPE_UNKNOWN: Field.Kind TYPE_DOUBLE: Field.Kind TYPE_FLOAT: Field.Kind TYPE_INT64: Field.Kind TYPE_UINT64: Field.Kind TYPE_INT32: Field.Kind TYPE_FIXED64: Field.Kind TYPE_FIXED32: Field.Kind TYPE_BOOL: Field.Kind TYPE_STRING: Field.Kind TYPE_GROUP: Field.Kind TYPE_MESSAGE: Field.Kind TYPE_BYTES: Field.Kind TYPE_UINT32: Field.Kind TYPE_ENUM: Field.Kind TYPE_SFIXED32: Field.Kind TYPE_SFIXED64: Field.Kind TYPE_SINT32: Field.Kind TYPE_SINT64: Field.Kind class Cardinality(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> Field.Cardinality: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[Field.Cardinality]: ... @classmethod def items(cls) -> List[Tuple[bytes, Field.Cardinality]]: ... CARDINALITY_UNKNOWN: Field.Cardinality CARDINALITY_OPTIONAL: Field.Cardinality CARDINALITY_REQUIRED: Field.Cardinality CARDINALITY_REPEATED: Field.Cardinality kind: Field.Kind cardinality: Field.Cardinality number: int name: Text type_url: Text oneof_index: int packed: bool json_name: Text default_value: Text @property def options(self) -> RepeatedCompositeFieldContainer[Option]: ... 
def __init__( self, kind: Optional[Field.Kind] = ..., cardinality: Optional[Field.Cardinality] = ..., number: Optional[int] = ..., name: Optional[Text] = ..., type_url: Optional[Text] = ..., oneof_index: Optional[int] = ..., packed: Optional[bool] = ..., options: Optional[Iterable[Option]] = ..., json_name: Optional[Text] = ..., default_value: Optional[Text] = ..., ) -> None: ... class Enum(Message): name: Text syntax: Syntax @property def enumvalue(self) -> RepeatedCompositeFieldContainer[EnumValue]: ... @property def options(self) -> RepeatedCompositeFieldContainer[Option]: ... @property def source_context(self) -> SourceContext: ... def __init__( self, name: Optional[Text] = ..., enumvalue: Optional[Iterable[EnumValue]] = ..., options: Optional[Iterable[Option]] = ..., source_context: Optional[SourceContext] = ..., syntax: Optional[Syntax] = ..., ) -> None: ... class EnumValue(Message): name: Text number: int @property def options(self) -> RepeatedCompositeFieldContainer[Option]: ... def __init__( self, name: Optional[Text] = ..., number: Optional[int] = ..., options: Optional[Iterable[Option]] = ... ) -> None: ... class Option(Message): name: Text @property def value(self) -> Any: ... def __init__(self, name: Optional[Text] = ..., value: Optional[Any] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_arena_pb2.pyi0000644€tŠÔÚ€2›s®0000000150313576752252034426 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedCompositeFieldContainer from google.protobuf.message import Message from google.protobuf.unittest_no_arena_import_pb2 import ImportNoArenaNestedMessage from typing import Iterable, Optional class NestedMessage(Message): d: int def __init__(self, d: Optional[int] = ...) -> None: ... class ArenaMessage(Message): @property def repeated_nested_message(self) -> RepeatedCompositeFieldContainer[NestedMessage]: ... 
@property def repeated_import_no_arena_message(self) -> RepeatedCompositeFieldContainer[ImportNoArenaNestedMessage]: ... def __init__( self, repeated_nested_message: Optional[Iterable[NestedMessage]] = ..., repeated_import_no_arena_message: Optional[Iterable[ImportNoArenaNestedMessage]] = ..., ) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_custom_options_pb2.pyi0000644€tŠÔÚ€2›s®0000001703013576752252036427 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.descriptor_pb2 import FileOptions from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message from typing import Iterable, List, Optional, Text, Tuple, cast class MethodOpt1(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> MethodOpt1: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[MethodOpt1]: ... @classmethod def items(cls) -> List[Tuple[bytes, MethodOpt1]]: ... METHODOPT1_VAL1: MethodOpt1 METHODOPT1_VAL2: MethodOpt1 class AggregateEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> AggregateEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[AggregateEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, AggregateEnum]]: ... VALUE: AggregateEnum class TestMessageWithCustomOptions(Message): class AnEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestMessageWithCustomOptions.AnEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestMessageWithCustomOptions.AnEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, TestMessageWithCustomOptions.AnEnum]]: ... 
ANENUM_VAL1: TestMessageWithCustomOptions.AnEnum ANENUM_VAL2: TestMessageWithCustomOptions.AnEnum field1: Text oneof_field: int def __init__(self, field1: Optional[Text] = ..., oneof_field: Optional[int] = ...) -> None: ... class CustomOptionFooRequest(Message): def __init__(self,) -> None: ... class CustomOptionFooResponse(Message): def __init__(self,) -> None: ... class CustomOptionFooClientMessage(Message): def __init__(self,) -> None: ... class CustomOptionFooServerMessage(Message): def __init__(self,) -> None: ... class DummyMessageContainingEnum(Message): class TestEnumType(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> DummyMessageContainingEnum.TestEnumType: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[DummyMessageContainingEnum.TestEnumType]: ... @classmethod def items(cls) -> List[Tuple[bytes, DummyMessageContainingEnum.TestEnumType]]: ... TEST_OPTION_ENUM_TYPE1: DummyMessageContainingEnum.TestEnumType TEST_OPTION_ENUM_TYPE2: DummyMessageContainingEnum.TestEnumType def __init__(self,) -> None: ... class DummyMessageInvalidAsOptionType(Message): def __init__(self,) -> None: ... class CustomOptionMinIntegerValues(Message): def __init__(self,) -> None: ... class CustomOptionMaxIntegerValues(Message): def __init__(self,) -> None: ... class CustomOptionOtherValues(Message): def __init__(self,) -> None: ... class SettingRealsFromPositiveInts(Message): def __init__(self,) -> None: ... class SettingRealsFromNegativeInts(Message): def __init__(self,) -> None: ... class ComplexOptionType1(Message): foo: int foo2: int foo3: int foo4: RepeatedScalarFieldContainer[int] def __init__( self, foo: Optional[int] = ..., foo2: Optional[int] = ..., foo3: Optional[int] = ..., foo4: Optional[Iterable[int]] = ... ) -> None: ... class ComplexOptionType2(Message): class ComplexOptionType4(Message): waldo: int def __init__(self, waldo: Optional[int] = ...) -> None: ... 
baz: int @property def bar(self) -> ComplexOptionType1: ... @property def fred(self) -> ComplexOptionType2.ComplexOptionType4: ... @property def barney(self) -> RepeatedCompositeFieldContainer[ComplexOptionType2.ComplexOptionType4]: ... def __init__( self, bar: Optional[ComplexOptionType1] = ..., baz: Optional[int] = ..., fred: Optional[ComplexOptionType2.ComplexOptionType4] = ..., barney: Optional[Iterable[ComplexOptionType2.ComplexOptionType4]] = ..., ) -> None: ... class ComplexOptionType3(Message): class ComplexOptionType5(Message): plugh: int def __init__(self, plugh: Optional[int] = ...) -> None: ... qux: int @property def complexoptiontype5(self) -> ComplexOptionType3.ComplexOptionType5: ... def __init__( self, qux: Optional[int] = ..., complexoptiontype5: Optional[ComplexOptionType3.ComplexOptionType5] = ... ) -> None: ... class ComplexOpt6(Message): xyzzy: int def __init__(self, xyzzy: Optional[int] = ...) -> None: ... class VariousComplexOptions(Message): def __init__(self,) -> None: ... class AggregateMessageSet(Message): def __init__(self,) -> None: ... class AggregateMessageSetElement(Message): s: Text def __init__(self, s: Optional[Text] = ...) -> None: ... class Aggregate(Message): i: int s: Text @property def sub(self) -> Aggregate: ... @property def file(self) -> FileOptions: ... @property def mset(self) -> AggregateMessageSet: ... def __init__( self, i: Optional[int] = ..., s: Optional[Text] = ..., sub: Optional[Aggregate] = ..., file: Optional[FileOptions] = ..., mset: Optional[AggregateMessageSet] = ..., ) -> None: ... class AggregateMessage(Message): fieldname: int def __init__(self, fieldname: Optional[int] = ...) -> None: ... class NestedOptionType(Message): class NestedEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> NestedOptionType.NestedEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[NestedOptionType.NestedEnum]: ... 
@classmethod def items(cls) -> List[Tuple[bytes, NestedOptionType.NestedEnum]]: ... NESTED_ENUM_VALUE: NestedOptionType.NestedEnum class NestedMessage(Message): nested_field: int def __init__(self, nested_field: Optional[int] = ...) -> None: ... def __init__(self,) -> None: ... class OldOptionType(Message): class TestEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> OldOptionType.TestEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[OldOptionType.TestEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, OldOptionType.TestEnum]]: ... OLD_VALUE: OldOptionType.TestEnum value: OldOptionType.TestEnum def __init__(self, value: OldOptionType.TestEnum) -> None: ... class NewOptionType(Message): class TestEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> NewOptionType.TestEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[NewOptionType.TestEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, NewOptionType.TestEnum]]: ... OLD_VALUE: NewOptionType.TestEnum NEW_VALUE: NewOptionType.TestEnum value: NewOptionType.TestEnum def __init__(self, value: NewOptionType.TestEnum) -> None: ... class TestMessageWithRequiredEnumOption(Message): def __init__(self,) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_import_pb2.pyi0000644€tŠÔÚ€2›s®0000000203213576752252034650 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from typing import List, Optional, Tuple, cast class ImportEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> ImportEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[ImportEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, ImportEnum]]: ... 
IMPORT_FOO: ImportEnum IMPORT_BAR: ImportEnum IMPORT_BAZ: ImportEnum class ImportEnumForMap(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> ImportEnumForMap: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[ImportEnumForMap]: ... @classmethod def items(cls) -> List[Tuple[bytes, ImportEnumForMap]]: ... UNKNOWN: ImportEnumForMap FOO: ImportEnumForMap BAR: ImportEnumForMap class ImportMessage(Message): d: int def __init__(self, d: Optional[int] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_import_public_pb2.pyi0000644€tŠÔÚ€2›s®0000000026413576752252036213 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from typing import Optional class PublicImportMessage(Message): e: int def __init__(self, e: Optional[int] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_mset_pb2.pyi0000644€tŠÔÚ€2›s®0000000200313576752252034304 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedCompositeFieldContainer from google.protobuf.message import Message from google.protobuf.unittest_mset_wire_format_pb2 import TestMessageSet import builtins from typing import Iterable, Optional, Text class TestMessageSetContainer(Message): @property def message_set(self) -> TestMessageSet: ... def __init__(self, message_set: Optional[TestMessageSet] = ...) -> None: ... class TestMessageSetExtension1(Message): i: int def __init__(self, i: Optional[int] = ...) -> None: ... class TestMessageSetExtension2(Message): str: Text def __init__(self, bytes: Optional[Text] = ...) -> None: ... class RawMessageSet(Message): class Item(Message): type_id: int message: bytes def __init__(self, type_id: int, message: bytes) -> None: ... @property def item(self) -> RepeatedCompositeFieldContainer[RawMessageSet.Item]: ... 
def __init__(self, item: Optional[Iterable[RawMessageSet.Item]] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_mset_wire_format_pb2.pyi0000644€tŠÔÚ€2›s®0000000052013576752252036704 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from typing import Optional class TestMessageSet(Message): def __init__(self,) -> None: ... class TestMessageSetWireFormatContainer(Message): @property def message_set(self) -> TestMessageSet: ... def __init__(self, message_set: Optional[TestMessageSet] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_no_arena_import_pb2.pyi0000644€tŠÔÚ€2›s®0000000027313576752252036517 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from typing import Optional class ImportNoArenaNestedMessage(Message): d: int def __init__(self, d: Optional[int] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_no_arena_pb2.pyi0000644€tŠÔÚ€2›s®0000002327513576752252035134 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message from google.protobuf.unittest_arena_pb2 import ArenaMessage from google.protobuf.unittest_import_pb2 import ImportEnum, ImportMessage from google.protobuf.unittest_import_public_pb2 import PublicImportMessage from typing import Iterable, List, Optional, Text, Tuple, cast class ForeignEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> ForeignEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[ForeignEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, ForeignEnum]]: ... 
FOREIGN_FOO: ForeignEnum FOREIGN_BAR: ForeignEnum FOREIGN_BAZ: ForeignEnum class TestAllTypes(Message): class NestedEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestAllTypes.NestedEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestAllTypes.NestedEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, TestAllTypes.NestedEnum]]: ... FOO: TestAllTypes.NestedEnum BAR: TestAllTypes.NestedEnum BAZ: TestAllTypes.NestedEnum NEG: TestAllTypes.NestedEnum class NestedMessage(Message): bb: int def __init__(self, bb: Optional[int] = ...) -> None: ... class OptionalGroup(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... class RepeatedGroup(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... optional_int32: int optional_int64: int optional_uint32: int optional_uint64: int optional_sint32: int optional_sint64: int optional_fixed32: int optional_fixed64: int optional_sfixed32: int optional_sfixed64: int optional_float: float optional_double: float optional_bool: bool optional_string: Text optional_bytes: bytes optional_nested_enum: TestAllTypes.NestedEnum optional_foreign_enum: ForeignEnum optional_import_enum: ImportEnum optional_string_piece: Text optional_cord: Text repeated_int32: RepeatedScalarFieldContainer[int] repeated_int64: RepeatedScalarFieldContainer[int] repeated_uint32: RepeatedScalarFieldContainer[int] repeated_uint64: RepeatedScalarFieldContainer[int] repeated_sint32: RepeatedScalarFieldContainer[int] repeated_sint64: RepeatedScalarFieldContainer[int] repeated_fixed32: RepeatedScalarFieldContainer[int] repeated_fixed64: RepeatedScalarFieldContainer[int] repeated_sfixed32: RepeatedScalarFieldContainer[int] repeated_sfixed64: RepeatedScalarFieldContainer[int] repeated_float: RepeatedScalarFieldContainer[float] repeated_double: RepeatedScalarFieldContainer[float] repeated_bool: RepeatedScalarFieldContainer[bool] 
repeated_string: RepeatedScalarFieldContainer[Text] repeated_bytes: RepeatedScalarFieldContainer[bytes] repeated_nested_enum: RepeatedScalarFieldContainer[TestAllTypes.NestedEnum] repeated_foreign_enum: RepeatedScalarFieldContainer[ForeignEnum] repeated_import_enum: RepeatedScalarFieldContainer[ImportEnum] repeated_string_piece: RepeatedScalarFieldContainer[Text] repeated_cord: RepeatedScalarFieldContainer[Text] default_int32: int default_int64: int default_uint32: int default_uint64: int default_sint32: int default_sint64: int default_fixed32: int default_fixed64: int default_sfixed32: int default_sfixed64: int default_float: float default_double: float default_bool: bool default_string: Text default_bytes: bytes default_nested_enum: TestAllTypes.NestedEnum default_foreign_enum: ForeignEnum default_import_enum: ImportEnum default_string_piece: Text default_cord: Text oneof_uint32: int oneof_string: Text oneof_bytes: bytes @property def optionalgroup(self) -> TestAllTypes.OptionalGroup: ... @property def optional_nested_message(self) -> TestAllTypes.NestedMessage: ... @property def optional_foreign_message(self) -> ForeignMessage: ... @property def optional_import_message(self) -> ImportMessage: ... @property def optional_public_import_message(self) -> PublicImportMessage: ... @property def optional_message(self) -> TestAllTypes.NestedMessage: ... @property def repeatedgroup(self) -> RepeatedCompositeFieldContainer[TestAllTypes.RepeatedGroup]: ... @property def repeated_nested_message(self) -> RepeatedCompositeFieldContainer[TestAllTypes.NestedMessage]: ... @property def repeated_foreign_message(self) -> RepeatedCompositeFieldContainer[ForeignMessage]: ... @property def repeated_import_message(self) -> RepeatedCompositeFieldContainer[ImportMessage]: ... @property def repeated_lazy_message(self) -> RepeatedCompositeFieldContainer[TestAllTypes.NestedMessage]: ... @property def oneof_nested_message(self) -> TestAllTypes.NestedMessage: ... 
@property def lazy_oneof_nested_message(self) -> TestAllTypes.NestedMessage: ... def __init__( self, optional_int32: Optional[int] = ..., optional_int64: Optional[int] = ..., optional_uint32: Optional[int] = ..., optional_uint64: Optional[int] = ..., optional_sint32: Optional[int] = ..., optional_sint64: Optional[int] = ..., optional_fixed32: Optional[int] = ..., optional_fixed64: Optional[int] = ..., optional_sfixed32: Optional[int] = ..., optional_sfixed64: Optional[int] = ..., optional_float: Optional[float] = ..., optional_double: Optional[float] = ..., optional_bool: Optional[bool] = ..., optional_string: Optional[Text] = ..., optional_bytes: Optional[bytes] = ..., optionalgroup: Optional[TestAllTypes.OptionalGroup] = ..., optional_nested_message: Optional[TestAllTypes.NestedMessage] = ..., optional_foreign_message: Optional[ForeignMessage] = ..., optional_import_message: Optional[ImportMessage] = ..., optional_nested_enum: Optional[TestAllTypes.NestedEnum] = ..., optional_foreign_enum: Optional[ForeignEnum] = ..., optional_import_enum: Optional[ImportEnum] = ..., optional_string_piece: Optional[Text] = ..., optional_cord: Optional[Text] = ..., optional_public_import_message: Optional[PublicImportMessage] = ..., optional_message: Optional[TestAllTypes.NestedMessage] = ..., repeated_int32: Optional[Iterable[int]] = ..., repeated_int64: Optional[Iterable[int]] = ..., repeated_uint32: Optional[Iterable[int]] = ..., repeated_uint64: Optional[Iterable[int]] = ..., repeated_sint32: Optional[Iterable[int]] = ..., repeated_sint64: Optional[Iterable[int]] = ..., repeated_fixed32: Optional[Iterable[int]] = ..., repeated_fixed64: Optional[Iterable[int]] = ..., repeated_sfixed32: Optional[Iterable[int]] = ..., repeated_sfixed64: Optional[Iterable[int]] = ..., repeated_float: Optional[Iterable[float]] = ..., repeated_double: Optional[Iterable[float]] = ..., repeated_bool: Optional[Iterable[bool]] = ..., repeated_string: Optional[Iterable[Text]] = ..., repeated_bytes: 
Optional[Iterable[bytes]] = ..., repeatedgroup: Optional[Iterable[TestAllTypes.RepeatedGroup]] = ..., repeated_nested_message: Optional[Iterable[TestAllTypes.NestedMessage]] = ..., repeated_foreign_message: Optional[Iterable[ForeignMessage]] = ..., repeated_import_message: Optional[Iterable[ImportMessage]] = ..., repeated_nested_enum: Optional[Iterable[TestAllTypes.NestedEnum]] = ..., repeated_foreign_enum: Optional[Iterable[ForeignEnum]] = ..., repeated_import_enum: Optional[Iterable[ImportEnum]] = ..., repeated_string_piece: Optional[Iterable[Text]] = ..., repeated_cord: Optional[Iterable[Text]] = ..., repeated_lazy_message: Optional[Iterable[TestAllTypes.NestedMessage]] = ..., default_int32: Optional[int] = ..., default_int64: Optional[int] = ..., default_uint32: Optional[int] = ..., default_uint64: Optional[int] = ..., default_sint32: Optional[int] = ..., default_sint64: Optional[int] = ..., default_fixed32: Optional[int] = ..., default_fixed64: Optional[int] = ..., default_sfixed32: Optional[int] = ..., default_sfixed64: Optional[int] = ..., default_float: Optional[float] = ..., default_double: Optional[float] = ..., default_bool: Optional[bool] = ..., default_string: Optional[Text] = ..., default_bytes: Optional[bytes] = ..., default_nested_enum: Optional[TestAllTypes.NestedEnum] = ..., default_foreign_enum: Optional[ForeignEnum] = ..., default_import_enum: Optional[ImportEnum] = ..., default_string_piece: Optional[Text] = ..., default_cord: Optional[Text] = ..., oneof_uint32: Optional[int] = ..., oneof_nested_message: Optional[TestAllTypes.NestedMessage] = ..., oneof_string: Optional[Text] = ..., oneof_bytes: Optional[bytes] = ..., lazy_oneof_nested_message: Optional[TestAllTypes.NestedMessage] = ..., ) -> None: ... class ForeignMessage(Message): c: int def __init__(self, c: Optional[int] = ...) -> None: ... class TestNoArenaMessage(Message): @property def arena_message(self) -> ArenaMessage: ... 
def __init__(self, arena_message: Optional[ArenaMessage] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_no_generic_services_pb2.pyi0000644€tŠÔÚ€2›s®0000000104013576752252037347 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from typing import List, Optional, Tuple, cast class TestEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, TestEnum]]: ... FOO: TestEnum class TestMessage(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_pb2.pyi0000644€tŠÔÚ€2›s®0000011773113576752252033273 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message from google.protobuf.unittest_import_pb2 import ImportEnum, ImportMessage from google.protobuf.unittest_import_public_pb2 import PublicImportMessage from typing import Iterable, List, Mapping, MutableMapping, Optional, Text, Tuple, cast class ForeignEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> ForeignEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[ForeignEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, ForeignEnum]]: ... FOREIGN_FOO: ForeignEnum FOREIGN_BAR: ForeignEnum FOREIGN_BAZ: ForeignEnum class TestEnumWithDupValue(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestEnumWithDupValue: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestEnumWithDupValue]: ... 
@classmethod def items(cls) -> List[Tuple[bytes, TestEnumWithDupValue]]: ... FOO1: TestEnumWithDupValue BAR1: TestEnumWithDupValue BAZ: TestEnumWithDupValue FOO2: TestEnumWithDupValue BAR2: TestEnumWithDupValue class TestSparseEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestSparseEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestSparseEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, TestSparseEnum]]: ... SPARSE_A: TestSparseEnum SPARSE_B: TestSparseEnum SPARSE_C: TestSparseEnum SPARSE_D: TestSparseEnum SPARSE_E: TestSparseEnum SPARSE_F: TestSparseEnum SPARSE_G: TestSparseEnum class TestAllTypes(Message): class NestedEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestAllTypes.NestedEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestAllTypes.NestedEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, TestAllTypes.NestedEnum]]: ... FOO: TestAllTypes.NestedEnum BAR: TestAllTypes.NestedEnum BAZ: TestAllTypes.NestedEnum NEG: TestAllTypes.NestedEnum class NestedMessage(Message): bb: int def __init__(self, bb: Optional[int] = ...) -> None: ... class OptionalGroup(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... class RepeatedGroup(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... 
optional_int32: int optional_int64: int optional_uint32: int optional_uint64: int optional_sint32: int optional_sint64: int optional_fixed32: int optional_fixed64: int optional_sfixed32: int optional_sfixed64: int optional_float: float optional_double: float optional_bool: bool optional_string: Text optional_bytes: bytes optional_nested_enum: TestAllTypes.NestedEnum optional_foreign_enum: ForeignEnum optional_import_enum: ImportEnum optional_string_piece: Text optional_cord: Text repeated_int32: RepeatedScalarFieldContainer[int] repeated_int64: RepeatedScalarFieldContainer[int] repeated_uint32: RepeatedScalarFieldContainer[int] repeated_uint64: RepeatedScalarFieldContainer[int] repeated_sint32: RepeatedScalarFieldContainer[int] repeated_sint64: RepeatedScalarFieldContainer[int] repeated_fixed32: RepeatedScalarFieldContainer[int] repeated_fixed64: RepeatedScalarFieldContainer[int] repeated_sfixed32: RepeatedScalarFieldContainer[int] repeated_sfixed64: RepeatedScalarFieldContainer[int] repeated_float: RepeatedScalarFieldContainer[float] repeated_double: RepeatedScalarFieldContainer[float] repeated_bool: RepeatedScalarFieldContainer[bool] repeated_string: RepeatedScalarFieldContainer[Text] repeated_bytes: RepeatedScalarFieldContainer[bytes] repeated_nested_enum: RepeatedScalarFieldContainer[TestAllTypes.NestedEnum] repeated_foreign_enum: RepeatedScalarFieldContainer[ForeignEnum] repeated_import_enum: RepeatedScalarFieldContainer[ImportEnum] repeated_string_piece: RepeatedScalarFieldContainer[Text] repeated_cord: RepeatedScalarFieldContainer[Text] default_int32: int default_int64: int default_uint32: int default_uint64: int default_sint32: int default_sint64: int default_fixed32: int default_fixed64: int default_sfixed32: int default_sfixed64: int default_float: float default_double: float default_bool: bool default_string: Text default_bytes: bytes default_nested_enum: TestAllTypes.NestedEnum default_foreign_enum: ForeignEnum default_import_enum: ImportEnum 
default_string_piece: Text default_cord: Text oneof_uint32: int oneof_string: Text oneof_bytes: bytes @property def optionalgroup(self) -> TestAllTypes.OptionalGroup: ... @property def optional_nested_message(self) -> TestAllTypes.NestedMessage: ... @property def optional_foreign_message(self) -> ForeignMessage: ... @property def optional_import_message(self) -> ImportMessage: ... @property def optional_public_import_message(self) -> PublicImportMessage: ... @property def optional_lazy_message(self) -> TestAllTypes.NestedMessage: ... @property def repeatedgroup(self) -> RepeatedCompositeFieldContainer[TestAllTypes.RepeatedGroup]: ... @property def repeated_nested_message(self) -> RepeatedCompositeFieldContainer[TestAllTypes.NestedMessage]: ... @property def repeated_foreign_message(self) -> RepeatedCompositeFieldContainer[ForeignMessage]: ... @property def repeated_import_message(self) -> RepeatedCompositeFieldContainer[ImportMessage]: ... @property def repeated_lazy_message(self) -> RepeatedCompositeFieldContainer[TestAllTypes.NestedMessage]: ... @property def oneof_nested_message(self) -> TestAllTypes.NestedMessage: ... 
def __init__( self, optional_int32: Optional[int] = ..., optional_int64: Optional[int] = ..., optional_uint32: Optional[int] = ..., optional_uint64: Optional[int] = ..., optional_sint32: Optional[int] = ..., optional_sint64: Optional[int] = ..., optional_fixed32: Optional[int] = ..., optional_fixed64: Optional[int] = ..., optional_sfixed32: Optional[int] = ..., optional_sfixed64: Optional[int] = ..., optional_float: Optional[float] = ..., optional_double: Optional[float] = ..., optional_bool: Optional[bool] = ..., optional_string: Optional[Text] = ..., optional_bytes: Optional[bytes] = ..., optionalgroup: Optional[TestAllTypes.OptionalGroup] = ..., optional_nested_message: Optional[TestAllTypes.NestedMessage] = ..., optional_foreign_message: Optional[ForeignMessage] = ..., optional_import_message: Optional[ImportMessage] = ..., optional_nested_enum: Optional[TestAllTypes.NestedEnum] = ..., optional_foreign_enum: Optional[ForeignEnum] = ..., optional_import_enum: Optional[ImportEnum] = ..., optional_string_piece: Optional[Text] = ..., optional_cord: Optional[Text] = ..., optional_public_import_message: Optional[PublicImportMessage] = ..., optional_lazy_message: Optional[TestAllTypes.NestedMessage] = ..., repeated_int32: Optional[Iterable[int]] = ..., repeated_int64: Optional[Iterable[int]] = ..., repeated_uint32: Optional[Iterable[int]] = ..., repeated_uint64: Optional[Iterable[int]] = ..., repeated_sint32: Optional[Iterable[int]] = ..., repeated_sint64: Optional[Iterable[int]] = ..., repeated_fixed32: Optional[Iterable[int]] = ..., repeated_fixed64: Optional[Iterable[int]] = ..., repeated_sfixed32: Optional[Iterable[int]] = ..., repeated_sfixed64: Optional[Iterable[int]] = ..., repeated_float: Optional[Iterable[float]] = ..., repeated_double: Optional[Iterable[float]] = ..., repeated_bool: Optional[Iterable[bool]] = ..., repeated_string: Optional[Iterable[Text]] = ..., repeated_bytes: Optional[Iterable[bytes]] = ..., repeatedgroup: 
Optional[Iterable[TestAllTypes.RepeatedGroup]] = ..., repeated_nested_message: Optional[Iterable[TestAllTypes.NestedMessage]] = ..., repeated_foreign_message: Optional[Iterable[ForeignMessage]] = ..., repeated_import_message: Optional[Iterable[ImportMessage]] = ..., repeated_nested_enum: Optional[Iterable[TestAllTypes.NestedEnum]] = ..., repeated_foreign_enum: Optional[Iterable[ForeignEnum]] = ..., repeated_import_enum: Optional[Iterable[ImportEnum]] = ..., repeated_string_piece: Optional[Iterable[Text]] = ..., repeated_cord: Optional[Iterable[Text]] = ..., repeated_lazy_message: Optional[Iterable[TestAllTypes.NestedMessage]] = ..., default_int32: Optional[int] = ..., default_int64: Optional[int] = ..., default_uint32: Optional[int] = ..., default_uint64: Optional[int] = ..., default_sint32: Optional[int] = ..., default_sint64: Optional[int] = ..., default_fixed32: Optional[int] = ..., default_fixed64: Optional[int] = ..., default_sfixed32: Optional[int] = ..., default_sfixed64: Optional[int] = ..., default_float: Optional[float] = ..., default_double: Optional[float] = ..., default_bool: Optional[bool] = ..., default_string: Optional[Text] = ..., default_bytes: Optional[bytes] = ..., default_nested_enum: Optional[TestAllTypes.NestedEnum] = ..., default_foreign_enum: Optional[ForeignEnum] = ..., default_import_enum: Optional[ImportEnum] = ..., default_string_piece: Optional[Text] = ..., default_cord: Optional[Text] = ..., oneof_uint32: Optional[int] = ..., oneof_nested_message: Optional[TestAllTypes.NestedMessage] = ..., oneof_string: Optional[Text] = ..., oneof_bytes: Optional[bytes] = ..., ) -> None: ... class NestedTestAllTypes(Message): @property def child(self) -> NestedTestAllTypes: ... @property def payload(self) -> TestAllTypes: ... @property def repeated_child(self) -> RepeatedCompositeFieldContainer[NestedTestAllTypes]: ... 
def __init__( self, child: Optional[NestedTestAllTypes] = ..., payload: Optional[TestAllTypes] = ..., repeated_child: Optional[Iterable[NestedTestAllTypes]] = ..., ) -> None: ... class TestDeprecatedFields(Message): deprecated_int32: int deprecated_int32_in_oneof: int def __init__(self, deprecated_int32: Optional[int] = ..., deprecated_int32_in_oneof: Optional[int] = ...) -> None: ... class TestDeprecatedMessage(Message): def __init__(self,) -> None: ... class ForeignMessage(Message): c: int d: int def __init__(self, c: Optional[int] = ..., d: Optional[int] = ...) -> None: ... class TestReservedFields(Message): def __init__(self,) -> None: ... class TestAllExtensions(Message): def __init__(self,) -> None: ... class OptionalGroup_extension(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... class RepeatedGroup_extension(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... class TestGroup(Message): class OptionalGroup(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... optional_foreign_enum: ForeignEnum @property def optionalgroup(self) -> TestGroup.OptionalGroup: ... def __init__( self, optionalgroup: Optional[TestGroup.OptionalGroup] = ..., optional_foreign_enum: Optional[ForeignEnum] = ... ) -> None: ... class TestGroupExtension(Message): def __init__(self,) -> None: ... class TestNestedExtension(Message): class OptionalGroup_extension(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... def __init__(self,) -> None: ... 
class TestRequired(Message): a: int dummy2: int b: int dummy4: int dummy5: int dummy6: int dummy7: int dummy8: int dummy9: int dummy10: int dummy11: int dummy12: int dummy13: int dummy14: int dummy15: int dummy16: int dummy17: int dummy18: int dummy19: int dummy20: int dummy21: int dummy22: int dummy23: int dummy24: int dummy25: int dummy26: int dummy27: int dummy28: int dummy29: int dummy30: int dummy31: int dummy32: int c: int def __init__( self, a: int, b: int, c: int, dummy2: Optional[int] = ..., dummy4: Optional[int] = ..., dummy5: Optional[int] = ..., dummy6: Optional[int] = ..., dummy7: Optional[int] = ..., dummy8: Optional[int] = ..., dummy9: Optional[int] = ..., dummy10: Optional[int] = ..., dummy11: Optional[int] = ..., dummy12: Optional[int] = ..., dummy13: Optional[int] = ..., dummy14: Optional[int] = ..., dummy15: Optional[int] = ..., dummy16: Optional[int] = ..., dummy17: Optional[int] = ..., dummy18: Optional[int] = ..., dummy19: Optional[int] = ..., dummy20: Optional[int] = ..., dummy21: Optional[int] = ..., dummy22: Optional[int] = ..., dummy23: Optional[int] = ..., dummy24: Optional[int] = ..., dummy25: Optional[int] = ..., dummy26: Optional[int] = ..., dummy27: Optional[int] = ..., dummy28: Optional[int] = ..., dummy29: Optional[int] = ..., dummy30: Optional[int] = ..., dummy31: Optional[int] = ..., dummy32: Optional[int] = ..., ) -> None: ... class TestRequiredForeign(Message): dummy: int @property def optional_message(self) -> TestRequired: ... @property def repeated_message(self) -> RepeatedCompositeFieldContainer[TestRequired]: ... def __init__( self, optional_message: Optional[TestRequired] = ..., repeated_message: Optional[Iterable[TestRequired]] = ..., dummy: Optional[int] = ..., ) -> None: ... class TestRequiredMessage(Message): @property def optional_message(self) -> TestRequired: ... @property def repeated_message(self) -> RepeatedCompositeFieldContainer[TestRequired]: ... @property def required_message(self) -> TestRequired: ... 
def __init__( self, required_message: TestRequired, optional_message: Optional[TestRequired] = ..., repeated_message: Optional[Iterable[TestRequired]] = ..., ) -> None: ... class TestForeignNested(Message): @property def foreign_nested(self) -> TestAllTypes.NestedMessage: ... def __init__(self, foreign_nested: Optional[TestAllTypes.NestedMessage] = ...) -> None: ... class TestEmptyMessage(Message): def __init__(self,) -> None: ... class TestEmptyMessageWithExtensions(Message): def __init__(self,) -> None: ... class TestMultipleExtensionRanges(Message): def __init__(self,) -> None: ... class TestReallyLargeTagNumber(Message): a: int bb: int def __init__(self, a: Optional[int] = ..., bb: Optional[int] = ...) -> None: ... class TestRecursiveMessage(Message): i: int @property def a(self) -> TestRecursiveMessage: ... def __init__(self, a: Optional[TestRecursiveMessage] = ..., i: Optional[int] = ...) -> None: ... class TestMutualRecursionA(Message): class SubMessage(Message): @property def b(self) -> TestMutualRecursionB: ... def __init__(self, b: Optional[TestMutualRecursionB] = ...) -> None: ... class SubGroup(Message): @property def sub_message(self) -> TestMutualRecursionA.SubMessage: ... @property def not_in_this_scc(self) -> TestAllTypes: ... def __init__( self, sub_message: Optional[TestMutualRecursionA.SubMessage] = ..., not_in_this_scc: Optional[TestAllTypes] = ... ) -> None: ... @property def bb(self) -> TestMutualRecursionB: ... @property def subgroup(self) -> TestMutualRecursionA.SubGroup: ... def __init__( self, bb: Optional[TestMutualRecursionB] = ..., subgroup: Optional[TestMutualRecursionA.SubGroup] = ... ) -> None: ... class TestMutualRecursionB(Message): optional_int32: int @property def a(self) -> TestMutualRecursionA: ... def __init__(self, a: Optional[TestMutualRecursionA] = ..., optional_int32: Optional[int] = ...) -> None: ... 
class TestIsInitialized(Message): class SubMessage(Message): class SubGroup(Message): i: int def __init__(self, i: int) -> None: ... @property def subgroup(self) -> TestIsInitialized.SubMessage.SubGroup: ... def __init__(self, subgroup: Optional[TestIsInitialized.SubMessage.SubGroup] = ...) -> None: ... @property def sub_message(self) -> TestIsInitialized.SubMessage: ... def __init__(self, sub_message: Optional[TestIsInitialized.SubMessage] = ...) -> None: ... class TestDupFieldNumber(Message): class Foo(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... class Bar(Message): a: int def __init__(self, a: Optional[int] = ...) -> None: ... a: int @property def foo(self) -> TestDupFieldNumber.Foo: ... @property def bar(self) -> TestDupFieldNumber.Bar: ... def __init__( self, a: Optional[int] = ..., foo: Optional[TestDupFieldNumber.Foo] = ..., bar: Optional[TestDupFieldNumber.Bar] = ... ) -> None: ... class TestEagerMessage(Message): @property def sub_message(self) -> TestAllTypes: ... def __init__(self, sub_message: Optional[TestAllTypes] = ...) -> None: ... class TestLazyMessage(Message): @property def sub_message(self) -> TestAllTypes: ... def __init__(self, sub_message: Optional[TestAllTypes] = ...) -> None: ... class TestNestedMessageHasBits(Message): class NestedMessage(Message): nestedmessage_repeated_int32: RepeatedScalarFieldContainer[int] @property def nestedmessage_repeated_foreignmessage(self) -> RepeatedCompositeFieldContainer[ForeignMessage]: ... def __init__( self, nestedmessage_repeated_int32: Optional[Iterable[int]] = ..., nestedmessage_repeated_foreignmessage: Optional[Iterable[ForeignMessage]] = ..., ) -> None: ... @property def optional_nested_message(self) -> TestNestedMessageHasBits.NestedMessage: ... def __init__(self, optional_nested_message: Optional[TestNestedMessageHasBits.NestedMessage] = ...) -> None: ... 
class TestCamelCaseFieldNames(Message): PrimitiveField: int StringField: Text EnumField: ForeignEnum StringPieceField: Text CordField: Text RepeatedPrimitiveField: RepeatedScalarFieldContainer[int] RepeatedStringField: RepeatedScalarFieldContainer[Text] RepeatedEnumField: RepeatedScalarFieldContainer[ForeignEnum] RepeatedStringPieceField: RepeatedScalarFieldContainer[Text] RepeatedCordField: RepeatedScalarFieldContainer[Text] @property def MessageField(self) -> ForeignMessage: ... @property def RepeatedMessageField(self) -> RepeatedCompositeFieldContainer[ForeignMessage]: ... def __init__( self, PrimitiveField: Optional[int] = ..., StringField: Optional[Text] = ..., EnumField: Optional[ForeignEnum] = ..., MessageField: Optional[ForeignMessage] = ..., StringPieceField: Optional[Text] = ..., CordField: Optional[Text] = ..., RepeatedPrimitiveField: Optional[Iterable[int]] = ..., RepeatedStringField: Optional[Iterable[Text]] = ..., RepeatedEnumField: Optional[Iterable[ForeignEnum]] = ..., RepeatedMessageField: Optional[Iterable[ForeignMessage]] = ..., RepeatedStringPieceField: Optional[Iterable[Text]] = ..., RepeatedCordField: Optional[Iterable[Text]] = ..., ) -> None: ... class TestFieldOrderings(Message): class NestedMessage(Message): oo: int bb: int def __init__(self, oo: Optional[int] = ..., bb: Optional[int] = ...) -> None: ... my_string: Text my_int: int my_float: float @property def optional_nested_message(self) -> TestFieldOrderings.NestedMessage: ... def __init__( self, my_string: Optional[Text] = ..., my_int: Optional[int] = ..., my_float: Optional[float] = ..., optional_nested_message: Optional[TestFieldOrderings.NestedMessage] = ..., ) -> None: ... class TestExtensionOrderings1(Message): my_string: Text def __init__(self, my_string: Optional[Text] = ...) -> None: ... class TestExtensionOrderings2(Message): class TestExtensionOrderings3(Message): my_string: Text def __init__(self, my_string: Optional[Text] = ...) -> None: ... 
my_string: Text def __init__(self, my_string: Optional[Text] = ...) -> None: ... class TestExtremeDefaultValues(Message): escaped_bytes: bytes large_uint32: int large_uint64: int small_int32: int small_int64: int really_small_int32: int really_small_int64: int utf8_string: Text zero_float: float one_float: float small_float: float negative_one_float: float negative_float: float large_float: float small_negative_float: float inf_double: float neg_inf_double: float nan_double: float inf_float: float neg_inf_float: float nan_float: float cpp_trigraph: Text string_with_zero: Text bytes_with_zero: bytes string_piece_with_zero: Text cord_with_zero: Text replacement_string: Text def __init__( self, escaped_bytes: Optional[bytes] = ..., large_uint32: Optional[int] = ..., large_uint64: Optional[int] = ..., small_int32: Optional[int] = ..., small_int64: Optional[int] = ..., really_small_int32: Optional[int] = ..., really_small_int64: Optional[int] = ..., utf8_string: Optional[Text] = ..., zero_float: Optional[float] = ..., one_float: Optional[float] = ..., small_float: Optional[float] = ..., negative_one_float: Optional[float] = ..., negative_float: Optional[float] = ..., large_float: Optional[float] = ..., small_negative_float: Optional[float] = ..., inf_double: Optional[float] = ..., neg_inf_double: Optional[float] = ..., nan_double: Optional[float] = ..., inf_float: Optional[float] = ..., neg_inf_float: Optional[float] = ..., nan_float: Optional[float] = ..., cpp_trigraph: Optional[Text] = ..., string_with_zero: Optional[Text] = ..., bytes_with_zero: Optional[bytes] = ..., string_piece_with_zero: Optional[Text] = ..., cord_with_zero: Optional[Text] = ..., replacement_string: Optional[Text] = ..., ) -> None: ... class SparseEnumMessage(Message): sparse_enum: TestSparseEnum def __init__(self, sparse_enum: Optional[TestSparseEnum] = ...) -> None: ... class OneString(Message): data: Text def __init__(self, data: Optional[Text] = ...) -> None: ... 
class MoreString(Message): data: RepeatedScalarFieldContainer[Text] def __init__(self, data: Optional[Iterable[Text]] = ...) -> None: ... class OneBytes(Message): data: bytes def __init__(self, data: Optional[bytes] = ...) -> None: ... class MoreBytes(Message): data: RepeatedScalarFieldContainer[bytes] def __init__(self, data: Optional[Iterable[bytes]] = ...) -> None: ... class Int32Message(Message): data: int def __init__(self, data: Optional[int] = ...) -> None: ... class Uint32Message(Message): data: int def __init__(self, data: Optional[int] = ...) -> None: ... class Int64Message(Message): data: int def __init__(self, data: Optional[int] = ...) -> None: ... class Uint64Message(Message): data: int def __init__(self, data: Optional[int] = ...) -> None: ... class BoolMessage(Message): data: bool def __init__(self, data: Optional[bool] = ...) -> None: ... class TestOneof(Message): class FooGroup(Message): a: int b: Text def __init__(self, a: Optional[int] = ..., b: Optional[Text] = ...) -> None: ... foo_int: int foo_string: Text @property def foo_message(self) -> TestAllTypes: ... @property def foogroup(self) -> TestOneof.FooGroup: ... def __init__( self, foo_int: Optional[int] = ..., foo_string: Optional[Text] = ..., foo_message: Optional[TestAllTypes] = ..., foogroup: Optional[TestOneof.FooGroup] = ..., ) -> None: ... class TestOneofBackwardsCompatible(Message): class FooGroup(Message): a: int b: Text def __init__(self, a: Optional[int] = ..., b: Optional[Text] = ...) -> None: ... foo_int: int foo_string: Text @property def foo_message(self) -> TestAllTypes: ... @property def foogroup(self) -> TestOneofBackwardsCompatible.FooGroup: ... def __init__( self, foo_int: Optional[int] = ..., foo_string: Optional[Text] = ..., foo_message: Optional[TestAllTypes] = ..., foogroup: Optional[TestOneofBackwardsCompatible.FooGroup] = ..., ) -> None: ... class TestOneof2(Message): class NestedEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... 
@classmethod def Value(cls, name: bytes) -> TestOneof2.NestedEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestOneof2.NestedEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, TestOneof2.NestedEnum]]: ... FOO: TestOneof2.NestedEnum BAR: TestOneof2.NestedEnum BAZ: TestOneof2.NestedEnum class FooGroup(Message): a: int b: Text def __init__(self, a: Optional[int] = ..., b: Optional[Text] = ...) -> None: ... class NestedMessage(Message): qux_int: int corge_int: RepeatedScalarFieldContainer[int] def __init__(self, qux_int: Optional[int] = ..., corge_int: Optional[Iterable[int]] = ...) -> None: ... foo_int: int foo_string: Text foo_cord: Text foo_string_piece: Text foo_bytes: bytes foo_enum: TestOneof2.NestedEnum bar_int: int bar_string: Text bar_cord: Text bar_string_piece: Text bar_bytes: bytes bar_enum: TestOneof2.NestedEnum baz_int: int baz_string: Text @property def foo_message(self) -> TestOneof2.NestedMessage: ... @property def foogroup(self) -> TestOneof2.FooGroup: ... @property def foo_lazy_message(self) -> TestOneof2.NestedMessage: ... def __init__( self, foo_int: Optional[int] = ..., foo_string: Optional[Text] = ..., foo_cord: Optional[Text] = ..., foo_string_piece: Optional[Text] = ..., foo_bytes: Optional[bytes] = ..., foo_enum: Optional[TestOneof2.NestedEnum] = ..., foo_message: Optional[TestOneof2.NestedMessage] = ..., foogroup: Optional[TestOneof2.FooGroup] = ..., foo_lazy_message: Optional[TestOneof2.NestedMessage] = ..., bar_int: Optional[int] = ..., bar_string: Optional[Text] = ..., bar_cord: Optional[Text] = ..., bar_string_piece: Optional[Text] = ..., bar_bytes: Optional[bytes] = ..., bar_enum: Optional[TestOneof2.NestedEnum] = ..., baz_int: Optional[int] = ..., baz_string: Optional[Text] = ..., ) -> None: ... class TestRequiredOneof(Message): class NestedMessage(Message): required_double: float def __init__(self, required_double: float) -> None: ... 
foo_int: int foo_string: Text @property def foo_message(self) -> TestRequiredOneof.NestedMessage: ... def __init__( self, foo_int: Optional[int] = ..., foo_string: Optional[Text] = ..., foo_message: Optional[TestRequiredOneof.NestedMessage] = ..., ) -> None: ... class TestPackedTypes(Message): packed_int32: RepeatedScalarFieldContainer[int] packed_int64: RepeatedScalarFieldContainer[int] packed_uint32: RepeatedScalarFieldContainer[int] packed_uint64: RepeatedScalarFieldContainer[int] packed_sint32: RepeatedScalarFieldContainer[int] packed_sint64: RepeatedScalarFieldContainer[int] packed_fixed32: RepeatedScalarFieldContainer[int] packed_fixed64: RepeatedScalarFieldContainer[int] packed_sfixed32: RepeatedScalarFieldContainer[int] packed_sfixed64: RepeatedScalarFieldContainer[int] packed_float: RepeatedScalarFieldContainer[float] packed_double: RepeatedScalarFieldContainer[float] packed_bool: RepeatedScalarFieldContainer[bool] packed_enum: RepeatedScalarFieldContainer[ForeignEnum] def __init__( self, packed_int32: Optional[Iterable[int]] = ..., packed_int64: Optional[Iterable[int]] = ..., packed_uint32: Optional[Iterable[int]] = ..., packed_uint64: Optional[Iterable[int]] = ..., packed_sint32: Optional[Iterable[int]] = ..., packed_sint64: Optional[Iterable[int]] = ..., packed_fixed32: Optional[Iterable[int]] = ..., packed_fixed64: Optional[Iterable[int]] = ..., packed_sfixed32: Optional[Iterable[int]] = ..., packed_sfixed64: Optional[Iterable[int]] = ..., packed_float: Optional[Iterable[float]] = ..., packed_double: Optional[Iterable[float]] = ..., packed_bool: Optional[Iterable[bool]] = ..., packed_enum: Optional[Iterable[ForeignEnum]] = ..., ) -> None: ... 
class TestUnpackedTypes(Message): unpacked_int32: RepeatedScalarFieldContainer[int] unpacked_int64: RepeatedScalarFieldContainer[int] unpacked_uint32: RepeatedScalarFieldContainer[int] unpacked_uint64: RepeatedScalarFieldContainer[int] unpacked_sint32: RepeatedScalarFieldContainer[int] unpacked_sint64: RepeatedScalarFieldContainer[int] unpacked_fixed32: RepeatedScalarFieldContainer[int] unpacked_fixed64: RepeatedScalarFieldContainer[int] unpacked_sfixed32: RepeatedScalarFieldContainer[int] unpacked_sfixed64: RepeatedScalarFieldContainer[int] unpacked_float: RepeatedScalarFieldContainer[float] unpacked_double: RepeatedScalarFieldContainer[float] unpacked_bool: RepeatedScalarFieldContainer[bool] unpacked_enum: RepeatedScalarFieldContainer[ForeignEnum] def __init__( self, unpacked_int32: Optional[Iterable[int]] = ..., unpacked_int64: Optional[Iterable[int]] = ..., unpacked_uint32: Optional[Iterable[int]] = ..., unpacked_uint64: Optional[Iterable[int]] = ..., unpacked_sint32: Optional[Iterable[int]] = ..., unpacked_sint64: Optional[Iterable[int]] = ..., unpacked_fixed32: Optional[Iterable[int]] = ..., unpacked_fixed64: Optional[Iterable[int]] = ..., unpacked_sfixed32: Optional[Iterable[int]] = ..., unpacked_sfixed64: Optional[Iterable[int]] = ..., unpacked_float: Optional[Iterable[float]] = ..., unpacked_double: Optional[Iterable[float]] = ..., unpacked_bool: Optional[Iterable[bool]] = ..., unpacked_enum: Optional[Iterable[ForeignEnum]] = ..., ) -> None: ... class TestPackedExtensions(Message): def __init__(self,) -> None: ... class TestUnpackedExtensions(Message): def __init__(self,) -> None: ... class TestDynamicExtensions(Message): class DynamicEnumType(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestDynamicExtensions.DynamicEnumType: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestDynamicExtensions.DynamicEnumType]: ... 
@classmethod def items(cls) -> List[Tuple[bytes, TestDynamicExtensions.DynamicEnumType]]: ... DYNAMIC_FOO: TestDynamicExtensions.DynamicEnumType DYNAMIC_BAR: TestDynamicExtensions.DynamicEnumType DYNAMIC_BAZ: TestDynamicExtensions.DynamicEnumType class DynamicMessageType(Message): dynamic_field: int def __init__(self, dynamic_field: Optional[int] = ...) -> None: ... scalar_extension: int enum_extension: ForeignEnum dynamic_enum_extension: TestDynamicExtensions.DynamicEnumType repeated_extension: RepeatedScalarFieldContainer[Text] packed_extension: RepeatedScalarFieldContainer[int] @property def message_extension(self) -> ForeignMessage: ... @property def dynamic_message_extension(self) -> TestDynamicExtensions.DynamicMessageType: ... def __init__( self, scalar_extension: Optional[int] = ..., enum_extension: Optional[ForeignEnum] = ..., dynamic_enum_extension: Optional[TestDynamicExtensions.DynamicEnumType] = ..., message_extension: Optional[ForeignMessage] = ..., dynamic_message_extension: Optional[TestDynamicExtensions.DynamicMessageType] = ..., repeated_extension: Optional[Iterable[Text]] = ..., packed_extension: Optional[Iterable[int]] = ..., ) -> None: ... class TestRepeatedScalarDifferentTagSizes(Message): repeated_fixed32: RepeatedScalarFieldContainer[int] repeated_int32: RepeatedScalarFieldContainer[int] repeated_fixed64: RepeatedScalarFieldContainer[int] repeated_int64: RepeatedScalarFieldContainer[int] repeated_float: RepeatedScalarFieldContainer[float] repeated_uint64: RepeatedScalarFieldContainer[int] def __init__( self, repeated_fixed32: Optional[Iterable[int]] = ..., repeated_int32: Optional[Iterable[int]] = ..., repeated_fixed64: Optional[Iterable[int]] = ..., repeated_int64: Optional[Iterable[int]] = ..., repeated_float: Optional[Iterable[float]] = ..., repeated_uint64: Optional[Iterable[int]] = ..., ) -> None: ... 
class TestParsingMerge(Message): class RepeatedFieldsGenerator(Message): class Group1(Message): @property def field1(self) -> TestAllTypes: ... def __init__(self, field1: Optional[TestAllTypes] = ...) -> None: ... class Group2(Message): @property def field1(self) -> TestAllTypes: ... def __init__(self, field1: Optional[TestAllTypes] = ...) -> None: ... @property def field1(self) -> RepeatedCompositeFieldContainer[TestAllTypes]: ... @property def field2(self) -> RepeatedCompositeFieldContainer[TestAllTypes]: ... @property def field3(self) -> RepeatedCompositeFieldContainer[TestAllTypes]: ... @property def group1(self) -> RepeatedCompositeFieldContainer[TestParsingMerge.RepeatedFieldsGenerator.Group1]: ... @property def group2(self) -> RepeatedCompositeFieldContainer[TestParsingMerge.RepeatedFieldsGenerator.Group2]: ... @property def ext1(self) -> RepeatedCompositeFieldContainer[TestAllTypes]: ... @property def ext2(self) -> RepeatedCompositeFieldContainer[TestAllTypes]: ... def __init__( self, field1: Optional[Iterable[TestAllTypes]] = ..., field2: Optional[Iterable[TestAllTypes]] = ..., field3: Optional[Iterable[TestAllTypes]] = ..., group1: Optional[Iterable[TestParsingMerge.RepeatedFieldsGenerator.Group1]] = ..., group2: Optional[Iterable[TestParsingMerge.RepeatedFieldsGenerator.Group2]] = ..., ext1: Optional[Iterable[TestAllTypes]] = ..., ext2: Optional[Iterable[TestAllTypes]] = ..., ) -> None: ... class OptionalGroup(Message): @property def optional_group_all_types(self) -> TestAllTypes: ... def __init__(self, optional_group_all_types: Optional[TestAllTypes] = ...) -> None: ... class RepeatedGroup(Message): @property def repeated_group_all_types(self) -> TestAllTypes: ... def __init__(self, repeated_group_all_types: Optional[TestAllTypes] = ...) -> None: ... @property def required_all_types(self) -> TestAllTypes: ... @property def optional_all_types(self) -> TestAllTypes: ... 
@property def repeated_all_types(self) -> RepeatedCompositeFieldContainer[TestAllTypes]: ... @property def optionalgroup(self) -> TestParsingMerge.OptionalGroup: ... @property def repeatedgroup(self) -> RepeatedCompositeFieldContainer[TestParsingMerge.RepeatedGroup]: ... def __init__( self, required_all_types: TestAllTypes, optional_all_types: Optional[TestAllTypes] = ..., repeated_all_types: Optional[Iterable[TestAllTypes]] = ..., optionalgroup: Optional[TestParsingMerge.OptionalGroup] = ..., repeatedgroup: Optional[Iterable[TestParsingMerge.RepeatedGroup]] = ..., ) -> None: ... class TestCommentInjectionMessage(Message): a: Text def __init__(self, a: Optional[Text] = ...) -> None: ... class FooRequest(Message): def __init__(self,) -> None: ... class FooResponse(Message): def __init__(self,) -> None: ... class FooClientMessage(Message): def __init__(self,) -> None: ... class FooServerMessage(Message): def __init__(self,) -> None: ... class BarRequest(Message): def __init__(self,) -> None: ... class BarResponse(Message): def __init__(self,) -> None: ... class TestJsonName(Message): field_name1: int fieldName2: int FieldName3: int _field_name4: int FIELD_NAME5: int field_name6: int def __init__( self, field_name1: Optional[int] = ..., fieldName2: Optional[int] = ..., FieldName3: Optional[int] = ..., _field_name4: Optional[int] = ..., FIELD_NAME5: Optional[int] = ..., field_name6: Optional[int] = ..., ) -> None: ... class TestHugeFieldNumbers(Message): class OptionalGroup(Message): group_a: int def __init__(self, group_a: Optional[int] = ...) -> None: ... class StringStringMapEntry(Message): key: Text value: Text def __init__(self, key: Optional[Text] = ..., value: Optional[Text] = ...) -> None: ... 
optional_int32: int fixed_32: int repeated_int32: RepeatedScalarFieldContainer[int] packed_int32: RepeatedScalarFieldContainer[int] optional_enum: ForeignEnum optional_string: Text optional_bytes: bytes oneof_uint32: int oneof_string: Text oneof_bytes: bytes @property def optional_message(self) -> ForeignMessage: ... @property def optionalgroup(self) -> TestHugeFieldNumbers.OptionalGroup: ... @property def string_string_map(self) -> MutableMapping[Text, Text]: ... @property def oneof_test_all_types(self) -> TestAllTypes: ... def __init__( self, optional_int32: Optional[int] = ..., fixed_32: Optional[int] = ..., repeated_int32: Optional[Iterable[int]] = ..., packed_int32: Optional[Iterable[int]] = ..., optional_enum: Optional[ForeignEnum] = ..., optional_string: Optional[Text] = ..., optional_bytes: Optional[bytes] = ..., optional_message: Optional[ForeignMessage] = ..., optionalgroup: Optional[TestHugeFieldNumbers.OptionalGroup] = ..., string_string_map: Optional[Mapping[Text, Text]] = ..., oneof_uint32: Optional[int] = ..., oneof_test_all_types: Optional[TestAllTypes] = ..., oneof_string: Optional[Text] = ..., oneof_bytes: Optional[bytes] = ..., ) -> None: ... class TestExtensionInsideTable(Message): field1: int field2: int field3: int field4: int field6: int field7: int field8: int field9: int field10: int def __init__( self, field1: Optional[int] = ..., field2: Optional[int] = ..., field3: Optional[int] = ..., field4: Optional[int] = ..., field6: Optional[int] = ..., field7: Optional[int] = ..., field8: Optional[int] = ..., field9: Optional[int] = ..., field10: Optional[int] = ..., ) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/unittest_proto3_arena_pb2.pyi0000644€tŠÔÚ€2›s®0000002601113576752252035735 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message from google.protobuf.unittest_import_pb2 import ImportMessage from google.protobuf.unittest_import_public_pb2 import PublicImportMessage from typing import Iterable, List, Optional, Text, Tuple, cast class ForeignEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> ForeignEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[ForeignEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, ForeignEnum]]: ... FOREIGN_ZERO: ForeignEnum FOREIGN_FOO: ForeignEnum FOREIGN_BAR: ForeignEnum FOREIGN_BAZ: ForeignEnum class TestAllTypes(Message): class NestedEnum(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> TestAllTypes.NestedEnum: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[TestAllTypes.NestedEnum]: ... @classmethod def items(cls) -> List[Tuple[bytes, TestAllTypes.NestedEnum]]: ... ZERO: TestAllTypes.NestedEnum FOO: TestAllTypes.NestedEnum BAR: TestAllTypes.NestedEnum BAZ: TestAllTypes.NestedEnum NEG: TestAllTypes.NestedEnum class NestedMessage(Message): bb: int def __init__(self, bb: Optional[int] = ...) -> None: ... 
optional_int32: int optional_int64: int optional_uint32: int optional_uint64: int optional_sint32: int optional_sint64: int optional_fixed32: int optional_fixed64: int optional_sfixed32: int optional_sfixed64: int optional_float: float optional_double: float optional_bool: bool optional_string: Text optional_bytes: bytes optional_nested_enum: TestAllTypes.NestedEnum optional_foreign_enum: ForeignEnum optional_string_piece: Text optional_cord: Text repeated_int32: RepeatedScalarFieldContainer[int] repeated_int64: RepeatedScalarFieldContainer[int] repeated_uint32: RepeatedScalarFieldContainer[int] repeated_uint64: RepeatedScalarFieldContainer[int] repeated_sint32: RepeatedScalarFieldContainer[int] repeated_sint64: RepeatedScalarFieldContainer[int] repeated_fixed32: RepeatedScalarFieldContainer[int] repeated_fixed64: RepeatedScalarFieldContainer[int] repeated_sfixed32: RepeatedScalarFieldContainer[int] repeated_sfixed64: RepeatedScalarFieldContainer[int] repeated_float: RepeatedScalarFieldContainer[float] repeated_double: RepeatedScalarFieldContainer[float] repeated_bool: RepeatedScalarFieldContainer[bool] repeated_string: RepeatedScalarFieldContainer[Text] repeated_bytes: RepeatedScalarFieldContainer[bytes] repeated_nested_enum: RepeatedScalarFieldContainer[TestAllTypes.NestedEnum] repeated_foreign_enum: RepeatedScalarFieldContainer[ForeignEnum] repeated_string_piece: RepeatedScalarFieldContainer[Text] repeated_cord: RepeatedScalarFieldContainer[Text] oneof_uint32: int oneof_string: Text oneof_bytes: bytes @property def optional_nested_message(self) -> TestAllTypes.NestedMessage: ... @property def optional_foreign_message(self) -> ForeignMessage: ... @property def optional_import_message(self) -> ImportMessage: ... @property def optional_public_import_message(self) -> PublicImportMessage: ... @property def optional_lazy_message(self) -> TestAllTypes.NestedMessage: ... @property def optional_lazy_import_message(self) -> ImportMessage: ... 
@property def repeated_nested_message(self) -> RepeatedCompositeFieldContainer[TestAllTypes.NestedMessage]: ... @property def repeated_foreign_message(self) -> RepeatedCompositeFieldContainer[ForeignMessage]: ... @property def repeated_import_message(self) -> RepeatedCompositeFieldContainer[ImportMessage]: ... @property def repeated_lazy_message(self) -> RepeatedCompositeFieldContainer[TestAllTypes.NestedMessage]: ... @property def oneof_nested_message(self) -> TestAllTypes.NestedMessage: ... def __init__( self, optional_int32: Optional[int] = ..., optional_int64: Optional[int] = ..., optional_uint32: Optional[int] = ..., optional_uint64: Optional[int] = ..., optional_sint32: Optional[int] = ..., optional_sint64: Optional[int] = ..., optional_fixed32: Optional[int] = ..., optional_fixed64: Optional[int] = ..., optional_sfixed32: Optional[int] = ..., optional_sfixed64: Optional[int] = ..., optional_float: Optional[float] = ..., optional_double: Optional[float] = ..., optional_bool: Optional[bool] = ..., optional_string: Optional[Text] = ..., optional_bytes: Optional[bytes] = ..., optional_nested_message: Optional[TestAllTypes.NestedMessage] = ..., optional_foreign_message: Optional[ForeignMessage] = ..., optional_import_message: Optional[ImportMessage] = ..., optional_nested_enum: Optional[TestAllTypes.NestedEnum] = ..., optional_foreign_enum: Optional[ForeignEnum] = ..., optional_string_piece: Optional[Text] = ..., optional_cord: Optional[Text] = ..., optional_public_import_message: Optional[PublicImportMessage] = ..., optional_lazy_message: Optional[TestAllTypes.NestedMessage] = ..., optional_lazy_import_message: Optional[ImportMessage] = ..., repeated_int32: Optional[Iterable[int]] = ..., repeated_int64: Optional[Iterable[int]] = ..., repeated_uint32: Optional[Iterable[int]] = ..., repeated_uint64: Optional[Iterable[int]] = ..., repeated_sint32: Optional[Iterable[int]] = ..., repeated_sint64: Optional[Iterable[int]] = ..., repeated_fixed32: 
Optional[Iterable[int]] = ..., repeated_fixed64: Optional[Iterable[int]] = ..., repeated_sfixed32: Optional[Iterable[int]] = ..., repeated_sfixed64: Optional[Iterable[int]] = ..., repeated_float: Optional[Iterable[float]] = ..., repeated_double: Optional[Iterable[float]] = ..., repeated_bool: Optional[Iterable[bool]] = ..., repeated_string: Optional[Iterable[Text]] = ..., repeated_bytes: Optional[Iterable[bytes]] = ..., repeated_nested_message: Optional[Iterable[TestAllTypes.NestedMessage]] = ..., repeated_foreign_message: Optional[Iterable[ForeignMessage]] = ..., repeated_import_message: Optional[Iterable[ImportMessage]] = ..., repeated_nested_enum: Optional[Iterable[TestAllTypes.NestedEnum]] = ..., repeated_foreign_enum: Optional[Iterable[ForeignEnum]] = ..., repeated_string_piece: Optional[Iterable[Text]] = ..., repeated_cord: Optional[Iterable[Text]] = ..., repeated_lazy_message: Optional[Iterable[TestAllTypes.NestedMessage]] = ..., oneof_uint32: Optional[int] = ..., oneof_nested_message: Optional[TestAllTypes.NestedMessage] = ..., oneof_string: Optional[Text] = ..., oneof_bytes: Optional[bytes] = ..., ) -> None: ... 
class TestPackedTypes(Message): packed_int32: RepeatedScalarFieldContainer[int] packed_int64: RepeatedScalarFieldContainer[int] packed_uint32: RepeatedScalarFieldContainer[int] packed_uint64: RepeatedScalarFieldContainer[int] packed_sint32: RepeatedScalarFieldContainer[int] packed_sint64: RepeatedScalarFieldContainer[int] packed_fixed32: RepeatedScalarFieldContainer[int] packed_fixed64: RepeatedScalarFieldContainer[int] packed_sfixed32: RepeatedScalarFieldContainer[int] packed_sfixed64: RepeatedScalarFieldContainer[int] packed_float: RepeatedScalarFieldContainer[float] packed_double: RepeatedScalarFieldContainer[float] packed_bool: RepeatedScalarFieldContainer[bool] packed_enum: RepeatedScalarFieldContainer[ForeignEnum] def __init__( self, packed_int32: Optional[Iterable[int]] = ..., packed_int64: Optional[Iterable[int]] = ..., packed_uint32: Optional[Iterable[int]] = ..., packed_uint64: Optional[Iterable[int]] = ..., packed_sint32: Optional[Iterable[int]] = ..., packed_sint64: Optional[Iterable[int]] = ..., packed_fixed32: Optional[Iterable[int]] = ..., packed_fixed64: Optional[Iterable[int]] = ..., packed_sfixed32: Optional[Iterable[int]] = ..., packed_sfixed64: Optional[Iterable[int]] = ..., packed_float: Optional[Iterable[float]] = ..., packed_double: Optional[Iterable[float]] = ..., packed_bool: Optional[Iterable[bool]] = ..., packed_enum: Optional[Iterable[ForeignEnum]] = ..., ) -> None: ... 
class TestUnpackedTypes(Message): repeated_int32: RepeatedScalarFieldContainer[int] repeated_int64: RepeatedScalarFieldContainer[int] repeated_uint32: RepeatedScalarFieldContainer[int] repeated_uint64: RepeatedScalarFieldContainer[int] repeated_sint32: RepeatedScalarFieldContainer[int] repeated_sint64: RepeatedScalarFieldContainer[int] repeated_fixed32: RepeatedScalarFieldContainer[int] repeated_fixed64: RepeatedScalarFieldContainer[int] repeated_sfixed32: RepeatedScalarFieldContainer[int] repeated_sfixed64: RepeatedScalarFieldContainer[int] repeated_float: RepeatedScalarFieldContainer[float] repeated_double: RepeatedScalarFieldContainer[float] repeated_bool: RepeatedScalarFieldContainer[bool] repeated_nested_enum: RepeatedScalarFieldContainer[TestAllTypes.NestedEnum] def __init__( self, repeated_int32: Optional[Iterable[int]] = ..., repeated_int64: Optional[Iterable[int]] = ..., repeated_uint32: Optional[Iterable[int]] = ..., repeated_uint64: Optional[Iterable[int]] = ..., repeated_sint32: Optional[Iterable[int]] = ..., repeated_sint64: Optional[Iterable[int]] = ..., repeated_fixed32: Optional[Iterable[int]] = ..., repeated_fixed64: Optional[Iterable[int]] = ..., repeated_sfixed32: Optional[Iterable[int]] = ..., repeated_sfixed64: Optional[Iterable[int]] = ..., repeated_float: Optional[Iterable[float]] = ..., repeated_double: Optional[Iterable[float]] = ..., repeated_bool: Optional[Iterable[bool]] = ..., repeated_nested_enum: Optional[Iterable[TestAllTypes.NestedEnum]] = ..., ) -> None: ... class NestedTestAllTypes(Message): @property def child(self) -> NestedTestAllTypes: ... @property def payload(self) -> TestAllTypes: ... @property def repeated_child(self) -> RepeatedCompositeFieldContainer[NestedTestAllTypes]: ... def __init__( self, child: Optional[NestedTestAllTypes] = ..., payload: Optional[TestAllTypes] = ..., repeated_child: Optional[Iterable[NestedTestAllTypes]] = ..., ) -> None: ... 
class ForeignMessage(Message): c: int def __init__(self, c: Optional[int] = ...) -> None: ... class TestEmptyMessage(Message): def __init__(self,) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/util/0000755€tŠÔÚ€2›s®0000000000013576752267031077 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/util/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252033341 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/util/json_format_proto3_pb2.pyi0000644€tŠÔÚ€2›s®0000003225713576752252036217 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.any_pb2 import Any from google.protobuf.duration_pb2 import Duration from google.protobuf.field_mask_pb2 import FieldMask from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer from google.protobuf.message import Message from google.protobuf.struct_pb2 import ListValue, Struct, Value from google.protobuf.timestamp_pb2 import Timestamp from google.protobuf.unittest_pb2 import TestAllExtensions from google.protobuf.wrappers_pb2 import ( BoolValue, BytesValue, DoubleValue, FloatValue, Int32Value, Int64Value, StringValue, UInt32Value, UInt64Value, ) from typing import Iterable, List, Mapping, MutableMapping, Optional, Text, Tuple, cast class EnumType(int): @classmethod def Name(cls, number: int) -> bytes: ... @classmethod def Value(cls, name: bytes) -> EnumType: ... @classmethod def keys(cls) -> List[bytes]: ... @classmethod def values(cls) -> List[EnumType]: ... @classmethod def items(cls) -> List[Tuple[bytes, EnumType]]: ... FOO: EnumType BAR: EnumType class MessageType(Message): value: int def __init__(self, value: Optional[int] = ...) -> None: ... 
class TestMessage(Message): bool_value: bool int32_value: int int64_value: int uint32_value: int uint64_value: int float_value: float double_value: float string_value: Text bytes_value: bytes enum_value: EnumType repeated_bool_value: RepeatedScalarFieldContainer[bool] repeated_int32_value: RepeatedScalarFieldContainer[int] repeated_int64_value: RepeatedScalarFieldContainer[int] repeated_uint32_value: RepeatedScalarFieldContainer[int] repeated_uint64_value: RepeatedScalarFieldContainer[int] repeated_float_value: RepeatedScalarFieldContainer[float] repeated_double_value: RepeatedScalarFieldContainer[float] repeated_string_value: RepeatedScalarFieldContainer[Text] repeated_bytes_value: RepeatedScalarFieldContainer[bytes] repeated_enum_value: RepeatedScalarFieldContainer[EnumType] @property def message_value(self) -> MessageType: ... @property def repeated_message_value(self) -> RepeatedCompositeFieldContainer[MessageType]: ... def __init__( self, bool_value: Optional[bool] = ..., int32_value: Optional[int] = ..., int64_value: Optional[int] = ..., uint32_value: Optional[int] = ..., uint64_value: Optional[int] = ..., float_value: Optional[float] = ..., double_value: Optional[float] = ..., string_value: Optional[Text] = ..., bytes_value: Optional[bytes] = ..., enum_value: Optional[EnumType] = ..., message_value: Optional[MessageType] = ..., repeated_bool_value: Optional[Iterable[bool]] = ..., repeated_int32_value: Optional[Iterable[int]] = ..., repeated_int64_value: Optional[Iterable[int]] = ..., repeated_uint32_value: Optional[Iterable[int]] = ..., repeated_uint64_value: Optional[Iterable[int]] = ..., repeated_float_value: Optional[Iterable[float]] = ..., repeated_double_value: Optional[Iterable[float]] = ..., repeated_string_value: Optional[Iterable[Text]] = ..., repeated_bytes_value: Optional[Iterable[bytes]] = ..., repeated_enum_value: Optional[Iterable[EnumType]] = ..., repeated_message_value: Optional[Iterable[MessageType]] = ..., ) -> None: ... 
class TestOneof(Message): oneof_int32_value: int oneof_string_value: Text oneof_bytes_value: bytes oneof_enum_value: EnumType @property def oneof_message_value(self) -> MessageType: ... def __init__( self, oneof_int32_value: Optional[int] = ..., oneof_string_value: Optional[Text] = ..., oneof_bytes_value: Optional[bytes] = ..., oneof_enum_value: Optional[EnumType] = ..., oneof_message_value: Optional[MessageType] = ..., ) -> None: ... class TestMap(Message): class BoolMapEntry(Message): key: bool value: int def __init__(self, key: Optional[bool] = ..., value: Optional[int] = ...) -> None: ... class Int32MapEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class Int64MapEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class Uint32MapEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class Uint64MapEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class StringMapEntry(Message): key: Text value: int def __init__(self, key: Optional[Text] = ..., value: Optional[int] = ...) -> None: ... @property def bool_map(self) -> MutableMapping[bool, int]: ... @property def int32_map(self) -> MutableMapping[int, int]: ... @property def int64_map(self) -> MutableMapping[int, int]: ... @property def uint32_map(self) -> MutableMapping[int, int]: ... @property def uint64_map(self) -> MutableMapping[int, int]: ... @property def string_map(self) -> MutableMapping[Text, int]: ... def __init__( self, bool_map: Optional[Mapping[bool, int]] = ..., int32_map: Optional[Mapping[int, int]] = ..., int64_map: Optional[Mapping[int, int]] = ..., uint32_map: Optional[Mapping[int, int]] = ..., uint64_map: Optional[Mapping[int, int]] = ..., string_map: Optional[Mapping[Text, int]] = ..., ) -> None: ... 
class TestNestedMap(Message): class BoolMapEntry(Message): key: bool value: int def __init__(self, key: Optional[bool] = ..., value: Optional[int] = ...) -> None: ... class Int32MapEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class Int64MapEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class Uint32MapEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class Uint64MapEntry(Message): key: int value: int def __init__(self, key: Optional[int] = ..., value: Optional[int] = ...) -> None: ... class StringMapEntry(Message): key: Text value: int def __init__(self, key: Optional[Text] = ..., value: Optional[int] = ...) -> None: ... class MapMapEntry(Message): key: Text @property def value(self) -> TestNestedMap: ... def __init__(self, key: Optional[Text] = ..., value: Optional[TestNestedMap] = ...) -> None: ... @property def bool_map(self) -> MutableMapping[bool, int]: ... @property def int32_map(self) -> MutableMapping[int, int]: ... @property def int64_map(self) -> MutableMapping[int, int]: ... @property def uint32_map(self) -> MutableMapping[int, int]: ... @property def uint64_map(self) -> MutableMapping[int, int]: ... @property def string_map(self) -> MutableMapping[Text, int]: ... @property def map_map(self) -> MutableMapping[Text, TestNestedMap]: ... def __init__( self, bool_map: Optional[Mapping[bool, int]] = ..., int32_map: Optional[Mapping[int, int]] = ..., int64_map: Optional[Mapping[int, int]] = ..., uint32_map: Optional[Mapping[int, int]] = ..., uint64_map: Optional[Mapping[int, int]] = ..., string_map: Optional[Mapping[Text, int]] = ..., map_map: Optional[Mapping[Text, TestNestedMap]] = ..., ) -> None: ... class TestWrapper(Message): @property def bool_value(self) -> BoolValue: ... @property def int32_value(self) -> Int32Value: ... 
@property def int64_value(self) -> Int64Value: ... @property def uint32_value(self) -> UInt32Value: ... @property def uint64_value(self) -> UInt64Value: ... @property def float_value(self) -> FloatValue: ... @property def double_value(self) -> DoubleValue: ... @property def string_value(self) -> StringValue: ... @property def bytes_value(self) -> BytesValue: ... @property def repeated_bool_value(self) -> RepeatedCompositeFieldContainer[BoolValue]: ... @property def repeated_int32_value(self) -> RepeatedCompositeFieldContainer[Int32Value]: ... @property def repeated_int64_value(self) -> RepeatedCompositeFieldContainer[Int64Value]: ... @property def repeated_uint32_value(self) -> RepeatedCompositeFieldContainer[UInt32Value]: ... @property def repeated_uint64_value(self) -> RepeatedCompositeFieldContainer[UInt64Value]: ... @property def repeated_float_value(self) -> RepeatedCompositeFieldContainer[FloatValue]: ... @property def repeated_double_value(self) -> RepeatedCompositeFieldContainer[DoubleValue]: ... @property def repeated_string_value(self) -> RepeatedCompositeFieldContainer[StringValue]: ... @property def repeated_bytes_value(self) -> RepeatedCompositeFieldContainer[BytesValue]: ... 
def __init__( self, bool_value: Optional[BoolValue] = ..., int32_value: Optional[Int32Value] = ..., int64_value: Optional[Int64Value] = ..., uint32_value: Optional[UInt32Value] = ..., uint64_value: Optional[UInt64Value] = ..., float_value: Optional[FloatValue] = ..., double_value: Optional[DoubleValue] = ..., string_value: Optional[StringValue] = ..., bytes_value: Optional[BytesValue] = ..., repeated_bool_value: Optional[Iterable[BoolValue]] = ..., repeated_int32_value: Optional[Iterable[Int32Value]] = ..., repeated_int64_value: Optional[Iterable[Int64Value]] = ..., repeated_uint32_value: Optional[Iterable[UInt32Value]] = ..., repeated_uint64_value: Optional[Iterable[UInt64Value]] = ..., repeated_float_value: Optional[Iterable[FloatValue]] = ..., repeated_double_value: Optional[Iterable[DoubleValue]] = ..., repeated_string_value: Optional[Iterable[StringValue]] = ..., repeated_bytes_value: Optional[Iterable[BytesValue]] = ..., ) -> None: ... class TestTimestamp(Message): @property def value(self) -> Timestamp: ... @property def repeated_value(self) -> RepeatedCompositeFieldContainer[Timestamp]: ... def __init__(self, value: Optional[Timestamp] = ..., repeated_value: Optional[Iterable[Timestamp]] = ...) -> None: ... class TestDuration(Message): @property def value(self) -> Duration: ... @property def repeated_value(self) -> RepeatedCompositeFieldContainer[Duration]: ... def __init__(self, value: Optional[Duration] = ..., repeated_value: Optional[Iterable[Duration]] = ...) -> None: ... class TestFieldMask(Message): @property def value(self) -> FieldMask: ... def __init__(self, value: Optional[FieldMask] = ...) -> None: ... class TestStruct(Message): @property def value(self) -> Struct: ... @property def repeated_value(self) -> RepeatedCompositeFieldContainer[Struct]: ... def __init__(self, value: Optional[Struct] = ..., repeated_value: Optional[Iterable[Struct]] = ...) -> None: ... class TestAny(Message): @property def value(self) -> Any: ... 
@property def repeated_value(self) -> RepeatedCompositeFieldContainer[Any]: ... def __init__(self, value: Optional[Any] = ..., repeated_value: Optional[Iterable[Any]] = ...) -> None: ... class TestValue(Message): @property def value(self) -> Value: ... @property def repeated_value(self) -> RepeatedCompositeFieldContainer[Value]: ... def __init__(self, value: Optional[Value] = ..., repeated_value: Optional[Iterable[Value]] = ...) -> None: ... class TestListValue(Message): @property def value(self) -> ListValue: ... @property def repeated_value(self) -> RepeatedCompositeFieldContainer[ListValue]: ... def __init__(self, value: Optional[ListValue] = ..., repeated_value: Optional[Iterable[ListValue]] = ...) -> None: ... class TestBoolValue(Message): class BoolMapEntry(Message): key: bool value: int def __init__(self, key: Optional[bool] = ..., value: Optional[int] = ...) -> None: ... bool_value: bool @property def bool_map(self) -> MutableMapping[bool, int]: ... def __init__(self, bool_value: Optional[bool] = ..., bool_map: Optional[Mapping[bool, int]] = ...) -> None: ... class TestCustomJsonName(Message): value: int def __init__(self, value: Optional[int] = ...) -> None: ... class TestExtensions(Message): @property def extensions(self) -> TestAllExtensions: ... def __init__(self, extensions: Optional[TestAllExtensions] = ...) -> None: ... class TestEnumValue(Message): enum_value1: EnumType enum_value2: EnumType enum_value3: EnumType def __init__( self, enum_value1: Optional[EnumType] = ..., enum_value2: Optional[EnumType] = ..., enum_value3: Optional[EnumType] = ... ) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/google/protobuf/wrappers_pb2.pyi0000644€tŠÔÚ€2›s®0000000204413576752252033245 0ustar jukkaDROPBOX\Domain Users00000000000000from google.protobuf.message import Message from typing import Optional, Text class DoubleValue(Message): value: float def __init__(self, value: Optional[float] = ...) -> None: ... 
class FloatValue(Message): value: float def __init__(self, value: Optional[float] = ...) -> None: ... class Int64Value(Message): value: int def __init__(self, value: Optional[int] = ...) -> None: ... class UInt64Value(Message): value: int def __init__(self, value: Optional[int] = ...) -> None: ... class Int32Value(Message): value: int def __init__(self, value: Optional[int] = ...) -> None: ... class UInt32Value(Message): value: int def __init__(self, value: Optional[int] = ...) -> None: ... class BoolValue(Message): value: bool def __init__(self, value: Optional[bool] = ...) -> None: ... class StringValue(Message): value: Text def __init__(self, value: Optional[Text] = ...) -> None: ... class BytesValue(Message): value: bytes def __init__(self, value: Optional[bytes] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/itsdangerous.pyi0000644€tŠÔÚ€2›s®0000002020313576752252030227 0ustar jukkaDROPBOX\Domain Users00000000000000from datetime import datetime from typing import Any, Callable, IO, Mapping, MutableMapping, Optional, Tuple, Union, Text, Generator _serializer = Any # must be an object that has "dumps" and "loads" attributes (e.g. the json module) def want_bytes(s: Union[Text, bytes], encoding: Text = ..., errors: Text = ...) -> bytes: ... class BadData(Exception): message: str def __init__(self, message: str) -> None: ... class BadPayload(BadData): original_error: Optional[Exception] def __init__(self, message: str, original_error: Optional[Exception] = ...) -> None: ... class BadSignature(BadData): payload: Optional[Any] def __init__(self, message: str, payload: Optional[Any] = ...) -> None: ... class BadTimeSignature(BadSignature): date_signed: Optional[int] def __init__(self, message: str, payload: Optional[Any] = ..., date_signed: Optional[int] = ...) -> None: ... 
class BadHeader(BadSignature): header: Any original_error: Any def __init__(self, message, payload: Optional[Any] = ..., header: Optional[Any] = ..., original_error: Optional[Any] = ...) -> None: ... class SignatureExpired(BadTimeSignature): ... def base64_encode(string: Union[Text, bytes]) -> bytes: ... def base64_decode(string: Union[Text, bytes]) -> bytes: ... class SigningAlgorithm(object): def get_signature(self, key: bytes, value: bytes) -> bytes: ... def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: ... class NoneAlgorithm(SigningAlgorithm): def get_signature(self, key: bytes, value: bytes) -> bytes: ... class HMACAlgorithm(SigningAlgorithm): default_digest_method: Callable[..., Any] digest_method: Callable[..., Any] def __init__(self, digest_method: Optional[Callable[..., Any]] = ...) -> None: ... def get_signature(self, key: bytes, value: bytes) -> bytes: ... class Signer(object): default_digest_method: Callable[..., Any] = ... default_key_derivation: str = ... secret_key: bytes sep: bytes salt: Union[Text, bytes] key_derivation: str digest_method: Callable[..., Any] algorithm: SigningAlgorithm def __init__(self, secret_key: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., sep: Optional[Union[Text, bytes]] = ..., key_derivation: Optional[str] = ..., digest_method: Optional[Callable[..., Any]] = ..., algorithm: Optional[SigningAlgorithm] = ...) -> None: ... def derive_key(self) -> bytes: ... def get_signature(self, value: Union[Text, bytes]) -> bytes: ... def sign(self, value: Union[Text, bytes]) -> bytes: ... def verify_signature(self, value: bytes, sig: Union[Text, bytes]) -> bool: ... def unsign(self, signed_value: Union[Text, bytes]) -> bytes: ... def validate(self, signed_value: Union[Text, bytes]) -> bool: ... class TimestampSigner(Signer): def get_timestamp(self) -> int: ... def timestamp_to_datetime(self, ts: float) -> datetime: ... def sign(self, value: Union[Text, bytes]) -> bytes: ... 
def unsign(self, value: Union[Text, bytes], max_age: Optional[int] = ..., return_timestamp: bool = ...) -> Any: ... # morally -> Union[bytes, Tuple[bytes, datetime]] def validate(self, signed_value: Union[Text, bytes], max_age: Optional[int] = ...) -> bool: ... class Serializer(object): default_serializer: _serializer = ... default_signer: Callable[..., Signer] = ... secret_key: bytes salt: bytes serializer: _serializer is_text_serializer: bool signer: Callable[..., Signer] signer_kwargs: MutableMapping[str, Any] def __init__(self, secret_key: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., serializer: Optional[_serializer] = ..., signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping[str, Any]] = ...) -> None: ... def load_payload(self, payload: bytes, serializer: Optional[_serializer] = ...) -> Any: ... def dump_payload(self, obj: Any) -> bytes: ... def make_signer(self, salt: Optional[Union[Text, bytes]] = ...) -> Signer: ... def iter_unsigners(self, salt: Optional[Union[Text, bytes]] = ...) -> Generator[Any, None, None]: ... def dumps(self, obj: Any, salt: Optional[Union[Text, bytes]] = ...) -> Any: ... # morally -> Union[str, bytes] def dump(self, obj: Any, f: IO[Any], salt: Optional[Union[Text, bytes]] = ...) -> None: ... def loads(self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ...) -> Any: ... def load(self, f: IO[Any], salt: Optional[Union[Text, bytes]] = ...): ... def loads_unsafe(self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ...) -> Tuple[bool, Optional[Any]]: ... def load_unsafe(self, f: IO[Any], salt: Optional[Union[Text, bytes]] = ...) -> Tuple[bool, Optional[Any]]: ... class TimedSerializer(Serializer): def loads(self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., max_age: Optional[int] = ..., return_timestamp: bool = ...) -> Any: ... 
# morally -> Union[Any, Tuple[Any, datetime]] def loads_unsafe(self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., max_age: Optional[int] = ...) -> Tuple[bool, Any]: ... class JSONWebSignatureSerializer(Serializer): jws_algorithms: MutableMapping[Text, SigningAlgorithm] = ... default_algorithm: Text = ... default_serializer: Any = ... algorithm_name: Text algorithm: SigningAlgorithm def __init__(self, secret_key: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., serializer: Optional[_serializer] = ..., signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping[str, Any]] = ..., algorithm_name: Optional[Text] = ...) -> None: ... def load_payload(self, payload: Union[Text, bytes], serializer: Optional[_serializer] = ..., return_header: bool = ...) -> Any: ... # morally -> Union[Any, Tuple[Any, MutableMapping[str, Any]]] def dump_payload(self, header: Mapping[str, Any], obj: Any) -> bytes: ... # type: ignore def make_algorithm(self, algorithm_name: Text) -> SigningAlgorithm: ... def make_signer(self, salt: Optional[Union[Text, bytes]] = ..., algorithm: SigningAlgorithm = ...) -> Signer: ... def make_header(self, header_fields: Optional[Mapping[str, Any]]) -> MutableMapping[str, Any]: ... def dumps(self, obj: Any, salt: Optional[Union[Text, bytes]] = ..., header_fields: Optional[Mapping[str, Any]] = ...) -> bytes: ... def loads(self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., return_header: bool = ...) -> Any: ... # morally -> Union[Any, Tuple[Any, MutableMapping[str, Any]]] def loads_unsafe(self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., return_header: bool = ...) -> Tuple[bool, Any]: ... class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer): DEFAULT_EXPIRES_IN: int = ... 
expires_in: int def __init__(self, secret_key: Union[Text, bytes], expires_in: Optional[int] = ..., salt: Optional[Union[Text, bytes]] = ..., serializer: Optional[_serializer] = ..., signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping[str, Any]] = ..., algorithm_name: Optional[Text] = ...) -> None: ... def make_header(self, header_fields: Optional[Mapping[str, Any]]) -> MutableMapping[str, Any]: ... def loads(self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., return_header: bool = ...) -> Any: ... # morally -> Union[Any, Tuple[Any, MutableMapping[str, Any]]] def get_issue_date(self, header: Mapping[str, Any]) -> Optional[datetime]: ... def now(self) -> int: ... class _URLSafeSerializerMixin(object): default_serializer: _serializer = ... def load_payload(self, payload: bytes, serializer: Optional[_serializer] = ...) -> Any: ... def dump_payload(self, obj: Any) -> bytes: ... class URLSafeSerializer(_URLSafeSerializerMixin, Serializer): ... class URLSafeTimedSerializer(_URLSafeSerializerMixin, TimedSerializer): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/0000755€tŠÔÚ€2›s®0000000000013576752267026163 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/__init__.pyi0000644€tŠÔÚ€2›s®0000000253313576752252030442 0ustar jukkaDROPBOX\Domain Users00000000000000from jinja2.environment import Environment as Environment, Template as Template from jinja2.loaders import BaseLoader as BaseLoader, FileSystemLoader as FileSystemLoader, PackageLoader as PackageLoader, DictLoader as DictLoader, FunctionLoader as FunctionLoader, PrefixLoader as PrefixLoader, ChoiceLoader as ChoiceLoader, ModuleLoader as ModuleLoader from jinja2.bccache import BytecodeCache as BytecodeCache, FileSystemBytecodeCache as FileSystemBytecodeCache, MemcachedBytecodeCache as MemcachedBytecodeCache from jinja2.runtime import Undefined as Undefined, DebugUndefined as DebugUndefined, StrictUndefined as StrictUndefined, make_logging_undefined as make_logging_undefined from jinja2.exceptions import TemplateError as TemplateError, UndefinedError as UndefinedError, TemplateNotFound as TemplateNotFound, TemplatesNotFound as TemplatesNotFound, TemplateSyntaxError as TemplateSyntaxError, TemplateAssertionError as TemplateAssertionError from jinja2.filters import environmentfilter as environmentfilter, contextfilter as contextfilter, evalcontextfilter as evalcontextfilter from jinja2.utils import Markup as Markup, escape as escape, clear_caches as clear_caches, environmentfunction as environmentfunction, evalcontextfunction as evalcontextfunction, contextfunction as contextfunction, is_undefined as is_undefined, select_autoescape as select_autoescape mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/_compat.pyi0000644€tŠÔÚ€2›s®0000000120413576752252030317 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional import sys if sys.version_info[0] >= 3: from io import BytesIO from urllib.parse import quote_from_bytes as url_quote else: from cStringIO import 
StringIO as BytesIO from urllib import quote as url_quote PY2: Any PYPY: Any unichr: Any range_type: Any text_type: Any string_types: Any integer_types: Any iterkeys: Any itervalues: Any iteritems: Any NativeStringIO: Any def reraise(tp, value, tb: Optional[Any] = ...): ... ifilter: Any imap: Any izip: Any intern: Any implements_iterator: Any implements_to_string: Any encode_filename: Any get_next: Any def with_metaclass(meta, *bases): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/_stringdefs.pyi0000644€tŠÔÚ€2›s®0000000055013576752252031207 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any Cc: str Cf: str Cn: str Co: str Cs: Any Ll: str Lm: str Lo: str Lt: str Lu: str Mc: str Me: str Mn: str Nd: str Nl: str No: str Pc: str Pd: str Pe: str Pf: str Pi: str Po: str Ps: str Sc: str Sk: str Sm: str So: str Zl: str Zp: str Zs: str cats: Any def combine(*args): ... xid_start: str xid_continue: str def allexcept(*args): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/bccache.pyi0000644€tŠÔÚ€2›s®0000000256413576752252030257 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional marshal_dump: Any marshal_load: Any bc_version: int bc_magic: Any class Bucket: environment: Any key: Any checksum: Any def __init__(self, environment, key, checksum) -> None: ... code: Any def reset(self): ... def load_bytecode(self, f): ... def write_bytecode(self, f): ... def bytecode_from_string(self, string): ... def bytecode_to_string(self): ... class BytecodeCache: def load_bytecode(self, bucket): ... def dump_bytecode(self, bucket): ... def clear(self): ... def get_cache_key(self, name, filename: Optional[Any] = ...): ... def get_source_checksum(self, source): ... def get_bucket(self, environment, name, filename, source): ... def set_bucket(self, bucket): ... class FileSystemBytecodeCache(BytecodeCache): directory: Any pattern: Any def __init__(self, directory: Optional[Any] = ..., pattern: str = ...) -> None: ... 
def load_bytecode(self, bucket): ... def dump_bytecode(self, bucket): ... def clear(self): ... class MemcachedBytecodeCache(BytecodeCache): client: Any prefix: Any timeout: Any ignore_memcache_errors: Any def __init__(self, client, prefix: str = ..., timeout: Optional[Any] = ..., ignore_memcache_errors: bool = ...) -> None: ... def load_bytecode(self, bucket): ... def dump_bytecode(self, bucket): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/compiler.pyi0000644€tŠÔÚ€2›s®0000001433213576752252030515 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from keyword import iskeyword as is_python_keyword from jinja2.visitor import NodeVisitor operators: Any dict_item_iter: str unoptimize_before_dead_code: bool def generate(node, environment, name, filename, stream: Optional[Any] = ..., defer_init: bool = ...): ... def has_safe_repr(value): ... def find_undeclared(nodes, names): ... class Identifiers: declared: Any outer_undeclared: Any undeclared: Any declared_locally: Any declared_parameter: Any def __init__(self) -> None: ... def add_special(self, name): ... def is_declared(self, name): ... def copy(self): ... class Frame: eval_ctx: Any identifiers: Any toplevel: bool rootlevel: bool require_output_check: Any buffer: Any block: Any assigned_names: Any parent: Any def __init__(self, eval_ctx, parent: Optional[Any] = ...) -> None: ... def copy(self): ... def inspect(self, nodes): ... def find_shadowed(self, extra: Any = ...): ... def inner(self): ... def soft(self): ... __copy__: Any class VisitorExit(RuntimeError): ... class DependencyFinderVisitor(NodeVisitor): filters: Any tests: Any def __init__(self) -> None: ... def visit_Filter(self, node): ... def visit_Test(self, node): ... def visit_Block(self, node): ... class UndeclaredNameVisitor(NodeVisitor): names: Any undeclared: Any def __init__(self, names) -> None: ... def visit_Name(self, node): ... def visit_Block(self, node): ... 
class FrameIdentifierVisitor(NodeVisitor): identifiers: Any def __init__(self, identifiers) -> None: ... def visit_Name(self, node): ... def visit_If(self, node): ... def visit_Macro(self, node): ... def visit_Import(self, node): ... def visit_FromImport(self, node): ... def visit_Assign(self, node): ... def visit_For(self, node): ... def visit_CallBlock(self, node): ... def visit_FilterBlock(self, node): ... def visit_AssignBlock(self, node): ... def visit_Scope(self, node): ... def visit_Block(self, node): ... class CompilerExit(Exception): ... class CodeGenerator(NodeVisitor): environment: Any name: Any filename: Any stream: Any created_block_context: bool defer_init: Any import_aliases: Any blocks: Any extends_so_far: int has_known_extends: bool code_lineno: int tests: Any filters: Any debug_info: Any def __init__(self, environment, name, filename, stream: Optional[Any] = ..., defer_init: bool = ...) -> None: ... def fail(self, msg, lineno): ... def temporary_identifier(self): ... def buffer(self, frame): ... def return_buffer_contents(self, frame): ... def indent(self): ... def outdent(self, step: int = ...): ... def start_write(self, frame, node: Optional[Any] = ...): ... def end_write(self, frame): ... def simple_write(self, s, frame, node: Optional[Any] = ...): ... def blockvisit(self, nodes, frame): ... def write(self, x): ... def writeline(self, x, node: Optional[Any] = ..., extra: int = ...): ... def newline(self, node: Optional[Any] = ..., extra: int = ...): ... def signature(self, node, frame, extra_kwargs: Optional[Any] = ...): ... def pull_locals(self, frame): ... def pull_dependencies(self, nodes): ... def unoptimize_scope(self, frame): ... def push_scope(self, frame, extra_vars: Any = ...): ... def pop_scope(self, aliases, frame): ... def function_scoping(self, node, frame, children: Optional[Any] = ..., find_special: bool = ...): ... def macro_body(self, node, frame, children: Optional[Any] = ...): ... def macro_def(self, node, frame): ... 
def position(self, node): ... def visit_Template(self, node, frame: Optional[Any] = ...): ... def visit_Block(self, node, frame): ... def visit_Extends(self, node, frame): ... def visit_Include(self, node, frame): ... def visit_Import(self, node, frame): ... def visit_FromImport(self, node, frame): ... def visit_For(self, node, frame): ... def visit_If(self, node, frame): ... def visit_Macro(self, node, frame): ... def visit_CallBlock(self, node, frame): ... def visit_FilterBlock(self, node, frame): ... def visit_ExprStmt(self, node, frame): ... def visit_Output(self, node, frame): ... def make_assignment_frame(self, frame): ... def export_assigned_vars(self, frame, assignment_frame): ... def visit_Assign(self, node, frame): ... def visit_AssignBlock(self, node, frame): ... def visit_Name(self, node, frame): ... def visit_Const(self, node, frame): ... def visit_TemplateData(self, node, frame): ... def visit_Tuple(self, node, frame): ... def visit_List(self, node, frame): ... def visit_Dict(self, node, frame): ... def binop(self, interceptable: bool = ...): ... def uaop(self, interceptable: bool = ...): ... visit_Add: Any visit_Sub: Any visit_Mul: Any visit_Div: Any visit_FloorDiv: Any visit_Pow: Any visit_Mod: Any visit_And: Any visit_Or: Any visit_Pos: Any visit_Neg: Any visit_Not: Any def visit_Concat(self, node, frame): ... def visit_Compare(self, node, frame): ... def visit_Operand(self, node, frame): ... def visit_Getattr(self, node, frame): ... def visit_Getitem(self, node, frame): ... def visit_Slice(self, node, frame): ... def visit_Filter(self, node, frame): ... def visit_Test(self, node, frame): ... def visit_CondExpr(self, node, frame): ... def visit_Call(self, node, frame, forward_caller: bool = ...): ... def visit_Keyword(self, node, frame): ... def visit_MarkSafe(self, node, frame): ... def visit_MarkSafeIfAutoescape(self, node, frame): ... def visit_EnvironmentAttribute(self, node, frame): ... def visit_ExtensionAttribute(self, node, frame): ... 
def visit_ImportedName(self, node, frame): ... def visit_InternalName(self, node, frame): ... def visit_ContextReference(self, node, frame): ... def visit_Continue(self, node, frame): ... def visit_Break(self, node, frame): ... def visit_Scope(self, node, frame): ... def visit_EvalContextModifier(self, node, frame): ... def visit_ScopedEvalContextModifier(self, node, frame): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/constants.pyi0000644€tŠÔÚ€2›s®0000000002713576752252030713 0ustar jukkaDROPBOX\Domain Users00000000000000LOREM_IPSUM_WORDS: str mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/debug.pyi0000644€tŠÔÚ€2›s®0000000177213576752252027775 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional tproxy: Any raise_helper: str class TracebackFrameProxy: tb: Any def __init__(self, tb) -> None: ... @property def tb_next(self): ... def set_next(self, next): ... @property def is_jinja_frame(self): ... def __getattr__(self, name): ... def make_frame_proxy(frame): ... class ProcessedTraceback: exc_type: Any exc_value: Any frames: Any def __init__(self, exc_type, exc_value, frames) -> None: ... def render_as_text(self, limit: Optional[Any] = ...): ... def render_as_html(self, full: bool = ...): ... @property def is_template_syntax_error(self): ... @property def exc_info(self): ... @property def standard_exc_info(self): ... def make_traceback(exc_info, source_hint: Optional[Any] = ...): ... def translate_syntax_error(error, source: Optional[Any] = ...): ... def translate_exception(exc_info, initial_skip: int = ...): ... def fake_exc_info(exc_info, filename, lineno): ... 
tb_set_next: Any mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/defaults.pyi0000644€tŠÔÚ€2›s®0000000100313576752252030501 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jinja2.filters import FILTERS as DEFAULT_FILTERS from jinja2.tests import TESTS as DEFAULT_TESTS BLOCK_START_STRING: str BLOCK_END_STRING: str VARIABLE_START_STRING: str VARIABLE_END_STRING: str COMMENT_START_STRING: str COMMENT_END_STRING: str LINE_STATEMENT_PREFIX: Any LINE_COMMENT_PREFIX: Any TRIM_BLOCKS: bool LSTRIP_BLOCKS: bool NEWLINE_SEQUENCE: str KEEP_TRAILING_NEWLINE: bool DEFAULT_NAMESPACE: Any # Names in __all__ with no definition: # DEFAULT_FILTERS # DEFAULT_TESTS mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/environment.pyi0000644€tŠÔÚ€2›s®0000001745213576752252031255 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Dict, Iterator, List, Optional, Text, Type, Union from .bccache import BytecodeCache from .loaders import BaseLoader from .runtime import Context, Undefined if sys.version_info >= (3, 6): from typing import Awaitable, AsyncIterator def get_spontaneous_environment(*args): ... def create_cache(size): ... def copy_cache(cache): ... def load_extensions(environment, extensions): ... 
class Environment: sandboxed: bool overlayed: bool linked_to: Any shared: bool exception_handler: Any exception_formatter: Any code_generator_class: Any context_class: Any block_start_string: Text block_end_string: Text variable_start_string: Text variable_end_string: Text comment_start_string: Text comment_end_string: Text line_statement_prefix: Text line_comment_prefix: Text trim_blocks: bool lstrip_blocks: Any newline_sequence: Text keep_trailing_newline: bool undefined: Type[Undefined] optimized: bool finalize: Callable[..., Any] autoescape: Any filters: Any tests: Any globals: Dict[str, Any] loader: BaseLoader cache: Any bytecode_cache: BytecodeCache auto_reload: bool extensions: List[Any] def __init__( self, block_start_string: Text = ..., block_end_string: Text = ..., variable_start_string: Text = ..., variable_end_string: Text = ..., comment_start_string: Any = ..., comment_end_string: Text = ..., line_statement_prefix: Text = ..., line_comment_prefix: Text = ..., trim_blocks: bool = ..., lstrip_blocks: bool = ..., newline_sequence: Text = ..., keep_trailing_newline: bool = ..., extensions: List[Any] = ..., optimized: bool = ..., undefined: Type[Undefined] = ..., finalize: Optional[Callable[..., Any]] = ..., autoescape: Union[bool, Callable[[str], bool]] = ..., loader: Optional[BaseLoader] = ..., cache_size: int = ..., auto_reload: bool = ..., bytecode_cache: Optional[BytecodeCache] = ..., enable_async: bool = ..., ) -> None: ... def add_extension(self, extension): ... def extend(self, **attributes): ... 
def overlay( self, block_start_string: Text = ..., block_end_string: Text = ..., variable_start_string: Text = ..., variable_end_string: Text = ..., comment_start_string: Any = ..., comment_end_string: Text = ..., line_statement_prefix: Text = ..., line_comment_prefix: Text = ..., trim_blocks: bool = ..., lstrip_blocks: bool = ..., extensions: List[Any] = ..., optimized: bool = ..., undefined: Type[Undefined] = ..., finalize: Callable[..., Any] = ..., autoescape: bool = ..., loader: Optional[BaseLoader] = ..., cache_size: int = ..., auto_reload: bool = ..., bytecode_cache: Optional[BytecodeCache] = ..., ): ... lexer: Any def iter_extensions(self): ... def getitem(self, obj, argument): ... def getattr(self, obj, attribute): ... def call_filter(self, name, value, args: Optional[Any] = ..., kwargs: Optional[Any] = ..., context: Optional[Any] = ..., eval_ctx: Optional[Any] = ...): ... def call_test(self, name, value, args: Optional[Any] = ..., kwargs: Optional[Any] = ...): ... def parse(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ...): ... def lex(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ...): ... def preprocess(self, source: Text, name: Optional[Any] = ..., filename: Optional[Any] = ...): ... def compile(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ..., raw: bool = ..., defer_init: bool = ...): ... def compile_expression(self, source: Text, undefined_to_none: bool = ...): ... def compile_templates(self, target, extensions: Optional[Any] = ..., filter_func: Optional[Any] = ..., zip: str = ..., log_function: Optional[Any] = ..., ignore_errors: bool = ..., py_compile: bool = ...): ... def list_templates(self, extensions: Optional[Any] = ..., filter_func: Optional[Any] = ...): ... def handle_exception(self, exc_info: Optional[Any] = ..., rendered: bool = ..., source_hint: Optional[Any] = ...): ... def join_path(self, template: Union[Template, Text], parent: Text) -> Text: ... 
def get_template(self, name: Union[Template, Text], parent: Optional[Text] = ..., globals: Optional[Any] = ...) -> Template: ... def select_template(self, names: List[Union[Template, Text]], parent: Optional[Text] = ..., globals: Optional[Dict[str, Any]] = ...) -> Template: ... def get_or_select_template(self, template_name_or_list: Union[Union[Template, Text], List[Union[Template, Text]]], parent: Optional[Text] = ..., globals: Optional[Dict[str, Any]] = ...) -> Template: ... def from_string(self, source: Text, globals: Optional[Dict[str, Any]] = ..., template_class: Optional[Type[Template]] = ...) -> Template: ... def make_globals(self, d: Optional[Dict[str, Any]]) -> Dict[str, Any]: ... # Frequently added extensions are included here: # from InternationalizationExtension: def install_gettext_translations(self, translations: Any, newstyle: Optional[bool]): ... def install_null_translations(self, newstyle: Optional[bool]): ... def install_gettext_callables(self, gettext: Callable[..., Any], ngettext: Callable[..., Any], newstyle: Optional[bool]): ... def uninstall_gettext_translations(self, translations: Any): ... def extract_translations(self, source: Any, gettext_functions: Any): ... newstyle_gettext: bool class Template: def __new__(cls, source, block_start_string: Any = ..., block_end_string: Any = ..., variable_start_string: Any = ..., variable_end_string: Any = ..., comment_start_string: Any = ..., comment_end_string: Any = ..., line_statement_prefix: Any = ..., line_comment_prefix: Any = ..., trim_blocks: Any = ..., lstrip_blocks: Any = ..., newline_sequence: Any = ..., keep_trailing_newline: Any = ..., extensions: Any = ..., optimized: bool = ..., undefined: Any = ..., finalize: Optional[Any] = ..., autoescape: bool = ...): ... environment: Environment = ... @classmethod def from_code(cls, environment, code, globals, uptodate: Optional[Any] = ...): ... @classmethod def from_module_dict(cls, environment, module_dict, globals): ... 
def render(self, *args, **kwargs) -> Text: ... def stream(self, *args, **kwargs) -> TemplateStream: ... def generate(self, *args, **kwargs) -> Iterator[Text]: ... def new_context(self, vars: Optional[Dict[str, Any]] = ..., shared: bool = ..., locals: Optional[Dict[str, Any]] = ...) -> Context: ... def make_module(self, vars: Optional[Dict[str, Any]] = ..., shared: bool = ..., locals: Optional[Dict[str, Any]] = ...) -> Context: ... @property def module(self) -> Any: ... def get_corresponding_lineno(self, lineno): ... @property def is_up_to_date(self) -> bool: ... @property def debug_info(self): ... if sys.version_info >= (3, 6): def render_async(self, *args, **kwargs) -> Awaitable[Text]: ... def generate_async(self, *args, **kwargs) -> AsyncIterator[Text]: ... class TemplateModule: __name__: Any def __init__(self, template, context) -> None: ... def __html__(self): ... class TemplateExpression: def __init__(self, template, undefined_to_none) -> None: ... def __call__(self, *args, **kwargs): ... class TemplateStream: def __init__(self, gen) -> None: ... def dump(self, fp, encoding: Optional[Text] = ..., errors: Text = ...): ... buffered: bool def disable_buffering(self) -> None: ... def enable_buffering(self, size: int = ...) -> None: ... def __iter__(self): ... def __next__(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/exceptions.pyi0000644€tŠÔÚ€2›s®0000000203213576752252031056 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text class TemplateError(Exception): def __init__(self, message: Optional[Text] = ...) -> None: ... @property def message(self): ... def __unicode__(self): ... class TemplateNotFound(IOError, LookupError, TemplateError): message: Any name: Any templates: Any def __init__(self, name, message: Optional[Text] = ...) -> None: ... class TemplatesNotFound(TemplateNotFound): templates: Any def __init__(self, names: Any = ..., message: Optional[Text] = ...) -> None: ... 
class TemplateSyntaxError(TemplateError): lineno: int name: Text filename: Text source: Text translated: bool def __init__(self, message: Text, lineno: int, name: Optional[Text] = ..., filename: Optional[Text] = ...) -> None: ... class TemplateAssertionError(TemplateSyntaxError): ... class TemplateRuntimeError(TemplateError): ... class UndefinedError(TemplateRuntimeError): ... class SecurityError(TemplateRuntimeError): ... class FilterArgumentError(TemplateRuntimeError): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/ext.pyi0000644€tŠÔÚ€2›s®0000000312513576752252027501 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional GETTEXT_FUNCTIONS: Any class ExtensionRegistry(type): def __new__(cls, name, bases, d): ... class Extension: tags: Any priority: int environment: Any def __init__(self, environment) -> None: ... def bind(self, environment): ... def preprocess(self, source, name, filename: Optional[Any] = ...): ... def filter_stream(self, stream): ... def parse(self, parser): ... def attr(self, name, lineno: Optional[Any] = ...): ... def call_method(self, name, args: Optional[Any] = ..., kwargs: Optional[Any] = ..., dyn_args: Optional[Any] = ..., dyn_kwargs: Optional[Any] = ..., lineno: Optional[Any] = ...): ... class InternationalizationExtension(Extension): tags: Any def __init__(self, environment) -> None: ... def parse(self, parser): ... class ExprStmtExtension(Extension): tags: Any def parse(self, parser): ... class LoopControlExtension(Extension): tags: Any def parse(self, parser): ... class WithExtension(Extension): tags: Any def parse(self, parser): ... class AutoEscapeExtension(Extension): tags: Any def parse(self, parser): ... def extract_from_ast(node, gettext_functions: Any = ..., babel_style: bool = ...): ... class _CommentFinder: tokens: Any comment_tags: Any offset: int last_lineno: int def __init__(self, tokens, comment_tags) -> None: ... def find_backwards(self, offset): ... def find_comments(self, lineno): ... 
def babel_extract(fileobj, keywords, comment_tags, options): ... i18n: Any do: Any loopcontrols: Any with_: Any autoescape: Any mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/filters.pyi0000644€tŠÔÚ€2›s®0000000457113576752252030357 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, NamedTuple, Optional def contextfilter(f): ... def evalcontextfilter(f): ... def environmentfilter(f): ... def make_attrgetter(environment, attribute): ... def do_forceescape(value): ... def do_urlencode(value): ... def do_replace(eval_ctx, s, old, new, count: Optional[Any] = ...): ... def do_upper(s): ... def do_lower(s): ... def do_xmlattr(_eval_ctx, d, autospace: bool = ...): ... def do_capitalize(s): ... def do_title(s): ... def do_dictsort(value, case_sensitive: bool = ..., by: str = ...): ... def do_sort(environment, value, reverse: bool = ..., case_sensitive: bool = ..., attribute: Optional[Any] = ...): ... def do_default(value, default_value: str = ..., boolean: bool = ...): ... def do_join(eval_ctx, value, d: str = ..., attribute: Optional[Any] = ...): ... def do_center(value, width: int = ...): ... def do_first(environment, seq): ... def do_last(environment, seq): ... def do_random(environment, seq): ... def do_filesizeformat(value, binary: bool = ...): ... def do_pprint(value, verbose: bool = ...): ... def do_urlize(eval_ctx, value, trim_url_limit: Optional[Any] = ..., nofollow: bool = ..., target: Optional[Any] = ...): ... def do_indent(s, width: int = ..., indentfirst: bool = ...): ... def do_truncate(s, length: int = ..., killwords: bool = ..., end: str = ...): ... def do_wordwrap(environment, s, width: int = ..., break_long_words: bool = ..., wrapstring: Optional[Any] = ...): ... def do_wordcount(s): ... def do_int(value, default: int = ..., base: int = ...): ... def do_float(value, default: float = ...): ... def do_format(value, *args, **kwargs): ... def do_trim(value): ... def do_striptags(value): ... 
def do_slice(value, slices, fill_with: Optional[Any] = ...): ... def do_batch(value, linecount, fill_with: Optional[Any] = ...): ... def do_round(value, precision: int = ..., method: str = ...): ... def do_groupby(environment, value, attribute): ... class _GroupTuple(NamedTuple): grouper: Any list: Any def do_sum(environment, iterable, attribute: Optional[Any] = ..., start: int = ...): ... def do_list(value): ... def do_mark_safe(value): ... def do_mark_unsafe(value): ... def do_reverse(value): ... def do_attr(environment, obj, name): ... def do_map(*args, **kwargs): ... def do_select(*args, **kwargs): ... def do_reject(*args, **kwargs): ... def do_selectattr(*args, **kwargs): ... def do_rejectattr(*args, **kwargs): ... FILTERS: Any mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/lexer.pyi0000644€tŠÔÚ€2›s®0000000531413576752252030022 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Tuple whitespace_re: Any string_re: Any integer_re: Any name_re: Any float_re: Any newline_re: Any TOKEN_ADD: Any TOKEN_ASSIGN: Any TOKEN_COLON: Any TOKEN_COMMA: Any TOKEN_DIV: Any TOKEN_DOT: Any TOKEN_EQ: Any TOKEN_FLOORDIV: Any TOKEN_GT: Any TOKEN_GTEQ: Any TOKEN_LBRACE: Any TOKEN_LBRACKET: Any TOKEN_LPAREN: Any TOKEN_LT: Any TOKEN_LTEQ: Any TOKEN_MOD: Any TOKEN_MUL: Any TOKEN_NE: Any TOKEN_PIPE: Any TOKEN_POW: Any TOKEN_RBRACE: Any TOKEN_RBRACKET: Any TOKEN_RPAREN: Any TOKEN_SEMICOLON: Any TOKEN_SUB: Any TOKEN_TILDE: Any TOKEN_WHITESPACE: Any TOKEN_FLOAT: Any TOKEN_INTEGER: Any TOKEN_NAME: Any TOKEN_STRING: Any TOKEN_OPERATOR: Any TOKEN_BLOCK_BEGIN: Any TOKEN_BLOCK_END: Any TOKEN_VARIABLE_BEGIN: Any TOKEN_VARIABLE_END: Any TOKEN_RAW_BEGIN: Any TOKEN_RAW_END: Any TOKEN_COMMENT_BEGIN: Any TOKEN_COMMENT_END: Any TOKEN_COMMENT: Any TOKEN_LINESTATEMENT_BEGIN: Any TOKEN_LINESTATEMENT_END: Any TOKEN_LINECOMMENT_BEGIN: Any TOKEN_LINECOMMENT_END: Any TOKEN_LINECOMMENT: Any TOKEN_DATA: Any TOKEN_INITIAL: Any TOKEN_EOF: Any operators: Any reverse_operators: Any 
operator_re: Any ignored_tokens: Any ignore_if_empty: Any def describe_token(token): ... def describe_token_expr(expr): ... def count_newlines(value): ... def compile_rules(environment): ... class Failure: message: Any error_class: Any def __init__(self, message, cls: Any = ...) -> None: ... def __call__(self, lineno, filename): ... class Token(Tuple[int, Any, Any]): lineno: Any type: Any value: Any def __new__(cls, lineno, type, value): ... def test(self, expr): ... def test_any(self, *iterable): ... class TokenStreamIterator: stream: Any def __init__(self, stream) -> None: ... def __iter__(self): ... def __next__(self): ... class TokenStream: name: Any filename: Any closed: bool current: Any def __init__(self, generator, name, filename) -> None: ... def __iter__(self): ... def __bool__(self): ... __nonzero__: Any eos: Any def push(self, token): ... def look(self): ... def skip(self, n: int = ...): ... def next_if(self, expr): ... def skip_if(self, expr): ... def __next__(self): ... def close(self): ... def expect(self, expr): ... def get_lexer(environment): ... class Lexer: newline_sequence: Any keep_trailing_newline: Any rules: Any def __init__(self, environment) -> None: ... def tokenize(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ..., state: Optional[Any] = ...): ... def wrap(self, stream, name: Optional[Any] = ..., filename: Optional[Any] = ...): ... def tokeniter(self, source, name, filename: Optional[Any] = ..., state: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/loaders.pyi0000644€tŠÔÚ€2›s®0000000525513576752252030340 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterable, List, Optional, Text, Tuple, Union from types import ModuleType from .environment import Environment def split_template_path(template: Text) -> List[Text]: ... class BaseLoader: has_source_access: bool def get_source(self, environment, template): ... def list_templates(self): ... 
def load(self, environment, name, globals: Optional[Any] = ...): ... class FileSystemLoader(BaseLoader): searchpath: Text encoding: Any followlinks: Any def __init__(self, searchpath: Union[Text, Iterable[Text]], encoding: Text = ..., followlinks: bool = ...) -> None: ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... def list_templates(self): ... class PackageLoader(BaseLoader): encoding: Text manager: Any filesystem_bound: Any provider: Any package_path: Any def __init__(self, package_name: Text, package_path: Text = ..., encoding: Text = ...) -> None: ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... def list_templates(self): ... class DictLoader(BaseLoader): mapping: Any def __init__(self, mapping) -> None: ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... def list_templates(self): ... class FunctionLoader(BaseLoader): load_func: Any def __init__(self, load_func) -> None: ... def get_source( self, environment: Environment, template: Text, ) -> Tuple[Text, Optional[Text], Optional[Callable[..., Any]]]: ... class PrefixLoader(BaseLoader): mapping: Any delimiter: Any def __init__(self, mapping, delimiter: str = ...) -> None: ... def get_loader(self, template): ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... def load(self, environment, name, globals: Optional[Any] = ...): ... def list_templates(self): ... class ChoiceLoader(BaseLoader): loaders: Any def __init__(self, loaders) -> None: ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... def load(self, environment, name, globals: Optional[Any] = ...): ... def list_templates(self): ... class _TemplateModule(ModuleType): ... 
class ModuleLoader(BaseLoader): has_source_access: bool module: Any package_name: Any def __init__(self, path) -> None: ... @staticmethod def get_template_key(name): ... @staticmethod def get_module_filename(name): ... def load(self, environment, name, globals: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/meta.pyi0000644€tŠÔÚ€2›s®0000000052213576752252027625 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jinja2.compiler import CodeGenerator class TrackingCodeGenerator(CodeGenerator): undeclared_identifiers: Any def __init__(self, environment) -> None: ... def write(self, x): ... def pull_locals(self, frame): ... def find_undeclared_variables(ast): ... def find_referenced_templates(ast): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/nodes.pyi0000644€tŠÔÚ€2›s®0000001204613576752252030013 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Impossible(Exception): ... class NodeType(type): def __new__(cls, name, bases, d): ... class EvalContext: environment: Any autoescape: Any volatile: bool def __init__(self, environment, template_name: Optional[Any] = ...) -> None: ... def save(self): ... def revert(self, old): ... def get_eval_context(node, ctx): ... class Node: fields: Any attributes: Any abstract: bool def __init__(self, *fields, **attributes) -> None: ... def iter_fields(self, exclude: Optional[Any] = ..., only: Optional[Any] = ...): ... def iter_child_nodes(self, exclude: Optional[Any] = ..., only: Optional[Any] = ...): ... def find(self, node_type): ... def find_all(self, node_type): ... def set_ctx(self, ctx): ... def set_lineno(self, lineno, override: bool = ...): ... def set_environment(self, environment): ... def __eq__(self, other): ... def __ne__(self, other): ... 
__hash__: Any class Stmt(Node): abstract: bool class Helper(Node): abstract: bool class Template(Node): fields: Any class Output(Stmt): fields: Any class Extends(Stmt): fields: Any class For(Stmt): fields: Any class If(Stmt): fields: Any class Macro(Stmt): fields: Any class CallBlock(Stmt): fields: Any class FilterBlock(Stmt): fields: Any class Block(Stmt): fields: Any class Include(Stmt): fields: Any class Import(Stmt): fields: Any class FromImport(Stmt): fields: Any class ExprStmt(Stmt): fields: Any class Assign(Stmt): fields: Any class AssignBlock(Stmt): fields: Any class Expr(Node): abstract: bool def as_const(self, eval_ctx: Optional[Any] = ...): ... def can_assign(self): ... class BinExpr(Expr): fields: Any operator: Any abstract: bool def as_const(self, eval_ctx: Optional[Any] = ...): ... class UnaryExpr(Expr): fields: Any operator: Any abstract: bool def as_const(self, eval_ctx: Optional[Any] = ...): ... class Name(Expr): fields: Any def can_assign(self): ... class Literal(Expr): abstract: bool class Const(Literal): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... @classmethod def from_untrusted(cls, value, lineno: Optional[Any] = ..., environment: Optional[Any] = ...): ... class TemplateData(Literal): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Tuple(Literal): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... def can_assign(self): ... class List(Literal): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Dict(Literal): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Pair(Helper): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Keyword(Helper): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class CondExpr(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Filter(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... 
class Test(Expr): fields: Any class Call(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Getitem(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... def can_assign(self): ... class Getattr(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... def can_assign(self): ... class Slice(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Concat(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Compare(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Operand(Helper): fields: Any class Mul(BinExpr): operator: str class Div(BinExpr): operator: str class FloorDiv(BinExpr): operator: str class Add(BinExpr): operator: str class Sub(BinExpr): operator: str class Mod(BinExpr): operator: str class Pow(BinExpr): operator: str class And(BinExpr): operator: str def as_const(self, eval_ctx: Optional[Any] = ...): ... class Or(BinExpr): operator: str def as_const(self, eval_ctx: Optional[Any] = ...): ... class Not(UnaryExpr): operator: str class Neg(UnaryExpr): operator: str class Pos(UnaryExpr): operator: str class EnvironmentAttribute(Expr): fields: Any class ExtensionAttribute(Expr): fields: Any class ImportedName(Expr): fields: Any class InternalName(Expr): fields: Any def __init__(self) -> None: ... class MarkSafe(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class MarkSafeIfAutoescape(Expr): fields: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class ContextReference(Expr): ... class Continue(Stmt): ... class Break(Stmt): ... 
class Scope(Stmt): fields: Any class EvalContextModifier(Stmt): fields: Any class ScopedEvalContextModifier(EvalContextModifier): fields: Any mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/optimizer.pyi0000644€tŠÔÚ€2›s®0000000122413576752252030721 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jinja2.visitor import NodeTransformer def optimize(node, environment): ... class Optimizer(NodeTransformer): environment: Any def __init__(self, environment) -> None: ... def visit_If(self, node): ... def fold(self, node): ... visit_Add: Any visit_Sub: Any visit_Mul: Any visit_Div: Any visit_FloorDiv: Any visit_Pow: Any visit_Mod: Any visit_And: Any visit_Or: Any visit_Pos: Any visit_Neg: Any visit_Not: Any visit_Compare: Any visit_Getitem: Any visit_Getattr: Any visit_Call: Any visit_Filter: Any visit_Test: Any visit_CondExpr: Any mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/parser.pyi0000644€tŠÔÚ€2›s®0000000472313576752252030202 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Parser: environment: Any stream: Any name: Any filename: Any closed: bool extensions: Any def __init__(self, environment, source, name: Optional[Any] = ..., filename: Optional[Any] = ..., state: Optional[Any] = ...) -> None: ... def fail(self, msg, lineno: Optional[Any] = ..., exc: Any = ...): ... def fail_unknown_tag(self, name, lineno: Optional[Any] = ...): ... def fail_eof(self, end_tokens: Optional[Any] = ..., lineno: Optional[Any] = ...): ... def is_tuple_end(self, extra_end_rules: Optional[Any] = ...): ... def free_identifier(self, lineno: Optional[Any] = ...): ... def parse_statement(self): ... def parse_statements(self, end_tokens, drop_needle: bool = ...): ... def parse_set(self): ... def parse_for(self): ... def parse_if(self): ... def parse_block(self): ... def parse_extends(self): ... def parse_import_context(self, node, default): ... def parse_include(self): ... def parse_import(self): ... def parse_from(self): ... 
def parse_signature(self, node): ... def parse_call_block(self): ... def parse_filter_block(self): ... def parse_macro(self): ... def parse_print(self): ... def parse_assign_target(self, with_tuple: bool = ..., name_only: bool = ..., extra_end_rules: Optional[Any] = ...): ... def parse_expression(self, with_condexpr: bool = ...): ... def parse_condexpr(self): ... def parse_or(self): ... def parse_and(self): ... def parse_not(self): ... def parse_compare(self): ... def parse_add(self): ... def parse_sub(self): ... def parse_concat(self): ... def parse_mul(self): ... def parse_div(self): ... def parse_floordiv(self): ... def parse_mod(self): ... def parse_pow(self): ... def parse_unary(self, with_filter: bool = ...): ... def parse_primary(self): ... def parse_tuple(self, simplified: bool = ..., with_condexpr: bool = ..., extra_end_rules: Optional[Any] = ..., explicit_parentheses: bool = ...): ... def parse_list(self): ... def parse_dict(self): ... def parse_postfix(self, node): ... def parse_filter_expr(self, node): ... def parse_subscript(self, node): ... def parse_subscribed(self): ... def parse_call(self, node): ... def parse_filter(self, node, start_inline: bool = ...): ... def parse_test(self, node): ... def subparse(self, end_tokens: Optional[Any] = ...): ... def parse(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/runtime.pyi0000644€tŠÔÚ€2›s®0000000657113576752252030374 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Optional, Text, Union from jinja2.utils import Markup as Markup, escape as escape, missing as missing, concat as concat from jinja2.exceptions import TemplateRuntimeError as TemplateRuntimeError, TemplateNotFound as TemplateNotFound from jinja2.environment import Environment to_string: Any identity: Any def markup_join(seq): ... def unicode_join(seq): ... class TemplateReference: def __init__(self, context) -> None: ... def __getitem__(self, name): ... 
class Context: parent: Union[Context, Dict[str, Any]] vars: Dict[str, Any] environment: Environment eval_ctx: Any exported_vars: Any name: Text blocks: Dict[str, Any] def __init__(self, environment: Environment, parent: Union[Context, Dict[str, Any]], name: Text, blocks: Dict[str, Any]) -> None: ... def super(self, name, current): ... def get(self, key, default: Optional[Any] = ...): ... def resolve(self, key): ... def get_exported(self): ... def get_all(self): ... def call(__self, __obj, *args, **kwargs): ... def derived(self, locals: Optional[Any] = ...): ... keys: Any values: Any items: Any iterkeys: Any itervalues: Any iteritems: Any def __contains__(self, name): ... def __getitem__(self, key): ... class BlockReference: name: Any def __init__(self, name, context, stack, depth) -> None: ... @property def super(self): ... def __call__(self): ... class LoopContext: index0: int depth0: Any def __init__(self, iterable, recurse: Optional[Any] = ..., depth0: int = ...) -> None: ... def cycle(self, *args): ... first: Any last: Any index: Any revindex: Any revindex0: Any depth: Any def __len__(self): ... def __iter__(self): ... def loop(self, iterable): ... __call__: Any @property def length(self): ... class LoopContextIterator: context: Any def __init__(self, context) -> None: ... def __iter__(self): ... def __next__(self): ... class Macro: name: Any arguments: Any defaults: Any catch_kwargs: Any catch_varargs: Any caller: Any def __init__(self, environment, func, name, arguments, defaults, catch_kwargs, catch_varargs, caller) -> None: ... def __call__(self, *args, **kwargs): ... class Undefined: def __init__(self, hint: Optional[Any] = ..., obj: Any = ..., name: Optional[Any] = ..., exc: Any = ...) -> None: ... def __getattr__(self, name): ... 
__add__: Any __radd__: Any __mul__: Any __rmul__: Any __div__: Any __rdiv__: Any __truediv__: Any __rtruediv__: Any __floordiv__: Any __rfloordiv__: Any __mod__: Any __rmod__: Any __pos__: Any __neg__: Any __call__: Any __getitem__: Any __lt__: Any __le__: Any __gt__: Any __ge__: Any __int__: Any __float__: Any __complex__: Any __pow__: Any __rpow__: Any def __eq__(self, other): ... def __ne__(self, other): ... def __hash__(self): ... def __len__(self): ... def __iter__(self): ... def __nonzero__(self): ... __bool__: Any def make_logging_undefined(logger: Optional[Any] = ..., base: Optional[Any] = ...): ... class DebugUndefined(Undefined): ... class StrictUndefined(Undefined): __iter__: Any __len__: Any __nonzero__: Any __eq__: Any __ne__: Any __bool__: Any __hash__: Any mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/sandbox.pyi0000644€tŠÔÚ€2›s®0000000217113576752252030337 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jinja2.environment import Environment MAX_RANGE: int UNSAFE_FUNCTION_ATTRIBUTES: Any UNSAFE_METHOD_ATTRIBUTES: Any UNSAFE_GENERATOR_ATTRIBUTES: Any def safe_range(*args): ... def unsafe(f): ... def is_internal_attribute(obj, attr): ... def modifies_known_mutable(obj, attr): ... class SandboxedEnvironment(Environment): sandboxed: bool default_binop_table: Any default_unop_table: Any intercepted_binops: Any intercepted_unops: Any def intercept_unop(self, operator): ... binop_table: Any unop_table: Any def __init__(self, *args, **kwargs) -> None: ... def is_safe_attribute(self, obj, attr, value): ... def is_safe_callable(self, obj): ... def call_binop(self, context, operator, left, right): ... def call_unop(self, context, operator, arg): ... def getitem(self, obj, argument): ... def getattr(self, obj, attribute): ... def unsafe_undefined(self, obj, attribute): ... def call(__self, __context, __obj, *args, **kwargs): ... 
class ImmutableSandboxedEnvironment(SandboxedEnvironment): def is_safe_attribute(self, obj, attr, value): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/tests.pyi0000644€tŠÔÚ€2›s®0000000106113576752252030040 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any number_re: Any regex_type: Any test_callable: Any def test_odd(value): ... def test_even(value): ... def test_divisibleby(value, num): ... def test_defined(value): ... def test_undefined(value): ... def test_none(value): ... def test_lower(value): ... def test_upper(value): ... def test_string(value): ... def test_mapping(value): ... def test_number(value): ... def test_sequence(value): ... def test_equalto(value, other): ... def test_sameas(value, other): ... def test_iterable(value): ... def test_escaped(value): ... TESTS: Any mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/utils.pyi0000644€tŠÔÚ€2›s®0000000375313576752252030050 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterable, Optional from markupsafe import Markup as Markup, escape as escape, soft_unicode as soft_unicode missing: Any internal_code: Any concat: Any def contextfunction(f): ... def evalcontextfunction(f): ... def environmentfunction(f): ... def internalcode(f): ... def is_undefined(obj): ... def select_autoescape(enabled_extensions: Iterable[str] = ..., disabled_extensions: Iterable[str] = ..., default_for_string: bool = ..., default: bool = ...) -> Callable[[str], bool]: ... def consume(iterable): ... def clear_caches(): ... def import_string(import_name, silent: bool = ...): ... def open_if_exists(filename, mode: str = ...): ... def object_type_repr(obj): ... def pformat(obj, verbose: bool = ...): ... def urlize(text, trim_url_limit: Optional[Any] = ..., nofollow: bool = ..., target: Optional[Any] = ...): ... def generate_lorem_ipsum(n: int = ..., html: bool = ..., min: int = ..., max: int = ...): ... def unicode_urlencode(obj, charset: str = ..., for_qs: bool = ...): ... 
class LRUCache: capacity: Any def __init__(self, capacity) -> None: ... def __getnewargs__(self): ... def copy(self): ... def get(self, key, default: Optional[Any] = ...): ... def setdefault(self, key, default: Optional[Any] = ...): ... def clear(self): ... def __contains__(self, key): ... def __len__(self): ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def items(self): ... def iteritems(self): ... def values(self): ... def itervalue(self): ... def keys(self): ... def iterkeys(self): ... __iter__: Any def __reversed__(self): ... __copy__: Any class Cycler: items: Any def __init__(self, *items) -> None: ... pos: int def reset(self): ... @property def current(self): ... def __next__(self): ... class Joiner: sep: Any used: bool def __init__(self, sep: str = ...) -> None: ... def __call__(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/jinja2/visitor.pyi0000644€tŠÔÚ€2›s®0000000046213576752252030401 0ustar jukkaDROPBOX\Domain Users00000000000000class NodeVisitor: def get_visitor(self, node): ... def visit(self, node, *args, **kwargs): ... def generic_visit(self, node, *args, **kwargs): ... class NodeTransformer(NodeVisitor): def generic_visit(self, node, *args, **kwargs): ... def visit_list(self, node, *args, **kwargs): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/markupsafe/0000755€tŠÔÚ€2›s®0000000000013576752267027144 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/markupsafe/__init__.pyi0000644€tŠÔÚ€2›s®0000000541113576752252031421 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Text, Tuple, Union from collections import Mapping from markupsafe._compat import text_type import string from markupsafe._native import escape as escape, escape_silent as escape_silent, soft_unicode as soft_unicode class Markup(text_type): def __new__(cls, base: Text = ..., encoding: Optional[Text] = ..., errors: Text = ...) -> Markup: ... def __html__(self) -> Markup: ... def __add__(self, other: text_type) -> Markup: ... def __radd__(self, other: text_type) -> Markup: ... def __mul__(self, num: int) -> Markup: ... def __rmul__(self, num: int) -> Markup: ... def __mod__(self, *args: Any) -> Markup: ... def join(self, seq: Iterable[text_type]): ... def split(self, sep: Optional[text_type] = ..., maxsplit: int = ...) -> List[text_type]: ... def rsplit(self, sep: Optional[text_type] = ..., maxsplit: int = ...) -> List[text_type]: ... def splitlines(self, keepends: bool = ...) -> List[text_type]: ... def unescape(self) -> Text: ... def striptags(self) -> Text: ... @classmethod def escape(cls, s: text_type) -> Markup: ... def partition(self, sep: text_type) -> Tuple[Markup, Markup, Markup]: ... def rpartition(self, sep: text_type) -> Tuple[Markup, Markup, Markup]: ... def format(self, *args, **kwargs) -> Markup: ... def __html_format__(self, format_spec) -> Markup: ... def __getslice__(self, start: int, stop: int) -> Markup: ... def __getitem__(self, i: Union[int, slice]) -> Markup: ... def capitalize(self) -> Markup: ... def title(self) -> Markup: ... def lower(self) -> Markup: ... def upper(self) -> Markup: ... def swapcase(self) -> Markup: ... 
def replace(self, old: text_type, new: text_type, count: int = ...) -> Markup: ... def ljust(self, width: int, fillchar: text_type = ...) -> Markup: ... def rjust(self, width: int, fillchar: text_type = ...) -> Markup: ... def lstrip(self, chars: Optional[text_type] = ...) -> Markup: ... def rstrip(self, chars: Optional[text_type] = ...) -> Markup: ... def strip(self, chars: Optional[text_type] = ...) -> Markup: ... def center(self, width: int, fillchar: text_type = ...) -> Markup: ... def zfill(self, width: int) -> Markup: ... def translate(self, table: Union[Mapping[int, Union[int, text_type, None]], Sequence[Union[int, text_type, None]]]) -> Markup: ... def expandtabs(self, tabsize: int = ...) -> Markup: ... class EscapeFormatter(string.Formatter): escape: Callable[[text_type], Markup] def __init__(self, escape: Callable[[text_type], Markup]) -> None: ... def format_field(self, value: text_type, format_spec: text_type) -> Markup: ... if sys.version_info >= (3,): soft_str = soft_unicode mypy-0.761/mypy/typeshed/third_party/2and3/markupsafe/_compat.pyi0000644€tŠÔÚ€2›s®0000000067013576752252031306 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Iterator, Mapping, Text, Tuple, TypeVar _K = TypeVar('_K') _V = TypeVar('_V') PY2: bool def iteritems(d: Mapping[_K, _V]) -> Iterator[Tuple[_K, _V]]: ... if sys.version_info >= (3,): text_type = str string_types = str, unichr = chr int_types = int, else: from __builtin__ import unichr as unichr text_type = unicode string_types = (str, unicode) int_types = (int, long) mypy-0.761/mypy/typeshed/third_party/2and3/markupsafe/_constants.pyi0000644€tŠÔÚ€2›s®0000000010313576752252032026 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Text HTML_ENTITIES: Dict[Text, int] mypy-0.761/mypy/typeshed/third_party/2and3/markupsafe/_native.pyi0000644€tŠÔÚ€2›s®0000000037713576752252031315 0ustar jukkaDROPBOX\Domain Users00000000000000from . 
import Markup from ._compat import text_type, string_types from typing import Union, Text def escape(s: Union[Markup, Text]) -> Markup: ... def escape_silent(s: Union[None, Markup, Text]) -> Markup: ... def soft_unicode(s: Text) -> text_type: ... mypy-0.761/mypy/typeshed/third_party/2and3/markupsafe/_speedups.pyi0000644€tŠÔÚ€2›s®0000000037713576752252031657 0ustar jukkaDROPBOX\Domain Users00000000000000from . import Markup from ._compat import text_type, string_types from typing import Union, Text def escape(s: Union[Markup, Text]) -> Markup: ... def escape_silent(s: Union[None, Markup, Text]) -> Markup: ... def soft_unicode(s: Text) -> text_type: ... mypy-0.761/mypy/typeshed/third_party/2and3/maxminddb/0000755€tŠÔÚ€2›s®0000000000013576752267026751 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/maxminddb/__init__.pyi0000644€tŠÔÚ€2›s®0000000026213576752252031225 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Text from maxminddb import reader def open_database(database: Text, mode: int = ...) -> reader.Reader: ... def Reader(database: Text) -> reader.Reader: ... mypy-0.761/mypy/typeshed/third_party/2and3/maxminddb/compat.pyi0000644€tŠÔÚ€2›s®0000000036213576752252030752 0ustar jukkaDROPBOX\Domain Users00000000000000from ipaddress import IPv4Address, IPv6Address from typing import Any def compat_ip_address(address: object) -> Any: ... def int_from_byte(x: int) -> int: ... def int_from_bytes(x: bytes) -> int: ... def byte_from_int(x: int) -> bytes: ... mypy-0.761/mypy/typeshed/third_party/2and3/maxminddb/const.pyi0000644€tŠÔÚ€2›s®0000000020213576752252030606 0ustar jukkaDROPBOX\Domain Users00000000000000MODE_AUTO: int = ... MODE_MMAP_EXT: int = ... MODE_MMAP: int = ... MODE_FILE: int = ... MODE_MEMORY: int = ... MODE_FD: int = ... 
mypy-0.761/mypy/typeshed/third_party/2and3/maxminddb/decoder.pyi0000644€tŠÔÚ€2›s®0000000032713576752252031075 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Tuple class Decoder: def __init__(self, database_buffer: bytes, pointer_base: int = ..., pointer_test: bool = ...) -> None: ... def decode(self, offset: int) -> Tuple[Any, int]: ... mypy-0.761/mypy/typeshed/third_party/2and3/maxminddb/errors.pyi0000644€tŠÔÚ€2›s®0000000005613576752252031003 0ustar jukkaDROPBOX\Domain Users00000000000000class InvalidDatabaseError(RuntimeError): ... mypy-0.761/mypy/typeshed/third_party/2and3/maxminddb/extension.pyi0000644€tŠÔÚ€2›s®0000000214213576752252031501 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Mapping, Sequence, Text from maxminddb.errors import InvalidDatabaseError as InvalidDatabaseError class Reader: closed: bool = ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... def close(self, *args: Any, **kwargs: Any) -> Any: ... def get(self, *args: Any, **kwargs: Any) -> Any: ... def metadata(self, *args: Any, **kwargs: Any) -> Any: ... def __enter__(self, *args: Any, **kwargs: Any) -> Any: ... def __exit__(self, *args: Any, **kwargs: Any) -> Any: ... class extension: @property def node_count(self) -> int: ... @property def record_size(self) -> int: ... @property def ip_version(self) -> int: ... @property def database_type(self) -> Text: ... @property def languages(self) -> Sequence[Text]: ... @property def binary_format_major_version(self) -> int: ... @property def binary_format_minor_version(self) -> int: ... @property def build_epoch(self) -> int: ... @property def description(self) -> Mapping[Text, Text]: ... def __init__(self, **kwargs: Any) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/maxminddb/reader.pyi0000644€tŠÔÚ€2›s®0000000231713576752252030733 0ustar jukkaDROPBOX\Domain Users00000000000000from ipaddress import IPv4Address, IPv6Address from types import TracebackType from typing import Any, Mapping, Optional, Sequence, Text, Tuple, Type, Union class Reader: closed: bool = ... def __init__(self, database: bytes, mode: int = ...) -> None: ... def metadata(self) -> Metadata: ... def get(self, ip_address: Union[Text, IPv4Address, IPv6Address]) -> Optional[Any]: ... def get_with_prefix_len(self, ip_address: Union[Text, IPv4Address, IPv6Address]) -> Tuple[Optional[Any], int]: ... def close(self) -> None: ... def __enter__(self) -> Reader: ... def __exit__(self, exc_type: Optional[Type[BaseException]] = ..., exc_val: Optional[BaseException] = ..., exc_tb: Optional[TracebackType] = ...) -> None: ... class Metadata: node_count: int = ... record_size: int = ... ip_version: int = ... database_type: Text = ... languages: Sequence[Text] = ... binary_format_major_version: int = ... binary_format_minor_version: int = ... build_epoch: int = ... description: Mapping[Text, Text] = ... def __init__(self, **kwargs: Any) -> None: ... @property def node_byte_size(self) -> int: ... @property def search_tree_size(self) -> int: ... mypy-0.761/mypy/typeshed/third_party/2and3/mock.pyi0000644€tŠÔÚ€2›s®0000001227013576752252026456 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for mock import sys from typing import Any, List, Optional, Text, Tuple, Type, TypeVar _T = TypeVar("_T") FILTER_DIR: Any class _slotted: ... class _SentinelObject: name: Any def __init__(self, name: Any) -> None: ... class _Sentinel: def __init__(self) -> None: ... def __getattr__(self, name: str) -> Any: ... sentinel: Any DEFAULT: Any class _CallList(List[_T]): def __contains__(self, value: Any) -> bool: ... class _MockIter: obj: Any def __init__(self, obj: Any) -> None: ... def __iter__(self) -> Any: ... def __next__(self) -> Any: ... 
class Base: def __init__(self, *args: Any, **kwargs: Any) -> None: ... # TODO: Defining this and other mock classes as classes in this stub causes # many false positives with mypy and production code. See if we can # improve mypy somehow and use a class with an "Any" base class. NonCallableMock = Any class CallableMixin(Base): side_effect: Any def __init__(self, spec: Optional[Any] = ..., side_effect: Optional[Any] = ..., return_value: Any = ..., wraps: Optional[Any] = ..., name: Optional[Any] = ..., spec_set: Optional[Any] = ..., parent: Optional[Any] = ..., _spec_state: Optional[Any] = ..., _new_name: Any = ..., _new_parent: Optional[Any] = ..., **kwargs: Any) -> None: ... def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ... Mock = Any class _patch: attribute_name: Any getter: Any attribute: Any new: Any new_callable: Any spec: Any create: bool has_local: Any spec_set: Any autospec: Any kwargs: Any additional_patchers: Any def __init__(self, getter: Any, attribute: Any, new: Any, spec: Any, create: Any, spec_set: Any, autospec: Any, new_callable: Any, kwargs: Any) -> None: ... def copy(self) -> Any: ... def __call__(self, func: Any) -> Any: ... def decorate_class(self, klass: Any) -> Any: ... def decorate_callable(self, func: Any) -> Any: ... def get_original(self) -> Any: ... target: Any temp_original: Any is_local: Any def __enter__(self) -> Any: ... def __exit__(self, *exc_info: Any) -> Any: ... def start(self) -> Any: ... def stop(self) -> Any: ... class _patch_dict: in_dict: Any values: Any clear: Any def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ... def __call__(self, f: Any) -> Any: ... def decorate_class(self, klass: Any) -> Any: ... def __enter__(self) -> Any: ... def __exit__(self, *args: Any) -> Any: ... 
start: Any stop: Any class _patcher: TEST_PREFIX: str dict: Type[_patch_dict] def __call__(self, target: Any, new: Optional[Any] = ..., spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> _patch: ... def object(self, target: Any, attribute: Text, new: Optional[Any] = ..., spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> _patch: ... def multiple(self, target: Any, spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> _patch: ... def stopall(self) -> None: ... patch: _patcher class MagicMixin: def __init__(self, *args: Any, **kw: Any) -> None: ... NonCallableMagicMock = Any MagicMock = Any if sys.version_info >= (3, 8): AsyncMock = Any class MagicProxy: name: Any parent: Any def __init__(self, name: Any, parent: Any) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def create_mock(self) -> Any: ... def __get__(self, obj: Any, _type: Optional[Any] = ...) -> Any: ... class _ANY: def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... ANY: Any class _Call(Tuple[Any, ...]): def __new__(cls, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...) -> Any: ... name: Any parent: Any from_kall: Any def __init__(self, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...) -> None: ... def __eq__(self, other: Any) -> bool: ... __ne__: Any def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __getattr__(self, attr: Any) -> Any: ... def count(self, *args: Any, **kwargs: Any) -> Any: ... def index(self, *args: Any, **kwargs: Any) -> Any: ... def call_list(self) -> Any: ... 
call: Any def create_autospec(spec: Any, spec_set: Any = ..., instance: Any = ..., _parent: Optional[Any] = ..., _name: Optional[Any] = ..., **kwargs: Any) -> Any: ... class _SpecState: spec: Any ids: Any spec_set: Any parent: Any instance: Any name: Any def __init__(self, spec: Any, spec_set: Any = ..., parent: Optional[Any] = ..., name: Optional[Any] = ..., ids: Optional[Any] = ..., instance: Any = ...) -> None: ... def mock_open(mock: Optional[Any] = ..., read_data: Any = ...) -> Any: ... PropertyMock = Any if sys.version_info >= (3, 7): def seal(mock: Any) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/mypy_extensions.pyi0000644€tŠÔÚ€2›s®0000000400313576752252030775 0ustar jukkaDROPBOX\Domain Users00000000000000import abc import sys from typing import ( Dict, Type, TypeVar, Optional, Union, Any, Generic, Mapping, ItemsView, KeysView, ValuesView, Callable, ) _T = TypeVar('_T') _U = TypeVar('_U') # Internal mypy fallback type for all typed dicts (does not exist at runtime) class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def copy(self: _T) -> _T: ... # Using NoReturn so that only calls using mypy plugin hook that specialize the signature # can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: _T = ...) -> object: ... def update(self: _T, __m: _T) -> None: ... if sys.version_info < (3, 0): def has_key(self, k: str) -> bool: ... def viewitems(self) -> ItemsView[str, object]: ... def viewkeys(self) -> KeysView[str]: ... def viewvalues(self) -> ValuesView[object]: ... def __delitem__(self, k: NoReturn) -> None: ... def TypedDict(typename: str, fields: Dict[str, Type[_T]], total: bool = ...) -> Type[Dict[str, Any]]: ... def Arg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def DefaultArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def NamedArg(type: _T = ..., name: Optional[str] = ...) 
-> _T: ... def DefaultNamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def VarArg(type: _T = ...) -> _T: ... def KwArg(type: _T = ...) -> _T: ... # Return type that indicates a function does not return. # This type is equivalent to the None type, but the no-op Union is necessary to # distinguish the None type from the None value. NoReturn = Union[None] # Deprecated: Use typing.NoReturn instead. # This is intended as a class decorator, but mypy rejects abstract classes # when a Type[_T] is expected, so we can't give it the type we want def trait(cls: Any) -> Any: ... def mypyc_attr(*attrs: str, **kwattrs: object) -> Callable[[_T], _T]: ... class FlexibleAlias(Generic[_T, _U]): ... mypy-0.761/mypy/typeshed/third_party/2and3/pycurl.pyi0000644€tŠÔÚ€2›s®0000003063113576752252027044 0ustar jukkaDROPBOX\Domain Users00000000000000# TODO(MichalPokorny): more precise types from typing import Any, List, Tuple GLOBAL_ACK_EINTR: int GLOBAL_ALL: int GLOBAL_DEFAULT: int GLOBAL_NOTHING: int GLOBAL_SSL: int GLOBAL_WIN32: int def global_init(option: int) -> None: ... def global_cleanup() -> None: ... version: str def version_info() -> Tuple[int, str, int, str, int, str, int, str, Tuple[str, ...], Any, int, Any]: ... class error(Exception): ... class Curl(object): def close(self) -> None: ... def setopt(self, option: int, value: Any) -> None: ... def perform(self) -> None: ... def getinfo(self, info: Any) -> Any: ... def reset(self) -> None: ... def unsetopt(self, option: int) -> Any: ... def pause(self, bitmask: Any) -> Any: ... def errstr(self) -> str: ... # TODO(MichalPokorny): wat? USERPWD: int class CurlMulti(object): def close(self) -> None: ... def add_handle(self, obj: Curl) -> None: ... def remove_handle(self, obj: Curl) -> None: ... def perform(self) -> Tuple[Any, int]: ... def fdset(self) -> Tuple[List[Any], List[Any], List[Any]]: ... def select(self, timeout: float = ...) -> int: ... def info_read(self, max_objects: int = ...) 
-> Tuple[int, List[Any], List[Any]]: ... class CurlShare(object): def close(self) -> None: ... def setopt(self, option: int, value: Any) -> Any: ... ACCEPTTIMEOUT_MS: int ACCEPT_ENCODING: int ADDRESS_SCOPE: int APPCONNECT_TIME: int APPEND: int AUTOREFERER: int BUFFERSIZE: int CAINFO: int CAPATH: int CLOSESOCKETFUNCTION: int COMPILE_DATE: str COMPILE_LIBCURL_VERSION_NUM: int COMPILE_PY_VERSION_HEX: int CONDITION_UNMET: int CONNECTTIMEOUT: int CONNECTTIMEOUT_MS: int CONNECT_ONLY: int CONNECT_TIME: int CONTENT_LENGTH_DOWNLOAD: int CONTENT_LENGTH_UPLOAD: int CONTENT_TYPE: int COOKIE: int COOKIEFILE: int COOKIEJAR: int COOKIELIST: int COOKIESESSION: int COPYPOSTFIELDS: int CRLF: int CRLFILE: int CSELECT_ERR: int CSELECT_IN: int CSELECT_OUT: int CURL_HTTP_VERSION_1_0: int CURL_HTTP_VERSION_1_1: int CURL_HTTP_VERSION_2: int CURL_HTTP_VERSION_2_0: int CURL_HTTP_VERSION_LAST: int CURL_HTTP_VERSION_NONE: int CUSTOMREQUEST: int DEBUGFUNCTION: int DIRLISTONLY: int DNS_CACHE_TIMEOUT: int DNS_SERVERS: int DNS_USE_GLOBAL_CACHE: int EFFECTIVE_URL: int EGDSOCKET: int ENCODING: int EXPECT_100_TIMEOUT_MS: int FAILONERROR: int FILE: int FOLLOWLOCATION: int FORBID_REUSE: int FORM_BUFFER: int FORM_BUFFERPTR: int FORM_CONTENTS: int FORM_CONTENTTYPE: int FORM_FILE: int FORM_FILENAME: int FRESH_CONNECT: int FTPAPPEND: int FTPAUTH_DEFAULT: int FTPAUTH_SSL: int FTPAUTH_TLS: int FTPLISTONLY: int FTPMETHOD_DEFAULT: int FTPMETHOD_MULTICWD: int FTPMETHOD_NOCWD: int FTPMETHOD_SINGLECWD: int FTPPORT: int FTPSSLAUTH: int FTPSSL_ALL: int FTPSSL_CONTROL: int FTPSSL_NONE: int FTPSSL_TRY: int FTP_ACCOUNT: int FTP_ALTERNATIVE_TO_USER: int FTP_CREATE_MISSING_DIRS: int FTP_ENTRY_PATH: int FTP_FILEMETHOD: int FTP_RESPONSE_TIMEOUT: int FTP_SKIP_PASV_IP: int FTP_SSL: int FTP_SSL_CCC: int FTP_USE_EPRT: int FTP_USE_EPSV: int FTP_USE_PRET: int GSSAPI_DELEGATION: int GSSAPI_DELEGATION_FLAG: int GSSAPI_DELEGATION_NONE: int GSSAPI_DELEGATION_POLICY_FLAG: int HEADER: int HEADERFUNCTION: int HEADEROPT: int 
HEADER_SEPARATE: int HEADER_SIZE: int HEADER_UNIFIED: int HTTP200ALIASES: int HTTPAUTH: int HTTPAUTH_ANY: int HTTPAUTH_ANYSAFE: int HTTPAUTH_AVAIL: int HTTPAUTH_BASIC: int HTTPAUTH_DIGEST: int HTTPAUTH_DIGEST_IE: int HTTPAUTH_GSSNEGOTIATE: int HTTPAUTH_NEGOTIATE: int HTTPAUTH_NONE: int HTTPAUTH_NTLM: int HTTPAUTH_NTLM_WB: int HTTPAUTH_ONLY: int HTTPGET: int HTTPHEADER: int HTTPPOST: int HTTPPROXYTUNNEL: int HTTP_CODE: int HTTP_CONNECTCODE: int HTTP_CONTENT_DECODING: int HTTP_TRANSFER_DECODING: int HTTP_VERSION: int IGNORE_CONTENT_LENGTH: int INFILE: int INFILESIZE: int INFILESIZE_LARGE: int INFOTYPE_DATA_IN: int INFOTYPE_DATA_OUT: int INFOTYPE_HEADER_IN: int INFOTYPE_HEADER_OUT: int INFOTYPE_SSL_DATA_IN: int INFOTYPE_SSL_DATA_OUT: int INFOTYPE_TEXT: int INFO_CERTINFO: int INFO_COOKIELIST: int INFO_FILETIME: int INFO_RTSP_CLIENT_CSEQ: int INFO_RTSP_CSEQ_RECV: int INFO_RTSP_SERVER_CSEQ: int INFO_RTSP_SESSION_ID: int INTERFACE: int IOCMD_NOP: int IOCMD_RESTARTREAD: int IOCTLDATA: int IOCTLFUNCTION: int IOE_FAILRESTART: int IOE_OK: int IOE_UNKNOWNCMD: int IPRESOLVE: int IPRESOLVE_V4: int IPRESOLVE_V6: int IPRESOLVE_WHATEVER: int ISSUERCERT: int KEYPASSWD: int KHMATCH_MISMATCH: int KHMATCH_MISSING: int KHMATCH_OK: int KHSTAT_DEFER: int KHSTAT_FINE: int KHSTAT_FINE_ADD_TO_FILE: int KHSTAT_REJECT: int KHTYPE_DSS: int KHTYPE_RSA: int KHTYPE_RSA1: int KHTYPE_UNKNOWN: int KRB4LEVEL: int KRBLEVEL: int LASTSOCKET: int LOCALPORT: int LOCALPORTRANGE: int LOCAL_IP: int LOCAL_PORT: int LOCK_DATA_COOKIE: int LOCK_DATA_DNS: int LOCK_DATA_SSL_SESSION: int LOGIN_OPTIONS: int LOW_SPEED_LIMIT: int LOW_SPEED_TIME: int MAIL_AUTH: int MAIL_FROM: int MAIL_RCPT: int MAXCONNECTS: int MAXFILESIZE: int MAXFILESIZE_LARGE: int MAXREDIRS: int MAX_RECV_SPEED_LARGE: int MAX_SEND_SPEED_LARGE: int M_CHUNK_LENGTH_PENALTY_SIZE: int M_CONTENT_LENGTH_PENALTY_SIZE: int M_MAXCONNECTS: int M_MAX_HOST_CONNECTIONS: int M_MAX_PIPELINE_LENGTH: int M_MAX_TOTAL_CONNECTIONS: int M_PIPELINING: int 
M_PIPELINING_SERVER_BL: int M_PIPELINING_SITE_BL: int M_SOCKETFUNCTION: int M_TIMERFUNCTION: int NAMELOOKUP_TIME: int NETRC: int NETRC_FILE: int NETRC_IGNORED: int NETRC_OPTIONAL: int NETRC_REQUIRED: int NEW_DIRECTORY_PERMS: int NEW_FILE_PERMS: int NOBODY: int NOPROGRESS: int NOPROXY: int NOSIGNAL: int NUM_CONNECTS: int OPENSOCKETFUNCTION: int OPT_CERTINFO: int OPT_FILETIME: int OS_ERRNO: int PASSWORD: int PATH_AS_IS: int PAUSE_ALL: int PAUSE_CONT: int PAUSE_RECV: int PAUSE_SEND: int PINNEDPUBLICKEY: int PIPEWAIT: int PIPE_HTTP1: int PIPE_MULTIPLEX: int PIPE_NOTHING: int POLL_IN: int POLL_INOUT: int POLL_NONE: int POLL_OUT: int POLL_REMOVE: int PORT: int POST: int POST301: int POSTFIELDS: int POSTFIELDSIZE: int POSTFIELDSIZE_LARGE: int POSTQUOTE: int POSTREDIR: int PREQUOTE: int PRETRANSFER_TIME: int PRIMARY_IP: int PRIMARY_PORT: int PROGRESSFUNCTION: int PROTOCOLS: int PROTO_ALL: int PROTO_DICT: int PROTO_FILE: int PROTO_FTP: int PROTO_FTPS: int PROTO_GOPHER: int PROTO_HTTP: int PROTO_HTTPS: int PROTO_IMAP: int PROTO_IMAPS: int PROTO_LDAP: int PROTO_LDAPS: int PROTO_POP3: int PROTO_POP3S: int PROTO_RTMP: int PROTO_RTMPE: int PROTO_RTMPS: int PROTO_RTMPT: int PROTO_RTMPTE: int PROTO_RTMPTS: int PROTO_RTSP: int PROTO_SCP: int PROTO_SFTP: int PROTO_SMB: int PROTO_SMBS: int PROTO_SMTP: int PROTO_SMTPS: int PROTO_TELNET: int PROTO_TFTP: int PROXY: int PROXYAUTH: int PROXYAUTH_AVAIL: int PROXYHEADER: int PROXYPASSWORD: int PROXYPORT: int PROXYTYPE: int PROXYTYPE_HTTP: int PROXYTYPE_HTTP_1_0: int PROXYTYPE_SOCKS4: int PROXYTYPE_SOCKS4A: int PROXYTYPE_SOCKS5: int PROXYTYPE_SOCKS5_HOSTNAME: int PROXYUSERNAME: int PROXYUSERPWD: int PROXY_SERVICE_NAME: int PROXY_TRANSFER_MODE: int PUT: int QUOTE: int RANDOM_FILE: int RANGE: int READDATA: int READFUNCTION: int READFUNC_ABORT: int READFUNC_PAUSE: int REDIRECT_COUNT: int REDIRECT_TIME: int REDIRECT_URL: int REDIR_POST_301: int REDIR_POST_302: int REDIR_POST_303: int REDIR_POST_ALL: int REDIR_PROTOCOLS: int REFERER: int 
REQUEST_SIZE: int RESOLVE: int RESPONSE_CODE: int RESUME_FROM: int RESUME_FROM_LARGE: int SASL_IR: int SEEKFUNCTION: int SEEKFUNC_CANTSEEK: int SEEKFUNC_FAIL: int SEEKFUNC_OK: int SERVICE_NAME: int SHARE: int SH_SHARE: int SH_UNSHARE: int SIZE_DOWNLOAD: int SIZE_UPLOAD: int SOCKET_TIMEOUT: int SOCKOPTFUNCTION: int SOCKOPT_ALREADY_CONNECTED: int SOCKOPT_ERROR: int SOCKOPT_OK: int SOCKS5_GSSAPI_NEC: int SOCKS5_GSSAPI_SERVICE: int SOCKTYPE_ACCEPT: int SOCKTYPE_IPCXN: int SPEED_DOWNLOAD: int SPEED_UPLOAD: int SSH_AUTH_ANY: int SSH_AUTH_DEFAULT: int SSH_AUTH_HOST: int SSH_AUTH_KEYBOARD: int SSH_AUTH_NONE: int SSH_AUTH_PASSWORD: int SSH_AUTH_PUBLICKEY: int SSH_AUTH_TYPES: int SSH_HOST_PUBLIC_KEY_MD5: int SSH_KEYFUNCTION: int SSH_KNOWNHOSTS: int SSH_PRIVATE_KEYFILE: int SSH_PUBLIC_KEYFILE: int SSLCERT: int SSLCERTPASSWD: int SSLCERTTYPE: int SSLENGINE: int SSLENGINE_DEFAULT: int SSLKEY: int SSLKEYPASSWD: int SSLKEYTYPE: int SSLOPT_ALLOW_BEAST: int SSLVERSION: int SSLVERSION_DEFAULT: int SSLVERSION_SSLv2: int SSLVERSION_SSLv3: int SSLVERSION_TLSv1: int SSLVERSION_TLSv1_0: int SSLVERSION_TLSv1_1: int SSLVERSION_TLSv1_2: int SSL_CIPHER_LIST: int SSL_ENABLE_ALPN: int SSL_ENABLE_NPN: int SSL_ENGINES: int SSL_FALSESTART: int SSL_OPTIONS: int SSL_SESSIONID_CACHE: int SSL_VERIFYHOST: int SSL_VERIFYPEER: int SSL_VERIFYRESULT: int SSL_VERIFYSTATUS: int STARTTRANSFER_TIME: int STDERR: int TCP_KEEPALIVE: int TCP_KEEPIDLE: int TCP_KEEPINTVL: int TCP_NODELAY: int TELNETOPTIONS: int TFTP_BLKSIZE: int TIMECONDITION: int TIMECONDITION_IFMODSINCE: int TIMECONDITION_IFUNMODSINCE: int TIMECONDITION_LASTMOD: int TIMECONDITION_NONE: int TIMEOUT: int TIMEOUT_MS: int TIMEVALUE: int TLSAUTH_PASSWORD: int TLSAUTH_TYPE: int TLSAUTH_USERNAME: int TOTAL_TIME: int TRANSFERTEXT: int TRANSFER_ENCODING: int UNIX_SOCKET_PATH: int UNRESTRICTED_AUTH: int UPLOAD: int URL: int USERAGENT: int USERNAME: int USERPWD: int USESSL_ALL: int USESSL_CONTROL: int USESSL_NONE: int USESSL_TRY: int USE_SSL: int VERBOSE: 
int VERSION_ASYNCHDNS: int VERSION_CONV: int VERSION_CURLDEBUG: int VERSION_DEBUG: int VERSION_GSSAPI: int VERSION_GSSNEGOTIATE: int VERSION_HTTP2: int VERSION_IDN: int VERSION_IPV6: int VERSION_KERBEROS4: int VERSION_KERBEROS5: int VERSION_LARGEFILE: int VERSION_LIBZ: int VERSION_NTLM: int VERSION_NTLM_WB: int VERSION_SPNEGO: int VERSION_SSL: int VERSION_SSPI: int VERSION_TLSAUTH_SRP: int VERSION_UNIX_SOCKETS: int WILDCARDMATCH: int WRITEDATA: int WRITEFUNCTION: int WRITEFUNC_PAUSE: int WRITEHEADER: int XFERINFOFUNCTION: int XOAUTH2_BEARER: int E_ABORTED_BY_CALLBACK: int E_AGAIN: int E_ALREADY_COMPLETE: int E_BAD_CALLING_ORDER: int E_BAD_CONTENT_ENCODING: int E_BAD_DOWNLOAD_RESUME: int E_BAD_FUNCTION_ARGUMENT: int E_BAD_PASSWORD_ENTERED: int E_CALL_MULTI_PERFORM: int E_CHUNK_FAILED: int E_CONV_FAILED: int E_CONV_REQD: int E_COULDNT_CONNECT: int E_COULDNT_RESOLVE_HOST: int E_COULDNT_RESOLVE_PROXY: int E_FAILED_INIT: int E_FILESIZE_EXCEEDED: int E_FILE_COULDNT_READ_FILE: int E_FTP_ACCEPT_FAILED: int E_FTP_ACCEPT_TIMEOUT: int E_FTP_ACCESS_DENIED: int E_FTP_BAD_DOWNLOAD_RESUME: int E_FTP_BAD_FILE_LIST: int E_FTP_CANT_GET_HOST: int E_FTP_CANT_RECONNECT: int E_FTP_COULDNT_GET_SIZE: int E_FTP_COULDNT_RETR_FILE: int E_FTP_COULDNT_SET_ASCII: int E_FTP_COULDNT_SET_BINARY: int E_FTP_COULDNT_SET_TYPE: int E_FTP_COULDNT_STOR_FILE: int E_FTP_COULDNT_USE_REST: int E_FTP_PARTIAL_FILE: int E_FTP_PORT_FAILED: int E_FTP_PRET_FAILED: int E_FTP_QUOTE_ERROR: int E_FTP_SSL_FAILED: int E_FTP_USER_PASSWORD_INCORRECT: int E_FTP_WEIRD_227_FORMAT: int E_FTP_WEIRD_PASS_REPLY: int E_FTP_WEIRD_PASV_REPLY: int E_FTP_WEIRD_SERVER_REPLY: int E_FTP_WEIRD_USER_REPLY: int E_FTP_WRITE_ERROR: int E_FUNCTION_NOT_FOUND: int E_GOT_NOTHING: int E_HTTP2: int E_HTTP_NOT_FOUND: int E_HTTP_PORT_FAILED: int E_HTTP_POST_ERROR: int E_HTTP_RANGE_ERROR: int E_HTTP_RETURNED_ERROR: int E_INTERFACE_FAILED: int E_LDAP_CANNOT_BIND: int E_LDAP_INVALID_URL: int E_LDAP_SEARCH_FAILED: int E_LIBRARY_NOT_FOUND: int 
E_LOGIN_DENIED: int E_MALFORMAT_USER: int E_MULTI_ADDED_ALREADY: int E_MULTI_BAD_EASY_HANDLE: int E_MULTI_BAD_HANDLE: int E_MULTI_BAD_SOCKET: int E_MULTI_CALL_MULTI_PERFORM: int E_MULTI_CALL_MULTI_SOCKET: int E_MULTI_INTERNAL_ERROR: int E_MULTI_OK: int E_MULTI_OUT_OF_MEMORY: int E_MULTI_UNKNOWN_OPTION: int E_NOT_BUILT_IN: int E_NO_CONNECTION_AVAILABLE: int E_OK: int E_OPERATION_TIMEDOUT: int E_OPERATION_TIMEOUTED: int E_OUT_OF_MEMORY: int E_PARTIAL_FILE: int E_PEER_FAILED_VERIFICATION: int E_QUOTE_ERROR: int E_RANGE_ERROR: int E_READ_ERROR: int E_RECV_ERROR: int E_REMOTE_ACCESS_DENIED: int E_REMOTE_DISK_FULL: int E_REMOTE_FILE_EXISTS: int E_REMOTE_FILE_NOT_FOUND: int E_RTSP_CSEQ_ERROR: int E_RTSP_SESSION_ERROR: int E_SEND_ERROR: int E_SEND_FAIL_REWIND: int E_SHARE_IN_USE: int E_SSH: int E_SSL_CACERT: int E_SSL_CACERT_BADFILE: int E_SSL_CERTPROBLEM: int E_SSL_CIPHER: int E_SSL_CONNECT_ERROR: int E_SSL_CRL_BADFILE: int E_SSL_ENGINE_INITFAILED: int E_SSL_ENGINE_NOTFOUND: int E_SSL_ENGINE_SETFAILED: int E_SSL_INVALIDCERTSTATUS: int E_SSL_ISSUER_ERROR: int E_SSL_PEER_CERTIFICATE: int E_SSL_PINNEDPUBKEYNOTMATCH: int E_SSL_SHUTDOWN_FAILED: int E_TELNET_OPTION_SYNTAX: int E_TFTP_DISKFULL: int E_TFTP_EXISTS: int E_TFTP_ILLEGAL: int E_TFTP_NOSUCHUSER: int E_TFTP_NOTFOUND: int E_TFTP_PERM: int E_TFTP_UNKNOWNID: int E_TOO_MANY_REDIRECTS: int E_UNKNOWN_OPTION: int E_UNKNOWN_TELNET_OPTION: int E_UNSUPPORTED_PROTOCOL: int E_UPLOAD_FAILED: int E_URL_MALFORMAT: int E_URL_MALFORMAT_USER: int E_USE_SSL_FAILED: int E_WRITE_ERROR: int mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/0000755€tŠÔÚ€2›s®0000000000013576752267026524 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/__init__.pyi0000644€tŠÔÚ€2›s®0000000313613576752252031003 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Union, Tuple, Callable, FrozenSet from .connections import Connection as _Connection from .constants import FIELD_TYPE as 
FIELD_TYPE from .converters import escape_dict as escape_dict, escape_sequence as escape_sequence, escape_string as escape_string from .err import ( Warning as Warning, Error as Error, InterfaceError as InterfaceError, DataError as DataError, DatabaseError as DatabaseError, OperationalError as OperationalError, IntegrityError as IntegrityError, InternalError as InternalError, NotSupportedError as NotSupportedError, ProgrammingError as ProgrammingError, MySQLError as MySQLError, ) from .times import ( Date as Date, Time as Time, Timestamp as Timestamp, DateFromTicks as DateFromTicks, TimeFromTicks as TimeFromTicks, TimestampFromTicks as TimestampFromTicks, ) threadsafety: int apilevel: str paramstyle: str class DBAPISet(FrozenSet[int]): def __ne__(self, other) -> bool: ... def __eq__(self, other) -> bool: ... def __hash__(self) -> int: ... STRING: DBAPISet BINARY: DBAPISet NUMBER: DBAPISet DATE: DBAPISet TIME: DBAPISet TIMESTAMP: DBAPISet DATETIME: DBAPISet ROWID: DBAPISet if sys.version_info >= (3, 0): def Binary(x) -> bytes: ... else: def Binary(x) -> bytearray: ... def Connect(*args, **kwargs) -> _Connection: ... def get_client_info() -> str: ... connect: Callable[..., _Connection] Connection: Callable[..., _Connection] __version__: str version_info: Tuple[int, int, int, str, int] NULL: str def thread_safe() -> bool: ... def install_as_MySQLdb() -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/charset.pyi0000644€tŠÔÚ€2›s®0000000050713576752252030674 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any MBLENGTH: Any class Charset: is_default: Any def __init__(self, id, name, collation, is_default): ... class Charsets: def __init__(self): ... def add(self, c): ... def by_id(self, id): ... def by_name(self, name): ... def charset_by_name(name): ... def charset_by_id(id): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/connections.pyi0000644€tŠÔÚ€2›s®0000001140513576752252031564 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Type from .charset import MBLENGTH as MBLENGTH, charset_by_name as charset_by_name, charset_by_id as charset_by_id from .cursors import Cursor as Cursor from .constants import FIELD_TYPE as FIELD_TYPE, FLAG as FLAG from .constants import SERVER_STATUS as SERVER_STATUS from .constants import CLIENT as CLIENT from .constants import COMMAND as COMMAND from .util import join_bytes as join_bytes, byte2int as byte2int, int2byte as int2byte from .converters import escape_item as escape_item, encoders as encoders, decoders as decoders from .err import raise_mysql_exception as raise_mysql_exception, Warning as Warning, Error as Error, InterfaceError as InterfaceError, DataError as DataError, DatabaseError as DatabaseError, OperationalError as OperationalError, IntegrityError as IntegrityError, InternalError as InternalError, NotSupportedError as NotSupportedError, ProgrammingError as ProgrammingError sha_new: Any SSL_ENABLED: Any DEFAULT_USER: Any DEBUG: Any NULL_COLUMN: Any UNSIGNED_CHAR_COLUMN: Any UNSIGNED_SHORT_COLUMN: Any UNSIGNED_INT24_COLUMN: Any UNSIGNED_INT64_COLUMN: Any UNSIGNED_CHAR_LENGTH: Any UNSIGNED_SHORT_LENGTH: Any UNSIGNED_INT24_LENGTH: Any UNSIGNED_INT64_LENGTH: Any DEFAULT_CHARSET: Any def dump_packet(data): ... SCRAMBLE_LENGTH_323: Any class RandStruct_323: max_value: Any seed1: Any seed2: Any def __init__(self, seed1, seed2): ... def my_rnd(self): ... def pack_int24(n): ... def unpack_uint16(n): ... def unpack_int24(n): ... def unpack_int32(n): ... def unpack_int64(n): ... def defaulterrorhandler(connection, cursor, errorclass, errorvalue): ... class MysqlPacket: connection: Any def __init__(self, connection): ... def packet_number(self): ... def get_all_data(self): ... def read(self, size): ... def read_all(self): ... def advance(self, length): ... 
def rewind(self, position: int = ...): ... def peek(self, size): ... def get_bytes(self, position, length: int = ...): ... def read_length_coded_binary(self): ... def read_length_coded_string(self): ... def is_ok_packet(self): ... def is_eof_packet(self): ... def is_resultset_packet(self): ... def is_error_packet(self): ... def check_error(self): ... def dump(self): ... class FieldDescriptorPacket(MysqlPacket): def __init__(self, *args): ... def description(self): ... def get_column_length(self): ... class Connection: errorhandler: Any ssl: Any host: Any port: Any user: Any password: Any db: Any unix_socket: Any charset: Any use_unicode: Any client_flag: Any cursorclass: Any connect_timeout: Any messages: Any encoders: Any decoders: Any host_info: Any def __init__(self, host: str = ..., user: Optional[Any] = ..., passwd: str = ..., db: Optional[Any] = ..., port: int = ..., unix_socket: Optional[Any] = ..., charset: str = ..., sql_mode: Optional[Any] = ..., read_default_file: Optional[Any] = ..., conv=..., use_unicode: Optional[Any] = ..., client_flag: int = ..., cursorclass=..., init_command: Optional[Any] = ..., connect_timeout: Optional[Any] = ..., ssl: Optional[Any] = ..., read_default_group: Optional[Any] = ..., compress: Optional[Any] = ..., named_pipe: Optional[Any] = ...): ... socket: Any rfile: Any wfile: Any def close(self) -> None: ... def autocommit(self, value): ... def commit(self): ... def begin(self) -> None: ... def rollback(self): ... def escape(self, obj): ... def literal(self, obj): ... def cursor(self, cursor: Optional[Type[Cursor]] = ...) -> Cursor: ... def __enter__(self): ... def __exit__(self, exc, value, traceback): ... def query(self, sql): ... def next_result(self, unbuffered: bool = ...): ... def affected_rows(self): ... def kill(self, thread_id): ... def ping(self, reconnect: bool = ...): ... def set_charset(self, charset): ... def read_packet(self, packet_type=...): ... def insert_id(self): ... def thread_id(self): ... 
def character_set_name(self): ... def get_host_info(self): ... def get_proto_info(self): ... def get_server_info(self): ... def show_warnings(self): ... Warning: Any Error: Any InterfaceError: Any DatabaseError: Any DataError: Any OperationalError: Any IntegrityError: Any InternalError: Any ProgrammingError: Any NotSupportedError: Any class MySQLResult: connection: Any affected_rows: Any insert_id: Any server_status: Any warning_count: Any message: Any field_count: Any description: Any rows: Any has_next: Any def __init__(self, connection): ... first_packet: Any def read(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/constants/0000755€tŠÔÚ€2›s®0000000000013576752267030540 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/constants/CLIENT.pyi0000644€tŠÔÚ€2›s®0000000046413576752252032237 0ustar jukkaDROPBOX\Domain Users00000000000000LONG_PASSWORD: int FOUND_ROWS: int LONG_FLAG: int CONNECT_WITH_DB: int NO_SCHEMA: int COMPRESS: int ODBC: int LOCAL_FILES: int IGNORE_SPACE: int PROTOCOL_41: int INTERACTIVE: int SSL: int IGNORE_SIGPIPE: int TRANSACTIONS: int SECURE_CONNECTION: int MULTI_STATEMENTS: int MULTI_RESULTS: int CAPABILITIES: int mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/constants/COMMAND.pyi0000644€tŠÔÚ€2›s®0000000062713576752252032340 0ustar jukkaDROPBOX\Domain Users00000000000000COM_SLEEP: int COM_QUIT: int COM_INIT_DB: int COM_QUERY: int COM_FIELD_LIST: int COM_CREATE_DB: int COM_DROP_DB: int COM_REFRESH: int COM_SHUTDOWN: int COM_STATISTICS: int COM_PROCESS_INFO: int COM_CONNECT: int COM_PROCESS_KILL: int COM_DEBUG: int COM_PING: int COM_TIME: int COM_DELAYED_INSERT: int COM_CHANGE_USER: int COM_BINLOG_DUMP: int COM_TABLE_DUMP: int COM_CONNECT_OUT: int COM_REGISTER_SLAVE: int mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/constants/ER.pyi0000644€tŠÔÚ€2›s®0000002602013576752252031563 0ustar jukkaDROPBOX\Domain Users00000000000000ERROR_FIRST: int HASHCHK: int NISAMCHK: int NO: int YES: 
int CANT_CREATE_FILE: int CANT_CREATE_TABLE: int CANT_CREATE_DB: int DB_CREATE_EXISTS: int DB_DROP_EXISTS: int DB_DROP_DELETE: int DB_DROP_RMDIR: int CANT_DELETE_FILE: int CANT_FIND_SYSTEM_REC: int CANT_GET_STAT: int CANT_GET_WD: int CANT_LOCK: int CANT_OPEN_FILE: int FILE_NOT_FOUND: int CANT_READ_DIR: int CANT_SET_WD: int CHECKREAD: int DISK_FULL: int DUP_KEY: int ERROR_ON_CLOSE: int ERROR_ON_READ: int ERROR_ON_RENAME: int ERROR_ON_WRITE: int FILE_USED: int FILSORT_ABORT: int FORM_NOT_FOUND: int GET_ERRNO: int ILLEGAL_HA: int KEY_NOT_FOUND: int NOT_FORM_FILE: int NOT_KEYFILE: int OLD_KEYFILE: int OPEN_AS_READONLY: int OUTOFMEMORY: int OUT_OF_SORTMEMORY: int UNEXPECTED_EOF: int CON_COUNT_ERROR: int OUT_OF_RESOURCES: int BAD_HOST_ERROR: int HANDSHAKE_ERROR: int DBACCESS_DENIED_ERROR: int ACCESS_DENIED_ERROR: int NO_DB_ERROR: int UNKNOWN_COM_ERROR: int BAD_NULL_ERROR: int BAD_DB_ERROR: int TABLE_EXISTS_ERROR: int BAD_TABLE_ERROR: int NON_UNIQ_ERROR: int SERVER_SHUTDOWN: int BAD_FIELD_ERROR: int WRONG_FIELD_WITH_GROUP: int WRONG_GROUP_FIELD: int WRONG_SUM_SELECT: int WRONG_VALUE_COUNT: int TOO_LONG_IDENT: int DUP_FIELDNAME: int DUP_KEYNAME: int DUP_ENTRY: int WRONG_FIELD_SPEC: int PARSE_ERROR: int EMPTY_QUERY: int NONUNIQ_TABLE: int INVALID_DEFAULT: int MULTIPLE_PRI_KEY: int TOO_MANY_KEYS: int TOO_MANY_KEY_PARTS: int TOO_LONG_KEY: int KEY_COLUMN_DOES_NOT_EXITS: int BLOB_USED_AS_KEY: int TOO_BIG_FIELDLENGTH: int WRONG_AUTO_KEY: int READY: int NORMAL_SHUTDOWN: int GOT_SIGNAL: int SHUTDOWN_COMPLETE: int FORCING_CLOSE: int IPSOCK_ERROR: int NO_SUCH_INDEX: int WRONG_FIELD_TERMINATORS: int BLOBS_AND_NO_TERMINATED: int TEXTFILE_NOT_READABLE: int FILE_EXISTS_ERROR: int LOAD_INFO: int ALTER_INFO: int WRONG_SUB_KEY: int CANT_REMOVE_ALL_FIELDS: int CANT_DROP_FIELD_OR_KEY: int INSERT_INFO: int UPDATE_TABLE_USED: int NO_SUCH_THREAD: int KILL_DENIED_ERROR: int NO_TABLES_USED: int TOO_BIG_SET: int NO_UNIQUE_LOGFILE: int TABLE_NOT_LOCKED_FOR_WRITE: int TABLE_NOT_LOCKED: int 
BLOB_CANT_HAVE_DEFAULT: int WRONG_DB_NAME: int WRONG_TABLE_NAME: int TOO_BIG_SELECT: int UNKNOWN_ERROR: int UNKNOWN_PROCEDURE: int WRONG_PARAMCOUNT_TO_PROCEDURE: int WRONG_PARAMETERS_TO_PROCEDURE: int UNKNOWN_TABLE: int FIELD_SPECIFIED_TWICE: int INVALID_GROUP_FUNC_USE: int UNSUPPORTED_EXTENSION: int TABLE_MUST_HAVE_COLUMNS: int RECORD_FILE_FULL: int UNKNOWN_CHARACTER_SET: int TOO_MANY_TABLES: int TOO_MANY_FIELDS: int TOO_BIG_ROWSIZE: int STACK_OVERRUN: int WRONG_OUTER_JOIN: int NULL_COLUMN_IN_INDEX: int CANT_FIND_UDF: int CANT_INITIALIZE_UDF: int UDF_NO_PATHS: int UDF_EXISTS: int CANT_OPEN_LIBRARY: int CANT_FIND_DL_ENTRY: int FUNCTION_NOT_DEFINED: int HOST_IS_BLOCKED: int HOST_NOT_PRIVILEGED: int PASSWORD_ANONYMOUS_USER: int PASSWORD_NOT_ALLOWED: int PASSWORD_NO_MATCH: int UPDATE_INFO: int CANT_CREATE_THREAD: int WRONG_VALUE_COUNT_ON_ROW: int CANT_REOPEN_TABLE: int INVALID_USE_OF_NULL: int REGEXP_ERROR: int MIX_OF_GROUP_FUNC_AND_FIELDS: int NONEXISTING_GRANT: int TABLEACCESS_DENIED_ERROR: int COLUMNACCESS_DENIED_ERROR: int ILLEGAL_GRANT_FOR_TABLE: int GRANT_WRONG_HOST_OR_USER: int NO_SUCH_TABLE: int NONEXISTING_TABLE_GRANT: int NOT_ALLOWED_COMMAND: int SYNTAX_ERROR: int DELAYED_CANT_CHANGE_LOCK: int TOO_MANY_DELAYED_THREADS: int ABORTING_CONNECTION: int NET_PACKET_TOO_LARGE: int NET_READ_ERROR_FROM_PIPE: int NET_FCNTL_ERROR: int NET_PACKETS_OUT_OF_ORDER: int NET_UNCOMPRESS_ERROR: int NET_READ_ERROR: int NET_READ_INTERRUPTED: int NET_ERROR_ON_WRITE: int NET_WRITE_INTERRUPTED: int TOO_LONG_STRING: int TABLE_CANT_HANDLE_BLOB: int TABLE_CANT_HANDLE_AUTO_INCREMENT: int DELAYED_INSERT_TABLE_LOCKED: int WRONG_COLUMN_NAME: int WRONG_KEY_COLUMN: int WRONG_MRG_TABLE: int DUP_UNIQUE: int BLOB_KEY_WITHOUT_LENGTH: int PRIMARY_CANT_HAVE_NULL: int TOO_MANY_ROWS: int REQUIRES_PRIMARY_KEY: int NO_RAID_COMPILED: int UPDATE_WITHOUT_KEY_IN_SAFE_MODE: int KEY_DOES_NOT_EXITS: int CHECK_NO_SUCH_TABLE: int CHECK_NOT_IMPLEMENTED: int CANT_DO_THIS_DURING_AN_TRANSACTION: int 
ERROR_DURING_COMMIT: int ERROR_DURING_ROLLBACK: int ERROR_DURING_FLUSH_LOGS: int ERROR_DURING_CHECKPOINT: int NEW_ABORTING_CONNECTION: int DUMP_NOT_IMPLEMENTED: int FLUSH_MASTER_BINLOG_CLOSED: int INDEX_REBUILD: int MASTER: int MASTER_NET_READ: int MASTER_NET_WRITE: int FT_MATCHING_KEY_NOT_FOUND: int LOCK_OR_ACTIVE_TRANSACTION: int UNKNOWN_SYSTEM_VARIABLE: int CRASHED_ON_USAGE: int CRASHED_ON_REPAIR: int WARNING_NOT_COMPLETE_ROLLBACK: int TRANS_CACHE_FULL: int SLAVE_MUST_STOP: int SLAVE_NOT_RUNNING: int BAD_SLAVE: int MASTER_INFO: int SLAVE_THREAD: int TOO_MANY_USER_CONNECTIONS: int SET_CONSTANTS_ONLY: int LOCK_WAIT_TIMEOUT: int LOCK_TABLE_FULL: int READ_ONLY_TRANSACTION: int DROP_DB_WITH_READ_LOCK: int CREATE_DB_WITH_READ_LOCK: int WRONG_ARGUMENTS: int NO_PERMISSION_TO_CREATE_USER: int UNION_TABLES_IN_DIFFERENT_DIR: int LOCK_DEADLOCK: int TABLE_CANT_HANDLE_FT: int CANNOT_ADD_FOREIGN: int NO_REFERENCED_ROW: int ROW_IS_REFERENCED: int CONNECT_TO_MASTER: int QUERY_ON_MASTER: int ERROR_WHEN_EXECUTING_COMMAND: int WRONG_USAGE: int WRONG_NUMBER_OF_COLUMNS_IN_SELECT: int CANT_UPDATE_WITH_READLOCK: int MIXING_NOT_ALLOWED: int DUP_ARGUMENT: int USER_LIMIT_REACHED: int SPECIFIC_ACCESS_DENIED_ERROR: int LOCAL_VARIABLE: int GLOBAL_VARIABLE: int NO_DEFAULT: int WRONG_VALUE_FOR_VAR: int WRONG_TYPE_FOR_VAR: int VAR_CANT_BE_READ: int CANT_USE_OPTION_HERE: int NOT_SUPPORTED_YET: int MASTER_FATAL_ERROR_READING_BINLOG: int SLAVE_IGNORED_TABLE: int INCORRECT_GLOBAL_LOCAL_VAR: int WRONG_FK_DEF: int KEY_REF_DO_NOT_MATCH_TABLE_REF: int OPERAND_COLUMNS: int SUBQUERY_NO_1_ROW: int UNKNOWN_STMT_HANDLER: int CORRUPT_HELP_DB: int CYCLIC_REFERENCE: int AUTO_CONVERT: int ILLEGAL_REFERENCE: int DERIVED_MUST_HAVE_ALIAS: int SELECT_REDUCED: int TABLENAME_NOT_ALLOWED_HERE: int NOT_SUPPORTED_AUTH_MODE: int SPATIAL_CANT_HAVE_NULL: int COLLATION_CHARSET_MISMATCH: int SLAVE_WAS_RUNNING: int SLAVE_WAS_NOT_RUNNING: int TOO_BIG_FOR_UNCOMPRESS: int ZLIB_Z_MEM_ERROR: int ZLIB_Z_BUF_ERROR: int 
ZLIB_Z_DATA_ERROR: int CUT_VALUE_GROUP_CONCAT: int WARN_TOO_FEW_RECORDS: int WARN_TOO_MANY_RECORDS: int WARN_NULL_TO_NOTNULL: int WARN_DATA_OUT_OF_RANGE: int WARN_DATA_TRUNCATED: int WARN_USING_OTHER_HANDLER: int CANT_AGGREGATE_2COLLATIONS: int DROP_USER: int REVOKE_GRANTS: int CANT_AGGREGATE_3COLLATIONS: int CANT_AGGREGATE_NCOLLATIONS: int VARIABLE_IS_NOT_STRUCT: int UNKNOWN_COLLATION: int SLAVE_IGNORED_SSL_PARAMS: int SERVER_IS_IN_SECURE_AUTH_MODE: int WARN_FIELD_RESOLVED: int BAD_SLAVE_UNTIL_COND: int MISSING_SKIP_SLAVE: int UNTIL_COND_IGNORED: int WRONG_NAME_FOR_INDEX: int WRONG_NAME_FOR_CATALOG: int WARN_QC_RESIZE: int BAD_FT_COLUMN: int UNKNOWN_KEY_CACHE: int WARN_HOSTNAME_WONT_WORK: int UNKNOWN_STORAGE_ENGINE: int WARN_DEPRECATED_SYNTAX: int NON_UPDATABLE_TABLE: int FEATURE_DISABLED: int OPTION_PREVENTS_STATEMENT: int DUPLICATED_VALUE_IN_TYPE: int TRUNCATED_WRONG_VALUE: int TOO_MUCH_AUTO_TIMESTAMP_COLS: int INVALID_ON_UPDATE: int UNSUPPORTED_PS: int GET_ERRMSG: int GET_TEMPORARY_ERRMSG: int UNKNOWN_TIME_ZONE: int WARN_INVALID_TIMESTAMP: int INVALID_CHARACTER_STRING: int WARN_ALLOWED_PACKET_OVERFLOWED: int CONFLICTING_DECLARATIONS: int SP_NO_RECURSIVE_CREATE: int SP_ALREADY_EXISTS: int SP_DOES_NOT_EXIST: int SP_DROP_FAILED: int SP_STORE_FAILED: int SP_LILABEL_MISMATCH: int SP_LABEL_REDEFINE: int SP_LABEL_MISMATCH: int SP_UNINIT_VAR: int SP_BADSELECT: int SP_BADRETURN: int SP_BADSTATEMENT: int UPDATE_LOG_DEPRECATED_IGNORED: int UPDATE_LOG_DEPRECATED_TRANSLATED: int QUERY_INTERRUPTED: int SP_WRONG_NO_OF_ARGS: int SP_COND_MISMATCH: int SP_NORETURN: int SP_NORETURNEND: int SP_BAD_CURSOR_QUERY: int SP_BAD_CURSOR_SELECT: int SP_CURSOR_MISMATCH: int SP_CURSOR_ALREADY_OPEN: int SP_CURSOR_NOT_OPEN: int SP_UNDECLARED_VAR: int SP_WRONG_NO_OF_FETCH_ARGS: int SP_FETCH_NO_DATA: int SP_DUP_PARAM: int SP_DUP_VAR: int SP_DUP_COND: int SP_DUP_CURS: int SP_CANT_ALTER: int SP_SUBSELECT_NYI: int STMT_NOT_ALLOWED_IN_SF_OR_TRG: int SP_VARCOND_AFTER_CURSHNDLR: int 
SP_CURSOR_AFTER_HANDLER: int SP_CASE_NOT_FOUND: int FPARSER_TOO_BIG_FILE: int FPARSER_BAD_HEADER: int FPARSER_EOF_IN_COMMENT: int FPARSER_ERROR_IN_PARAMETER: int FPARSER_EOF_IN_UNKNOWN_PARAMETER: int VIEW_NO_EXPLAIN: int FRM_UNKNOWN_TYPE: int WRONG_OBJECT: int NONUPDATEABLE_COLUMN: int VIEW_SELECT_DERIVED: int VIEW_SELECT_CLAUSE: int VIEW_SELECT_VARIABLE: int VIEW_SELECT_TMPTABLE: int VIEW_WRONG_LIST: int WARN_VIEW_MERGE: int WARN_VIEW_WITHOUT_KEY: int VIEW_INVALID: int SP_NO_DROP_SP: int SP_GOTO_IN_HNDLR: int TRG_ALREADY_EXISTS: int TRG_DOES_NOT_EXIST: int TRG_ON_VIEW_OR_TEMP_TABLE: int TRG_CANT_CHANGE_ROW: int TRG_NO_SUCH_ROW_IN_TRG: int NO_DEFAULT_FOR_FIELD: int DIVISION_BY_ZERO: int TRUNCATED_WRONG_VALUE_FOR_FIELD: int ILLEGAL_VALUE_FOR_TYPE: int VIEW_NONUPD_CHECK: int VIEW_CHECK_FAILED: int PROCACCESS_DENIED_ERROR: int RELAY_LOG_FAIL: int PASSWD_LENGTH: int UNKNOWN_TARGET_BINLOG: int IO_ERR_LOG_INDEX_READ: int BINLOG_PURGE_PROHIBITED: int FSEEK_FAIL: int BINLOG_PURGE_FATAL_ERR: int LOG_IN_USE: int LOG_PURGE_UNKNOWN_ERR: int RELAY_LOG_INIT: int NO_BINARY_LOGGING: int RESERVED_SYNTAX: int WSAS_FAILED: int DIFF_GROUPS_PROC: int NO_GROUP_FOR_PROC: int ORDER_WITH_PROC: int LOGGING_PROHIBIT_CHANGING_OF: int NO_FILE_MAPPING: int WRONG_MAGIC: int PS_MANY_PARAM: int KEY_PART_0: int VIEW_CHECKSUM: int VIEW_MULTIUPDATE: int VIEW_NO_INSERT_FIELD_LIST: int VIEW_DELETE_MERGE_VIEW: int CANNOT_USER: int XAER_NOTA: int XAER_INVAL: int XAER_RMFAIL: int XAER_OUTSIDE: int XAER_RMERR: int XA_RBROLLBACK: int NONEXISTING_PROC_GRANT: int PROC_AUTO_GRANT_FAIL: int PROC_AUTO_REVOKE_FAIL: int DATA_TOO_LONG: int SP_BAD_SQLSTATE: int STARTUP: int LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR: int CANT_CREATE_USER_WITH_GRANT: int WRONG_VALUE_FOR_TYPE: int TABLE_DEF_CHANGED: int SP_DUP_HANDLER: int SP_NOT_VAR_ARG: int SP_NO_RETSET: int CANT_CREATE_GEOMETRY_OBJECT: int FAILED_ROUTINE_BREAK_BINLOG: int BINLOG_UNSAFE_ROUTINE: int BINLOG_CREATE_ROUTINE_NEED_SUPER: int EXEC_STMT_WITH_OPEN_CURSOR: int 
STMT_HAS_NO_OPEN_CURSOR: int COMMIT_NOT_ALLOWED_IN_SF_OR_TRG: int NO_DEFAULT_FOR_VIEW_FIELD: int SP_NO_RECURSION: int TOO_BIG_SCALE: int TOO_BIG_PRECISION: int M_BIGGER_THAN_D: int WRONG_LOCK_OF_SYSTEM_TABLE: int CONNECT_TO_FOREIGN_DATA_SOURCE: int QUERY_ON_FOREIGN_DATA_SOURCE: int FOREIGN_DATA_SOURCE_DOESNT_EXIST: int FOREIGN_DATA_STRING_INVALID_CANT_CREATE: int FOREIGN_DATA_STRING_INVALID: int CANT_CREATE_FEDERATED_TABLE: int TRG_IN_WRONG_SCHEMA: int STACK_OVERRUN_NEED_MORE: int TOO_LONG_BODY: int WARN_CANT_DROP_DEFAULT_KEYCACHE: int TOO_BIG_DISPLAYWIDTH: int XAER_DUPID: int DATETIME_FUNCTION_OVERFLOW: int CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG: int VIEW_PREVENT_UPDATE: int PS_NO_RECURSION: int SP_CANT_SET_AUTOCOMMIT: int MALFORMED_DEFINER: int VIEW_FRM_NO_USER: int VIEW_OTHER_USER: int NO_SUCH_USER: int FORBID_SCHEMA_CHANGE: int ROW_IS_REFERENCED_2: int NO_REFERENCED_ROW_2: int SP_BAD_VAR_SHADOW: int TRG_NO_DEFINER: int OLD_FILE_FORMAT: int SP_RECURSION_LIMIT: int SP_PROC_TABLE_CORRUPT: int SP_WRONG_NAME: int TABLE_NEEDS_UPGRADE: int SP_NO_AGGREGATE: int MAX_PREPARED_STMT_COUNT_REACHED: int VIEW_RECURSIVE: int NON_GROUPING_FIELD_USED: int TABLE_CANT_HANDLE_SPKEYS: int NO_TRIGGERS_ON_SYSTEM_SCHEMA: int USERNAME: int HOSTNAME: int WRONG_STRING_LENGTH: int ERROR_LAST: int mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/constants/FIELD_TYPE.pyi0000644€tŠÔÚ€2›s®0000000054213576752252032742 0ustar jukkaDROPBOX\Domain Users00000000000000DECIMAL: int TINY: int SHORT: int LONG: int FLOAT: int DOUBLE: int NULL: int TIMESTAMP: int LONGLONG: int INT24: int DATE: int TIME: int DATETIME: int YEAR: int NEWDATE: int VARCHAR: int BIT: int NEWDECIMAL: int ENUM: int SET: int TINY_BLOB: int MEDIUM_BLOB: int LONG_BLOB: int BLOB: int VAR_STRING: int STRING: int GEOMETRY: int CHAR: int INTERVAL: int mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/constants/FLAG.pyi0000644€tŠÔÚ€2›s®0000000034213576752252031765 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any 
NOT_NULL: Any PRI_KEY: Any UNIQUE_KEY: Any MULTIPLE_KEY: Any BLOB: Any UNSIGNED: Any ZEROFILL: Any BINARY: Any ENUM: Any AUTO_INCREMENT: Any TIMESTAMP: Any SET: Any PART_KEY: Any GROUP: Any UNIQUE: Any mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/constants/SERVER_STATUS.pyi0000644€tŠÔÚ€2›s®0000000051313576752252033365 0ustar jukkaDROPBOX\Domain Users00000000000000SERVER_STATUS_IN_TRANS: int SERVER_STATUS_AUTOCOMMIT: int SERVER_MORE_RESULTS_EXISTS: int SERVER_QUERY_NO_GOOD_INDEX_USED: int SERVER_QUERY_NO_INDEX_USED: int SERVER_STATUS_CURSOR_EXISTS: int SERVER_STATUS_LAST_ROW_SENT: int SERVER_STATUS_DB_DROPPED: int SERVER_STATUS_NO_BACKSLASH_ESCAPES: int SERVER_STATUS_METADATA_CHANGED: int mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/constants/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252033002 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/converters.pyi0000644€tŠÔÚ€2›s®0000000246213576752252031437 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .constants import FIELD_TYPE as FIELD_TYPE, FLAG as FLAG from .charset import charset_by_id as charset_by_id PYTHON3: Any ESCAPE_REGEX: Any ESCAPE_MAP: Any def escape_item(val, charset): ... def escape_dict(val, charset): ... def escape_sequence(val, charset): ... def escape_set(val, charset): ... def escape_bool(value): ... def escape_object(value): ... escape_int: Any escape_long: Any def escape_float(value): ... def escape_string(value): ... def escape_unicode(value): ... def escape_None(value): ... def escape_timedelta(obj): ... def escape_time(obj): ... def escape_datetime(obj): ... def escape_date(obj): ... def escape_struct_time(obj): ... def convert_datetime(connection, field, obj): ... def convert_timedelta(connection, field, obj): ... def convert_time(connection, field, obj): ... def convert_date(connection, field, obj): ... def convert_mysql_timestamp(connection, field, timestamp): ... def convert_set(s): ... 
def convert_bit(connection, field, b): ... def convert_characters(connection, field, data): ... def convert_int(connection, field, data): ... def convert_long(connection, field, data): ... def convert_float(connection, field, data): ... encoders: Any decoders: Any conversions: Any def convert_decimal(connection, field, data): ... def escape_decimal(obj): ... mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/cursors.pyi0000644€tŠÔÚ€2›s®0000000357513576752252030753 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterator, List, Optional, Text, Tuple, TypeVar, Union from .connections import Connection Gen = Union[Tuple[Any, ...], Dict[str, Any]] _SelfT = TypeVar("_SelfT") class Cursor: connection: Connection description: Tuple[Text, ...] rownumber: int rowcount: int arraysize: int messages: Any errorhandler: Any lastrowid: int def __init__(self, connection: Connection) -> None: ... def __del__(self) -> None: ... def close(self) -> None: ... def setinputsizes(self, *args): ... def setoutputsizes(self, *args): ... def nextset(self): ... def execute(self, query: str, args: Optional[Any] = ...) -> int: ... def executemany(self, query: str, args) -> int: ... def callproc(self, procname, args=...): ... def fetchone(self) -> Optional[Gen]: ... def fetchmany(self, size: Optional[int] = ...) -> Union[Optional[Gen], List[Gen]]: ... def fetchall(self) -> Optional[Tuple[Gen, ...]]: ... def scroll(self, value: int, mode: str = ...): ... def __iter__(self): ... def __enter__(self: _SelfT) -> _SelfT: ... def __exit__(self, *exc_info: Any) -> None: ... class DictCursor(Cursor): def fetchone(self) -> Optional[Dict[str, Any]]: ... def fetchmany(self, size: Optional[int] = ...) -> Optional[Tuple[Dict[str, Any], ...]]: ... def fetchall(self) -> Optional[Tuple[Dict[str, Any], ...]]: ... class DictCursorMixin: dict_type: Any class SSCursor(Cursor): # fetchall return type is incompatible with the supertype. def fetchall(self) -> List[Gen]: ... 
# type: ignore def fetchall_unbuffered(self) -> Iterator[Tuple[Gen, ...]]: ... def __iter__(self) -> Iterator[Tuple[Gen, ...]]: ... def fetchmany(self, size: Optional[int] = ...) -> List[Gen]: ... def scroll(self, value: int, mode: str = ...) -> None: ... class SSDictCursor(DictCursorMixin, SSCursor): ... mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/err.pyi0000644€tŠÔÚ€2›s®0000000107313576752252030032 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, NoReturn, Type from .constants import ER as ER class MySQLError(Exception): ... class Warning(MySQLError): ... class Error(MySQLError): ... class InterfaceError(Error): ... class DatabaseError(Error): ... class DataError(DatabaseError): ... class OperationalError(DatabaseError): ... class IntegrityError(DatabaseError): ... class InternalError(DatabaseError): ... class ProgrammingError(DatabaseError): ... class NotSupportedError(DatabaseError): ... error_map: Dict[int, Type[DatabaseError]] def raise_mysql_exception(data) -> NoReturn: ... mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/times.pyi0000644€tŠÔÚ€2›s®0000000025213576752252030361 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any Date: Any Time: Any TimeDelta: Any Timestamp: Any def DateFromTicks(ticks): ... def TimeFromTicks(ticks): ... def TimestampFromTicks(ticks): ... mypy-0.761/mypy/typeshed/third_party/2and3/pymysql/util.pyi0000644€tŠÔÚ€2›s®0000000010213576752252030207 0ustar jukkaDROPBOX\Domain Users00000000000000def byte2int(b): ... def int2byte(i): ... def join_bytes(bs): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/0000755€tŠÔÚ€2›s®0000000000013576752267026617 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/__init__.pyi0000644€tŠÔÚ€2›s®0000000002113576752252031064 0ustar jukkaDROPBOX\Domain Users00000000000000__license__: str mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/attributes.pyi0000644€tŠÔÚ€2›s®0000001014213576752252031520 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Generic, Iterable, List, Mapping, Optional, Text, Type, TypeVar, Union, Set from datetime import datetime _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') _MT = TypeVar('_MT', bound=MapAttribute[Any, Any]) class Attribute(Generic[_T]): attr_name: Optional[Text] attr_type: Text null: bool default: Any is_hash_key: bool is_range_key: bool def __init__(self, hash_key: bool = ..., range_key: bool = ..., null: Optional[bool] = ..., default: Optional[Union[_T, Callable[..., _T]]] = ..., attr_name: Optional[Text] = ...) -> None: ... def __set__(self, instance: Any, value: Optional[_T]) -> None: ... def serialize(self, value: Any) -> Any: ... def deserialize(self, value: Any) -> Any: ... def get_value(self, value: Any) -> Any: ... def between(self, lower: Any, upper: Any) -> Any: ... def is_in(self, *values: Any) -> Any: ... def exists(self) -> Any: ... def does_not_exist(self) -> Any: ... def is_type(self) -> Any: ... def startswith(self, prefix: str) -> Any: ... def contains(self, item: Any) -> Any: ... def append(self, other: Any) -> Any: ... def prepend(self, other: Any) -> Any: ... def set(self, value: Any) -> Any: ... def remove(self) -> Any: ... def add(self, *values: Any) -> Any: ... def delete(self, *values: Any) -> Any: ... class SetMixin(object): def serialize(self, value): ... def deserialize(self, value): ... class BinaryAttribute(Attribute[bytes]): def __get__(self, instance: Any, owner: Any) -> bytes: ... 
class BinarySetAttribute(SetMixin, Attribute[Set[bytes]]): def __get__(self, instance: Any, owner: Any) -> Set[bytes]: ... class UnicodeSetAttribute(SetMixin, Attribute[Set[Text]]): def element_serialize(self, value: Any) -> Any: ... def element_deserialize(self, value: Any) -> Any: ... def __get__(self, instance: Any, owner: Any) -> Set[Text]: ... class UnicodeAttribute(Attribute[Text]): def __get__(self, instance: Any, owner: Any) -> Text: ... class JSONAttribute(Attribute[Any]): def __get__(self, instance: Any, owner: Any) -> Any: ... class LegacyBooleanAttribute(Attribute[bool]): def __get__(self, instance: Any, owner: Any) -> bool: ... class BooleanAttribute(Attribute[bool]): def __get__(self, instance: Any, owner: Any) -> bool: ... class NumberSetAttribute(SetMixin, Attribute[Set[float]]): def __get__(self, instance: Any, owner: Any) -> Set[float]: ... class NumberAttribute(Attribute[float]): def __get__(self, instance: Any, owner: Any) -> float: ... class UTCDateTimeAttribute(Attribute[datetime]): def __get__(self, instance: Any, owner: Any) -> datetime: ... class NullAttribute(Attribute[None]): def __get__(self, instance: Any, owner: Any) -> None: ... class MapAttributeMeta(type): def __init__(self, name, bases, attrs) -> None: ... class MapAttribute(Generic[_KT, _VT], Attribute[Mapping[_KT, _VT]], metaclass=MapAttributeMeta): attribute_values: Any def __init__(self, hash_key: bool = ..., range_key: bool = ..., null: Optional[bool] = ..., default: Optional[Union[Any, Callable[..., Any]]] = ..., attr_name: Optional[Text] = ..., **attrs) -> None: ... def __iter__(self) -> Iterable[_VT]: ... def __getattr__(self, attr: str) -> _VT: ... def __getitem__(self, item: _KT) -> _VT: ... def __set__(self, instance: Any, value: Union[None, MapAttribute[_KT, _VT], Mapping[_KT, _VT]]) -> None: ... def __get__(self: _MT, instance: Any, owner: Any) -> _MT: ... def is_type_safe(self, key: Any, value: Any) -> bool: ... def validate(self) -> bool: ... 
class ListAttribute(Generic[_T], Attribute[List[_T]]): element_type: Any def __init__(self, hash_key: bool = ..., range_key: bool = ..., null: Optional[bool] = ..., default: Optional[Union[Any, Callable[..., Any]]] = ..., attr_name: Optional[Text] = ..., of: Optional[Type[_T]] = ...) -> None: ... def __get__(self, instance: Any, owner: Any) -> List[_T]: ... DESERIALIZE_CLASS_MAP: Dict[Text, Attribute[Any]] SERIALIZE_CLASS_MAP: Dict[Type[Any], Attribute[Any]] SERIALIZE_KEY_MAP: Dict[Type[Any], Text] mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/connection/0000755€tŠÔÚ€2›s®0000000000013576752267030756 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/connection/__init__.pyi0000644€tŠÔÚ€2›s®0000000020713576752252033231 0ustar jukkaDROPBOX\Domain Users00000000000000from pynamodb.connection.base import Connection as Connection from pynamodb.connection.table import TableConnection as TableConnection mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/connection/base.pyi0000644€tŠÔÚ€2›s®0000001307013576752252032406 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Optional, Text BOTOCORE_EXCEPTIONS: Any log: Any class MetaTable: data: Dict[Any, Any] def __init__(self, data: Dict[Any, Any]) -> None: ... @property def range_keyname(self) -> Optional[Text]: ... @property def hash_keyname(self) -> Text: ... def get_index_hash_keyname(self, index_name: Text) -> Optional[Text]: ... def get_item_attribute_map(self, attributes, item_key: Any = ..., pythonic_key: bool = ...): ... def get_attribute_type(self, attribute_name, value: Optional[Any] = ...): ... def get_identifier_map(self, hash_key, range_key: Optional[Any] = ..., key: Any = ...): ... def get_exclusive_start_key_map(self, exclusive_start_key): ... 
class Connection: host: Any region: Any session_cls: Any def __init__(self, region: Optional[Any] = ..., host: Optional[Any] = ..., session_cls: Optional[Any] = ..., request_timeout_seconds: Optional[Any] = ..., max_retry_attempts: Optional[Any] = ..., base_backoff_ms: Optional[Any] = ...) -> None: ... def dispatch(self, operation_name, operation_kwargs): ... @property def session(self): ... @property def requests_session(self): ... @property def client(self): ... def get_meta_table(self, table_name: Text, refresh: bool = ...): ... def create_table(self, table_name: Text, attribute_definitions: Optional[Any] = ..., key_schema: Optional[Any] = ..., read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_indexes: Optional[Any] = ..., local_secondary_indexes: Optional[Any] = ..., stream_specification: Optional[Any] = ...): ... def delete_table(self, table_name: Text): ... def update_table(self, table_name: Text, read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_index_updates: Optional[Any] = ...): ... def list_tables(self, exclusive_start_table_name: Optional[Any] = ..., limit: Optional[Any] = ...): ... def describe_table(self, table_name: Text): ... def get_conditional_operator(self, operator): ... def get_item_attribute_map(self, table_name: Text, attributes, item_key: Any = ..., pythonic_key: bool = ...): ... def get_expected_map(self, table_name: Text, expected): ... def parse_attribute(self, attribute, return_type: bool = ...): ... def get_attribute_type(self, table_name: Text, attribute_name, value: Optional[Any] = ...): ... def get_identifier_map(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., key: Any = ...): ... def get_query_filter_map(self, table_name: Text, query_filters): ... def get_consumed_capacity_map(self, return_consumed_capacity): ... def get_return_values_map(self, return_values): ... 
def get_item_collection_map(self, return_item_collection_metrics): ... def get_exclusive_start_key_map(self, table_name: Text, exclusive_start_key): ... def delete_item(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def update_item(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., attribute_updates: Optional[Any] = ..., expected: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ..., return_values: Optional[Any] = ...): ... def put_item(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., attributes: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def batch_write_item(self, table_name: Text, put_items: Optional[Any] = ..., delete_items: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def batch_get_item(self, table_name: Text, keys, consistent_read: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., attributes_to_get: Optional[Any] = ...): ... def get_item(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., consistent_read: bool = ..., attributes_to_get: Optional[Any] = ...): ... 
def scan(self, table_name: Text, attributes_to_get: Optional[Any] = ..., limit: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., scan_filter: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., exclusive_start_key: Optional[Any] = ..., segment: Optional[Any] = ..., total_segments: Optional[Any] = ...): ... def query(self, table_name: Text, hash_key, attributes_to_get: Optional[Any] = ..., consistent_read: bool = ..., exclusive_start_key: Optional[Any] = ..., index_name: Optional[Any] = ..., key_conditions: Optional[Any] = ..., query_filters: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., limit: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., scan_index_forward: Optional[Any] = ..., select: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/connection/table.pyi0000644€tŠÔÚ€2›s®0000000605513576752252032570 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class TableConnection: table_name: Any connection: Any def __init__(self, table_name, region: Optional[Any] = ..., host: Optional[Any] = ..., session_cls: Optional[Any] = ..., request_timeout_seconds: Optional[Any] = ..., max_retry_attempts: Optional[Any] = ..., base_backoff_ms: Optional[Any] = ...) -> None: ... def delete_item(self, hash_key, range_key: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def update_item(self, hash_key, range_key: Optional[Any] = ..., attribute_updates: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ..., return_values: Optional[Any] = ...): ... 
def put_item(self, hash_key, range_key: Optional[Any] = ..., attributes: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def batch_write_item(self, put_items: Optional[Any] = ..., delete_items: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def batch_get_item(self, keys, consistent_read: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., attributes_to_get: Optional[Any] = ...): ... def get_item(self, hash_key, range_key: Optional[Any] = ..., consistent_read: bool = ..., attributes_to_get: Optional[Any] = ...): ... def scan(self, attributes_to_get: Optional[Any] = ..., limit: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., scan_filter: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., segment: Optional[Any] = ..., total_segments: Optional[Any] = ..., exclusive_start_key: Optional[Any] = ...): ... def query(self, hash_key, attributes_to_get: Optional[Any] = ..., consistent_read: bool = ..., exclusive_start_key: Optional[Any] = ..., index_name: Optional[Any] = ..., key_conditions: Optional[Any] = ..., query_filters: Optional[Any] = ..., limit: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., scan_index_forward: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., select: Optional[Any] = ...): ... def describe_table(self): ... def delete_table(self): ... def update_table(self, read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_index_updates: Optional[Any] = ...): ... 
def create_table(self, attribute_definitions: Optional[Any] = ..., key_schema: Optional[Any] = ..., read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_indexes: Optional[Any] = ..., local_secondary_indexes: Optional[Any] = ..., stream_specification: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/connection/util.pyi0000644€tŠÔÚ€2›s®0000000010313576752252032442 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Text def pythonic(var_name: Text) -> Text: ... mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/constants.pyi0000644€tŠÔÚ€2›s®0000000573613576752252031363 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any BATCH_WRITE_ITEM: str DESCRIBE_TABLE: str BATCH_GET_ITEM: str CREATE_TABLE: str UPDATE_TABLE: str DELETE_TABLE: str LIST_TABLES: str UPDATE_ITEM: str DELETE_ITEM: str GET_ITEM: str PUT_ITEM: str QUERY: str SCAN: str GLOBAL_SECONDARY_INDEX_UPDATES: str RETURN_ITEM_COLL_METRICS: str EXCLUSIVE_START_TABLE_NAME: str RETURN_CONSUMED_CAPACITY: str COMPARISON_OPERATOR: str SCAN_INDEX_FORWARD: str ATTR_DEFINITIONS: str ATTR_VALUE_LIST: str TABLE_DESCRIPTION: str UNPROCESSED_KEYS: str UNPROCESSED_ITEMS: str CONSISTENT_READ: str DELETE_REQUEST: str RETURN_VALUES: str REQUEST_ITEMS: str ATTRS_TO_GET: str ATTR_UPDATES: str TABLE_STATUS: str SCAN_FILTER: str TABLE_NAME: str KEY_SCHEMA: str ATTR_NAME: str ATTR_TYPE: str ITEM_COUNT: str CAMEL_COUNT: str PUT_REQUEST: str INDEX_NAME: str ATTRIBUTES: str TABLE_KEY: str RESPONSES: str RANGE_KEY: str KEY_TYPE: str ACTION: str UPDATE: str EXISTS: str SELECT: str ACTIVE: str LIMIT: str ITEMS: str ITEM: str KEYS: str UTC: str KEY: str DEFAULT_ENCODING: str DEFAULT_REGION: str DATETIME_FORMAT: str SERVICE_NAME: str HTTP_OK: int HTTP_BAD_REQUEST: int PROVISIONED_THROUGHPUT: str READ_CAPACITY_UNITS: str WRITE_CAPACITY_UNITS: str STRING_SHORT: str STRING_SET_SHORT: str NUMBER_SHORT: str NUMBER_SET_SHORT: str 
BINARY_SHORT: str BINARY_SET_SHORT: str MAP_SHORT: str LIST_SHORT: str BOOLEAN: str BOOLEAN_SHORT: str STRING: str STRING_SET: str NUMBER: str NUMBER_SET: str BINARY: str BINARY_SET: str MAP: str LIST: str SHORT_ATTR_TYPES: Any ATTR_TYPE_MAP: Any LOCAL_SECONDARY_INDEX: str LOCAL_SECONDARY_INDEXES: str GLOBAL_SECONDARY_INDEX: str GLOBAL_SECONDARY_INDEXES: str PROJECTION: str PROJECTION_TYPE: str NON_KEY_ATTRIBUTES: str KEYS_ONLY: str ALL: str INCLUDE: str STREAM_VIEW_TYPE: str STREAM_SPECIFICATION: str STREAM_ENABLED: str STREAM_NEW_IMAGE: str STREAM_OLD_IMAGE: str STREAM_NEW_AND_OLD_IMAGE: str STREAM_KEYS_ONLY: str EXCLUSIVE_START_KEY: str LAST_EVALUATED_KEY: str QUERY_FILTER: str BEGINS_WITH: str BETWEEN: str EQ: str NE: str LE: str LT: str GE: str GT: str IN: str KEY_CONDITIONS: str COMPARISON_OPERATOR_VALUES: Any QUERY_OPERATOR_MAP: Any NOT_NULL: str NULL: str CONTAINS: str NOT_CONTAINS: str ALL_ATTRIBUTES: str ALL_PROJECTED_ATTRIBUTES: str SPECIFIC_ATTRIBUTES: str COUNT: str SELECT_VALUES: Any SCAN_OPERATOR_MAP: Any QUERY_FILTER_OPERATOR_MAP: Any DELETE_FILTER_OPERATOR_MAP: Any UPDATE_FILTER_OPERATOR_MAP: Any PUT_FILTER_OPERATOR_MAP: Any SEGMENT: str TOTAL_SEGMENTS: str SCAN_FILTER_VALUES: Any QUERY_FILTER_VALUES: Any DELETE_FILTER_VALUES: Any VALUE: str EXPECTED: str CONSUMED_CAPACITY: str CAPACITY_UNITS: str INDEXES: str TOTAL: str NONE: str RETURN_CONSUMED_CAPACITY_VALUES: Any SIZE: str RETURN_ITEM_COLL_METRICS_VALUES: Any ALL_OLD: str UPDATED_OLD: str ALL_NEW: str UPDATED_NEW: str RETURN_VALUES_VALUES: Any PUT: str DELETE: str ADD: str ATTR_UPDATE_ACTIONS: Any BATCH_GET_PAGE_LIMIT: int BATCH_WRITE_PAGE_LIMIT: int META_CLASS_NAME: str REGION: str HOST: str CONDITIONAL_OPERATOR: str AND: str OR: str CONDITIONAL_OPERATORS: Any mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/exceptions.pyi0000644€tŠÔÚ€2›s®0000000156713576752252031526 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text class PynamoDBException(Exception): msg: 
str cause: Any def __init__(self, msg: Optional[Text] = ..., cause: Optional[Exception] = ...) -> None: ... class PynamoDBConnectionError(PynamoDBException): ... class DeleteError(PynamoDBConnectionError): ... class QueryError(PynamoDBConnectionError): ... class ScanError(PynamoDBConnectionError): ... class PutError(PynamoDBConnectionError): ... class UpdateError(PynamoDBConnectionError): ... class GetError(PynamoDBConnectionError): ... class TableError(PynamoDBConnectionError): ... class DoesNotExist(PynamoDBException): ... class TableDoesNotExist(PynamoDBException): def __init__(self, table_name) -> None: ... class VerboseClientError(Exception): MSG_TEMPLATE: Any def __init__(self, error_response, operation_name, verbose_properties: Optional[Any] = ...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/indexes.pyi0000644€tŠÔÚ€2›s®0000000172613576752252031001 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class IndexMeta(type): def __init__(self, name, bases, attrs) -> None: ... class Index(metaclass=IndexMeta): Meta: Any def __init__(self) -> None: ... @classmethod def count(cls, hash_key, consistent_read: bool = ..., **filters) -> int: ... @classmethod def query(self, hash_key, scan_index_forward: Optional[Any] = ..., consistent_read: bool = ..., limit: Optional[Any] = ..., last_evaluated_key: Optional[Any] = ..., attributes_to_get: Optional[Any] = ..., **filters): ... class GlobalSecondaryIndex(Index): ... class LocalSecondaryIndex(Index): ... class Projection(object): projection_type: Any non_key_attributes: Any class KeysOnlyProjection(Projection): projection_type: Any class IncludeProjection(Projection): projection_type: Any non_key_attributes: Any def __init__(self, non_attr_keys: Optional[Any] = ...) -> None: ... 
class AllProjection(Projection): projection_type: Any mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/models.pyi0000644€tŠÔÚ€2›s®0000001230313576752252030616 0ustar jukkaDROPBOX\Domain Users00000000000000from .attributes import Attribute from .exceptions import DoesNotExist as DoesNotExist from typing import Any, Dict, Generic, Iterable, Iterator, List, Optional, Sequence, Tuple, Type, TypeVar, Text, Union log: Any class DefaultMeta: ... class ResultSet(object): results: Any operation: Any arguments: Any def __init__(self, results, operation, arguments) -> None: ... def __iter__(self): ... class MetaModel(type): def __init__(self, name: Text, bases: Tuple[type, ...], attrs: Dict[Any, Any]) -> None: ... _T = TypeVar('_T', bound='Model') KeyType = Union[Text, bytes, float, int, Tuple[Any, ...]] class Model(metaclass=MetaModel): DoesNotExist = DoesNotExist attribute_values: Dict[Text, Any] def __init__(self, hash_key: Optional[KeyType] = ..., range_key: Optional[Any] = ..., **attrs) -> None: ... @classmethod def has_map_or_list_attributes(cls: Type[_T]) -> bool: ... @classmethod def batch_get(cls: Type[_T], items: Iterable[Union[KeyType, Iterable[KeyType]]], consistent_read: Optional[bool] = ..., attributes_to_get: Optional[Sequence[Text]] = ...) -> Iterator[_T]: ... @classmethod def batch_write(cls: Type[_T], auto_commit: bool = ...) -> BatchWrite[_T]: ... def delete(self, condition: Optional[Any] = ..., conditional_operator: Optional[Text] = ..., **expected_values) -> Any: ... def update(self, attributes: Optional[Dict[Text, Dict[Text, Any]]] = ..., actions: Optional[List[Any]] = ..., condition: Optional[Any] = ..., conditional_operator: Optional[Text] = ..., **expected_values) -> Any: ... def update_item(self, attribute: Text, value: Optional[Any] = ..., action: Optional[Text] = ..., conditional_operator: Optional[Text] = ..., **expected_values): ... 
def save(self, condition: Optional[Any] = ..., conditional_operator: Optional[Text] = ..., **expected_values) -> Dict[str, Any]: ... def refresh(self, consistent_read: bool = ...): ... @classmethod def get(cls: Type[_T], hash_key: KeyType, range_key: Optional[KeyType] = ..., consistent_read: bool = ...) -> _T: ... @classmethod def from_raw_data(cls: Type[_T], data) -> _T: ... @classmethod def count(cls: Type[_T], hash_key: Optional[KeyType] = ..., consistent_read: bool = ..., index_name: Optional[Text] = ..., limit: Optional[int] = ..., **filters) -> int: ... @classmethod def query(cls: Type[_T], hash_key: KeyType, consistent_read: bool = ..., index_name: Optional[Text] = ..., scan_index_forward: Optional[Any] = ..., conditional_operator: Optional[Text] = ..., limit: Optional[int] = ..., last_evaluated_key: Optional[Any] = ..., attributes_to_get: Optional[Iterable[Text]] = ..., page_size: Optional[int] = ..., **filters) -> Iterator[_T]: ... @classmethod def rate_limited_scan( cls: Type[_T], # TODO: annotate Condition class filter_condition: Optional[Any] = ..., attributes_to_get: Optional[Sequence[Text]] = ..., segment: Optional[int] = ..., total_segments: Optional[int] = ..., limit: Optional[int] = ..., conditional_operator: Optional[Text] = ..., last_evaluated_key: Optional[Any] = ..., page_size: Optional[int] = ..., timeout_seconds: Optional[int] = ..., read_capacity_to_consume_per_second: int = ..., allow_rate_limited_scan_without_consumed_capacity: Optional[bool] = ..., max_sleep_between_retry: int = ..., max_consecutive_exceptions: int = ..., consistent_read: Optional[bool] = ..., index_name: Optional[str] = ..., **filters: Any ) -> Iterator[_T]: ... @classmethod def scan(cls: Type[_T], segment: Optional[int] = ..., total_segments: Optional[int] = ..., limit: Optional[int] = ..., conditional_operator: Optional[Text] = ..., last_evaluated_key: Optional[Any] = ..., page_size: Optional[int] = ..., **filters) -> Iterator[_T]: ... 
@classmethod def exists(cls: Type[_T]) -> bool: ... @classmethod def delete_table(cls): ... @classmethod def describe_table(cls): ... @classmethod def create_table(cls: Type[_T], wait: bool = ..., read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ...): ... @classmethod def dumps(cls): ... @classmethod def dump(cls, filename): ... @classmethod def loads(cls, data): ... @classmethod def load(cls, filename): ... @classmethod def add_throttle_record(cls, records): ... @classmethod def get_throttle(cls): ... @classmethod def get_attributes(cls) -> Dict[str, Attribute[Any]]: ... @classmethod def _get_attributes(cls) -> Dict[str, Attribute[Any]]: ... class ModelContextManager(Generic[_T]): model: Type[_T] auto_commit: bool max_operations: int pending_operations: List[Dict[Text, Any]] def __init__(self, model: Type[_T], auto_commit: bool = ...) -> None: ... def __enter__(self) -> ModelContextManager[_T]: ... class BatchWrite(Generic[_T], ModelContextManager[_T]): def save(self, put_item: _T) -> None: ... def delete(self, del_item: _T) -> None: ... def __enter__(self) -> BatchWrite[_T]: ... def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... pending_operations: Any def commit(self) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/settings.pyi0000644€tŠÔÚ€2›s®0000000022113576752252031167 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any log: Any default_settings_dict: Any OVERRIDE_SETTINGS_PATH: Any override_settings: Any def get_settings_value(key): ... mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/throttle.pyi0000644€tŠÔÚ€2›s®0000000073013576752252031201 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional log: Any class ThrottleBase: capacity: Any window: Any records: Any sleep_interval: Any def __init__(self, capacity, window: int = ..., initial_sleep: Optional[Any] = ...) -> None: ... def add_record(self, record): ... def throttle(self): ... 
class NoThrottle(ThrottleBase): def __init__(self) -> None: ... def add_record(self, record): ... class Throttle(ThrottleBase): def throttle(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/pynamodb/types.pyi0000644€tŠÔÚ€2›s®0000000007113576752252030476 0ustar jukkaDROPBOX\Domain Users00000000000000STRING: str NUMBER: str BINARY: str HASH: str RANGE: str mypy-0.761/mypy/typeshed/third_party/2and3/pyre_extensions.pyi0000644€tŠÔÚ€2›s®0000000041313576752252030757 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Optional, Type, TypeVar _T = TypeVar("_T") def none_throws(optional: Optional[_T], message: str = ...) -> _T: ... def safe_cast(new_type: Type[_T], value: Any) -> _T: ... def ParameterSpecification(__name: str) -> List[Type[Any]]: ... mypy-0.761/mypy/typeshed/third_party/2and3/pytz/0000755€tŠÔÚ€2›s®0000000000013576752267026014 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/pytz/__init__.pyi0000644€tŠÔÚ€2›s®0000000354313576752252030275 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, List, Set, Mapping, Union import datetime class BaseTzInfo(datetime.tzinfo): zone: str = ... def localize(self, dt: datetime.datetime, is_dst: Optional[bool] = ...) -> datetime.datetime: ... def normalize(self, dt: datetime.datetime) -> datetime.datetime: ... class _UTCclass(BaseTzInfo): def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> datetime.timedelta: ... def dst(self, dt: Optional[datetime.datetime]) -> datetime.timedelta: ... class _StaticTzInfo(BaseTzInfo): def tzname(self, dt: Optional[datetime.datetime], is_dst: Optional[bool] = ...) -> str: ... def utcoffset(self, dt: Optional[datetime.datetime], is_dst: Optional[bool] = ...) -> datetime.timedelta: ... def dst(self, dt: Optional[datetime.datetime], is_dst: Optional[bool] = ...) -> datetime.timedelta: ... 
class _DstTzInfo(BaseTzInfo): def tzname(self, dt: Optional[datetime.datetime], is_dst: Optional[bool] = ...) -> str: ... def utcoffset(self, dt: Optional[datetime.datetime], is_dst: Optional[bool] = ...) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime], is_dst: Optional[bool] = ...) -> Optional[datetime.timedelta]: ... class UnknownTimeZoneError(KeyError): ... class InvalidTimeError(Exception): ... class AmbiguousTimeError(InvalidTimeError): ... class NonExistentTimeError(InvalidTimeError): ... utc: _UTCclass UTC: _UTCclass def timezone(zone: str) -> Union[_UTCclass, _StaticTzInfo, _DstTzInfo]: ... def FixedOffset(offset: int) -> Union[_UTCclass, datetime.tzinfo]: ... all_timezones: List[str] all_timezones_set: Set[str] common_timezones: List[str] common_timezones_set: Set[str] country_timezones: Mapping[str, List[str]] country_names: Mapping[str, str] ZERO: datetime.timedelta HOUR: datetime.timedelta VERSION: str mypy-0.761/mypy/typeshed/third_party/2and3/redis/0000755€tŠÔÚ€2›s®0000000000013576752267026114 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/redis/__init__.pyi0000644€tŠÔÚ€2›s®0000000154413576752252030374 0ustar jukkaDROPBOX\Domain Users00000000000000from . import client from . import connection from . import utils from . 
import exceptions Redis = client.Redis StrictRedis = client.StrictRedis BlockingConnectionPool = connection.BlockingConnectionPool ConnectionPool = connection.ConnectionPool Connection = connection.Connection SSLConnection = connection.SSLConnection UnixDomainSocketConnection = connection.UnixDomainSocketConnection from_url = utils.from_url AuthenticationError = exceptions.AuthenticationError BusyLoadingError = exceptions.BusyLoadingError ConnectionError = exceptions.ConnectionError DataError = exceptions.DataError InvalidResponse = exceptions.InvalidResponse PubSubError = exceptions.PubSubError ReadOnlyError = exceptions.ReadOnlyError RedisError = exceptions.RedisError ResponseError = exceptions.ResponseError TimeoutError = exceptions.TimeoutError WatchError = exceptions.WatchError mypy-0.761/mypy/typeshed/third_party/2and3/redis/client.pyi0000644€tŠÔÚ€2›s®0000003555313576752252030122 0ustar jukkaDROPBOX\Domain Users00000000000000from datetime import timedelta from typing import Any, Iterable, Text, Optional, Mapping, Tuple, Union, Callable, List, Dict from .connection import ConnectionPool SYM_EMPTY: Any def list_or_args(keys, args): ... def timestamp_to_datetime(response): ... def string_keys_to_dict(key_string, callback): ... def dict_merge(*dicts): ... def parse_debug_object(response): ... def parse_object(response, infotype): ... def parse_info(response): ... SENTINEL_STATE_TYPES: Any def parse_sentinel_state(item): ... def parse_sentinel_master(response): ... def parse_sentinel_masters(response): ... def parse_sentinel_slaves_and_sentinels(response): ... def parse_sentinel_get_master(response): ... def pairs_to_dict(response): ... def pairs_to_dict_typed(response, type_info): ... def zset_score_pairs(response, **options): ... def sort_return_tuples(response, **options): ... def int_or_none(response): ... def float_or_none(response): ... def bool_ok(response): ... def parse_client_list(response, **options): ... def parse_config_get(response, **options): ... 
def parse_scan(response, **options): ... def parse_hscan(response, **options): ... def parse_zscan(response, **options): ... def parse_slowlog_get(response, **options): ... _Str = Union[bytes, float, int, Text] class Redis(object): RESPONSE_CALLBACKS: Any @classmethod def from_url(cls, url: Text, db: Optional[int] = ..., **kwargs) -> Redis: ... connection_pool: Any response_callbacks: Any def __init__( self, host: Text = ..., port: int = ..., db: int = ..., password: Optional[Text] = ..., socket_timeout: Optional[float] = ..., socket_connect_timeout: Optional[float] = ..., socket_keepalive: Optional[bool] = ..., socket_keepalive_options: Optional[Mapping[str, Union[int, str]]] = ..., connection_pool: Optional[ConnectionPool] = ..., unix_socket_path: Optional[Text] = ..., encoding: Text = ..., encoding_errors: Text = ..., charset: Optional[Text] = ..., errors: Optional[Text] = ..., decode_responses: bool = ..., retry_on_timeout: bool = ..., ssl: bool = ..., ssl_keyfile: Optional[Text] = ..., ssl_certfile: Optional[Text] = ..., ssl_cert_reqs: Optional[Union[str, int]] = ..., ssl_ca_certs: Optional[Text] = ..., max_connections: Optional[int] = ..., ) -> None: ... def set_response_callback(self, command, callback): ... def pipeline(self, transaction=..., shard_hint=...): ... def transaction(self, func, *watches, **kwargs): ... def lock(self, name, timeout=..., sleep=..., blocking_timeout=..., lock_class=..., thread_local=...): ... def pubsub(self, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ... def execute_command(self, *args, **options): ... def parse_response(self, connection, command_name, **options): ... def bgrewriteaof(self): ... def bgsave(self): ... def client_kill(self, address): ... def client_list(self): ... def client_getname(self): ... def client_setname(self, name): ... def config_get(self, pattern=...): ... def config_set(self, name, value): ... def config_resetstat(self): ... def config_rewrite(self): ... 
def dbsize(self) -> int: ... def debug_object(self, key): ... def echo(self, value): ... def flushall(self) -> bool: ... def flushdb(self) -> bool: ... def info(self, section: Optional[Text] = ...) -> Mapping[str, Any]: ... def lastsave(self): ... def object(self, infotype, key): ... def ping(self) -> bool: ... def save(self) -> bool: ... def sentinel(self, *args): ... def sentinel_get_master_addr_by_name(self, service_name): ... def sentinel_master(self, service_name): ... def sentinel_masters(self): ... def sentinel_monitor(self, name, ip, port, quorum): ... def sentinel_remove(self, name): ... def sentinel_sentinels(self, service_name): ... def sentinel_set(self, name, option, value): ... def sentinel_slaves(self, service_name): ... def shutdown(self): ... def slaveof(self, host=..., port=...): ... def slowlog_get(self, num=...): ... def slowlog_len(self): ... def slowlog_reset(self): ... def time(self): ... def append(self, key, value): ... def bitcount(self, key, start=..., end=...): ... def bitop(self, operation, dest, *keys): ... def bitpos(self, key, bit, start=..., end=...): ... def decr(self, name, amount=...): ... def delete(self, *names: Text): ... def __delitem__(self, name): ... def dump(self, name): ... def exists(self, *names: Text) -> int: ... __contains__: Any def expire(self, name: Union[Text, bytes], time: Union[int, timedelta]) -> bool: ... def expireat(self, name, when): ... def get(self, name: Union[Text, bytes]) -> Optional[bytes]: ... def __getitem__(self, name): ... def getbit(self, name, offset): ... def getrange(self, key, start, end): ... def getset(self, name, value): ... def incr(self, name, amount=...): ... def incrby(self, name, amount=...): ... def incrbyfloat(self, name, amount=...): ... def keys(self, pattern=...): ... def mget(self, keys, *args): ... def mset(self, *args, **kwargs): ... def msetnx(self, *args, **kwargs): ... def move(self, name, db): ... def persist(self, name): ... def pexpire(self, name, time): ... 
def pexpireat(self, name, when): ... def psetex(self, name, time_ms, value): ... def pttl(self, name): ... def randomkey(self): ... def rename(self, src, dst): ... def renamenx(self, src, dst): ... def restore(self, name, ttl, value): ... def set( self, name: Union[Text, bytes], value: _Str, ex: Union[None, int, timedelta] = ..., px: Union[None, int, timedelta] = ..., nx: bool = ..., xx: bool = ..., ) -> Optional[bool]: ... def __setitem__(self, name, value): ... def setbit(self, name, offset, value): ... def setex(self, name, time, value): ... def setnx(self, name, value): ... def setrange(self, name, offset, value): ... def strlen(self, name): ... def substr(self, name, start, end=...): ... def ttl(self, name): ... def type(self, name): ... def watch(self, *names): ... def unwatch(self): ... def blpop(self, keys: Union[_Str, Iterable[_Str]], timeout: int = ...) -> Optional[Tuple[bytes, bytes]]: ... def brpop(self, keys: Union[_Str, Iterable[_Str]], timeout: int = ...) -> Optional[Tuple[bytes, bytes]]: ... def brpoplpush(self, src, dst, timeout=...): ... def lindex(self, name, index): ... def linsert(self, name, where, refvalue, value): ... def llen(self, name): ... def lpop(self, name): ... def lpush(self, name: _Str, *values: _Str) -> int: ... def lpushx(self, name, value): ... def lrange(self, name, start, end): ... def lrem(self, name, count, value): ... def lset(self, name, index, value): ... def ltrim(self, name, start, end): ... def rpop(self, name): ... def rpoplpush(self, src, dst): ... def rpush(self, name: _Str, *values: _Str) -> int: ... def rpushx(self, name, value): ... def sort(self, name, start=..., num=..., by=..., get=..., desc=..., alpha=..., store=..., groups=...): ... def scan(self, cursor: int = ..., match: Optional[Text] = ..., count: Optional[int] = ...) -> List[Text]: ... def scan_iter(self, match: Optional[Text] = ..., count: Optional[int] = ...) -> List[Text]: ... def sscan(self, name, cursor=..., match=..., count=...): ... 
def sscan_iter(self, name, match=..., count=...): ... def hscan(self, name, cursor=..., match=..., count=...): ... def hscan_iter(self, name, match=..., count=...): ... def zscan(self, name, cursor=..., match=..., count=..., score_cast_func=...): ... def zscan_iter(self, name, match=..., count=..., score_cast_func=...): ... def sadd(self, name, *values): ... def scard(self, name): ... def sdiff(self, keys, *args): ... def sdiffstore(self, dest, keys, *args): ... def sinter(self, keys, *args): ... def sinterstore(self, dest, keys, *args): ... def sismember(self, name, value): ... def smembers(self, name): ... def smove(self, src, dst, value): ... def spop(self, name): ... def srandmember(self, name, number=...): ... def srem(self, name, *values): ... def sunion(self, keys, *args): ... def sunionstore(self, dest, keys, *args): ... def xack(self, name, groupname, *ids): ... def xadd(self, name, fields, id=..., maxlen=..., approximate=...): ... def xclaim( self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=..., ): ... def xdel(self, name, *ids): ... def xgroup_create(self, name, groupname, id=..., mkstream=...): ... def xgroup_delconsumer(self, name, groupname, consumername): ... def xgroup_destroy(self, name, groupname): ... def xgroup_setid(self, name, groupname, id): ... def xinfo_consumers(self, name, groupname): ... def xinfo_groups(self, name): ... def xinfo_stream(self, name): ... def xlen(self, name): ... def xpending(self, name, groupname): ... def xpending_range(self, name, groupname, min, max, count, consumername=...): ... def xrange(self, name, min=..., max=..., count=...): ... def xread(self, streams, count=..., block=...): ... def xreadgroup(self, groupname, consumername, streams, count=..., block=..., noack=...): ... def xrevrange(self, name, max=..., min=..., count=...): ... def xtrim(self, name, maxlen, approximate=...): ... 
def zadd(self, name, mapping, nx: bool = ..., xx: bool = ..., ch: bool = ..., incr: bool = ...): ... def zcard(self, name): ... def zcount(self, name, min, max): ... def zincrby(self, name, value, amount=...): ... def zinterstore(self, dest, keys, aggregate=...): ... def zlexcount(self, name, min, max): ... def zrange(self, name, start, end, desc=..., withscores=..., score_cast_func=...): ... def zrangebylex(self, name, min, max, start=..., num=...): ... def zrangebyscore(self, name, min, max, start=..., num=..., withscores=..., score_cast_func=...): ... def zrank(self, name, value): ... def zrem(self, name, *values): ... def zremrangebylex(self, name, min, max): ... def zremrangebyrank(self, name, min, max): ... def zremrangebyscore(self, name, min, max): ... def zrevrange(self, name, start, end, withscores=..., score_cast_func=...): ... def zrevrangebyscore(self, name, max, min, start=..., num=..., withscores=..., score_cast_func=...): ... def zrevrank(self, name, value): ... def zscore(self, name, value): ... def zunionstore(self, dest, keys, aggregate=...): ... def pfadd(self, name, *values): ... def pfcount(self, name): ... def pfmerge(self, dest, *sources): ... def hdel(self, name, *keys): ... def hexists(self, name, key): ... def hget(self, name, key): ... def hgetall(self, name): ... def hincrby(self, name, key, amount=...): ... def hincrbyfloat(self, name, key, amount=...): ... def hkeys(self, name): ... def hlen(self, name): ... def hset(self, name, key, value): ... def hsetnx(self, name, key, value): ... def hmset(self, name, mapping): ... def hmget(self, name, keys, *args): ... def hvals(self, name): ... def publish(self, channel: Text, message: _Str) -> int: ... def eval(self, script, numkeys, *keys_and_args): ... def evalsha(self, sha, numkeys, *keys_and_args): ... def script_exists(self, *args): ... def script_flush(self): ... def script_kill(self): ... def script_load(self, script): ... def register_script(self, script): ... 
def pubsub_channels(self, pattern: Text = ...) -> List[Text]: ... def pubsub_numsub(self, *args: Text) -> List[Tuple[Text, int]]: ... def pubsub_numpat(self) -> int: ... def monitor(self) -> Monitor: ... StrictRedis = Redis class PubSub: PUBLISH_MESSAGE_TYPES: Any UNSUBSCRIBE_MESSAGE_TYPES: Any connection_pool: Any shard_hint: Any ignore_subscribe_messages: Any connection: Any encoding: Any encoding_errors: Any decode_responses: Any def __init__(self, connection_pool, shard_hint=..., ignore_subscribe_messages=...) -> None: ... def __del__(self): ... channels: Any patterns: Any def reset(self): ... def close(self) -> None: ... def on_connect(self, connection): ... def encode(self, value): ... @property def subscribed(self): ... def execute_command(self, *args, **kwargs): ... def parse_response(self, block=...): ... def psubscribe(self, *args: Text, **kwargs: Callable[[Any], None]): ... def punsubscribe(self, *args: Text) -> None: ... def subscribe(self, *args: Text, **kwargs: Callable[[Any], None]) -> None: ... def unsubscribe(self, *args: Text) -> None: ... def listen(self): ... def get_message(self, ignore_subscribe_messages: bool = ..., timeout: float = ...) -> Optional[Dict[str, Any]]: ... def handle_message(self, response, ignore_subscribe_messages: bool = ...) -> Optional[Dict[str, Any]]: ... def run_in_thread(self, sleep_time=...): ... class BasePipeline: UNWATCH_COMMANDS: Any connection_pool: Any connection: Any response_callbacks: Any transaction: Any shard_hint: Any watching: Any def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, traceback): ... def __del__(self): ... def __len__(self): ... command_stack: Any scripts: Any explicit_transaction: Any def reset(self): ... def multi(self): ... def execute_command(self, *args, **kwargs): ... def immediate_execute_command(self, *args, **options): ... def pipeline_execute_command(self, *args, **options): ... 
def raise_first_error(self, commands, response): ... def annotate_exception(self, exception, number, command): ... def parse_response(self, connection, command_name, **options): ... def load_scripts(self): ... def execute(self, raise_on_error=...): ... def watch(self, *names): ... def unwatch(self): ... def script_load_for_pipeline(self, script): ... class StrictPipeline(BasePipeline, StrictRedis): ... class Pipeline(BasePipeline, Redis): ... class Script: registered_client: Any script: Any sha: Any def __init__(self, registered_client, script) -> None: ... def __call__(self, keys=..., args=..., client=...): ... class Monitor(object): def __init__(self, connection_pool) -> None: ... def __enter__(self) -> Monitor: ... def __exit__(self, *args: Any) -> None: ... def next_command(self) -> Dict[Text, Any]: ... def listen(self) -> Iterable[Dict[Text, Any]]: ... mypy-0.761/mypy/typeshed/third_party/2and3/redis/connection.pyi0000644€tŠÔÚ€2›s®0000001007113576752252030767 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Text, Optional ssl_available: Any hiredis_version: Any HIREDIS_SUPPORTS_CALLABLE_ERRORS: Any HIREDIS_SUPPORTS_BYTE_BUFFER: Any msg: Any HIREDIS_USE_BYTE_BUFFER: Any SYM_STAR: Any SYM_DOLLAR: Any SYM_CRLF: Any SYM_EMPTY: Any SERVER_CLOSED_CONNECTION_ERROR: Any class Token: value: Any def __init__(self, value) -> None: ... class BaseParser: EXCEPTION_CLASSES: Any def parse_error(self, response): ... class SocketBuffer: socket_read_size: Any bytes_written: Any bytes_read: Any def __init__(self, socket, socket_read_size) -> None: ... @property def length(self): ... def read(self, length): ... def readline(self): ... def purge(self): ... def close(self): ... class PythonParser(BaseParser): encoding: Any socket_read_size: Any def __init__(self, socket_read_size) -> None: ... def __del__(self): ... def on_connect(self, connection): ... def on_disconnect(self): ... def can_read(self): ... def read_response(self): ... 
class HiredisParser(BaseParser): socket_read_size: Any def __init__(self, socket_read_size) -> None: ... def __del__(self): ... def on_connect(self, connection): ... def on_disconnect(self): ... def can_read(self): ... def read_response(self): ... DefaultParser: Any class Connection: description_format: Any pid: Any host: Any port: Any db: Any password: Any socket_timeout: Any socket_connect_timeout: Any socket_keepalive: Any socket_keepalive_options: Any retry_on_timeout: Any encoding: Any encoding_errors: Any decode_responses: Any def __init__(self, host=..., port=..., db=..., password=..., socket_timeout=..., socket_connect_timeout=..., socket_keepalive=..., socket_keepalive_options=..., retry_on_timeout=..., encoding=..., encoding_errors=..., decode_responses=..., parser_class=..., socket_read_size=...) -> None: ... def __del__(self): ... def register_connect_callback(self, callback): ... def clear_connect_callbacks(self): ... def connect(self): ... def on_connect(self): ... def disconnect(self): ... def send_packed_command(self, command): ... def send_command(self, *args): ... def can_read(self): ... def read_response(self): ... def encode(self, value): ... def pack_command(self, *args): ... def pack_commands(self, commands): ... class SSLConnection(Connection): description_format: Any keyfile: Any certfile: Any cert_reqs: Any ca_certs: Any def __init__(self, ssl_keyfile=..., ssl_certfile=..., ssl_cert_reqs=..., ssl_ca_certs=..., **kwargs) -> None: ... class UnixDomainSocketConnection(Connection): description_format: Any pid: Any path: Any db: Any password: Any socket_timeout: Any retry_on_timeout: Any encoding: Any encoding_errors: Any decode_responses: Any def __init__(self, path=..., db=..., password=..., socket_timeout=..., encoding=..., encoding_errors=..., decode_responses=..., retry_on_timeout=..., parser_class=..., socket_read_size=...) -> None: ... 
class ConnectionPool: @classmethod def from_url(cls, url: Text, db: Optional[int] = ..., **kwargs) -> ConnectionPool: ... connection_class: Any connection_kwargs: Any max_connections: Any def __init__(self, connection_class=..., max_connections=..., **connection_kwargs) -> None: ... pid: Any def reset(self): ... def get_connection(self, command_name, *keys, **options): ... def make_connection(self): ... def release(self, connection): ... def disconnect(self): ... class BlockingConnectionPool(ConnectionPool): queue_class: Any timeout: Any def __init__(self, max_connections=..., timeout=..., connection_class=..., queue_class=..., **connection_kwargs) -> None: ... pid: Any pool: Any def reset(self): ... def make_connection(self): ... def get_connection(self, command_name, *keys, **options): ... def release(self, connection): ... def disconnect(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/redis/exceptions.pyi0000644€tŠÔÚ€2›s®0000000107113576752252031011 0ustar jukkaDROPBOX\Domain Users00000000000000class RedisError(Exception): ... def __unicode__(self): ... class AuthenticationError(RedisError): ... class ConnectionError(RedisError): ... class TimeoutError(RedisError): ... class BusyLoadingError(ConnectionError): ... class InvalidResponse(RedisError): ... class ResponseError(RedisError): ... class DataError(RedisError): ... class PubSubError(RedisError): ... class WatchError(RedisError): ... class NoScriptError(ResponseError): ... class ExecAbortError(ResponseError): ... class ReadOnlyError(ResponseError): ... class LockError(RedisError, ValueError): ... mypy-0.761/mypy/typeshed/third_party/2and3/redis/utils.pyi0000644€tŠÔÚ€2›s®0000000021013576752252027762 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any HIREDIS_AVAILABLE: Any def from_url(url, db=..., **kwargs): ... def pipeline(redis_obj): ... class dummy: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/requests/0000755€tŠÔÚ€2›s®0000000000013576752267026661 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/__init__.pyi0000644€tŠÔÚ€2›s®0000000165413576752252031143 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests (based on version 2.6.0, Python 3) from typing import Any from . import models from . import api from . import sessions from . import status_codes from . import exceptions from . import packages import logging __title__: Any __build__: Any __license__: Any __copyright__: Any __version__: Any Request = models.Request Response = models.Response PreparedRequest = models.PreparedRequest request = api.request get = api.get head = api.head post = api.post patch = api.patch put = api.put delete = api.delete options = api.options session = sessions.session Session = sessions.Session codes = status_codes.codes RequestException = exceptions.RequestException Timeout = exceptions.Timeout URLRequired = exceptions.URLRequired TooManyRedirects = exceptions.TooManyRedirects HTTPError = exceptions.HTTPError ConnectionError = exceptions.ConnectionError class NullHandler(logging.Handler): def emit(self, record): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/adapters.pyi0000644€tŠÔÚ€2›s®0000000606313576752252031206 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.adapters (Python 3) from typing import Any, Container, Union, Text, Tuple, Optional, Mapping from . import models from .packages.urllib3 import poolmanager from .packages.urllib3 import response from .packages.urllib3.util import retry from . import compat from . import utils from . import structures from .packages.urllib3 import exceptions as urllib3_exceptions from . import cookies from . import exceptions from . 
import auth PreparedRequest = models.PreparedRequest Response = models.Response PoolManager = poolmanager.PoolManager proxy_from_url = poolmanager.proxy_from_url HTTPResponse = response.HTTPResponse Retry = retry.Retry DEFAULT_CA_BUNDLE_PATH = utils.DEFAULT_CA_BUNDLE_PATH get_encoding_from_headers = utils.get_encoding_from_headers prepend_scheme_if_needed = utils.prepend_scheme_if_needed get_auth_from_url = utils.get_auth_from_url urldefragauth = utils.urldefragauth CaseInsensitiveDict = structures.CaseInsensitiveDict ConnectTimeoutError = urllib3_exceptions.ConnectTimeoutError MaxRetryError = urllib3_exceptions.MaxRetryError ProtocolError = urllib3_exceptions.ProtocolError ReadTimeoutError = urllib3_exceptions.ReadTimeoutError ResponseError = urllib3_exceptions.ResponseError extract_cookies_to_jar = cookies.extract_cookies_to_jar ConnectionError = exceptions.ConnectionError ConnectTimeout = exceptions.ConnectTimeout ReadTimeout = exceptions.ReadTimeout SSLError = exceptions.SSLError ProxyError = exceptions.ProxyError RetryError = exceptions.RetryError DEFAULT_POOLBLOCK: Any DEFAULT_POOLSIZE: Any DEFAULT_RETRIES: Any class BaseAdapter: def __init__(self) -> None: ... def send(self, request: PreparedRequest, stream: bool = ..., timeout: Union[None, float, Tuple[float, float]] = ..., verify: Union[bool, str] = ..., cert: Union[None, Union[bytes, Text], Container[Union[bytes, Text]]] = ..., proxies: Optional[Mapping[str, str]] = ...) -> Response: ... def close(self) -> None: ... class HTTPAdapter(BaseAdapter): __attrs__: Any max_retries: Any config: Any proxy_manager: Any def __init__(self, pool_connections=..., pool_maxsize=..., max_retries=..., pool_block=...) -> None: ... poolmanager: Any def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs): ... def proxy_manager_for(self, proxy, **proxy_kwargs): ... def cert_verify(self, conn, url, verify, cert): ... def build_response(self, req, resp): ... def get_connection(self, url, proxies=...): ... 
def close(self): ... def request_url(self, request, proxies): ... def add_headers(self, request, **kwargs): ... def proxy_headers(self, proxy): ... def send(self, request: PreparedRequest, stream: bool = ..., timeout: Union[None, float, Tuple[float, float]] = ..., verify: Union[bool, str] = ..., cert: Union[None, Union[bytes, Text], Container[Union[bytes, Text]]] = ..., proxies: Optional[Mapping[str, str]] = ...) -> Response: ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/api.pyi0000644€tŠÔÚ€2›s®0000000250113576752252030145 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.api (Python 3) import sys from typing import Optional, Union, Any, Iterable, Mapping, MutableMapping, Tuple, IO, Text from .models import Response from .sessions import _Data _ParamsMappingValueType = Union[Text, bytes, int, float, Iterable[Union[Text, bytes, int, float]]] def request(method: str, url: str, **kwargs) -> Response: ... def get( url: Union[Text, bytes], params: Optional[ Union[ Mapping[Union[Text, bytes, int, float], _ParamsMappingValueType], Union[Text, bytes], Tuple[Union[Text, bytes, int, float], _ParamsMappingValueType], Mapping[Text, _ParamsMappingValueType], Mapping[bytes, _ParamsMappingValueType], Mapping[int, _ParamsMappingValueType], Mapping[float, _ParamsMappingValueType], ] ] = ..., **kwargs, ) -> Response: ... def options(url: Union[Text, bytes], **kwargs) -> Response: ... def head(url: Union[Text, bytes], **kwargs) -> Response: ... def post(url: Union[Text, bytes], data: _Data = ..., json=..., **kwargs) -> Response: ... def put(url: Union[Text, bytes], data: _Data = ..., json=..., **kwargs) -> Response: ... def patch(url: Union[Text, bytes], data: _Data = ..., json=..., **kwargs) -> Response: ... def delete(url: Union[Text, bytes], **kwargs) -> Response: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/requests/auth.pyi0000644€tŠÔÚ€2›s®0000000230713576752252030341 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.auth (Python 3) from typing import Any, Text, Union from . import compat from . import cookies from . import models from . import utils from . import status_codes extract_cookies_to_jar = cookies.extract_cookies_to_jar parse_dict_header = utils.parse_dict_header to_native_string = utils.to_native_string codes = status_codes.codes CONTENT_TYPE_FORM_URLENCODED: Any CONTENT_TYPE_MULTI_PART: Any def _basic_auth_str(username: Union[bytes, Text], password: Union[bytes, Text]) -> str: ... class AuthBase: def __call__(self, r: models.PreparedRequest) -> models.PreparedRequest: ... class HTTPBasicAuth(AuthBase): username: Any password: Any def __init__(self, username, password) -> None: ... def __call__(self, r): ... class HTTPProxyAuth(HTTPBasicAuth): def __call__(self, r): ... class HTTPDigestAuth(AuthBase): username: Any password: Any last_nonce: Any nonce_count: Any chal: Any pos: Any num_401_calls: Any def __init__(self, username, password) -> None: ... def build_digest_header(self, method, url): ... def handle_redirect(self, r, **kwargs): ... def handle_401(self, r, **kwargs): ... def __call__(self, r): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/compat.pyi0000644€tŠÔÚ€2›s®0000000017313576752252030662 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.compat (Python 3.4) from typing import Any import collections OrderedDict = collections.OrderedDict mypy-0.761/mypy/typeshed/third_party/2and3/requests/cookies.pyi0000644€tŠÔÚ€2›s®0000000404413576752252031034 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.cookies (Python 3) import sys from typing import Any, MutableMapping import collections from . 
import compat if sys.version_info < (3, 0): from cookielib import CookieJar else: from http.cookiejar import CookieJar class MockRequest: type: Any def __init__(self, request) -> None: ... def get_type(self): ... def get_host(self): ... def get_origin_req_host(self): ... def get_full_url(self): ... def is_unverifiable(self): ... def has_header(self, name): ... def get_header(self, name, default=...): ... def add_header(self, key, val): ... def add_unredirected_header(self, name, value): ... def get_new_headers(self): ... @property def unverifiable(self): ... @property def origin_req_host(self): ... @property def host(self): ... class MockResponse: def __init__(self, headers) -> None: ... def info(self): ... def getheaders(self, name): ... def extract_cookies_to_jar(jar, request, response): ... def get_cookie_header(jar, request): ... def remove_cookie_by_name(cookiejar, name, domain=..., path=...): ... class CookieConflictError(RuntimeError): ... class RequestsCookieJar(CookieJar, MutableMapping[Any, Any]): def get(self, name, default=..., domain=..., path=...): ... def set(self, name, value, **kwargs): ... def iterkeys(self): ... def keys(self): ... def itervalues(self): ... def values(self): ... def iteritems(self): ... def items(self): ... def list_domains(self): ... def list_paths(self): ... def multiple_domains(self): ... def get_dict(self, domain=..., path=...): ... def __getitem__(self, name): ... def __setitem__(self, name, value): ... def __delitem__(self, name): ... def set_cookie(self, cookie, *args, **kwargs): ... def update(self, other): ... def copy(self): ... def create_cookie(name, value, **kwargs): ... def morsel_to_cookie(morsel): ... def cookiejar_from_dict(cookie_dict, cookiejar=..., overwrite=...): ... def merge_cookies(cookiejar, cookies): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/requests/exceptions.pyi0000644€tŠÔÚ€2›s®0000000175313576752252031565 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.exceptions (Python 3) from typing import Any from .packages.urllib3.exceptions import HTTPError as BaseHTTPError class RequestException(IOError): response: Any request: Any def __init__(self, *args, **kwargs) -> None: ... class HTTPError(RequestException): ... class ConnectionError(RequestException): ... class ProxyError(ConnectionError): ... class SSLError(ConnectionError): ... class Timeout(RequestException): ... class ConnectTimeout(ConnectionError, Timeout): ... class ReadTimeout(Timeout): ... class URLRequired(RequestException): ... class TooManyRedirects(RequestException): ... class MissingSchema(RequestException, ValueError): ... class InvalidSchema(RequestException, ValueError): ... class InvalidURL(RequestException, ValueError): ... class ChunkedEncodingError(RequestException): ... class ContentDecodingError(RequestException, BaseHTTPError): ... class StreamConsumedError(RequestException, TypeError): ... class RetryError(RequestException): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/hooks.pyi0000644€tŠÔÚ€2›s®0000000023413576752252030520 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.hooks (Python 3) from typing import Any HOOKS: Any def default_hooks(): ... def dispatch_hook(key, hooks, hook_data, **kwargs): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/models.pyi0000644€tŠÔÚ€2›s®0000001124713576752252030666 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.models (Python 3) from typing import (Any, Dict, Iterator, List, MutableMapping, Optional, Text, Union) import datetime import types from . import hooks from . import structures from . import auth from . 
import cookies from .cookies import RequestsCookieJar from .packages.urllib3 import fields from .packages.urllib3 import filepost from .packages.urllib3 import util from .packages.urllib3 import exceptions as urllib3_exceptions from . import exceptions from . import utils from . import compat from . import status_codes default_hooks = hooks.default_hooks CaseInsensitiveDict = structures.CaseInsensitiveDict HTTPBasicAuth = auth.HTTPBasicAuth cookiejar_from_dict = cookies.cookiejar_from_dict get_cookie_header = cookies.get_cookie_header RequestField = fields.RequestField encode_multipart_formdata = filepost.encode_multipart_formdata parse_url = util.parse_url DecodeError = urllib3_exceptions.DecodeError ReadTimeoutError = urllib3_exceptions.ReadTimeoutError ProtocolError = urllib3_exceptions.ProtocolError LocationParseError = urllib3_exceptions.LocationParseError HTTPError = exceptions.HTTPError MissingSchema = exceptions.MissingSchema InvalidURL = exceptions.InvalidURL ChunkedEncodingError = exceptions.ChunkedEncodingError ContentDecodingError = exceptions.ContentDecodingError ConnectionError = exceptions.ConnectionError StreamConsumedError = exceptions.StreamConsumedError guess_filename = utils.guess_filename get_auth_from_url = utils.get_auth_from_url requote_uri = utils.requote_uri stream_decode_response_unicode = utils.stream_decode_response_unicode to_key_val_list = utils.to_key_val_list parse_header_links = utils.parse_header_links iter_slices = utils.iter_slices guess_json_utf = utils.guess_json_utf super_len = utils.super_len to_native_string = utils.to_native_string codes = status_codes.codes REDIRECT_STATI: Any DEFAULT_REDIRECT_LIMIT: Any CONTENT_CHUNK_SIZE: Any ITER_CHUNK_SIZE: Any json_dumps: Any class RequestEncodingMixin: @property def path_url(self): ... class RequestHooksMixin: def register_hook(self, event, hook): ... def deregister_hook(self, event, hook): ... 
class Request(RequestHooksMixin): hooks: Any method: Any url: Any headers: Any files: Any data: Any json: Any params: Any auth: Any cookies: Any def __init__(self, method=..., url=..., headers=..., files=..., data=..., params=..., auth=..., cookies=..., hooks=..., json=...) -> None: ... def prepare(self): ... class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): method: Optional[Union[str, Text]] url: Optional[Union[str, Text]] headers: CaseInsensitiveDict[str] body: Optional[Union[bytes, Text]] hooks: Any def __init__(self) -> None: ... def prepare(self, method=..., url=..., headers=..., files=..., data=..., params=..., auth=..., cookies=..., hooks=..., json=...): ... def copy(self): ... def prepare_method(self, method): ... def prepare_url(self, url, params): ... def prepare_headers(self, headers): ... def prepare_body(self, data, files, json=...): ... def prepare_content_length(self, body): ... def prepare_auth(self, auth, url=...): ... def prepare_cookies(self, cookies): ... def prepare_hooks(self, hooks): ... class Response: __attrs__: Any status_code: int headers: MutableMapping[str, str] raw: Any url: str encoding: str history: List[Response] reason: str cookies: RequestsCookieJar elapsed: datetime.timedelta request: PreparedRequest def __init__(self) -> None: ... def __bool__(self) -> bool: ... def __nonzero__(self) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... def __enter__(self) -> Response: ... def __exit__(self, *args: Any) -> None: ... @property def next(self) -> Optional[PreparedRequest]: ... @property def ok(self) -> bool: ... @property def is_redirect(self) -> bool: ... @property def is_permanent_redirect(self) -> bool: ... @property def apparent_encoding(self) -> str: ... def iter_content(self, chunk_size: Optional[int] = ..., decode_unicode: bool = ...) -> Iterator[Any]: ... def iter_lines(self, chunk_size: Optional[int] = ..., decode_unicode: bool = ..., delimiter: Union[Text, bytes] = ...) -> Iterator[Any]: ... 
@property def content(self) -> bytes: ... @property def text(self) -> str: ... def json(self, **kwargs) -> Any: ... @property def links(self) -> Dict[Any, Any]: ... def raise_for_status(self) -> None: ... def close(self) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/0000755€tŠÔÚ€2›s®0000000000013576752267030437 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/__init__.pyi0000644€tŠÔÚ€2›s®0000000023613576752252032714 0ustar jukkaDROPBOX\Domain Users00000000000000class VendorAlias: def __init__(self, package_names) -> None: ... def find_module(self, fullname, path=...): ... def load_module(self, name): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/0000755€tŠÔÚ€2›s®0000000000013576752267032013 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/__init__.pyi0000644€tŠÔÚ€2›s®0000000162213576752252034270 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from . import connectionpool from . import filepost from . import poolmanager from . import response from .util import request as _request from .util import url from .util import timeout from .util import retry import logging __license__: Any HTTPConnectionPool = connectionpool.HTTPConnectionPool HTTPSConnectionPool = connectionpool.HTTPSConnectionPool connection_from_url = connectionpool.connection_from_url encode_multipart_formdata = filepost.encode_multipart_formdata PoolManager = poolmanager.PoolManager ProxyManager = poolmanager.ProxyManager proxy_from_url = poolmanager.proxy_from_url HTTPResponse = response.HTTPResponse make_headers = _request.make_headers get_host = url.get_host Timeout = timeout.Timeout Retry = retry.Retry class NullHandler(logging.Handler): def emit(self, record): ... def add_stderr_logger(level=...): ... def disable_warnings(category=...): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/_collections.pyi0000644€tŠÔÚ€2›s®0000000277713576752252035222 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, NoReturn, TypeVar from collections import MutableMapping _KT = TypeVar("_KT") _VT = TypeVar("_VT") class RLock: def __enter__(self): ... def __exit__(self, exc_type, exc_value, traceback): ... class RecentlyUsedContainer(MutableMapping[_KT, _VT]): ContainerCls: Any dispose_func: Any lock: Any def __init__(self, maxsize=..., dispose_func=...) -> None: ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def __len__(self): ... def __iter__(self): ... def clear(self): ... def keys(self): ... class HTTPHeaderDict(MutableMapping[str, str]): def __init__(self, headers=..., **kwargs) -> None: ... def __setitem__(self, key, val): ... def __getitem__(self, key): ... def __delitem__(self, key): ... def __contains__(self, key): ... def __eq__(self, other): ... def __iter__(self) -> NoReturn: ... def __len__(self) -> int: ... def __ne__(self, other): ... values: Any get: Any update: Any iterkeys: Any itervalues: Any def pop(self, key, default=...): ... def discard(self, key): ... def add(self, key, val): ... def extend(self, *args, **kwargs): ... def getlist(self, key): ... getheaders: Any getallmatchingheaders: Any iget: Any def copy(self): ... def iteritems(self): ... def itermerged(self): ... def items(self): ... @classmethod def from_httplib(cls, message, duplicates=...): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/connection.pyi0000644€tŠÔÚ€2›s®0000000401013576752252034662 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.packages.urllib3.connection (Python 3.4) import sys from typing import Any from . import packages import ssl from . import exceptions from .packages import ssl_match_hostname from .util import ssl_ from . 
import util if sys.version_info < (3, 0): from httplib import HTTPConnection as _HTTPConnection from httplib import HTTPException as HTTPException class ConnectionError(Exception): ... else: from http.client import HTTPConnection as _HTTPConnection from http.client import HTTPException as HTTPException from builtins import ConnectionError as ConnectionError class DummyConnection: ... BaseSSLError = ssl.SSLError ConnectTimeoutError = exceptions.ConnectTimeoutError SystemTimeWarning = exceptions.SystemTimeWarning SecurityWarning = exceptions.SecurityWarning match_hostname = ssl_match_hostname.match_hostname resolve_cert_reqs = ssl_.resolve_cert_reqs resolve_ssl_version = ssl_.resolve_ssl_version ssl_wrap_socket = ssl_.ssl_wrap_socket assert_fingerprint = ssl_.assert_fingerprint connection = util.connection port_by_scheme: Any RECENT_DATE: Any class HTTPConnection(_HTTPConnection): default_port: Any default_socket_options: Any is_verified: Any source_address: Any socket_options: Any def __init__(self, *args, **kw) -> None: ... def connect(self): ... class HTTPSConnection(HTTPConnection): default_port: Any key_file: Any cert_file: Any def __init__(self, host, port=..., key_file=..., cert_file=..., strict=..., timeout=..., **kw) -> None: ... sock: Any def connect(self): ... class VerifiedHTTPSConnection(HTTPSConnection): cert_reqs: Any ca_certs: Any ssl_version: Any assert_fingerprint: Any key_file: Any cert_file: Any assert_hostname: Any def set_cert(self, key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., assert_hostname=..., assert_fingerprint=...): ... sock: Any auto_open: Any is_verified: Any def connect(self): ... UnverifiedHTTPSConnection = HTTPSConnection mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/connectionpool.pyi0000644€tŠÔÚ€2›s®0000000556613576752252035575 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from . import exceptions from .packages import ssl_match_hostname from . 
import packages from .connection import ( HTTPException as HTTPException, BaseSSLError as BaseSSLError, ConnectionError as ConnectionError, ) from . import request from . import response from . import connection from .util import connection as _connection from .util import retry from .util import timeout from .util import url ClosedPoolError = exceptions.ClosedPoolError ProtocolError = exceptions.ProtocolError EmptyPoolError = exceptions.EmptyPoolError HostChangedError = exceptions.HostChangedError LocationValueError = exceptions.LocationValueError MaxRetryError = exceptions.MaxRetryError ProxyError = exceptions.ProxyError ReadTimeoutError = exceptions.ReadTimeoutError SSLError = exceptions.SSLError TimeoutError = exceptions.TimeoutError InsecureRequestWarning = exceptions.InsecureRequestWarning CertificateError = ssl_match_hostname.CertificateError port_by_scheme = connection.port_by_scheme DummyConnection = connection.DummyConnection HTTPConnection = connection.HTTPConnection HTTPSConnection = connection.HTTPSConnection VerifiedHTTPSConnection = connection.VerifiedHTTPSConnection RequestMethods = request.RequestMethods HTTPResponse = response.HTTPResponse is_connection_dropped = _connection.is_connection_dropped Retry = retry.Retry Timeout = timeout.Timeout get_host = url.get_host xrange: Any log: Any class ConnectionPool: scheme: Any QueueCls: Any host: Any port: Any def __init__(self, host, port=...) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_val, exc_tb): ... def close(self): ... class HTTPConnectionPool(ConnectionPool, RequestMethods): scheme: Any ConnectionCls: Any strict: Any timeout: Any retries: Any pool: Any block: Any proxy: Any proxy_headers: Any num_connections: Any num_requests: Any conn_kw: Any def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., **conn_kw) -> None: ... def close(self): ... def is_same_host(self, url): ... 
def urlopen(self, method, url, body=..., headers=..., retries=..., redirect=..., assert_same_host=..., timeout=..., pool_timeout=..., release_conn=..., **response_kw): ... class HTTPSConnectionPool(HTTPConnectionPool): scheme: Any ConnectionCls: Any key_file: Any cert_file: Any cert_reqs: Any ca_certs: Any ssl_version: Any assert_hostname: Any assert_fingerprint: Any def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., ssl_version=..., assert_hostname=..., assert_fingerprint=..., **conn_kw) -> None: ... def connection_from_url(url, **kw): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/contrib/0000755€tŠÔÚ€2›s®0000000000013576752267033453 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/contrib/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252035715 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/exceptions.pyi0000644€tŠÔÚ€2›s®0000000260513576752252034714 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class HTTPError(Exception): ... class HTTPWarning(Warning): ... class PoolError(HTTPError): pool: Any def __init__(self, pool, message) -> None: ... def __reduce__(self): ... class RequestError(PoolError): url: Any def __init__(self, pool, url, message) -> None: ... def __reduce__(self): ... class SSLError(HTTPError): ... class ProxyError(HTTPError): ... class DecodeError(HTTPError): ... class ProtocolError(HTTPError): ... ConnectionError: Any class MaxRetryError(RequestError): reason: Any def __init__(self, pool, url, reason=...) -> None: ... class HostChangedError(RequestError): retries: Any def __init__(self, pool, url, retries=...) -> None: ... class TimeoutStateError(HTTPError): ... class TimeoutError(HTTPError): ... 
class ReadTimeoutError(TimeoutError, RequestError): ... class ConnectTimeoutError(TimeoutError): ... class EmptyPoolError(PoolError): ... class ClosedPoolError(PoolError): ... class LocationValueError(ValueError, HTTPError): ... class LocationParseError(LocationValueError): location: Any def __init__(self, location) -> None: ... class ResponseError(HTTPError): GENERIC_ERROR: Any SPECIFIC_ERROR: Any class SecurityWarning(HTTPWarning): ... class InsecureRequestWarning(SecurityWarning): ... class SystemTimeWarning(SecurityWarning): ... class InsecurePlatformWarning(SecurityWarning): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/fields.pyi0000644€tŠÔÚ€2›s®0000000101413576752252033772 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.packages.urllib3.fields (Python 3.4) from typing import Any from . import packages def guess_content_type(filename, default=...): ... def format_header_param(name, value): ... class RequestField: data: Any headers: Any def __init__(self, name, data, filename=..., headers=...) -> None: ... @classmethod def from_tuples(cls, fieldname, value): ... def render_headers(self): ... def make_multipart(self, content_disposition=..., content_type=..., content_location=...): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/filepost.pyi0000644€tŠÔÚ€2›s®0000000050713576752252034357 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from . import packages # from .packages import six from . import fields # six = packages.six # b = six.b RequestField = fields.RequestField writer: Any def choose_boundary(): ... def iter_field_objects(fields): ... def iter_fields(fields): ... def encode_multipart_formdata(fields, boundary=...): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/0000755€tŠÔÚ€2›s®0000000000013576752267033571 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252036033 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/0000755€tŠÔÚ€2›s®0000000000013576752267037444 5ustar jukkaDROPBOX\Domain Users00000000000000././@LongLink0000000000000000000000000000015600000000000011217 Lustar 00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyimypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/__i0000644€tŠÔÚ€2›s®0000000013013576752252040101 0ustar jukkaDROPBOX\Domain Users00000000000000import ssl CertificateError = ssl.CertificateError match_hostname = ssl.match_hostname ././@LongLink0000000000000000000000000000016500000000000011217 Lustar 00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyimypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/_im0000644€tŠÔÚ€2›s®0000000012113576752252040117 0ustar jukkaDROPBOX\Domain Users00000000000000class CertificateError(ValueError): ... def match_hostname(cert, hostname): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/poolmanager.pyi0000644€tŠÔÚ€2›s®0000000243413576752252035037 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .request import RequestMethods class PoolManager(RequestMethods): proxy: Any connection_pool_kw: Any pools: Any def __init__(self, num_pools=..., headers=..., **connection_pool_kw) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_val, exc_tb): ... def clear(self): ... 
def connection_from_host(self, host, port=..., scheme=...): ... def connection_from_url(self, url): ... # TODO: This was the original signature -- copied another one from base class to fix complaint. # def urlopen(self, method, url, redirect=True, **kw): ... def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ... class ProxyManager(PoolManager): proxy: Any proxy_headers: Any def __init__(self, proxy_url, num_pools=..., headers=..., proxy_headers=..., **connection_pool_kw) -> None: ... def connection_from_host(self, host, port=..., scheme=...): ... # TODO: This was the original signature -- copied another one from base class to fix complaint. # def urlopen(self, method, url, redirect=True, **kw): ... def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ... def proxy_from_url(url, **kw): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/request.pyi0000644€tŠÔÚ€2›s®0000000101013576752252034210 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class RequestMethods: headers: Any def __init__(self, headers=...) -> None: ... def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ... def request(self, method, url, fields=..., headers=..., **urlopen_kw): ... def request_encode_url(self, method, url, fields=..., **urlopen_kw): ... def request_encode_body(self, method, url, fields=..., headers=..., encode_multipart=..., multipart_boundary=..., **urlopen_kw): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/response.pyi0000644€tŠÔÚ€2›s®0000000324113576752252034366 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import io from . import _collections from . 
import exceptions from .connection import HTTPException as HTTPException, BaseSSLError as BaseSSLError from .util import response HTTPHeaderDict = _collections.HTTPHeaderDict ProtocolError = exceptions.ProtocolError DecodeError = exceptions.DecodeError ReadTimeoutError = exceptions.ReadTimeoutError binary_type = bytes # six.binary_type PY3 = True # six.PY3 is_fp_closed = response.is_fp_closed class DeflateDecoder: def __init__(self) -> None: ... def __getattr__(self, name): ... def decompress(self, data): ... class GzipDecoder: def __init__(self) -> None: ... def __getattr__(self, name): ... def decompress(self, data): ... class HTTPResponse(io.IOBase): CONTENT_DECODERS: Any REDIRECT_STATUSES: Any headers: Any status: Any version: Any reason: Any strict: Any decode_content: Any def __init__(self, body=..., headers=..., status=..., version=..., reason=..., strict=..., preload_content=..., decode_content=..., original_response=..., pool=..., connection=...) -> None: ... def get_redirect_location(self): ... def release_conn(self): ... @property def data(self): ... def tell(self): ... def read(self, amt=..., decode_content=..., cache_content=...): ... def stream(self, amt=..., decode_content=...): ... @classmethod def from_httplib(cls, r, **response_kw): ... def getheaders(self): ... def getheader(self, name, default=...): ... def close(self): ... @property def closed(self): ... def fileno(self): ... def flush(self): ... def readable(self): ... def readinto(self, b): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/0000755€tŠÔÚ€2›s®0000000000013576752267032770 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/__init__.pyi0000644€tŠÔÚ€2›s®0000000126413576752252035247 0ustar jukkaDROPBOX\Domain Users00000000000000from . import connection from . import request from . import response from . import ssl_ from . import timeout from . import retry from . 
import url import ssl is_connection_dropped = connection.is_connection_dropped make_headers = request.make_headers is_fp_closed = response.is_fp_closed SSLContext = ssl.SSLContext HAS_SNI = ssl_.HAS_SNI assert_fingerprint = ssl_.assert_fingerprint resolve_cert_reqs = ssl_.resolve_cert_reqs resolve_ssl_version = ssl_.resolve_ssl_version ssl_wrap_socket = ssl_.ssl_wrap_socket current_time = timeout.current_time Timeout = timeout.Timeout Retry = retry.Retry get_host = url.get_host parse_url = url.parse_url split_first = url.split_first Url = url.Url mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/connection.pyi0000644€tŠÔÚ€2›s®0000000027413576752252035647 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any poll: Any select: Any HAS_IPV6: bool def is_connection_dropped(conn): ... def create_connection(address, timeout=..., source_address=..., socket_options=...): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/request.pyi0000644€tŠÔÚ€2›s®0000000033413576752252035175 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any # from ..packages import six # b = six.b ACCEPT_ENCODING: Any def make_headers(keep_alive=..., accept_encoding=..., user_agent=..., basic_auth=..., proxy_basic_auth=..., disable_cache=...): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/response.pyi0000644€tŠÔÚ€2›s®0000000003313576752252035337 0ustar jukkaDROPBOX\Domain Users00000000000000def is_fp_closed(obj): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/retry.pyi0000644€tŠÔÚ€2›s®0000000211113576752252034645 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .. import exceptions from .. 
import packages ConnectTimeoutError = exceptions.ConnectTimeoutError MaxRetryError = exceptions.MaxRetryError ProtocolError = exceptions.ProtocolError ReadTimeoutError = exceptions.ReadTimeoutError ResponseError = exceptions.ResponseError log: Any class Retry: DEFAULT_METHOD_WHITELIST: Any BACKOFF_MAX: Any total: Any connect: Any read: Any redirect: Any status_forcelist: Any method_whitelist: Any backoff_factor: Any raise_on_redirect: Any def __init__(self, total=..., connect=..., read=..., redirect=..., method_whitelist=..., status_forcelist=..., backoff_factor=..., raise_on_redirect=..., _observed_errors=...) -> None: ... def new(self, **kw): ... @classmethod def from_int(cls, retries, redirect=..., default=...): ... def get_backoff_time(self): ... def sleep(self): ... def is_forced_retry(self, method, status_code): ... def is_exhausted(self): ... def increment(self, method=..., url=..., response=..., error=..., _pool=..., _stacktrace=...): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/ssl_.pyi0000644€tŠÔÚ€2›s®0000000123713576752252034450 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .. import exceptions import ssl SSLError = exceptions.SSLError InsecurePlatformWarning = exceptions.InsecurePlatformWarning SSLContext = ssl.SSLContext HAS_SNI: Any create_default_context: Any OP_NO_SSLv2: Any OP_NO_SSLv3: Any OP_NO_COMPRESSION: Any def assert_fingerprint(cert, fingerprint): ... def resolve_cert_reqs(candidate): ... def resolve_ssl_version(candidate): ... def create_urllib3_context(ssl_version=..., cert_reqs=..., options=..., ciphers=...): ... def ssl_wrap_socket(sock, keyfile=..., certfile=..., cert_reqs=..., ca_certs=..., server_hostname=..., ssl_version=..., ciphers=..., ssl_context=...): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/timeout.pyi0000644€tŠÔÚ€2›s®0000000076213576752252035200 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .. 
import exceptions TimeoutStateError = exceptions.TimeoutStateError def current_time(): ... class Timeout: DEFAULT_TIMEOUT: Any total: Any def __init__(self, total=..., connect=..., read=...) -> None: ... @classmethod def from_float(cls, timeout): ... def clone(self): ... def start_connect(self): ... def get_connect_duration(self): ... @property def connect_timeout(self): ... @property def read_timeout(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/url.pyi0000644€tŠÔÚ€2›s®0000000075313576752252034314 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .. import exceptions LocationParseError = exceptions.LocationParseError url_attrs: Any class Url: slots: Any def __new__(cls, scheme=..., auth=..., host=..., port=..., path=..., query=..., fragment=...): ... @property def hostname(self): ... @property def request_uri(self): ... @property def netloc(self): ... @property def url(self): ... def split_first(s, delims): ... def parse_url(url): ... def get_host(url): ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/sessions.pyi0000644€tŠÔÚ€2›s®0000001163413576752252031251 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.sessions (Python 3) from typing import Any, Union, List, MutableMapping, Text, Optional, IO, Tuple, Callable, Iterable from . import adapters from . import auth as _auth from . import compat from . import cookies from . import models from .models import Response from . import hooks from . import utils from . import exceptions from .packages.urllib3 import _collections from . import structures from . 
import status_codes BaseAdapter = adapters.BaseAdapter OrderedDict = compat.OrderedDict cookiejar_from_dict = cookies.cookiejar_from_dict extract_cookies_to_jar = cookies.extract_cookies_to_jar RequestsCookieJar = cookies.RequestsCookieJar merge_cookies = cookies.merge_cookies Request = models.Request PreparedRequest = models.PreparedRequest DEFAULT_REDIRECT_LIMIT = models.DEFAULT_REDIRECT_LIMIT default_hooks = hooks.default_hooks dispatch_hook = hooks.dispatch_hook to_key_val_list = utils.to_key_val_list default_headers = utils.default_headers to_native_string = utils.to_native_string TooManyRedirects = exceptions.TooManyRedirects InvalidSchema = exceptions.InvalidSchema ChunkedEncodingError = exceptions.ChunkedEncodingError ContentDecodingError = exceptions.ContentDecodingError RecentlyUsedContainer = _collections.RecentlyUsedContainer CaseInsensitiveDict = structures.CaseInsensitiveDict HTTPAdapter = adapters.HTTPAdapter requote_uri = utils.requote_uri get_environ_proxies = utils.get_environ_proxies get_netrc_auth = utils.get_netrc_auth should_bypass_proxies = utils.should_bypass_proxies get_auth_from_url = utils.get_auth_from_url codes = status_codes.codes REDIRECT_STATI = models.REDIRECT_STATI REDIRECT_CACHE_SIZE: Any def merge_setting(request_setting, session_setting, dict_class=...): ... def merge_hooks(request_hooks, session_hooks, dict_class=...): ... class SessionRedirectMixin: def resolve_redirects(self, resp, req, stream=..., timeout=..., verify=..., cert=..., proxies=...): ... def rebuild_auth(self, prepared_request, response): ... def rebuild_proxies(self, prepared_request, proxies): ... 
_Data = Union[None, Text, bytes, MutableMapping[str, Any], MutableMapping[Text, Any], Iterable[Tuple[Text, Optional[Text]]], IO] _Hook = Callable[[Response], Any] _Hooks = MutableMapping[Text, List[_Hook]] _HooksInput = MutableMapping[Text, Union[Iterable[_Hook], _Hook]] class Session(SessionRedirectMixin): __attrs__: Any headers: CaseInsensitiveDict[Text] auth: Union[None, Tuple[Text, Text], _auth.AuthBase, Callable[[Request], Request]] proxies: MutableMapping[Text, Text] hooks: _Hooks params: Union[bytes, MutableMapping[Text, Text]] stream: bool verify: Union[None, bool, Text] cert: Union[None, Text, Tuple[Text, Text]] max_redirects: int trust_env: bool cookies: RequestsCookieJar adapters: MutableMapping[Any, Any] redirect_cache: RecentlyUsedContainer[Any, Any] def __init__(self) -> None: ... def __enter__(self) -> Session: ... def __exit__(self, *args) -> None: ... def prepare_request(self, request): ... def request(self, method: str, url: Union[str, bytes, Text], params: Union[None, bytes, MutableMapping[Text, Text]] = ..., data: _Data = ..., headers: Optional[MutableMapping[Text, Text]] = ..., cookies: Union[None, RequestsCookieJar, MutableMapping[Text, Text]] = ..., files: Optional[MutableMapping[Text, IO[Any]]] = ..., auth: Union[None, Tuple[Text, Text], _auth.AuthBase, Callable[[Request], Request]] = ..., timeout: Union[None, float, Tuple[float, float]] = ..., allow_redirects: Optional[bool] = ..., proxies: Optional[MutableMapping[Text, Text]] = ..., hooks: Optional[_HooksInput] = ..., stream: Optional[bool] = ..., verify: Union[None, bool, Text] = ..., cert: Union[Text, Tuple[Text, Text], None] = ..., json: Optional[Any] = ..., ) -> Response: ... def get(self, url: Union[Text, bytes], **kwargs) -> Response: ... def options(self, url: Union[Text, bytes], **kwargs) -> Response: ... def head(self, url: Union[Text, bytes], **kwargs) -> Response: ... def post(self, url: Union[Text, bytes], data: _Data = ..., json: Optional[Any] = ..., **kwargs) -> Response: ... 
def put(self, url: Union[Text, bytes], data: _Data = ..., **kwargs) -> Response: ... def patch(self, url: Union[Text, bytes], data: _Data = ..., **kwargs) -> Response: ... def delete(self, url: Union[Text, bytes], **kwargs) -> Response: ... def send(self, request, **kwargs): ... def merge_environment_settings(self, url, proxies, stream, verify, cert): ... def get_adapter(self, url): ... def close(self) -> None: ... def mount(self, prefix: Union[Text, bytes], adapter: BaseAdapter) -> None: ... def session() -> Session: ... mypy-0.761/mypy/typeshed/third_party/2and3/requests/status_codes.pyi0000644€tŠÔÚ€2›s®0000000010613576752252032073 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .structures import LookupDict codes: Any mypy-0.761/mypy/typeshed/third_party/2and3/requests/structures.pyi0000644€tŠÔÚ€2›s®0000000171013576752252031620 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterable, Iterator, Mapping, MutableMapping, Optional, Tuple, TypeVar, Union, Generic _VT = TypeVar('_VT') class CaseInsensitiveDict(MutableMapping[str, _VT], Generic[_VT]): def __init__(self, data: Optional[Union[Mapping[str, _VT], Iterable[Tuple[str, _VT]]]] = ..., **kwargs: _VT) -> None: ... def lower_items(self) -> Iterator[Tuple[str, _VT]]: ... def __setitem__(self, key: str, value: _VT) -> None: ... def __getitem__(self, key: str) -> _VT: ... def __delitem__(self, key: str) -> None: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... def copy(self) -> CaseInsensitiveDict[_VT]: ... class LookupDict(Dict[str, _VT]): name: Any def __init__(self, name: Any = ...) -> None: ... def __getitem__(self, key: str) -> Optional[_VT]: ... # type: ignore def __getattr__(self, attr: str) -> _VT: ... def __setattr__(self, attr: str, value: _VT) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/requests/utils.pyi0000644€tŠÔÚ€2›s®0000000315113576752252030536 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.utils (Python 3) from typing import Any from . import compat from . import cookies from . import structures from . import exceptions OrderedDict = compat.OrderedDict RequestsCookieJar = cookies.RequestsCookieJar cookiejar_from_dict = cookies.cookiejar_from_dict CaseInsensitiveDict = structures.CaseInsensitiveDict InvalidURL = exceptions.InvalidURL NETRC_FILES: Any DEFAULT_CA_BUNDLE_PATH: Any def dict_to_sequence(d): ... def super_len(o): ... def get_netrc_auth(url): ... def guess_filename(obj): ... def from_key_val_list(value): ... def to_key_val_list(value): ... def parse_list_header(value): ... def parse_dict_header(value): ... def unquote_header_value(value, is_filename=...): ... def dict_from_cookiejar(cj): ... def add_dict_to_cookiejar(cj, cookie_dict): ... def get_encodings_from_content(content): ... def get_encoding_from_headers(headers): ... def stream_decode_response_unicode(iterator, r): ... def iter_slices(string, slice_length): ... def get_unicode_from_response(r): ... UNRESERVED_SET: Any def unquote_unreserved(uri): ... def requote_uri(uri): ... def address_in_network(ip, net): ... def dotted_netmask(mask): ... def is_ipv4_address(string_ip): ... def is_valid_cidr(string_network): ... def set_environ(env_name, value): ... def should_bypass_proxies(url): ... def get_environ_proxies(url): ... def default_user_agent(name=...): ... def default_headers(): ... def parse_header_links(value): ... def guess_json_utf(data): ... def prepend_scheme_if_needed(url, new_scheme): ... def get_auth_from_url(url): ... def to_native_string(string, encoding=...): ... def urldefragauth(url): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/simplejson/0000755€tŠÔÚ€2›s®0000000000013576752267027171 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/simplejson/__init__.pyi0000644€tŠÔÚ€2›s®0000000103313576752252031442 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Text, Union from simplejson.scanner import JSONDecodeError as JSONDecodeError from simplejson.decoder import JSONDecoder as JSONDecoder from simplejson.encoder import JSONEncoder as JSONEncoder, JSONEncoderForHTML as JSONEncoderForHTML _LoadsString = Union[Text, bytes, bytearray] def dumps(obj: Any, *args: Any, **kwds: Any) -> str: ... def dump(obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ... def loads(s: _LoadsString, **kwds: Any) -> Any: ... def load(fp: IO[str], **kwds: Any) -> Any: ... mypy-0.761/mypy/typeshed/third_party/2and3/simplejson/decoder.pyi0000644€tŠÔÚ€2›s®0000000035213576752252031313 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Match class JSONDecoder(object): def __init__(self, **kwargs): ... def decode(self, s: str, _w: Match[str], _PY3: bool): ... def raw_decode(self, s: str, idx: int, _w: Match[str], _PY3: bool): ... mypy-0.761/mypy/typeshed/third_party/2and3/simplejson/encoder.pyi0000644€tŠÔÚ€2›s®0000000041413576752252031324 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO class JSONEncoder(object): def __init__(self, *args, **kwargs): ... def encode(self, o: Any): ... def default(self, o: Any): ... def iterencode(self, o: Any, _one_shot: bool): ... class JSONEncoderForHTML(JSONEncoder): ... mypy-0.761/mypy/typeshed/third_party/2and3/simplejson/scanner.pyi0000644€tŠÔÚ€2›s®0000000040613576752252031337 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional class JSONDecodeError(ValueError): msg: str = ... doc: str = ... pos: int = ... end: Optional[int] = ... lineno: int = ... colno: int = ... endlineno: Optional[int] = ... 
endcolno: Optional[int] = ... mypy-0.761/mypy/typeshed/third_party/2and3/singledispatch.pyi0000644€tŠÔÚ€2›s®0000000117413576752252030527 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Generic, Mapping, Optional, TypeVar, overload _T = TypeVar("_T") class _SingleDispatchCallable(Generic[_T]): registry: Mapping[Any, Callable[..., _T]] def dispatch(self, cls: Any) -> Callable[..., _T]: ... @overload def register(self, cls: Any) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... @overload def register(self, cls: Any, func: Callable[..., _T]) -> Callable[..., _T]: ... def _clear_cache(self) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> _T: ... def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... mypy-0.761/mypy/typeshed/third_party/2and3/tabulate.pyi0000644€tŠÔÚ€2›s®0000000265713576752252027336 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub for tabulate: https://github.com/astanin/python-tabulate from typing import Any, Callable, Container, Iterable, List, Mapping, NamedTuple, Optional, Sequence, Union PRESERVE_WHITESPACE: bool WIDE_CHARS_MODE: bool tabulate_formats: List[str] class Line(NamedTuple): begin: str hline: str sep: str end: str class DataRow(NamedTuple): begin: str sep: str end: str _TableFormatLine = Union[None, Line, Callable[[List[int], List[str]], str]] _TableFormatRow = Union[None, DataRow, Callable[[List[Any], List[int], List[str]], str]] class TableFormat(NamedTuple): lineabove: _TableFormatLine linebelowheader: _TableFormatLine linebetweenrows: _TableFormatLine linebelow: _TableFormatLine headerrow: _TableFormatRow datarow: _TableFormatRow padding: int with_header_hide: Optional[Container[str]] def simple_separated_format(separator: str) -> TableFormat: ... 
def tabulate( tabular_data: Union[Mapping[str, Iterable[Any]], Iterable[Iterable[Any]]], headers: Union[str, Sequence[str]] = ..., tablefmt: Union[str, TableFormat] = ..., floatfmt: Union[str, Iterable[str]] = ..., numalign: Optional[str] = ..., stralign: Optional[str] = ..., missingval: Union[str, Iterable[str]] = ..., showindex: Union[str, bool, Iterable[Any]] = ..., disable_numparse: Union[bool, Iterable[int]] = ..., colalign: Optional[Iterable[Optional[str]]] = ..., ) -> str: ... mypy-0.761/mypy/typeshed/third_party/2and3/termcolor.pyi0000644€tŠÔÚ€2›s®0000000070413576752252027532 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub for termcolor: https://pypi.python.org/pypi/termcolor from typing import Any, Iterable, Optional, Text def colored( text: Text, color: Optional[Text] = ..., on_color: Optional[Text] = ..., attrs: Optional[Iterable[Text]] = ..., ) -> Text: ... def cprint( text: Text, color: Optional[Text] = ..., on_color: Optional[Text] = ..., attrs: Optional[Iterable[Text]] = ..., **kwargs: Any, ) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/toml.pyi0000644€tŠÔÚ€2›s®0000000146013576752252026477 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, List, Mapping, MutableMapping, Optional, Protocol, Text, Type, Union import datetime import sys if sys.version_info >= (3, 4): import pathlib if sys.version_info >= (3, 6): import os _PathLike = Union[Text, pathlib.PurePath, os.PathLike] else: _PathLike = Union[Text, pathlib.PurePath] else: _PathLike = Text class _Writable(Protocol): def write(self, obj: str) -> Any: ... class TomlDecodeError(Exception): ... def load(f: Union[_PathLike, List[Text], IO[str]], _dict: Type[MutableMapping[str, Any]] = ...) -> MutableMapping[str, Any]: ... def loads(s: Text, _dict: Type[MutableMapping[str, Any]] = ...) -> MutableMapping[str, Any]: ... def dump(o: Mapping[str, Any], f: _Writable) -> str: ... def dumps(o: Mapping[str, Any]) -> str: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/typing_extensions.pyi0000644€tŠÔÚ€2›s®0000000525113576752252031317 0ustar jukkaDROPBOX\Domain Users00000000000000import abc import sys from typing import Callable from typing import ClassVar as ClassVar from typing import ContextManager as ContextManager from typing import Counter as Counter from typing import DefaultDict as DefaultDict from typing import Deque as Deque from typing import NewType as NewType from typing import NoReturn as NoReturn from typing import overload as overload from typing import Text as Text from typing import Type as Type from typing import TYPE_CHECKING as TYPE_CHECKING from typing import TypeVar, Any, Mapping, ItemsView, KeysView, Optional, ValuesView, Dict, Type _T = TypeVar('_T') _F = TypeVar('_F', bound=Callable[..., Any]) _TC = TypeVar('_TC', bound=Type[object]) class _SpecialForm: def __getitem__(self, typeargs: Any) -> Any: ... def runtime_checkable(cls: _TC) -> _TC: ... # This alias for above is kept here for backwards compatibility. runtime = runtime_checkable Protocol: _SpecialForm = ... Final: _SpecialForm = ... def final(f: _F) -> _F: ... Literal: _SpecialForm = ... def IntVar(__name: str) -> Any: ... # returns a new TypeVar # Internal mypy fallback type for all typed dicts (does not exist at runtime) class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def copy(self: _T) -> _T: ... # Using NoReturn so that only calls using mypy plugin hook that specialize the signature # can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: _T = ...) -> object: ... def update(self: _T, __m: _T) -> None: ... if sys.version_info < (3, 0): def has_key(self, k: str) -> bool: ... def viewitems(self) -> ItemsView[str, object]: ... def viewkeys(self) -> KeysView[str]: ... def viewvalues(self) -> ValuesView[object]: ... 
def __delitem__(self, k: NoReturn) -> None: ... # TypedDict is a (non-subscriptable) special form. TypedDict: object = ... if sys.version_info >= (3, 3): from typing import ChainMap as ChainMap if sys.version_info >= (3, 5): from typing import AsyncIterable as AsyncIterable from typing import AsyncIterator as AsyncIterator from typing import AsyncContextManager as AsyncContextManager from typing import Awaitable as Awaitable from typing import Coroutine as Coroutine if sys.version_info >= (3, 6): from typing import AsyncGenerator as AsyncGenerator def get_type_hints( obj: Callable[..., Any], globalns: Optional[Dict[str, Any]] = ..., localns: Optional[Dict[str, Any]] = ..., include_extras: bool = ... ) -> Dict[str, Any]: ... Annotated: _SpecialForm = ... _AnnotatedAlias: Any = ... # undocumented mypy-0.761/mypy/typeshed/third_party/2and3/ujson.pyi0000644€tŠÔÚ€2›s®0000000202613576752252026661 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for ujson # See: https://pypi.python.org/pypi/ujson from typing import Any, AnyStr, IO, Optional __version__: str def encode( obj: Any, ensure_ascii: bool = ..., double_precision: int = ..., encode_html_chars: bool = ..., escape_forward_slashes: bool = ..., sort_keys: bool = ..., indent: int = ..., ) -> str: ... def dumps( obj: Any, ensure_ascii: bool = ..., double_precision: int = ..., encode_html_chars: bool = ..., escape_forward_slashes: bool = ..., sort_keys: bool = ..., indent: int = ..., ) -> str: ... def dump( obj: Any, fp: IO[str], ensure_ascii: bool = ..., double_precision: int = ..., encode_html_chars: bool = ..., escape_forward_slashes: bool = ..., sort_keys: bool = ..., indent: int = ..., ) -> None: ... def decode( s: AnyStr, precise_float: bool = ..., ) -> Any: ... def loads( s: AnyStr, precise_float: bool = ..., ) -> Any: ... def load( fp: IO[AnyStr], precise_float: bool = ..., ) -> Any: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/0000755€tŠÔÚ€2›s®0000000000013576752267026651 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/__init__.pyi0000644€tŠÔÚ€2›s®0000001264313576752252031133 0ustar jukkaDROPBOX\Domain Users00000000000000from types import ModuleType from typing import Any from werkzeug import _internal from werkzeug import datastructures from werkzeug import debug from werkzeug import exceptions from werkzeug import formparser from werkzeug import http from werkzeug import local from werkzeug import security from werkzeug import serving from werkzeug import test from werkzeug import testapp from werkzeug import urls from werkzeug import useragents from werkzeug import utils from werkzeug import wrappers from werkzeug import wsgi class module(ModuleType): def __getattr__(self, name): ... def __dir__(self): ... __version__: Any run_simple = serving.run_simple test_app = testapp.test_app UserAgent = useragents.UserAgent _easteregg = _internal._easteregg DebuggedApplication = debug.DebuggedApplication MultiDict = datastructures.MultiDict CombinedMultiDict = datastructures.CombinedMultiDict Headers = datastructures.Headers EnvironHeaders = datastructures.EnvironHeaders ImmutableList = datastructures.ImmutableList ImmutableDict = datastructures.ImmutableDict ImmutableMultiDict = datastructures.ImmutableMultiDict TypeConversionDict = datastructures.TypeConversionDict ImmutableTypeConversionDict = datastructures.ImmutableTypeConversionDict Accept = datastructures.Accept MIMEAccept = datastructures.MIMEAccept CharsetAccept = datastructures.CharsetAccept LanguageAccept = datastructures.LanguageAccept RequestCacheControl = datastructures.RequestCacheControl ResponseCacheControl = datastructures.ResponseCacheControl ETags = datastructures.ETags HeaderSet = datastructures.HeaderSet WWWAuthenticate = datastructures.WWWAuthenticate Authorization = datastructures.Authorization FileMultiDict = 
datastructures.FileMultiDict CallbackDict = datastructures.CallbackDict FileStorage = datastructures.FileStorage OrderedMultiDict = datastructures.OrderedMultiDict ImmutableOrderedMultiDict = datastructures.ImmutableOrderedMultiDict escape = utils.escape environ_property = utils.environ_property append_slash_redirect = utils.append_slash_redirect redirect = utils.redirect cached_property = utils.cached_property import_string = utils.import_string dump_cookie = http.dump_cookie parse_cookie = http.parse_cookie unescape = utils.unescape format_string = utils.format_string find_modules = utils.find_modules header_property = utils.header_property html = utils.html xhtml = utils.xhtml HTMLBuilder = utils.HTMLBuilder validate_arguments = utils.validate_arguments ArgumentValidationError = utils.ArgumentValidationError bind_arguments = utils.bind_arguments secure_filename = utils.secure_filename BaseResponse = wrappers.BaseResponse BaseRequest = wrappers.BaseRequest Request = wrappers.Request Response = wrappers.Response AcceptMixin = wrappers.AcceptMixin ETagRequestMixin = wrappers.ETagRequestMixin ETagResponseMixin = wrappers.ETagResponseMixin ResponseStreamMixin = wrappers.ResponseStreamMixin CommonResponseDescriptorsMixin = wrappers.CommonResponseDescriptorsMixin UserAgentMixin = wrappers.UserAgentMixin AuthorizationMixin = wrappers.AuthorizationMixin WWWAuthenticateMixin = wrappers.WWWAuthenticateMixin CommonRequestDescriptorsMixin = wrappers.CommonRequestDescriptorsMixin Local = local.Local LocalManager = local.LocalManager LocalProxy = local.LocalProxy LocalStack = local.LocalStack release_local = local.release_local generate_password_hash = security.generate_password_hash check_password_hash = security.check_password_hash Client = test.Client EnvironBuilder = test.EnvironBuilder create_environ = test.create_environ run_wsgi_app = test.run_wsgi_app get_current_url = wsgi.get_current_url get_host = wsgi.get_host pop_path_info = wsgi.pop_path_info peek_path_info = 
wsgi.peek_path_info SharedDataMiddleware = wsgi.SharedDataMiddleware DispatcherMiddleware = wsgi.DispatcherMiddleware ClosingIterator = wsgi.ClosingIterator FileWrapper = wsgi.FileWrapper make_line_iter = wsgi.make_line_iter LimitedStream = wsgi.LimitedStream responder = wsgi.responder wrap_file = wsgi.wrap_file extract_path_info = wsgi.extract_path_info parse_etags = http.parse_etags parse_date = http.parse_date http_date = http.http_date cookie_date = http.cookie_date parse_cache_control_header = http.parse_cache_control_header is_resource_modified = http.is_resource_modified parse_accept_header = http.parse_accept_header parse_set_header = http.parse_set_header quote_etag = http.quote_etag unquote_etag = http.unquote_etag generate_etag = http.generate_etag dump_header = http.dump_header parse_list_header = http.parse_list_header parse_dict_header = http.parse_dict_header parse_authorization_header = http.parse_authorization_header parse_www_authenticate_header = http.parse_www_authenticate_header remove_entity_headers = http.remove_entity_headers is_entity_header = http.is_entity_header remove_hop_by_hop_headers = http.remove_hop_by_hop_headers parse_options_header = http.parse_options_header dump_options_header = http.dump_options_header is_hop_by_hop_header = http.is_hop_by_hop_header unquote_header_value = http.unquote_header_value quote_header_value = http.quote_header_value HTTP_STATUS_CODES = http.HTTP_STATUS_CODES url_decode = urls.url_decode url_encode = urls.url_encode url_quote = urls.url_quote url_quote_plus = urls.url_quote_plus url_unquote = urls.url_unquote url_unquote_plus = urls.url_unquote_plus url_fix = urls.url_fix Href = urls.Href iri_to_uri = urls.iri_to_uri uri_to_iri = urls.uri_to_iri parse_form_data = formparser.parse_form_data abort = exceptions.Aborter Aborter = exceptions.Aborter mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/_compat.pyi0000644€tŠÔÚ€2›s®0000000216313576752252031012 0ustar jukkaDROPBOX\Domain 
Users00000000000000import sys from typing import Any, Optional, Text if sys.version_info < (3,): import StringIO as BytesIO else: from io import StringIO as BytesIO PY2: Any WIN: Any unichr: Any text_type: Any string_types: Any integer_types: Any iterkeys: Any itervalues: Any iteritems: Any iterlists: Any iterlistvalues: Any int_to_byte: Any iter_bytes: Any def fix_tuple_repr(obj): ... def implements_iterator(cls): ... def implements_to_string(cls): ... def native_string_result(func): ... def implements_bool(cls): ... range_type: Any NativeStringIO: Any def make_literal_wrapper(reference): ... def normalize_string_tuple(tup): ... def try_coerce_native(s): ... wsgi_get_bytes: Any def wsgi_decoding_dance(s, charset: Text = ..., errors: Text = ...): ... def wsgi_encoding_dance(s, charset: Text = ..., errors: Text = ...): ... def to_bytes(x, charset: Text = ..., errors: Text = ...): ... def to_native(x, charset: Text = ..., errors: Text = ...): ... def reraise(tp, value, tb: Optional[Any] = ...): ... imap: Any izip: Any ifilter: Any def to_unicode(x, charset: Text = ..., errors: Text = ..., allow_none_charset: bool = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/_internal.pyi0000644€tŠÔÚ€2›s®0000000112613576752252031341 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class _Missing: def __reduce__(self): ... class _DictAccessorProperty: read_only: Any name: Any default: Any load_func: Any dump_func: Any __doc__: Any def __init__(self, name, default: Optional[Any] = ..., load_func: Optional[Any] = ..., dump_func: Optional[Any] = ..., read_only: Optional[Any] = ..., doc: Optional[Any] = ...): ... def __get__(self, obj, type: Optional[Any] = ...): ... def __set__(self, obj, value): ... def __delete__(self, obj): ... def _easteregg(app: Optional[Any] = ...): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/_reloader.pyi0000644€tŠÔÚ€2›s®0000000147213576752252031326 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class ReloaderLoop: name: Any extra_files: Any interval: float def __init__(self, extra_files: Optional[Any] = ..., interval: float = ...): ... def run(self): ... def restart_with_reloader(self): ... def trigger_reload(self, filename): ... def log_reload(self, filename): ... class StatReloaderLoop(ReloaderLoop): name: Any def run(self): ... class WatchdogReloaderLoop(ReloaderLoop): observable_paths: Any name: Any observer_class: Any event_handler: Any should_reload: Any def __init__(self, *args, **kwargs): ... def trigger_reload(self, filename): ... def run(self): ... reloader_loops: Any def run_with_reloader(main_func, extra_files: Optional[Any] = ..., interval: float = ..., reloader_type: str = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/0000755€tŠÔÚ€2›s®0000000000013576752267030311 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252032553 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/atom.pyi0000644€tŠÔÚ€2›s®0000000216013576752252031765 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional XHTML_NAMESPACE: Any def format_iso8601(obj): ... class AtomFeed: default_generator: Any title: Any title_type: Any url: Any feed_url: Any id: Any updated: Any author: Any icon: Any logo: Any rights: Any rights_type: Any subtitle: Any subtitle_type: Any generator: Any links: Any entries: Any def __init__(self, title: Optional[Any] = ..., entries: Optional[Any] = ..., **kwargs): ... def add(self, *args, **kwargs): ... def generate(self): ... def to_string(self): ... def get_response(self): ... def __call__(self, environ, start_response): ... 
class FeedEntry: title: Any title_type: Any content: Any content_type: Any url: Any id: Any updated: Any summary: Any summary_type: Any author: Any published: Any rights: Any links: Any categories: Any xml_base: Any def __init__(self, title: Optional[Any] = ..., content: Optional[Any] = ..., feed_url: Optional[Any] = ..., **kwargs): ... def generate(self): ... def to_string(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/cache.pyi0000644€tŠÔÚ€2›s®0000000637113576752252032100 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class BaseCache: default_timeout: float def __init__(self, default_timeout: float = ...): ... def get(self, key): ... def delete(self, key): ... def get_many(self, *keys): ... def get_dict(self, *keys): ... def set(self, key, value, timeout: Optional[float] = ...): ... def add(self, key, value, timeout: Optional[float] = ...): ... def set_many(self, mapping, timeout: Optional[float] = ...): ... def delete_many(self, *keys): ... def has(self, key): ... def clear(self): ... def inc(self, key, delta=...): ... def dec(self, key, delta=...): ... class NullCache(BaseCache): ... class SimpleCache(BaseCache): clear: Any def __init__(self, threshold: int = ..., default_timeout: float = ...): ... def get(self, key): ... def set(self, key, value, timeout: Optional[float] = ...): ... def add(self, key, value, timeout: Optional[float] = ...): ... def delete(self, key): ... def has(self, key): ... class MemcachedCache(BaseCache): key_prefix: Any def __init__(self, servers: Optional[Any] = ..., default_timeout: float = ..., key_prefix: Optional[Any] = ...): ... def get(self, key): ... def get_dict(self, *keys): ... def add(self, key, value, timeout: Optional[float] = ...): ... def set(self, key, value, timeout: Optional[float] = ...): ... def get_many(self, *keys): ... def set_many(self, mapping, timeout: Optional[float] = ...): ... def delete(self, key): ... def delete_many(self, *keys): ... 
def has(self, key): ... def clear(self): ... def inc(self, key, delta=...): ... def dec(self, key, delta=...): ... def import_preferred_memcache_lib(self, servers): ... GAEMemcachedCache: Any class RedisCache(BaseCache): key_prefix: Any def __init__(self, host: str = ..., port: int = ..., password: Optional[Any] = ..., db: int = ..., default_timeout: float = ..., key_prefix: Optional[Any] = ..., **kwargs): ... def dump_object(self, value): ... def load_object(self, value): ... def get(self, key): ... def get_many(self, *keys): ... def set(self, key, value, timeout: Optional[float] = ...): ... def add(self, key, value, timeout: Optional[float] = ...): ... def set_many(self, mapping, timeout: Optional[float] = ...): ... def delete(self, key): ... def delete_many(self, *keys): ... def has(self, key): ... def clear(self): ... def inc(self, key, delta=...): ... def dec(self, key, delta=...): ... class FileSystemCache(BaseCache): def __init__(self, cache_dir, threshold: int = ..., default_timeout: float = ..., mode: int = ...): ... def clear(self): ... def get(self, key): ... def add(self, key, value, timeout: Optional[float] = ...): ... def set(self, key, value, timeout: Optional[float] = ...): ... def delete(self, key): ... def has(self, key): ... class UWSGICache(BaseCache): cache: Any def __init__(self, default_timeout: float = ..., cache: str = ...): ... def get(self, key): ... def delete(self, key): ... def set(self, key, value, timeout: Optional[float] = ...): ... def add(self, key, value, timeout: Optional[float] = ...): ... def clear(self): ... def has(self, key): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/fixers.pyi0000644€tŠÔÚ€2›s®0000000317413576752252032333 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Iterable, List, Mapping, Optional, Sequence, Set, Text from wsgiref.types import WSGIApplication, WSGIEnvironment, StartResponse from ..middleware.proxy_fix import ProxyFix as ProxyFix class CGIRootFix(object): app: WSGIApplication app_root: Text def __init__(self, app: WSGIApplication, app_root: Text = ...) -> None: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... class LighttpdCGIRootFix(CGIRootFix): ... class PathInfoFromRequestUriFix(object): app: WSGIApplication def __init__(self, app: WSGIApplication) -> None: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... class HeaderRewriterFix(object): app: WSGIApplication remove_headers: Set[Text] add_headers: List[Text] def __init__( self, app: WSGIApplication, remove_headers: Optional[Iterable[Text]] = ..., add_headers: Optional[Iterable[Text]] = ..., ) -> None: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... class InternetExplorerFix(object): app: WSGIApplication fix_vary: bool fix_attach: bool def __init__(self, app: WSGIApplication, fix_vary: bool = ..., fix_attach: bool = ...) -> None: ... def fix_headers(self, environ: WSGIEnvironment, headers: Mapping[str, str], status: Optional[Any] = ...) -> None: ... def run_fixed(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/iterio.pyi0000644€tŠÔÚ€2›s®0000000226213576752252032323 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text, Union greenlet: Any class IterIO: def __new__(cls, obj, sentinel: Union[Text, bytes] = ...): ... def __iter__(self): ... def tell(self): ... def isatty(self): ... def seek(self, pos, mode: int = ...): ... def truncate(self, size: Optional[Any] = ...): ... def write(self, s): ... def writelines(self, list): ... def read(self, n: int = ...): ... def readlines(self, sizehint: int = ...): ... def readline(self, length: Optional[Any] = ...): ... def flush(self): ... def __next__(self): ... class IterI(IterIO): sentinel: Any def __new__(cls, func, sentinel: Union[Text, bytes] = ...): ... closed: Any def close(self): ... def write(self, s): ... def writelines(self, list): ... def flush(self): ... class IterO(IterIO): sentinel: Any closed: Any pos: Any def __new__(cls, gen, sentinel: Union[Text, bytes] = ...): ... def __iter__(self): ... def close(self): ... def seek(self, pos, mode: int = ...): ... def read(self, n: int = ...): ... def readline(self, length: Optional[Any] = ...): ... def readlines(self, sizehint: int = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/jsrouting.pyi0000644€tŠÔÚ€2›s®0000000050513576752252033052 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def dumps(*args): ... def render_template(name_parts, rules, converters): ... def generate_map(map, name: str = ...): ... def generate_adapter(adapter, name: str = ..., map_name: str = ...): ... def js_to_url_function(converter): ... def NumberConverter_js_to_url(conv): ... 
js_to_url_functions: Any mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/limiter.pyi0000644€tŠÔÚ€2›s®0000000030013576752252032464 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class StreamLimitMiddleware: app: Any maximum_size: Any def __init__(self, app, maximum_size=...): ... def __call__(self, environ, start_response): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/lint.pyi0000644€tŠÔÚ€2›s®0000000004013576752252031766 0ustar jukkaDROPBOX\Domain Users00000000000000from ..middleware.lint import * mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/profiler.pyi0000644€tŠÔÚ€2›s®0000000070113576752252032646 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, AnyStr, Generic, Optional, Protocol, Tuple, TypeVar from ..middleware.profiler import * _T = TypeVar("_T") _T_contra = TypeVar("_T_contra", contravariant=True) class _Writable(Protocol[_T_contra]): def write(self, __s: _T_contra) -> Any: ... class MergeStream(Generic[_T]): streams: Tuple[_Writable[_T], ...] def __init__(self, *streams: _Writable[_T]) -> None: ... def write(self, data: _T) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/securecookie.pyi0000644€tŠÔÚ€2›s®0000000220413576752252033504 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from hmac import new as hmac from hashlib import sha1 as _default_hash from werkzeug.contrib.sessions import ModificationTrackingDict class UnquoteError(Exception): ... class SecureCookie(ModificationTrackingDict[Any, Any]): hash_method: Any serialization_method: Any quote_base64: Any secret_key: Any new: Any def __init__(self, data: Optional[Any] = ..., secret_key: Optional[Any] = ..., new: bool = ...): ... @property def should_save(self): ... @classmethod def quote(cls, value): ... @classmethod def unquote(cls, value): ... def serialize(self, expires: Optional[Any] = ...): ... @classmethod def unserialize(cls, string, secret_key): ... 
@classmethod def load_cookie(cls, request, key: str = ..., secret_key: Optional[Any] = ...): ... def save_cookie(self, response, key: str = ..., expires: Optional[Any] = ..., session_expires: Optional[Any] = ..., max_age: Optional[Any] = ..., path: str = ..., domain: Optional[Any] = ..., secure: Optional[Any] = ..., httponly: bool = ..., force: bool = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/sessions.pyi0000644€tŠÔÚ€2›s®0000000366613576752252032707 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text, TypeVar from werkzeug.datastructures import CallbackDict _K = TypeVar("_K") _V = TypeVar("_V") def generate_key(salt: Optional[Any] = ...): ... class ModificationTrackingDict(CallbackDict[_K, _V]): modified: Any def __init__(self, *args, **kwargs): ... def copy(self): ... def __copy__(self): ... class Session(ModificationTrackingDict[_K, _V]): sid: Any new: Any def __init__(self, data, sid, new: bool = ...): ... @property def should_save(self): ... class SessionStore: session_class: Any def __init__(self, session_class: Optional[Any] = ...): ... def is_valid_key(self, key): ... def generate_key(self, salt: Optional[Any] = ...): ... def new(self): ... def save(self, session): ... def save_if_modified(self, session): ... def delete(self, session): ... def get(self, sid): ... class FilesystemSessionStore(SessionStore): path: Any filename_template: str renew_missing: Any mode: Any def __init__(self, path: Optional[Any] = ..., filename_template: Text = ..., session_class: Optional[Any] = ..., renew_missing: bool = ..., mode: int = ...): ... def get_session_filename(self, sid): ... def save(self, session): ... def delete(self, session): ... def get(self, sid): ... def list(self): ... 
class SessionMiddleware: app: Any store: Any cookie_name: Any cookie_age: Any cookie_expires: Any cookie_path: Any cookie_domain: Any cookie_secure: Any cookie_httponly: Any environ_key: Any def __init__(self, app, store, cookie_name: str = ..., cookie_age: Optional[Any] = ..., cookie_expires: Optional[Any] = ..., cookie_path: str = ..., cookie_domain: Optional[Any] = ..., cookie_secure: Optional[Any] = ..., cookie_httponly: bool = ..., environ_key: str = ...): ... def __call__(self, environ, start_response): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/testtools.pyi0000644€tŠÔÚ€2›s®0000000032313576752252033064 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.wrappers import Response class ContentAccessors: def xml(self): ... def lxml(self): ... def json(self): ... class TestResponse(Response, ContentAccessors): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/contrib/wrappers.pyi0000644€tŠÔÚ€2›s®0000000113313576752252032667 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def is_known_charset(charset): ... class JSONRequestMixin: def json(self): ... class ProtobufRequestMixin: protobuf_check_initialization: Any def parse_protobuf(self, proto_type): ... class RoutingArgsRequestMixin: routing_args: Any routing_vars: Any class ReverseSlashBehaviorRequestMixin: def path(self): ... def script_root(self): ... class DynamicCharsetRequestMixin: default_charset: Any def unknown_charset(self, charset): ... def charset(self): ... 
class DynamicCharsetResponseMixin: default_charset: Any charset: Any mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/datastructures.pyi0000644€tŠÔÚ€2›s®0000003644713576752252032461 0ustar jukkaDROPBOX\Domain Users00000000000000import collections from typing import ( Any, Callable, Container, Dict, Generic, IO, Iterable, Iterator, List, Mapping, MutableSet, NoReturn, Optional, Protocol, Text, Tuple, Type, TypeVar, Union, overload, ) _K = TypeVar("_K") _V = TypeVar("_V") _R = TypeVar("_R") _D = TypeVar("_D") def is_immutable(self) -> NoReturn: ... def iter_multi_items(mapping): ... def native_itermethods(names): ... class ImmutableListMixin(object, Generic[_V]): def __hash__(self) -> int: ... def __reduce_ex__(self: _D, protocol) -> Tuple[Type[_D], List[_V]]: ... def __delitem__(self, key: _V) -> NoReturn: ... def __iadd__(self, other: Any) -> NoReturn: ... def __imul__(self, other: Any) -> NoReturn: ... def __setitem__(self, key: str, value: Any) -> NoReturn: ... def append(self, item: Any) -> NoReturn: ... def remove(self, item: Any) -> NoReturn: ... def extend(self, iterable: Any) -> NoReturn: ... def insert(self, pos: int, value: Any) -> NoReturn: ... def pop(self, index: int = ...) -> NoReturn: ... def reverse(self) -> NoReturn: ... def sort(self, cmp: Optional[Any] = ..., key: Optional[Any] = ..., reverse: Optional[Any] = ...) -> NoReturn: ... class ImmutableList(ImmutableListMixin[_V], List[_V]): ... # type: ignore class ImmutableDictMixin(object): @classmethod def fromkeys(cls, *args, **kwargs): ... def __reduce_ex__(self, protocol): ... def __hash__(self) -> int: ... def setdefault(self, key, default: Optional[Any] = ...): ... def update(self, *args, **kwargs): ... def pop(self, key, default: Optional[Any] = ...): ... def popitem(self): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def clear(self): ... class ImmutableMultiDictMixin(ImmutableDictMixin): def __reduce_ex__(self, protocol): ... def add(self, key, value): ... 
def popitemlist(self): ... def poplist(self, key): ... def setlist(self, key, new_list): ... def setlistdefault(self, key, default_list: Optional[Any] = ...): ... class UpdateDictMixin(object): on_update: Any def setdefault(self, key, default: Optional[Any] = ...): ... def pop(self, key, default=...): ... __setitem__: Any __delitem__: Any clear: Any popitem: Any update: Any class TypeConversionDict(Dict[_K, _V]): @overload def get(self, key: _K, *, type: None = ...) -> Optional[_V]: ... @overload def get(self, key: _K, default: _D, type: None = ...) -> Union[_V, _D]: ... @overload def get(self, key: _K, *, type: Callable[[_V], _R]) -> Optional[_R]: ... @overload def get(self, key: _K, default: _D, type: Callable[[_V], _R]) -> Union[_R, _D]: ... class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict[_K, _V]): # type: ignore def copy(self) -> TypeConversionDict[_K, _V]: ... def __copy__(self) -> ImmutableTypeConversionDict[_K, _V]: ... class ViewItems: def __init__(self, multi_dict, method, repr_name, *a, **kw): ... def __iter__(self): ... class MultiDict(TypeConversionDict[_K, _V]): def __init__(self, mapping: Optional[Any] = ...): ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def add(self, key, value): ... def getlist(self, key, type: Optional[Any] = ...): ... def setlist(self, key, new_list): ... def setdefault(self, key, default: Optional[Any] = ...): ... def setlistdefault(self, key, default_list: Optional[Any] = ...): ... def items(self, multi: bool = ...): ... def lists(self): ... def keys(self): ... __iter__: Any def values(self): ... def listvalues(self): ... def copy(self): ... def deepcopy(self, memo: Optional[Any] = ...): ... def to_dict(self, flat: bool = ...): ... def update(self, other_dict): ... def pop(self, key, default=...): ... def popitem(self): ... def poplist(self, key): ... def popitemlist(self): ... def __copy__(self): ... def __deepcopy__(self, memo): ... 
class _omd_bucket: prev: Any key: Any value: Any next: Any def __init__(self, omd, key, value): ... def unlink(self, omd): ... class OrderedMultiDict(MultiDict[_K, _V]): def __init__(self, mapping: Optional[Any] = ...): ... def __eq__(self, other): ... def __ne__(self, other): ... def __reduce_ex__(self, protocol): ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def keys(self): ... __iter__: Any def values(self): ... def items(self, multi: bool = ...): ... def lists(self): ... def listvalues(self): ... def add(self, key, value): ... def getlist(self, key, type: Optional[Any] = ...): ... def setlist(self, key, new_list): ... def setlistdefault(self, key, default_list: Optional[Any] = ...): ... def update(self, mapping): ... def poplist(self, key): ... def pop(self, key, default=...): ... def popitem(self): ... def popitemlist(self): ... class Headers(object): def __init__(self, defaults: Optional[Any] = ...): ... def __getitem__(self, key, _get_mode: bool = ...): ... def __eq__(self, other): ... def __ne__(self, other): ... @overload def get(self, key: str, *, type: None = ...) -> Optional[str]: ... @overload def get(self, key: str, default: _D, type: None = ...) -> Union[str, _D]: ... @overload def get(self, key: str, *, type: Callable[[str], _R]) -> Optional[_R]: ... @overload def get(self, key: str, default: _D, type: Callable[[str], _R]) -> Union[_R, _D]: ... @overload def get(self, key: str, *, as_bytes: bool) -> Any: ... @overload def get(self, key: str, *, type: None, as_bytes: bool) -> Any: ... @overload def get(self, key: str, *, type: Callable[[Any], _R], as_bytes: bool) -> Optional[_R]: ... @overload def get(self, key: str, default: Any, type: None, as_bytes: bool) -> Any: ... @overload def get(self, key: str, default: _D, type: Callable[[Any], _R], as_bytes: bool) -> Union[_R, _D]: ... def getlist(self, key, type: Optional[Any] = ..., as_bytes: bool = ...): ... def get_all(self, name): ... 
def items(self, lower: bool = ...): ... def keys(self, lower: bool = ...): ... def values(self): ... def extend(self, iterable): ... def __delitem__(self, key: Any) -> None: ... def remove(self, key): ... def pop(self, **kwargs): ... def popitem(self): ... def __contains__(self, key): ... has_key: Any def __iter__(self): ... def __len__(self): ... def add(self, _key, _value, **kw): ... def add_header(self, _key, _value, **_kw): ... def clear(self): ... def set(self, _key, _value, **kw): ... def setdefault(self, key, value): ... def __setitem__(self, key, value): ... def to_list(self, charset: Text = ...): ... def to_wsgi_list(self): ... def copy(self): ... def __copy__(self): ... class ImmutableHeadersMixin: def __delitem__(self, key: str) -> None: ... def __setitem__(self, key, value): ... set: Any def add(self, *args, **kwargs): ... remove: Any add_header: Any def extend(self, iterable): ... def insert(self, pos, value): ... def pop(self, **kwargs): ... def popitem(self): ... def setdefault(self, key, default): ... class EnvironHeaders(ImmutableHeadersMixin, Headers): environ: Any def __init__(self, environ): ... def __eq__(self, other): ... def __getitem__(self, key, _get_mode: bool = ...): ... def __len__(self): ... def __iter__(self): ... def copy(self): ... class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict[_K, _V]): # type: ignore def __reduce_ex__(self, protocol): ... dicts: Any def __init__(self, dicts: Optional[Any] = ...): ... @classmethod def fromkeys(cls): ... def __getitem__(self, key): ... def get(self, key, default: Optional[Any] = ..., type: Optional[Any] = ...): ... def getlist(self, key, type: Optional[Any] = ...): ... def keys(self): ... __iter__: Any def items(self, multi: bool = ...): ... def values(self): ... def lists(self): ... def listvalues(self): ... def copy(self): ... def to_dict(self, flat: bool = ...): ... def __len__(self): ... def __contains__(self, key): ... 
has_key: Any class FileMultiDict(MultiDict[_K, _V]): def add_file(self, name, file, filename: Optional[Any] = ..., content_type: Optional[Any] = ...): ... class ImmutableDict(ImmutableDictMixin, Dict[_K, _V]): # type: ignore def copy(self): ... def __copy__(self): ... class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict[_K, _V]): # type: ignore def copy(self): ... def __copy__(self): ... class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict[_K, _V]): # type: ignore def copy(self): ... def __copy__(self): ... class Accept(ImmutableList[Tuple[str, float]]): provided: bool def __init__(self, values: Union[None, Accept, Iterable[Tuple[str, float]]] = ...) -> None: ... @overload def __getitem__(self, key: int) -> Tuple[str, float]: ... @overload def __getitem__(self, s: slice) -> List[Tuple[str, float]]: ... @overload def __getitem__(self, key: str) -> float: ... def quality(self, key: str) -> float: ... def __contains__(self, value: str) -> bool: ... # type: ignore def index(self, key: Union[str, Tuple[str, float]]) -> int: ... # type: ignore def find(self, key: Union[str, Tuple[str, float]]) -> int: ... def values(self) -> Iterator[str]: ... def to_header(self) -> str: ... @overload def best_match(self, matches: Iterable[str], default: None = ...) -> Optional[str]: ... @overload def best_match(self, matches: Iterable[str], default: _D) -> Union[str, _D]: ... @property def best(self) -> Optional[str]: ... class MIMEAccept(Accept): @property def accept_html(self) -> bool: ... @property def accept_xhtml(self) -> bool: ... @property def accept_json(self) -> bool: ... class LanguageAccept(Accept): ... class CharsetAccept(Accept): ... def cache_property(key, empty, type): ... class _CacheControl(UpdateDictMixin, Dict[str, Any]): no_cache: Any no_store: Any max_age: Any no_transform: Any on_update: Any provided: Any def __init__(self, values=..., on_update: Optional[Any] = ...): ... def to_header(self): ... 
class RequestCacheControl(ImmutableDictMixin, _CacheControl): # type: ignore max_stale: Any min_fresh: Any no_transform: Any only_if_cached: Any class ResponseCacheControl(_CacheControl): public: Any private: Any must_revalidate: Any proxy_revalidate: Any s_maxage: Any class CallbackDict(UpdateDictMixin, Dict[_K, _V]): on_update: Any def __init__(self, initial: Optional[Any] = ..., on_update: Optional[Any] = ...): ... class HeaderSet(MutableSet[str]): on_update: Any def __init__(self, headers: Optional[Any] = ..., on_update: Optional[Any] = ...): ... def add(self, header): ... def remove(self, header): ... def update(self, iterable): ... def discard(self, header): ... def find(self, header): ... def index(self, header): ... def clear(self): ... def as_set(self, preserve_casing: bool = ...): ... def to_header(self): ... def __getitem__(self, idx): ... def __delitem__(self, idx): ... def __setitem__(self, idx, value): ... def __contains__(self, header): ... def __len__(self): ... def __iter__(self): ... def __nonzero__(self): ... class ETags(Container[str], Iterable[str]): star_tag: Any def __init__(self, strong_etags: Optional[Any] = ..., weak_etags: Optional[Any] = ..., star_tag: bool = ...): ... def as_set(self, include_weak: bool = ...): ... def is_weak(self, etag): ... def contains_weak(self, etag): ... def contains(self, etag): ... def contains_raw(self, etag): ... def to_header(self): ... def __call__(self, etag: Optional[Any] = ..., data: Optional[Any] = ..., include_weak: bool = ...): ... def __bool__(self): ... __nonzero__: Any def __iter__(self): ... def __contains__(self, etag): ... class IfRange: etag: Any date: Any def __init__(self, etag: Optional[Any] = ..., date: Optional[Any] = ...): ... def to_header(self): ... class Range: units: Any ranges: Any def __init__(self, units, ranges): ... def range_for_length(self, length): ... def make_content_range(self, length): ... def to_header(self): ... def to_content_range_header(self, length): ... 
class ContentRange: on_update: Any units: Optional[str] start: Any stop: Any length: Any def __init__(self, units: Optional[str], start, stop, length: Optional[Any] = ..., on_update: Optional[Any] = ...): ... def set(self, start, stop, length: Optional[Any] = ..., units: Optional[str] = ...): ... def unset(self) -> None: ... def to_header(self): ... def __nonzero__(self): ... __bool__: Any class Authorization(ImmutableDictMixin, Dict[str, Any]): # type: ignore type: str def __init__(self, auth_type: str, data: Optional[Mapping[str, Any]] = ...) -> None: ... @property def username(self) -> Optional[str]: ... @property def password(self) -> Optional[str]: ... @property def realm(self) -> Optional[str]: ... @property def nonce(self) -> Optional[str]: ... @property def uri(self) -> Optional[str]: ... @property def nc(self) -> Optional[str]: ... @property def cnonce(self) -> Optional[str]: ... @property def response(self) -> Optional[str]: ... @property def opaque(self) -> Optional[str]: ... @property def qop(self) -> Optional[str]: ... class WWWAuthenticate(UpdateDictMixin, Dict[str, Any]): on_update: Any def __init__(self, auth_type: Optional[Any] = ..., values: Optional[Any] = ..., on_update: Optional[Any] = ...): ... def set_basic(self, realm: str = ...): ... def set_digest(self, realm, nonce, qop=..., opaque: Optional[Any] = ..., algorithm: Optional[Any] = ..., stale: bool = ...): ... def to_header(self): ... @staticmethod def auth_property(name, doc: Optional[Any] = ...): ... type: Any realm: Any domain: Any nonce: Any opaque: Any algorithm: Any qop: Any stale: Any class _Writer(Protocol): def write(self, data: bytes) -> Any: ... 
class FileStorage(object): name: Optional[Text] stream: IO[bytes] filename: Optional[Text] headers: Headers def __init__( self, stream: Optional[IO[bytes]] = ..., filename: Union[None, Text, bytes] = ..., name: Optional[Text] = ..., content_type: Optional[Text] = ..., content_length: Optional[int] = ..., headers: Optional[Headers] = ..., ): ... @property def content_type(self) -> Optional[Text]: ... @property def content_length(self) -> int: ... @property def mimetype(self) -> str: ... @property def mimetype_params(self) -> Dict[str, str]: ... def save(self, dst: Union[Text, _Writer], buffer_size: int = ...): ... def close(self) -> None: ... def __nonzero__(self) -> bool: ... def __bool__(self) -> bool: ... def __getattr__(self, name: Text) -> Any: ... def __iter__(self) -> Iterator[bytes]: ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/debug/0000755€tŠÔÚ€2›s®0000000000013576752267027737 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/debug/__init__.pyi0000644€tŠÔÚ€2›s®0000000246213576752252032217 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response PIN_TIME: Any def hash_pin(pin): ... def get_machine_id(): ... class _ConsoleFrame: console: Any id: Any def __init__(self, namespace): ... def get_pin_and_cookie_name(app): ... class DebuggedApplication: app: Any evalex: Any frames: Any tracebacks: Any request_key: Any console_path: Any console_init_func: Any show_hidden_frames: Any secret: Any pin_logging: Any pin: Any def __init__(self, app, evalex: bool = ..., request_key: str = ..., console_path: str = ..., console_init_func: Optional[Any] = ..., show_hidden_frames: bool = ..., lodgeit_url: Optional[Any] = ..., pin_security: bool = ..., pin_logging: bool = ...): ... @property def pin_cookie_name(self): ... def debug_application(self, environ, start_response): ... 
def execute_command(self, request, command, frame): ... def display_console(self, request): ... def paste_traceback(self, request, traceback): ... def get_resource(self, request, filename): ... def check_pin_trust(self, environ): ... def pin_auth(self, request): ... def log_pin_request(self): ... def __call__(self, environ, start_response): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/debug/console.pyi0000644€tŠÔÚ€2›s®0000000226713576752252032125 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional import code class HTMLStringO: def __init__(self): ... def isatty(self): ... def close(self): ... def flush(self): ... def seek(self, n, mode: int = ...): ... def readline(self): ... def reset(self): ... def write(self, x): ... def writelines(self, x): ... class ThreadedStream: @staticmethod def push(): ... @staticmethod def fetch(): ... @staticmethod def displayhook(obj): ... def __setattr__(self, name, value): ... def __dir__(self): ... def __getattribute__(self, name): ... class _ConsoleLoader: def __init__(self): ... def register(self, code, source): ... def get_source_by_code(self, code): ... class _InteractiveConsole(code.InteractiveInterpreter): globals: Any more: Any buffer: Any def __init__(self, globals, locals): ... def runsource(self, source): ... def runcode(self, code): ... def showtraceback(self): ... def showsyntaxerror(self, filename: Optional[Any] = ...): ... def write(self, data): ... class Console: def __init__(self, globals: Optional[Any] = ..., locals: Optional[Any] = ...): ... def eval(self, code): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/debug/repr.pyi0000644€tŠÔÚ€2›s®0000000151613576752252031427 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional deque: Any missing: Any RegexType: Any HELP_HTML: Any OBJECT_DUMP_HTML: Any def debug_repr(obj): ... def dump(obj=...): ... class _Helper: def __call__(self, topic: Optional[Any] = ...): ... 
helper: Any class DebugReprGenerator: def __init__(self): ... list_repr: Any tuple_repr: Any set_repr: Any frozenset_repr: Any deque_repr: Any def regex_repr(self, obj): ... def string_repr(self, obj, limit: int = ...): ... def dict_repr(self, d, recursive, limit: int = ...): ... def object_repr(self, obj): ... def dispatch_repr(self, obj, recursive): ... def fallback_repr(self): ... def repr(self, obj): ... def dump_object(self, obj): ... def dump_locals(self, d): ... def render_object_dump(self, items, title, repr: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/debug/tbtools.pyi0000644€tŠÔÚ€2›s®0000000322713576752252032146 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional UTF8_COOKIE: Any system_exceptions: Any HEADER: Any FOOTER: Any PAGE_HTML: Any CONSOLE_HTML: Any SUMMARY_HTML: Any FRAME_HTML: Any SOURCE_LINE_HTML: Any def render_console_html(secret, evalex_trusted: bool = ...): ... def get_current_traceback(ignore_system_exceptions: bool = ..., show_hidden_frames: bool = ..., skip: int = ...): ... class Line: lineno: Any code: Any in_frame: Any current: Any def __init__(self, lineno, code): ... def classes(self): ... def render(self): ... class Traceback: exc_type: Any exc_value: Any exception_type: Any frames: Any def __init__(self, exc_type, exc_value, tb): ... def filter_hidden_frames(self): ... def is_syntax_error(self): ... def exception(self): ... def log(self, logfile: Optional[Any] = ...): ... def paste(self): ... def render_summary(self, include_title: bool = ...): ... def render_full(self, evalex: bool = ..., secret: Optional[Any] = ..., evalex_trusted: bool = ...): ... def generate_plaintext_traceback(self): ... def plaintext(self): ... id: Any class Frame: lineno: Any function_name: Any locals: Any globals: Any filename: Any module: Any loader: Any code: Any hide: Any info: Any def __init__(self, exc_type, exc_value, tb): ... def render(self): ... def render_line_context(self): ... 
def get_annotated_lines(self): ... def eval(self, code, mode: str = ...): ... def sourcelines(self): ... def get_context_lines(self, context: int = ...): ... @property def current_line(self): ... def console(self): ... id: Any mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/exceptions.pyi0000644€tŠÔÚ€2›s®0000001056713576752252031560 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Tuple, List, Text, NoReturn, Optional, Protocol, Type, Union, Iterable from wsgiref.types import WSGIEnvironment, StartResponse from werkzeug.wrappers import Response class _EnvironContainer(Protocol): @property def environ(self) -> WSGIEnvironment: ... class HTTPException(Exception): code: Optional[int] description: Optional[Text] response: Optional[Response] def __init__(self, description: Optional[Text] = ..., response: Optional[Response] = ...) -> None: ... @classmethod def wrap(cls, exception: Type[Exception], name: Optional[str] = ...) -> Any: ... @property def name(self) -> str: ... def get_description(self, environ: Optional[WSGIEnvironment] = ...) -> Text: ... def get_body(self, environ: Optional[WSGIEnvironment] = ...) -> Text: ... def get_headers(self, environ: Optional[WSGIEnvironment] = ...) -> List[Tuple[str, str]]: ... def get_response(self, environ: Optional[Union[WSGIEnvironment, _EnvironContainer]] = ...) -> Response: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... default_exceptions: Dict[int, Type[HTTPException]] class BadRequest(HTTPException): code: int description: Text class ClientDisconnected(BadRequest): ... class SecurityError(BadRequest): ... class BadHost(BadRequest): ... class Unauthorized(HTTPException): code: int description: Text www_authenticate: Optional[Iterable[object]] def __init__( self, description: Optional[Text] = ..., response: Optional[Response] = ..., www_authenticate: Union[None, Tuple[object, ...], List[object], object] = ..., ) -> None: ... 
class Forbidden(HTTPException): code: int description: Text class NotFound(HTTPException): code: int description: Text class MethodNotAllowed(HTTPException): code: int description: Text valid_methods: Any def __init__(self, valid_methods: Optional[Any] = ..., description: Optional[Any] = ...): ... class NotAcceptable(HTTPException): code: int description: Text class RequestTimeout(HTTPException): code: int description: Text class Conflict(HTTPException): code: int description: Text class Gone(HTTPException): code: int description: Text class LengthRequired(HTTPException): code: int description: Text class PreconditionFailed(HTTPException): code: int description: Text class RequestEntityTooLarge(HTTPException): code: int description: Text class RequestURITooLarge(HTTPException): code: int description: Text class UnsupportedMediaType(HTTPException): code: int description: Text class RequestedRangeNotSatisfiable(HTTPException): code: int description: Text length: Any units: str def __init__(self, length: Optional[Any] = ..., units: str = ..., description: Optional[Any] = ...): ... 
class ExpectationFailed(HTTPException): code: int description: Text class ImATeapot(HTTPException): code: int description: Text class UnprocessableEntity(HTTPException): code: int description: Text class Locked(HTTPException): code: int description: Text class FailedDependency(HTTPException): code: int description: Text class PreconditionRequired(HTTPException): code: int description: Text class TooManyRequests(HTTPException): code: int description: Text class RequestHeaderFieldsTooLarge(HTTPException): code: int description: Text class UnavailableForLegalReasons(HTTPException): code: int description: Text class InternalServerError(HTTPException): code: int description: Text class NotImplemented(HTTPException): code: int description: Text class BadGateway(HTTPException): code: int description: Text class ServiceUnavailable(HTTPException): code: int description: Text class GatewayTimeout(HTTPException): code: int description: Text class HTTPVersionNotSupported(HTTPException): code: int description: Text class Aborter: mapping: Any def __init__(self, mapping: Optional[Any] = ..., extra: Optional[Any] = ...) -> None: ... def __call__(self, code: Union[int, Response], *args: Any, **kwargs: Any) -> NoReturn: ... def abort(status: Union[int, Response], *args: Any, **kwargs: Any) -> NoReturn: ... class BadRequestKeyError(BadRequest, KeyError): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/filesystem.pyi0000644€tŠÔÚ€2›s®0000000025113576752252031550 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any has_likely_buggy_unicode_filesystem: Any class BrokenFilesystemWarning(RuntimeWarning, UnicodeWarning): ... def get_filesystem_encoding(): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/formparser.pyi0000644€tŠÔÚ€2›s®0000000707513576752252031557 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text, Tuple, Callable, Iterable, TypeVar, NoReturn, Protocol, IO, Generator, Dict, Mapping, Union from wsgiref.types import WSGIEnvironment from .datastructures import Headers _Dict = Any _ParseFunc = Callable[[IO[bytes], str, Optional[int], Mapping[str, str]], Tuple[IO[bytes], _Dict, _Dict]] _F = TypeVar("_F", bound=Callable[..., Any]) class _StreamFactory(Protocol): def __call__( self, total_content_length: Optional[int], filename: str, content_type: str, content_length: Optional[int] = ..., ) -> IO[bytes]: ... def default_stream_factory( total_content_length: Optional[int], filename: str, content_type: str, content_length: Optional[int] = ..., ) -> IO[bytes]: ... def parse_form_data( environ: WSGIEnvironment, stream_factory: Optional[_StreamFactory] = ..., charset: Text = ..., errors: Text = ..., max_form_memory_size: Optional[int] = ..., max_content_length: Optional[int] = ..., cls: Optional[Callable[[], _Dict]] = ..., silent: bool = ..., ) -> Tuple[IO[bytes], _Dict, _Dict]: ... def exhaust_stream(f: _F) -> _F: ... class FormDataParser(object): stream_factory: _StreamFactory charset: Text errors: Text max_form_memory_size: Optional[int] max_content_length: Optional[int] cls: Callable[[], _Dict] silent: bool def __init__( self, stream_factory: Optional[_StreamFactory] = ..., charset: Text = ..., errors: Text = ..., max_form_memory_size: Optional[int] = ..., max_content_length: Optional[int] = ..., cls: Optional[Callable[[], _Dict]] = ..., silent: bool = ..., ) -> None: ... def get_parse_func(self, mimetype: str, options: Any) -> Optional[_ParseFunc]: ... def parse_from_environ(self, environ: WSGIEnvironment) -> Tuple[IO[bytes], _Dict, _Dict]: ... 
def parse( self, stream: IO[bytes], mimetype: Text, content_length: Optional[int], options: Optional[Mapping[str, str]] = ..., ) -> Tuple[IO[bytes], _Dict, _Dict]: ... parse_functions: Dict[Text, _ParseFunc] def is_valid_multipart_boundary(boundary: str) -> bool: ... def parse_multipart_headers(iterable: Iterable[Union[Text, bytes]]) -> Headers: ... class MultiPartParser(object): charset: Text errors: Text max_form_memory_size: Optional[int] stream_factory: _StreamFactory cls: Callable[[], _Dict] buffer_size: int def __init__( self, stream_factory: Optional[_StreamFactory] = ..., charset: Text = ..., errors: Text = ..., max_form_memory_size: Optional[int] = ..., cls: Optional[Callable[[], _Dict]] = ..., buffer_size: int = ..., ) -> None: ... def fail(self, message: Text) -> NoReturn: ... def get_part_encoding(self, headers: Mapping[str, str]) -> Optional[str]: ... def get_part_charset(self, headers: Mapping[str, str]) -> Text: ... def start_file_streaming( self, filename: Union[Text, bytes], headers: Mapping[str, str], total_content_length: Optional[int], ) -> Tuple[Text, IO[bytes]]: ... def in_memory_threshold_reached(self, bytes: Any) -> NoReturn: ... def validate_boundary(self, boundary: Optional[str]) -> None: ... def parse_lines( self, file: Any, boundary: bytes, content_length: int, cap_at_buffer: bool = ..., ) -> Generator[Tuple[str, Any], None, None]: ... def parse_parts(self, file: Any, boundary: bytes, content_length: int) -> Generator[Tuple[str, Any], None, None]: ... def parse(self, file: Any, boundary: bytes, content_length: int) -> Tuple[_Dict, _Dict]: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/http.pyi0000644€tŠÔÚ€2›s®0000001243313576752252030350 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from datetime import datetime, timedelta from typing import ( Dict, Text, Union, Tuple, Any, Optional, Mapping, Iterable, Callable, List, Type, TypeVar, Protocol, overload, SupportsInt, ) from wsgiref.types import WSGIEnvironment from .datastructures import ( Headers, Accept, RequestCacheControl, HeaderSet, Authorization, WWWAuthenticate, IfRange, Range, ContentRange, ETags, TypeConversionDict, ) if sys.version_info < (3,): _Str = TypeVar('_Str', str, unicode) _ToBytes = Union[bytes, bytearray, buffer, unicode] _ETagData = Union[str, unicode, bytearray, buffer, memoryview] else: _Str = str _ToBytes = Union[bytes, bytearray, memoryview, str] _ETagData = Union[bytes, bytearray, memoryview] _T = TypeVar("_T") _U = TypeVar("_U") HTTP_STATUS_CODES: Dict[int, str] def wsgi_to_bytes(data: Union[bytes, Text]) -> bytes: ... def bytes_to_wsgi(data: bytes) -> str: ... def quote_header_value(value: Any, extra_chars: str = ..., allow_token: bool = ...) -> str: ... def unquote_header_value(value: _Str, is_filename: bool = ...) -> _Str: ... def dump_options_header(header: Optional[_Str], options: Mapping[_Str, Any]) -> _Str: ... def dump_header(iterable: Union[Iterable[Any], Dict[_Str, Any]], allow_token: bool = ...) -> _Str: ... def parse_list_header(value: _Str) -> List[_Str]: ... @overload def parse_dict_header(value: Union[bytes, Text]) -> Dict[Text, Optional[Text]]: ... @overload def parse_dict_header(value: Union[bytes, Text], cls: Type[_T]) -> _T: ... @overload def parse_options_header(value: None, multiple: bool = ...) -> Tuple[str, Dict[str, Optional[str]]]: ... @overload def parse_options_header(value: _Str) -> Tuple[_Str, Dict[_Str, Optional[_Str]]]: ... # actually returns Tuple[_Str, Dict[_Str, Optional[_Str]], ...] @overload def parse_options_header(value: _Str, multiple: bool = ...) -> Tuple[Any, ...]: ... 
@overload def parse_accept_header(value: Optional[Text]) -> Accept: ... @overload def parse_accept_header(value: Optional[_Str], cls: Callable[[Optional[List[Tuple[str, float]]]], _T]) -> _T: ... @overload def parse_cache_control_header(value: Union[None, bytes, Text], on_update: Optional[Callable[[RequestCacheControl], Any]] = ...) -> RequestCacheControl: ... @overload def parse_cache_control_header(value: Union[None, bytes, Text], on_update: _T, cls: Callable[[Dict[Text, Optional[Text]], _T], _U]) -> _U: ... @overload def parse_cache_control_header(value: Union[None, bytes, Text], *, cls: Callable[[Dict[Text, Optional[Text]], None], _U]) -> _U: ... def parse_set_header(value: Text, on_update: Optional[Callable[[HeaderSet], Any]] = ...) -> HeaderSet: ... def parse_authorization_header(value: Union[None, bytes, Text]) -> Optional[Authorization]: ... def parse_www_authenticate_header(value: Union[None, bytes, Text], on_update: Optional[Callable[[WWWAuthenticate], Any]] = ...) -> WWWAuthenticate: ... def parse_if_range_header(value: Optional[Text]) -> IfRange: ... def parse_range_header(value: Optional[Text], make_inclusive: bool = ...) -> Optional[Range]: ... def parse_content_range_header(value: Optional[Text], on_update: Optional[Callable[[ContentRange], Any]] = ...) -> Optional[ContentRange]: ... def quote_etag(etag: _Str, weak: bool = ...) -> _Str: ... def unquote_etag(etag: Optional[_Str]) -> Tuple[Optional[_Str], Optional[_Str]]: ... def parse_etags(value: Optional[Text]) -> ETags: ... def generate_etag(data: _ETagData) -> str: ... def parse_date(value: Optional[str]) -> Optional[datetime]: ... def cookie_date(expires: Union[None, float, datetime] = ...) -> str: ... def http_date(timestamp: Union[None, float, datetime] = ...) -> str: ... def parse_age(value: Optional[SupportsInt] = ...) -> Optional[timedelta]: ... def dump_age(age: Union[None, timedelta, SupportsInt]) -> Optional[str]: ... 
def is_resource_modified(environ: WSGIEnvironment, etag: Optional[Text] = ..., data: Optional[_ETagData] = ..., last_modified: Union[None, Text, datetime] = ..., ignore_if_range: bool = ...) -> bool: ... def remove_entity_headers(headers: Union[List[Tuple[Text, Text]], Headers], allowed: Iterable[Text] = ...) -> None: ... def remove_hop_by_hop_headers(headers: Union[List[Tuple[Text, Text]], Headers]) -> None: ... def is_entity_header(header: Text) -> bool: ... def is_hop_by_hop_header(header: Text) -> bool: ... @overload def parse_cookie(header: Union[None, WSGIEnvironment, Text, bytes], charset: Text = ..., errors: Text = ...) -> TypeConversionDict[Any, Any]: ... @overload def parse_cookie(header: Union[None, WSGIEnvironment, Text, bytes], charset: Text = ..., errors: Text = ..., cls: Optional[Callable[[Iterable[Tuple[Text, Text]]], _T]] = ...) -> _T: ... def dump_cookie(key: _ToBytes, value: _ToBytes = ..., max_age: Union[None, float, timedelta] = ..., expires: Union[None, Text, float, datetime] = ..., path: Union[None, Tuple[Any, ...], str, bytes] = ..., domain: Union[None, str, bytes] = ..., secure: bool = ..., httponly: bool = ..., charset: Text = ..., sync_expires: bool = ...) -> str: ... def is_byte_range_valid(start: Optional[int], stop: Optional[int], length: Optional[int]) -> bool: ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/local.pyi0000644€tŠÔÚ€2›s®0000000441313576752252030462 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional def release_local(local): ... class Local: def __init__(self): ... def __iter__(self): ... def __call__(self, proxy): ... def __release_local__(self): ... def __getattr__(self, name): ... def __setattr__(self, name, value): ... def __delattr__(self, name): ... class LocalStack: def __init__(self): ... def __release_local__(self): ... def _get__ident_func__(self): ... def _set__ident_func__(self, value): ... __ident_func__: Any def __call__(self): ... def push(self, obj): ... 
def pop(self): ... @property def top(self): ... class LocalManager: locals: Any ident_func: Any def __init__(self, locals: Optional[Any] = ..., ident_func: Optional[Any] = ...): ... def get_ident(self): ... def cleanup(self): ... def make_middleware(self, app): ... def middleware(self, func): ... class LocalProxy: def __init__(self, local, name: Optional[Any] = ...): ... @property def __dict__(self): ... def __bool__(self): ... def __unicode__(self): ... def __dir__(self): ... def __getattr__(self, name): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... __getslice__: Any def __setslice__(self, i, j, seq): ... def __delslice__(self, i, j): ... __setattr__: Any __delattr__: Any __lt__: Any __le__: Any __eq__: Any __ne__: Any __gt__: Any __ge__: Any __cmp__: Any __hash__: Any __call__: Any __len__: Any __getitem__: Any __iter__: Any __contains__: Any __add__: Any __sub__: Any __mul__: Any __floordiv__: Any __mod__: Any __divmod__: Any __pow__: Any __lshift__: Any __rshift__: Any __and__: Any __xor__: Any __or__: Any __div__: Any __truediv__: Any __neg__: Any __pos__: Any __abs__: Any __invert__: Any __complex__: Any __int__: Any __long__: Any __float__: Any __oct__: Any __hex__: Any __index__: Any __coerce__: Any __enter__: Any __exit__: Any __radd__: Any __rsub__: Any __rmul__: Any __rdiv__: Any __rtruediv__: Any __rfloordiv__: Any __rmod__: Any __rdivmod__: Any __copy__: Any __deepcopy__: Any mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/middleware/0000755€tŠÔÚ€2›s®0000000000013576752267030766 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/middleware/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252033230 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/middleware/dispatcher.pyi0000644€tŠÔÚ€2›s®0000000070713576752252033635 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Iterable, Mapping, Optional, Text from 
wsgiref.types import StartResponse, WSGIApplication, WSGIEnvironment class DispatcherMiddleware(object): app: WSGIApplication mounts: Mapping[Text, WSGIApplication] def __init__(self, app: WSGIApplication, mounts: Optional[Mapping[Text, WSGIApplication]] = ...) -> None: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/middleware/http_proxy.pyi0000644€tŠÔÚ€2›s®0000000125413576752252033725 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterable, Mapping, MutableMapping, Text from wsgiref.types import StartResponse, WSGIApplication, WSGIEnvironment _Opts = Mapping[Text, Any] _MutableOpts = MutableMapping[Text, Any] class ProxyMiddleware(object): app: WSGIApplication targets: Dict[Text, _MutableOpts] def __init__( self, app: WSGIApplication, targets: Mapping[Text, _MutableOpts], chunk_size: int = ..., timeout: int = ..., ) -> None: ... def proxy_to(self, opts: _Opts, path: Text, prefix: Text) -> WSGIApplication: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/middleware/lint.pyi0000644€tŠÔÚ€2›s®0000000456213576752252032460 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Iterable, Iterator, List, Mapping, Optional, Protocol, Tuple from wsgiref.types import StartResponse, WSGIApplication, WSGIEnvironment from ..datastructures import Headers class WSGIWarning(Warning): ... class HTTPWarning(Warning): ... def check_string(context: str, obj: object, stacklevel: int = ...) -> None: ... class _Readable(Protocol): def read(self, __size: int = ...) -> bytes: ... def readline(self, __size: int = ...) -> bytes: ... def __iter__(self) -> Iterator[bytes]: ... def close(self) -> Any: ... class InputStream(object): def __init__(self, stream: _Readable) -> None: ... def read(self, __size: int = ...) -> bytes: ... 
def readline(self, __size: int = ...) -> bytes: ... def __iter__(self) -> Iterator[bytes]: ... def close(self) -> None: ... class _FullyWritable(Protocol): def write(self, __s: str) -> Any: ... def flush(self) -> Any: ... def close(self) -> Any: ... class ErrorStream(object): def __init__(self, stream: _FullyWritable) -> None: ... def write(self, s: str) -> None: ... def flush(self) -> None: ... def writelines(self, seq: Iterable[str]) -> None: ... def close(self) -> None: ... class _Writable(Protocol): def write(self, __s: str) -> Any: ... class GuardedWrite(object): def __init__(self, write: _Writable, chunks: List[int]) -> None: ... def __call__(self, s: str) -> None: ... class GuardedIterator(object): closed: bool headers_set: bool chunks: List[int] def __init__(self, iterator: Iterable[str], headers_set: bool, chunks: List[int]) -> None: ... def __iter__(self) -> GuardedIterator: ... if sys.version_info < (3,): def next(self) -> str: ... else: def __next__(self) -> str: ... def close(self) -> None: ... class LintMiddleware(object): def __init__(self, app: WSGIApplication) -> None: ... def check_environ(self, environ: WSGIEnvironment) -> None: ... def check_start_response( self, status: str, headers: List[Tuple[str, str]], exc_info: Optional[Tuple[Any, ...]], ) -> Tuple[int, Headers]: ... def check_headers(self, headers: Mapping[str, str]) -> None: ... def check_iterator(self, app_iter: Iterable[bytes]) -> None: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> GuardedIterator: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/middleware/profiler.pyi0000644€tŠÔÚ€2›s®0000000107013576752252033323 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import IO, Iterable, List, Optional, Text, Tuple, Union from wsgiref.types import StartResponse, WSGIApplication, WSGIEnvironment class ProfilerMiddleware(object): def __init__( self, app: WSGIApplication, stream: IO[str] = ..., sort_by: Tuple[Text, Text] = ..., restrictions: Iterable[Union[str, float]] = ..., profile_dir: Optional[Text] = ..., filename_format: Text = ..., ) -> None: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> List[bytes]: ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/middleware/proxy_fix.pyi0000644€tŠÔÚ€2›s®0000000130713576752252033533 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, Optional from wsgiref.types import StartResponse, WSGIApplication, WSGIEnvironment class ProxyFix(object): app: WSGIApplication x_for: int x_proto: int x_host: int x_port: int x_prefix: int num_proxies: int def __init__( self, app: WSGIApplication, num_proxies: Optional[int] = ..., x_for: int = ..., x_proto: int = ..., x_host: int = ..., x_port: int = ..., x_prefix: int = ..., ) -> None: ... def get_remote_addr(self, forwarded_for: Iterable[str]) -> Optional[str]: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/middleware/shared_data.pyi0000644€tŠÔÚ€2›s®0000000241613576752252033745 0ustar jukkaDROPBOX\Domain Users00000000000000import datetime from typing import Callable, IO, Iterable, List, Mapping, Optional, Text, Tuple, Union from wsgiref.types import StartResponse, WSGIApplication, WSGIEnvironment _V = Union[Tuple[Text, Text], Text] _Opener = Callable[[], Tuple[IO[bytes], datetime.datetime, int]] _Loader = Callable[[Optional[Text]], Union[Tuple[None, None], Tuple[Text, _Opener]]] class SharedDataMiddleware(object): app: WSGIApplication exports: List[Tuple[Text, _Loader]] cache: bool cache_timeout: float def __init__( self, app: WSGIApplication, exports: Union[Mapping[Text, _V], Iterable[Tuple[Text, _V]]], disallow: Optional[Text] = ..., cache: bool = ..., cache_timeout: float = ..., fallback_mimetype: Text = ..., ) -> None: ... def is_allowed(self, filename: Text) -> bool: ... def get_file_loader(self, filename: Text) -> _Loader: ... def get_package_loader(self, package: Text, package_path: Text) -> _Loader: ... def get_directory_loader(self, directory: Text) -> _Loader: ... def generate_etag(self, mtime: datetime.datetime, file_size: int, real_filename: Union[Text, bytes]) -> str: ... def __call__(self, environment: WSGIEnvironment, start_response: StartResponse) -> WSGIApplication: ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/posixemulation.pyi0000644€tŠÔÚ€2›s®0000000030613576752252032445 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from ._compat import to_unicode as to_unicode from .filesystem import get_filesystem_encoding as get_filesystem_encoding can_rename_open_file: Any def rename(src, dst): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/routing.pyi0000644€tŠÔÚ€2›s®0000001462513576752252031065 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text from werkzeug.exceptions import HTTPException def parse_converter_args(argstr): ... 
def parse_rule(rule): ... class RoutingException(Exception): ... class RequestRedirect(HTTPException, RoutingException): code: Any new_url: Any def __init__(self, new_url): ... def get_response(self, environ): ... class RequestSlash(RoutingException): ... class RequestAliasRedirect(RoutingException): matched_values: Any def __init__(self, matched_values): ... class BuildError(RoutingException, LookupError): endpoint: Any values: Any method: Any adapter: Optional[MapAdapter] def __init__(self, endpoint, values, method, adapter: Optional[MapAdapter] = ...) -> None: ... @property def suggested(self) -> Optional[Rule]: ... def closest_rule(self, adapter: Optional[MapAdapter]) -> Optional[Rule]: ... class ValidationError(ValueError): ... class RuleFactory: def get_rules(self, map): ... class Subdomain(RuleFactory): subdomain: Any rules: Any def __init__(self, subdomain, rules): ... def get_rules(self, map): ... class Submount(RuleFactory): path: Any rules: Any def __init__(self, path, rules): ... def get_rules(self, map): ... class EndpointPrefix(RuleFactory): prefix: Any rules: Any def __init__(self, prefix, rules): ... def get_rules(self, map): ... class RuleTemplate: rules: Any def __init__(self, rules): ... def __call__(self, *args, **kwargs): ... class RuleTemplateFactory(RuleFactory): rules: Any context: Any def __init__(self, rules, context): ... def get_rules(self, map): ... class Rule(RuleFactory): rule: Any is_leaf: Any map: Any strict_slashes: Any subdomain: Any host: Any defaults: Any build_only: Any alias: Any methods: Any endpoint: Any redirect_to: Any arguments: Any def __init__(self, string, defaults: Optional[Any] = ..., subdomain: Optional[Any] = ..., methods: Optional[Any] = ..., build_only: bool = ..., endpoint: Optional[Any] = ..., strict_slashes: Optional[Any] = ..., redirect_to: Optional[Any] = ..., alias: bool = ..., host: Optional[Any] = ...): ... def empty(self): ... def get_empty_kwargs(self): ... def get_rules(self, map): ... 
def refresh(self): ... def bind(self, map, rebind: bool = ...): ... def get_converter(self, variable_name, converter_name, args, kwargs): ... def compile(self): ... def match(self, path, method: Optional[Any] = ...): ... def build(self, values, append_unknown: bool = ...): ... def provides_defaults_for(self, rule): ... def suitable_for(self, values, method: Optional[Any] = ...): ... def match_compare_key(self): ... def build_compare_key(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class BaseConverter: regex: Any weight: Any map: Any def __init__(self, map): ... def to_python(self, value): ... def to_url(self, value): ... class UnicodeConverter(BaseConverter): regex: Any def __init__(self, map, minlength: int = ..., maxlength: Optional[Any] = ..., length: Optional[Any] = ...): ... class AnyConverter(BaseConverter): regex: Any def __init__(self, map, *items): ... class PathConverter(BaseConverter): regex: Any weight: Any class NumberConverter(BaseConverter): weight: Any fixed_digits: Any min: Any max: Any def __init__(self, map, fixed_digits: int = ..., min: Optional[Any] = ..., max: Optional[Any] = ...): ... def to_python(self, value): ... def to_url(self, value): ... class IntegerConverter(NumberConverter): regex: Any num_convert: Any class FloatConverter(NumberConverter): regex: Any num_convert: Any def __init__(self, map, min: Optional[Any] = ..., max: Optional[Any] = ...): ... class UUIDConverter(BaseConverter): regex: Any def to_python(self, value): ... def to_url(self, value): ... 
DEFAULT_CONVERTERS: Any class Map: default_converters: Any default_subdomain: Any charset: Text encoding_errors: Text strict_slashes: Any redirect_defaults: Any host_matching: Any converters: Any sort_parameters: Any sort_key: Any def __init__(self, rules: Optional[Any] = ..., default_subdomain: str = ..., charset: Text = ..., strict_slashes: bool = ..., redirect_defaults: bool = ..., converters: Optional[Any] = ..., sort_parameters: bool = ..., sort_key: Optional[Any] = ..., encoding_errors: Text = ..., host_matching: bool = ...): ... def is_endpoint_expecting(self, endpoint, *arguments): ... def iter_rules(self, endpoint: Optional[Any] = ...): ... def add(self, rulefactory): ... def bind(self, server_name, script_name: Optional[Any] = ..., subdomain: Optional[Any] = ..., url_scheme: str = ..., default_method: str = ..., path_info: Optional[Any] = ..., query_args: Optional[Any] = ...): ... def bind_to_environ(self, environ, server_name: Optional[Any] = ..., subdomain: Optional[Any] = ...): ... def update(self): ... class MapAdapter: map: Any server_name: Any script_name: Any subdomain: Any url_scheme: Any path_info: Any default_method: Any query_args: Any def __init__(self, map, server_name, script_name, subdomain, url_scheme, path_info, default_method, query_args: Optional[Any] = ...): ... def dispatch(self, view_func, path_info: Optional[Any] = ..., method: Optional[Any] = ..., catch_http_exceptions: bool = ...): ... def match(self, path_info: Optional[Any] = ..., method: Optional[Any] = ..., return_rule: bool = ..., query_args: Optional[Any] = ...): ... def test(self, path_info: Optional[Any] = ..., method: Optional[Any] = ...): ... def allowed_methods(self, path_info: Optional[Any] = ...): ... def get_host(self, domain_part): ... def get_default_redirect(self, rule, method, values, query_args): ... def encode_query_args(self, query_args): ... def make_redirect_url(self, path_info, query_args: Optional[Any] = ..., domain_part: Optional[Any] = ...): ... 
def make_alias_redirect_url(self, path, endpoint, values, method, query_args): ... def build(self, endpoint, values: Optional[Any] = ..., method: Optional[Any] = ..., force_external: bool = ..., append_unknown: bool = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/script.pyi0000644€tŠÔÚ€2›s®0000000136713576752252030701 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional argument_types: Any converters: Any def run(namespace: Optional[Any] = ..., action_prefix: str = ..., args: Optional[Any] = ...): ... def fail(message, code: int = ...): ... def find_actions(namespace, action_prefix): ... def print_usage(actions): ... def analyse_action(func): ... def make_shell(init_func: Optional[Any] = ..., banner: Optional[Any] = ..., use_ipython: bool = ...): ... def make_runserver(app_factory, hostname: str = ..., port: int = ..., use_reloader: bool = ..., use_debugger: bool = ..., use_evalex: bool = ..., threaded: bool = ..., processes: int = ..., static_files: Optional[Any] = ..., extra_files: Optional[Any] = ..., ssl_context: Optional[Any] = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/security.pyi0000644€tŠÔÚ€2›s®0000000101413576752252031231 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional SALT_CHARS: Any DEFAULT_PBKDF2_ITERATIONS: Any def pbkdf2_hex(data, salt, iterations=..., keylen: Optional[Any] = ..., hashfunc: Optional[Any] = ...): ... def pbkdf2_bin(data, salt, iterations=..., keylen: Optional[Any] = ..., hashfunc: Optional[Any] = ...): ... def safe_str_cmp(a, b): ... def gen_salt(length): ... def generate_password_hash(password, method: str = ..., salt_length: int = ...): ... def check_password_hash(pwhash, password): ... def safe_join(directory, filename): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/serving.pyi0000644€tŠÔÚ€2›s®0000000717213576752252031052 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Optional if sys.version_info < (3,): from SocketServer import ThreadingMixIn, ForkingMixIn from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler else: from socketserver import ThreadingMixIn, ForkingMixIn from http.server import HTTPServer, BaseHTTPRequestHandler class _SslDummy: def __getattr__(self, name): ... ssl: Any LISTEN_QUEUE: Any can_open_by_fd: Any class WSGIRequestHandler(BaseHTTPRequestHandler): @property def server_version(self): ... def make_environ(self): ... environ: Any close_connection: Any def run_wsgi(self): ... def handle(self): ... def initiate_shutdown(self): ... def connection_dropped(self, error, environ: Optional[Any] = ...): ... raw_requestline: Any def handle_one_request(self): ... def send_response(self, code, message: Optional[Any] = ...): ... def version_string(self): ... def address_string(self): ... def port_integer(self): ... def log_request(self, code: object = ..., size: object = ...) -> None: ... def log_error(self, *args): ... def log_message(self, format, *args): ... def log(self, type, message, *args): ... BaseRequestHandler: Any def generate_adhoc_ssl_pair(cn: Optional[Any] = ...): ... def make_ssl_devcert(base_path, host: Optional[Any] = ..., cn: Optional[Any] = ...): ... def generate_adhoc_ssl_context(): ... def load_ssl_context(cert_file, pkey_file: Optional[Any] = ..., protocol: Optional[Any] = ...): ... class _SSLContext: def __init__(self, protocol): ... def load_cert_chain(self, certfile, keyfile: Optional[Any] = ..., password: Optional[Any] = ...): ... def wrap_socket(self, sock, **kwargs): ... def is_ssl_error(error: Optional[Any] = ...): ... def select_ip_version(host, port): ... 
class BaseWSGIServer(HTTPServer): multithread: Any multiprocess: Any request_queue_size: Any address_family: Any app: Any passthrough_errors: Any shutdown_signal: Any host: Any port: Any socket: Any server_address: Any ssl_context: Any def __init__(self, host, port, app, handler: Optional[Any] = ..., passthrough_errors: bool = ..., ssl_context: Optional[Any] = ..., fd: Optional[Any] = ...): ... def log(self, type, message, *args): ... def serve_forever(self): ... def handle_error(self, request, client_address): ... def get_request(self): ... class ThreadedWSGIServer(ThreadingMixIn, BaseWSGIServer): multithread: Any daemon_threads: Any class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer): multiprocess: Any max_children: Any def __init__(self, host, port, app, processes: int = ..., handler: Optional[Any] = ..., passthrough_errors: bool = ..., ssl_context: Optional[Any] = ..., fd: Optional[Any] = ...): ... def make_server(host: Optional[Any] = ..., port: Optional[Any] = ..., app: Optional[Any] = ..., threaded: bool = ..., processes: int = ..., request_handler: Optional[Any] = ..., passthrough_errors: bool = ..., ssl_context: Optional[Any] = ..., fd: Optional[Any] = ...): ... def is_running_from_reloader(): ... def run_simple(hostname, port, application, use_reloader: bool = ..., use_debugger: bool = ..., use_evalex: bool = ..., extra_files: Optional[Any] = ..., reloader_interval: int = ..., reloader_type: str = ..., threaded: bool = ..., processes: int = ..., request_handler: Optional[Any] = ..., static_files: Optional[Any] = ..., passthrough_errors: bool = ..., ssl_context: Optional[Any] = ...): ... def run_with_reloader(*args, **kwargs): ... def main(): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/test.pyi0000644€tŠÔÚ€2›s®0000001350513576752252030351 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from wsgiref.types import WSGIEnvironment from typing import Any, Generic, Optional, Text, Tuple, Type, TypeVar, overload from typing_extensions import Literal if sys.version_info < (3,): from urllib2 import Request as U2Request from cookielib import CookieJar else: from urllib.request import Request as U2Request from http.cookiejar import CookieJar def stream_encode_multipart(values, use_tempfile: int = ..., threshold=..., boundary: Optional[Any] = ..., charset: Text = ...): ... def encode_multipart(values, boundary: Optional[Any] = ..., charset: Text = ...): ... def File(fd, filename: Optional[Any] = ..., mimetype: Optional[Any] = ...): ... class _TestCookieHeaders: headers: Any def __init__(self, headers): ... def getheaders(self, name): ... def get_all(self, name, default: Optional[Any] = ...): ... class _TestCookieResponse: headers: Any def __init__(self, headers): ... def info(self): ... class _TestCookieJar(CookieJar): def inject_wsgi(self, environ): ... def extract_wsgi(self, environ, headers): ... 
class EnvironBuilder: server_protocol: Any wsgi_version: Any request_class: Any charset: Text path: Any base_url: Any query_string: Any args: Any method: Any headers: Any content_type: Any errors_stream: Any multithread: Any multiprocess: Any run_once: Any environ_base: Any environ_overrides: Any input_stream: Any content_length: Any closed: Any def __init__(self, path: str = ..., base_url: Optional[Any] = ..., query_string: Optional[Any] = ..., method: str = ..., input_stream: Optional[Any] = ..., content_type: Optional[Any] = ..., content_length: Optional[Any] = ..., errors_stream: Optional[Any] = ..., multithread: bool = ..., multiprocess: bool = ..., run_once: bool = ..., headers: Optional[Any] = ..., data: Optional[Any] = ..., environ_base: Optional[Any] = ..., environ_overrides: Optional[Any] = ..., charset: Text = ...): ... form: Any files: Any @property def server_name(self): ... @property def server_port(self): ... def __del__(self): ... def close(self): ... def get_environ(self): ... def get_request(self, cls: Optional[Any] = ...): ... class ClientRedirectError(Exception): ... # Response type for the client below. # By default _R is Tuple[Iterable[Any], Union[Text, int], datastructures.Headers] _R = TypeVar('_R') class Client(Generic[_R]): application: Any response_wrapper: Optional[Type[_R]] cookie_jar: Any allow_subdomain_redirects: Any def __init__(self, application, response_wrapper: Optional[Type[_R]] = ..., use_cookies: bool = ..., allow_subdomain_redirects: bool = ...): ... def set_cookie(self, server_name, key, value: str = ..., max_age: Optional[Any] = ..., expires: Optional[Any] = ..., path: str = ..., domain: Optional[Any] = ..., secure: Optional[Any] = ..., httponly: bool = ..., charset: Text = ...): ... def delete_cookie(self, server_name, key, path: str = ..., domain: Optional[Any] = ...): ... def run_wsgi_app(self, environ, buffered: bool = ...): ... def resolve_redirect(self, response, new_location, environ, buffered: bool = ...): ... 
@overload def open(self, *args, as_tuple: Literal[True], **kwargs) -> Tuple[WSGIEnvironment, _R]: ... @overload def open(self, *args, as_tuple: Literal[False] = ..., **kwargs) -> _R: ... @overload def open(self, *args, as_tuple: bool, **kwargs) -> Any: ... @overload def get(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... @overload def get(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... @overload def get(self, *args, as_tuple: bool, **kw) -> Any: ... @overload def patch(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... @overload def patch(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... @overload def patch(self, *args, as_tuple: bool, **kw) -> Any: ... @overload def post(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... @overload def post(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... @overload def post(self, *args, as_tuple: bool, **kw) -> Any: ... @overload def head(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... @overload def head(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... @overload def head(self, *args, as_tuple: bool, **kw) -> Any: ... @overload def put(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... @overload def put(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... @overload def put(self, *args, as_tuple: bool, **kw) -> Any: ... @overload def delete(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... @overload def delete(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... @overload def delete(self, *args, as_tuple: bool, **kw) -> Any: ... @overload def options(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... @overload def options(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... @overload def options(self, *args, as_tuple: bool, **kw) -> Any: ... 
@overload def trace(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... @overload def trace(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... @overload def trace(self, *args, as_tuple: bool, **kw) -> Any: ... def create_environ(*args, **kwargs): ... def run_wsgi_app(app, environ, buffered: bool = ...): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/testapp.pyi0000644€tŠÔÚ€2›s®0000000034113576752252031044 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response logo: Any TEMPLATE: Any def iter_sys_path(): ... def render_testapp(req): ... def test_app(environ, start_response): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/urls.pyi0000644€tŠÔÚ€2›s®0000000550713576752252030362 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, NamedTuple, Optional, Text class _URLTuple(NamedTuple): scheme: Any netloc: Any path: Any query: Any fragment: Any class BaseURL(_URLTuple): def replace(self, **kwargs): ... @property def host(self): ... @property def ascii_host(self): ... @property def port(self): ... @property def auth(self): ... @property def username(self): ... @property def raw_username(self): ... @property def password(self): ... @property def raw_password(self): ... def decode_query(self, *args, **kwargs): ... def join(self, *args, **kwargs): ... def to_url(self): ... def decode_netloc(self): ... def to_uri_tuple(self): ... def to_iri_tuple(self): ... def get_file_location(self, pathformat: Optional[Any] = ...): ... class URL(BaseURL): def encode_netloc(self): ... def encode(self, charset: Text = ..., errors: Text = ...): ... class BytesURL(BaseURL): def encode_netloc(self): ... def decode(self, charset: Text = ..., errors: Text = ...): ... def url_parse(url, scheme: Optional[Any] = ..., allow_fragments: bool = ...): ... 
def url_quote(string, charset: Text = ..., errors: Text = ..., safe: str = ..., unsafe: str = ...): ... def url_quote_plus(string, charset: Text = ..., errors: Text = ..., safe: str = ...): ... def url_unparse(components): ... def url_unquote(string, charset: Text = ..., errors: Text = ..., unsafe: str = ...): ... def url_unquote_plus(s, charset: Text = ..., errors: Text = ...): ... def url_fix(s, charset: Text = ...): ... def uri_to_iri(uri, charset: Text = ..., errors: Text = ...): ... def iri_to_uri(iri, charset: Text = ..., errors: Text = ..., safe_conversion: bool = ...): ... def url_decode(s, charset: Text = ..., decode_keys: bool = ..., include_empty: bool = ..., errors: Text = ..., separator: str = ..., cls: Optional[Any] = ...): ... def url_decode_stream(stream, charset: Text = ..., decode_keys: bool = ..., include_empty: bool = ..., errors: Text = ..., separator: str = ..., cls: Optional[Any] = ..., limit: Optional[Any] = ..., return_iterator: bool = ...): ... def url_encode(obj, charset: Text = ..., encode_keys: bool = ..., sort: bool = ..., key: Optional[Any] = ..., separator: bytes = ...): ... def url_encode_stream(obj, stream: Optional[Any] = ..., charset: Text = ..., encode_keys: bool = ..., sort: bool = ..., key: Optional[Any] = ..., separator: bytes = ...): ... def url_join(base, url, allow_fragments: bool = ...): ... class Href: base: Any charset: Text sort: Any key: Any def __init__(self, base: str = ..., charset: Text = ..., sort: bool = ..., key: Optional[Any] = ...): ... def __getattr__(self, name): ... def __call__(self, *path, **query): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/useragents.pyi0000644€tŠÔÚ€2›s®0000000046713576752252031555 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class UserAgentParser: platforms: Any browsers: Any def __init__(self): ... def __call__(self, user_agent): ... class UserAgent: string: Any def __init__(self, environ_or_string): ... def to_header(self): ... 
def __nonzero__(self): ... __bool__: Any mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/utils.pyi0000644€tŠÔÚ€2›s®0000000362613576752252030535 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, overload, Type, TypeVar from werkzeug._internal import _DictAccessorProperty from werkzeug.wrappers import Response class cached_property(property): __name__: Any __module__: Any __doc__: Any func: Any def __init__(self, func, name: Optional[Any] = ..., doc: Optional[Any] = ...): ... def __set__(self, obj, value): ... def __get__(self, obj, type: Optional[Any] = ...): ... class environ_property(_DictAccessorProperty): read_only: Any def lookup(self, obj): ... class header_property(_DictAccessorProperty): def lookup(self, obj): ... class HTMLBuilder: def __init__(self, dialect): ... def __call__(self, s): ... def __getattr__(self, tag): ... html: Any xhtml: Any def get_content_type(mimetype, charset): ... def format_string(string, context): ... def secure_filename(filename): ... def escape(s, quote: Optional[Any] = ...): ... def unescape(s): ... # 'redirect' returns a werkzeug Response, unless you give it # another Response type to use instead. _RC = TypeVar("_RC", bound=Response) @overload def redirect(location, code: int = ..., Response: None = ...) -> Response: ... @overload def redirect(location, code: int = ..., Response: Type[_RC] = ...) -> _RC: ... def append_slash_redirect(environ, code: int = ...): ... def import_string(import_name, silent: bool = ...): ... def find_modules(import_path, include_packages: bool = ..., recursive: bool = ...): ... def validate_arguments(func, args, kwargs, drop_extra: bool = ...): ... def bind_arguments(func, args, kwargs): ... class ArgumentValidationError(ValueError): missing: Any extra: Any extra_positional: Any def __init__(self, missing: Optional[Any] = ..., extra: Optional[Any] = ..., extra_positional: Optional[Any] = ...): ... 
class ImportStringError(ImportError): import_name: Any exception: Any def __init__(self, import_name, exception): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/wrappers.pyi0000644€tŠÔÚ€2›s®0000002210313576752252031227 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from datetime import datetime from typing import ( Any, Callable, Iterable, Iterator, Mapping, MutableMapping, Optional, Sequence, Text, Tuple, Type, TypeVar, Union, overload ) from wsgiref.types import WSGIEnvironment, InputStream from .datastructures import ( Authorization, CombinedMultiDict, EnvironHeaders, Headers, ImmutableMultiDict, MultiDict, ImmutableTypeConversionDict, HeaderSet, Accept, MIMEAccept, CharsetAccept, LanguageAccept, ) from .useragents import UserAgent if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal class BaseRequest: charset: str encoding_errors: str max_content_length: Optional[int] max_form_memory_size: int parameter_storage_class: Type[Any] list_storage_class: Type[Any] dict_storage_class: Type[Any] form_data_parser_class: Type[Any] trusted_hosts: Optional[Sequence[Text]] disable_data_descriptor: Any environ: WSGIEnvironment = ... shallow: Any def __init__(self, environ: WSGIEnvironment, populate_request: bool = ..., shallow: bool = ...) -> None: ... @property def url_charset(self) -> str: ... @classmethod def from_values(cls, *args, **kwargs) -> BaseRequest: ... @classmethod def application(cls, f): ... @property def want_form_data_parsed(self): ... def make_form_data_parser(self): ... def close(self) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, tb): ... @property def stream(self) -> InputStream: ... input_stream: InputStream args: ImmutableMultiDict[Any, Any] @property def data(self) -> bytes: ... @overload def get_data(self, cache: bool = ..., as_text: Literal[False] = ..., parse_form_data: bool = ...) -> bytes: ... 
@overload def get_data(self, cache: bool, as_text: Literal[True], parse_form_data: bool = ...) -> Text: ... @overload def get_data(self, *, as_text: Literal[True], parse_form_data: bool = ...) -> Text: ... @overload def get_data(self, cache: bool, as_text: bool, parse_form_data: bool = ...) -> Any: ... @overload def get_data(self, *, as_text: bool, parse_form_data: bool = ...) -> Any: ... form: ImmutableMultiDict[Any, Any] values: CombinedMultiDict[Any, Any] files: MultiDict[Any, Any] @property def cookies(self) -> ImmutableTypeConversionDict[str, str]: ... headers: EnvironHeaders path: Text full_path: Text script_root: Text url: Text base_url: Text url_root: Text host_url: Text host: Text query_string: bytes method: Text @property def access_route(self) -> Sequence[str]: ... @property def remote_addr(self) -> str: ... remote_user: Text scheme: str is_xhr: bool is_secure: bool is_multithread: bool is_multiprocess: bool is_run_once: bool # These are not preset at runtime but we add them since monkeypatching this # class is quite common. def __setattr__(self, name: str, value: Any): ... def __getattr__(self, name: str): ... _OnCloseT = TypeVar('_OnCloseT', bound=Callable[[], Any]) _SelfT = TypeVar('_SelfT', bound=BaseResponse) class BaseResponse: charset: str default_status: int default_mimetype: str implicit_sequence_conversion: bool autocorrect_location_header: bool automatically_set_content_length: bool headers: Headers status_code: int status: str direct_passthrough: bool response: Iterable[bytes] def __init__(self, response: Optional[Union[str, bytes, bytearray, Iterable[str], Iterable[bytes]]] = ..., status: Optional[Union[Text, int]] = ..., headers: Optional[Union[Headers, Mapping[Text, Text], Sequence[Tuple[Text, Text]]]] = ..., mimetype: Optional[Text] = ..., content_type: Optional[Text] = ..., direct_passthrough: bool = ...) -> None: ... def call_on_close(self, func: _OnCloseT) -> _OnCloseT: ... 
@classmethod def force_type(cls: Type[_SelfT], response: object, environ: Optional[WSGIEnvironment] = ...) -> _SelfT: ... @classmethod def from_app(cls: Type[_SelfT], app: Any, environ: WSGIEnvironment, buffered: bool = ...) -> _SelfT: ... @overload def get_data(self, as_text: Literal[False] = ...) -> bytes: ... @overload def get_data(self, as_text: Literal[True]) -> Text: ... @overload def get_data(self, as_text: bool) -> Any: ... def set_data(self, value: Union[bytes, Text]) -> None: ... data: Any def calculate_content_length(self) -> Optional[int]: ... def make_sequence(self) -> None: ... def iter_encoded(self) -> Iterator[bytes]: ... def set_cookie(self, key, value: str = ..., max_age: Optional[Any] = ..., expires: Optional[Any] = ..., path: str = ..., domain: Optional[Any] = ..., secure: bool = ..., httponly: bool = ...): ... def delete_cookie(self, key, path: str = ..., domain: Optional[Any] = ...): ... @property def is_streamed(self) -> bool: ... @property def is_sequence(self) -> bool: ... def close(self) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, tb): ... # The no_etag argument if fictional, but required for compatibility with # ETagResponseMixin def freeze(self, no_etag: bool = ...) -> None: ... def get_wsgi_headers(self, environ): ... def get_app_iter(self, environ): ... def get_wsgi_response(self, environ): ... def __call__(self, environ, start_response): ... class AcceptMixin(object): @property def accept_mimetypes(self) -> MIMEAccept: ... @property def accept_charsets(self) -> CharsetAccept: ... @property def accept_encodings(self) -> Accept: ... @property def accept_languages(self) -> LanguageAccept: ... class ETagRequestMixin: @property def cache_control(self): ... @property def if_match(self): ... @property def if_none_match(self): ... @property def if_modified_since(self): ... @property def if_unmodified_since(self): ... @property def if_range(self): ... @property def range(self): ... 
class UserAgentMixin: @property def user_agent(self) -> UserAgent: ... class AuthorizationMixin: @property def authorization(self) -> Optional[Authorization]: ... class StreamOnlyMixin: disable_data_descriptor: Any want_form_data_parsed: Any class ETagResponseMixin: @property def cache_control(self): ... status_code: Any def make_conditional(self, request_or_environ, accept_ranges: bool = ..., complete_length: Optional[Any] = ...): ... def add_etag(self, overwrite: bool = ..., weak: bool = ...): ... def set_etag(self, etag, weak: bool = ...): ... def get_etag(self): ... def freeze(self, no_etag: bool = ...) -> None: ... accept_ranges: Any content_range: Any class ResponseStream: mode: Any response: Any closed: Any def __init__(self, response): ... def write(self, value): ... def writelines(self, seq): ... def close(self): ... def flush(self): ... def isatty(self): ... @property def encoding(self): ... class ResponseStreamMixin: @property def stream(self) -> ResponseStream: ... class CommonRequestDescriptorsMixin: @property def content_type(self) -> Optional[str]: ... @property def content_length(self) -> Optional[int]: ... @property def content_encoding(self) -> Optional[str]: ... @property def content_md5(self) -> Optional[str]: ... @property def referrer(self) -> Optional[str]: ... @property def date(self) -> Optional[datetime]: ... @property def max_forwards(self) -> Optional[int]: ... @property def mimetype(self) -> str: ... @property def mimetype_params(self) -> Mapping[str, str]: ... @property def pragma(self) -> HeaderSet: ... class CommonResponseDescriptorsMixin: mimetype: Optional[str] = ... @property def mimetype_params(self) -> MutableMapping[str, str]: ... location: Optional[str] = ... age: Any = ... # get: Optional[datetime.timedelta] content_type: Optional[str] = ... content_length: Optional[int] = ... content_location: Optional[str] = ... content_encoding: Optional[str] = ... content_md5: Optional[str] = ... date: Any = ... 
# get: Optional[datetime.datetime] expires: Any = ... # get: Optional[datetime.datetime] last_modified: Any = ... # get: Optional[datetime.datetime] retry_after: Any = ... # get: Optional[datetime.datetime] vary: Optional[str] = ... content_language: Optional[str] = ... allow: Optional[str] = ... class WWWAuthenticateMixin: @property def www_authenticate(self): ... class Request(BaseRequest, AcceptMixin, ETagRequestMixin, UserAgentMixin, AuthorizationMixin, CommonRequestDescriptorsMixin): ... class PlainRequest(StreamOnlyMixin, Request): ... class Response(BaseResponse, ETagResponseMixin, ResponseStreamMixin, CommonResponseDescriptorsMixin, WWWAuthenticateMixin): ... mypy-0.761/mypy/typeshed/third_party/2and3/werkzeug/wsgi.pyi0000644€tŠÔÚ€2›s®0000000601213576752252030336 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Protocol, Iterable, Text from wsgiref.types import WSGIEnvironment, InputStream from .middleware.dispatcher import DispatcherMiddleware as DispatcherMiddleware from .middleware.http_proxy import ProxyMiddleware as ProxyMiddleware from .middleware.shared_data import SharedDataMiddleware as SharedDataMiddleware def responder(f): ... def get_current_url(environ, root_only: bool = ..., strip_querystring: bool = ..., host_only: bool = ..., trusted_hosts: Optional[Any] = ...): ... def host_is_trusted(hostname, trusted_list): ... def get_host(environ, trusted_hosts: Optional[Any] = ...): ... def get_content_length(environ: WSGIEnvironment) -> Optional[int]: ... def get_input_stream(environ: WSGIEnvironment, safe_fallback: bool = ...) -> InputStream: ... def get_query_string(environ): ... def get_path_info(environ, charset: Text = ..., errors: Text = ...): ... def get_script_name(environ, charset: Text = ..., errors: Text = ...): ... def pop_path_info(environ, charset: Text = ..., errors: Text = ...): ... def peek_path_info(environ, charset: Text = ..., errors: Text = ...): ... 
def extract_path_info(environ_or_baseurl, path_or_url, charset: Text = ..., errors: Text = ..., collapse_http_schemes: bool = ...): ... class ClosingIterator: def __init__(self, iterable, callbacks: Optional[Any] = ...): ... def __iter__(self): ... def __next__(self): ... def close(self): ... class _Readable(Protocol): def read(self, size: int = ...) -> bytes: ... def wrap_file(environ: WSGIEnvironment, file: _Readable, buffer_size: int = ...) -> Iterable[bytes]: ... class FileWrapper: file: _Readable buffer_size: int def __init__(self, file: _Readable, buffer_size: int = ...) -> None: ... def close(self) -> None: ... def seekable(self) -> bool: ... def seek(self, offset: int, whence: int = ...) -> None: ... def tell(self) -> Optional[int]: ... def __iter__(self) -> FileWrapper: ... def __next__(self) -> bytes: ... class _RangeWrapper: iterable: Any byte_range: Any start_byte: Any end_byte: Any read_length: Any seekable: Any end_reached: Any def __init__(self, iterable, start_byte: int = ..., byte_range: Optional[Any] = ...): ... def __iter__(self): ... def __next__(self): ... def close(self): ... def make_line_iter(stream, limit: Optional[Any] = ..., buffer_size=..., cap_at_buffer: bool = ...): ... def make_chunk_iter(stream, separator, limit: Optional[Any] = ..., buffer_size=..., cap_at_buffer: bool = ...): ... class LimitedStream: limit: Any def __init__(self, stream, limit): ... def __iter__(self): ... @property def is_exhausted(self): ... def on_exhausted(self): ... def on_disconnect(self): ... def exhaust(self, chunk_size=...): ... def read(self, size: Optional[Any] = ...): ... def readline(self, size: Optional[Any] = ...): ... def readlines(self, size: Optional[Any] = ...): ... def tell(self): ... def __next__(self): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/yaml/0000755€tŠÔÚ€2›s®0000000000013576752267025750 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/2and3/yaml/__init__.pyi0000644€tŠÔÚ€2›s®0000001271013576752252030225 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Iterator, Optional, overload, Sequence, Text, Union import sys from yaml.error import * # noqa: F403 from yaml.tokens import * # noqa: F403 from yaml.events import * # noqa: F403 from yaml.nodes import * # noqa: F403 from yaml.loader import * # noqa: F403 from yaml.dumper import * # noqa: F403 from . import resolver # Help mypy a bit; this is implied by loader and dumper from .cyaml import * if sys.version_info < (3,): _Str = Union[Text, str] else: _Str = str # FIXME: the functions really return py2:unicode/py3:str if encoding is None, otherwise py2:str/py3:bytes. Waiting for python/mypy#5621 _Yaml = Any __with_libyaml__: Any __version__: str def scan(stream, Loader=...): ... def parse(stream, Loader=...): ... def compose(stream, Loader=...): ... def compose_all(stream, Loader=...): ... def load(stream: Union[bytes, IO[bytes], str, IO[str]], Loader=...) -> Any: ... def load_all(stream: Union[bytes, IO[bytes], str, IO[str]], Loader=...) -> Iterator[Any]: ... def full_load(stream: Union[bytes, IO[bytes], str, IO[str]]) -> Any: ... def full_load_all(stream: Union[bytes, IO[bytes], str, IO[str]]) -> Iterator[Any]: ... def safe_load(stream: Union[bytes, IO[bytes], str, IO[str]]) -> Any: ... def safe_load_all(stream: Union[bytes, IO[bytes], str, IO[str]]) -> Iterator[Any]: ... def emit(events, stream=..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=...): ... @overload def serialize_all(nodes, stream: IO[str], Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ... 
@overload def serialize_all(nodes, stream: None = ..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding: Optional[_Str] = ..., explicit_start=..., explicit_end=..., version=..., tags=...) -> _Yaml: ... @overload def serialize(node, stream: IO[str], Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ... @overload def serialize(node, stream: None = ..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding: Optional[_Str] = ..., explicit_start=..., explicit_end=..., version=..., tags=...) -> _Yaml: ... @overload def dump_all(documents: Sequence[Any], stream: IO[str], Dumper=..., default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> None: ... @overload def dump_all(documents: Sequence[Any], stream: None = ..., Dumper=..., default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding: Optional[_Str] = ..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> _Yaml: ... @overload def dump(data: Any, stream: IO[str], Dumper=..., default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> None: ... @overload def dump(data: Any, stream: None = ..., Dumper=..., default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding: Optional[_Str] = ..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> _Yaml: ... 
@overload def safe_dump_all(documents: Sequence[Any], stream: IO[str], default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> None: ... @overload def safe_dump_all(documents: Sequence[Any], stream: None = ..., default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding: Optional[_Str] = ..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> _Yaml: ... @overload def safe_dump(data: Any, stream: IO[str], default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> None: ... @overload def safe_dump(data: Any, stream: None = ..., default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding: Optional[_Str] = ..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> _Yaml: ... def add_implicit_resolver(tag, regexp, first=..., Loader=..., Dumper=...): ... def add_path_resolver(tag, path, kind=..., Loader=..., Dumper=...): ... def add_constructor(tag, constructor, Loader=...): ... def add_multi_constructor(tag_prefix, multi_constructor, Loader=...): ... def add_representer(data_type, representer, Dumper=...): ... def add_multi_representer(data_type, multi_representer, Dumper=...): ... class YAMLObjectMetaclass(type): def __init__(self, name, bases, kwds) -> None: ... class YAMLObject(metaclass=YAMLObjectMetaclass): yaml_loader: Any yaml_dumper: Any yaml_tag: Any yaml_flow_style: Any @classmethod def from_yaml(cls, loader, node): ... @classmethod def to_yaml(cls, dumper, data): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/yaml/composer.pyi0000644€tŠÔÚ€2›s®0000000112313576752252030311 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import Mark, YAMLError, MarkedYAMLError from yaml.nodes import Node, ScalarNode, CollectionNode, SequenceNode, MappingNode class ComposerError(MarkedYAMLError): ... class Composer: anchors: Any def __init__(self) -> None: ... def check_node(self): ... def get_node(self): ... def get_single_node(self): ... def compose_document(self): ... def compose_node(self, parent, index): ... def compose_scalar_node(self, anchor): ... def compose_sequence_node(self, anchor): ... def compose_mapping_node(self, anchor): ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/constructor.pyi0000644€tŠÔÚ€2›s®0000000710613576752252031056 0ustar jukkaDROPBOX\Domain Users00000000000000from yaml.error import Mark, YAMLError, MarkedYAMLError from yaml.nodes import Node, ScalarNode, CollectionNode, SequenceNode, MappingNode from typing import Any class ConstructorError(MarkedYAMLError): ... class BaseConstructor: yaml_constructors: Any yaml_multi_constructors: Any constructed_objects: Any recursive_objects: Any state_generators: Any deep_construct: Any def __init__(self) -> None: ... def check_data(self): ... def get_data(self): ... def get_single_data(self): ... def construct_document(self, node): ... def construct_object(self, node, deep=...): ... def construct_scalar(self, node): ... def construct_sequence(self, node, deep=...): ... def construct_mapping(self, node, deep=...): ... def construct_pairs(self, node, deep=...): ... @classmethod def add_constructor(cls, tag, constructor): ... @classmethod def add_multi_constructor(cls, tag_prefix, multi_constructor): ... class SafeConstructor(BaseConstructor): def construct_scalar(self, node): ... def flatten_mapping(self, node): ... def construct_mapping(self, node, deep=...): ... def construct_yaml_null(self, node): ... 
bool_values: Any def construct_yaml_bool(self, node): ... def construct_yaml_int(self, node): ... inf_value: Any nan_value: Any def construct_yaml_float(self, node): ... def construct_yaml_binary(self, node): ... timestamp_regexp: Any def construct_yaml_timestamp(self, node): ... def construct_yaml_omap(self, node): ... def construct_yaml_pairs(self, node): ... def construct_yaml_set(self, node): ... def construct_yaml_str(self, node): ... def construct_yaml_seq(self, node): ... def construct_yaml_map(self, node): ... def construct_yaml_object(self, node, cls): ... def construct_undefined(self, node): ... class FullConstructor(SafeConstructor): def construct_python_str(self, node): ... def construct_python_unicode(self, node): ... def construct_python_bytes(self, node): ... def construct_python_long(self, node): ... def construct_python_complex(self, node): ... def construct_python_tuple(self, node): ... def find_python_module(self, name, mark, unsafe=...): ... def find_python_name(self, name, mark, unsafe=...): ... def construct_python_name(self, suffix, node): ... def construct_python_module(self, suffix, node): ... def make_python_instance(self, suffix, node, args=..., kwds=..., newobj=..., unsafe=...): ... def set_python_instance_state(self, instance, state): ... def construct_python_object(self, suffix, node): ... def construct_python_object_apply(self, suffix, node, newobj=...): ... def construct_python_object_new(self, suffix, node): ... class Constructor(SafeConstructor): def construct_python_str(self, node): ... def construct_python_unicode(self, node): ... def construct_python_long(self, node): ... def construct_python_complex(self, node): ... def construct_python_tuple(self, node): ... def find_python_module(self, name, mark): ... def find_python_name(self, name, mark): ... def construct_python_name(self, suffix, node): ... def construct_python_module(self, suffix, node): ... class classobj: ... 
def make_python_instance(self, suffix, node, args=..., kwds=..., newobj=...): ... def set_python_instance_state(self, instance, state): ... def construct_python_object(self, suffix, node): ... def construct_python_object_apply(self, suffix, node, newobj=...): ... def construct_python_object_new(self, suffix, node): ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/cyaml.pyi0000644€tŠÔÚ€2›s®0000000437413576752252027602 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Mapping, Optional, Sequence, Text, Union from typing_extensions import Protocol from yaml.constructor import BaseConstructor, Constructor, SafeConstructor from yaml.representer import BaseRepresenter, Representer, SafeRepresenter from yaml.resolver import BaseResolver, Resolver from yaml.serializer import Serializer class _Readable(Protocol): def read(self, size: int) -> Union[Text, bytes]: ... class CParser: def __init__(self, stream: Union[str, bytes, _Readable]) -> None: ... class CBaseLoader(CParser, BaseConstructor, BaseResolver): def __init__(self, stream: Union[str, bytes, _Readable]) -> None: ... class CLoader(CParser, SafeConstructor, Resolver): def __init__(self, stream: Union[str, bytes, _Readable]) -> None: ... class CSafeLoader(CParser, SafeConstructor, Resolver): def __init__(self, stream: Union[str, bytes, _Readable]) -> None: ... class CDangerLoader(CParser, Constructor, Resolver): ... # undocumented class CEmitter(object): def __init__(self, stream: IO[Any], canonical: Optional[Any] = ..., indent: Optional[int] = ..., width: Optional[int] = ..., allow_unicode: Optional[Any] = ..., line_break: Optional[str] = ..., encoding: Optional[Text] = ..., explicit_start: Optional[Any] = ..., explicit_end: Optional[Any] = ..., version: Optional[Sequence[int]] = ..., tags: Optional[Mapping[Text, Text]] = ...) -> None: ... 
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): def __init__(self, stream: IO[Any], default_style: Optional[str] = ..., default_flow_style: Optional[bool] = ..., canonical: Optional[Any] = ..., indent: Optional[int] = ..., width: Optional[int] = ..., allow_unicode: Optional[Any] = ..., line_break: Optional[str] = ..., encoding: Optional[Text] = ..., explicit_start: Optional[Any] = ..., explicit_end: Optional[Any] = ..., version: Optional[Sequence[int]] = ..., tags: Optional[Mapping[Text, Text]] = ...) -> None: ... class CDumper(CEmitter, SafeRepresenter, Resolver): ... CSafeDumper = CDumper class CDangerDumper(CEmitter, Serializer, Representer, Resolver): ... # undocumented mypy-0.761/mypy/typeshed/third_party/2and3/yaml/dumper.pyi0000644€tŠÔÚ€2›s®0000000221713576752252027763 0ustar jukkaDROPBOX\Domain Users00000000000000from yaml.emitter import Emitter from yaml.serializer import Serializer from yaml.representer import BaseRepresenter, Representer, SafeRepresenter from yaml.resolver import BaseResolver, Resolver class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): def __init__(self, stream, default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> None: ... class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): def __init__(self, stream, default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> None: ... class Dumper(Emitter, Serializer, Representer, Resolver): def __init__(self, stream, default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=..., sort_keys: bool = ...) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/2and3/yaml/emitter.pyi0000644€tŠÔÚ€2›s®0000000731313576752252030142 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class EmitterError(YAMLError): ... class ScalarAnalysis: scalar: Any empty: Any multiline: Any allow_flow_plain: Any allow_block_plain: Any allow_single_quoted: Any allow_double_quoted: Any allow_block: Any def __init__(self, scalar, empty, multiline, allow_flow_plain, allow_block_plain, allow_single_quoted, allow_double_quoted, allow_block) -> None: ... class Emitter: DEFAULT_TAG_PREFIXES: Any stream: Any encoding: Any states: Any state: Any events: Any event: Any indents: Any indent: Any flow_level: Any root_context: Any sequence_context: Any mapping_context: Any simple_key_context: Any line: Any column: Any whitespace: Any indention: Any open_ended: Any canonical: Any allow_unicode: Any best_indent: Any best_width: Any best_line_break: Any tag_prefixes: Any prepared_anchor: Any prepared_tag: Any analysis: Any style: Any def __init__(self, stream, canonical=..., indent=..., width=..., allow_unicode=..., line_break=...) -> None: ... def dispose(self): ... def emit(self, event): ... def need_more_events(self): ... def need_events(self, count): ... def increase_indent(self, flow=..., indentless=...): ... def expect_stream_start(self): ... def expect_nothing(self): ... def expect_first_document_start(self): ... def expect_document_start(self, first=...): ... def expect_document_end(self): ... def expect_document_root(self): ... def expect_node(self, root=..., sequence=..., mapping=..., simple_key=...): ... def expect_alias(self): ... def expect_scalar(self): ... def expect_flow_sequence(self): ... def expect_first_flow_sequence_item(self): ... def expect_flow_sequence_item(self): ... def expect_flow_mapping(self): ... def expect_first_flow_mapping_key(self): ... def expect_flow_mapping_key(self): ... def expect_flow_mapping_simple_value(self): ... 
def expect_flow_mapping_value(self): ... def expect_block_sequence(self): ... def expect_first_block_sequence_item(self): ... def expect_block_sequence_item(self, first=...): ... def expect_block_mapping(self): ... def expect_first_block_mapping_key(self): ... def expect_block_mapping_key(self, first=...): ... def expect_block_mapping_simple_value(self): ... def expect_block_mapping_value(self): ... def check_empty_sequence(self): ... def check_empty_mapping(self): ... def check_empty_document(self): ... def check_simple_key(self): ... def process_anchor(self, indicator): ... def process_tag(self): ... def choose_scalar_style(self): ... def process_scalar(self): ... def prepare_version(self, version): ... def prepare_tag_handle(self, handle): ... def prepare_tag_prefix(self, prefix): ... def prepare_tag(self, tag): ... def prepare_anchor(self, anchor): ... def analyze_scalar(self, scalar): ... def flush_stream(self): ... def write_stream_start(self): ... def write_stream_end(self): ... def write_indicator(self, indicator, need_whitespace, whitespace=..., indention=...): ... def write_indent(self): ... def write_line_break(self, data=...): ... def write_version_directive(self, version_text): ... def write_tag_directive(self, handle_text, prefix_text): ... def write_single_quoted(self, text, split=...): ... ESCAPE_REPLACEMENTS: Any def write_double_quoted(self, text, split=...): ... def determine_block_hints(self, text): ... def write_folded(self, text): ... def write_literal(self, text): ... def write_plain(self, text, split=...): ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/error.pyi0000644€tŠÔÚ€2›s®0000000102713576752252027616 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Mark: name: Any index: Any line: Any column: Any buffer: Any pointer: Any def __init__(self, name, index, line, column, buffer, pointer) -> None: ... def get_snippet(self, indent=..., max_length=...): ... class YAMLError(Exception): ... 
class MarkedYAMLError(YAMLError): context: Any context_mark: Any problem: Any problem_mark: Any note: Any def __init__(self, context=..., context_mark=..., problem=..., problem_mark=..., note=...) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/events.pyi0000644€tŠÔÚ€2›s®0000000317613576752252030000 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Event: start_mark: Any end_mark: Any def __init__(self, start_mark=..., end_mark=...) -> None: ... class NodeEvent(Event): anchor: Any start_mark: Any end_mark: Any def __init__(self, anchor, start_mark=..., end_mark=...) -> None: ... class CollectionStartEvent(NodeEvent): anchor: Any tag: Any implicit: Any start_mark: Any end_mark: Any flow_style: Any def __init__(self, anchor, tag, implicit, start_mark=..., end_mark=..., flow_style=...) -> None: ... class CollectionEndEvent(Event): ... class StreamStartEvent(Event): start_mark: Any end_mark: Any encoding: Any def __init__(self, start_mark=..., end_mark=..., encoding=...) -> None: ... class StreamEndEvent(Event): ... class DocumentStartEvent(Event): start_mark: Any end_mark: Any explicit: Any version: Any tags: Any def __init__(self, start_mark=..., end_mark=..., explicit=..., version=..., tags=...) -> None: ... class DocumentEndEvent(Event): start_mark: Any end_mark: Any explicit: Any def __init__(self, start_mark=..., end_mark=..., explicit=...) -> None: ... class AliasEvent(NodeEvent): ... class ScalarEvent(NodeEvent): anchor: Any tag: Any implicit: Any value: Any start_mark: Any end_mark: Any style: Any def __init__(self, anchor, tag, implicit, value, start_mark=..., end_mark=..., style=...) -> None: ... class SequenceStartEvent(CollectionStartEvent): ... class SequenceEndEvent(CollectionEndEvent): ... class MappingStartEvent(CollectionStartEvent): ... class MappingEndEvent(CollectionEndEvent): ... 
mypy-0.761/mypy/typeshed/third_party/2and3/yaml/loader.pyi0000644€tŠÔÚ€2›s®0000000137713576752252027743 0ustar jukkaDROPBOX\Domain Users00000000000000from yaml.reader import Reader from yaml.scanner import Scanner from yaml.parser import Parser from yaml.composer import Composer from yaml.constructor import BaseConstructor, FullConstructor, SafeConstructor, Constructor from yaml.resolver import BaseResolver, Resolver class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): def __init__(self, stream) -> None: ... class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver): def __init__(self, stream) -> None: ... class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): def __init__(self, stream) -> None: ... class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): def __init__(self, stream) -> None: ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/nodes.pyi0000644€tŠÔÚ€2›s®0000000125513576752252027600 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Node: tag: Any value: Any start_mark: Any end_mark: Any def __init__(self, tag, value, start_mark, end_mark) -> None: ... class ScalarNode(Node): id: Any tag: Any value: Any start_mark: Any end_mark: Any style: Any def __init__(self, tag, value, start_mark=..., end_mark=..., style=...) -> None: ... class CollectionNode(Node): tag: Any value: Any start_mark: Any end_mark: Any flow_style: Any def __init__(self, tag, value, start_mark=..., end_mark=..., flow_style=...) -> None: ... class SequenceNode(CollectionNode): id: Any class MappingNode(CollectionNode): id: Any mypy-0.761/mypy/typeshed/third_party/2and3/yaml/parser.pyi0000644€tŠÔÚ€2›s®0000000317713576752252027771 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import MarkedYAMLError class ParserError(MarkedYAMLError): ... 
class Parser: DEFAULT_TAGS: Any current_event: Any yaml_version: Any tag_handles: Any states: Any marks: Any state: Any def __init__(self) -> None: ... def dispose(self): ... def check_event(self, *choices): ... def peek_event(self): ... def get_event(self): ... def parse_stream_start(self): ... def parse_implicit_document_start(self): ... def parse_document_start(self): ... def parse_document_end(self): ... def parse_document_content(self): ... def process_directives(self): ... def parse_block_node(self): ... def parse_flow_node(self): ... def parse_block_node_or_indentless_sequence(self): ... def parse_node(self, block=..., indentless_sequence=...): ... def parse_block_sequence_first_entry(self): ... def parse_block_sequence_entry(self): ... def parse_indentless_sequence_entry(self): ... def parse_block_mapping_first_key(self): ... def parse_block_mapping_key(self): ... def parse_block_mapping_value(self): ... def parse_flow_sequence_first_entry(self): ... def parse_flow_sequence_entry(self, first=...): ... def parse_flow_sequence_entry_mapping_key(self): ... def parse_flow_sequence_entry_mapping_value(self): ... def parse_flow_sequence_entry_mapping_end(self): ... def parse_flow_mapping_first_key(self): ... def parse_flow_mapping_key(self, first=...): ... def parse_flow_mapping_value(self): ... def parse_flow_mapping_empty_value(self): ... def process_empty_scalar(self, mark): ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/reader.pyi0000644€tŠÔÚ€2›s®0000000147713576752252027740 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class ReaderError(YAMLError): name: Any character: Any position: Any encoding: Any reason: Any def __init__(self, name, position, character, encoding, reason) -> None: ... class Reader: name: Any stream: Any stream_pointer: Any eof: Any buffer: Any pointer: Any raw_buffer: Any raw_decode: Any encoding: Any index: Any line: Any column: Any def __init__(self, stream) -> None: ... 
def peek(self, index=...): ... def prefix(self, length=...): ... def forward(self, length=...): ... def get_mark(self): ... def determine_encoding(self): ... NON_PRINTABLE: Any def check_printable(self, data): ... def update(self, length): ... def update_raw(self, size=...): ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/representer.pyi0000644€tŠÔÚ€2›s®0000000377713576752252031041 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class RepresenterError(YAMLError): ... class BaseRepresenter: yaml_representers: Any yaml_multi_representers: Any default_style: Any default_flow_style: Any sort_keys: bool represented_objects: Any object_keeper: Any alias_key: Any def __init__(self, default_style=..., default_flow_style=..., sort_keys: bool = ...) -> None: ... def represent(self, data): ... def get_classobj_bases(self, cls): ... def represent_data(self, data): ... @classmethod def add_representer(cls, data_type, representer): ... @classmethod def add_multi_representer(cls, data_type, representer): ... def represent_scalar(self, tag, value, style=...): ... def represent_sequence(self, tag, sequence, flow_style=...): ... def represent_mapping(self, tag, mapping, flow_style=...): ... def ignore_aliases(self, data): ... class SafeRepresenter(BaseRepresenter): def ignore_aliases(self, data): ... def represent_none(self, data): ... def represent_str(self, data): ... def represent_unicode(self, data): ... def represent_bool(self, data): ... def represent_int(self, data): ... def represent_long(self, data): ... inf_value: Any def represent_float(self, data): ... def represent_list(self, data): ... def represent_dict(self, data): ... def represent_set(self, data): ... def represent_date(self, data): ... def represent_datetime(self, data): ... def represent_yaml_object(self, tag, data, cls, flow_style=...): ... def represent_undefined(self, data): ... class Representer(SafeRepresenter): def represent_str(self, data): ... 
def represent_unicode(self, data): ... def represent_long(self, data): ... def represent_complex(self, data): ... def represent_tuple(self, data): ... def represent_name(self, data): ... def represent_module(self, data): ... def represent_instance(self, data): ... def represent_object(self, data): ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/resolver.pyi0000644€tŠÔÚ€2›s®0000000142113576752252030324 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class ResolverError(YAMLError): ... class BaseResolver: DEFAULT_SCALAR_TAG: Any DEFAULT_SEQUENCE_TAG: Any DEFAULT_MAPPING_TAG: Any yaml_implicit_resolvers: Any yaml_path_resolvers: Any resolver_exact_paths: Any resolver_prefix_paths: Any def __init__(self) -> None: ... @classmethod def add_implicit_resolver(cls, tag, regexp, first): ... @classmethod def add_path_resolver(cls, tag, path, kind=...): ... def descend_resolver(self, current_node, current_index): ... def ascend_resolver(self): ... def check_resolver_prefix(self, depth, path, kind, current_node, current_index): ... def resolve(self, kind, value, implicit): ... class Resolver(BaseResolver): ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/scanner.pyi0000644€tŠÔÚ€2›s®0000000676413576752252030133 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import MarkedYAMLError class ScannerError(MarkedYAMLError): ... class SimpleKey: token_number: Any required: Any index: Any line: Any column: Any mark: Any def __init__(self, token_number, required, index, line, column, mark) -> None: ... class Scanner: done: Any flow_level: Any tokens: Any tokens_taken: Any indent: Any indents: Any allow_simple_key: Any possible_simple_keys: Any def __init__(self) -> None: ... def check_token(self, *choices): ... def peek_token(self): ... def get_token(self): ... def need_more_tokens(self): ... def fetch_more_tokens(self): ... def next_possible_simple_key(self): ... 
def stale_possible_simple_keys(self): ... def save_possible_simple_key(self): ... def remove_possible_simple_key(self): ... def unwind_indent(self, column): ... def add_indent(self, column): ... def fetch_stream_start(self): ... def fetch_stream_end(self): ... def fetch_directive(self): ... def fetch_document_start(self): ... def fetch_document_end(self): ... def fetch_document_indicator(self, TokenClass): ... def fetch_flow_sequence_start(self): ... def fetch_flow_mapping_start(self): ... def fetch_flow_collection_start(self, TokenClass): ... def fetch_flow_sequence_end(self): ... def fetch_flow_mapping_end(self): ... def fetch_flow_collection_end(self, TokenClass): ... def fetch_flow_entry(self): ... def fetch_block_entry(self): ... def fetch_key(self): ... def fetch_value(self): ... def fetch_alias(self): ... def fetch_anchor(self): ... def fetch_tag(self): ... def fetch_literal(self): ... def fetch_folded(self): ... def fetch_block_scalar(self, style): ... def fetch_single(self): ... def fetch_double(self): ... def fetch_flow_scalar(self, style): ... def fetch_plain(self): ... def check_directive(self): ... def check_document_start(self): ... def check_document_end(self): ... def check_block_entry(self): ... def check_key(self): ... def check_value(self): ... def check_plain(self): ... def scan_to_next_token(self): ... def scan_directive(self): ... def scan_directive_name(self, start_mark): ... def scan_yaml_directive_value(self, start_mark): ... def scan_yaml_directive_number(self, start_mark): ... def scan_tag_directive_value(self, start_mark): ... def scan_tag_directive_handle(self, start_mark): ... def scan_tag_directive_prefix(self, start_mark): ... def scan_directive_ignored_line(self, start_mark): ... def scan_anchor(self, TokenClass): ... def scan_tag(self): ... def scan_block_scalar(self, style): ... def scan_block_scalar_indicators(self, start_mark): ... def scan_block_scalar_ignored_line(self, start_mark): ... 
def scan_block_scalar_indentation(self): ... def scan_block_scalar_breaks(self, indent): ... def scan_flow_scalar(self, style): ... ESCAPE_REPLACEMENTS: Any ESCAPE_CODES: Any def scan_flow_scalar_non_spaces(self, double, start_mark): ... def scan_flow_scalar_spaces(self, double, start_mark): ... def scan_flow_scalar_breaks(self, double, start_mark): ... def scan_plain(self): ... def scan_plain_spaces(self, indent, start_mark): ... def scan_tag_handle(self, name, start_mark): ... def scan_tag_uri(self, name, start_mark): ... def scan_uri_escapes(self, name, start_mark): ... def scan_line_break(self): ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/serializer.pyi0000644€tŠÔÚ€2›s®0000000123113576752252030633 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class SerializerError(YAMLError): ... class Serializer: ANCHOR_TEMPLATE: Any use_encoding: Any use_explicit_start: Any use_explicit_end: Any use_version: Any use_tags: Any serialized_nodes: Any anchors: Any last_anchor_id: Any closed: Any def __init__(self, encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ... def open(self): ... def close(self): ... def serialize(self, node): ... def anchor_node(self, node): ... def generate_anchor(self, node): ... def serialize_node(self, node, parent, index): ... mypy-0.761/mypy/typeshed/third_party/2and3/yaml/tokens.pyi0000644€tŠÔÚ€2›s®0000000340013576752252027765 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Token: start_mark: Any end_mark: Any def __init__(self, start_mark, end_mark) -> None: ... class DirectiveToken(Token): id: Any name: Any value: Any start_mark: Any end_mark: Any def __init__(self, name, value, start_mark, end_mark) -> None: ... 
class DocumentStartToken(Token): id: Any class DocumentEndToken(Token): id: Any class StreamStartToken(Token): id: Any start_mark: Any end_mark: Any encoding: Any def __init__(self, start_mark=..., end_mark=..., encoding=...) -> None: ... class StreamEndToken(Token): id: Any class BlockSequenceStartToken(Token): id: Any class BlockMappingStartToken(Token): id: Any class BlockEndToken(Token): id: Any class FlowSequenceStartToken(Token): id: Any class FlowMappingStartToken(Token): id: Any class FlowSequenceEndToken(Token): id: Any class FlowMappingEndToken(Token): id: Any class KeyToken(Token): id: Any class ValueToken(Token): id: Any class BlockEntryToken(Token): id: Any class FlowEntryToken(Token): id: Any class AliasToken(Token): id: Any value: Any start_mark: Any end_mark: Any def __init__(self, value, start_mark, end_mark) -> None: ... class AnchorToken(Token): id: Any value: Any start_mark: Any end_mark: Any def __init__(self, value, start_mark, end_mark) -> None: ... class TagToken(Token): id: Any value: Any start_mark: Any end_mark: Any def __init__(self, value, start_mark, end_mark) -> None: ... class ScalarToken(Token): id: Any value: Any plain: Any start_mark: Any end_mark: Any style: Any def __init__(self, value, plain, start_mark, end_mark, style=...) -> None: ... mypy-0.761/mypy/typeshed/third_party/3/0000755€tŠÔÚ€2›s®0000000000013576752267024241 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/contextvars.pyi0000644€tŠÔÚ€2›s®0000000214313576752252027336 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, ClassVar, Generic, Iterator, Mapping, TypeVar _T = TypeVar('_T') class ContextVar(Generic[_T]): def __init__(self, name: str, *, default: _T = ...) -> None: ... @property def name(self) -> str: ... def get(self, default: _T = ...) -> _T: ... def set(self, value: _T) -> Token[_T]: ... def reset(self, token: Token[_T]) -> None: ... 
class Token(Generic[_T]): @property def var(self) -> ContextVar[_T]: ... @property def old_value(self) -> Any: ... # returns either _T or MISSING, but that's hard to express MISSING: ClassVar[object] def copy_context() -> Context: ... # It doesn't make sense to make this generic, because for most Contexts each ContextVar will have # a different value. class Context(Mapping[ContextVar[Any], Any]): def __init__(self) -> None: ... def run(self, callable: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: ... def copy(self) -> Context: ... def __getitem__(self, key: ContextVar[Any]) -> Any: ... def __iter__(self) -> Iterator[ContextVar[Any]]: ... def __len__(self) -> int: ... mypy-0.761/mypy/typeshed/third_party/3/dataclasses.pyi0000644€tŠÔÚ€2›s®0000000450313576752252027247 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, Any, Callable, Dict, Generic, Iterable, List, Mapping, Optional, Tuple, Type, TypeVar, Union _T = TypeVar('_T') class _MISSING_TYPE: ... MISSING: _MISSING_TYPE @overload def asdict(obj: Any) -> Dict[str, Any]: ... @overload def asdict(obj: Any, *, dict_factory: Callable[[List[Tuple[str, Any]]], _T]) -> _T: ... @overload def astuple(obj: Any) -> Tuple[Any, ...]: ... @overload def astuple(obj: Any, *, tuple_factory: Callable[[List[Any]], _T]) -> _T: ... @overload def dataclass(_cls: Type[_T]) -> Type[_T]: ... @overload def dataclass(*, init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., unsafe_hash: bool = ..., frozen: bool = ...) -> Callable[[Type[_T]], Type[_T]]: ... class Field(Generic[_T]): name: str type: Type[_T] default: _T default_factory: Callable[[], _T] repr: bool hash: Optional[bool] init: bool compare: bool metadata: Mapping[str, Any] # NOTE: Actual return type is 'Field[_T]', but we want to help type checkers # to understand the magic that happens at runtime. @overload # `default` and `default_factory` are optional and mutually exclusive. 
def field(*, default: _T, init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> _T: ... @overload def field(*, default_factory: Callable[[], _T], init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> _T: ... @overload def field(*, init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> Any: ... def fields(class_or_instance: Any) -> Tuple[Field[Any], ...]: ... def is_dataclass(obj: Any) -> bool: ... class FrozenInstanceError(AttributeError): ... class InitVar(Generic[_T]): ... def make_dataclass(cls_name: str, fields: Iterable[Union[str, Tuple[str, type], Tuple[str, type, Field[Any]]]], *, bases: Tuple[type, ...] = ..., namespace: Optional[Dict[str, Any]] = ..., init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., hash: bool = ..., frozen: bool = ...): ... def replace(obj: _T, **changes: Any) -> _T: ... mypy-0.761/mypy/typeshed/third_party/3/docutils/0000755€tŠÔÚ€2›s®0000000000013576752267026067 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/docutils/__init__.pyi0000644€tŠÔÚ€2›s®0000000007213576752252030342 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def __getattr__(name) -> Any: ... mypy-0.761/mypy/typeshed/third_party/3/docutils/examples.pyi0000644€tŠÔÚ€2›s®0000000011313576752252030415 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any html_parts: Any def __getattr__(name) -> Any: ... mypy-0.761/mypy/typeshed/third_party/3/docutils/nodes.pyi0000644€tŠÔÚ€2›s®0000000040513576752252027713 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List class reference: def __init__(self, rawsource: str = ..., text: str = ..., *children: List[Any], **attributes) -> None: ... def __getattr__(name) -> Any: ... 
mypy-0.761/mypy/typeshed/third_party/3/docutils/parsers/0000755€tŠÔÚ€2›s®0000000000013576752267027546 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/docutils/parsers/__init__.pyi0000644€tŠÔÚ€2›s®0000000007213576752252032021 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def __getattr__(name) -> Any: ... mypy-0.761/mypy/typeshed/third_party/3/docutils/parsers/rst/0000755€tŠÔÚ€2›s®0000000000013576752267030356 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/docutils/parsers/rst/__init__.pyi0000644€tŠÔÚ€2›s®0000000007213576752252032631 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def __getattr__(name) -> Any: ... mypy-0.761/mypy/typeshed/third_party/3/docutils/parsers/rst/nodes.pyi0000644€tŠÔÚ€2›s®0000000007213576752252032202 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def __getattr__(name) -> Any: ... mypy-0.761/mypy/typeshed/third_party/3/docutils/parsers/rst/roles.pyi0000644€tŠÔÚ€2›s®0000000064313576752252032222 0ustar jukkaDROPBOX\Domain Users00000000000000import docutils.nodes import docutils.parsers.rst.states from typing import Callable, Any, List, Dict, Tuple _RoleFn = Callable[ [str, str, str, int, docutils.parsers.rst.states.Inliner, Dict[str, Any], List[str]], Tuple[List[docutils.nodes.reference], List[docutils.nodes.reference]], ] def register_local_role(name: str, role_fn: _RoleFn) -> None: ... def __getattr__(name: str) -> Any: ... # incomplete mypy-0.761/mypy/typeshed/third_party/3/docutils/parsers/rst/states.pyi0000644€tŠÔÚ€2›s®0000000020413576752252032372 0ustar jukkaDROPBOX\Domain Users00000000000000import typing from typing import Any class Inliner: def __init__(self) -> None: ... def __getattr__(name) -> Any: ... 
mypy-0.761/mypy/typeshed/third_party/3/jwt/0000755€tŠÔÚ€2›s®0000000000013576752267025045 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/jwt/__init__.pyi0000644€tŠÔÚ€2›s®0000000330313576752252027320 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Mapping, Any, Optional, Union, Dict from . import algorithms from cryptography.hazmat.primitives.asymmetric import rsa def decode(jwt: Union[str, bytes], key: Union[str, bytes, rsa.RSAPublicKey, rsa.RSAPrivateKey] = ..., verify: bool = ..., algorithms: Optional[Any] = ..., options: Optional[Mapping[Any, Any]] = ..., **kwargs: Any) -> Dict[str, Any]: ... def encode(payload: Mapping[str, Any], key: Union[str, bytes, rsa.RSAPublicKey, rsa.RSAPrivateKey], algorithm: str = ..., headers: Optional[Mapping[str, Any]] = ..., json_encoder: Optional[Any] = ...) -> bytes: ... def register_algorithm(alg_id: str, alg_obj: algorithms.Algorithm[Any]) -> None: ... def unregister_algorithm(alg_id: str) -> None: ... class PyJWTError(Exception): ... class InvalidTokenError(PyJWTError): ... class DecodeError(InvalidTokenError): ... class ExpiredSignatureError(InvalidTokenError): ... class InvalidAudienceError(InvalidTokenError): ... class InvalidIssuerError(InvalidTokenError): ... class InvalidIssuedAtError(InvalidTokenError): ... class ImmatureSignatureError(InvalidTokenError): ... class InvalidKeyError(PyJWTError): ... class InvalidAlgorithmError(InvalidTokenError): ... class MissingRequiredClaimError(InvalidTokenError): ... class InvalidSignatureError(DecodeError): ... # Compatibility aliases (deprecated) ExpiredSignature = ExpiredSignatureError InvalidAudience = InvalidAudienceError InvalidIssuer = InvalidIssuerError # These aren't actually documented, but the package # exports them in __init__.py, so we should at least # make sure that mypy doesn't raise spurious errors # if they're used. 
get_unverified_header: Any PyJWT: Any PyJWS: Any mypy-0.761/mypy/typeshed/third_party/3/jwt/algorithms.pyi0000644€tŠÔÚ€2›s®0000000546113576752252027741 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from hashlib import _Hash from typing import Any, Set, Dict, Optional, ClassVar, Union, Generic, TypeVar requires_cryptography = Set[str] def get_default_algorithms() -> Dict[str, Algorithm[Any]]: ... _K = TypeVar("_K") class Algorithm(Generic[_K]): def prepare_key(self, key: _K) -> _K: ... def sign(self, msg: bytes, key: _K) -> bytes: ... def verify(self, msg: bytes, key: _K, sig: bytes) -> bool: ... @staticmethod def to_jwk(key_obj: _K) -> str: ... @staticmethod def from_jwk(jwk: str) -> _K: ... class NoneAlgorithm(Algorithm[None]): def prepare_key(self, key: Optional[str]) -> None: ... class _HashAlg: def __call__(self, arg: Union[bytes, bytearray, memoryview] = ...) -> _Hash: ... if sys.version_info >= (3, 6): _LoadsString = Union[str, bytes, bytearray] else: _LoadsString = str class HMACAlgorithm(Algorithm[bytes]): SHA256: ClassVar[_HashAlg] SHA384: ClassVar[_HashAlg] SHA512: ClassVar[_HashAlg] hash_alg: _HashAlg def __init__(self, _HashAlg) -> None: ... def prepare_key(self, key: Union[str, bytes]) -> bytes: ... @staticmethod def to_jwk(key_obj: Union[str, bytes]) -> str: ... @staticmethod def from_jwk(jwk: _LoadsString) -> bytes: ... # Only defined if cryptography is installed. Types should be tightened when # cryptography gets type hints. # See https://github.com/python/typeshed/issues/2542 class RSAAlgorithm(Algorithm[Any]): SHA256: ClassVar[Any] SHA384: ClassVar[Any] SHA512: ClassVar[Any] hash_alg: Any def __init__(self, hash_alg: Any) -> None: ... def prepare_key(self, key: Any) -> Any: ... @staticmethod def to_jwk(key_obj: Any) -> str: ... @staticmethod def from_jwk(jwk: _LoadsString) -> Any: ... def sign(self, msg: bytes, key: Any) -> bytes: ... def verify(self, msg: bytes, key: Any, sig: bytes) -> bool: ... 
# Only defined if cryptography is installed. Types should be tightened when # cryptography gets type hints. # See https://github.com/python/typeshed/issues/2542 class ECAlgorithm(Algorithm[Any]): SHA256: ClassVar[Any] SHA384: ClassVar[Any] SHA512: ClassVar[Any] hash_alg: Any def __init__(self, hash_alg: Any) -> None: ... def prepare_key(self, key: Any) -> Any: ... @staticmethod def to_jwk(key_obj: Any) -> str: ... @staticmethod def from_jwk(jwk: _LoadsString) -> Any: ... def sign(self, msg: bytes, key: Any) -> bytes: ... def verify(self, msg: bytes, key: Any, sig: bytes) -> bool: ... # Only defined if cryptography is installed. Types should be tightened when # cryptography gets type hints. # See https://github.com/python/typeshed/issues/2542 class RSAPSSAlgorithm(RSAAlgorithm): def sign(self, msg: bytes, key: Any) -> bytes: ... def verify(self, msg: bytes, key: Any, sig: bytes) -> bool: ... mypy-0.761/mypy/typeshed/third_party/3/jwt/contrib/0000755€tŠÔÚ€2›s®0000000000013576752267026505 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/jwt/contrib/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752252030747 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/jwt/contrib/algorithms/0000755€tŠÔÚ€2›s®0000000000013576752267030656 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/jwt/contrib/algorithms/__init__.pyi0000644€tŠÔÚ€2›s®0000000004613576752252033132 0ustar jukkaDROPBOX\Domain Users00000000000000from hashlib import _Hash as _HashAlg mypy-0.761/mypy/typeshed/third_party/3/jwt/contrib/algorithms/py_ecdsa.pyi0000644€tŠÔÚ€2›s®0000000035713576752252033167 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jwt.algorithms import Algorithm from . import _HashAlg class ECAlgorithm(Algorithm[Any]): SHA256: _HashAlg SHA384: _HashAlg SHA512: _HashAlg def __init__(self, hash_alg: _HashAlg) -> None: ... 
mypy-0.761/mypy/typeshed/third_party/3/jwt/contrib/algorithms/pycrypto.pyi0000644€tŠÔÚ€2›s®0000000036013576752252033263 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jwt.algorithms import Algorithm from . import _HashAlg class RSAAlgorithm(Algorithm[Any]): SHA256: _HashAlg SHA384: _HashAlg SHA512: _HashAlg def __init__(self, hash_alg: _HashAlg) -> None: ... mypy-0.761/mypy/typeshed/third_party/3/orjson.pyi0000644€tŠÔÚ€2›s®0000000075213576752252026274 0ustar jukkaDROPBOX\Domain Users00000000000000# https://github.com/ijl/orjson/blob/master/orjson.pyi from typing import Any, Callable, Optional, Union __version__ = str def dumps( __obj: Any, default: Optional[Callable[[Any], Any]] = ..., option: Optional[int] = ..., ) -> bytes: ... def loads(__obj: Union[bytes, bytearray, str]) -> Any: ... class JSONDecodeError(ValueError): ... class JSONEncodeError(TypeError): ... OPT_SERIALIZE_DATACLASS: int OPT_NAIVE_UTC: int OPT_OMIT_MICROSECONDS: int OPT_STRICT_INTEGER: int mypy-0.761/mypy/typeshed/third_party/3/pkg_resources/0000755€tŠÔÚ€2›s®0000000000013576752267027114 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/pkg_resources/__init__.pyi0000644€tŠÔÚ€2›s®0000002724113576752252031376 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pkg_resources (Python 3.4) from typing import Any, Callable, Dict, IO, Iterable, Generator, Optional, Sequence, Tuple, List, Set, Union, TypeVar, overload from abc import ABCMeta import importlib.abc import types import zipimport _T = TypeVar("_T") _NestedStr = Union[str, Iterable[Union[str, Iterable[Any]]]] _InstallerType = Callable[[Requirement], Optional[Distribution]] _EPDistType = Union[Distribution, Requirement, str] _MetadataType = Optional[IResourceProvider] _PkgReqType = Union[str, Requirement] _DistFinderType = Callable[[str, _Importer, bool], Generator[Distribution, None, None]] _NSHandlerType = Callable[[_Importer, str, str, types.ModuleType], str] def 
declare_namespace(name: str) -> None: ... def fixup_namespace_packages(path_item: str) -> None: ... class WorkingSet: entries: List[str] def __init__(self, entries: Optional[Iterable[str]] = ...) -> None: ... def require(self, *requirements: _NestedStr) -> Sequence[Distribution]: ... def run_script(self, requires: str, script_name: str) -> None: ... def iter_entry_points(self, group: str, name: Optional[str] = ...) -> Generator[EntryPoint, None, None]: ... def add_entry(self, entry: str) -> None: ... def __contains__(self, dist: Distribution) -> bool: ... def __iter__(self) -> Generator[Distribution, None, None]: ... def find(self, req: Requirement) -> Optional[Distribution]: ... def resolve( self, requirements: Sequence[Requirement], env: Optional[Environment] = ..., installer: Optional[_InstallerType] = ... ) -> List[Distribution]: ... def add(self, dist: Distribution, entry: Optional[str] = ..., insert: bool = ..., replace: bool = ...) -> None: ... def subscribe(self, callback: Callable[[Distribution], None]) -> None: ... def find_plugins( self, plugin_env: Environment, full_env: Optional[Environment] = ..., fallback: bool = ... ) -> Tuple[List[Distribution], Dict[Distribution, Exception]]: ... working_set: WorkingSet def require(*requirements: Union[str, Sequence[str]]) -> Sequence[Distribution]: ... def run_script(requires: str, script_name: str) -> None: ... def iter_entry_points(group: str, name: Optional[str] = ...) -> Generator[EntryPoint, None, None]: ... def add_activation_listener(callback: Callable[[Distribution], None]) -> None: ... class Environment: def __init__( self, search_path: Optional[Sequence[str]] = ..., platform: Optional[str] = ..., python: Optional[str] = ... ) -> None: ... def __getitem__(self, project_name: str) -> List[Distribution]: ... def __iter__(self) -> Generator[str, None, None]: ... def add(self, dist: Distribution) -> None: ... def remove(self, dist: Distribution) -> None: ... def can_add(self, dist: Distribution) -> bool: ... 
def __add__(self, other: Union[Distribution, Environment]) -> Environment: ... def __iadd__(self, other: Union[Distribution, Environment]) -> Environment: ... @overload def best_match(self, req: Requirement, working_set: WorkingSet) -> Distribution: ... @overload def best_match(self, req: Requirement, working_set: WorkingSet, installer: Callable[[Requirement], _T] = ...) -> _T: ... @overload def obtain(self, requirement: Requirement) -> None: ... @overload def obtain(self, requirement: Requirement, installer: Callable[[Requirement], _T] = ...) -> _T: ... def scan(self, search_path: Optional[Sequence[str]] = ...) -> None: ... def parse_requirements(strs: Union[str, Iterable[str]]) -> Generator[Requirement, None, None]: ... class Requirement: unsafe_name: str project_name: str key: str extras: Tuple[str, ...] specs: List[Tuple[str, str]] # TODO: change this to Optional[packaging.markers.Marker] once we can import # packaging.markers marker: Optional[Any] @staticmethod def parse(s: Union[str, Iterable[str]]) -> Requirement: ... def __contains__(self, item: Union[Distribution, str, Tuple[str, ...]]) -> bool: ... def __eq__(self, other_requirement: Any) -> bool: ... def load_entry_point(dist: _EPDistType, group: str, name: str) -> None: ... def get_entry_info(dist: _EPDistType, group: str, name: str) -> Optional[EntryPoint]: ... @overload def get_entry_map(dist: _EPDistType) -> Dict[str, Dict[str, EntryPoint]]: ... @overload def get_entry_map(dist: _EPDistType, group: str) -> Dict[str, EntryPoint]: ... class EntryPoint: name: str module_name: str attrs: Tuple[str, ...] extras: Tuple[str, ...] dist: Optional[Distribution] def __init__( self, name: str, module_name: str, attrs: Tuple[str, ...] = ..., extras: Tuple[str, ...] = ..., dist: Optional[Distribution] = ..., ) -> None: ... @classmethod def parse(cls, src: str, dist: Optional[Distribution] = ...) -> EntryPoint: ... 
@classmethod def parse_group( cls, group: str, lines: Union[str, Sequence[str]], dist: Optional[Distribution] = ... ) -> Dict[str, EntryPoint]: ... @classmethod def parse_map( cls, data: Union[Dict[str, Union[str, Sequence[str]]], str, Sequence[str]], dist: Optional[Distribution] = ... ) -> Dict[str, EntryPoint]: ... def load(self, require: bool = ..., env: Optional[Environment] = ..., installer: Optional[_InstallerType] = ...) -> Any: ... def require(self, env: Optional[Environment] = ..., installer: Optional[_InstallerType] = ...) -> None: ... def resolve(self) -> Any: ... def find_distributions(path_item: str, only: bool = ...) -> Generator[Distribution, None, None]: ... def get_distribution(dist: Union[Requirement, str, Distribution]) -> Distribution: ... class Distribution(IResourceProvider, IMetadataProvider): PKG_INFO: str location: str project_name: str key: str extras: List[str] version: str parsed_version: Tuple[str, ...] py_version: str platform: Optional[str] precedence: int def __init__( self, location: Optional[str] = ..., metadata: Optional[str] = ..., project_name: Optional[str] = ..., version: Optional[str] = ..., py_version: str = ..., platform: Optional[str] = ..., precedence: int = ..., ) -> None: ... @classmethod def from_location( cls, location: str, basename: str, metadata: Optional[str] = ..., **kw: Union[str, None, int] ) -> Distribution: ... @classmethod def from_filename(cls, filename: str, metadata: Optional[str] = ..., **kw: Union[str, None, int]) -> Distribution: ... def activate(self, path: Optional[List[str]] = ...) -> None: ... def as_requirement(self) -> Requirement: ... def requires(self, extras: Tuple[str, ...] = ...) -> List[Requirement]: ... def clone(self, **kw: Union[str, int, None]) -> Requirement: ... def egg_name(self) -> str: ... def __cmp__(self, other: Any) -> bool: ... def get_entry_info(self, group: str, name: str) -> Optional[EntryPoint]: ... @overload def get_entry_map(self) -> Dict[str, Dict[str, EntryPoint]]: ... 
@overload def get_entry_map(self, group: str) -> Dict[str, EntryPoint]: ... def load_entry_point(self, group: str, name: str) -> None: ... EGG_DIST: int BINARY_DIST: int SOURCE_DIST: int CHECKOUT_DIST: int DEVELOP_DIST: int def resource_exists(package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... def resource_stream(package_or_requirement: _PkgReqType, resource_name: str) -> IO[bytes]: ... def resource_string(package_or_requirement: _PkgReqType, resource_name: str) -> bytes: ... def resource_isdir(package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... def resource_listdir(package_or_requirement: _PkgReqType, resource_name: str) -> List[str]: ... def resource_filename(package_or_requirement: _PkgReqType, resource_name: str) -> str: ... def set_extraction_path(path: str) -> None: ... def cleanup_resources(force: bool = ...) -> List[str]: ... class IResourceManager: def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str) -> IO[bytes]: ... def resource_string(self, package_or_requirement: _PkgReqType, resource_name: str) -> bytes: ... def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str) -> List[str]: ... def resource_filename(self, package_or_requirement: _PkgReqType, resource_name: str) -> str: ... def set_extraction_path(self, path: str) -> None: ... def cleanup_resources(self, force: bool = ...) -> List[str]: ... def get_cache_path(self, archive_name: str, names: Tuple[str, ...] = ...) -> str: ... def extraction_error(self) -> None: ... def postprocess(self, tempname: str, filename: str) -> None: ... @overload def get_provider(package_or_requirement: str) -> IResourceProvider: ... @overload def get_provider(package_or_requirement: Requirement) -> Distribution: ... 
class IMetadataProvider: def has_metadata(self, name: str) -> bool: ... def metadata_isdir(self, name: str) -> bool: ... def metadata_listdir(self, name: str) -> List[str]: ... def get_metadata(self, name: str) -> str: ... def get_metadata_lines(self, name: str) -> Generator[str, None, None]: ... def run_script(self, script_name: str, namespace: Dict[str, Any]) -> None: ... class ResolutionError(Exception): ... class DistributionNotFound(ResolutionError): ... class VersionConflict(ResolutionError): @property def dist(self) -> Any: ... @property def req(self) -> Any: ... def report(self) -> str: ... def with_context(self, required_by: Dict[str, Any]) -> VersionConflict: ... class ContextualVersionConflict(VersionConflict): @property def required_by(self) -> Set[Any]: ... class UnknownExtra(ResolutionError): ... class ExtractionError(Exception): manager: IResourceManager cache_path: str original_error: Exception class _Importer(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader, metaclass=ABCMeta): ... def register_finder(importer_type: type, distribution_finder: _DistFinderType) -> None: ... def register_loader_type(loader_type: type, provider_factory: Callable[[types.ModuleType], IResourceProvider]) -> None: ... def register_namespace_handler(importer_type: type, namespace_handler: _NSHandlerType) -> None: ... class IResourceProvider(IMetadataProvider): ... class NullProvider: ... class EggProvider(NullProvider): ... class DefaultProvider(EggProvider): ... class PathMetadata(DefaultProvider, IResourceProvider): def __init__(self, path: str, egg_info: str) -> None: ... class ZipProvider(EggProvider): ... class EggMetadata(ZipProvider, IResourceProvider): def __init__(self, zipimporter: zipimport.zipimporter) -> None: ... class EmptyProvider(NullProvider): ... empty_provider: EmptyProvider class FileMetadata(EmptyProvider, IResourceProvider): def __init__(self, path_to_pkg_info: str) -> None: ... def parse_version(v: str) -> Tuple[str, ...]: ... 
def yield_lines(strs: _NestedStr) -> Generator[str, None, None]: ... def split_sections(strs: _NestedStr) -> Generator[Tuple[Optional[str], str], None, None]: ... def safe_name(name: str) -> str: ... def safe_version(version: str) -> str: ... def safe_extra(extra: str) -> str: ... def to_filename(name_or_version: str) -> str: ... def get_build_platform() -> str: ... def get_platform() -> str: ... def get_supported_platform() -> str: ... def compatible_platforms(provided: Optional[str], required: Optional[str]) -> bool: ... def get_default_cache() -> str: ... def get_importer(path_item: str) -> _Importer: ... def ensure_directory(path: str) -> None: ... def normalize_path(filename: str) -> str: ... mypy-0.761/mypy/typeshed/third_party/3/pkg_resources/py31compat.pyi0000644€tŠÔÚ€2›s®0000000013313576752252031626 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Text import os import sys needs_makedirs: bool makedirs = os.makedirs mypy-0.761/mypy/typeshed/third_party/3/six/0000755€tŠÔÚ€2›s®0000000000013576752267025044 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/six/__init__.pyi0000644€tŠÔÚ€2›s®0000000671713576752252027333 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six (Python 3.5) from __future__ import print_function from typing import ( Any, AnyStr, Callable, Dict, ItemsView, Iterable, KeysView, Mapping, NoReturn, Optional, Pattern, Text, Tuple, Type, TypeVar, Union, ValuesView, overload, ) import types import typing import unittest # Exports from io import StringIO as StringIO, BytesIO as BytesIO from builtins import next as next from functools import wraps as wraps from . 
import moves _T = TypeVar('_T') _K = TypeVar('_K') _V = TypeVar('_V') # TODO make constant, then move this stub to 2and3 # https://github.com/python/typeshed/issues/17 PY2 = False PY3 = True PY34: bool string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE: int # def add_move # def remove_move def callable(obj: object) -> bool: ... def get_unbound_function(unbound: types.FunctionType) -> types.FunctionType: ... def create_bound_method(func: types.FunctionType, obj: object) -> types.MethodType: ... def create_unbound_method(func: types.FunctionType, cls: type) -> types.FunctionType: ... Iterator = object def get_method_function(meth: types.MethodType) -> types.FunctionType: ... def get_method_self(meth: types.MethodType) -> Optional[object]: ... def get_function_closure(fun: types.FunctionType) -> Optional[Tuple[types._Cell, ...]]: ... def get_function_code(fun: types.FunctionType) -> types.CodeType: ... def get_function_defaults(fun: types.FunctionType) -> Optional[Tuple[Any, ...]]: ... def get_function_globals(fun: types.FunctionType) -> Dict[str, Any]: ... def iterkeys(d: Mapping[_K, _V]) -> typing.Iterator[_K]: ... def itervalues(d: Mapping[_K, _V]) -> typing.Iterator[_V]: ... def iteritems(d: Mapping[_K, _V]) -> typing.Iterator[Tuple[_K, _V]]: ... # def iterlists def viewkeys(d: Mapping[_K, _V]) -> KeysView[_K]: ... def viewvalues(d: Mapping[_K, _V]) -> ValuesView[_V]: ... def viewitems(d: Mapping[_K, _V]) -> ItemsView[_K, _V]: ... def b(s: str) -> binary_type: ... def u(s: str) -> text_type: ... unichr = chr def int2byte(i: int) -> bytes: ... def byte2int(bs: binary_type) -> int: ... def indexbytes(buf: binary_type, i: int) -> int: ... def iterbytes(buf: binary_type) -> typing.Iterator[int]: ... def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: Optional[str] = ...) -> None: ... @overload def assertRaisesRegex(self: unittest.TestCase, msg: Optional[str] = ...) 
-> Any: ... @overload def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ... def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: Optional[str] = ...) -> None: ... exec_ = exec def reraise(tp: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] = ...) -> NoReturn: ... def raise_from(value: Union[BaseException, Type[BaseException]], from_value: Optional[BaseException]) -> NoReturn: ... print_ = print def with_metaclass(meta: type, *bases: type) -> type: ... def add_metaclass(metaclass: type) -> Callable[[_T], _T]: ... def ensure_binary(s: Union[bytes, Text], encoding: str = ..., errors: str = ...) -> bytes: ... def ensure_str(s: Union[bytes, Text], encoding: str = ..., errors: str = ...) -> str: ... def ensure_text(s: Union[bytes, Text], encoding: str = ..., errors: str = ...) -> Text: ... def python_2_unicode_compatible(klass: _T) -> _T: ... mypy-0.761/mypy/typeshed/third_party/3/six/moves/0000755€tŠÔÚ€2›s®0000000000013576752267026175 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/six/moves/BaseHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000003213576752252031446 0ustar jukkaDROPBOX\Domain Users00000000000000from http.server import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/CGIHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000003213576752252031176 0ustar jukkaDROPBOX\Domain Users00000000000000from http.server import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/SimpleHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000003213576752252032025 0ustar jukkaDROPBOX\Domain Users00000000000000from http.server import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/__init__.pyi0000644€tŠÔÚ€2›s®0000000437613576752252030463 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves # # Note: Commented out items means they weren't implemented at the time. 
# Uncomment them when the modules have been added to the typeshed. import sys from io import StringIO as cStringIO from builtins import filter as filter from itertools import filterfalse as filterfalse from builtins import input as input from sys import intern as intern from builtins import map as map from os import getcwd as getcwd from os import getcwdb as getcwdb from builtins import range as range from functools import reduce as reduce from shlex import quote as shlex_quote from io import StringIO as StringIO from collections import UserDict as UserDict from collections import UserList as UserList from collections import UserString as UserString from builtins import range as xrange from builtins import zip as zip from itertools import zip_longest as zip_longest from . import builtins from . import configparser # import copyreg as copyreg # import dbm.gnu as dbm_gnu from . import _dummy_thread from . import http_cookiejar from . import http_cookies from . import html_entities from . import html_parser from . import http_client from . import email_mime_multipart from . import email_mime_nonmultipart from . import email_mime_text from . import email_mime_base from . import BaseHTTPServer from . import CGIHTTPServer from . import SimpleHTTPServer from . import cPickle from . import queue from . import reprlib from . import socketserver from . import _thread from . import tkinter from . import tkinter_dialog from . import tkinter_filedialog # import tkinter.scrolledtext as tkinter_scrolledtext # import tkinter.simpledialog as tkinter_simpledialog # import tkinter.tix as tkinter_tix from . import tkinter_ttk from . import tkinter_constants # import tkinter.dnd as tkinter_dnd # import tkinter.colorchooser as tkinter_colorchooser from . import tkinter_commondialog from . import tkinter_tkfiledialog # import tkinter.font as tkinter_font # import tkinter.messagebox as tkinter_messagebox # import tkinter.simpledialog as tkinter_tksimpledialog from . 
import urllib_parse from . import urllib_error from . import urllib from . import urllib_robotparser # import xmlrpc.client as xmlrpc_client # import xmlrpc.server as xmlrpc_server from importlib import reload as reload_module mypy-0.761/mypy/typeshed/third_party/3/six/moves/_dummy_thread.pyi0000644€tŠÔÚ€2›s®0000000003413576752252031530 0ustar jukkaDROPBOX\Domain Users00000000000000from _dummy_thread import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/_thread.pyi0000644€tŠÔÚ€2›s®0000000002613576752252030316 0ustar jukkaDROPBOX\Domain Users00000000000000from _thread import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/builtins.pyi0000644€tŠÔÚ€2›s®0000000002713576752252030542 0ustar jukkaDROPBOX\Domain Users00000000000000from builtins import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/cPickle.pyi0000644€tŠÔÚ€2›s®0000000002513576752252030261 0ustar jukkaDROPBOX\Domain Users00000000000000from pickle import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/configparser.pyi0000644€tŠÔÚ€2›s®0000000003313576752252031370 0ustar jukkaDROPBOX\Domain Users00000000000000from configparser import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/email_mime_base.pyi0000644€tŠÔÚ€2›s®0000000003613576752252032001 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.base import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/email_mime_multipart.pyi0000644€tŠÔÚ€2›s®0000000004313576752252033106 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.multipart import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/email_mime_nonmultipart.pyi0000644€tŠÔÚ€2›s®0000000004613576752252033624 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.nonmultipart import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/email_mime_text.pyi0000644€tŠÔÚ€2›s®0000000003613576752252032053 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.text import * 
mypy-0.761/mypy/typeshed/third_party/3/six/moves/html_entities.pyi0000644€tŠÔÚ€2›s®0000000003413576752252031557 0ustar jukkaDROPBOX\Domain Users00000000000000from html.entities import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/html_parser.pyi0000644€tŠÔÚ€2›s®0000000003213576752252031225 0ustar jukkaDROPBOX\Domain Users00000000000000from html.parser import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/http_client.pyi0000644€tŠÔÚ€2›s®0000000003213576752252031222 0ustar jukkaDROPBOX\Domain Users00000000000000from http.client import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/http_cookiejar.pyi0000644€tŠÔÚ€2›s®0000000003513576752252031715 0ustar jukkaDROPBOX\Domain Users00000000000000from http.cookiejar import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/http_cookies.pyi0000644€tŠÔÚ€2›s®0000000003313576752252031401 0ustar jukkaDROPBOX\Domain Users00000000000000from http.cookies import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/queue.pyi0000644€tŠÔÚ€2›s®0000000002413576752252030032 0ustar jukkaDROPBOX\Domain Users00000000000000from queue import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/reprlib.pyi0000644€tŠÔÚ€2›s®0000000002613576752252030347 0ustar jukkaDROPBOX\Domain Users00000000000000from reprlib import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/socketserver.pyi0000644€tŠÔÚ€2›s®0000000003313576752252031425 0ustar jukkaDROPBOX\Domain Users00000000000000from socketserver import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/tkinter.pyi0000644€tŠÔÚ€2›s®0000000002613576752252030370 0ustar jukkaDROPBOX\Domain Users00000000000000from tkinter import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/tkinter_commondialog.pyi0000644€tŠÔÚ€2›s®0000000004313576752252033117 0ustar jukkaDROPBOX\Domain Users00000000000000from tkinter.commondialog import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/tkinter_constants.pyi0000644€tŠÔÚ€2›s®0000000004013576752252032460 0ustar jukkaDROPBOX\Domain Users00000000000000from 
tkinter.constants import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/tkinter_dialog.pyi0000644€tŠÔÚ€2›s®0000000003513576752252031707 0ustar jukkaDROPBOX\Domain Users00000000000000from tkinter.dialog import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/tkinter_filedialog.pyi0000644€tŠÔÚ€2›s®0000000004113576752252032544 0ustar jukkaDROPBOX\Domain Users00000000000000from tkinter.filedialog import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/tkinter_tkfiledialog.pyi0000644€tŠÔÚ€2›s®0000000004113576752252033103 0ustar jukkaDROPBOX\Domain Users00000000000000from tkinter.filedialog import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/tkinter_ttk.pyi0000644€tŠÔÚ€2›s®0000000003213576752252031247 0ustar jukkaDROPBOX\Domain Users00000000000000from tkinter.ttk import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib/0000755€tŠÔÚ€2›s®0000000000013576752267027466 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib/__init__.pyi0000644€tŠÔÚ€2›s®0000000033113576752252031737 0ustar jukkaDROPBOX\Domain Users00000000000000import six.moves.urllib.error as error import six.moves.urllib.parse as parse import six.moves.urllib.request as request import six.moves.urllib.response as response import six.moves.urllib.robotparser as robotparser mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib/error.pyi0000644€tŠÔÚ€2›s®0000000024413576752252031334 0ustar jukkaDROPBOX\Domain Users00000000000000from urllib.error import URLError as URLError from urllib.error import HTTPError as HTTPError from urllib.error import ContentTooShortError as ContentTooShortError mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib/parse.pyi0000644€tŠÔÚ€2›s®0000000253413576752252031321 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.parse # # Note: Commented out items means they weren't implemented at the time. # Uncomment them when the modules have been added to the typeshed. 
from urllib.parse import ParseResult as ParseResult from urllib.parse import SplitResult as SplitResult from urllib.parse import parse_qs as parse_qs from urllib.parse import parse_qsl as parse_qsl from urllib.parse import urldefrag as urldefrag from urllib.parse import urljoin as urljoin from urllib.parse import urlparse as urlparse from urllib.parse import urlsplit as urlsplit from urllib.parse import urlunparse as urlunparse from urllib.parse import urlunsplit as urlunsplit from urllib.parse import quote as quote from urllib.parse import quote_plus as quote_plus from urllib.parse import unquote as unquote from urllib.parse import unquote_plus as unquote_plus from urllib.parse import unquote_to_bytes as unquote_to_bytes from urllib.parse import urlencode as urlencode # from urllib.parse import splitquery as splitquery # from urllib.parse import splittag as splittag # from urllib.parse import splituser as splituser from urllib.parse import uses_fragment as uses_fragment from urllib.parse import uses_netloc as uses_netloc from urllib.parse import uses_params as uses_params from urllib.parse import uses_query as uses_query from urllib.parse import uses_relative as uses_relative mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib/request.pyi0000644€tŠÔÚ€2›s®0000000446413576752252031703 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.request # # Note: Commented out items means they weren't implemented at the time. # Uncomment them when the modules have been added to the typeshed. 
from urllib.request import urlopen as urlopen from urllib.request import install_opener as install_opener from urllib.request import build_opener as build_opener from urllib.request import pathname2url as pathname2url from urllib.request import url2pathname as url2pathname from urllib.request import getproxies as getproxies from urllib.request import Request as Request from urllib.request import OpenerDirector as OpenerDirector from urllib.request import HTTPDefaultErrorHandler as HTTPDefaultErrorHandler from urllib.request import HTTPRedirectHandler as HTTPRedirectHandler from urllib.request import HTTPCookieProcessor as HTTPCookieProcessor from urllib.request import ProxyHandler as ProxyHandler from urllib.request import BaseHandler as BaseHandler from urllib.request import HTTPPasswordMgr as HTTPPasswordMgr from urllib.request import HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm from urllib.request import AbstractBasicAuthHandler as AbstractBasicAuthHandler from urllib.request import HTTPBasicAuthHandler as HTTPBasicAuthHandler from urllib.request import ProxyBasicAuthHandler as ProxyBasicAuthHandler from urllib.request import AbstractDigestAuthHandler as AbstractDigestAuthHandler from urllib.request import HTTPDigestAuthHandler as HTTPDigestAuthHandler from urllib.request import ProxyDigestAuthHandler as ProxyDigestAuthHandler from urllib.request import HTTPHandler as HTTPHandler from urllib.request import HTTPSHandler as HTTPSHandler from urllib.request import FileHandler as FileHandler from urllib.request import FTPHandler as FTPHandler from urllib.request import CacheFTPHandler as CacheFTPHandler from urllib.request import UnknownHandler as UnknownHandler from urllib.request import HTTPErrorProcessor as HTTPErrorProcessor from urllib.request import urlretrieve as urlretrieve from urllib.request import urlcleanup as urlcleanup from urllib.request import URLopener as URLopener from urllib.request import FancyURLopener as FancyURLopener # 
from urllib.request import proxy_bypass as proxy_bypass from urllib.request import parse_http_list as parse_http_list from urllib.request import parse_keqv_list as parse_keqv_list mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib/response.pyi0000644€tŠÔÚ€2›s®0000000060513576752252032042 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.response # # Note: Commented out items means they weren't implemented at the time. # Uncomment them when the modules have been added to the typeshed. # from urllib.response import addbase as addbase # from urllib.response import addclosehook as addclosehook # from urllib.response import addinfo as addinfo from urllib.response import addinfourl as addinfourl mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib/robotparser.pyi0000644€tŠÔÚ€2›s®0000000010213576752252032536 0ustar jukkaDROPBOX\Domain Users00000000000000from urllib.robotparser import RobotFileParser as RobotFileParser mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib_error.pyi0000644€tŠÔÚ€2›s®0000000003313576752252031410 0ustar jukkaDROPBOX\Domain Users00000000000000from urllib.error import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib_parse.pyi0000644€tŠÔÚ€2›s®0000000003313576752252031371 0ustar jukkaDROPBOX\Domain Users00000000000000from urllib.parse import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib_request.pyi0000644€tŠÔÚ€2›s®0000000003613576752252031752 0ustar jukkaDROPBOX\Domain Users00000000000000from .urllib.request import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib_response.pyi0000644€tŠÔÚ€2›s®0000000003713576752252032121 0ustar jukkaDROPBOX\Domain Users00000000000000from .urllib.response import * mypy-0.761/mypy/typeshed/third_party/3/six/moves/urllib_robotparser.pyi0000644€tŠÔÚ€2›s®0000000004113576752252032620 0ustar jukkaDROPBOX\Domain Users00000000000000from urllib.robotparser import * mypy-0.761/mypy/typeshed/third_party/3/typed_ast/0000755€tŠÔÚ€2›s®0000000000013576752267026235 
5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/typeshed/third_party/3/typed_ast/__init__.pyi0000644€tŠÔÚ€2›s®0000000017413576752252030513 0ustar jukkaDROPBOX\Domain Users00000000000000# This module is a fork of the CPython 2 and 3 ast modules with PEP 484 support. # See: https://github.com/python/typed_ast mypy-0.761/mypy/typeshed/third_party/3/typed_ast/ast27.pyi0000644€tŠÔÚ€2›s®0000001551113576752252027715 0ustar jukkaDROPBOX\Domain Users00000000000000import typing from typing import Any, Optional, Union, Generic, Iterator class NodeVisitor(): def visit(self, node: AST) -> Any: ... def generic_visit(self, node: AST) -> None: ... class NodeTransformer(NodeVisitor): def generic_visit(self, node: AST) -> None: ... def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ... def copy_location(new_node: AST, old_node: AST) -> AST: ... def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: AST) -> AST: ... def get_docstring(node: AST, clean: bool = ...) -> Optional[bytes]: ... def increment_lineno(node: AST, n: int = ...) -> AST: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ... def literal_eval(node_or_string: Union[str, AST]) -> Any: ... def walk(node: AST) -> Iterator[AST]: ... PyCF_ONLY_AST: int # ast classes identifier = str class AST: _attributes: typing.Tuple[str, ...] _fields: typing.Tuple[str, ...] def __init__(self, *args, **kwargs) -> None: ... class mod(AST): ... 
class Module(mod): body: typing.List[stmt] type_ignores: typing.List[TypeIgnore] class Interactive(mod): body: typing.List[stmt] class Expression(mod): body: expr class FunctionType(mod): argtypes: typing.List[expr] returns: expr class Suite(mod): body: typing.List[stmt] class stmt(AST): lineno: int col_offset: int class FunctionDef(stmt): name: identifier args: arguments body: typing.List[stmt] decorator_list: typing.List[expr] type_comment: Optional[str] class ClassDef(stmt): name: identifier bases: typing.List[expr] body: typing.List[stmt] decorator_list: typing.List[expr] class Return(stmt): value: Optional[expr] class Delete(stmt): targets: typing.List[expr] class Assign(stmt): targets: typing.List[expr] value: expr type_comment: Optional[str] class AugAssign(stmt): target: expr op: operator value: expr class Print(stmt): dest: Optional[expr] values: typing.List[expr] nl: bool class For(stmt): target: expr iter: expr body: typing.List[stmt] orelse: typing.List[stmt] type_comment: Optional[str] class While(stmt): test: expr body: typing.List[stmt] orelse: typing.List[stmt] class If(stmt): test: expr body: typing.List[stmt] orelse: typing.List[stmt] class With(stmt): context_expr: expr optional_vars: Optional[expr] body: typing.List[stmt] type_comment: Optional[str] class Raise(stmt): type: Optional[expr] inst: Optional[expr] tback: Optional[expr] class TryExcept(stmt): body: typing.List[stmt] handlers: typing.List[ExceptHandler] orelse: typing.List[stmt] class TryFinally(stmt): body: typing.List[stmt] finalbody: typing.List[stmt] class Assert(stmt): test: expr msg: Optional[expr] class Import(stmt): names: typing.List[alias] class ImportFrom(stmt): module: Optional[identifier] names: typing.List[alias] level: Optional[int] class Exec(stmt): body: expr globals: Optional[expr] locals: Optional[expr] class Global(stmt): names: typing.List[identifier] class Expr(stmt): value: expr class Pass(stmt): ... class Break(stmt): ... class Continue(stmt): ... 
class slice(AST): ... _slice = slice # this lets us type the variable named 'slice' below class Slice(slice): lower: Optional[expr] upper: Optional[expr] step: Optional[expr] class ExtSlice(slice): dims: typing.List[slice] class Index(slice): value: expr class Ellipsis(slice): ... class expr(AST): lineno: int col_offset: int class BoolOp(expr): op: boolop values: typing.List[expr] class BinOp(expr): left: expr op: operator right: expr class UnaryOp(expr): op: unaryop operand: expr class Lambda(expr): args: arguments body: expr class IfExp(expr): test: expr body: expr orelse: expr class Dict(expr): keys: typing.List[expr] values: typing.List[expr] class Set(expr): elts: typing.List[expr] class ListComp(expr): elt: expr generators: typing.List[comprehension] class SetComp(expr): elt: expr generators: typing.List[comprehension] class DictComp(expr): key: expr value: expr generators: typing.List[comprehension] class GeneratorExp(expr): elt: expr generators: typing.List[comprehension] class Yield(expr): value: Optional[expr] class Compare(expr): left: expr ops: typing.List[cmpop] comparators: typing.List[expr] class Call(expr): func: expr args: typing.List[expr] keywords: typing.List[keyword] starargs: Optional[expr] kwargs: Optional[expr] class Repr(expr): value: expr class Num(expr): n: Union[int, float, complex] class Str(expr): s: bytes kind: str class Attribute(expr): value: expr attr: identifier ctx: expr_context class Subscript(expr): value: expr slice: _slice ctx: expr_context class Name(expr): id: identifier ctx: expr_context class List(expr): elts: typing.List[expr] ctx: expr_context class Tuple(expr): elts: typing.List[expr] ctx: expr_context class expr_context(AST): ... class AugLoad(expr_context): ... class AugStore(expr_context): ... class Del(expr_context): ... class Load(expr_context): ... class Param(expr_context): ... class Store(expr_context): ... class boolop(AST): ... class And(boolop): ... class Or(boolop): ... class operator(AST): ... 
class Add(operator): ... class BitAnd(operator): ... class BitOr(operator): ... class BitXor(operator): ... class Div(operator): ... class FloorDiv(operator): ... class LShift(operator): ... class Mod(operator): ... class Mult(operator): ... class Pow(operator): ... class RShift(operator): ... class Sub(operator): ... class unaryop(AST): ... class Invert(unaryop): ... class Not(unaryop): ... class UAdd(unaryop): ... class USub(unaryop): ... class cmpop(AST): ... class Eq(cmpop): ... class Gt(cmpop): ... class GtE(cmpop): ... class In(cmpop): ... class Is(cmpop): ... class IsNot(cmpop): ... class Lt(cmpop): ... class LtE(cmpop): ... class NotEq(cmpop): ... class NotIn(cmpop): ... class comprehension(AST): target: expr iter: expr ifs: typing.List[expr] class ExceptHandler(AST): type: Optional[expr] name: Optional[expr] body: typing.List[stmt] lineno: int col_offset: int class arguments(AST): args: typing.List[expr] vararg: Optional[identifier] kwarg: Optional[identifier] defaults: typing.List[expr] type_comments: typing.List[Optional[str]] class keyword(AST): arg: identifier value: expr class alias(AST): name: identifier asname: Optional[identifier] class TypeIgnore(AST): lineno: int mypy-0.761/mypy/typeshed/third_party/3/typed_ast/ast3.pyi0000644€tŠÔÚ€2›s®0000001751613576752252027636 0ustar jukkaDROPBOX\Domain Users00000000000000import typing from typing import Any, Optional, Union, Generic, Iterator class NodeVisitor(): def visit(self, node: AST) -> Any: ... def generic_visit(self, node: AST) -> None: ... class NodeTransformer(NodeVisitor): def generic_visit(self, node: AST) -> None: ... def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ..., feature_version: int = ...) -> AST: ... def copy_location(new_node: AST, old_node: AST) -> AST: ... def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: AST) -> AST: ... def get_docstring(node: AST, clean: bool = ...) 
-> str: ... def increment_lineno(node: AST, n: int = ...) -> AST: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ... def literal_eval(node_or_string: Union[str, AST]) -> Any: ... def walk(node: AST) -> Iterator[AST]: ... PyCF_ONLY_AST: int # ast classes identifier = str class AST: _attributes: typing.Tuple[str, ...] _fields: typing.Tuple[str, ...] def __init__(self, *args, **kwargs) -> None: ... class mod(AST): ... class Module(mod): body: typing.List[stmt] type_ignores: typing.List[TypeIgnore] class Interactive(mod): body: typing.List[stmt] class Expression(mod): body: expr class FunctionType(mod): argtypes: typing.List[expr] returns: expr class Suite(mod): body: typing.List[stmt] class stmt(AST): lineno: int col_offset: int class FunctionDef(stmt): name: identifier args: arguments body: typing.List[stmt] decorator_list: typing.List[expr] returns: Optional[expr] type_comment: Optional[str] class AsyncFunctionDef(stmt): name: identifier args: arguments body: typing.List[stmt] decorator_list: typing.List[expr] returns: Optional[expr] type_comment: Optional[str] class ClassDef(stmt): name: identifier bases: typing.List[expr] keywords: typing.List[keyword] body: typing.List[stmt] decorator_list: typing.List[expr] class Return(stmt): value: Optional[expr] class Delete(stmt): targets: typing.List[expr] class Assign(stmt): targets: typing.List[expr] value: expr type_comment: Optional[str] class AugAssign(stmt): target: expr op: operator value: expr class AnnAssign(stmt): target: expr annotation: expr value: Optional[expr] simple: int class For(stmt): target: expr iter: expr body: typing.List[stmt] orelse: typing.List[stmt] type_comment: Optional[str] class AsyncFor(stmt): target: expr iter: expr body: typing.List[stmt] orelse: typing.List[stmt] type_comment: Optional[str] class While(stmt): test: expr body: typing.List[stmt] orelse: typing.List[stmt] class If(stmt): test: expr body: 
typing.List[stmt] orelse: typing.List[stmt] class With(stmt): items: typing.List[withitem] body: typing.List[stmt] type_comment: Optional[str] class AsyncWith(stmt): items: typing.List[withitem] body: typing.List[stmt] type_comment: Optional[str] class Raise(stmt): exc: Optional[expr] cause: Optional[expr] class Try(stmt): body: typing.List[stmt] handlers: typing.List[ExceptHandler] orelse: typing.List[stmt] finalbody: typing.List[stmt] class Assert(stmt): test: expr msg: Optional[expr] class Import(stmt): names: typing.List[alias] class ImportFrom(stmt): module: Optional[identifier] names: typing.List[alias] level: Optional[int] class Global(stmt): names: typing.List[identifier] class Nonlocal(stmt): names: typing.List[identifier] class Expr(stmt): value: expr class Pass(stmt): ... class Break(stmt): ... class Continue(stmt): ... class slice(AST): ... _slice = slice # this lets us type the variable named 'slice' below class Slice(slice): lower: Optional[expr] upper: Optional[expr] step: Optional[expr] class ExtSlice(slice): dims: typing.List[slice] class Index(slice): value: expr class expr(AST): lineno: int col_offset: int class BoolOp(expr): op: boolop values: typing.List[expr] class BinOp(expr): left: expr op: operator right: expr class UnaryOp(expr): op: unaryop operand: expr class Lambda(expr): args: arguments body: expr class IfExp(expr): test: expr body: expr orelse: expr class Dict(expr): keys: typing.List[expr] values: typing.List[expr] class Set(expr): elts: typing.List[expr] class ListComp(expr): elt: expr generators: typing.List[comprehension] class SetComp(expr): elt: expr generators: typing.List[comprehension] class DictComp(expr): key: expr value: expr generators: typing.List[comprehension] class GeneratorExp(expr): elt: expr generators: typing.List[comprehension] class Await(expr): value: expr class Yield(expr): value: Optional[expr] class YieldFrom(expr): value: expr class Compare(expr): left: expr ops: typing.List[cmpop] comparators: 
typing.List[expr] class Call(expr): func: expr args: typing.List[expr] keywords: typing.List[keyword] class Num(expr): n: Union[float, int, complex] class Str(expr): s: str kind: str class FormattedValue(expr): value: expr conversion: typing.Optional[int] format_spec: typing.Optional[expr] class JoinedStr(expr): values: typing.List[expr] class Bytes(expr): s: bytes class NameConstant(expr): value: Any class Ellipsis(expr): ... class Attribute(expr): value: expr attr: identifier ctx: expr_context class Subscript(expr): value: expr slice: _slice ctx: expr_context class Starred(expr): value: expr ctx: expr_context class Name(expr): id: identifier ctx: expr_context class List(expr): elts: typing.List[expr] ctx: expr_context class Tuple(expr): elts: typing.List[expr] ctx: expr_context class expr_context(AST): ... class AugLoad(expr_context): ... class AugStore(expr_context): ... class Del(expr_context): ... class Load(expr_context): ... class Param(expr_context): ... class Store(expr_context): ... class boolop(AST): ... class And(boolop): ... class Or(boolop): ... class operator(AST): ... class Add(operator): ... class BitAnd(operator): ... class BitOr(operator): ... class BitXor(operator): ... class Div(operator): ... class FloorDiv(operator): ... class LShift(operator): ... class Mod(operator): ... class Mult(operator): ... class MatMult(operator): ... class Pow(operator): ... class RShift(operator): ... class Sub(operator): ... class unaryop(AST): ... class Invert(unaryop): ... class Not(unaryop): ... class UAdd(unaryop): ... class USub(unaryop): ... class cmpop(AST): ... class Eq(cmpop): ... class Gt(cmpop): ... class GtE(cmpop): ... class In(cmpop): ... class Is(cmpop): ... class IsNot(cmpop): ... class Lt(cmpop): ... class LtE(cmpop): ... class NotEq(cmpop): ... class NotIn(cmpop): ... 
class comprehension(AST): target: expr iter: expr ifs: typing.List[expr] is_async: int class ExceptHandler(AST): type: Optional[expr] name: Optional[identifier] body: typing.List[stmt] lineno: int col_offset: int class arguments(AST): args: typing.List[arg] vararg: Optional[arg] kwonlyargs: typing.List[arg] kw_defaults: typing.List[expr] kwarg: Optional[arg] defaults: typing.List[expr] class arg(AST): arg: identifier annotation: Optional[expr] lineno: int col_offset: int type_comment: typing.Optional[str] class keyword(AST): arg: Optional[identifier] value: expr class alias(AST): name: identifier asname: Optional[identifier] class withitem(AST): context_expr: expr optional_vars: Optional[expr] class TypeIgnore(AST): lineno: int mypy-0.761/mypy/typeshed/third_party/3/typed_ast/conversions.pyi0000644€tŠÔÚ€2›s®0000000012413576752252031317 0ustar jukkaDROPBOX\Domain Users00000000000000from . import ast27 from . import ast3 def py2to3(ast: ast27.AST) -> ast3.AST: ... mypy-0.761/mypy/typestate.py0000644€tŠÔÚ€2›s®0000003124613576752246022320 0ustar jukkaDROPBOX\Domain Users00000000000000""" A shared state for all TypeInfos that holds global cache and dependency information, and potentially other mutable TypeInfo state. This module contains mutable global state. """ from typing import Dict, Set, Tuple, Optional, List from typing_extensions import ClassVar, Final from mypy.nodes import TypeInfo from mypy.types import Instance, TypeAliasType, get_proper_type, Type from mypy.server.trigger import make_trigger from mypy import state # Represents that the 'left' instance is a subtype of the 'right' instance SubtypeRelationship = Tuple[Instance, Instance] # A tuple encoding the specific conditions under which we performed the subtype check. # (e.g. did we want a proper subtype? A regular subtype while ignoring variance?) SubtypeKind = Tuple[bool, ...] 
# A cache that keeps track of whether the given TypeInfo is a part of a particular # subtype relationship SubtypeCache = Dict[TypeInfo, Dict[SubtypeKind, Set[SubtypeRelationship]]] class TypeState: """This class provides subtype caching to improve performance of subtype checks. It also holds protocol fine grained dependencies. Note: to avoid leaking global state, 'reset_all_subtype_caches()' should be called after a build has finished and after a daemon shutdown. This subtype cache only exists for performance reasons, resetting subtype caches for a class has no semantic effect. The protocol dependencies however are only stored here, and shouldn't be deleted unless not needed any more (e.g. during daemon shutdown). """ # '_subtype_caches' keeps track of (subtype, supertype) pairs where supertypes are # instances of the given TypeInfo. The cache also keeps track of whether the check # was done in strict optional mode and of the specific *kind* of subtyping relationship, # which we represent as an arbitrary hashable tuple. # We need the caches, since subtype checks for structural types are very slow. _subtype_caches = {} # type: Final[SubtypeCache] # This contains protocol dependencies generated after running a full build, # or after an update. These dependencies are special because: # * They are a global property of the program; i.e. some dependencies for imported # classes can be generated in the importing modules. # * Because of the above, they are serialized separately, after a full run, # or a full update. # `proto_deps` can be None if after deserialization it turns out that they are # inconsistent with the other cache files (or an error occurred during deserialization). # A blocking error will be generated in this case, since we can't proceed safely. # For the description of kinds of protocol dependencies and corresponding examples, # see _snapshot_protocol_deps. 
proto_deps = {} # type: ClassVar[Optional[Dict[str, Set[str]]]] # Protocols (full names) a given class attempted to implement. # Used to calculate fine grained protocol dependencies and optimize protocol # subtype cache invalidation in fine grained mode. For example, if we pass a value # of type a.A to a function expecting something compatible with protocol p.P, # we'd have 'a.A' -> {'p.P', ...} in the map. This map is flushed after every incremental # update. _attempted_protocols = {} # type: Final[Dict[str, Set[str]]] # We also snapshot protocol members of the above protocols. For example, if we pass # a value of type a.A to a function expecting something compatible with Iterable, we'd have # 'a.A' -> {'__iter__', ...} in the map. This map is also flushed after every incremental # update. This map is needed to only generate dependencies like -> # instead of a wildcard to avoid unnecessarily invalidating classes. _checked_against_members = {} # type: Final[Dict[str, Set[str]]] # TypeInfos that appeared as a left type (subtype) in a subtype check since latest # dependency snapshot update. This is an optimisation for fine grained mode; during a full # run we only take a dependency snapshot at the very end, so this set will contain all # subtype-checked TypeInfos. After a fine grained update however, we can gather only new # dependencies generated from (typically) few TypeInfos that were subtype-checked # (i.e. appeared as r.h.s. in an assignment or an argument in a function call in # a re-checked target) during the update. _rechecked_types = set() # type: Final[Set[TypeInfo]] # The two attributes below are assumption stacks for subtyping relationships between # recursive type aliases. Normally, one would pass type assumptions as an additional # arguments to is_subtype(), but this would mean updating dozens of related functions # threading this through all callsites (see also comment for TypeInfo.assuming). 
_assuming = [] # type: Final[List[Tuple[TypeAliasType, TypeAliasType]]] _assuming_proper = [] # type: Final[List[Tuple[TypeAliasType, TypeAliasType]]] # Ditto for inference of generic constraints against recursive type aliases. _inferring = [] # type: Final[List[TypeAliasType]] # N.B: We do all of the accesses to these properties through # TypeState, instead of making these classmethods and accessing # via the cls parameter, since mypyc can optimize accesses to # Final attributes of a directly referenced type. @staticmethod def is_assumed_subtype(left: Type, right: Type) -> bool: for (l, r) in reversed(TypeState._assuming): if (get_proper_type(l) == get_proper_type(left) and get_proper_type(r) == get_proper_type(right)): return True return False @staticmethod def is_assumed_proper_subtype(left: Type, right: Type) -> bool: for (l, r) in reversed(TypeState._assuming_proper): if (get_proper_type(l) == get_proper_type(left) and get_proper_type(r) == get_proper_type(right)): return True return False @staticmethod def reset_all_subtype_caches() -> None: """Completely reset all known subtype caches.""" TypeState._subtype_caches.clear() @staticmethod def reset_subtype_caches_for(info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo.""" if info in TypeState._subtype_caches: TypeState._subtype_caches[info].clear() @staticmethod def reset_all_subtype_caches_for(info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo and its MRO.""" for item in info.mro: TypeState.reset_subtype_caches_for(item) @staticmethod def is_cached_subtype_check(kind: SubtypeKind, left: Instance, right: Instance) -> bool: info = right.type if info not in TypeState._subtype_caches: return False cache = TypeState._subtype_caches[info] key = (state.strict_optional,) + kind if key not in cache: return False return (left, right) in cache[key] @staticmethod def record_subtype_cache_entry(kind: SubtypeKind, left: Instance, right: Instance) -> 
None: cache = TypeState._subtype_caches.setdefault(right.type, dict()) cache.setdefault((state.strict_optional,) + kind, set()).add((left, right)) @staticmethod def reset_protocol_deps() -> None: """Reset dependencies after a full run or before a daemon shutdown.""" TypeState.proto_deps = {} TypeState._attempted_protocols.clear() TypeState._checked_against_members.clear() TypeState._rechecked_types.clear() @staticmethod def record_protocol_subtype_check(left_type: TypeInfo, right_type: TypeInfo) -> None: assert right_type.is_protocol TypeState._rechecked_types.add(left_type) TypeState._attempted_protocols.setdefault( left_type.fullname, set()).add(right_type.fullname) TypeState._checked_against_members.setdefault( left_type.fullname, set()).update(right_type.protocol_members) @staticmethod def _snapshot_protocol_deps() -> Dict[str, Set[str]]: """Collect protocol attribute dependencies found so far from registered subtype checks. There are three kinds of protocol dependencies. For example, after a subtype check: x: Proto = C() the following dependencies will be generated: 1. ..., , -> 2. ..., , -> [for every attr in Proto members] 3. -> Proto # this one to invalidate the subtype cache The first kind is generated immediately per-module in deps.py (see also an example there for motivation why it is needed). While two other kinds are generated here after all modules are type checked and we have recorded all the subtype checks. To understand these two kinds, consider a simple example: class A: def __iter__(self) -> Iterator[int]: ... it: Iterable[int] = A() We add -> to invalidate the assignment (module target in this case), whenever the signature of a.A.__iter__ changes. We also add -> typing.Iterable, to invalidate the subtype caches of the latter. (Note that the same logic applies to proper subtype checks, and calculating meets and joins, if this involves calling 'subtypes.is_protocol_implementation'). 
""" deps = {} # type: Dict[str, Set[str]] for info in TypeState._rechecked_types: for attr in TypeState._checked_against_members[info.fullname]: # The need for full MRO here is subtle, during an update, base classes of # a concrete class may not be reprocessed, so not all -> deps # are added. for base_info in info.mro[:-1]: trigger = make_trigger('%s.%s' % (base_info.fullname, attr)) if 'typing' in trigger or 'builtins' in trigger: # TODO: avoid everything from typeshed continue deps.setdefault(trigger, set()).add(make_trigger(info.fullname)) for proto in TypeState._attempted_protocols[info.fullname]: trigger = make_trigger(info.fullname) if 'typing' in trigger or 'builtins' in trigger: continue # If any class that was checked against a protocol changes, # we need to reset the subtype cache for the protocol. # # Note: strictly speaking, the protocol doesn't need to be # re-checked, we only need to reset the cache, and its uses # elsewhere are still valid (unless invalidated by other deps). deps.setdefault(trigger, set()).add(proto) return deps @staticmethod def update_protocol_deps(second_map: Optional[Dict[str, Set[str]]] = None) -> None: """Update global protocol dependency map. We update the global map incrementally, using a snapshot only from recently type checked types. If second_map is given, update it as well. This is currently used by FineGrainedBuildManager that maintains normal (non-protocol) dependencies. 
""" assert TypeState.proto_deps is not None, ( "This should not be called after failed cache load") new_deps = TypeState._snapshot_protocol_deps() for trigger, targets in new_deps.items(): TypeState.proto_deps.setdefault(trigger, set()).update(targets) if second_map is not None: for trigger, targets in new_deps.items(): second_map.setdefault(trigger, set()).update(targets) TypeState._rechecked_types.clear() TypeState._attempted_protocols.clear() TypeState._checked_against_members.clear() @staticmethod def add_all_protocol_deps(deps: Dict[str, Set[str]]) -> None: """Add all known protocol dependencies to deps. This is used by tests and debug output, and also when collecting all collected or loaded dependencies as part of build. """ TypeState.update_protocol_deps() # just in case if TypeState.proto_deps is not None: for trigger, targets in TypeState.proto_deps.items(): deps.setdefault(trigger, set()).update(targets) def reset_global_state() -> None: """Reset most existing global state. Currently most of it is in this module. Few exceptions are strict optional status and and functools.lru_cache. 
""" TypeState.reset_all_subtype_caches() TypeState.reset_protocol_deps() mypy-0.761/mypy/typetraverser.py0000644€tŠÔÚ€2›s®0000000603113576752246023207 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable from mypy_extensions import trait from mypy.types import ( Type, SyntheticTypeVisitor, AnyType, UninhabitedType, NoneType, ErasedType, DeletedType, TypeVarType, LiteralType, Instance, CallableType, TupleType, TypedDictType, UnionType, Overloaded, TypeType, CallableArgument, UnboundType, TypeList, StarType, EllipsisType, PlaceholderType, PartialType, RawExpressionType, TypeAliasType ) @trait class TypeTraverserVisitor(SyntheticTypeVisitor[None]): """Visitor that traverses all components of a type""" # Atomic types def visit_any(self, t: AnyType) -> None: pass def visit_uninhabited_type(self, t: UninhabitedType) -> None: pass def visit_none_type(self, t: NoneType) -> None: pass def visit_erased_type(self, t: ErasedType) -> None: pass def visit_deleted_type(self, t: DeletedType) -> None: pass def visit_type_var(self, t: TypeVarType) -> None: # Note that type variable values and upper bound aren't treated as # components, since they are components of the type variable # definition. We want to traverse everything just once. 
pass def visit_literal_type(self, t: LiteralType) -> None: t.fallback.accept(self) # Composite types def visit_instance(self, t: Instance) -> None: self.traverse_types(t.args) def visit_callable_type(self, t: CallableType) -> None: # FIX generics self.traverse_types(t.arg_types) t.ret_type.accept(self) t.fallback.accept(self) def visit_tuple_type(self, t: TupleType) -> None: self.traverse_types(t.items) t.partial_fallback.accept(self) def visit_typeddict_type(self, t: TypedDictType) -> None: self.traverse_types(t.items.values()) t.fallback.accept(self) def visit_union_type(self, t: UnionType) -> None: self.traverse_types(t.items) def visit_overloaded(self, t: Overloaded) -> None: self.traverse_types(t.items()) def visit_type_type(self, t: TypeType) -> None: t.item.accept(self) # Special types (not real types) def visit_callable_argument(self, t: CallableArgument) -> None: t.typ.accept(self) def visit_unbound_type(self, t: UnboundType) -> None: self.traverse_types(t.args) def visit_type_list(self, t: TypeList) -> None: self.traverse_types(t.items) def visit_star_type(self, t: StarType) -> None: t.type.accept(self) def visit_ellipsis_type(self, t: EllipsisType) -> None: pass def visit_placeholder_type(self, t: PlaceholderType) -> None: self.traverse_types(t.args) def visit_partial_type(self, t: PartialType) -> None: pass def visit_raw_expression_type(self, t: RawExpressionType) -> None: pass def visit_type_alias_type(self, t: TypeAliasType) -> None: self.traverse_types(t.args) # Helpers def traverse_types(self, types: Iterable[Type]) -> None: for typ in types: typ.accept(self) mypy-0.761/mypy/typevars.py0000644€tŠÔÚ€2›s®0000000323613576752246022151 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Union, List from mypy.nodes import TypeInfo from mypy.erasetype import erase_typevars from mypy.types import Instance, TypeVarType, TupleType, Type, TypeOfAny, AnyType def fill_typevars(typ: TypeInfo) -> Union[Instance, TupleType]: """For a non-generic type, 
return instance type representing the type. For a generic G type with parameters T1, .., Tn, return G[T1, ..., Tn]. """ tv = [] # type: List[Type] # TODO: why do we need to keep both typ.type_vars and typ.defn.type_vars? for i in range(len(typ.defn.type_vars)): tv.append(TypeVarType(typ.defn.type_vars[i])) inst = Instance(typ, tv) if typ.tuple_type is None: return inst return typ.tuple_type.copy_modified(fallback=inst) def fill_typevars_with_any(typ: TypeInfo) -> Union[Instance, TupleType]: """Apply a correct number of Any's as type arguments to a type.""" inst = Instance(typ, [AnyType(TypeOfAny.special_form)] * len(typ.defn.type_vars)) if typ.tuple_type is None: return inst return typ.tuple_type.copy_modified(fallback=inst) def has_no_typevars(typ: Type) -> bool: # We test if a type contains type variables by erasing all type variables # and comparing the result to the original type. We use comparison by equality that # in turn uses `__eq__` defined for types. Note: we can't use `is_same_type` because # it is not safe with unresolved forward references, while this function may be called # before forward references resolution patch pass. Note also that it is not safe to use # `is` comparison because `erase_typevars` doesn't preserve type identity. 
return typ == erase_typevars(typ) mypy-0.761/mypy/util.py0000644€tŠÔÚ€2›s®0000006143113576752246021252 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utility functions with no non-trivial dependencies.""" import os import pathlib import re import subprocess import sys import os from typing import ( TypeVar, List, Tuple, Optional, Dict, Sequence, Iterable, Container, IO, Callable ) from typing_extensions import Final, Type, Literal try: import curses import _curses # noqa CURSES_ENABLED = True except ImportError: CURSES_ENABLED = False T = TypeVar('T') ENCODING_RE = \ re.compile(br'([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*([-\w.]+)') # type: Final # This works in most default terminals works (because it is ANSI standard). The problem # this tries to solve is that although it is a basic ANSI "feature", terminfo files # for most default terminals don't have dim termcap entry, so curses doesn't report it. # Potentially, we can choose a grey color that would look good on both white and black # background, but it is not easy, and again most default terminals are 8-color, not 256-color, # so we can't get the color code from curses. PLAIN_ANSI_DIM = '\x1b[2m' # type: Final DEFAULT_SOURCE_OFFSET = 4 # type: Final DEFAULT_COLUMNS = 80 # type: Final # At least this number of columns will be shown on each side of # error location when printing source code snippet. MINIMUM_WIDTH = 20 # VT100 color code processing was added in Windows 10, but only the second major update, # Threshold 2. Fortunately, everyone (even on LTSB, Long Term Support Branch) should # have a version of Windows 10 newer than this. Note that Windows 8 and below are not # supported, but are either going out of support, or make up only a few % of the market. 
MINIMUM_WINDOWS_MAJOR_VT100 = 10 MINIMUM_WINDOWS_BUILD_VT100 = 10586 default_python2_interpreter = \ ['python2', 'python', '/usr/bin/python', 'C:\\Python27\\python.exe'] # type: Final def split_module_names(mod_name: str) -> List[str]: """Return the module and all parent module names. So, if `mod_name` is 'a.b.c', this function will return ['a.b.c', 'a.b', and 'a']. """ out = [mod_name] while '.' in mod_name: mod_name = mod_name.rsplit('.', 1)[0] out.append(mod_name) return out def module_prefix(modules: Iterable[str], target: str) -> Optional[str]: result = split_target(modules, target) if result is None: return None return result[0] def split_target(modules: Iterable[str], target: str) -> Optional[Tuple[str, str]]: remaining = [] # type: List[str] while True: if target in modules: return target, '.'.join(remaining) components = target.rsplit('.', 1) if len(components) == 1: return None target = components[0] remaining.insert(0, components[1]) def short_type(obj: object) -> str: """Return the last component of the type name of an object. If obj is None, return 'nil'. For example, if obj is 1, return 'int'. """ if obj is None: return 'nil' t = str(type(obj)) return t.split('.')[-1].rstrip("'>") def find_python_encoding(text: bytes, pyversion: Tuple[int, int]) -> Tuple[str, int]: """PEP-263 for detecting Python file encoding""" result = ENCODING_RE.match(text) if result: line = 2 if result.group(1) else 1 encoding = result.group(3).decode('ascii') # Handle some aliases that Python is happy to accept and that are used in the wild. if encoding.startswith(('iso-latin-1-', 'latin-1-')) or encoding == 'iso-latin-1': encoding = 'latin-1' return encoding, line else: default_encoding = 'utf8' if pyversion[0] >= 3 else 'ascii' return default_encoding, -1 class DecodeError(Exception): """Exception raised when a file cannot be decoded due to an unknown encoding type. 
Essentially a wrapper for the LookupError raised by `bytearray.decode` """ def decode_python_encoding(source: bytes, pyversion: Tuple[int, int]) -> str: """Read the Python file with while obeying PEP-263 encoding detection. Returns the source as a string. """ # check for BOM UTF-8 encoding and strip it out if present if source.startswith(b'\xef\xbb\xbf'): encoding = 'utf8' source = source[3:] else: # look at first two lines and check if PEP-263 coding is present encoding, _ = find_python_encoding(source, pyversion) try: source_text = source.decode(encoding) except LookupError as lookuperr: raise DecodeError(str(lookuperr)) return source_text def read_py_file(path: str, read: Callable[[str], bytes], pyversion: Tuple[int, int]) -> Optional[List[str]]: """Try reading a Python file as list of source lines. Return None if something goes wrong. """ try: source = read(path) except (IOError, OSError): return None else: try: source_lines = decode_python_encoding(source, pyversion).splitlines() except DecodeError: return None return source_lines def trim_source_line(line: str, max_len: int, col: int, min_width: int) -> Tuple[str, int]: """Trim a line of source code to fit into max_len. Show 'min_width' characters on each side of 'col' (an error location). If either start or end is trimmed, this is indicated by adding '...' there. A typical result looks like this: ...some_variable = function_to_call(one_arg, other_arg) or... Return the trimmed string and the column offset to to adjust error location. """ if max_len < 2 * min_width + 1: # In case the window is too tiny it is better to still show something. max_len = 2 * min_width + 1 # Trivial case: line already fits in. if len(line) <= max_len: return line, 0 # If column is not too large so that there is still min_width after it, # the line doesn't need to be trimmed at the start. if col + min_width < max_len: return line[:max_len] + '...', 0 # Otherwise, if the column is not too close to the end, trim both sides. 
if col < len(line) - min_width - 1: offset = col - max_len + min_width + 1 return '...' + line[offset:col + min_width + 1] + '...', offset - 3 # Finally, if the column is near the end, just trim the start. return '...' + line[-max_len:], len(line) - max_len - 3 def get_mypy_comments(source: str) -> List[Tuple[int, str]]: PREFIX = '# mypy: ' # Don't bother splitting up the lines unless we know it is useful if PREFIX not in source: return [] lines = source.split('\n') results = [] for i, line in enumerate(lines): if line.startswith(PREFIX): results.append((i + 1, line[len(PREFIX):])) return results _python2_interpreter = None # type: Optional[str] def try_find_python2_interpreter() -> Optional[str]: global _python2_interpreter if _python2_interpreter: return _python2_interpreter for interpreter in default_python2_interpreter: try: retcode = subprocess.Popen([ interpreter, '-c', 'import sys, typing; assert sys.version_info[:2] == (2, 7)' ]).wait() if not retcode: _python2_interpreter = interpreter return interpreter except OSError: pass return None PASS_TEMPLATE = """ """ # type: Final FAIL_TEMPLATE = """ {text} """ # type: Final ERROR_TEMPLATE = """ {text} """ # type: Final def write_junit_xml(dt: float, serious: bool, messages: List[str], path: str, version: str, platform: str) -> None: from xml.sax.saxutils import escape if not messages and not serious: xml = PASS_TEMPLATE.format(time=dt, ver=version, platform=platform) elif not serious: xml = FAIL_TEMPLATE.format(text=escape('\n'.join(messages)), time=dt, ver=version, platform=platform) else: xml = ERROR_TEMPLATE.format(text=escape('\n'.join(messages)), time=dt, ver=version, platform=platform) # checks for a directory structure in path and creates folders if needed xml_dirs = os.path.dirname(os.path.abspath(path)) if not os.path.isdir(xml_dirs): os.makedirs(xml_dirs) with open(path, 'wb') as f: f.write(xml.encode('utf-8')) class IdMapper: """Generate integer ids for objects. 
Unlike id(), these start from 0 and increment by 1, and ids won't get reused across the life-time of IdMapper. Assume objects don't redefine __eq__ or __hash__. """ def __init__(self) -> None: self.id_map = {} # type: Dict[object, int] self.next_id = 0 def id(self, o: object) -> int: if o not in self.id_map: self.id_map[o] = self.next_id self.next_id += 1 return self.id_map[o] def get_prefix(fullname: str) -> str: """Drop the final component of a qualified name (e.g. ('x.y' -> 'x').""" return fullname.rsplit('.', 1)[0] def correct_relative_import(cur_mod_id: str, relative: int, target: str, is_cur_package_init_file: bool) -> Tuple[str, bool]: if relative == 0: return target, True parts = cur_mod_id.split(".") rel = relative if is_cur_package_init_file: rel -= 1 ok = len(parts) >= rel if rel != 0: cur_mod_id = ".".join(parts[:-rel]) return cur_mod_id + (("." + target) if target else ""), ok fields_cache = {} # type: Final[Dict[Type[object], List[str]]] def get_class_descriptors(cls: 'Type[object]') -> Sequence[str]: import inspect # Lazy import for minor startup speed win # Maintain a cache of type -> attributes defined by descriptors in the class # (that is, attributes from __slots__ and C extension classes) if cls not in fields_cache: members = inspect.getmembers( cls, lambda o: inspect.isgetsetdescriptor(o) or inspect.ismemberdescriptor(o)) fields_cache[cls] = [x for x, y in members if x != '__weakref__' and x != '__dict__'] return fields_cache[cls] def replace_object_state(new: object, old: object, copy_dict: bool = False) -> None: """Copy state of old node to the new node. This handles cases where there is __dict__ and/or attribute descriptors (either from slots or because the type is defined in a C extension module). Assume that both objects have the same __class__. 
""" if hasattr(old, '__dict__'): if copy_dict: new.__dict__ = dict(old.__dict__) else: new.__dict__ = old.__dict__ for attr in get_class_descriptors(old.__class__): try: if hasattr(old, attr): setattr(new, attr, getattr(old, attr)) elif hasattr(new, attr): delattr(new, attr) # There is no way to distinguish getsetdescriptors that allow # writes from ones that don't (I think?), so we just ignore # AttributeErrors if we need to. # TODO: What about getsetdescriptors that act like properties??? except AttributeError: pass def is_sub_path(path1: str, path2: str) -> bool: """Given two paths, return if path1 is a sub-path of path2.""" return pathlib.Path(path2) in pathlib.Path(path1).parents def hard_exit(status: int = 0) -> None: """Kill the current process without fully cleaning up. This can be quite a bit faster than a normal exit() since objects are not freed. """ sys.stdout.flush() sys.stderr.flush() os._exit(status) def unmangle(name: str) -> str: """Remove internal suffixes from a short name.""" return name.rstrip("'") def get_unique_redefinition_name(name: str, existing: Container[str]) -> str: """Get a simple redefinition name not present among existing. For example, for name 'foo' we try 'foo-redefinition', 'foo-redefinition2', 'foo-redefinition3', etc. until we find one that is not in existing. """ r_name = name + '-redefinition' if r_name not in existing: return r_name i = 2 while r_name + str(i) in existing: i += 1 return r_name + str(i) def check_python_version(program: str) -> None: """Report issues with the Python used to run mypy, dmypy, or stubgen""" # Check for known bad Python versions. 
if sys.version_info[:2] < (3, 5): sys.exit("Running {name} with Python 3.4 or lower is not supported; " "please upgrade to 3.5 or newer".format(name=program)) # this can be deleted once we drop support for 3.5 if sys.version_info[:3] == (3, 5, 0): sys.exit("Running {name} with Python 3.5.0 is not supported; " "please upgrade to 3.5.1 or newer".format(name=program)) def count_stats(errors: List[str]) -> Tuple[int, int]: """Count total number of errors and files in error list.""" errors = [e for e in errors if ': error:' in e] files = {e.split(':')[0] for e in errors} return len(errors), len(files) def split_words(msg: str) -> List[str]: """Split line of text into words (but not within quoted groups).""" next_word = '' res = [] # type: List[str] allow_break = True for c in msg: if c == ' ' and allow_break: res.append(next_word) next_word = '' continue if c == '"': allow_break = not allow_break next_word += c res.append(next_word) return res def get_terminal_width() -> int: """Get current terminal width if possible, otherwise return the default one.""" try: cols, _ = os.get_terminal_size() return cols except OSError: return DEFAULT_COLUMNS def soft_wrap(msg: str, max_len: int, first_offset: int, num_indent: int = 0) -> str: """Wrap a long error message into few lines. Breaks will only happen between words, and never inside a quoted group (to avoid breaking types such as "Union[int, str]"). The 'first_offset' is the width before the start of first line. Pad every next line with 'num_indent' spaces. Every line will be at most 'max_len' characters, except if it is a single word or quoted group. For example: first_offset ------------------------ path/to/file: error: 58: Some very long error message that needs to be split in separate lines. "Long[Type, Names]" are never split. 
^^^^-------------------------------------------------- num_indent max_len """ words = split_words(msg) next_line = words.pop(0) lines = [] # type: List[str] while words: next_word = words.pop(0) max_line_len = max_len - num_indent if lines else max_len - first_offset # Add 1 to account for space between words. if len(next_line) + len(next_word) + 1 <= max_line_len: next_line += ' ' + next_word else: lines.append(next_line) next_line = next_word lines.append(next_line) padding = '\n' + ' ' * num_indent return padding.join(lines) class FancyFormatter: """Apply color and bold font to terminal output. This currently only works on Linux and Mac. """ def __init__(self, f_out: IO[str], f_err: IO[str], show_error_codes: bool) -> None: self.show_error_codes = show_error_codes # Check if we are in a human-facing terminal on a supported platform. if sys.platform not in ('linux', 'darwin', 'win32'): self.dummy_term = True return force_color = int(os.getenv('MYPY_FORCE_COLOR', '0')) if not force_color and (not f_out.isatty() or not f_err.isatty()): self.dummy_term = True return if sys.platform == 'win32': self.dummy_term = not self.initialize_win_colors() else: self.dummy_term = not self.initialize_unix_colors() if not self.dummy_term: self.colors = {'red': self.RED, 'green': self.GREEN, 'blue': self.BLUE, 'yellow': self.YELLOW, 'none': ''} def initialize_win_colors(self) -> bool: """Return True if initialization was successful and we can use colors, False otherwise""" # Windows ANSI escape sequences are only supported on Threshold 2 and above. 
# we check with an assert at runtime and an if check for mypy, as asserts do not # yet narrow platform assert sys.platform == 'win32' if sys.platform == 'win32': winver = sys.getwindowsversion() if (winver.major < MINIMUM_WINDOWS_MAJOR_VT100 or winver.build < MINIMUM_WINDOWS_BUILD_VT100): return False import ctypes kernel32 = ctypes.windll.kernel32 ENABLE_PROCESSED_OUTPUT = 0x1 ENABLE_WRAP_AT_EOL_OUTPUT = 0x2 ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4 STD_OUTPUT_HANDLE = -11 kernel32.SetConsoleMode(kernel32.GetStdHandle(STD_OUTPUT_HANDLE), ENABLE_PROCESSED_OUTPUT | ENABLE_WRAP_AT_EOL_OUTPUT | ENABLE_VIRTUAL_TERMINAL_PROCESSING) self.BOLD = '\033[1m' self.UNDER = '\033[4m' self.BLUE = '\033[94m' self.GREEN = '\033[92m' self.RED = '\033[91m' self.YELLOW = '\033[93m' self.NORMAL = '\033[0m' self.DIM = '\033[2m' return True return False def initialize_unix_colors(self) -> bool: """Return True if initialization was successful and we can use colors, False otherwise""" if not CURSES_ENABLED: return False try: curses.setupterm() except curses.error: # Most likely terminfo not found. return False bold = curses.tigetstr('bold') under = curses.tigetstr('smul') set_color = curses.tigetstr('setaf') if not (bold and under and set_color): return False self.NORMAL = curses.tigetstr('sgr0').decode() self.BOLD = bold.decode() self.UNDER = under.decode() dim = curses.tigetstr('dim') # TODO: more reliable way to get gray color good for both dark and light schemes. 
self.DIM = dim.decode() if dim else PLAIN_ANSI_DIM self.BLUE = curses.tparm(set_color, curses.COLOR_BLUE).decode() self.GREEN = curses.tparm(set_color, curses.COLOR_GREEN).decode() self.RED = curses.tparm(set_color, curses.COLOR_RED).decode() self.YELLOW = curses.tparm(set_color, curses.COLOR_YELLOW).decode() return True def style(self, text: str, color: Literal['red', 'green', 'blue', 'yellow', 'none'], bold: bool = False, underline: bool = False, dim: bool = False) -> str: """Apply simple color and style (underlined or bold).""" if self.dummy_term: return text if bold: start = self.BOLD else: start = '' if underline: start += self.UNDER if dim: start += self.DIM return start + self.colors[color] + text + self.NORMAL def fit_in_terminal(self, messages: List[str], fixed_terminal_width: Optional[int] = None) -> List[str]: """Improve readability by wrapping error messages and trimming source code.""" width = (fixed_terminal_width or int(os.getenv('MYPY_FORCE_TERMINAL_WIDTH', '0')) or get_terminal_width()) new_messages = messages.copy() for i, error in enumerate(messages): if ': error:' in error: loc, msg = error.split('error:', maxsplit=1) msg = soft_wrap(msg, width, first_offset=len(loc) + len('error: ')) new_messages[i] = loc + 'error:' + msg if error.startswith(' ' * DEFAULT_SOURCE_OFFSET) and '^' not in error: # TODO: detecting source code highlights through an indent can be surprising. # Restore original error message and error location. error = error[DEFAULT_SOURCE_OFFSET:] column = messages[i+1].index('^') - DEFAULT_SOURCE_OFFSET # Let source have some space also on the right side, plus 6 # to accommodate ... on each side. max_len = width - DEFAULT_SOURCE_OFFSET - 6 source_line, offset = trim_source_line(error, max_len, column, MINIMUM_WIDTH) new_messages[i] = ' ' * DEFAULT_SOURCE_OFFSET + source_line # Also adjust the error marker position. 
new_messages[i+1] = ' ' * (DEFAULT_SOURCE_OFFSET + column - offset) + '^' return new_messages def colorize(self, error: str) -> str: """Colorize an output line by highlighting the status and error code. If fixed_terminal_width is given, use it instead of calling get_terminal_width() (used by the daemon). """ if ': error:' in error: loc, msg = error.split('error:', maxsplit=1) if not self.show_error_codes: return (loc + self.style('error:', 'red', bold=True) + self.highlight_quote_groups(msg)) codepos = msg.rfind('[') code = msg[codepos:] msg = msg[:codepos] return (loc + self.style('error:', 'red', bold=True) + self.highlight_quote_groups(msg) + self.style(code, 'yellow')) elif ': note:' in error: loc, msg = error.split('note:', maxsplit=1) return loc + self.style('note:', 'blue') + self.underline_link(msg) elif error.startswith(' ' * DEFAULT_SOURCE_OFFSET): # TODO: detecting source code highlights through an indent can be surprising. if '^' not in error: return self.style(error, 'none', dim=True) return self.style(error, 'red') else: return error def highlight_quote_groups(self, msg: str) -> str: """Make groups quoted with double quotes bold (including quotes). This is used to highlight types, attribute names etc. """ if msg.count('"') % 2: # Broken error message, don't do any formatting. return msg parts = msg.split('"') out = '' for i, part in enumerate(parts): if i % 2 == 0: out += self.style(part, 'none') else: out += self.style('"' + part + '"', 'none', bold=True) return out def underline_link(self, note: str) -> str: """Underline a link in a note message (if any). This assumes there is at most one link in the message. """ match = re.search(r'https?://\S*', note) if not match: return note start = match.start() end = match.end() return (note[:start] + self.style(note[start:end], 'none', underline=True) + note[end:]) def format_success(self, n_sources: int, use_color: bool = True) -> str: """Format short summary in case of success. 
n_sources is total number of files passed directly on command line, i.e. excluding stubs and followed imports. """ msg = 'Success: no issues found in {}' \ ' source file{}'.format(n_sources, 's' if n_sources != 1 else '') if not use_color: return msg return self.style(msg, 'green', bold=True) def format_error(self, n_errors: int, n_files: int, n_sources: int, use_color: bool = True) -> str: """Format a short summary in case of errors.""" msg = 'Found {} error{} in {} file{}' \ ' (checked {} source file{})'.format(n_errors, 's' if n_errors != 1 else '', n_files, 's' if n_files != 1 else '', n_sources, 's' if n_sources != 1 else '') if not use_color: return msg return self.style(msg, 'red', bold=True) mypy-0.761/mypy/version.py0000644€tŠÔÚ€2›s®0000000106213576752246021754 0ustar jukkaDROPBOX\Domain Users00000000000000import os from mypy import git # Base version. # - Release versions have the form "0.NNN". # - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). # - For 1.0 we'll switch back to 1.2.3 form. __version__ = '0.761' base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) if __version__.endswith('+dev') and git.is_git_repo(mypy_dir) and git.have_git(): __version__ += '.' 
+ git.git_revision(mypy_dir).decode('utf-8') if git.is_dirty(mypy_dir): __version__ += '.dirty' del mypy_dir mypy-0.761/mypy/visitor.py0000644€tŠÔÚ€2›s®0000003370213576752246021774 0ustar jukkaDROPBOX\Domain Users00000000000000"""Generic abstract syntax tree node visitor""" from abc import abstractmethod from typing import TypeVar, Generic from typing_extensions import TYPE_CHECKING from mypy_extensions import trait if TYPE_CHECKING: # break import cycle only needed for mypy import mypy.nodes T = TypeVar('T') @trait class ExpressionVisitor(Generic[T]): @abstractmethod def visit_int_expr(self, o: 'mypy.nodes.IntExpr') -> T: pass @abstractmethod def visit_str_expr(self, o: 'mypy.nodes.StrExpr') -> T: pass @abstractmethod def visit_bytes_expr(self, o: 'mypy.nodes.BytesExpr') -> T: pass @abstractmethod def visit_unicode_expr(self, o: 'mypy.nodes.UnicodeExpr') -> T: pass @abstractmethod def visit_float_expr(self, o: 'mypy.nodes.FloatExpr') -> T: pass @abstractmethod def visit_complex_expr(self, o: 'mypy.nodes.ComplexExpr') -> T: pass @abstractmethod def visit_ellipsis(self, o: 'mypy.nodes.EllipsisExpr') -> T: pass @abstractmethod def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> T: pass @abstractmethod def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> T: pass @abstractmethod def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> T: pass @abstractmethod def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> T: pass @abstractmethod def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> T: pass @abstractmethod def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> T: pass @abstractmethod def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> T: pass @abstractmethod def visit_comparison_expr(self, o: 'mypy.nodes.ComparisonExpr') -> T: pass @abstractmethod def visit_cast_expr(self, o: 'mypy.nodes.CastExpr') -> T: pass @abstractmethod def visit_reveal_expr(self, o: 'mypy.nodes.RevealExpr') -> T: pass @abstractmethod def visit_super_expr(self, o: 
'mypy.nodes.SuperExpr') -> T: pass @abstractmethod def visit_unary_expr(self, o: 'mypy.nodes.UnaryExpr') -> T: pass @abstractmethod def visit_assignment_expr(self, o: 'mypy.nodes.AssignmentExpr') -> T: pass @abstractmethod def visit_list_expr(self, o: 'mypy.nodes.ListExpr') -> T: pass @abstractmethod def visit_dict_expr(self, o: 'mypy.nodes.DictExpr') -> T: pass @abstractmethod def visit_tuple_expr(self, o: 'mypy.nodes.TupleExpr') -> T: pass @abstractmethod def visit_set_expr(self, o: 'mypy.nodes.SetExpr') -> T: pass @abstractmethod def visit_index_expr(self, o: 'mypy.nodes.IndexExpr') -> T: pass @abstractmethod def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> T: pass @abstractmethod def visit_lambda_expr(self, o: 'mypy.nodes.LambdaExpr') -> T: pass @abstractmethod def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> T: pass @abstractmethod def visit_set_comprehension(self, o: 'mypy.nodes.SetComprehension') -> T: pass @abstractmethod def visit_dictionary_comprehension(self, o: 'mypy.nodes.DictionaryComprehension') -> T: pass @abstractmethod def visit_generator_expr(self, o: 'mypy.nodes.GeneratorExpr') -> T: pass @abstractmethod def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> T: pass @abstractmethod def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> T: pass @abstractmethod def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> T: pass @abstractmethod def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> T: pass @abstractmethod def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> T: pass @abstractmethod def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> T: pass @abstractmethod def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> T: pass @abstractmethod def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> T: pass @abstractmethod def visit_newtype_expr(self, o: 'mypy.nodes.NewTypeExpr') -> T: pass @abstractmethod def 
visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> T: pass @abstractmethod def visit_await_expr(self, o: 'mypy.nodes.AwaitExpr') -> T: pass @abstractmethod def visit_temp_node(self, o: 'mypy.nodes.TempNode') -> T: pass @trait class StatementVisitor(Generic[T]): # Definitions @abstractmethod def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> T: pass @abstractmethod def visit_for_stmt(self, o: 'mypy.nodes.ForStmt') -> T: pass @abstractmethod def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> T: pass @abstractmethod def visit_del_stmt(self, o: 'mypy.nodes.DelStmt') -> T: pass @abstractmethod def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> T: pass @abstractmethod def visit_overloaded_func_def(self, o: 'mypy.nodes.OverloadedFuncDef') -> T: pass @abstractmethod def visit_class_def(self, o: 'mypy.nodes.ClassDef') -> T: pass @abstractmethod def visit_global_decl(self, o: 'mypy.nodes.GlobalDecl') -> T: pass @abstractmethod def visit_nonlocal_decl(self, o: 'mypy.nodes.NonlocalDecl') -> T: pass @abstractmethod def visit_decorator(self, o: 'mypy.nodes.Decorator') -> T: pass # Module structure @abstractmethod def visit_import(self, o: 'mypy.nodes.Import') -> T: pass @abstractmethod def visit_import_from(self, o: 'mypy.nodes.ImportFrom') -> T: pass @abstractmethod def visit_import_all(self, o: 'mypy.nodes.ImportAll') -> T: pass # Statements @abstractmethod def visit_block(self, o: 'mypy.nodes.Block') -> T: pass @abstractmethod def visit_expression_stmt(self, o: 'mypy.nodes.ExpressionStmt') -> T: pass @abstractmethod def visit_operator_assignment_stmt(self, o: 'mypy.nodes.OperatorAssignmentStmt') -> T: pass @abstractmethod def visit_while_stmt(self, o: 'mypy.nodes.WhileStmt') -> T: pass @abstractmethod def visit_return_stmt(self, o: 'mypy.nodes.ReturnStmt') -> T: pass @abstractmethod def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> T: pass @abstractmethod def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> T: pass @abstractmethod def 
visit_break_stmt(self, o: 'mypy.nodes.BreakStmt') -> T: pass @abstractmethod def visit_continue_stmt(self, o: 'mypy.nodes.ContinueStmt') -> T: pass @abstractmethod def visit_pass_stmt(self, o: 'mypy.nodes.PassStmt') -> T: pass @abstractmethod def visit_raise_stmt(self, o: 'mypy.nodes.RaiseStmt') -> T: pass @abstractmethod def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> T: pass @abstractmethod def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> T: pass @abstractmethod def visit_exec_stmt(self, o: 'mypy.nodes.ExecStmt') -> T: pass @trait class NodeVisitor(Generic[T], ExpressionVisitor[T], StatementVisitor[T]): """Empty base class for parse tree node visitors. The T type argument specifies the return type of the visit methods. As all methods defined here return None by default, subclasses do not always need to override all the methods. TODO make the default return value explicit """ # Not in superclasses: def visit_mypy_file(self, o: 'mypy.nodes.MypyFile') -> T: pass # TODO: We have a visit_var method, but no visit_typeinfo or any # other non-Statement SymbolNode (accepting those will raise a # runtime error). Maybe this should be resolved in some direction. 
def visit_var(self, o: 'mypy.nodes.Var') -> T: pass # Module structure def visit_import(self, o: 'mypy.nodes.Import') -> T: pass def visit_import_from(self, o: 'mypy.nodes.ImportFrom') -> T: pass def visit_import_all(self, o: 'mypy.nodes.ImportAll') -> T: pass # Definitions def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> T: pass def visit_overloaded_func_def(self, o: 'mypy.nodes.OverloadedFuncDef') -> T: pass def visit_class_def(self, o: 'mypy.nodes.ClassDef') -> T: pass def visit_global_decl(self, o: 'mypy.nodes.GlobalDecl') -> T: pass def visit_nonlocal_decl(self, o: 'mypy.nodes.NonlocalDecl') -> T: pass def visit_decorator(self, o: 'mypy.nodes.Decorator') -> T: pass def visit_type_alias(self, o: 'mypy.nodes.TypeAlias') -> T: pass def visit_placeholder_node(self, o: 'mypy.nodes.PlaceholderNode') -> T: pass # Statements def visit_block(self, o: 'mypy.nodes.Block') -> T: pass def visit_expression_stmt(self, o: 'mypy.nodes.ExpressionStmt') -> T: pass def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> T: pass def visit_operator_assignment_stmt(self, o: 'mypy.nodes.OperatorAssignmentStmt') -> T: pass def visit_while_stmt(self, o: 'mypy.nodes.WhileStmt') -> T: pass def visit_for_stmt(self, o: 'mypy.nodes.ForStmt') -> T: pass def visit_return_stmt(self, o: 'mypy.nodes.ReturnStmt') -> T: pass def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> T: pass def visit_del_stmt(self, o: 'mypy.nodes.DelStmt') -> T: pass def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> T: pass def visit_break_stmt(self, o: 'mypy.nodes.BreakStmt') -> T: pass def visit_continue_stmt(self, o: 'mypy.nodes.ContinueStmt') -> T: pass def visit_pass_stmt(self, o: 'mypy.nodes.PassStmt') -> T: pass def visit_raise_stmt(self, o: 'mypy.nodes.RaiseStmt') -> T: pass def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> T: pass def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> T: pass def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> T: pass def visit_exec_stmt(self, o: 
'mypy.nodes.ExecStmt') -> T: pass # Expressions (default no-op implementation) def visit_int_expr(self, o: 'mypy.nodes.IntExpr') -> T: pass def visit_str_expr(self, o: 'mypy.nodes.StrExpr') -> T: pass def visit_bytes_expr(self, o: 'mypy.nodes.BytesExpr') -> T: pass def visit_unicode_expr(self, o: 'mypy.nodes.UnicodeExpr') -> T: pass def visit_float_expr(self, o: 'mypy.nodes.FloatExpr') -> T: pass def visit_complex_expr(self, o: 'mypy.nodes.ComplexExpr') -> T: pass def visit_ellipsis(self, o: 'mypy.nodes.EllipsisExpr') -> T: pass def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> T: pass def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> T: pass def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> T: pass def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> T: pass def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> T: pass def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> T: pass def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> T: pass def visit_comparison_expr(self, o: 'mypy.nodes.ComparisonExpr') -> T: pass def visit_cast_expr(self, o: 'mypy.nodes.CastExpr') -> T: pass def visit_reveal_expr(self, o: 'mypy.nodes.RevealExpr') -> T: pass def visit_super_expr(self, o: 'mypy.nodes.SuperExpr') -> T: pass def visit_assignment_expr(self, o: 'mypy.nodes.AssignmentExpr') -> T: pass def visit_unary_expr(self, o: 'mypy.nodes.UnaryExpr') -> T: pass def visit_list_expr(self, o: 'mypy.nodes.ListExpr') -> T: pass def visit_dict_expr(self, o: 'mypy.nodes.DictExpr') -> T: pass def visit_tuple_expr(self, o: 'mypy.nodes.TupleExpr') -> T: pass def visit_set_expr(self, o: 'mypy.nodes.SetExpr') -> T: pass def visit_index_expr(self, o: 'mypy.nodes.IndexExpr') -> T: pass def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> T: pass def visit_lambda_expr(self, o: 'mypy.nodes.LambdaExpr') -> T: pass def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> T: pass def visit_set_comprehension(self, o: 
'mypy.nodes.SetComprehension') -> T: pass def visit_dictionary_comprehension(self, o: 'mypy.nodes.DictionaryComprehension') -> T: pass def visit_generator_expr(self, o: 'mypy.nodes.GeneratorExpr') -> T: pass def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> T: pass def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> T: pass def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> T: pass def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> T: pass def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> T: pass def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> T: pass def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> T: pass def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> T: pass def visit_newtype_expr(self, o: 'mypy.nodes.NewTypeExpr') -> T: pass def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> T: pass def visit_await_expr(self, o: 'mypy.nodes.AwaitExpr') -> T: pass def visit_temp_node(self, o: 'mypy.nodes.TempNode') -> T: pass mypy-0.761/mypy/xml/0000755€tŠÔÚ€2›s®0000000000013576752267020521 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy/xml/mypy-html.css0000644€tŠÔÚ€2›s®0000000260113576752246023167 0ustar jukkaDROPBOX\Domain Users00000000000000/* CSS for type check coverage reports */ /* Used by both summary and file. */ body { font-family: "Helvetica Neue", sans-serif; } /* Used only by summary. 
*/ h1 { text-align: center; font-size: 135%; margin: 20px; } table.summary { border-collapse: collapse; margin-left: 7%; margin-right: 7%; width: 85%; } table caption { margin: 1em; } table.summary, tr.summary, th.summary, td.summary { border: 1px solid #aaa; } th.summary, td.summary { padding: 0.4em; } td.summary a { text-decoration: none; } .summary-quality-0 { background-color: #dfd; } .summary-quality-1 { background-color: #ffa; } .summary-quality-2 { background-color: #faa; } td.summary-filename, th.summary-filename { text-align: left; } td.summary-filename { width: 50%; } .summary-precision { text-align: center; } .summary-lines { text-align: center; } /* Used only by file. */ td.table-lines { text-align: right; padding-right: 0.5em; } td.table-code { } span.lineno { text-align: right; } a:link.lineno, a:visited.lineno { color: #999; text-decoration: none; } a:hover.lineno, a:active.lineno { color: #000; text-decoration: underline; } .line-empty, .line-precise { background-color: #dfd; } .line-imprecise { background-color: #ffa; } .line-any, .line-unanalyzed { background-color: #faa; } mypy-0.761/mypy/xml/mypy-html.xslt0000644€tŠÔÚ€2›s®0000000736013576752246023400 0ustar jukkaDROPBOX\Domain Users00000000000000

Mypy Type Check Coverage Summary

Summary from
File Imprecision Lines
Total imprecise LOC
imprecise LOC

                  
                    

                  
                
                  
                    

                  
                
mypy-0.761/mypy/xml/mypy-txt.xslt0000644€tŠÔÚ€2›s®0000001111613576752246023245 0ustar jukkaDROPBOX\Domain Users00000000000000 Mypy Type Check Coverage Summary ================================ Script: +- -+- -+- -+ | | | | +- -+- -+- -+ | | | | +- -+- -+- -+ | | | | +- -+- -+- -+ mypy-0.761/mypy/xml/mypy.xsd0000644€tŠÔÚ€2›s®0000000417513576752246022243 0ustar jukkaDROPBOX\Domain Users00000000000000 mypy-0.761/mypy.egg-info/0000755€tŠÔÚ€2›s®0000000000013576752266021412 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypy.egg-info/PKG-INFO0000644€tŠÔÚ€2›s®0000000234413576752266022512 0ustar jukkaDROPBOX\Domain Users00000000000000Metadata-Version: 2.1 Name: mypy Version: 0.761 Summary: Optional static typing for Python Home-page: http://www.mypy-lang.org/ Author: Jukka Lehtosalo Author-email: jukka.lehtosalo@iki.fi License: MIT License Description: Mypy -- Optional Static Typing for Python ========================================= Add type annotations to your Python programs, and use mypy to type check them. Mypy is essentially a Python linter on steroids, and it can catch many programming errors by analyzing your program, without actually having to run it. Mypy has a powerful type system with features such as type inference, gradual typing, generics and union types. 
Platform: UNKNOWN Classifier: Development Status :: 4 - Beta Classifier: Environment :: Console Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Topic :: Software Development Requires-Python: >=3.5 Provides-Extra: dmypy mypy-0.761/mypy.egg-info/SOURCES.txt0000644€tŠÔÚ€2›s®0000021527013576752266023305 0ustar jukkaDROPBOX\Domain Users00000000000000LICENSE MANIFEST.in README.md mypy_bootstrap.ini mypy_self_check.ini setup.cfg setup.py docs/Makefile docs/README.md docs/make.bat docs/requirements-docs.txt docs/source/additional_features.rst docs/source/builtin_types.rst docs/source/casts.rst docs/source/cheat_sheet.rst docs/source/cheat_sheet_py3.rst docs/source/class_basics.rst docs/source/command_line.rst docs/source/common_issues.rst docs/source/conf.py docs/source/config_file.rst docs/source/duck_type_compatibility.rst docs/source/dynamic_typing.rst docs/source/error_code_list.rst docs/source/error_code_list2.rst docs/source/error_codes.rst docs/source/existing_code.rst docs/source/extending_mypy.rst docs/source/faq.rst docs/source/final_attrs.rst docs/source/generics.rst docs/source/getting_started.rst docs/source/index.rst docs/source/inline_config.rst docs/source/installed_packages.rst docs/source/introduction.rst docs/source/kinds_of_types.rst docs/source/literal_types.rst docs/source/metaclasses.rst docs/source/more_types.rst docs/source/mypy_daemon.rst docs/source/protocols.rst docs/source/python2.rst docs/source/python36.rst docs/source/running_mypy.rst docs/source/stubgen.rst docs/source/stubs.rst docs/source/supported_python_features.rst docs/source/type_inference_and_annotations.rst mypy/__init__.py mypy/__main__.py mypy/api.py mypy/applytype.py 
mypy/argmap.py mypy/binder.py mypy/bogus_type.py mypy/build.py mypy/checker.py mypy/checkexpr.py mypy/checkmember.py mypy/checkstrformat.py mypy/config_parser.py mypy/constraints.py mypy/defaults.py mypy/dmypy_os.py mypy/dmypy_server.py mypy/dmypy_util.py mypy/erasetype.py mypy/errorcodes.py mypy/errors.py mypy/expandtype.py mypy/exprtotype.py mypy/fastparse.py mypy/fastparse2.py mypy/find_sources.py mypy/fixup.py mypy/freetree.py mypy/fscache.py mypy/fswatcher.py mypy/gclogger.py mypy/git.py mypy/indirection.py mypy/infer.py mypy/ipc.py mypy/join.py mypy/literals.py mypy/lookup.py mypy/main.py mypy/maptype.py mypy/meet.py mypy/memprofile.py mypy/message_registry.py mypy/messages.py mypy/metastore.py mypy/mixedtraverser.py mypy/modulefinder.py mypy/moduleinfo.py mypy/moduleinspect.py mypy/mro.py mypy/nodes.py mypy/options.py mypy/parse.py mypy/plugin.py mypy/reachability.py mypy/renaming.py mypy/report.py mypy/sametypes.py mypy/scope.py mypy/semanal.py mypy/semanal_classprop.py mypy/semanal_enum.py mypy/semanal_infer.py mypy/semanal_main.py mypy/semanal_namedtuple.py mypy/semanal_newtype.py mypy/semanal_pass1.py mypy/semanal_shared.py mypy/semanal_typeargs.py mypy/semanal_typeddict.py mypy/sharedparse.py mypy/sitepkgs.py mypy/solve.py mypy/split_namespace.py mypy/state.py mypy/stats.py mypy/strconv.py mypy/stubdoc.py mypy/stubgen.py mypy/stubgenc.py mypy/stubutil.py mypy/subtypes.py mypy/suggestions.py mypy/traverser.py mypy/treetransform.py mypy/tvar_scope.py mypy/type_visitor.py mypy/typeanal.py mypy/typeops.py mypy/types.py mypy/typestate.py mypy/typetraverser.py mypy/typevars.py mypy/util.py mypy/version.py mypy/visitor.py mypy.egg-info/PKG-INFO mypy.egg-info/SOURCES.txt mypy.egg-info/dependency_links.txt mypy.egg-info/entry_points.txt mypy.egg-info/requires.txt mypy.egg-info/top_level.txt mypy/dmypy/__init__.py mypy/dmypy/__main__.py mypy/dmypy/client.py mypy/plugins/__init__.py mypy/plugins/attrs.py mypy/plugins/common.py mypy/plugins/ctypes.py 
mypy/plugins/dataclasses.py mypy/plugins/default.py mypy/plugins/enums.py mypy/server/__init__.py mypy/server/astdiff.py mypy/server/astmerge.py mypy/server/aststrip.py mypy/server/deps.py mypy/server/mergecheck.py mypy/server/objgraph.py mypy/server/subexpr.py mypy/server/target.py mypy/server/trigger.py mypy/server/update.py mypy/test/__init__.py mypy/test/collect.py mypy/test/config.py mypy/test/data.py mypy/test/helpers.py mypy/test/testapi.py mypy/test/testargs.py mypy/test/testcheck.py mypy/test/testcmdline.py mypy/test/testdaemon.py mypy/test/testdeps.py mypy/test/testdiff.py mypy/test/testerrorstream.py mypy/test/testfinegrained.py mypy/test/testfinegrainedcache.py mypy/test/testformatter.py mypy/test/testgraph.py mypy/test/testinfer.py mypy/test/testipc.py mypy/test/testmerge.py mypy/test/testmodulefinder.py mypy/test/testmoduleinfo.py mypy/test/testmypyc.py mypy/test/testparse.py mypy/test/testpep561.py mypy/test/testpythoneval.py mypy/test/testreports.py mypy/test/testsamples.py mypy/test/testsemanal.py mypy/test/testsolve.py mypy/test/teststubgen.py mypy/test/testsubtypes.py mypy/test/testtransform.py mypy/test/testtypegen.py mypy/test/testtypes.py mypy/test/typefixture.py mypy/test/update.py mypy/test/visitors.py mypy/typeshed/stdlib/2/BaseHTTPServer.pyi mypy/typeshed/stdlib/2/CGIHTTPServer.pyi mypy/typeshed/stdlib/2/ConfigParser.pyi mypy/typeshed/stdlib/2/Cookie.pyi mypy/typeshed/stdlib/2/HTMLParser.pyi mypy/typeshed/stdlib/2/Queue.pyi mypy/typeshed/stdlib/2/SimpleHTTPServer.pyi mypy/typeshed/stdlib/2/SocketServer.pyi mypy/typeshed/stdlib/2/StringIO.pyi mypy/typeshed/stdlib/2/UserDict.pyi mypy/typeshed/stdlib/2/UserList.pyi mypy/typeshed/stdlib/2/UserString.pyi mypy/typeshed/stdlib/2/__builtin__.pyi mypy/typeshed/stdlib/2/_ast.pyi mypy/typeshed/stdlib/2/_collections.pyi mypy/typeshed/stdlib/2/_functools.pyi mypy/typeshed/stdlib/2/_hotshot.pyi mypy/typeshed/stdlib/2/_io.pyi mypy/typeshed/stdlib/2/_json.pyi mypy/typeshed/stdlib/2/_md5.pyi 
mypy/typeshed/stdlib/2/_sha.pyi mypy/typeshed/stdlib/2/_sha256.pyi mypy/typeshed/stdlib/2/_sha512.pyi mypy/typeshed/stdlib/2/_socket.pyi mypy/typeshed/stdlib/2/_sre.pyi mypy/typeshed/stdlib/2/_struct.pyi mypy/typeshed/stdlib/2/_symtable.pyi mypy/typeshed/stdlib/2/_threading_local.pyi mypy/typeshed/stdlib/2/abc.pyi mypy/typeshed/stdlib/2/ast.pyi mypy/typeshed/stdlib/2/atexit.pyi mypy/typeshed/stdlib/2/cPickle.pyi mypy/typeshed/stdlib/2/cStringIO.pyi mypy/typeshed/stdlib/2/collections.pyi mypy/typeshed/stdlib/2/commands.pyi mypy/typeshed/stdlib/2/compileall.pyi mypy/typeshed/stdlib/2/cookielib.pyi mypy/typeshed/stdlib/2/copy_reg.pyi mypy/typeshed/stdlib/2/dircache.pyi mypy/typeshed/stdlib/2/dummy_thread.pyi mypy/typeshed/stdlib/2/exceptions.pyi mypy/typeshed/stdlib/2/fcntl.pyi mypy/typeshed/stdlib/2/fnmatch.pyi mypy/typeshed/stdlib/2/functools.pyi mypy/typeshed/stdlib/2/future_builtins.pyi mypy/typeshed/stdlib/2/gc.pyi mypy/typeshed/stdlib/2/getopt.pyi mypy/typeshed/stdlib/2/getpass.pyi mypy/typeshed/stdlib/2/gettext.pyi mypy/typeshed/stdlib/2/glob.pyi mypy/typeshed/stdlib/2/gzip.pyi mypy/typeshed/stdlib/2/hashlib.pyi mypy/typeshed/stdlib/2/heapq.pyi mypy/typeshed/stdlib/2/htmlentitydefs.pyi mypy/typeshed/stdlib/2/httplib.pyi mypy/typeshed/stdlib/2/imp.pyi mypy/typeshed/stdlib/2/importlib.pyi mypy/typeshed/stdlib/2/inspect.pyi mypy/typeshed/stdlib/2/io.pyi mypy/typeshed/stdlib/2/itertools.pyi mypy/typeshed/stdlib/2/json.pyi mypy/typeshed/stdlib/2/markupbase.pyi mypy/typeshed/stdlib/2/md5.pyi mypy/typeshed/stdlib/2/mimetools.pyi mypy/typeshed/stdlib/2/mutex.pyi mypy/typeshed/stdlib/2/nturl2path.pyi mypy/typeshed/stdlib/2/os2emxpath.pyi mypy/typeshed/stdlib/2/pipes.pyi mypy/typeshed/stdlib/2/platform.pyi mypy/typeshed/stdlib/2/popen2.pyi mypy/typeshed/stdlib/2/posix.pyi mypy/typeshed/stdlib/2/random.pyi mypy/typeshed/stdlib/2/re.pyi mypy/typeshed/stdlib/2/repr.pyi mypy/typeshed/stdlib/2/resource.pyi mypy/typeshed/stdlib/2/rfc822.pyi 
mypy/typeshed/stdlib/2/robotparser.pyi mypy/typeshed/stdlib/2/runpy.pyi mypy/typeshed/stdlib/2/sets.pyi mypy/typeshed/stdlib/2/sha.pyi mypy/typeshed/stdlib/2/shelve.pyi mypy/typeshed/stdlib/2/shlex.pyi mypy/typeshed/stdlib/2/signal.pyi mypy/typeshed/stdlib/2/smtplib.pyi mypy/typeshed/stdlib/2/spwd.pyi mypy/typeshed/stdlib/2/sre_constants.pyi mypy/typeshed/stdlib/2/sre_parse.pyi mypy/typeshed/stdlib/2/stat.pyi mypy/typeshed/stdlib/2/string.pyi mypy/typeshed/stdlib/2/stringold.pyi mypy/typeshed/stdlib/2/strop.pyi mypy/typeshed/stdlib/2/subprocess.pyi mypy/typeshed/stdlib/2/symbol.pyi mypy/typeshed/stdlib/2/sys.pyi mypy/typeshed/stdlib/2/tempfile.pyi mypy/typeshed/stdlib/2/textwrap.pyi mypy/typeshed/stdlib/2/thread.pyi mypy/typeshed/stdlib/2/toaiff.pyi mypy/typeshed/stdlib/2/tokenize.pyi mypy/typeshed/stdlib/2/types.pyi mypy/typeshed/stdlib/2/typing.pyi mypy/typeshed/stdlib/2/unittest.pyi mypy/typeshed/stdlib/2/urllib.pyi mypy/typeshed/stdlib/2/urllib2.pyi mypy/typeshed/stdlib/2/urlparse.pyi mypy/typeshed/stdlib/2/user.pyi mypy/typeshed/stdlib/2/whichdb.pyi mypy/typeshed/stdlib/2/xmlrpclib.pyi mypy/typeshed/stdlib/2/distutils/__init__.pyi mypy/typeshed/stdlib/2/distutils/emxccompiler.pyi mypy/typeshed/stdlib/2/email/MIMEText.pyi mypy/typeshed/stdlib/2/email/__init__.pyi mypy/typeshed/stdlib/2/email/_parseaddr.pyi mypy/typeshed/stdlib/2/email/base64mime.pyi mypy/typeshed/stdlib/2/email/charset.pyi mypy/typeshed/stdlib/2/email/encoders.pyi mypy/typeshed/stdlib/2/email/feedparser.pyi mypy/typeshed/stdlib/2/email/generator.pyi mypy/typeshed/stdlib/2/email/header.pyi mypy/typeshed/stdlib/2/email/iterators.pyi mypy/typeshed/stdlib/2/email/message.pyi mypy/typeshed/stdlib/2/email/parser.pyi mypy/typeshed/stdlib/2/email/quoprimime.pyi mypy/typeshed/stdlib/2/email/utils.pyi mypy/typeshed/stdlib/2/email/mime/__init__.pyi mypy/typeshed/stdlib/2/email/mime/application.pyi mypy/typeshed/stdlib/2/email/mime/audio.pyi mypy/typeshed/stdlib/2/email/mime/base.pyi 
mypy/typeshed/stdlib/2/email/mime/image.pyi mypy/typeshed/stdlib/2/email/mime/message.pyi mypy/typeshed/stdlib/2/email/mime/multipart.pyi mypy/typeshed/stdlib/2/email/mime/nonmultipart.pyi mypy/typeshed/stdlib/2/email/mime/text.pyi mypy/typeshed/stdlib/2/encodings/__init__.pyi mypy/typeshed/stdlib/2/encodings/utf_8.pyi mypy/typeshed/stdlib/2/multiprocessing/__init__.pyi mypy/typeshed/stdlib/2/multiprocessing/pool.pyi mypy/typeshed/stdlib/2/multiprocessing/process.pyi mypy/typeshed/stdlib/2/multiprocessing/util.pyi mypy/typeshed/stdlib/2/multiprocessing/dummy/__init__.pyi mypy/typeshed/stdlib/2/multiprocessing/dummy/connection.pyi mypy/typeshed/stdlib/2/os/__init__.pyi mypy/typeshed/stdlib/2/os/path.pyi mypy/typeshed/stdlib/2and3/__future__.pyi mypy/typeshed/stdlib/2and3/_bisect.pyi mypy/typeshed/stdlib/2and3/_codecs.pyi mypy/typeshed/stdlib/2and3/_csv.pyi mypy/typeshed/stdlib/2and3/_curses.pyi mypy/typeshed/stdlib/2and3/_heapq.pyi mypy/typeshed/stdlib/2and3/_random.pyi mypy/typeshed/stdlib/2and3/_warnings.pyi mypy/typeshed/stdlib/2and3/_weakref.pyi mypy/typeshed/stdlib/2and3/_weakrefset.pyi mypy/typeshed/stdlib/2and3/aifc.pyi mypy/typeshed/stdlib/2and3/argparse.pyi mypy/typeshed/stdlib/2and3/array.pyi mypy/typeshed/stdlib/2and3/asynchat.pyi mypy/typeshed/stdlib/2and3/asyncore.pyi mypy/typeshed/stdlib/2and3/audioop.pyi mypy/typeshed/stdlib/2and3/base64.pyi mypy/typeshed/stdlib/2and3/bdb.pyi mypy/typeshed/stdlib/2and3/binascii.pyi mypy/typeshed/stdlib/2and3/binhex.pyi mypy/typeshed/stdlib/2and3/bisect.pyi mypy/typeshed/stdlib/2and3/builtins.pyi mypy/typeshed/stdlib/2and3/bz2.pyi mypy/typeshed/stdlib/2and3/cProfile.pyi mypy/typeshed/stdlib/2and3/calendar.pyi mypy/typeshed/stdlib/2and3/cgi.pyi mypy/typeshed/stdlib/2and3/cgitb.pyi mypy/typeshed/stdlib/2and3/chunk.pyi mypy/typeshed/stdlib/2and3/cmath.pyi mypy/typeshed/stdlib/2and3/cmd.pyi mypy/typeshed/stdlib/2and3/code.pyi mypy/typeshed/stdlib/2and3/codecs.pyi mypy/typeshed/stdlib/2and3/codeop.pyi 
mypy/typeshed/stdlib/2and3/colorsys.pyi mypy/typeshed/stdlib/2and3/contextlib.pyi mypy/typeshed/stdlib/2and3/copy.pyi mypy/typeshed/stdlib/2and3/crypt.pyi mypy/typeshed/stdlib/2and3/csv.pyi mypy/typeshed/stdlib/2and3/datetime.pyi mypy/typeshed/stdlib/2and3/decimal.pyi mypy/typeshed/stdlib/2and3/difflib.pyi mypy/typeshed/stdlib/2and3/dis.pyi mypy/typeshed/stdlib/2and3/doctest.pyi mypy/typeshed/stdlib/2and3/errno.pyi mypy/typeshed/stdlib/2and3/filecmp.pyi mypy/typeshed/stdlib/2and3/fileinput.pyi mypy/typeshed/stdlib/2and3/formatter.pyi mypy/typeshed/stdlib/2and3/fractions.pyi mypy/typeshed/stdlib/2and3/ftplib.pyi mypy/typeshed/stdlib/2and3/genericpath.pyi mypy/typeshed/stdlib/2and3/grp.pyi mypy/typeshed/stdlib/2and3/hmac.pyi mypy/typeshed/stdlib/2and3/imaplib.pyi mypy/typeshed/stdlib/2and3/imghdr.pyi mypy/typeshed/stdlib/2and3/keyword.pyi mypy/typeshed/stdlib/2and3/linecache.pyi mypy/typeshed/stdlib/2and3/locale.pyi mypy/typeshed/stdlib/2and3/macpath.pyi mypy/typeshed/stdlib/2and3/mailbox.pyi mypy/typeshed/stdlib/2and3/mailcap.pyi mypy/typeshed/stdlib/2and3/marshal.pyi mypy/typeshed/stdlib/2and3/math.pyi mypy/typeshed/stdlib/2and3/mimetypes.pyi mypy/typeshed/stdlib/2and3/mmap.pyi mypy/typeshed/stdlib/2and3/modulefinder.pyi mypy/typeshed/stdlib/2and3/netrc.pyi mypy/typeshed/stdlib/2and3/nis.pyi mypy/typeshed/stdlib/2and3/ntpath.pyi mypy/typeshed/stdlib/2and3/numbers.pyi mypy/typeshed/stdlib/2and3/opcode.pyi mypy/typeshed/stdlib/2and3/operator.pyi mypy/typeshed/stdlib/2and3/optparse.pyi mypy/typeshed/stdlib/2and3/pdb.pyi mypy/typeshed/stdlib/2and3/pickle.pyi mypy/typeshed/stdlib/2and3/pickletools.pyi mypy/typeshed/stdlib/2and3/pkgutil.pyi mypy/typeshed/stdlib/2and3/plistlib.pyi mypy/typeshed/stdlib/2and3/poplib.pyi mypy/typeshed/stdlib/2and3/posixpath.pyi mypy/typeshed/stdlib/2and3/pprint.pyi mypy/typeshed/stdlib/2and3/profile.pyi mypy/typeshed/stdlib/2and3/pstats.pyi mypy/typeshed/stdlib/2and3/pty.pyi mypy/typeshed/stdlib/2and3/pwd.pyi 
mypy/typeshed/stdlib/2and3/py_compile.pyi mypy/typeshed/stdlib/2and3/pyclbr.pyi mypy/typeshed/stdlib/2and3/pydoc.pyi mypy/typeshed/stdlib/2and3/quopri.pyi mypy/typeshed/stdlib/2and3/readline.pyi mypy/typeshed/stdlib/2and3/rlcompleter.pyi mypy/typeshed/stdlib/2and3/sched.pyi mypy/typeshed/stdlib/2and3/select.pyi mypy/typeshed/stdlib/2and3/shutil.pyi mypy/typeshed/stdlib/2and3/site.pyi mypy/typeshed/stdlib/2and3/smtpd.pyi mypy/typeshed/stdlib/2and3/sndhdr.pyi mypy/typeshed/stdlib/2and3/socket.pyi mypy/typeshed/stdlib/2and3/sre_compile.pyi mypy/typeshed/stdlib/2and3/ssl.pyi mypy/typeshed/stdlib/2and3/stringprep.pyi mypy/typeshed/stdlib/2and3/struct.pyi mypy/typeshed/stdlib/2and3/sunau.pyi mypy/typeshed/stdlib/2and3/symtable.pyi mypy/typeshed/stdlib/2and3/sysconfig.pyi mypy/typeshed/stdlib/2and3/syslog.pyi mypy/typeshed/stdlib/2and3/tabnanny.pyi mypy/typeshed/stdlib/2and3/tarfile.pyi mypy/typeshed/stdlib/2and3/telnetlib.pyi mypy/typeshed/stdlib/2and3/termios.pyi mypy/typeshed/stdlib/2and3/threading.pyi mypy/typeshed/stdlib/2and3/time.pyi mypy/typeshed/stdlib/2and3/timeit.pyi mypy/typeshed/stdlib/2and3/token.pyi mypy/typeshed/stdlib/2and3/trace.pyi mypy/typeshed/stdlib/2and3/traceback.pyi mypy/typeshed/stdlib/2and3/tty.pyi mypy/typeshed/stdlib/2and3/turtle.pyi mypy/typeshed/stdlib/2and3/unicodedata.pyi mypy/typeshed/stdlib/2and3/uu.pyi mypy/typeshed/stdlib/2and3/uuid.pyi mypy/typeshed/stdlib/2and3/warnings.pyi mypy/typeshed/stdlib/2and3/wave.pyi mypy/typeshed/stdlib/2and3/weakref.pyi mypy/typeshed/stdlib/2and3/webbrowser.pyi mypy/typeshed/stdlib/2and3/xdrlib.pyi mypy/typeshed/stdlib/2and3/zipfile.pyi mypy/typeshed/stdlib/2and3/zipimport.pyi mypy/typeshed/stdlib/2and3/zlib.pyi mypy/typeshed/stdlib/2and3/ctypes/__init__.pyi mypy/typeshed/stdlib/2and3/ctypes/util.pyi mypy/typeshed/stdlib/2and3/ctypes/wintypes.pyi mypy/typeshed/stdlib/2and3/curses/__init__.pyi mypy/typeshed/stdlib/2and3/curses/ascii.pyi mypy/typeshed/stdlib/2and3/curses/panel.pyi 
mypy/typeshed/stdlib/2and3/curses/textpad.pyi mypy/typeshed/stdlib/2and3/distutils/__init__.pyi mypy/typeshed/stdlib/2and3/distutils/archive_util.pyi mypy/typeshed/stdlib/2and3/distutils/bcppcompiler.pyi mypy/typeshed/stdlib/2and3/distutils/ccompiler.pyi mypy/typeshed/stdlib/2and3/distutils/cmd.pyi mypy/typeshed/stdlib/2and3/distutils/core.pyi mypy/typeshed/stdlib/2and3/distutils/cygwinccompiler.pyi mypy/typeshed/stdlib/2and3/distutils/debug.pyi mypy/typeshed/stdlib/2and3/distutils/dep_util.pyi mypy/typeshed/stdlib/2and3/distutils/dir_util.pyi mypy/typeshed/stdlib/2and3/distutils/dist.pyi mypy/typeshed/stdlib/2and3/distutils/errors.pyi mypy/typeshed/stdlib/2and3/distutils/extension.pyi mypy/typeshed/stdlib/2and3/distutils/fancy_getopt.pyi mypy/typeshed/stdlib/2and3/distutils/file_util.pyi mypy/typeshed/stdlib/2and3/distutils/filelist.pyi mypy/typeshed/stdlib/2and3/distutils/log.pyi mypy/typeshed/stdlib/2and3/distutils/msvccompiler.pyi mypy/typeshed/stdlib/2and3/distutils/spawn.pyi mypy/typeshed/stdlib/2and3/distutils/sysconfig.pyi mypy/typeshed/stdlib/2and3/distutils/text_file.pyi mypy/typeshed/stdlib/2and3/distutils/unixccompiler.pyi mypy/typeshed/stdlib/2and3/distutils/util.pyi mypy/typeshed/stdlib/2and3/distutils/version.pyi mypy/typeshed/stdlib/2and3/distutils/command/__init__.pyi mypy/typeshed/stdlib/2and3/distutils/command/bdist.pyi mypy/typeshed/stdlib/2and3/distutils/command/bdist_dumb.pyi mypy/typeshed/stdlib/2and3/distutils/command/bdist_msi.pyi mypy/typeshed/stdlib/2and3/distutils/command/bdist_packager.pyi mypy/typeshed/stdlib/2and3/distutils/command/bdist_rpm.pyi mypy/typeshed/stdlib/2and3/distutils/command/bdist_wininst.pyi mypy/typeshed/stdlib/2and3/distutils/command/build.pyi mypy/typeshed/stdlib/2and3/distutils/command/build_clib.pyi mypy/typeshed/stdlib/2and3/distutils/command/build_ext.pyi mypy/typeshed/stdlib/2and3/distutils/command/build_py.pyi mypy/typeshed/stdlib/2and3/distutils/command/build_scripts.pyi 
mypy/typeshed/stdlib/2and3/distutils/command/check.pyi mypy/typeshed/stdlib/2and3/distutils/command/clean.pyi mypy/typeshed/stdlib/2and3/distutils/command/config.pyi mypy/typeshed/stdlib/2and3/distutils/command/install.pyi mypy/typeshed/stdlib/2and3/distutils/command/install_data.pyi mypy/typeshed/stdlib/2and3/distutils/command/install_headers.pyi mypy/typeshed/stdlib/2and3/distutils/command/install_lib.pyi mypy/typeshed/stdlib/2and3/distutils/command/install_scripts.pyi mypy/typeshed/stdlib/2and3/distutils/command/register.pyi mypy/typeshed/stdlib/2and3/distutils/command/sdist.pyi mypy/typeshed/stdlib/2and3/ensurepip/__init__.pyi mypy/typeshed/stdlib/2and3/lib2to3/__init__.pyi mypy/typeshed/stdlib/2and3/lib2to3/pygram.pyi mypy/typeshed/stdlib/2and3/lib2to3/pytree.pyi mypy/typeshed/stdlib/2and3/lib2to3/pgen2/__init__.pyi mypy/typeshed/stdlib/2and3/lib2to3/pgen2/driver.pyi mypy/typeshed/stdlib/2and3/lib2to3/pgen2/grammar.pyi mypy/typeshed/stdlib/2and3/lib2to3/pgen2/literals.pyi mypy/typeshed/stdlib/2and3/lib2to3/pgen2/parse.pyi mypy/typeshed/stdlib/2and3/lib2to3/pgen2/pgen.pyi mypy/typeshed/stdlib/2and3/lib2to3/pgen2/token.pyi mypy/typeshed/stdlib/2and3/lib2to3/pgen2/tokenize.pyi mypy/typeshed/stdlib/2and3/logging/__init__.pyi mypy/typeshed/stdlib/2and3/logging/config.pyi mypy/typeshed/stdlib/2and3/logging/handlers.pyi mypy/typeshed/stdlib/2and3/pyexpat/__init__.pyi mypy/typeshed/stdlib/2and3/pyexpat/errors.pyi mypy/typeshed/stdlib/2and3/pyexpat/model.pyi mypy/typeshed/stdlib/2and3/sqlite3/__init__.pyi mypy/typeshed/stdlib/2and3/sqlite3/dbapi2.pyi mypy/typeshed/stdlib/2and3/wsgiref/__init__.pyi mypy/typeshed/stdlib/2and3/wsgiref/handlers.pyi mypy/typeshed/stdlib/2and3/wsgiref/headers.pyi mypy/typeshed/stdlib/2and3/wsgiref/simple_server.pyi mypy/typeshed/stdlib/2and3/wsgiref/types.pyi mypy/typeshed/stdlib/2and3/wsgiref/util.pyi mypy/typeshed/stdlib/2and3/wsgiref/validate.pyi mypy/typeshed/stdlib/2and3/xml/__init__.pyi 
mypy/typeshed/stdlib/2and3/xml/etree/ElementInclude.pyi mypy/typeshed/stdlib/2and3/xml/etree/ElementPath.pyi mypy/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi mypy/typeshed/stdlib/2and3/xml/etree/__init__.pyi mypy/typeshed/stdlib/2and3/xml/etree/cElementTree.pyi mypy/typeshed/stdlib/2and3/xml/parsers/__init__.pyi mypy/typeshed/stdlib/2and3/xml/parsers/expat/__init__.pyi mypy/typeshed/stdlib/2and3/xml/parsers/expat/errors.pyi mypy/typeshed/stdlib/2and3/xml/parsers/expat/model.pyi mypy/typeshed/stdlib/2and3/xml/sax/__init__.pyi mypy/typeshed/stdlib/2and3/xml/sax/handler.pyi mypy/typeshed/stdlib/2and3/xml/sax/saxutils.pyi mypy/typeshed/stdlib/2and3/xml/sax/xmlreader.pyi mypy/typeshed/stdlib/3/_ast.pyi mypy/typeshed/stdlib/3/_compression.pyi mypy/typeshed/stdlib/3/_dummy_thread.pyi mypy/typeshed/stdlib/3/_imp.pyi mypy/typeshed/stdlib/3/_importlib_modulespec.pyi mypy/typeshed/stdlib/3/_json.pyi mypy/typeshed/stdlib/3/_markupbase.pyi mypy/typeshed/stdlib/3/_operator.pyi mypy/typeshed/stdlib/3/_posixsubprocess.pyi mypy/typeshed/stdlib/3/_stat.pyi mypy/typeshed/stdlib/3/_subprocess.pyi mypy/typeshed/stdlib/3/_thread.pyi mypy/typeshed/stdlib/3/_threading_local.pyi mypy/typeshed/stdlib/3/_tracemalloc.pyi mypy/typeshed/stdlib/3/_winapi.pyi mypy/typeshed/stdlib/3/abc.pyi mypy/typeshed/stdlib/3/ast.pyi mypy/typeshed/stdlib/3/atexit.pyi mypy/typeshed/stdlib/3/compileall.pyi mypy/typeshed/stdlib/3/configparser.pyi mypy/typeshed/stdlib/3/copyreg.pyi mypy/typeshed/stdlib/3/enum.pyi mypy/typeshed/stdlib/3/faulthandler.pyi mypy/typeshed/stdlib/3/fcntl.pyi mypy/typeshed/stdlib/3/fnmatch.pyi mypy/typeshed/stdlib/3/functools.pyi mypy/typeshed/stdlib/3/gc.pyi mypy/typeshed/stdlib/3/getopt.pyi mypy/typeshed/stdlib/3/getpass.pyi mypy/typeshed/stdlib/3/gettext.pyi mypy/typeshed/stdlib/3/glob.pyi mypy/typeshed/stdlib/3/gzip.pyi mypy/typeshed/stdlib/3/hashlib.pyi mypy/typeshed/stdlib/3/heapq.pyi mypy/typeshed/stdlib/3/imp.pyi mypy/typeshed/stdlib/3/inspect.pyi mypy/typeshed/stdlib/3/io.pyi 
mypy/typeshed/stdlib/3/ipaddress.pyi mypy/typeshed/stdlib/3/itertools.pyi mypy/typeshed/stdlib/3/lzma.pyi mypy/typeshed/stdlib/3/msvcrt.pyi mypy/typeshed/stdlib/3/nntplib.pyi mypy/typeshed/stdlib/3/nturl2path.pyi mypy/typeshed/stdlib/3/pathlib.pyi mypy/typeshed/stdlib/3/pipes.pyi mypy/typeshed/stdlib/3/platform.pyi mypy/typeshed/stdlib/3/posix.pyi mypy/typeshed/stdlib/3/queue.pyi mypy/typeshed/stdlib/3/random.pyi mypy/typeshed/stdlib/3/re.pyi mypy/typeshed/stdlib/3/reprlib.pyi mypy/typeshed/stdlib/3/resource.pyi mypy/typeshed/stdlib/3/runpy.pyi mypy/typeshed/stdlib/3/selectors.pyi mypy/typeshed/stdlib/3/shelve.pyi mypy/typeshed/stdlib/3/shlex.pyi mypy/typeshed/stdlib/3/signal.pyi mypy/typeshed/stdlib/3/smtplib.pyi mypy/typeshed/stdlib/3/socketserver.pyi mypy/typeshed/stdlib/3/spwd.pyi mypy/typeshed/stdlib/3/sre_constants.pyi mypy/typeshed/stdlib/3/sre_parse.pyi mypy/typeshed/stdlib/3/stat.pyi mypy/typeshed/stdlib/3/statistics.pyi mypy/typeshed/stdlib/3/string.pyi mypy/typeshed/stdlib/3/subprocess.pyi mypy/typeshed/stdlib/3/symbol.pyi mypy/typeshed/stdlib/3/sys.pyi mypy/typeshed/stdlib/3/tempfile.pyi mypy/typeshed/stdlib/3/textwrap.pyi mypy/typeshed/stdlib/3/tokenize.pyi mypy/typeshed/stdlib/3/tracemalloc.pyi mypy/typeshed/stdlib/3/types.pyi mypy/typeshed/stdlib/3/typing.pyi mypy/typeshed/stdlib/3/zipapp.pyi mypy/typeshed/stdlib/3.6/secrets.pyi mypy/typeshed/stdlib/3.7/contextvars.pyi mypy/typeshed/stdlib/3.7/dataclasses.pyi mypy/typeshed/stdlib/3/asyncio/__init__.pyi mypy/typeshed/stdlib/3/asyncio/base_events.pyi mypy/typeshed/stdlib/3/asyncio/constants.pyi mypy/typeshed/stdlib/3/asyncio/coroutines.pyi mypy/typeshed/stdlib/3/asyncio/events.pyi mypy/typeshed/stdlib/3/asyncio/exceptions.pyi mypy/typeshed/stdlib/3/asyncio/futures.pyi mypy/typeshed/stdlib/3/asyncio/locks.pyi mypy/typeshed/stdlib/3/asyncio/proactor_events.pyi mypy/typeshed/stdlib/3/asyncio/protocols.pyi mypy/typeshed/stdlib/3/asyncio/queues.pyi mypy/typeshed/stdlib/3/asyncio/runners.pyi 
mypy/typeshed/stdlib/3/asyncio/selector_events.pyi mypy/typeshed/stdlib/3/asyncio/streams.pyi mypy/typeshed/stdlib/3/asyncio/subprocess.pyi mypy/typeshed/stdlib/3/asyncio/tasks.pyi mypy/typeshed/stdlib/3/asyncio/transports.pyi mypy/typeshed/stdlib/3/asyncio/windows_events.pyi mypy/typeshed/stdlib/3/asyncio/windows_utils.pyi mypy/typeshed/stdlib/3/collections/__init__.pyi mypy/typeshed/stdlib/3/collections/abc.pyi mypy/typeshed/stdlib/3/concurrent/__init__.pyi mypy/typeshed/stdlib/3/concurrent/futures/__init__.pyi mypy/typeshed/stdlib/3/concurrent/futures/_base.pyi mypy/typeshed/stdlib/3/concurrent/futures/process.pyi mypy/typeshed/stdlib/3/concurrent/futures/thread.pyi mypy/typeshed/stdlib/3/email/__init__.pyi mypy/typeshed/stdlib/3/email/charset.pyi mypy/typeshed/stdlib/3/email/contentmanager.pyi mypy/typeshed/stdlib/3/email/encoders.pyi mypy/typeshed/stdlib/3/email/errors.pyi mypy/typeshed/stdlib/3/email/feedparser.pyi mypy/typeshed/stdlib/3/email/generator.pyi mypy/typeshed/stdlib/3/email/header.pyi mypy/typeshed/stdlib/3/email/headerregistry.pyi mypy/typeshed/stdlib/3/email/iterators.pyi mypy/typeshed/stdlib/3/email/message.pyi mypy/typeshed/stdlib/3/email/parser.pyi mypy/typeshed/stdlib/3/email/policy.pyi mypy/typeshed/stdlib/3/email/utils.pyi mypy/typeshed/stdlib/3/email/mime/__init__.pyi mypy/typeshed/stdlib/3/email/mime/application.pyi mypy/typeshed/stdlib/3/email/mime/audio.pyi mypy/typeshed/stdlib/3/email/mime/base.pyi mypy/typeshed/stdlib/3/email/mime/image.pyi mypy/typeshed/stdlib/3/email/mime/message.pyi mypy/typeshed/stdlib/3/email/mime/multipart.pyi mypy/typeshed/stdlib/3/email/mime/nonmultipart.pyi mypy/typeshed/stdlib/3/email/mime/text.pyi mypy/typeshed/stdlib/3/encodings/__init__.pyi mypy/typeshed/stdlib/3/encodings/utf_8.pyi mypy/typeshed/stdlib/3/html/__init__.pyi mypy/typeshed/stdlib/3/html/entities.pyi mypy/typeshed/stdlib/3/html/parser.pyi mypy/typeshed/stdlib/3/http/__init__.pyi mypy/typeshed/stdlib/3/http/client.pyi 
mypy/typeshed/stdlib/3/http/cookiejar.pyi mypy/typeshed/stdlib/3/http/cookies.pyi mypy/typeshed/stdlib/3/http/server.pyi mypy/typeshed/stdlib/3/importlib/__init__.pyi mypy/typeshed/stdlib/3/importlib/abc.pyi mypy/typeshed/stdlib/3/importlib/machinery.pyi mypy/typeshed/stdlib/3/importlib/metadata.pyi mypy/typeshed/stdlib/3/importlib/resources.pyi mypy/typeshed/stdlib/3/importlib/util.pyi mypy/typeshed/stdlib/3/json/__init__.pyi mypy/typeshed/stdlib/3/json/decoder.pyi mypy/typeshed/stdlib/3/json/encoder.pyi mypy/typeshed/stdlib/3/multiprocessing/__init__.pyi mypy/typeshed/stdlib/3/multiprocessing/connection.pyi mypy/typeshed/stdlib/3/multiprocessing/context.pyi mypy/typeshed/stdlib/3/multiprocessing/managers.pyi mypy/typeshed/stdlib/3/multiprocessing/pool.pyi mypy/typeshed/stdlib/3/multiprocessing/process.pyi mypy/typeshed/stdlib/3/multiprocessing/queues.pyi mypy/typeshed/stdlib/3/multiprocessing/shared_memory.pyi mypy/typeshed/stdlib/3/multiprocessing/spawn.pyi mypy/typeshed/stdlib/3/multiprocessing/synchronize.pyi mypy/typeshed/stdlib/3/multiprocessing/dummy/__init__.pyi mypy/typeshed/stdlib/3/multiprocessing/dummy/connection.pyi mypy/typeshed/stdlib/3/os/__init__.pyi mypy/typeshed/stdlib/3/os/path.pyi mypy/typeshed/stdlib/3/tkinter/__init__.pyi mypy/typeshed/stdlib/3/tkinter/commondialog.pyi mypy/typeshed/stdlib/3/tkinter/constants.pyi mypy/typeshed/stdlib/3/tkinter/dialog.pyi mypy/typeshed/stdlib/3/tkinter/filedialog.pyi mypy/typeshed/stdlib/3/tkinter/messagebox.pyi mypy/typeshed/stdlib/3/tkinter/ttk.pyi mypy/typeshed/stdlib/3/unittest/__init__.pyi mypy/typeshed/stdlib/3/unittest/async_case.pyi mypy/typeshed/stdlib/3/unittest/case.pyi mypy/typeshed/stdlib/3/unittest/loader.pyi mypy/typeshed/stdlib/3/unittest/mock.pyi mypy/typeshed/stdlib/3/unittest/result.pyi mypy/typeshed/stdlib/3/unittest/runner.pyi mypy/typeshed/stdlib/3/unittest/signals.pyi mypy/typeshed/stdlib/3/unittest/suite.pyi mypy/typeshed/stdlib/3/unittest/util.pyi 
mypy/typeshed/stdlib/3/urllib/__init__.pyi mypy/typeshed/stdlib/3/urllib/error.pyi mypy/typeshed/stdlib/3/urllib/parse.pyi mypy/typeshed/stdlib/3/urllib/request.pyi mypy/typeshed/stdlib/3/urllib/response.pyi mypy/typeshed/stdlib/3/urllib/robotparser.pyi mypy/typeshed/tests/check_consistent.py mypy/typeshed/tests/mypy_selftest.py mypy/typeshed/tests/mypy_test.py mypy/typeshed/tests/pytype_test.py mypy/typeshed/third_party/2/enum.pyi mypy/typeshed/third_party/2/gflags.pyi mypy/typeshed/third_party/2/ipaddress.pyi mypy/typeshed/third_party/2/pathlib2.pyi mypy/typeshed/third_party/2/pymssql.pyi mypy/typeshed/third_party/2/OpenSSL/__init__.pyi mypy/typeshed/third_party/2/OpenSSL/crypto.pyi mypy/typeshed/third_party/2/concurrent/__init__.pyi mypy/typeshed/third_party/2/concurrent/futures/__init__.pyi mypy/typeshed/third_party/2/concurrent/futures/_base.pyi mypy/typeshed/third_party/2/concurrent/futures/process.pyi mypy/typeshed/third_party/2/concurrent/futures/thread.pyi mypy/typeshed/third_party/2/fb303/FacebookService.pyi mypy/typeshed/third_party/2/fb303/__init__.pyi mypy/typeshed/third_party/2/kazoo/__init__.pyi mypy/typeshed/third_party/2/kazoo/client.pyi mypy/typeshed/third_party/2/kazoo/exceptions.pyi mypy/typeshed/third_party/2/kazoo/recipe/__init__.pyi mypy/typeshed/third_party/2/kazoo/recipe/watchers.pyi mypy/typeshed/third_party/2/routes/__init__.pyi mypy/typeshed/third_party/2/routes/mapper.pyi mypy/typeshed/third_party/2/routes/util.pyi mypy/typeshed/third_party/2/scribe/__init__.pyi mypy/typeshed/third_party/2/scribe/scribe.pyi mypy/typeshed/third_party/2/scribe/ttypes.pyi mypy/typeshed/third_party/2/six/__init__.pyi mypy/typeshed/third_party/2/six/moves/BaseHTTPServer.pyi mypy/typeshed/third_party/2/six/moves/CGIHTTPServer.pyi mypy/typeshed/third_party/2/six/moves/SimpleHTTPServer.pyi mypy/typeshed/third_party/2/six/moves/__init__.pyi mypy/typeshed/third_party/2/six/moves/_dummy_thread.pyi mypy/typeshed/third_party/2/six/moves/_thread.pyi 
mypy/typeshed/third_party/2/six/moves/cPickle.pyi mypy/typeshed/third_party/2/six/moves/configparser.pyi mypy/typeshed/third_party/2/six/moves/email_mime_base.pyi mypy/typeshed/third_party/2/six/moves/email_mime_multipart.pyi mypy/typeshed/third_party/2/six/moves/email_mime_nonmultipart.pyi mypy/typeshed/third_party/2/six/moves/email_mime_text.pyi mypy/typeshed/third_party/2/six/moves/html_entities.pyi mypy/typeshed/third_party/2/six/moves/html_parser.pyi mypy/typeshed/third_party/2/six/moves/http_client.pyi mypy/typeshed/third_party/2/six/moves/http_cookiejar.pyi mypy/typeshed/third_party/2/six/moves/http_cookies.pyi mypy/typeshed/third_party/2/six/moves/queue.pyi mypy/typeshed/third_party/2/six/moves/reprlib.pyi mypy/typeshed/third_party/2/six/moves/socketserver.pyi mypy/typeshed/third_party/2/six/moves/urllib_error.pyi mypy/typeshed/third_party/2/six/moves/urllib_parse.pyi mypy/typeshed/third_party/2/six/moves/urllib_request.pyi mypy/typeshed/third_party/2/six/moves/urllib_response.pyi mypy/typeshed/third_party/2/six/moves/urllib_robotparser.pyi mypy/typeshed/third_party/2/six/moves/xmlrpc_client.pyi mypy/typeshed/third_party/2/six/moves/urllib/__init__.pyi mypy/typeshed/third_party/2/six/moves/urllib/error.pyi mypy/typeshed/third_party/2/six/moves/urllib/parse.pyi mypy/typeshed/third_party/2/six/moves/urllib/request.pyi mypy/typeshed/third_party/2/six/moves/urllib/response.pyi mypy/typeshed/third_party/2/six/moves/urllib/robotparser.pyi mypy/typeshed/third_party/2/tornado/__init__.pyi mypy/typeshed/third_party/2/tornado/concurrent.pyi mypy/typeshed/third_party/2/tornado/gen.pyi mypy/typeshed/third_party/2/tornado/httpclient.pyi mypy/typeshed/third_party/2/tornado/httpserver.pyi mypy/typeshed/third_party/2/tornado/httputil.pyi mypy/typeshed/third_party/2/tornado/ioloop.pyi mypy/typeshed/third_party/2/tornado/locks.pyi mypy/typeshed/third_party/2/tornado/netutil.pyi mypy/typeshed/third_party/2/tornado/process.pyi mypy/typeshed/third_party/2/tornado/tcpserver.pyi 
mypy/typeshed/third_party/2/tornado/testing.pyi mypy/typeshed/third_party/2/tornado/util.pyi mypy/typeshed/third_party/2/tornado/web.pyi mypy/typeshed/third_party/2and3/backports_abc.pyi mypy/typeshed/third_party/2and3/certifi.pyi mypy/typeshed/third_party/2and3/croniter.pyi mypy/typeshed/third_party/2and3/decorator.pyi mypy/typeshed/third_party/2and3/emoji.pyi mypy/typeshed/third_party/2and3/first.pyi mypy/typeshed/third_party/2and3/itsdangerous.pyi mypy/typeshed/third_party/2and3/mock.pyi mypy/typeshed/third_party/2and3/mypy_extensions.pyi mypy/typeshed/third_party/2and3/pycurl.pyi mypy/typeshed/third_party/2and3/pyre_extensions.pyi mypy/typeshed/third_party/2and3/singledispatch.pyi mypy/typeshed/third_party/2and3/tabulate.pyi mypy/typeshed/third_party/2and3/termcolor.pyi mypy/typeshed/third_party/2and3/toml.pyi mypy/typeshed/third_party/2and3/typing_extensions.pyi mypy/typeshed/third_party/2and3/ujson.pyi mypy/typeshed/third_party/2and3/Crypto/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/pct_warnings.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/AES.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/ARC2.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/ARC4.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/Blowfish.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/CAST.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/DES.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/DES3.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/PKCS1_OAEP.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/PKCS1_v1_5.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/XOR.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/Cipher/blockalgo.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/HMAC.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/MD2.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/MD4.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/MD5.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/RIPEMD.pyi 
mypy/typeshed/third_party/2and3/Crypto/Hash/SHA.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/SHA224.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/SHA256.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/SHA384.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/SHA512.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/Hash/hashalgo.pyi mypy/typeshed/third_party/2and3/Crypto/Protocol/AllOrNothing.pyi mypy/typeshed/third_party/2and3/Crypto/Protocol/Chaffing.pyi mypy/typeshed/third_party/2and3/Crypto/Protocol/KDF.pyi mypy/typeshed/third_party/2and3/Crypto/Protocol/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/PublicKey/DSA.pyi mypy/typeshed/third_party/2and3/Crypto/PublicKey/ElGamal.pyi mypy/typeshed/third_party/2and3/Crypto/PublicKey/RSA.pyi mypy/typeshed/third_party/2and3/Crypto/PublicKey/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/PublicKey/pubkey.pyi mypy/typeshed/third_party/2and3/Crypto/Random/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/Random/random.pyi mypy/typeshed/third_party/2and3/Crypto/Random/Fortuna/FortunaAccumulator.pyi mypy/typeshed/third_party/2and3/Crypto/Random/Fortuna/FortunaGenerator.pyi mypy/typeshed/third_party/2and3/Crypto/Random/Fortuna/SHAd256.pyi mypy/typeshed/third_party/2and3/Crypto/Random/Fortuna/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/Random/OSRNG/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/Random/OSRNG/fallback.pyi mypy/typeshed/third_party/2and3/Crypto/Random/OSRNG/posix.pyi mypy/typeshed/third_party/2and3/Crypto/Random/OSRNG/rng_base.pyi mypy/typeshed/third_party/2and3/Crypto/Signature/PKCS1_PSS.pyi mypy/typeshed/third_party/2and3/Crypto/Signature/PKCS1_v1_5.pyi mypy/typeshed/third_party/2and3/Crypto/Signature/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/Util/Counter.pyi mypy/typeshed/third_party/2and3/Crypto/Util/RFC1751.pyi mypy/typeshed/third_party/2and3/Crypto/Util/__init__.pyi mypy/typeshed/third_party/2and3/Crypto/Util/asn1.pyi 
mypy/typeshed/third_party/2and3/Crypto/Util/number.pyi mypy/typeshed/third_party/2and3/Crypto/Util/randpool.pyi mypy/typeshed/third_party/2and3/Crypto/Util/strxor.pyi mypy/typeshed/third_party/2and3/atomicwrites/__init__.pyi mypy/typeshed/third_party/2and3/attr/__init__.pyi mypy/typeshed/third_party/2and3/attr/converters.pyi mypy/typeshed/third_party/2and3/attr/exceptions.pyi mypy/typeshed/third_party/2and3/attr/filters.pyi mypy/typeshed/third_party/2and3/attr/validators.pyi mypy/typeshed/third_party/2and3/backports/__init__.pyi mypy/typeshed/third_party/2and3/backports/ssl_match_hostname.pyi mypy/typeshed/third_party/2and3/bleach/__init__.pyi mypy/typeshed/third_party/2and3/bleach/callbacks.pyi mypy/typeshed/third_party/2and3/bleach/linkifier.pyi mypy/typeshed/third_party/2and3/bleach/sanitizer.pyi mypy/typeshed/third_party/2and3/bleach/utils.pyi mypy/typeshed/third_party/2and3/boto/__init__.pyi mypy/typeshed/third_party/2and3/boto/auth.pyi mypy/typeshed/third_party/2and3/boto/auth_handler.pyi mypy/typeshed/third_party/2and3/boto/compat.pyi mypy/typeshed/third_party/2and3/boto/connection.pyi mypy/typeshed/third_party/2and3/boto/exception.pyi mypy/typeshed/third_party/2and3/boto/plugin.pyi mypy/typeshed/third_party/2and3/boto/regioninfo.pyi mypy/typeshed/third_party/2and3/boto/utils.pyi mypy/typeshed/third_party/2and3/boto/ec2/__init__.pyi mypy/typeshed/third_party/2and3/boto/elb/__init__.pyi mypy/typeshed/third_party/2and3/boto/kms/__init__.pyi mypy/typeshed/third_party/2and3/boto/kms/exceptions.pyi mypy/typeshed/third_party/2and3/boto/kms/layer1.pyi mypy/typeshed/third_party/2and3/boto/s3/__init__.pyi mypy/typeshed/third_party/2and3/boto/s3/acl.pyi mypy/typeshed/third_party/2and3/boto/s3/bucket.pyi mypy/typeshed/third_party/2and3/boto/s3/bucketlistresultset.pyi mypy/typeshed/third_party/2and3/boto/s3/bucketlogging.pyi mypy/typeshed/third_party/2and3/boto/s3/connection.pyi mypy/typeshed/third_party/2and3/boto/s3/cors.pyi 
mypy/typeshed/third_party/2and3/boto/s3/deletemarker.pyi mypy/typeshed/third_party/2and3/boto/s3/key.pyi mypy/typeshed/third_party/2and3/boto/s3/keyfile.pyi mypy/typeshed/third_party/2and3/boto/s3/lifecycle.pyi mypy/typeshed/third_party/2and3/boto/s3/multidelete.pyi mypy/typeshed/third_party/2and3/boto/s3/multipart.pyi mypy/typeshed/third_party/2and3/boto/s3/prefix.pyi mypy/typeshed/third_party/2and3/boto/s3/tagging.pyi mypy/typeshed/third_party/2and3/boto/s3/user.pyi mypy/typeshed/third_party/2and3/boto/s3/website.pyi mypy/typeshed/third_party/2and3/characteristic/__init__.pyi mypy/typeshed/third_party/2and3/click/__init__.pyi mypy/typeshed/third_party/2and3/click/_termui_impl.pyi mypy/typeshed/third_party/2and3/click/core.pyi mypy/typeshed/third_party/2and3/click/decorators.pyi mypy/typeshed/third_party/2and3/click/exceptions.pyi mypy/typeshed/third_party/2and3/click/formatting.pyi mypy/typeshed/third_party/2and3/click/globals.pyi mypy/typeshed/third_party/2and3/click/parser.pyi mypy/typeshed/third_party/2and3/click/termui.pyi mypy/typeshed/third_party/2and3/click/testing.pyi mypy/typeshed/third_party/2and3/click/types.pyi mypy/typeshed/third_party/2and3/click/utils.pyi mypy/typeshed/third_party/2and3/cryptography/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/exceptions.pyi mypy/typeshed/third_party/2and3/cryptography/fernet.pyi mypy/typeshed/third_party/2and3/cryptography/x509.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/backends/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/backends/interfaces.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/bindings/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/bindings/openssl/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/bindings/openssl/binding.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/__init__.pyi 
mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/cmac.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/constant_time.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/hashes.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/hmac.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/keywrap.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/padding.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/poly1305.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/dh.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/dsa.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/ec.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/ed25519.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/ed448.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/padding.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/rsa.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/utils.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/x25519.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/asymmetric/x448.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/ciphers/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/ciphers/aead.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/ciphers/algorithms.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/ciphers/modes.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/concatkdf.pyi 
mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/hkdf.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/kbkdf.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/pbkdf2.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/scrypt.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/kdf/x963kdf.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/serialization/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/serialization/pkcs12.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/twofactor/__init__.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/twofactor/hotp.pyi mypy/typeshed/third_party/2and3/cryptography/hazmat/primitives/twofactor/totp.pyi mypy/typeshed/third_party/2and3/dateutil/__init__.pyi mypy/typeshed/third_party/2and3/dateutil/_common.pyi mypy/typeshed/third_party/2and3/dateutil/parser.pyi mypy/typeshed/third_party/2and3/dateutil/relativedelta.pyi mypy/typeshed/third_party/2and3/dateutil/rrule.pyi mypy/typeshed/third_party/2and3/dateutil/utils.pyi mypy/typeshed/third_party/2and3/dateutil/tz/__init__.pyi mypy/typeshed/third_party/2and3/dateutil/tz/_common.pyi mypy/typeshed/third_party/2and3/dateutil/tz/tz.pyi mypy/typeshed/third_party/2and3/flask/__init__.pyi mypy/typeshed/third_party/2and3/flask/app.pyi mypy/typeshed/third_party/2and3/flask/blueprints.pyi mypy/typeshed/third_party/2and3/flask/cli.pyi mypy/typeshed/third_party/2and3/flask/config.pyi mypy/typeshed/third_party/2and3/flask/ctx.pyi mypy/typeshed/third_party/2and3/flask/debughelpers.pyi mypy/typeshed/third_party/2and3/flask/globals.pyi mypy/typeshed/third_party/2and3/flask/helpers.pyi mypy/typeshed/third_party/2and3/flask/logging.pyi mypy/typeshed/third_party/2and3/flask/sessions.pyi mypy/typeshed/third_party/2and3/flask/signals.pyi mypy/typeshed/third_party/2and3/flask/templating.pyi mypy/typeshed/third_party/2and3/flask/testing.pyi 
mypy/typeshed/third_party/2and3/flask/views.pyi mypy/typeshed/third_party/2and3/flask/wrappers.pyi mypy/typeshed/third_party/2and3/flask/json/__init__.pyi mypy/typeshed/third_party/2and3/flask/json/tag.pyi mypy/typeshed/third_party/2and3/geoip2/__init__.pyi mypy/typeshed/third_party/2and3/geoip2/database.pyi mypy/typeshed/third_party/2and3/geoip2/errors.pyi mypy/typeshed/third_party/2and3/geoip2/mixins.pyi mypy/typeshed/third_party/2and3/geoip2/models.pyi mypy/typeshed/third_party/2and3/geoip2/records.pyi mypy/typeshed/third_party/2and3/google/__init__.pyi mypy/typeshed/third_party/2and3/google/protobuf/__init__.pyi mypy/typeshed/third_party/2and3/google/protobuf/any_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/any_test_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/api_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/descriptor.pyi mypy/typeshed/third_party/2and3/google/protobuf/descriptor_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/descriptor_pool.pyi mypy/typeshed/third_party/2and3/google/protobuf/duration_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/empty_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/field_mask_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/json_format.pyi mypy/typeshed/third_party/2and3/google/protobuf/map_proto2_unittest_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/map_unittest_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/message.pyi mypy/typeshed/third_party/2and3/google/protobuf/message_factory.pyi mypy/typeshed/third_party/2and3/google/protobuf/reflection.pyi mypy/typeshed/third_party/2and3/google/protobuf/service.pyi mypy/typeshed/third_party/2and3/google/protobuf/source_context_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/struct_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/symbol_database.pyi mypy/typeshed/third_party/2and3/google/protobuf/test_messages_proto2_pb2.pyi 
mypy/typeshed/third_party/2and3/google/protobuf/test_messages_proto3_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/timestamp_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/type_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_arena_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_custom_options_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_import_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_import_public_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_mset_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_mset_wire_format_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_no_arena_import_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_no_arena_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_no_generic_services_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/unittest_proto3_arena_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/wrappers_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/compiler/__init__.pyi mypy/typeshed/third_party/2and3/google/protobuf/compiler/plugin_pb2.pyi mypy/typeshed/third_party/2and3/google/protobuf/internal/__init__.pyi mypy/typeshed/third_party/2and3/google/protobuf/internal/containers.pyi mypy/typeshed/third_party/2and3/google/protobuf/internal/decoder.pyi mypy/typeshed/third_party/2and3/google/protobuf/internal/encoder.pyi mypy/typeshed/third_party/2and3/google/protobuf/internal/enum_type_wrapper.pyi mypy/typeshed/third_party/2and3/google/protobuf/internal/message_listener.pyi mypy/typeshed/third_party/2and3/google/protobuf/internal/well_known_types.pyi mypy/typeshed/third_party/2and3/google/protobuf/internal/wire_format.pyi mypy/typeshed/third_party/2and3/google/protobuf/util/__init__.pyi mypy/typeshed/third_party/2and3/google/protobuf/util/json_format_proto3_pb2.pyi 
mypy/typeshed/third_party/2and3/jinja2/__init__.pyi mypy/typeshed/third_party/2and3/jinja2/_compat.pyi mypy/typeshed/third_party/2and3/jinja2/_stringdefs.pyi mypy/typeshed/third_party/2and3/jinja2/bccache.pyi mypy/typeshed/third_party/2and3/jinja2/compiler.pyi mypy/typeshed/third_party/2and3/jinja2/constants.pyi mypy/typeshed/third_party/2and3/jinja2/debug.pyi mypy/typeshed/third_party/2and3/jinja2/defaults.pyi mypy/typeshed/third_party/2and3/jinja2/environment.pyi mypy/typeshed/third_party/2and3/jinja2/exceptions.pyi mypy/typeshed/third_party/2and3/jinja2/ext.pyi mypy/typeshed/third_party/2and3/jinja2/filters.pyi mypy/typeshed/third_party/2and3/jinja2/lexer.pyi mypy/typeshed/third_party/2and3/jinja2/loaders.pyi mypy/typeshed/third_party/2and3/jinja2/meta.pyi mypy/typeshed/third_party/2and3/jinja2/nodes.pyi mypy/typeshed/third_party/2and3/jinja2/optimizer.pyi mypy/typeshed/third_party/2and3/jinja2/parser.pyi mypy/typeshed/third_party/2and3/jinja2/runtime.pyi mypy/typeshed/third_party/2and3/jinja2/sandbox.pyi mypy/typeshed/third_party/2and3/jinja2/tests.pyi mypy/typeshed/third_party/2and3/jinja2/utils.pyi mypy/typeshed/third_party/2and3/jinja2/visitor.pyi mypy/typeshed/third_party/2and3/markupsafe/__init__.pyi mypy/typeshed/third_party/2and3/markupsafe/_compat.pyi mypy/typeshed/third_party/2and3/markupsafe/_constants.pyi mypy/typeshed/third_party/2and3/markupsafe/_native.pyi mypy/typeshed/third_party/2and3/markupsafe/_speedups.pyi mypy/typeshed/third_party/2and3/maxminddb/__init__.pyi mypy/typeshed/third_party/2and3/maxminddb/compat.pyi mypy/typeshed/third_party/2and3/maxminddb/const.pyi mypy/typeshed/third_party/2and3/maxminddb/decoder.pyi mypy/typeshed/third_party/2and3/maxminddb/errors.pyi mypy/typeshed/third_party/2and3/maxminddb/extension.pyi mypy/typeshed/third_party/2and3/maxminddb/reader.pyi mypy/typeshed/third_party/2and3/pymysql/__init__.pyi mypy/typeshed/third_party/2and3/pymysql/charset.pyi mypy/typeshed/third_party/2and3/pymysql/connections.pyi 
mypy/typeshed/third_party/2and3/pymysql/converters.pyi mypy/typeshed/third_party/2and3/pymysql/cursors.pyi mypy/typeshed/third_party/2and3/pymysql/err.pyi mypy/typeshed/third_party/2and3/pymysql/times.pyi mypy/typeshed/third_party/2and3/pymysql/util.pyi mypy/typeshed/third_party/2and3/pymysql/constants/CLIENT.pyi mypy/typeshed/third_party/2and3/pymysql/constants/COMMAND.pyi mypy/typeshed/third_party/2and3/pymysql/constants/ER.pyi mypy/typeshed/third_party/2and3/pymysql/constants/FIELD_TYPE.pyi mypy/typeshed/third_party/2and3/pymysql/constants/FLAG.pyi mypy/typeshed/third_party/2and3/pymysql/constants/SERVER_STATUS.pyi mypy/typeshed/third_party/2and3/pymysql/constants/__init__.pyi mypy/typeshed/third_party/2and3/pynamodb/__init__.pyi mypy/typeshed/third_party/2and3/pynamodb/attributes.pyi mypy/typeshed/third_party/2and3/pynamodb/constants.pyi mypy/typeshed/third_party/2and3/pynamodb/exceptions.pyi mypy/typeshed/third_party/2and3/pynamodb/indexes.pyi mypy/typeshed/third_party/2and3/pynamodb/models.pyi mypy/typeshed/third_party/2and3/pynamodb/settings.pyi mypy/typeshed/third_party/2and3/pynamodb/throttle.pyi mypy/typeshed/third_party/2and3/pynamodb/types.pyi mypy/typeshed/third_party/2and3/pynamodb/connection/__init__.pyi mypy/typeshed/third_party/2and3/pynamodb/connection/base.pyi mypy/typeshed/third_party/2and3/pynamodb/connection/table.pyi mypy/typeshed/third_party/2and3/pynamodb/connection/util.pyi mypy/typeshed/third_party/2and3/pytz/__init__.pyi mypy/typeshed/third_party/2and3/redis/__init__.pyi mypy/typeshed/third_party/2and3/redis/client.pyi mypy/typeshed/third_party/2and3/redis/connection.pyi mypy/typeshed/third_party/2and3/redis/exceptions.pyi mypy/typeshed/third_party/2and3/redis/utils.pyi mypy/typeshed/third_party/2and3/requests/__init__.pyi mypy/typeshed/third_party/2and3/requests/adapters.pyi mypy/typeshed/third_party/2and3/requests/api.pyi mypy/typeshed/third_party/2and3/requests/auth.pyi mypy/typeshed/third_party/2and3/requests/compat.pyi 
mypy/typeshed/third_party/2and3/requests/cookies.pyi mypy/typeshed/third_party/2and3/requests/exceptions.pyi mypy/typeshed/third_party/2and3/requests/hooks.pyi mypy/typeshed/third_party/2and3/requests/models.pyi mypy/typeshed/third_party/2and3/requests/sessions.pyi mypy/typeshed/third_party/2and3/requests/status_codes.pyi mypy/typeshed/third_party/2and3/requests/structures.pyi mypy/typeshed/third_party/2and3/requests/utils.pyi mypy/typeshed/third_party/2and3/requests/packages/__init__.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/__init__.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/_collections.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/connection.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/connectionpool.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/exceptions.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/fields.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/filepost.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/poolmanager.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/request.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/response.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/contrib/__init__.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/__init__.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/__init__.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/connection.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/request.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/response.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/retry.pyi 
mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/ssl_.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/timeout.pyi mypy/typeshed/third_party/2and3/requests/packages/urllib3/util/url.pyi mypy/typeshed/third_party/2and3/simplejson/__init__.pyi mypy/typeshed/third_party/2and3/simplejson/decoder.pyi mypy/typeshed/third_party/2and3/simplejson/encoder.pyi mypy/typeshed/third_party/2and3/simplejson/scanner.pyi mypy/typeshed/third_party/2and3/werkzeug/__init__.pyi mypy/typeshed/third_party/2and3/werkzeug/_compat.pyi mypy/typeshed/third_party/2and3/werkzeug/_internal.pyi mypy/typeshed/third_party/2and3/werkzeug/_reloader.pyi mypy/typeshed/third_party/2and3/werkzeug/datastructures.pyi mypy/typeshed/third_party/2and3/werkzeug/exceptions.pyi mypy/typeshed/third_party/2and3/werkzeug/filesystem.pyi mypy/typeshed/third_party/2and3/werkzeug/formparser.pyi mypy/typeshed/third_party/2and3/werkzeug/http.pyi mypy/typeshed/third_party/2and3/werkzeug/local.pyi mypy/typeshed/third_party/2and3/werkzeug/posixemulation.pyi mypy/typeshed/third_party/2and3/werkzeug/routing.pyi mypy/typeshed/third_party/2and3/werkzeug/script.pyi mypy/typeshed/third_party/2and3/werkzeug/security.pyi mypy/typeshed/third_party/2and3/werkzeug/serving.pyi mypy/typeshed/third_party/2and3/werkzeug/test.pyi mypy/typeshed/third_party/2and3/werkzeug/testapp.pyi mypy/typeshed/third_party/2and3/werkzeug/urls.pyi mypy/typeshed/third_party/2and3/werkzeug/useragents.pyi mypy/typeshed/third_party/2and3/werkzeug/utils.pyi mypy/typeshed/third_party/2and3/werkzeug/wrappers.pyi mypy/typeshed/third_party/2and3/werkzeug/wsgi.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/__init__.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/atom.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/cache.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/fixers.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/iterio.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/jsrouting.pyi 
mypy/typeshed/third_party/2and3/werkzeug/contrib/limiter.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/lint.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/profiler.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/securecookie.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/sessions.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/testtools.pyi mypy/typeshed/third_party/2and3/werkzeug/contrib/wrappers.pyi mypy/typeshed/third_party/2and3/werkzeug/debug/__init__.pyi mypy/typeshed/third_party/2and3/werkzeug/debug/console.pyi mypy/typeshed/third_party/2and3/werkzeug/debug/repr.pyi mypy/typeshed/third_party/2and3/werkzeug/debug/tbtools.pyi mypy/typeshed/third_party/2and3/werkzeug/middleware/__init__.pyi mypy/typeshed/third_party/2and3/werkzeug/middleware/dispatcher.pyi mypy/typeshed/third_party/2and3/werkzeug/middleware/http_proxy.pyi mypy/typeshed/third_party/2and3/werkzeug/middleware/lint.pyi mypy/typeshed/third_party/2and3/werkzeug/middleware/profiler.pyi mypy/typeshed/third_party/2and3/werkzeug/middleware/proxy_fix.pyi mypy/typeshed/third_party/2and3/werkzeug/middleware/shared_data.pyi mypy/typeshed/third_party/2and3/yaml/__init__.pyi mypy/typeshed/third_party/2and3/yaml/composer.pyi mypy/typeshed/third_party/2and3/yaml/constructor.pyi mypy/typeshed/third_party/2and3/yaml/cyaml.pyi mypy/typeshed/third_party/2and3/yaml/dumper.pyi mypy/typeshed/third_party/2and3/yaml/emitter.pyi mypy/typeshed/third_party/2and3/yaml/error.pyi mypy/typeshed/third_party/2and3/yaml/events.pyi mypy/typeshed/third_party/2and3/yaml/loader.pyi mypy/typeshed/third_party/2and3/yaml/nodes.pyi mypy/typeshed/third_party/2and3/yaml/parser.pyi mypy/typeshed/third_party/2and3/yaml/reader.pyi mypy/typeshed/third_party/2and3/yaml/representer.pyi mypy/typeshed/third_party/2and3/yaml/resolver.pyi mypy/typeshed/third_party/2and3/yaml/scanner.pyi mypy/typeshed/third_party/2and3/yaml/serializer.pyi mypy/typeshed/third_party/2and3/yaml/tokens.pyi 
mypy/typeshed/third_party/3/contextvars.pyi mypy/typeshed/third_party/3/dataclasses.pyi mypy/typeshed/third_party/3/orjson.pyi mypy/typeshed/third_party/3/docutils/__init__.pyi mypy/typeshed/third_party/3/docutils/examples.pyi mypy/typeshed/third_party/3/docutils/nodes.pyi mypy/typeshed/third_party/3/docutils/parsers/__init__.pyi mypy/typeshed/third_party/3/docutils/parsers/rst/__init__.pyi mypy/typeshed/third_party/3/docutils/parsers/rst/nodes.pyi mypy/typeshed/third_party/3/docutils/parsers/rst/roles.pyi mypy/typeshed/third_party/3/docutils/parsers/rst/states.pyi mypy/typeshed/third_party/3/jwt/__init__.pyi mypy/typeshed/third_party/3/jwt/algorithms.pyi mypy/typeshed/third_party/3/jwt/contrib/__init__.pyi mypy/typeshed/third_party/3/jwt/contrib/algorithms/__init__.pyi mypy/typeshed/third_party/3/jwt/contrib/algorithms/py_ecdsa.pyi mypy/typeshed/third_party/3/jwt/contrib/algorithms/pycrypto.pyi mypy/typeshed/third_party/3/pkg_resources/__init__.pyi mypy/typeshed/third_party/3/pkg_resources/py31compat.pyi mypy/typeshed/third_party/3/six/__init__.pyi mypy/typeshed/third_party/3/six/moves/BaseHTTPServer.pyi mypy/typeshed/third_party/3/six/moves/CGIHTTPServer.pyi mypy/typeshed/third_party/3/six/moves/SimpleHTTPServer.pyi mypy/typeshed/third_party/3/six/moves/__init__.pyi mypy/typeshed/third_party/3/six/moves/_dummy_thread.pyi mypy/typeshed/third_party/3/six/moves/_thread.pyi mypy/typeshed/third_party/3/six/moves/builtins.pyi mypy/typeshed/third_party/3/six/moves/cPickle.pyi mypy/typeshed/third_party/3/six/moves/configparser.pyi mypy/typeshed/third_party/3/six/moves/email_mime_base.pyi mypy/typeshed/third_party/3/six/moves/email_mime_multipart.pyi mypy/typeshed/third_party/3/six/moves/email_mime_nonmultipart.pyi mypy/typeshed/third_party/3/six/moves/email_mime_text.pyi mypy/typeshed/third_party/3/six/moves/html_entities.pyi mypy/typeshed/third_party/3/six/moves/html_parser.pyi mypy/typeshed/third_party/3/six/moves/http_client.pyi 
mypy/typeshed/third_party/3/six/moves/http_cookiejar.pyi mypy/typeshed/third_party/3/six/moves/http_cookies.pyi mypy/typeshed/third_party/3/six/moves/queue.pyi mypy/typeshed/third_party/3/six/moves/reprlib.pyi mypy/typeshed/third_party/3/six/moves/socketserver.pyi mypy/typeshed/third_party/3/six/moves/tkinter.pyi mypy/typeshed/third_party/3/six/moves/tkinter_commondialog.pyi mypy/typeshed/third_party/3/six/moves/tkinter_constants.pyi mypy/typeshed/third_party/3/six/moves/tkinter_dialog.pyi mypy/typeshed/third_party/3/six/moves/tkinter_filedialog.pyi mypy/typeshed/third_party/3/six/moves/tkinter_tkfiledialog.pyi mypy/typeshed/third_party/3/six/moves/tkinter_ttk.pyi mypy/typeshed/third_party/3/six/moves/urllib_error.pyi mypy/typeshed/third_party/3/six/moves/urllib_parse.pyi mypy/typeshed/third_party/3/six/moves/urllib_request.pyi mypy/typeshed/third_party/3/six/moves/urllib_response.pyi mypy/typeshed/third_party/3/six/moves/urllib_robotparser.pyi mypy/typeshed/third_party/3/six/moves/urllib/__init__.pyi mypy/typeshed/third_party/3/six/moves/urllib/error.pyi mypy/typeshed/third_party/3/six/moves/urllib/parse.pyi mypy/typeshed/third_party/3/six/moves/urllib/request.pyi mypy/typeshed/third_party/3/six/moves/urllib/response.pyi mypy/typeshed/third_party/3/six/moves/urllib/robotparser.pyi mypy/typeshed/third_party/3/typed_ast/__init__.pyi mypy/typeshed/third_party/3/typed_ast/ast27.pyi mypy/typeshed/third_party/3/typed_ast/ast3.pyi mypy/typeshed/third_party/3/typed_ast/conversions.pyi mypy/xml/mypy-html.css mypy/xml/mypy-html.xslt mypy/xml/mypy-txt.xslt mypy/xml/mypy.xsd mypyc/__init__.py mypyc/analysis.py mypyc/build.py mypyc/common.py mypyc/crash.py mypyc/cstring.py mypyc/emit.py mypyc/emitclass.py mypyc/emitfunc.py mypyc/emitmodule.py mypyc/emitwrapper.py mypyc/errors.py mypyc/exceptions.py mypyc/genops.py mypyc/genops_for.py mypyc/namegen.py mypyc/ops.py mypyc/ops_dict.py mypyc/ops_exc.py mypyc/ops_int.py mypyc/ops_list.py mypyc/ops_misc.py mypyc/ops_primitive.py 
mypyc/ops_set.py mypyc/ops_str.py mypyc/ops_tuple.py mypyc/options.py mypyc/prebuildvisitor.py mypyc/refcount.py mypyc/rt_subtype.py mypyc/sametype.py mypyc/subtype.py mypyc/uninit.py mypyc/lib-rt/CPy.c mypyc/lib-rt/CPy.h mypyc/lib-rt/getargs.c mypyc/lib-rt/module_shim.tmpl mypyc/lib-rt/mypyc_util.h mypyc/lib-rt/pythonsupport.h mypyc/test/__init__.py mypyc/test/config.py mypyc/test/test_analysis.py mypyc/test/test_commandline.py mypyc/test/test_emit.py mypyc/test/test_emitfunc.py mypyc/test/test_emitwrapper.py mypyc/test/test_exceptions.py mypyc/test/test_external.py mypyc/test/test_genops.py mypyc/test/test_namegen.py mypyc/test/test_refcount.py mypyc/test/test_run.py mypyc/test/test_serialization.py mypyc/test/test_tuplename.py mypyc/test/testutil.py scripts/dumpmodule.py scripts/find_type.py scripts/mypyc scripts/stubtest.py test-data/.flake8 test-data/packages/modulefinder/readme.txt test-data/packages/modulefinder/nsx-pkg1/nsx/a/__init__.py test-data/packages/modulefinder/nsx-pkg2/nsx/b/__init__.py test-data/packages/modulefinder/nsx-pkg3/nsx/c/c.py test-data/packages/modulefinder/nsy-pkg1/nsy/a/__init__.py test-data/packages/modulefinder/nsy-pkg1/nsy/a/__init__.pyi test-data/packages/modulefinder/nsy-pkg2/nsy/b.pyi test-data/packages/modulefinder/nsy-pkg2/nsy/c.py test-data/packages/modulefinder/nsy-pkg2/nsy/c.pyi test-data/packages/modulefinder/nsy-pkg2/nsy/b/__init__.py test-data/packages/modulefinder/pkg1/a.py test-data/packages/modulefinder/pkg2/b/__init__.py test-data/packages/typedpkg/setup.py test-data/packages/typedpkg-stubs/setup.py test-data/packages/typedpkg-stubs/typedpkg-stubs/__init__.pyi test-data/packages/typedpkg-stubs/typedpkg-stubs/py.typed test-data/packages/typedpkg-stubs/typedpkg-stubs/sample.pyi test-data/packages/typedpkg/typedpkg/__init__.py test-data/packages/typedpkg/typedpkg/dne.py test-data/packages/typedpkg/typedpkg/py.typed test-data/packages/typedpkg/typedpkg/sample.py test-data/packages/typedpkg/typedpkg/pkg/__init__.py 
test-data/packages/typedpkg/typedpkg/pkg/aaa.py test-data/packages/typedpkg/typedpkg/pkg/py.typed test-data/packages/typedpkg_ns/setup.py test-data/packages/typedpkg_ns/typedpkg_ns/__init__.py test-data/packages/typedpkg_ns/typedpkg_ns/ns/__init__.py test-data/packages/typedpkg_ns/typedpkg_ns/ns/bbb.py test-data/packages/typedpkg_ns/typedpkg_ns/ns/py.typed test-data/samples/bottles.py test-data/samples/class.py test-data/samples/cmdline.py test-data/samples/crawl.py test-data/samples/crawl2.py test-data/samples/dict.py test-data/samples/fib.py test-data/samples/files.py test-data/samples/for.py test-data/samples/generators.py test-data/samples/greet.py test-data/samples/guess.py test-data/samples/hello.py test-data/samples/input.py test-data/samples/itertool.py test-data/samples/readme.txt test-data/samples/regexp.py test-data/stdlib-samples/3.2/base64.py test-data/stdlib-samples/3.2/fnmatch.py test-data/stdlib-samples/3.2/genericpath.py test-data/stdlib-samples/3.2/getopt.py test-data/stdlib-samples/3.2/glob.py test-data/stdlib-samples/3.2/posixpath.py test-data/stdlib-samples/3.2/pprint.py test-data/stdlib-samples/3.2/random.py test-data/stdlib-samples/3.2/shutil.py test-data/stdlib-samples/3.2/subprocess.py test-data/stdlib-samples/3.2/tempfile.py test-data/stdlib-samples/3.2/textwrap.py test-data/stdlib-samples/3.2/test/__init__.py test-data/stdlib-samples/3.2/test/mypy.ini test-data/stdlib-samples/3.2/test/randv2_32.pck test-data/stdlib-samples/3.2/test/randv2_64.pck test-data/stdlib-samples/3.2/test/randv3.pck test-data/stdlib-samples/3.2/test/support.py test-data/stdlib-samples/3.2/test/test_base64.py test-data/stdlib-samples/3.2/test/test_fnmatch.py test-data/stdlib-samples/3.2/test/test_genericpath.py test-data/stdlib-samples/3.2/test/test_getopt.py test-data/stdlib-samples/3.2/test/test_glob.py test-data/stdlib-samples/3.2/test/test_posixpath.py test-data/stdlib-samples/3.2/test/test_pprint.py test-data/stdlib-samples/3.2/test/test_random.py 
test-data/stdlib-samples/3.2/test/test_set.py test-data/stdlib-samples/3.2/test/test_shutil.py test-data/stdlib-samples/3.2/test/test_subprocess.py test-data/stdlib-samples/3.2/test/test_tempfile.py test-data/stdlib-samples/3.2/test/test_textwrap.py test-data/stdlib-samples/3.2/test/tf_inherit_check.py test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py test-data/unit/README.md test-data/unit/check-abstract.test test-data/unit/check-annotated.test test-data/unit/check-async-await.test test-data/unit/check-attr.test test-data/unit/check-basic.test test-data/unit/check-bound.test test-data/unit/check-callable.test test-data/unit/check-class-namedtuple.test test-data/unit/check-classes.test test-data/unit/check-classvar.test test-data/unit/check-columns.test test-data/unit/check-ctypes.test test-data/unit/check-custom-plugin.test test-data/unit/check-dataclasses.test test-data/unit/check-default-plugin.test test-data/unit/check-dynamic-typing.test test-data/unit/check-enum.test test-data/unit/check-errorcodes.test test-data/unit/check-expressions.test test-data/unit/check-fastparse.test test-data/unit/check-final.test test-data/unit/check-flags.test test-data/unit/check-functions.test test-data/unit/check-generic-subtyping.test test-data/unit/check-generics.test test-data/unit/check-ignore.test test-data/unit/check-incomplete-fixture.test test-data/unit/check-incremental.test test-data/unit/check-inference-context.test test-data/unit/check-inference.test test-data/unit/check-inline-config.test test-data/unit/check-isinstance.test test-data/unit/check-kwargs.test test-data/unit/check-lists.test test-data/unit/check-literal.test test-data/unit/check-modules-case.test test-data/unit/check-modules.test 
test-data/unit/check-multiple-inheritance.test test-data/unit/check-namedtuple.test test-data/unit/check-narrowing.test test-data/unit/check-newsemanal.test test-data/unit/check-newsyntax.test test-data/unit/check-newtype.test test-data/unit/check-optional.test test-data/unit/check-overloading.test test-data/unit/check-protocols.test test-data/unit/check-python2.test test-data/unit/check-python38.test test-data/unit/check-redefine.test test-data/unit/check-reports.test test-data/unit/check-selftype.test test-data/unit/check-semanal-error.test test-data/unit/check-serialize.test test-data/unit/check-statements.test test-data/unit/check-super.test test-data/unit/check-tuples.test test-data/unit/check-type-aliases.test test-data/unit/check-type-checks.test test-data/unit/check-type-promotion.test test-data/unit/check-typeddict.test test-data/unit/check-typevar-values.test test-data/unit/check-underscores.test test-data/unit/check-unions.test test-data/unit/check-unreachable-code.test test-data/unit/check-unsupported.test test-data/unit/check-varargs.test test-data/unit/check-warnings.test test-data/unit/cmdline.test test-data/unit/daemon.test test-data/unit/deps-classes.test test-data/unit/deps-expressions.test test-data/unit/deps-generics.test test-data/unit/deps-statements.test test-data/unit/deps-types.test test-data/unit/deps.test test-data/unit/diff.test test-data/unit/errorstream.test test-data/unit/fine-grained-blockers.test test-data/unit/fine-grained-cache-incremental.test test-data/unit/fine-grained-cycles.test test-data/unit/fine-grained-modules.test test-data/unit/fine-grained-suggest.test test-data/unit/fine-grained.test test-data/unit/hacks.txt test-data/unit/merge.test test-data/unit/parse-errors.test test-data/unit/parse-python2.test test-data/unit/parse.test test-data/unit/python2eval.test test-data/unit/pythoneval-asyncio.test test-data/unit/pythoneval.test test-data/unit/reports.test test-data/unit/semanal-abstractclasses.test 
test-data/unit/semanal-basic.test test-data/unit/semanal-classes.test test-data/unit/semanal-classvar.test test-data/unit/semanal-errors.test test-data/unit/semanal-expressions.test test-data/unit/semanal-modules.test test-data/unit/semanal-namedtuple.test test-data/unit/semanal-python2.test test-data/unit/semanal-statements.test test-data/unit/semanal-symtable.test test-data/unit/semanal-typealiases.test test-data/unit/semanal-typeddict.test test-data/unit/semanal-typeinfo.test test-data/unit/semanal-types.test test-data/unit/semenal-literal.test test-data/unit/stubgen.test test-data/unit/typexport-basic.test test-data/unit/fixtures/__init_subclass__.pyi test-data/unit/fixtures/__new__.pyi test-data/unit/fixtures/alias.pyi test-data/unit/fixtures/args.pyi test-data/unit/fixtures/async_await.pyi test-data/unit/fixtures/attr.pyi test-data/unit/fixtures/bool.pyi test-data/unit/fixtures/bool_py2.pyi test-data/unit/fixtures/callable.pyi test-data/unit/fixtures/classmethod.pyi test-data/unit/fixtures/complex.pyi test-data/unit/fixtures/complex_tuple.pyi test-data/unit/fixtures/dict.pyi test-data/unit/fixtures/divmod.pyi test-data/unit/fixtures/exception.pyi test-data/unit/fixtures/f_string.pyi test-data/unit/fixtures/fine_grained.pyi test-data/unit/fixtures/float.pyi test-data/unit/fixtures/floatdict.pyi test-data/unit/fixtures/floatdict_python2.pyi test-data/unit/fixtures/for.pyi test-data/unit/fixtures/function.pyi test-data/unit/fixtures/isinstance.pyi test-data/unit/fixtures/isinstancelist.pyi test-data/unit/fixtures/list.pyi test-data/unit/fixtures/module.pyi test-data/unit/fixtures/module_all.pyi test-data/unit/fixtures/module_all_python2.pyi test-data/unit/fixtures/notimplemented.pyi test-data/unit/fixtures/object_with_init_subclass.pyi test-data/unit/fixtures/ops.pyi test-data/unit/fixtures/primitives.pyi test-data/unit/fixtures/property.pyi test-data/unit/fixtures/property_py2.pyi test-data/unit/fixtures/python2.pyi test-data/unit/fixtures/set.pyi 
test-data/unit/fixtures/slice.pyi test-data/unit/fixtures/staticmethod.pyi test-data/unit/fixtures/transform.pyi test-data/unit/fixtures/tuple-simple.pyi test-data/unit/fixtures/tuple.pyi test-data/unit/fixtures/type.pyi test-data/unit/fixtures/typing-full.pyi test-data/unit/fixtures/union.pyi test-data/unit/lib-stub/__builtin__.pyi test-data/unit/lib-stub/abc.pyi test-data/unit/lib-stub/attr.pyi test-data/unit/lib-stub/blocker.pyi test-data/unit/lib-stub/blocker2.pyi test-data/unit/lib-stub/broken.pyi test-data/unit/lib-stub/builtins.pyi test-data/unit/lib-stub/collections.pyi test-data/unit/lib-stub/contextlib.pyi test-data/unit/lib-stub/dataclasses.pyi test-data/unit/lib-stub/enum.pyi test-data/unit/lib-stub/mypy_extensions.pyi test-data/unit/lib-stub/six.pyi test-data/unit/lib-stub/sys.pyi test-data/unit/lib-stub/types.pyi test-data/unit/lib-stub/typing.pyi test-data/unit/lib-stub/typing_extensions.pyi test-data/unit/lib-stub/future/__init__.pyi test-data/unit/lib-stub/future/utils.pyi test-data/unit/plugins/arg_kinds.py test-data/unit/plugins/arg_names.py test-data/unit/plugins/attrhook.py test-data/unit/plugins/attrhook2.py test-data/unit/plugins/badreturn.py test-data/unit/plugins/badreturn2.py test-data/unit/plugins/callable_instance.py test-data/unit/plugins/class_callable.py test-data/unit/plugins/common_api_incremental.py test-data/unit/plugins/config_data.py test-data/unit/plugins/customentry.py test-data/unit/plugins/customize_mro.py test-data/unit/plugins/depshook.py test-data/unit/plugins/dyn_class.py test-data/unit/plugins/dyn_class_from_method.py test-data/unit/plugins/fnplugin.py test-data/unit/plugins/fully_qualified_test_hook.py test-data/unit/plugins/method_sig_hook.py test-data/unit/plugins/named_callable.py test-data/unit/plugins/noentry.py test-data/unit/plugins/plugin2.py test-data/unit/plugins/type_anal_hook.py test-data/unit/plugins/union_method.pymypy-0.761/mypy.egg-info/dependency_links.txt0000644€tŠÔÚ€2›s®0000000000113576752266025460 
0ustar jukkaDROPBOX\Domain Users00000000000000 mypy-0.761/mypy.egg-info/entry_points.txt0000644€tŠÔÚ€2›s®0000000017213576752266024710 0ustar jukkaDROPBOX\Domain Users00000000000000[console_scripts] dmypy = mypy.dmypy.client:console_entry mypy = mypy.__main__:console_entry stubgen = mypy.stubgen:main mypy-0.761/mypy.egg-info/requires.txt0000644€tŠÔÚ€2›s®0000000014413576752266024011 0ustar jukkaDROPBOX\Domain Users00000000000000typed_ast<1.5.0,>=1.4.0 typing_extensions>=3.7.4 mypy_extensions<0.5.0,>=0.4.3 [dmypy] psutil>=4.0 mypy-0.761/mypy.egg-info/top_level.txt0000644€tŠÔÚ€2›s®0000000001313576752266024136 0ustar jukkaDROPBOX\Domain Users00000000000000mypy mypyc mypy-0.761/mypy_bootstrap.ini0000644€tŠÔÚ€2›s®0000000060213576752246022512 0ustar jukkaDROPBOX\Domain Users00000000000000[mypy] disallow_untyped_calls = True disallow_untyped_defs = True disallow_incomplete_defs = True check_untyped_defs = True disallow_subclassing_any = True warn_no_return = True strict_optional = True no_implicit_optional = True disallow_any_generics = True disallow_any_unimported = True warn_redundant_casts = True warn_unused_configs = True show_traceback = True always_true = MYPYC mypy-0.761/mypy_self_check.ini0000644€tŠÔÚ€2›s®0000000077313576752246022574 0ustar jukkaDROPBOX\Domain Users00000000000000[mypy] disallow_untyped_calls = True disallow_untyped_defs = True disallow_incomplete_defs = True check_untyped_defs = True disallow_subclassing_any = True warn_no_return = True strict_optional = True strict_equality = True no_implicit_optional = True disallow_any_generics = True disallow_any_unimported = True warn_redundant_casts = True warn_unused_ignores = True warn_unused_configs = True show_traceback = True show_error_codes = True pretty = True always_false = MYPYC plugins = misc/proper_plugin.py mypy-0.761/mypyc/0000755€tŠÔÚ€2›s®0000000000013576752267020064 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypyc/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246022160 0ustar 
jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypyc/analysis.py0000644€tŠÔÚ€2›s®0000004044713576752246022267 0ustar jukkaDROPBOX\Domain Users00000000000000"""Data-flow analyses.""" from abc import abstractmethod from typing import Dict, Tuple, List, Set, TypeVar, Iterator, Generic, Optional, Iterable, Union from mypyc.ops import ( Value, ControlOp, BasicBlock, OpVisitor, Assign, LoadInt, LoadErrorValue, RegisterOp, Goto, Branch, Return, Call, Environment, Box, Unbox, Cast, Op, Unreachable, TupleGet, TupleSet, GetAttr, SetAttr, LoadStatic, InitStatic, PrimitiveOp, MethodCall, RaiseStandardError, ) class CFG: """Control-flow graph. Node 0 is always assumed to be the entry point. There must be a non-empty set of exits. """ def __init__(self, succ: Dict[BasicBlock, List[BasicBlock]], pred: Dict[BasicBlock, List[BasicBlock]], exits: Set[BasicBlock]) -> None: assert exits self.succ = succ self.pred = pred self.exits = exits def __str__(self) -> str: lines = [] lines.append('exits: %s' % sorted(self.exits)) lines.append('succ: %s' % self.succ) lines.append('pred: %s' % self.pred) return '\n'.join(lines) def get_cfg(blocks: List[BasicBlock]) -> CFG: """Calculate basic block control-flow graph. The result is a dictionary like this: basic block index -> (successors blocks, predecesssor blocks) """ succ_map = {} pred_map = {} # type: Dict[BasicBlock, List[BasicBlock]] exits = set() for block in blocks: assert not any(isinstance(op, ControlOp) for op in block.ops[:-1]), ( "Control-flow ops must be at the end of blocks") last = block.ops[-1] if isinstance(last, Branch): succ = [last.true, last.false] elif isinstance(last, Goto): succ = [last.label] else: succ = [] exits.add(block) # Errors can occur anywhere inside a block, which means that # we can't assume that the entire block has executed before # jumping to the error handler. 
In our CFG construction, we # model this as saying that a block can jump to its error # handler or the error handlers of any of its normal # successors (to represent an error before that next block # completes). This works well for analyses like "must # defined", where it implies that registers assigned in a # block may be undefined in its error handler, but is in # general not a precise representation of reality; any # analyses that require more fidelity must wait until after # exception insertion. for error_point in [block] + succ: if error_point.error_handler: succ.append(error_point.error_handler) succ_map[block] = succ pred_map[block] = [] for prev, nxt in succ_map.items(): for label in nxt: pred_map[label].append(prev) return CFG(succ_map, pred_map, exits) def get_real_target(label: BasicBlock) -> BasicBlock: if len(label.ops) == 1 and isinstance(label.ops[-1], Goto): label = label.ops[-1].label return label def cleanup_cfg(blocks: List[BasicBlock]) -> None: """Cleanup the control flow graph. This eliminates obviously dead basic blocks and eliminates blocks that contain nothing but a single jump. There is a lot more that could be done. 
""" changed = True while changed: # First collapse any jumps to basic block that only contain a goto for block in blocks: term = block.ops[-1] if isinstance(term, Goto): term.label = get_real_target(term.label) elif isinstance(term, Branch): term.true = get_real_target(term.true) term.false = get_real_target(term.false) # Then delete any blocks that have no predecessors changed = False cfg = get_cfg(blocks) orig_blocks = blocks[:] blocks.clear() for i, block in enumerate(orig_blocks): if i == 0 or cfg.pred[block]: blocks.append(block) else: changed = True T = TypeVar('T') AnalysisDict = Dict[Tuple[BasicBlock, int], Set[T]] class AnalysisResult(Generic[T]): def __init__(self, before: 'AnalysisDict[T]', after: 'AnalysisDict[T]') -> None: self.before = before self.after = after def __str__(self) -> str: return 'before: %s\nafter: %s\n' % (self.before, self.after) GenAndKill = Tuple[Set[Value], Set[Value]] class BaseAnalysisVisitor(OpVisitor[GenAndKill]): def visit_goto(self, op: Goto) -> GenAndKill: return set(), set() @abstractmethod def visit_register_op(self, op: RegisterOp) -> GenAndKill: raise NotImplementedError @abstractmethod def visit_assign(self, op: Assign) -> GenAndKill: raise NotImplementedError def visit_call(self, op: Call) -> GenAndKill: return self.visit_register_op(op) def visit_method_call(self, op: MethodCall) -> GenAndKill: return self.visit_register_op(op) def visit_primitive_op(self, op: PrimitiveOp) -> GenAndKill: return self.visit_register_op(op) def visit_load_int(self, op: LoadInt) -> GenAndKill: return self.visit_register_op(op) def visit_load_error_value(self, op: LoadErrorValue) -> GenAndKill: return self.visit_register_op(op) def visit_get_attr(self, op: GetAttr) -> GenAndKill: return self.visit_register_op(op) def visit_set_attr(self, op: SetAttr) -> GenAndKill: return self.visit_register_op(op) def visit_load_static(self, op: LoadStatic) -> GenAndKill: return self.visit_register_op(op) def visit_init_static(self, op: InitStatic) -> 
GenAndKill: return self.visit_register_op(op) def visit_tuple_get(self, op: TupleGet) -> GenAndKill: return self.visit_register_op(op) def visit_tuple_set(self, op: TupleSet) -> GenAndKill: return self.visit_register_op(op) def visit_box(self, op: Box) -> GenAndKill: return self.visit_register_op(op) def visit_unbox(self, op: Unbox) -> GenAndKill: return self.visit_register_op(op) def visit_cast(self, op: Cast) -> GenAndKill: return self.visit_register_op(op) def visit_raise_standard_error(self, op: RaiseStandardError) -> GenAndKill: return self.visit_register_op(op) class DefinedVisitor(BaseAnalysisVisitor): """Visitor for finding defined registers. Note that this only deals with registers and not temporaries, on the assumption that we never access temporaries when they might be undefined. """ def visit_branch(self, op: Branch) -> GenAndKill: return set(), set() def visit_return(self, op: Return) -> GenAndKill: return set(), set() def visit_unreachable(self, op: Unreachable) -> GenAndKill: return set(), set() def visit_register_op(self, op: RegisterOp) -> GenAndKill: return set(), set() def visit_assign(self, op: Assign) -> GenAndKill: # Loading an error value may undefine the register. if isinstance(op.src, LoadErrorValue) and op.src.undefines: return set(), {op.dest} else: return {op.dest}, set() def analyze_maybe_defined_regs(blocks: List[BasicBlock], cfg: CFG, initial_defined: Set[Value]) -> AnalysisResult[Value]: """Calculate potentially defined registers at each CFG location. A register is defined if it has a value along some path from the initial location. """ return run_analysis(blocks=blocks, cfg=cfg, gen_and_kill=DefinedVisitor(), initial=initial_defined, backward=False, kind=MAYBE_ANALYSIS) def analyze_must_defined_regs( blocks: List[BasicBlock], cfg: CFG, initial_defined: Set[Value], regs: Iterable[Value]) -> AnalysisResult[Value]: """Calculate always defined registers at each CFG location. 
This analysis can work before exception insertion, since it is a sound assumption that registers defined in a block might not be initialized in its error handler. A register is defined if it has a value along all paths from the initial location. """ return run_analysis(blocks=blocks, cfg=cfg, gen_and_kill=DefinedVisitor(), initial=initial_defined, backward=False, kind=MUST_ANALYSIS, universe=set(regs)) class BorrowedArgumentsVisitor(BaseAnalysisVisitor): def __init__(self, args: Set[Value]) -> None: self.args = args def visit_branch(self, op: Branch) -> GenAndKill: return set(), set() def visit_return(self, op: Return) -> GenAndKill: return set(), set() def visit_unreachable(self, op: Unreachable) -> GenAndKill: return set(), set() def visit_register_op(self, op: RegisterOp) -> GenAndKill: return set(), set() def visit_assign(self, op: Assign) -> GenAndKill: if op.dest in self.args: return set(), {op.dest} return set(), set() def analyze_borrowed_arguments( blocks: List[BasicBlock], cfg: CFG, borrowed: Set[Value]) -> AnalysisResult[Value]: """Calculate arguments that can use references borrowed from the caller. When assigning to an argument, it no longer is borrowed. 
""" return run_analysis(blocks=blocks, cfg=cfg, gen_and_kill=BorrowedArgumentsVisitor(borrowed), initial=borrowed, backward=False, kind=MUST_ANALYSIS, universe=borrowed) class UndefinedVisitor(BaseAnalysisVisitor): def visit_branch(self, op: Branch) -> GenAndKill: return set(), set() def visit_return(self, op: Return) -> GenAndKill: return set(), set() def visit_unreachable(self, op: Unreachable) -> GenAndKill: return set(), set() def visit_register_op(self, op: RegisterOp) -> GenAndKill: return set(), {op} if not op.is_void else set() def visit_assign(self, op: Assign) -> GenAndKill: return set(), {op.dest} def analyze_undefined_regs(blocks: List[BasicBlock], cfg: CFG, env: Environment, initial_defined: Set[Value]) -> AnalysisResult[Value]: """Calculate potentially undefined registers at each CFG location. A register is undefined if there is some path from initial block where it has an undefined value. """ initial_undefined = set(env.regs()) - initial_defined return run_analysis(blocks=blocks, cfg=cfg, gen_and_kill=UndefinedVisitor(), initial=initial_undefined, backward=False, kind=MAYBE_ANALYSIS) class LivenessVisitor(BaseAnalysisVisitor): def visit_branch(self, op: Branch) -> GenAndKill: return set(op.sources()), set() def visit_return(self, op: Return) -> GenAndKill: return {op.reg}, set() def visit_unreachable(self, op: Unreachable) -> GenAndKill: return set(), set() def visit_register_op(self, op: RegisterOp) -> GenAndKill: gen = set(op.sources()) if not op.is_void: return gen, {op} else: return gen, set() def visit_assign(self, op: Assign) -> GenAndKill: return set(op.sources()), {op.dest} def analyze_live_regs(blocks: List[BasicBlock], cfg: CFG) -> AnalysisResult[Value]: """Calculate live registers at each CFG location. A register is live at a location if it can be read along some CFG path starting from the location. 
""" return run_analysis(blocks=blocks, cfg=cfg, gen_and_kill=LivenessVisitor(), initial=set(), backward=True, kind=MAYBE_ANALYSIS) # Analysis kinds MUST_ANALYSIS = 0 MAYBE_ANALYSIS = 1 # TODO the return type of this function is too complicated. Abtract it into its # own class. def run_analysis(blocks: List[BasicBlock], cfg: CFG, gen_and_kill: OpVisitor[Tuple[Set[T], Set[T]]], initial: Set[T], kind: int, backward: bool, universe: Optional[Set[T]] = None) -> AnalysisResult[T]: """Run a general set-based data flow analysis. Args: blocks: All basic blocks cfg: Control-flow graph for the code gen_and_kill: Implementation of gen and kill functions for each op initial: Value of analysis for the entry points (for a forward analysis) or the exit points (for a backward analysis) kind: MUST_ANALYSIS or MAYBE_ANALYSIS backward: If False, the analysis is a forward analysis; it's backward otherwise universe: For a must analysis, the set of all possible values. This is the starting value for the work list algorithm, which will narrow this down until reaching a fixed point. For a maybe analysis the iteration always starts from an empty set and this argument is ignored. Return analysis results: (before, after) """ block_gen = {} block_kill = {} # Calculate kill and gen sets for entire basic blocks. for block in blocks: gen = set() # type: Set[T] kill = set() # type: Set[T] ops = block.ops if backward: ops = list(reversed(ops)) for op in ops: opgen, opkill = op.accept(gen_and_kill) gen = ((gen - opkill) | opgen) kill = ((kill - opgen) | opkill) block_gen[block] = gen block_kill[block] = kill # Set up initial state for worklist algorithm. 
worklist = list(blocks) if not backward: worklist = worklist[::-1] # Reverse for a small performance improvement workset = set(worklist) before = {} # type: Dict[BasicBlock, Set[T]] after = {} # type: Dict[BasicBlock, Set[T]] for block in blocks: if kind == MAYBE_ANALYSIS: before[block] = set() after[block] = set() else: assert universe is not None, "Universe must be defined for a must analysis" before[block] = set(universe) after[block] = set(universe) if backward: pred_map = cfg.succ succ_map = cfg.pred else: pred_map = cfg.pred succ_map = cfg.succ # Run work list algorithm to generate in and out sets for each basic block. while worklist: label = worklist.pop() workset.remove(label) if pred_map[label]: new_before = None # type: Union[Set[T], None] for pred in pred_map[label]: if new_before is None: new_before = set(after[pred]) elif kind == MAYBE_ANALYSIS: new_before |= after[pred] else: new_before &= after[pred] assert new_before is not None else: new_before = set(initial) before[label] = new_before new_after = (new_before - block_kill[label]) | block_gen[label] if new_after != after[label]: for succ in succ_map[label]: if succ not in workset: worklist.append(succ) workset.add(succ) after[label] = new_after # Run algorithm for each basic block to generate opcode-level sets. 
op_before = {} # type: Dict[Tuple[BasicBlock, int], Set[T]] op_after = {} # type: Dict[Tuple[BasicBlock, int], Set[T]] for block in blocks: label = block cur = before[label] ops_enum = enumerate(block.ops) # type: Iterator[Tuple[int, Op]] if backward: ops_enum = reversed(list(ops_enum)) for idx, op in ops_enum: op_before[label, idx] = cur opgen, opkill = op.accept(gen_and_kill) cur = (cur - opkill) | opgen op_after[label, idx] = cur if backward: op_after, op_before = op_before, op_after return AnalysisResult(op_before, op_after) mypy-0.761/mypyc/build.py0000644€tŠÔÚ€2›s®0000005055613576752246021545 0ustar jukkaDROPBOX\Domain Users00000000000000"""Support for building extensions using mypyc with distutils or setuptools The main entry point is mypycify, which produces a list of extension modules to be passed to setup. A trivial setup.py for a mypyc built project, then, looks like: from distutils.core import setup from mypyc.build import mypycify setup(name='test_module', ext_modules=mypycify(['foo.py']), ) See the mypycify docs for additional arguments. mypycify can integrate with either distutils or setuptools, but needs to know at import-time whether it is using distutils or setuputils. We hackily decide based on whether setuptools has been imported already. 
""" import sys import os.path import hashlib import time import re from typing import List, Tuple, Any, Optional, Dict, Union, Set, Iterable, cast from typing_extensions import TYPE_CHECKING, NoReturn, Type from mypy.main import process_options from mypy.errors import CompileError from mypy.options import Options from mypy.build import BuildSource from mypy.fscache import FileSystemCache from mypy.util import write_junit_xml from mypyc.namegen import exported_name from mypyc.options import CompilerOptions from mypyc.errors import Errors from mypyc.common import shared_lib_name from mypyc.ops import format_modules from mypyc import emitmodule if TYPE_CHECKING: from distutils.core import Extension # noqa from distutils import sysconfig, ccompiler def get_extension() -> Type['Extension']: # We can work with either setuptools or distutils, and pick setuptools # if it has been imported. use_setuptools = 'setuptools' in sys.modules if not use_setuptools: from distutils.core import Extension else: from setuptools import Extension # type: ignore # noqa return Extension def setup_mypycify_vars() -> None: """Rewrite a bunch of config vars in pretty dubious ways.""" # There has to be a better approach to this. # The vars can contain ints but we only work with str ones vars = cast(Dict[str, str], sysconfig.get_config_vars()) if sys.platform == 'darwin': # Disable building 32-bit binaries, since we generate too much code # for a 32-bit Mach-O object. There has to be a better way to do this. vars['LDSHARED'] = vars['LDSHARED'].replace('-arch i386', '') vars['LDFLAGS'] = vars['LDFLAGS'].replace('-arch i386', '') vars['CFLAGS'] = vars['CFLAGS'].replace('-arch i386', '') def fail(message: str) -> NoReturn: # TODO: Is there something else we should do to fail? 
sys.exit(message) def get_mypy_config(mypy_options: List[str], only_compile_paths: Optional[Iterable[str]], compiler_options: CompilerOptions, fscache: Optional[FileSystemCache], ) -> Tuple[List[BuildSource], List[BuildSource], Options]: """Construct mypy BuildSources and Options from file and options lists""" all_sources, options = process_options(mypy_options, fscache=fscache) if only_compile_paths is not None: paths_set = set(only_compile_paths) mypyc_sources = [s for s in all_sources if s.path in paths_set] else: mypyc_sources = all_sources if compiler_options.separate: mypyc_sources = [src for src in mypyc_sources if src.path and not src.path.endswith('__init__.py')] if not mypyc_sources: return mypyc_sources, all_sources, options # Override whatever python_version is inferred from the .ini file, # and set the python_version to be the currently used version. options.python_version = sys.version_info[:2] if options.python_version[0] == 2: fail('Python 2 not supported') if not options.strict_optional: fail('Disabling strict optional checking not supported') options.show_traceback = True # Needed to get types for all AST nodes options.export_types = True # We use mypy incremental mode when doing separate/incremental mypyc compilation options.incremental = compiler_options.separate options.preserve_asts = True for source in mypyc_sources: options.per_module_options.setdefault(source.module, {})['mypyc'] = True return mypyc_sources, all_sources, options def generate_c_extension_shim( full_module_name: str, module_name: str, dir_name: str, group_name: str) -> str: """Create a C extension shim with a passthrough PyInit function. Arguments: full_module_name: the dotted full module name module_name: the final component of the module name dir_name: the directory to place source code group_name: the name of the group """ cname = '%s.c' % full_module_name.replace('.', os.sep) cpath = os.path.join(dir_name, cname) # We load the C extension shim template from a file. 
# (So that the file could be reused as a bazel template also.) with open(os.path.join(include_dir(), 'module_shim.tmpl')) as f: shim_template = f.read() write_file( cpath, shim_template.format(modname=module_name, libname=shared_lib_name(group_name), full_modname=exported_name(full_module_name))) return cpath def group_name(modules: List[str]) -> str: """Produce a probably unique name for a group from a list of module names.""" if len(modules) == 1: return modules[0] h = hashlib.sha1() h.update(','.join(modules).encode()) return h.hexdigest()[:20] def include_dir() -> str: """Find the path of the lib-rt dir that needs to be included""" return os.path.join(os.path.abspath(os.path.dirname(__file__)), 'lib-rt') def generate_c(sources: List[BuildSource], options: Options, groups: emitmodule.Groups, fscache: FileSystemCache, compiler_options: CompilerOptions, ) -> Tuple[List[List[Tuple[str, str]]], str]: """Drive the actual core compilation step. The groups argument describes how modules are assigned to C extension modules. See the comments on the Groups type in mypyc.emitmodule for details. Returns the C source code and (for debugging) the pretty printed IR. """ t0 = time.time() # Do the actual work now serious = False result = None try: result = emitmodule.parse_and_typecheck( sources, options, compiler_options, groups, fscache) messages = result.errors except CompileError as e: messages = e.messages if not e.use_stdout: serious = True t1 = time.time() if compiler_options.verbose: print("Parsed and typechecked in {:.3f}s".format(t1 - t0)) if not messages and result: errors = Errors() modules, ctext = emitmodule.compile_modules_to_c( result, compiler_options=compiler_options, errors=errors, groups=groups) if errors.num_errors: messages.extend(errors.new_messages()) t2 = time.time() if compiler_options.verbose: print("Compiled to C in {:.3f}s".format(t2 - t1)) # ... you know, just in case. 
if options.junit_xml: py_version = "{}_{}".format( options.python_version[0], options.python_version[1] ) write_junit_xml( t2 - t0, serious, messages, options.junit_xml, py_version, options.platform ) if messages: print("\n".join(messages)) sys.exit(1) return ctext, '\n'.join(format_modules(modules)) def build_using_shared_lib(sources: List[BuildSource], group_name: str, cfiles: List[str], deps: List[str], build_dir: str, extra_compile_args: List[str], ) -> List['Extension']: """Produce the list of extension modules when a shared library is needed. This creates one shared library extension module that all of the others import and then one shim extension module for each module in the build, that simply calls an initialization function in the shared library. The shared library (which lib_name is the name of) is a python extension module that exports the real initialization functions in Capsules stored in module attributes. """ extensions = [get_extension()( shared_lib_name(group_name), sources=cfiles, include_dirs=[include_dir(), build_dir], depends=deps, extra_compile_args=extra_compile_args, )] for source in sources: module_name = source.module.split('.')[-1] shim_file = generate_c_extension_shim(source.module, module_name, build_dir, group_name) # We include the __init__ in the "module name" we stick in the Extension, # since this seems to be needed for it to end up in the right place. full_module_name = source.module assert source.path if os.path.split(source.path)[1] == '__init__.py': full_module_name += '.__init__' extensions.append(get_extension()( full_module_name, sources=[shim_file], extra_compile_args=extra_compile_args, )) return extensions def build_single_module(sources: List[BuildSource], cfiles: List[str], extra_compile_args: List[str], ) -> List['Extension']: """Produce the list of extension modules for a standalone extension. This contains just one module, since there is no need for a shared module. 
""" return [get_extension()( sources[0].module, sources=cfiles, include_dirs=[include_dir()], extra_compile_args=extra_compile_args, )] def write_file(path: str, contents: str) -> None: """Write data into a file. If the file already exists and has the same contents we want to write, skip writing so as to preserve the mtime and avoid triggering recompilation. """ # We encode it ourselves and open the files as binary to avoid windows # newline translation encoded_contents = contents.encode('utf-8') try: with open(path, 'rb') as f: old_contents = f.read() # type: Optional[bytes] except IOError: old_contents = None if old_contents != encoded_contents: os.makedirs(os.path.dirname(path), exist_ok=True) with open(path, 'wb') as f: f.write(encoded_contents) # Fudge the mtime forward because otherwise when two builds happen close # together (like in a test) setuptools might not realize the source is newer # than the new artifact. # XXX: This is bad though. new_mtime = os.stat(path).st_mtime + 1 os.utime(path, times=(new_mtime, new_mtime)) def construct_groups( sources: List[BuildSource], separate: Union[bool, List[Tuple[List[str], Optional[str]]]], use_shared_lib: bool, ) -> emitmodule.Groups: """Compute Groups given the input source list and separate configs. separate is the user-specified configuration for how to assign modules to compilation groups (see mypycify docstring for details). This takes that and expands it into our internal representation of group configuration, documented in mypyc.emitmodule's definition of Group. 
""" if separate is True: groups = [ ([source], None) for source in sources ] # type: emitmodule.Groups elif isinstance(separate, list): groups = [] used_sources = set() for files, name in separate: group_sources = [src for src in sources if src.path in files] groups.append((group_sources, name)) used_sources.update(group_sources) unused_sources = [src for src in sources if src not in used_sources] if unused_sources: groups.extend([([source], None) for source in unused_sources]) else: groups = [(sources, None)] # Generate missing names for i, (group, name) in enumerate(groups): if use_shared_lib and not name: name = group_name([source.module for source in group]) groups[i] = (group, name) return groups def get_header_deps(cfiles: List[Tuple[str, str]]) -> List[str]: """Find all the headers used by a group of cfiles. We do this by just regexping the source, which is a bit simpler than properly plumbing the data through. Arguments: cfiles: A list of (file name, file contents) pairs. """ headers = set() # type: Set[str] for _, contents in cfiles: headers.update(re.findall(r'#include "(.*)"', contents)) return sorted(headers) def mypyc_build( paths: List[str], compiler_options: CompilerOptions, *, separate: Union[bool, List[Tuple[List[str], Optional[str]]]] = False, only_compile_paths: Optional[Iterable[str]] = None, skip_cgen_input: Optional[Any] = None, always_use_shared_lib: bool = False ) -> Tuple[emitmodule.Groups, List[Tuple[List[str], List[str]]]]: """Do the front and middle end of mypyc building, producing and writing out C source.""" fscache = FileSystemCache() mypyc_sources, all_sources, options = get_mypy_config( paths, only_compile_paths, compiler_options, fscache) # We generate a shared lib if there are multiple modules or if any # of the modules are in package. (Because I didn't want to fuss # around with making the single module code handle packages.) use_shared_lib = ( len(mypyc_sources) > 1 or any('.' 
in x.module for x in mypyc_sources) or always_use_shared_lib ) groups = construct_groups(mypyc_sources, separate, use_shared_lib) # We let the test harness just pass in the c file contents instead # so that it can do a corner-cutting version without full stubs. if not skip_cgen_input: group_cfiles, ops_text = generate_c(all_sources, options, groups, fscache, compiler_options=compiler_options) # TODO: unique names? write_file(os.path.join(compiler_options.target_dir, 'ops.txt'), ops_text) else: group_cfiles = skip_cgen_input # Write out the generated C and collect the files for each group # Should this be here?? group_cfilenames = [] # type: List[Tuple[List[str], List[str]]] for cfiles in group_cfiles: cfilenames = [] for cfile, ctext in cfiles: cfile = os.path.join(compiler_options.target_dir, cfile) write_file(cfile, ctext) if os.path.splitext(cfile)[1] == '.c': cfilenames.append(cfile) deps = [os.path.join(compiler_options.target_dir, dep) for dep in get_header_deps(cfiles)] group_cfilenames.append((cfilenames, deps)) return groups, group_cfilenames def mypycify( paths: List[str], *, only_compile_paths: Optional[Iterable[str]] = None, verbose: bool = False, opt_level: str = '3', strip_asserts: bool = False, multi_file: bool = False, separate: Union[bool, List[Tuple[List[str], Optional[str]]]] = False, skip_cgen_input: Optional[Any] = None, target_dir: Optional[str] = None, include_runtime_files: Optional[bool] = None ) -> List['Extension']: """Main entry point to building using mypyc. This produces a list of Extension objects that should be passed as the ext_modules parameter to setup. Arguments: paths: A list of file paths to build. It may also contain mypy options. only_compile_paths: If not None, an iterable of paths that are to be the only modules compiled, even if other modules appear in the mypy command line given to paths. (These modules must still be passed to paths.) verbose: Should mypyc be more verbose. Defaults to false. 
opt_level: The optimization level, as a string. Defaults to '3' (meaning '-O3'). strip_asserts: Should asserts be stripped from the generated code. multi_file: Should each Python module be compiled into its own C source file. This can reduce compile time and memory requirements at the likely cost of runtime performance of compiled code. Defaults to false. separate: Should compiled modules be placed in separate extension modules. If False, all modules are placed in a single shared library. If True, every module is placed in its own library. Otherwise separate should be a list of (file name list, optional shared library name) pairs specifying groups of files that should be placed in the same shared library (while all other modules will be placed in its own library). Each group can be compiled independently, which can speed up compilation, but calls between groups can be slower than calls within a group and can't be inlined. target_dir: The directory to write C output files. Defaults to 'build'. include_runtime_files: If not None, whether the mypyc runtime library should be directly #include'd instead of linked separately in order to reduce compiler invocations. Defaults to False in multi_file mode, True otherwise. """ # Figure out our configuration compiler_options = CompilerOptions( strip_asserts=strip_asserts, multi_file=multi_file, verbose=verbose, separate=separate is not False, target_dir=target_dir, include_runtime_files=include_runtime_files, ) # Generate all the actual important C code groups, group_cfilenames = mypyc_build( paths, only_compile_paths=only_compile_paths, compiler_options=compiler_options, separate=separate, skip_cgen_input=skip_cgen_input, ) # Mess around with setuptools and actually get the thing built setup_mypycify_vars() # Create a compiler object so we can make decisions based on what # compiler is being used. 
typeshed is missing some attribues on the # compiler object so we give it type Any compiler = ccompiler.new_compiler() # type: Any sysconfig.customize_compiler(compiler) build_dir = compiler_options.target_dir cflags = [] # type: List[str] if compiler.compiler_type == 'unix': cflags += [ '-O{}'.format(opt_level), '-Werror', '-Wno-unused-function', '-Wno-unused-label', '-Wno-unreachable-code', '-Wno-unused-variable', '-Wno-unused-command-line-argument', '-Wno-unknown-warning-option', ] if 'gcc' in compiler.compiler[0]: # This flag is needed for gcc but does not exist on clang. cflags += ['-Wno-unused-but-set-variable'] elif compiler.compiler_type == 'msvc': if opt_level == '3': opt_level = '2' cflags += [ '/O{}'.format(opt_level), '/wd4102', # unreferenced label '/wd4101', # unreferenced local variable '/wd4146', # negating unsigned int ] if multi_file: # Disable whole program optimization in multi-file mode so # that we actually get the compilation speed and memory # use wins that multi-file mode is intended for. cflags += [ '/GL-', '/wd9025', # warning about overriding /GL ] # If configured to (defaults to yes in multi-file mode), copy the # runtime library in. Otherwise it just gets #included to save on # compiler invocations. 
shared_cfilenames = [] if not compiler_options.include_runtime_files: for name in ['CPy.c', 'getargs.c']: rt_file = os.path.join(build_dir, name) with open(os.path.join(include_dir(), name), encoding='utf-8') as f: write_file(rt_file, f.read()) shared_cfilenames.append(rt_file) extensions = [] for (group_sources, lib_name), (cfilenames, deps) in zip(groups, group_cfilenames): if lib_name: extensions.extend(build_using_shared_lib( group_sources, lib_name, cfilenames + shared_cfilenames, deps, build_dir, cflags)) else: extensions.extend(build_single_module( group_sources, cfilenames + shared_cfilenames, cflags)) return extensions mypy-0.761/mypyc/common.py0000644€tŠÔÚ€2›s®0000000321013576752246021717 0ustar jukkaDROPBOX\Domain Users00000000000000MYPY = False if MYPY: from typing_extensions import Final PREFIX = 'CPyPy_' # type: Final # Python wrappers NATIVE_PREFIX = 'CPyDef_' # type: Final # Native functions etc. DUNDER_PREFIX = 'CPyDunder_' # type: Final # Wrappers for exposing dunder methods to the API REG_PREFIX = 'cpy_r_' # type: Final # Registers STATIC_PREFIX = 'CPyStatic_' # type: Final # Static variables (for literals etc.) TYPE_PREFIX = 'CPyType_' # type: Final # Type object struct MODULE_PREFIX = 'CPyModule_' # type: Final # Cached modules ATTR_PREFIX = '_' # type: Final # Attributes ENV_ATTR_NAME = '__mypyc_env__' # type: Final NEXT_LABEL_ATTR_NAME = '__mypyc_next_label__' # type: Final TEMP_ATTR_NAME = '__mypyc_temp__' # type: Final LAMBDA_NAME = '__mypyc_lambda__' # type: Final PROPSET_PREFIX = '__mypyc_setter__' # type: Final SELF_NAME = '__mypyc_self__' # type: Final INT_PREFIX = '__tmp_literal_int_' # type: Final # Max short int we accept as a literal is based on 32-bit platforms, # so that we can just always emit the same code. 
MAX_LITERAL_SHORT_INT = (1 << 30) - 1 # type: Final TOP_LEVEL_NAME = '__top_level__' # type: Final # Special function representing module top level # Maximal number of subclasses for a class to trigger fast path in isinstance() checks. FAST_ISINSTANCE_MAX_SUBCLASSES = 2 # type: Final def decorator_helper_name(func_name: str) -> str: return '__mypyc_{}_decorator_helper__'.format(func_name) def shared_lib_name(group_name: str) -> str: """Given a group name, return the actual name of its extension module. (This just adds a suffix to the final component.) """ return '{}__mypyc'.format(group_name) mypy-0.761/mypyc/crash.py0000644€tŠÔÚ€2›s®0000000166613576752246021544 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterator MYPY = False if MYPY: from typing import NoReturn import sys import traceback from contextlib import contextmanager @contextmanager def catch_errors(module_path: str, line: int) -> Iterator[None]: try: yield except Exception: crash_report(module_path, line) def crash_report(module_path: str, line: int) -> 'NoReturn': # Adapted from report_internal_error in mypy err = sys.exc_info()[1] tb = traceback.extract_stack()[:-4] # Excise all the traceback from the test runner for i, x in enumerate(tb): if x.name == 'pytest_runtest_call': tb = tb[i + 1:] break tb2 = traceback.extract_tb(sys.exc_info()[2])[1:] print('Traceback (most recent call last):') for s in traceback.format_list(tb + tb2): print(s.rstrip('\n')) print('{}:{}: {}: {}'.format(module_path, line, type(err).__name__, err)) raise SystemExit(2) mypy-0.761/mypyc/cstring.py0000644€tŠÔÚ€2›s®0000000362413576752246022111 0ustar jukkaDROPBOX\Domain Users00000000000000"""Encode valid C string literals from Python strings. If a character is not allowed in C string literals, it is either emitted as a simple escape sequence (e.g. '\\n'), or an octal escape sequence with exactly three digits ('\\oXXX'). 
Question marks are escaped to prevent trigraphs in the string literal from being interpreted. Note that '\\?' is an invalid escape sequence in Python. Consider the string literal "AB\\xCDEF". As one would expect, Python parses it as ['A', 'B', 0xCD, 'E', 'F']. However, the C standard specifies that all hexadecimal digits immediately following '\\x' will be interpreted as part of the escape sequence. Therefore, it is unexpectedly parsed as ['A', 'B', 0xCDEF]. Emitting ("AB\\xCD" "EF") would avoid this behaviour. However, we opt for simplicity and use octal escape sequences instead. They do not suffer from the same issue as they are defined to parse at most three octal digits. """ import string from typing import Tuple CHAR_MAP = ['\\{:03o}'.format(i) for i in range(256)] # It is safe to use string.printable as it always uses the C locale. for c in string.printable: CHAR_MAP[ord(c)] = c # These assignments must come last because we prioritize simple escape # sequences over any other representation. for c in ('\'', '"', '\\', 'a', 'b', 'f', 'n', 'r', 't', 'v'): escaped = '\\{}'.format(c) decoded = escaped.encode('ascii').decode('unicode_escape') CHAR_MAP[ord(decoded)] = escaped # This escape sequence is invalid in Python. CHAR_MAP[ord('?')] = r'\?' 
def encode_as_c_string(s: str) -> Tuple[str, int]: """Produce a quoted C string literal and its size, for a UTF-8 string.""" return encode_bytes_as_c_string(s.encode('utf-8')) def encode_bytes_as_c_string(b: bytes) -> Tuple[str, int]: """Produce a quoted C string literal and its size, for a byte string.""" escaped = ''.join([CHAR_MAP[i] for i in b]) return '"{}"'.format(escaped), len(b) mypy-0.761/mypyc/emit.py0000644€tŠÔÚ€2›s®0000010005313576752246021370 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities for emitting C code.""" from collections import OrderedDict from typing import List, Set, Dict, Optional, Callable, Union from mypyc.common import ( REG_PREFIX, ATTR_PREFIX, STATIC_PREFIX, TYPE_PREFIX, NATIVE_PREFIX, FAST_ISINSTANCE_MAX_SUBCLASSES, ) from mypyc.ops import ( Environment, BasicBlock, Value, RType, RTuple, RInstance, RUnion, RPrimitive, is_float_rprimitive, is_bool_rprimitive, is_int_rprimitive, is_short_int_rprimitive, is_list_rprimitive, is_dict_rprimitive, is_set_rprimitive, is_tuple_rprimitive, is_none_rprimitive, is_object_rprimitive, object_rprimitive, is_str_rprimitive, ClassIR, FuncDecl, int_rprimitive, is_optional_type, optional_value_type, all_concrete_classes ) from mypyc.namegen import NameGenerator, exported_name from mypyc.sametype import is_same_type class HeaderDeclaration: """A representation of a declaration in C. This is used to generate declarations in header files and (optionally) definitions in source files. Attributes: decl: C source code for the declaration. defn: Optionally, C source code for a definition. dependencies: The names of any objects that must be declared prior. is_type: Whether the declaration is of a C type. (C types will be declared in external header files and not marked 'extern'.) needs_export: Whether the declared object needs to be exported to other modules in the linking table. 
""" def __init__(self, decl: Union[str, List[str]], defn: Optional[List[str]] = None, *, dependencies: Optional[Set[str]] = None, is_type: bool = False, needs_export: bool = False ) -> None: self.decl = [decl] if isinstance(decl, str) else decl self.defn = defn self.dependencies = dependencies or set() self.is_type = is_type self.needs_export = needs_export class EmitterContext: """Shared emitter state for a compilation group.""" def __init__(self, names: NameGenerator, group_name: Optional[str] = None, group_map: Optional[Dict[str, Optional[str]]] = None, ) -> None: """Setup shared emitter state. Args: names: The name generator to use group_map: Map from module names to group name group_name: Current group name """ self.temp_counter = 0 self.names = names self.group_name = group_name self.group_map = group_map or {} # Groups that this group depends on self.group_deps = set() # type: Set[str] # The map below is used for generating declarations and # definitions at the top of the C file. The main idea is that they can # be generated at any time during the emit phase. # A map of a C identifier to whatever the C identifier declares. Currently this is # used for declaring structs and the key corresponds to the name of the struct. # The declaration contains the body of the struct. 
self.declarations = OrderedDict() # type: Dict[str, HeaderDeclaration] class Emitter: """Helper for C code generation.""" def __init__(self, context: EmitterContext, env: Optional[Environment] = None) -> None: self.context = context self.names = context.names self.env = env or Environment() self.fragments = [] # type: List[str] self._indent = 0 # Low-level operations def indent(self) -> None: self._indent += 4 def dedent(self) -> None: self._indent -= 4 assert self._indent >= 0 def label(self, label: BasicBlock) -> str: return 'CPyL%s' % label.label def reg(self, reg: Value) -> str: return REG_PREFIX + reg.name def attr(self, name: str) -> str: return ATTR_PREFIX + name def emit_line(self, line: str = '') -> None: if line.startswith('}'): self.dedent() self.fragments.append(self._indent * ' ' + line + '\n') if line.endswith('{'): self.indent() def emit_lines(self, *lines: str) -> None: for line in lines: self.emit_line(line) def emit_label(self, label: Union[BasicBlock, str]) -> None: if isinstance(label, str): text = label else: text = self.label(label) # Extra semicolon prevents an error when the next line declares a tempvar self.fragments.append('{}: ;\n'.format(text)) def emit_from_emitter(self, emitter: 'Emitter') -> None: self.fragments.extend(emitter.fragments) def emit_printf(self, fmt: str, *args: str) -> None: fmt = fmt.replace('\n', '\\n') self.emit_line('printf(%s);' % ', '.join(['"%s"' % fmt] + list(args))) self.emit_line('fflush(stdout);') def temp_name(self) -> str: self.context.temp_counter += 1 return '__tmp%d' % self.context.temp_counter def new_label(self) -> str: self.context.temp_counter += 1 return '__LL%d' % self.context.temp_counter def get_module_group_prefix(self, module_name: str) -> str: """Get the group prefix for a module (relative to the current group). The prefix should be prepended to the object name whenever accessing an object from this module. If the module lives is in the current compilation group, there is no prefix. 
But if it lives in a different group (and hence a separate extension module), we need to access objects from it indirectly via an export table. For example, for code in group `a` to call a function `bar` in group `b`, it would need to do `exports_b.CPyDef_bar(...)`, while code that is also in group `b` can simply do `CPyDef_bar(...)`. Thus the prefix for a module in group `b` is 'exports_b.' if the current group is *not* b and just '' if it is. """ groups = self.context.group_map target_group_name = groups.get(module_name) if target_group_name and target_group_name != self.context.group_name: self.context.group_deps.add(target_group_name) return 'exports_{}.'.format(exported_name(target_group_name)) else: return '' def get_group_prefix(self, obj: Union[ClassIR, FuncDecl]) -> str: """Get the group prefix for an object.""" # See docs above return self.get_module_group_prefix(obj.module_name) def static_name(self, id: str, module: Optional[str], prefix: str = STATIC_PREFIX) -> str: """Create name of a C static variable. These are used for literals and imported modules, among other things. The caller should ensure that the (id, module) pair cannot overlap with other calls to this method within a compilation group. """ lib_prefix = '' if not module else self.get_module_group_prefix(module) # If we are accessing static via the export table, we need to dereference # the pointer also. 
star_maybe = '*' if lib_prefix else '' suffix = self.names.private_name(module or '', id) return '{}{}{}{}'.format(star_maybe, lib_prefix, prefix, suffix) def type_struct_name(self, cl: ClassIR) -> str: return self.static_name(cl.name, cl.module_name, prefix=TYPE_PREFIX) def ctype(self, rtype: RType) -> str: return rtype._ctype def ctype_spaced(self, rtype: RType) -> str: """Adds a space after ctype for non-pointers.""" ctype = self.ctype(rtype) if ctype[-1] == '*': return ctype else: return ctype + ' ' def c_undefined_value(self, rtype: RType) -> str: if not rtype.is_unboxed: return 'NULL' elif isinstance(rtype, RPrimitive): return rtype.c_undefined elif isinstance(rtype, RTuple): return self.tuple_undefined_value(rtype) assert False, rtype def c_error_value(self, rtype: RType) -> str: return self.c_undefined_value(rtype) def native_function_name(self, fn: FuncDecl) -> str: return '{}{}'.format(NATIVE_PREFIX, fn.cname(self.names)) def tuple_c_declaration(self, rtuple: RTuple) -> List[str]: result = [ '#ifndef MYPYC_DECLARED_{}'.format(rtuple.struct_name), '#define MYPYC_DECLARED_{}'.format(rtuple.struct_name), 'typedef struct {} {{'.format(rtuple.struct_name), ] if len(rtuple.types) == 0: # empty tuple # Empty tuples contain a flag so that they can still indicate # error values. 
result.append('int empty_struct_error_flag;') else: i = 0 for typ in rtuple.types: result.append('{}f{};'.format(self.ctype_spaced(typ), i)) i += 1 result.append('}} {};'.format(rtuple.struct_name)) values = self.tuple_undefined_value_helper(rtuple) result.append('static {} {} = {{ {} }};'.format( self.ctype(rtuple), self.tuple_undefined_value(rtuple), ''.join(values))) result.append('#endif') result.append('') return result def tuple_undefined_check_cond( self, rtuple: RTuple, tuple_expr_in_c: str, c_type_compare_val: Callable[[RType], str], compare: str) -> str: if len(rtuple.types) == 0: # empty tuple return '{}.empty_struct_error_flag {} {}'.format( tuple_expr_in_c, compare, c_type_compare_val(int_rprimitive)) item_type = rtuple.types[0] if isinstance(item_type, RTuple): return self.tuple_undefined_check_cond( item_type, tuple_expr_in_c + '.f0', c_type_compare_val, compare) else: return '{}.f0 {} {}'.format( tuple_expr_in_c, compare, c_type_compare_val(item_type)) def tuple_undefined_value(self, rtuple: RTuple) -> str: return 'tuple_undefined_' + rtuple.unique_id def tuple_undefined_value_helper(self, rtuple: RTuple) -> List[str]: res = [] # see tuple_c_declaration() if len(rtuple.types) == 0: return [self.c_undefined_value(int_rprimitive)] for item in rtuple.types: if not isinstance(item, RTuple): res.append(self.c_undefined_value(item)) else: sub_list = self.tuple_undefined_value_helper(item) res.append('{ ') res.extend(sub_list) res.append(' }') res.append(', ') return res[:-1] # Higher-level operations def declare_tuple_struct(self, tuple_type: RTuple) -> None: if tuple_type.struct_name not in self.context.declarations: dependencies = set() for typ in tuple_type.types: # XXX other types might eventually need similar behavior if isinstance(typ, RTuple): dependencies.add(typ.struct_name) self.context.declarations[tuple_type.struct_name] = HeaderDeclaration( self.tuple_c_declaration(tuple_type), dependencies=dependencies, is_type=True, ) def emit_inc_ref(self, 
dest: str, rtype: RType) -> None: """Increment reference count of C expression `dest`. For composite unboxed structures (e.g. tuples) recursively increment reference counts for each component. """ if is_int_rprimitive(rtype): self.emit_line('CPyTagged_IncRef(%s);' % dest) elif isinstance(rtype, RTuple): for i, item_type in enumerate(rtype.types): self.emit_inc_ref('{}.f{}'.format(dest, i), item_type) elif not rtype.is_unboxed: self.emit_line('CPy_INCREF(%s);' % dest) # Otherwise assume it's an unboxed, pointerless value and do nothing. def emit_dec_ref(self, dest: str, rtype: RType, is_xdec: bool = False) -> None: """Decrement reference count of C expression `dest`. For composite unboxed structures (e.g. tuples) recursively decrement reference counts for each component. """ x = 'X' if is_xdec else '' if is_int_rprimitive(rtype): self.emit_line('CPyTagged_%sDecRef(%s);' % (x, dest)) elif isinstance(rtype, RTuple): for i, item_type in enumerate(rtype.types): self.emit_dec_ref('{}.f{}'.format(dest, i), item_type, is_xdec) elif not rtype.is_unboxed: self.emit_line('CPy_%sDecRef(%s);' % (x, dest)) # Otherwise assume it's an unboxed, pointerless value and do nothing. def pretty_name(self, typ: RType) -> str: value_type = optional_value_type(typ) if value_type is not None: return '%s or None' % self.pretty_name(value_type) return str(typ) def emit_cast(self, src: str, dest: str, typ: RType, declare_dest: bool = False, custom_message: Optional[str] = None, optional: bool = False, src_type: Optional[RType] = None, likely: bool = True) -> None: """Emit code for casting a value of given type. Somewhat strangely, this supports unboxed types but only operates on boxed versions. This is necessary to properly handle types such as Optional[int] in compatibility glue. Assign NULL (error value) to dest if the value has an incompatible type. Always copy/steal the reference in src. 
Args: src: Name of source C variable dest: Name of target C variable typ: Type of value declare_dest: If True, also declare the variable 'dest' likely: If the cast is likely to succeed (can be False for unions) """ if custom_message is not None: err = custom_message else: err = 'CPy_TypeError("{}", {});'.format(self.pretty_name(typ), src) # Special case casting *from* optional if src_type and is_optional_type(src_type) and not is_object_rprimitive(typ): value_type = optional_value_type(src_type) assert value_type is not None if is_same_type(value_type, typ): if declare_dest: self.emit_line('PyObject *{};'.format(dest)) check = '({} != Py_None)' if likely: check = '(likely{})'.format(check) self.emit_arg_check(src, dest, typ, check.format(src), optional) self.emit_lines( ' {} = {};'.format(dest, src), 'else {', err, '{} = NULL;'.format(dest), '}') return # TODO: Verify refcount handling. if (is_list_rprimitive(typ) or is_dict_rprimitive(typ) or is_set_rprimitive(typ) or is_float_rprimitive(typ) or is_str_rprimitive(typ) or is_int_rprimitive(typ) or is_bool_rprimitive(typ)): if declare_dest: self.emit_line('PyObject *{};'.format(dest)) if is_list_rprimitive(typ): prefix = 'PyList' elif is_dict_rprimitive(typ): prefix = 'PyDict' elif is_set_rprimitive(typ): prefix = 'PySet' elif is_float_rprimitive(typ): prefix = 'CPyFloat' elif is_str_rprimitive(typ): prefix = 'PyUnicode' elif is_int_rprimitive(typ): prefix = 'PyLong' elif is_bool_rprimitive(typ): prefix = 'PyBool' else: assert False, 'unexpected primitive type' check = '({}_Check({}))' if likely: check = '(likely{})'.format(check) self.emit_arg_check(src, dest, typ, check.format(prefix, src), optional) self.emit_lines( ' {} = {};'.format(dest, src), 'else {', err, '{} = NULL;'.format(dest), '}') elif is_tuple_rprimitive(typ): if declare_dest: self.emit_line('{} {};'.format(self.ctype(typ), dest)) check = '(PyTuple_Check({}))' if likely: check = '(likely{})'.format(check) self.emit_arg_check(src, dest, typ, 
check.format(src), optional) self.emit_lines( ' {} = {};'.format(dest, src), 'else {', err, '{} = NULL;'.format(dest), '}') elif isinstance(typ, RInstance): if declare_dest: self.emit_line('PyObject *{};'.format(dest)) concrete = all_concrete_classes(typ.class_ir) # If there are too many concrete subclasses or we can't find any # (meaning the code ought to be dead or we aren't doing global opts), # fall back to a normal typecheck. # Otherwise check all the subclasses. if not concrete or len(concrete) > FAST_ISINSTANCE_MAX_SUBCLASSES + 1: check = '(PyObject_TypeCheck({}, {}))'.format( src, self.type_struct_name(typ.class_ir)) else: full_str = '(Py_TYPE({src}) == {targets[0]})' for i in range(1, len(concrete)): full_str += ' || (Py_TYPE({src}) == {targets[%d]})' % i if len(concrete) > 1: full_str = '(%s)' % full_str check = full_str.format( src=src, targets=[self.type_struct_name(ir) for ir in concrete]) if likely: check = '(likely{})'.format(check) self.emit_arg_check(src, dest, typ, check, optional) self.emit_lines( ' {} = {};'.format(dest, src), 'else {', err, '{} = NULL;'.format(dest), '}') elif is_none_rprimitive(typ): if declare_dest: self.emit_line('PyObject *{};'.format(dest)) check = '({} == Py_None)' if likely: check = '(likely{})'.format(check) self.emit_arg_check(src, dest, typ, check.format(src), optional) self.emit_lines( ' {} = {};'.format(dest, src), 'else {', err, '{} = NULL;'.format(dest), '}') elif is_object_rprimitive(typ): if declare_dest: self.emit_line('PyObject *{};'.format(dest)) self.emit_arg_check(src, dest, typ, '', optional) self.emit_line('{} = {};'.format(dest, src)) if optional: self.emit_line('}') elif isinstance(typ, RUnion): self.emit_union_cast(src, dest, typ, declare_dest, err, optional, src_type) elif isinstance(typ, RTuple): assert not optional self.emit_tuple_cast(src, dest, typ, declare_dest, err, src_type) else: assert False, 'Cast not implemented: %s' % typ def emit_union_cast(self, src: str, dest: str, typ: RUnion, 
declare_dest: bool, err: str, optional: bool, src_type: Optional[RType]) -> None: """Emit cast to a union type. The arguments are similar to emit_cast. """ if declare_dest: self.emit_line('PyObject *{};'.format(dest)) good_label = self.new_label() if optional: self.emit_line('if ({} == NULL) {{'.format(src)) self.emit_line('{} = {};'.format(dest, self.c_error_value(typ))) self.emit_line('goto {};'.format(good_label)) self.emit_line('}') for item in typ.items: self.emit_cast(src, dest, item, declare_dest=False, custom_message='', optional=False, likely=False) self.emit_line('if ({} != NULL) goto {};'.format(dest, good_label)) # Handle cast failure. self.emit_line(err) self.emit_label(good_label) def emit_tuple_cast(self, src: str, dest: str, typ: RTuple, declare_dest: bool, err: str, src_type: Optional[RType]) -> None: """Emit cast to a tuple type. The arguments are similar to emit_cast. """ if declare_dest: self.emit_line('PyObject *{};'.format(dest)) # This reuse of the variable is super dodgy. We don't even # care about the values except to check whether they are # invalid. 
out_label = self.new_label() self.emit_lines( 'if (unlikely(!(PyTuple_Check({r}) && PyTuple_GET_SIZE({r}) == {size}))) {{'.format( r=src, size=len(typ.types)), '{} = NULL;'.format(dest), 'goto {};'.format(out_label), '}') for i, item in enumerate(typ.types): # Since we did the checks above this should never fail self.emit_cast('PyTuple_GET_ITEM({}, {})'.format(src, i), dest, item, declare_dest=False, custom_message='', optional=False) self.emit_line('if ({} == NULL) goto {};'.format(dest, out_label)) self.emit_line('{} = {};'.format(dest, src)) self.emit_label(out_label) def emit_arg_check(self, src: str, dest: str, typ: RType, check: str, optional: bool) -> None: if optional: self.emit_line('if ({} == NULL) {{'.format(src)) self.emit_line('{} = {};'.format(dest, self.c_error_value(typ))) if check != '': self.emit_line('{}if {}'.format('} else ' if optional else '', check)) elif optional: self.emit_line('else {') def emit_unbox(self, src: str, dest: str, typ: RType, custom_failure: Optional[str] = None, declare_dest: bool = False, borrow: bool = False, optional: bool = False) -> None: """Emit code for unboxing a value of given type (from PyObject *). Evaluate C code in 'failure' if the value has an incompatible type. Always generate a new reference. Args: src: Name of source C variable dest: Name of target C variable typ: Type of value failure: What happens on error declare_dest: If True, also declare the variable 'dest' borrow: If True, create a borrowed reference """ # TODO: Verify refcount handling. 
raise_exc = 'CPy_TypeError("{}", {});'.format(self.pretty_name(typ), src) if custom_failure is not None: failure = [raise_exc, custom_failure] else: failure = [raise_exc, '%s = %s;' % (dest, self.c_error_value(typ))] if is_int_rprimitive(typ) or is_short_int_rprimitive(typ): if declare_dest: self.emit_line('CPyTagged {};'.format(dest)) self.emit_arg_check(src, dest, typ, '(likely(PyLong_Check({})))'.format(src), optional) if borrow: self.emit_line(' {} = CPyTagged_BorrowFromObject({});'.format(dest, src)) else: self.emit_line(' {} = CPyTagged_FromObject({});'.format(dest, src)) self.emit_line('else {') self.emit_lines(*failure) self.emit_line('}') elif is_bool_rprimitive(typ): # Whether we are borrowing or not makes no difference. if declare_dest: self.emit_line('char {};'.format(dest)) self.emit_arg_check(src, dest, typ, '(unlikely(!PyBool_Check({}))) {{'.format(src), optional) self.emit_lines(*failure) self.emit_line('} else') conversion = '{} == Py_True'.format(src) self.emit_line(' {} = {};'.format(dest, conversion)) elif is_none_rprimitive(typ): # Whether we are borrowing or not makes no difference. if declare_dest: self.emit_line('char {};'.format(dest)) self.emit_arg_check(src, dest, typ, '(unlikely({} != Py_None)) {{'.format(src), optional) self.emit_lines(*failure) self.emit_line('} else') self.emit_line(' {} = 1;'.format(dest)) elif isinstance(typ, RTuple): self.declare_tuple_struct(typ) if declare_dest: self.emit_line('{} {};'.format(self.ctype(typ), dest)) # HACK: The error handling for unboxing tuples is busted # and instead of fixing it I am just wrapping it in the # cast code which I think is right. This is not good. 
if optional: self.emit_line('if ({} == NULL) {{'.format(src)) self.emit_line('{} = {};'.format(dest, self.c_error_value(typ))) self.emit_line('} else {') cast_temp = self.temp_name() self.emit_tuple_cast(src, cast_temp, typ, declare_dest=True, err='', src_type=None) self.emit_line('if (unlikely({} == NULL)) {{'.format(cast_temp)) # self.emit_arg_check(src, dest, typ, # '(!PyTuple_Check({}) || PyTuple_Size({}) != {}) {{'.format( # src, src, len(typ.types)), optional) self.emit_lines(*failure) # TODO: Decrease refcount? self.emit_line('} else {') if not typ.types: self.emit_line('{}.empty_struct_error_flag = 0;'.format(dest)) for i, item_type in enumerate(typ.types): temp = self.temp_name() # emit_tuple_cast above checks the size, so this should not fail self.emit_line('PyObject *{} = PyTuple_GET_ITEM({}, {});'.format(temp, src, i)) temp2 = self.temp_name() # Unbox or check the item. if item_type.is_unboxed: self.emit_unbox(temp, temp2, item_type, custom_failure, declare_dest=True, borrow=borrow) else: if not borrow: self.emit_inc_ref(temp, object_rprimitive) self.emit_cast(temp, temp2, item_type, declare_dest=True) self.emit_line('{}.f{} = {};'.format(dest, i, temp2)) self.emit_line('}') if optional: self.emit_line('}') else: assert False, 'Unboxing not implemented: %s' % typ def emit_box(self, src: str, dest: str, typ: RType, declare_dest: bool = False, can_borrow: bool = False) -> None: """Emit code for boxing a value of given type. Generate a simple assignment if no boxing is needed. The source reference count is stolen for the result (no need to decref afterwards). """ # TODO: Always generate a new reference (if a reference type) if declare_dest: declaration = 'PyObject *' else: declaration = '' if is_int_rprimitive(typ) or is_short_int_rprimitive(typ): # Steal the existing reference if it exists. 
self.emit_line('{}{} = CPyTagged_StealAsObject({});'.format(declaration, dest, src)) elif is_bool_rprimitive(typ): # N.B: bool is special cased to produce a borrowed value # after boxing, so we don't need to increment the refcount # when this comes directly from a Box op. self.emit_lines('{}{} = {} ? Py_True : Py_False;'.format(declaration, dest, src)) if not can_borrow: self.emit_inc_ref(dest, object_rprimitive) elif is_none_rprimitive(typ): # N.B: None is special cased to produce a borrowed value # after boxing, so we don't need to increment the refcount # when this comes directly from a Box op. self.emit_lines('{}{} = Py_None;'.format(declaration, dest)) if not can_borrow: self.emit_inc_ref(dest, object_rprimitive) elif isinstance(typ, RTuple): self.declare_tuple_struct(typ) self.emit_line('{}{} = PyTuple_New({});'.format(declaration, dest, len(typ.types))) self.emit_line('if (unlikely({} == NULL))'.format(dest)) self.emit_line(' CPyError_OutOfMemory();') # TODO: Fail if dest is None for i in range(0, len(typ.types)): if not typ.is_unboxed: self.emit_line('PyTuple_SET_ITEM({}, {}, {}.f{}'.format(dest, i, src, i)) else: inner_name = self.temp_name() self.emit_box('{}.f{}'.format(src, i), inner_name, typ.types[i], declare_dest=True) self.emit_line('PyTuple_SET_ITEM({}, {}, {});'.format(dest, i, inner_name)) else: assert not typ.is_unboxed # Type is boxed -- trivially just assign. self.emit_line('{}{} = {};'.format(declaration, dest, src)) def emit_error_check(self, value: str, rtype: RType, failure: str) -> None: """Emit code for checking a native function return value for uncaught exception.""" if not isinstance(rtype, RTuple): self.emit_line('if ({} == {}) {{'.format(value, self.c_error_value(rtype))) else: if len(rtype.types) == 0: return # empty tuples can't fail. 
else: cond = self.tuple_undefined_check_cond(rtype, value, self.c_error_value, '==') self.emit_line('if ({}) {{'.format(cond)) self.emit_lines(failure, '}') def emit_gc_visit(self, target: str, rtype: RType) -> None: """Emit code for GC visiting a C variable reference. Assume that 'target' represents a C expression that refers to a struct member, such as 'self->x'. """ if not rtype.is_refcounted: # Not refcounted -> no pointers -> no GC interaction. return elif isinstance(rtype, RPrimitive) and rtype.name == 'builtins.int': self.emit_line('if (CPyTagged_CheckLong({})) {{'.format(target)) self.emit_line('Py_VISIT(CPyTagged_LongAsObject({}));'.format(target)) self.emit_line('}') elif isinstance(rtype, RTuple): for i, item_type in enumerate(rtype.types): self.emit_gc_visit('{}.f{}'.format(target, i), item_type) elif self.ctype(rtype) == 'PyObject *': # The simplest case. self.emit_line('Py_VISIT({});'.format(target)) else: assert False, 'emit_gc_visit() not implemented for %s' % repr(rtype) def emit_gc_clear(self, target: str, rtype: RType) -> None: """Emit code for clearing a C attribute reference for GC. Assume that 'target' represents a C expression that refers to a struct member, such as 'self->x'. """ if not rtype.is_refcounted: # Not refcounted -> no pointers -> no GC interaction. return elif isinstance(rtype, RPrimitive) and rtype.name == 'builtins.int': self.emit_line('if (CPyTagged_CheckLong({})) {{'.format(target)) self.emit_line('CPyTagged __tmp = {};'.format(target)) self.emit_line('{} = {};'.format(target, self.c_undefined_value(rtype))) self.emit_line('Py_XDECREF(CPyTagged_LongAsObject(__tmp));') self.emit_line('}') elif isinstance(rtype, RTuple): for i, item_type in enumerate(rtype.types): self.emit_gc_clear('{}.f{}'.format(target, i), item_type) elif self.ctype(rtype) == 'PyObject *' and self.c_undefined_value(rtype) == 'NULL': # The simplest case. 
self.emit_line('Py_CLEAR({});'.format(target)) else: assert False, 'emit_gc_clear() not implemented for %s' % repr(rtype) mypy-0.761/mypyc/emitclass.py0000644€tŠÔÚ€2›s®0000010507613576752246022430 0ustar jukkaDROPBOX\Domain Users00000000000000"""Code generation for native classes and related wrappers.""" from typing import Optional, List, Tuple, Dict, Callable, Mapping, Set from collections import OrderedDict from mypyc.common import PREFIX, NATIVE_PREFIX, REG_PREFIX from mypyc.emit import Emitter, HeaderDeclaration from mypyc.emitfunc import native_function_header, native_getter_name, native_setter_name from mypyc.emitwrapper import ( generate_dunder_wrapper, generate_hash_wrapper, generate_richcompare_wrapper, generate_bool_wrapper, generate_get_wrapper, ) from mypyc.ops import ( ClassIR, FuncIR, FuncDecl, RType, RTuple, object_rprimitive, VTableMethod, VTableEntries, FUNC_STATICMETHOD, FUNC_CLASSMETHOD, ) from mypyc.sametype import is_same_type from mypyc.namegen import NameGenerator def native_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: return '{}{}'.format(NATIVE_PREFIX, fn.cname(emitter.names)) def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: return '{}{}'.format(PREFIX, fn.cname(emitter.names)) # We maintain a table from dunder function names to struct slots they # correspond to and functions that generate a wrapper (if necessary) # and return the function name to stick in the slot. 
# TODO: Add remaining dunder methods
SlotGenerator = Callable[[ClassIR, FuncIR, Emitter], str]
SlotTable = Mapping[str, Tuple[str, SlotGenerator]]

SLOT_DEFS = {
    '__init__': ('tp_init', lambda c, t, e: generate_init_for_class(c, t, e)),
    '__call__': ('tp_call', wrapper_slot),
    '__str__': ('tp_str', native_slot),
    '__repr__': ('tp_repr', native_slot),
    '__next__': ('tp_iternext', native_slot),
    '__iter__': ('tp_iter', native_slot),
    '__hash__': ('tp_hash', generate_hash_wrapper),
    '__get__': ('tp_descr_get', generate_get_wrapper),
}  # type: SlotTable

AS_MAPPING_SLOT_DEFS = {
    '__getitem__': ('mp_subscript', generate_dunder_wrapper),
}  # type: SlotTable

AS_NUMBER_SLOT_DEFS = {
    '__bool__': ('nb_bool', generate_bool_wrapper),
}  # type: SlotTable

AS_ASYNC_SLOT_DEFS = {
    '__await__': ('am_await', native_slot),
    '__aiter__': ('am_aiter', native_slot),
    '__anext__': ('am_anext', native_slot),
}  # type: SlotTable

SIDE_TABLES = [
    ('as_mapping', 'PyMappingMethods', AS_MAPPING_SLOT_DEFS),
    ('as_number', 'PyNumberMethods', AS_NUMBER_SLOT_DEFS),
    ('as_async', 'PyAsyncMethods', AS_ASYNC_SLOT_DEFS),
]

# Slots that need to always be filled in because they don't get
# inherited right.
ALWAYS_FILL = {
    '__hash__',
}


def generate_slots(cl: ClassIR, table: SlotTable, emitter: Emitter) -> Dict[str, str]:
    """Map type-object slot names to generated function names for cl's dunders."""
    fields = OrderedDict()  # type: Dict[str, str]
    # Sort for determinism on Python 3.5
    for name, (slot, generator) in sorted(table.items()):
        method_cls = cl.get_method_and_class(name)
        # Only fill a slot for methods defined on this class itself,
        # except the ALWAYS_FILL ones, which don't inherit correctly.
        if method_cls and (method_cls[1] == cl or name in ALWAYS_FILL):
            fields[slot] = generator(cl, method_cls[0], emitter)
    return fields


def generate_class_type_decl(cl: ClassIR, c_emitter: Emitter,
                             external_emitter: Emitter,
                             emitter: Emitter) -> None:
    context = c_emitter.context
    name = emitter.type_struct_name(cl)
    context.declarations[name] = HeaderDeclaration(
        'PyTypeObject *{};'.format(emitter.type_struct_name(cl)),
        needs_export=True)

    # If this is a non-extension class, all we want is the type object decl.
if not cl.is_ext_class: return generate_object_struct(cl, external_emitter) generate_full = not cl.is_trait and not cl.builtin_base if generate_full: declare_native_getters_and_setters(cl, emitter) context.declarations[emitter.native_function_name(cl.ctor)] = HeaderDeclaration( '{};'.format(native_function_header(cl.ctor, emitter)), needs_export=True, ) def generate_class(cl: ClassIR, module: str, emitter: Emitter) -> None: """Generate C code for a class. This is the main entry point to the module. """ name = cl.name name_prefix = cl.name_prefix(emitter.names) setup_name = '{}_setup'.format(name_prefix) new_name = '{}_new'.format(name_prefix) members_name = '{}_members'.format(name_prefix) getseters_name = '{}_getseters'.format(name_prefix) vtable_name = '{}_vtable'.format(name_prefix) traverse_name = '{}_traverse'.format(name_prefix) clear_name = '{}_clear'.format(name_prefix) dealloc_name = '{}_dealloc'.format(name_prefix) methods_name = '{}_methods'.format(name_prefix) vtable_setup_name = '{}_trait_vtable_setup'.format(name_prefix) fields = OrderedDict() # type: Dict[str, str] fields['tp_name'] = '"{}"'.format(name) generate_full = not cl.is_trait and not cl.builtin_base needs_getseters = not cl.is_generated if generate_full: fields['tp_new'] = new_name fields['tp_dealloc'] = '(destructor){}_dealloc'.format(name_prefix) fields['tp_traverse'] = '(traverseproc){}_traverse'.format(name_prefix) fields['tp_clear'] = '(inquiry){}_clear'.format(name_prefix) if needs_getseters: fields['tp_getset'] = getseters_name fields['tp_methods'] = methods_name def emit_line() -> None: emitter.emit_line() emit_line() # If the class has a method to initialize default attribute # values, we need to call it during initialization. defaults_fn = cl.get_method('__mypyc_defaults_setup') # If there is a __init__ method, we'll use it in the native constructor. init_fn = cl.get_method('__init__') # Fill out slots in the type object from dunder methods. 
fields.update(generate_slots(cl, SLOT_DEFS, emitter)) # Fill out dunder methods that live in tables hanging off the side. for table_name, type, slot_defs in SIDE_TABLES: slots = generate_slots(cl, slot_defs, emitter) if slots: table_struct_name = generate_side_table_for_class(cl, table_name, type, slots, emitter) fields['tp_{}'.format(table_name)] = '&{}'.format(table_struct_name) richcompare_name = generate_richcompare_wrapper(cl, emitter) if richcompare_name: fields['tp_richcompare'] = richcompare_name # If the class inherits from python, make space for a __dict__ struct_name = cl.struct_name(emitter.names) if cl.builtin_base: base_size = 'sizeof({})'.format(cl.builtin_base) elif cl.is_trait: base_size = 'sizeof(PyObject)' else: base_size = 'sizeof({})'.format(struct_name) # Since our types aren't allocated using type() we need to # populate these fields ourselves if we want them to have correct # values. PyType_Ready will inherit the offsets from tp_base but # that isn't what we want. # XXX: there is no reason for the __weakref__ stuff to be mixed up with __dict__ if cl.has_dict: # __dict__ lives right after the struct and __weakref__ lives right after that # TODO: They should get members in the struct instead of doing this nonsense. weak_offset = '{} + sizeof(PyObject *)'.format(base_size) emitter.emit_lines( 'PyMemberDef {}[] = {{'.format(members_name), '{{"__dict__", T_OBJECT_EX, {}, 0, NULL}},'.format(base_size), '{{"__weakref__", T_OBJECT_EX, {}, 0, NULL}},'.format(weak_offset), '{0}', '};', ) fields['tp_members'] = members_name fields['tp_basicsize'] = '{} + 2*sizeof(PyObject *)'.format(base_size) fields['tp_dictoffset'] = base_size fields['tp_weaklistoffset'] = weak_offset else: fields['tp_basicsize'] = base_size if generate_full: # Declare setup method that allocates and initializes an object. type is the # type of the class being initialized, which could be another class if there # is an interpreted subclass. 
emitter.emit_line('static PyObject *{}(PyTypeObject *type);'.format(setup_name)) assert cl.ctor is not None emitter.emit_line(native_function_header(cl.ctor, emitter) + ';') emit_line() generate_new_for_class(cl, new_name, vtable_name, setup_name, emitter) emit_line() generate_traverse_for_class(cl, traverse_name, emitter) emit_line() generate_clear_for_class(cl, clear_name, emitter) emit_line() generate_dealloc_for_class(cl, dealloc_name, clear_name, emitter) emit_line() generate_native_getters_and_setters(cl, emitter) if cl.allow_interpreted_subclasses: shadow_vtable_name = generate_vtables( cl, vtable_setup_name + "_shadow", vtable_name + "_shadow", emitter, shadow=True ) # type: Optional[str] emit_line() else: shadow_vtable_name = None vtable_name = generate_vtables(cl, vtable_setup_name, vtable_name, emitter, shadow=False) emit_line() if needs_getseters: generate_getseter_declarations(cl, emitter) emit_line() generate_getseters_table(cl, getseters_name, emitter) emit_line() generate_methods_table(cl, methods_name, emitter) emit_line() flags = ['Py_TPFLAGS_DEFAULT', 'Py_TPFLAGS_HEAPTYPE', 'Py_TPFLAGS_BASETYPE'] if generate_full: flags.append('Py_TPFLAGS_HAVE_GC') fields['tp_flags'] = ' | '.join(flags) emitter.emit_line("static PyTypeObject {}_template_ = {{".format(emitter.type_struct_name(cl))) emitter.emit_line("PyVarObject_HEAD_INIT(NULL, 0)") for field, value in fields.items(): emitter.emit_line(".{} = {},".format(field, value)) emitter.emit_line("};") emitter.emit_line("static PyTypeObject *{t}_template = &{t}_template_;".format( t=emitter.type_struct_name(cl))) emitter.emit_line() if generate_full: generate_setup_for_class( cl, setup_name, defaults_fn, vtable_name, shadow_vtable_name, emitter) emitter.emit_line() generate_constructor_for_class( cl, cl.ctor, init_fn, setup_name, vtable_name, emitter) emitter.emit_line() if needs_getseters: generate_getseters(cl, emitter) def getter_name(cl: ClassIR, attribute: str, names: NameGenerator) -> str: return 
names.private_name(cl.module_name, '{}_get{}'.format(cl.name, attribute)) def setter_name(cl: ClassIR, attribute: str, names: NameGenerator) -> str: return names.private_name(cl.module_name, '{}_set{}'.format(cl.name, attribute)) def generate_object_struct(cl: ClassIR, emitter: Emitter) -> None: seen_attrs = set() # type: Set[Tuple[str, RType]] lines = [] # type: List[str] lines += ['typedef struct {', 'PyObject_HEAD', 'CPyVTableItem *vtable;'] for base in reversed(cl.base_mro): if not base.is_trait: for attr, rtype in base.attributes.items(): if (attr, rtype) not in seen_attrs: lines.append('{}{};'.format(emitter.ctype_spaced(rtype), emitter.attr(attr))) seen_attrs.add((attr, rtype)) if isinstance(rtype, RTuple): emitter.declare_tuple_struct(rtype) lines.append('}} {};'.format(cl.struct_name(emitter.names))) lines.append('') emitter.context.declarations[cl.struct_name(emitter.names)] = HeaderDeclaration( lines, is_type=True ) def declare_native_getters_and_setters(cl: ClassIR, emitter: Emitter) -> None: decls = emitter.context.declarations for attr, rtype in cl.attributes.items(): getter_name = native_getter_name(cl, attr, emitter.names) setter_name = native_setter_name(cl, attr, emitter.names) decls[getter_name] = HeaderDeclaration( '{}{}({} *self);'.format(emitter.ctype_spaced(rtype), getter_name, cl.struct_name(emitter.names)), needs_export=True, ) decls[setter_name] = HeaderDeclaration( 'bool {}({} *self, {}value);'.format(native_setter_name(cl, attr, emitter.names), cl.struct_name(emitter.names), emitter.ctype_spaced(rtype)), needs_export=True, ) def generate_native_getters_and_setters(cl: ClassIR, emitter: Emitter) -> None: for attr, rtype in cl.attributes.items(): attr_field = emitter.attr(attr) # Native getter emitter.emit_line('{}{}({} *self)'.format(emitter.ctype_spaced(rtype), native_getter_name(cl, attr, emitter.names), cl.struct_name(emitter.names))) emitter.emit_line('{') if rtype.is_refcounted: emit_undefined_check(rtype, emitter, attr_field, '==') 
emitter.emit_lines( 'PyErr_SetString(PyExc_AttributeError, "attribute {} of {} undefined");'.format( repr(attr), repr(cl.name)), '} else {') emitter.emit_inc_ref('self->{}'.format(attr_field), rtype) emitter.emit_line('}') emitter.emit_line('return self->{};'.format(attr_field)) emitter.emit_line('}') emitter.emit_line() # Native setter emitter.emit_line( 'bool {}({} *self, {}value)'.format(native_setter_name(cl, attr, emitter.names), cl.struct_name(emitter.names), emitter.ctype_spaced(rtype))) emitter.emit_line('{') if rtype.is_refcounted: emit_undefined_check(rtype, emitter, attr_field, '!=') emitter.emit_dec_ref('self->{}'.format(attr_field), rtype) emitter.emit_line('}') # This steal the reference to src, so we don't need to increment the arg emitter.emit_lines('self->{} = value;'.format(attr_field), 'return 1;', '}') emitter.emit_line() def generate_vtables(base: ClassIR, vtable_setup_name: str, vtable_name: str, emitter: Emitter, shadow: bool) -> str: """Emit the vtables and vtable setup functions for a class. This includes both the primary vtable and any trait implementation vtables. To account for both dynamic loading and dynamic class creation, vtables are populated dynamically at class creation time, so we emit empty array definitions to store the vtables and a function to populate them. If shadow is True, generate "shadow vtables" that point to the shadow glue methods (which should dispatch via the Python C-API). Returns the expression to use to refer to the vtable, which might be different than the name, if there are trait vtables. 
""" def trait_vtable_name(trait: ClassIR) -> str: return '{}_{}_trait_vtable{}'.format( base.name_prefix(emitter.names), trait.name_prefix(emitter.names), '_shadow' if shadow else '') # Emit array definitions with enough space for all the entries emitter.emit_line('static CPyVTableItem {}[{}];'.format( vtable_name, max(1, len(base.vtable_entries) + 2 * len(base.trait_vtables)))) for trait, vtable in base.trait_vtables.items(): emitter.emit_line('static CPyVTableItem {}[{}];'.format( trait_vtable_name(trait), max(1, len(vtable)))) # Emit vtable setup function emitter.emit_line('static bool') emitter.emit_line('{}{}(void)'.format(NATIVE_PREFIX, vtable_setup_name)) emitter.emit_line('{') if base.allow_interpreted_subclasses and not shadow: emitter.emit_line('{}{}_shadow();'.format(NATIVE_PREFIX, vtable_setup_name)) subtables = [] for trait, vtable in base.trait_vtables.items(): name = trait_vtable_name(trait) generate_vtable(vtable, name, emitter, [], shadow) subtables.append((trait, name)) generate_vtable(base.vtable_entries, vtable_name, emitter, subtables, shadow) emitter.emit_line('return 1;') emitter.emit_line('}') return vtable_name if not subtables else "{} + {}".format(vtable_name, len(subtables) * 2) def generate_vtable(entries: VTableEntries, vtable_name: str, emitter: Emitter, subtables: List[Tuple[ClassIR, str]], shadow: bool) -> None: emitter.emit_line('CPyVTableItem {}_scratch[] = {{'.format(vtable_name)) if subtables: emitter.emit_line('/* Array of trait vtables */') for trait, table in subtables: emitter.emit_line('(CPyVTableItem){}, (CPyVTableItem){},'.format( emitter.type_struct_name(trait), table)) emitter.emit_line('/* Start of real vtable */') for entry in entries: if isinstance(entry, VTableMethod): method = entry.shadow_method if shadow and entry.shadow_method else entry.method emitter.emit_line('(CPyVTableItem){}{}{},'.format( emitter.get_group_prefix(entry.method.decl), NATIVE_PREFIX, method.cname(emitter.names))) else: cl, attr, is_setter = 
entry namer = native_setter_name if is_setter else native_getter_name emitter.emit_line('(CPyVTableItem){}{},'.format( emitter.get_group_prefix(cl), namer(cl, attr, emitter.names))) # msvc doesn't allow empty arrays; maybe allowing them at all is an extension? if not entries: emitter.emit_line('NULL') emitter.emit_line('};') emitter.emit_line('memcpy({name}, {name}_scratch, sizeof({name}));'.format(name=vtable_name)) def generate_setup_for_class(cl: ClassIR, func_name: str, defaults_fn: Optional[FuncIR], vtable_name: str, shadow_vtable_name: Optional[str], emitter: Emitter) -> None: """Generate a native function that allocates an instance of a class.""" emitter.emit_line('static PyObject *') emitter.emit_line('{}(PyTypeObject *type)'.format(func_name)) emitter.emit_line('{') emitter.emit_line('{} *self;'.format(cl.struct_name(emitter.names))) emitter.emit_line('self = ({struct} *)type->tp_alloc(type, 0);'.format( struct=cl.struct_name(emitter.names))) emitter.emit_line('if (self == NULL)') emitter.emit_line(' return NULL;') if shadow_vtable_name: emitter.emit_line('if (type != {}) {{'.format(emitter.type_struct_name(cl))) emitter.emit_line('self->vtable = {};'.format(shadow_vtable_name)) emitter.emit_line('} else {') emitter.emit_line('self->vtable = {};'.format(vtable_name)) emitter.emit_line('}') else: emitter.emit_line('self->vtable = {};'.format(vtable_name)) for base in reversed(cl.base_mro): for attr, rtype in base.attributes.items(): emitter.emit_line('self->{} = {};'.format( emitter.attr(attr), emitter.c_undefined_value(rtype))) # Initialize attributes to default values, if necessary if defaults_fn is not None: emitter.emit_lines( 'if ({}{}((PyObject *)self) == 0) {{'.format( NATIVE_PREFIX, defaults_fn.cname(emitter.names)), 'Py_DECREF(self);', 'return NULL;', '}') emitter.emit_line('return (PyObject *)self;') emitter.emit_line('}') def generate_constructor_for_class(cl: ClassIR, fn: FuncDecl, init_fn: Optional[FuncIR], setup_name: str, vtable_name: str, 
emitter: Emitter) -> None: """Generate a native function that allocates and initializes an instance of a class.""" emitter.emit_line('{}'.format(native_function_header(fn, emitter))) emitter.emit_line('{') emitter.emit_line('PyObject *self = {}({});'.format(setup_name, emitter.type_struct_name(cl))) emitter.emit_line('if (self == NULL)') emitter.emit_line(' return NULL;') args = ', '.join(['self'] + [REG_PREFIX + arg.name for arg in fn.sig.args]) if init_fn is not None: emitter.emit_line('char res = {}{}{}({});'.format( emitter.get_group_prefix(init_fn.decl), NATIVE_PREFIX, init_fn.cname(emitter.names), args)) emitter.emit_line('if (res == 2) {') emitter.emit_line('Py_DECREF(self);') emitter.emit_line('return NULL;') emitter.emit_line('}') # If there is a nontrivial ctor that we didn't define, invoke it via tp_init elif len(fn.sig.args) > 1: emitter.emit_line( 'int res = {}->tp_init({});'.format( emitter.type_struct_name(cl), args)) emitter.emit_line('if (res < 0) {') emitter.emit_line('Py_DECREF(self);') emitter.emit_line('return NULL;') emitter.emit_line('}') emitter.emit_line('return self;') emitter.emit_line('}') def generate_init_for_class(cl: ClassIR, init_fn: FuncIR, emitter: Emitter) -> str: """Generate an init function suitable for use as tp_init. tp_init needs to be a function that returns an int, and our __init__ methods return a PyObject. Translate NULL to -1, everything else to 0. """ func_name = '{}_init'.format(cl.name_prefix(emitter.names)) emitter.emit_line('static int') emitter.emit_line( '{}(PyObject *self, PyObject *args, PyObject *kwds)'.format(func_name)) emitter.emit_line('{') emitter.emit_line('return {}{}(self, args, kwds) != NULL ? 
0 : -1;'.format( PREFIX, init_fn.cname(emitter.names))) emitter.emit_line('}') return func_name def generate_new_for_class(cl: ClassIR, func_name: str, vtable_name: str, setup_name: str, emitter: Emitter) -> None: emitter.emit_line('static PyObject *') emitter.emit_line( '{}(PyTypeObject *type, PyObject *args, PyObject *kwds)'.format(func_name)) emitter.emit_line('{') # TODO: Check and unbox arguments if not cl.allow_interpreted_subclasses: emitter.emit_line('if (type != {}) {{'.format(emitter.type_struct_name(cl))) emitter.emit_line( 'PyErr_SetString(PyExc_TypeError, "interpreted classes cannot inherit from compiled");' ) emitter.emit_line('return NULL;') emitter.emit_line('}') emitter.emit_line('return {}(type);'.format(setup_name)) emitter.emit_line('}') def generate_traverse_for_class(cl: ClassIR, func_name: str, emitter: Emitter) -> None: """Emit function that performs cycle GC traversal of an instance.""" emitter.emit_line('static int') emitter.emit_line('{}({} *self, visitproc visit, void *arg)'.format( func_name, cl.struct_name(emitter.names))) emitter.emit_line('{') for base in reversed(cl.base_mro): for attr, rtype in base.attributes.items(): emitter.emit_gc_visit('self->{}'.format(emitter.attr(attr)), rtype) if cl.has_dict: struct_name = cl.struct_name(emitter.names) # __dict__ lives right after the struct and __weakref__ lives right after that emitter.emit_gc_visit('*((PyObject **)((char *)self + sizeof({})))'.format( struct_name), object_rprimitive) emitter.emit_gc_visit( '*((PyObject **)((char *)self + sizeof(PyObject *) + sizeof({})))'.format( struct_name), object_rprimitive) emitter.emit_line('return 0;') emitter.emit_line('}') def generate_clear_for_class(cl: ClassIR, func_name: str, emitter: Emitter) -> None: emitter.emit_line('static int') emitter.emit_line('{}({} *self)'.format(func_name, cl.struct_name(emitter.names))) emitter.emit_line('{') for base in reversed(cl.base_mro): for attr, rtype in base.attributes.items(): 
emitter.emit_gc_clear('self->{}'.format(emitter.attr(attr)), rtype) if cl.has_dict: struct_name = cl.struct_name(emitter.names) # __dict__ lives right after the struct and __weakref__ lives right after that emitter.emit_gc_clear('*((PyObject **)((char *)self + sizeof({})))'.format( struct_name), object_rprimitive) emitter.emit_gc_clear( '*((PyObject **)((char *)self + sizeof(PyObject *) + sizeof({})))'.format( struct_name), object_rprimitive) emitter.emit_line('return 0;') emitter.emit_line('}') def generate_dealloc_for_class(cl: ClassIR, dealloc_func_name: str, clear_func_name: str, emitter: Emitter) -> None: emitter.emit_line('static void') emitter.emit_line('{}({} *self)'.format(dealloc_func_name, cl.struct_name(emitter.names))) emitter.emit_line('{') emitter.emit_line('PyObject_GC_UnTrack(self);') emitter.emit_line('{}(self);'.format(clear_func_name)) emitter.emit_line('Py_TYPE(self)->tp_free((PyObject *)self);') emitter.emit_line('}') def generate_methods_table(cl: ClassIR, name: str, emitter: Emitter) -> None: emitter.emit_line('static PyMethodDef {}[] = {{'.format(name)) for fn in cl.methods.values(): if fn.decl.is_prop_setter or fn.decl.is_prop_getter: continue emitter.emit_line('{{"{}",'.format(fn.name)) emitter.emit_line(' (PyCFunction){}{},'.format(PREFIX, fn.cname(emitter.names))) flags = ['METH_VARARGS', 'METH_KEYWORDS'] if fn.decl.kind == FUNC_STATICMETHOD: flags.append('METH_STATIC') elif fn.decl.kind == FUNC_CLASSMETHOD: flags.append('METH_CLASS') emitter.emit_line(' {}, NULL}},'.format(' | '.join(flags))) # Provide a default __getstate__ and __setstate__ if not cl.has_method('__setstate__') and not cl.has_method('__getstate__'): emitter.emit_lines( '{"__setstate__", (PyCFunction)CPyPickle_SetState, METH_O, NULL},', '{"__getstate__", (PyCFunction)CPyPickle_GetState, METH_NOARGS, NULL},', ) emitter.emit_line('{NULL} /* Sentinel */') emitter.emit_line('};') def generate_side_table_for_class(cl: ClassIR, name: str, type: str, slots: Dict[str, str], 
emitter: Emitter) -> Optional[str]: name = '{}_{}'.format(cl.name_prefix(emitter.names), name) emitter.emit_line('static {} {} = {{'.format(type, name)) for field, value in slots.items(): emitter.emit_line(".{} = {},".format(field, value)) emitter.emit_line("};") return name def generate_getseter_declarations(cl: ClassIR, emitter: Emitter) -> None: if not cl.is_trait: for attr in cl.attributes: emitter.emit_line('static PyObject *') emitter.emit_line('{}({} *self, void *closure);'.format( getter_name(cl, attr, emitter.names), cl.struct_name(emitter.names))) emitter.emit_line('static int') emitter.emit_line('{}({} *self, PyObject *value, void *closure);'.format( setter_name(cl, attr, emitter.names), cl.struct_name(emitter.names))) for prop in cl.properties: # Generate getter declaration emitter.emit_line('static PyObject *') emitter.emit_line('{}({} *self, void *closure);'.format( getter_name(cl, prop, emitter.names), cl.struct_name(emitter.names))) # Generate property setter declaration if a setter exists if cl.properties[prop][1]: emitter.emit_line('static int') emitter.emit_line('{}({} *self, PyObject *value, void *closure);'.format( setter_name(cl, prop, emitter.names), cl.struct_name(emitter.names))) def generate_getseters_table(cl: ClassIR, name: str, emitter: Emitter) -> None: emitter.emit_line('static PyGetSetDef {}[] = {{'.format(name)) if not cl.is_trait: for attr in cl.attributes: emitter.emit_line('{{"{}",'.format(attr)) emitter.emit_line(' (getter){}, (setter){},'.format( getter_name(cl, attr, emitter.names), setter_name(cl, attr, emitter.names))) emitter.emit_line(' NULL, NULL},') for prop in cl.properties: emitter.emit_line('{{"{}",'.format(prop)) emitter.emit_line(' (getter){},'.format(getter_name(cl, prop, emitter.names))) setter = cl.properties[prop][1] if setter: emitter.emit_line(' (setter){},'.format(setter_name(cl, prop, emitter.names))) emitter.emit_line('NULL, NULL},') else: emitter.emit_line('NULL, NULL, NULL},') emitter.emit_line('{NULL} /* 
Sentinel */') emitter.emit_line('};') def generate_getseters(cl: ClassIR, emitter: Emitter) -> None: if not cl.is_trait: for i, (attr, rtype) in enumerate(cl.attributes.items()): generate_getter(cl, attr, rtype, emitter) emitter.emit_line('') generate_setter(cl, attr, rtype, emitter) if i < len(cl.attributes) - 1: emitter.emit_line('') for prop, (getter, setter) in cl.properties.items(): rtype = getter.sig.ret_type emitter.emit_line('') generate_readonly_getter(cl, prop, rtype, getter, emitter) if setter: arg_type = setter.sig.args[1].type emitter.emit_line('') generate_property_setter(cl, prop, arg_type, setter, emitter) def generate_getter(cl: ClassIR, attr: str, rtype: RType, emitter: Emitter) -> None: attr_field = emitter.attr(attr) emitter.emit_line('static PyObject *') emitter.emit_line('{}({} *self, void *closure)'.format(getter_name(cl, attr, emitter.names), cl.struct_name(emitter.names))) emitter.emit_line('{') emit_undefined_check(rtype, emitter, attr_field, '==') emitter.emit_line('PyErr_SetString(PyExc_AttributeError,') emitter.emit_line(' "attribute {} of {} undefined");'.format(repr(attr), repr(cl.name))) emitter.emit_line('return NULL;') emitter.emit_line('}') emitter.emit_inc_ref('self->{}'.format(attr_field), rtype) emitter.emit_box('self->{}'.format(attr_field), 'retval', rtype, declare_dest=True) emitter.emit_line('return retval;') emitter.emit_line('}') def generate_setter(cl: ClassIR, attr: str, rtype: RType, emitter: Emitter) -> None: attr_field = emitter.attr(attr) emitter.emit_line('static int') emitter.emit_line('{}({} *self, PyObject *value, void *closure)'.format( setter_name(cl, attr, emitter.names), cl.struct_name(emitter.names))) emitter.emit_line('{') if rtype.is_refcounted: emit_undefined_check(rtype, emitter, attr_field, '!=') emitter.emit_dec_ref('self->{}'.format(attr_field), rtype) emitter.emit_line('}') emitter.emit_line('if (value != NULL) {') if rtype.is_unboxed: emitter.emit_unbox('value', 'tmp', rtype, custom_failure='return 
-1;', declare_dest=True) elif is_same_type(rtype, object_rprimitive): emitter.emit_line('PyObject *tmp = value;') else: emitter.emit_cast('value', 'tmp', rtype, declare_dest=True) emitter.emit_lines('if (!tmp)', ' return -1;') emitter.emit_inc_ref('tmp', rtype) emitter.emit_line('self->{} = tmp;'.format(attr_field)) emitter.emit_line('} else') emitter.emit_line(' self->{} = {};'.format(attr_field, emitter.c_undefined_value(rtype))) emitter.emit_line('return 0;') emitter.emit_line('}') def generate_readonly_getter(cl: ClassIR, attr: str, rtype: RType, func_ir: FuncIR, emitter: Emitter) -> None: emitter.emit_line('static PyObject *') emitter.emit_line('{}({} *self, void *closure)'.format(getter_name(cl, attr, emitter.names), cl.struct_name(emitter.names))) emitter.emit_line('{') if rtype.is_unboxed: emitter.emit_line('{}retval = {}{}((PyObject *) self);'.format( emitter.ctype_spaced(rtype), NATIVE_PREFIX, func_ir.cname(emitter.names))) emitter.emit_box('retval', 'retbox', rtype, declare_dest=True) emitter.emit_line('return retbox;') else: emitter.emit_line('return {}{}((PyObject *) self);'.format(NATIVE_PREFIX, func_ir.cname(emitter.names))) emitter.emit_line('}') def generate_property_setter(cl: ClassIR, attr: str, arg_type: RType, func_ir: FuncIR, emitter: Emitter) -> None: emitter.emit_line('static int') emitter.emit_line('{}({} *self, PyObject *value, void *closure)'.format( setter_name(cl, attr, emitter.names), cl.struct_name(emitter.names))) emitter.emit_line('{') if arg_type.is_unboxed: emitter.emit_unbox('value', 'tmp', arg_type, custom_failure='return -1;', declare_dest=True) emitter.emit_line('{}{}((PyObject *) self, tmp);'.format( NATIVE_PREFIX, func_ir.cname(emitter.names))) else: emitter.emit_line('{}{}((PyObject *) self, value);'.format( NATIVE_PREFIX, func_ir.cname(emitter.names))) emitter.emit_line('return 0;') emitter.emit_line('}') def emit_undefined_check(rtype: RType, emitter: Emitter, attr: str, compare: str) -> None: if isinstance(rtype, 
RTuple): attr_expr = 'self->{}'.format(attr) emitter.emit_line( 'if ({}) {{'.format( emitter.tuple_undefined_check_cond( rtype, attr_expr, emitter.c_undefined_value, compare))) else: emitter.emit_line( 'if (self->{} {} {}) {{'.format(attr, compare, emitter.c_undefined_value(rtype))) mypy-0.761/mypyc/emitfunc.py0000644€tŠÔÚ€2›s®0000003766413576752246022265 0ustar jukkaDROPBOX\Domain Users00000000000000"""Code generation for native function bodies.""" from mypyc.common import ( REG_PREFIX, NATIVE_PREFIX, STATIC_PREFIX, TYPE_PREFIX, MODULE_PREFIX, ) from mypyc.emit import Emitter from mypyc.ops import ( FuncIR, OpVisitor, Goto, Branch, Return, Assign, LoadInt, LoadErrorValue, GetAttr, SetAttr, LoadStatic, InitStatic, TupleGet, TupleSet, Call, IncRef, DecRef, Box, Cast, Unbox, BasicBlock, Value, RType, RTuple, MethodCall, PrimitiveOp, EmitterInterface, Unreachable, NAMESPACE_STATIC, NAMESPACE_TYPE, NAMESPACE_MODULE, RaiseStandardError, FuncDecl, ClassIR, FUNC_STATICMETHOD, FUNC_CLASSMETHOD, ) from mypyc.namegen import NameGenerator MYPY = False if MYPY: from typing_extensions import Final # Whether to insert debug asserts for all error handling, to quickly # catch errors propagating without exceptions set. 
DEBUG_ERRORS = False def native_getter_name(cl: ClassIR, attribute: str, names: NameGenerator) -> str: return names.private_name(cl.module_name, 'native_{}_get{}'.format(cl.name, attribute)) def native_setter_name(cl: ClassIR, attribute: str, names: NameGenerator) -> str: return names.private_name(cl.module_name, 'native_{}_set{}'.format(cl.name, attribute)) def native_function_type(fn: FuncIR, emitter: Emitter) -> str: args = ', '.join(emitter.ctype(arg.type) for arg in fn.args) or 'void' ret = emitter.ctype(fn.ret_type) return '{} (*)({})'.format(ret, args) def native_function_header(fn: FuncDecl, emitter: Emitter) -> str: args = [] for arg in fn.sig.args: args.append('{}{}{}'.format(emitter.ctype_spaced(arg.type), REG_PREFIX, arg.name)) return '{ret_type}{name}({args})'.format( ret_type=emitter.ctype_spaced(fn.sig.ret_type), name=emitter.native_function_name(fn), args=', '.join(args) or 'void') def generate_native_function(fn: FuncIR, emitter: Emitter, source_path: str, module_name: str) -> None: declarations = Emitter(emitter.context, fn.env) body = Emitter(emitter.context, fn.env) visitor = FunctionEmitterVisitor(body, declarations, source_path, module_name) declarations.emit_line('{} {{'.format(native_function_header(fn.decl, emitter))) body.indent() for r, i in fn.env.indexes.items(): if isinstance(r.type, RTuple): emitter.declare_tuple_struct(r.type) if i < len(fn.args): continue # skip the arguments ctype = emitter.ctype_spaced(r.type) init = '' if r in fn.env.vars_needing_init: init = ' = {}'.format(declarations.c_error_value(r.type)) declarations.emit_line('{ctype}{prefix}{name}{init};'.format(ctype=ctype, prefix=REG_PREFIX, name=r.name, init=init)) # Before we emit the blocks, give them all labels for i, block in enumerate(fn.blocks): block.label = i for block in fn.blocks: body.emit_label(block) for op in block.ops: op.accept(visitor) body.emit_line('}') emitter.emit_from_emitter(declarations) emitter.emit_from_emitter(body) class 
FunctionEmitterVisitor(OpVisitor[None], EmitterInterface): def __init__(self, emitter: Emitter, declarations: Emitter, source_path: str, module_name: str) -> None: self.emitter = emitter self.names = emitter.names self.declarations = declarations self.env = self.emitter.env self.source_path = source_path self.module_name = module_name def temp_name(self) -> str: return self.emitter.temp_name() def visit_goto(self, op: Goto) -> None: self.emit_line('goto %s;' % self.label(op.label)) def visit_branch(self, op: Branch) -> None: neg = '!' if op.negated else '' cond = '' if op.op == Branch.BOOL_EXPR: expr_result = self.reg(op.left) # right isn't used cond = '{}{}'.format(neg, expr_result) elif op.op == Branch.IS_ERROR: typ = op.left.type compare = '!=' if op.negated else '==' if isinstance(typ, RTuple): # TODO: What about empty tuple? cond = self.emitter.tuple_undefined_check_cond(typ, self.reg(op.left), self.c_error_value, compare) else: cond = '{} {} {}'.format(self.reg(op.left), compare, self.c_error_value(typ)) else: assert False, "Invalid branch" # For error checks, tell the compiler the branch is unlikely if op.traceback_entry is not None or op.rare: cond = 'unlikely({})'.format(cond) self.emit_line('if ({}) {{'.format(cond)) if op.traceback_entry is not None: globals_static = self.emitter.static_name('globals', self.module_name) self.emit_line('CPy_AddTraceback("%s", "%s", %d, %s);' % ( self.source_path.replace("\\", "\\\\"), op.traceback_entry[0], op.traceback_entry[1], globals_static)) if DEBUG_ERRORS: self.emit_line('assert(PyErr_Occurred() != NULL && "failure w/o err!");') self.emit_lines( 'goto %s;' % self.label(op.true), '} else', ' goto %s;' % self.label(op.false) ) def visit_return(self, op: Return) -> None: regstr = self.reg(op.reg) self.emit_line('return %s;' % regstr) def visit_primitive_op(self, op: PrimitiveOp) -> None: args = [self.reg(arg) for arg in op.args] if not op.is_void: dest = self.reg(op) else: # This will generate a C compile error if 
used. The reason for this # is that we don't want to insert "assert dest is not None" checks # everywhere. dest = '' op.desc.emit(self, args, dest) def visit_tuple_set(self, op: TupleSet) -> None: dest = self.reg(op) tuple_type = op.tuple_type self.emitter.declare_tuple_struct(tuple_type) if len(op.items) == 0: # empty tuple self.emit_line('{}.empty_struct_error_flag = 0;'.format(dest)) else: for i, item in enumerate(op.items): self.emit_line('{}.f{} = {};'.format(dest, i, self.reg(item))) self.emit_inc_ref(dest, tuple_type) def visit_assign(self, op: Assign) -> None: dest = self.reg(op.dest) src = self.reg(op.src) # clang whines about self assignment (which we might generate # for some casts), so don't emit it. if dest != src: self.emit_line('%s = %s;' % (dest, src)) def visit_load_int(self, op: LoadInt) -> None: dest = self.reg(op) self.emit_line('%s = %d;' % (dest, op.value * 2)) def visit_load_error_value(self, op: LoadErrorValue) -> None: if isinstance(op.type, RTuple): values = [self.c_undefined_value(item) for item in op.type.types] tmp = self.temp_name() self.emit_line('%s %s = { %s };' % (self.ctype(op.type), tmp, ', '.join(values))) self.emit_line('%s = %s;' % (self.reg(op), tmp)) else: self.emit_line('%s = %s;' % (self.reg(op), self.c_error_value(op.type))) def visit_get_attr(self, op: GetAttr) -> None: dest = self.reg(op) obj = self.reg(op.obj) rtype = op.class_type cl = rtype.class_ir version = '_TRAIT' if cl.is_trait else '' if cl.is_trait or cl.get_method(op.attr): self.emit_line('%s = CPY_GET_ATTR%s(%s, %s, %d, %s, %s); /* %s */' % ( dest, version, obj, self.emitter.type_struct_name(rtype.class_ir), rtype.getter_index(op.attr), rtype.struct_name(self.names), self.ctype(rtype.attr_type(op.attr)), op.attr)) else: typ, decl_cl = cl.attr_details(op.attr) # FIXME: We use the lib_prefixed version which is an # indirect call we can't inline. We should investigate # duplicating getter/setter code. 
self.emit_line('%s = %s%s((%s *)%s); /* %s */' % ( dest, self.emitter.get_group_prefix(decl_cl), native_getter_name(decl_cl, op.attr, self.emitter.names), decl_cl.struct_name(self.names), obj, op.attr)) def visit_set_attr(self, op: SetAttr) -> None: dest = self.reg(op) obj = self.reg(op.obj) src = self.reg(op.src) rtype = op.class_type cl = rtype.class_ir version = '_TRAIT' if cl.is_trait else '' if cl.is_trait or cl.get_method(op.attr): self.emit_line('%s = CPY_SET_ATTR%s(%s, %s, %d, %s, %s, %s); /* %s */' % ( dest, version, obj, self.emitter.type_struct_name(rtype.class_ir), rtype.setter_index(op.attr), src, rtype.struct_name(self.names), self.ctype(rtype.attr_type(op.attr)), op.attr)) else: typ, decl_cl = cl.attr_details(op.attr) self.emit_line('%s = %s%s((%s *)%s, %s); /* %s */' % ( dest, self.emitter.get_group_prefix(decl_cl), native_setter_name(decl_cl, op.attr, self.emitter.names), decl_cl.struct_name(self.names), obj, src, op.attr)) PREFIX_MAP = { NAMESPACE_STATIC: STATIC_PREFIX, NAMESPACE_TYPE: TYPE_PREFIX, NAMESPACE_MODULE: MODULE_PREFIX, } # type: Final def visit_load_static(self, op: LoadStatic) -> None: dest = self.reg(op) prefix = self.PREFIX_MAP[op.namespace] name = self.emitter.static_name(op.identifier, op.module_name, prefix) if op.namespace == NAMESPACE_TYPE: name = '(PyObject *)%s' % name ann = '' if op.ann: s = repr(op.ann) if not any(x in s for x in ('/*', '*/', '\0')): ann = ' /* %s */' % s self.emit_line('%s = %s;%s' % (dest, name, ann)) def visit_init_static(self, op: InitStatic) -> None: value = self.reg(op.value) prefix = self.PREFIX_MAP[op.namespace] name = self.emitter.static_name(op.identifier, op.module_name, prefix) if op.namespace == NAMESPACE_TYPE: value = '(PyTypeObject *)%s' % value self.emit_line('%s = %s;' % (name, value)) self.emit_inc_ref(name, op.value.type) def visit_tuple_get(self, op: TupleGet) -> None: dest = self.reg(op) src = self.reg(op.src) self.emit_line('{} = {}.f{};'.format(dest, src, op.index)) 
self.emit_inc_ref(dest, op.type) def get_dest_assign(self, dest: Value) -> str: if not dest.is_void: return self.reg(dest) + ' = ' else: return '' def visit_call(self, op: Call) -> None: """Call native function.""" dest = self.get_dest_assign(op) args = ', '.join(self.reg(arg) for arg in op.args) lib = self.emitter.get_group_prefix(op.fn) cname = op.fn.cname(self.names) self.emit_line('%s%s%s%s(%s);' % (dest, lib, NATIVE_PREFIX, cname, args)) def visit_method_call(self, op: MethodCall) -> None: """Call native method.""" dest = self.get_dest_assign(op) obj = self.reg(op.obj) rtype = op.receiver_type class_ir = rtype.class_ir name = op.method method_idx = rtype.method_index(name) method = rtype.class_ir.get_method(name) assert method is not None # Can we call the method directly, bypassing vtable? is_direct = class_ir.is_method_final(name) # The first argument gets omitted for static methods and # turned into the class for class methods obj_args = ( [] if method.decl.kind == FUNC_STATICMETHOD else ['(PyObject *)Py_TYPE({})'.format(obj)] if method.decl.kind == FUNC_CLASSMETHOD else [obj]) args = ', '.join(obj_args + [self.reg(arg) for arg in op.args]) mtype = native_function_type(method, self.emitter) version = '_TRAIT' if rtype.class_ir.is_trait else '' if is_direct: # Directly call method, without going through the vtable. lib = self.emitter.get_group_prefix(method.decl) self.emit_line('{}{}{}{}({});'.format( dest, lib, NATIVE_PREFIX, method.cname(self.names), args)) else: # Call using vtable. 
self.emit_line('{}CPY_GET_METHOD{}({}, {}, {}, {}, {})({}); /* {} */'.format( dest, version, obj, self.emitter.type_struct_name(rtype.class_ir), method_idx, rtype.struct_name(self.names), mtype, args, op.method)) def visit_inc_ref(self, op: IncRef) -> None: src = self.reg(op.src) self.emit_inc_ref(src, op.src.type) def visit_dec_ref(self, op: DecRef) -> None: src = self.reg(op.src) self.emit_dec_ref(src, op.src.type, op.is_xdec) def visit_box(self, op: Box) -> None: self.emitter.emit_box(self.reg(op.src), self.reg(op), op.src.type, can_borrow=True) def visit_cast(self, op: Cast) -> None: self.emitter.emit_cast(self.reg(op.src), self.reg(op), op.type, src_type=op.src.type) def visit_unbox(self, op: Unbox) -> None: self.emitter.emit_unbox(self.reg(op.src), self.reg(op), op.type) def visit_unreachable(self, op: Unreachable) -> None: self.emitter.emit_line('CPy_Unreachable();') def visit_raise_standard_error(self, op: RaiseStandardError) -> None: # TODO: Better escaping of backspaces and such if op.value is not None: if isinstance(op.value, str): message = op.value.replace('"', '\\"') self.emitter.emit_line( 'PyErr_SetString(PyExc_{}, "{}");'.format(op.class_name, message)) elif isinstance(op.value, Value): self.emitter.emit_line( 'PyErr_SetObject(PyExc_{}, {}{});'.format(op.class_name, REG_PREFIX, op.value.name)) else: assert False, 'op value type must be either str or Value' else: self.emitter.emit_line('PyErr_SetNone(PyExc_{});'.format(op.class_name)) self.emitter.emit_line('{} = 0;'.format(self.reg(op))) # Helpers def label(self, label: BasicBlock) -> str: return self.emitter.label(label) def reg(self, reg: Value) -> str: return self.emitter.reg(reg) def ctype(self, rtype: RType) -> str: return self.emitter.ctype(rtype) def c_error_value(self, rtype: RType) -> str: return self.emitter.c_error_value(rtype) def c_undefined_value(self, rtype: RType) -> str: return self.emitter.c_undefined_value(rtype) def emit_line(self, line: str) -> None: 
self.emitter.emit_line(line) def emit_lines(self, *lines: str) -> None: self.emitter.emit_lines(*lines) def emit_inc_ref(self, dest: str, rtype: RType) -> None: self.emitter.emit_inc_ref(dest, rtype) def emit_dec_ref(self, dest: str, rtype: RType, is_xdec: bool) -> None: self.emitter.emit_dec_ref(dest, rtype, is_xdec) def emit_declaration(self, line: str) -> None: self.declarations.emit_line(line) mypy-0.761/mypyc/emitmodule.py0000644€tŠÔÚ€2›s®0000012410413576752246022601 0ustar jukkaDROPBOX\Domain Users00000000000000"""Generate C code for a Python C extension module from Python source code.""" # FIXME: Basically nothing in this file operates on the level of a # single module and it should be renamed. import os import hashlib import json from collections import OrderedDict from typing import List, Tuple, Dict, Iterable, Set, TypeVar, Optional from mypy.nodes import MypyFile from mypy.build import ( BuildSource, BuildResult, State, build, sorted_components, get_cache_names, create_metastore, compute_hash, ) from mypy.errors import CompileError from mypy.options import Options from mypy.plugin import Plugin, ReportConfigContext from mypy.fscache import FileSystemCache from mypyc import genops from mypyc.common import ( PREFIX, TOP_LEVEL_NAME, INT_PREFIX, MODULE_PREFIX, shared_lib_name, ) from mypyc.cstring import encode_as_c_string, encode_bytes_as_c_string from mypyc.emit import EmitterContext, Emitter, HeaderDeclaration from mypyc.emitfunc import generate_native_function, native_function_header from mypyc.emitclass import generate_class_type_decl, generate_class from mypyc.emitwrapper import ( generate_wrapper_function, wrapper_function_header, ) from mypyc.ops import ( FuncIR, ClassIR, ModuleIR, ModuleIRs, LiteralsMap, RType, RTuple, DeserMaps, deserialize_modules, ) from mypyc.options import CompilerOptions from mypyc.uninit import insert_uninit_checks from mypyc.refcount import insert_ref_count_opcodes from mypyc.exceptions import insert_exception_handling from 
mypyc.namegen import NameGenerator, exported_name from mypyc.errors import Errors # All of the modules being compiled are divided into "groups". A group # is a set of modules that are placed into the same shared library. # Two common configurations are that every module is placed in a group # by itself (fully separate compilation) and that every module is # placed in the same group (fully whole-program compilation), but we # support finer-grained control of the group as well. # # In fully whole-program compilation, we will generate N+1 extension # modules: one shim per module and one shared library containing all # the actual code. # In fully separate compilation, we (unfortunately) will generate 2*N # extension modules: one shim per module and also one library containg # each module's actual code. (This might be fixable in the future, # but allows a clean separation between setup of the export tables # (see generate_export_table) and running module top levels.) # # A group is represented as a list of BuildSources containing all of # its modules along with the name of the group. (Which can be None # only if we are compiling only a single group with a single file in it # and not using shared libraries). Group = Tuple[List[BuildSource], Optional[str]] Groups = List[Group] # A list of (file name, file contents) pairs. FileContents = List[Tuple[str, str]] class MarkedDeclaration: """Add a mark, useful for topological sort.""" def __init__(self, declaration: HeaderDeclaration, mark: bool) -> None: self.declaration = declaration self.mark = False class MypycPlugin(Plugin): """Plugin for making mypyc interoperate properly with mypy incremental mode. Basically the point of this plugin is to force mypy to recheck things based on the demands of mypyc in a couple situations: * Any modules in the same group must be compiled together, so we tell mypy that modules depend on all their groupmates. 
* If the IR metadata is missing or stale or any of the generated C source files associated missing or stale, then we need to recompile the module so we mark it as stale. """ def __init__( self, options: Options, compiler_options: CompilerOptions, groups: Groups) -> None: super().__init__(options) self.group_map = {} # type: Dict[str, Tuple[Optional[str], List[str]]] for sources, name in groups: modules = sorted(source.module for source in sources) for id in modules: self.group_map[id] = (name, modules) self.compiler_options = compiler_options self.metastore = create_metastore(options) def report_config_data( self, ctx: ReportConfigContext) -> Optional[Tuple[Optional[str], List[str]]]: # The config data we report is the group map entry for the module. # If the data is being used to check validity, we do additional checks # that the IR cache exists and matches the metadata cache and all # output source files exist and are up to date. id, path, is_check = ctx.id, ctx.path, ctx.is_check if id not in self.group_map: return None # If we aren't doing validity checks, just return the cache data if not is_check: return self.group_map[id] # Load the metadata and IR cache meta_path, _, _ = get_cache_names(id, path, self.options) ir_path = get_ir_cache_name(id, path, self.options) try: meta_json = self.metastore.read(meta_path) ir_json = self.metastore.read(ir_path) except FileNotFoundError: # This could happen if mypyc failed after mypy succeeded # in the previous run or if some cache files got # deleted. No big deal, just fail to load the cache. return None ir_data = json.loads(ir_json) # Check that the IR cache matches the metadata cache if compute_hash(meta_json) != ir_data['meta_hash']: return None # Check that all of the source files are present and as # expected. The main situation where this would come up is the # user deleting the build directory without deleting # .mypy_cache, which we should handle gracefully. 
for path, hash in ir_data['src_hashes'].items(): try: with open(os.path.join(self.compiler_options.target_dir, path), 'rb') as f: contents = f.read() except FileNotFoundError: return None real_hash = hashlib.md5(contents).hexdigest() if hash != real_hash: return None return self.group_map[id] def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]: # Report dependency on modules in the module's group return [(10, id, -1) for id in self.group_map.get(file.fullname, (None, []))[1]] def parse_and_typecheck( sources: List[BuildSource], options: Options, compiler_options: CompilerOptions, groups: Groups, fscache: Optional[FileSystemCache] = None, alt_lib_path: Optional[str] = None ) -> BuildResult: assert options.strict_optional, 'strict_optional must be turned on' result = build(sources=sources, options=options, alt_lib_path=alt_lib_path, fscache=fscache, extra_plugins=[MypycPlugin(options, compiler_options, groups)]) if result.errors: raise CompileError(result.errors) return result def compile_scc_to_ir( scc: List[MypyFile], result: BuildResult, mapper: genops.Mapper, compiler_options: CompilerOptions, errors: Errors, ) -> ModuleIRs: """Compile an SCC into ModuleIRs. Any modules that this SCC depends on must have either compiled or loaded from a cache into mapper. Arguments: scc: The list of MypyFiles to compile result: The BuildResult from the mypy front-end mapper: The Mapper object mapping mypy ASTs to class and func IRs compiler_options: The compilation options errors: Where to report any errors encountered Returns the IR of the modules. """ if compiler_options.verbose: print("Compiling {}".format(", ".join(x.name for x in scc))) # Generate basic IR, with missing exception and refcount handling. modules = genops.build_ir( scc, result.graph, result.types, mapper, compiler_options, errors ) if errors.num_errors > 0: return modules # Insert uninit checks. 
for module in modules.values(): for fn in module.functions: insert_uninit_checks(fn) # Insert exception handling. for module in modules.values(): for fn in module.functions: insert_exception_handling(fn) # Insert refcount handling. for module in modules.values(): for fn in module.functions: insert_ref_count_opcodes(fn) return modules def compile_modules_to_ir( result: BuildResult, mapper: genops.Mapper, compiler_options: CompilerOptions, errors: Errors, ) -> ModuleIRs: """Compile a collection of modules into ModuleIRs. The modules to compile are specified as part of mapper's group_map. Returns the IR of the modules. """ deser_ctx = DeserMaps({}, {}) modules = {} # Process the graph by SCC in topological order, like we do in mypy.build for scc in sorted_components(result.graph): scc_states = [result.graph[id] for id in scc] trees = [st.tree for st in scc_states if st.id in mapper.group_map and st.tree] if not trees: continue fresh = all(id not in result.manager.rechecked_modules for id in scc) if fresh: load_scc_from_cache(trees, result, mapper, deser_ctx) else: scc_ir = compile_scc_to_ir(trees, result, mapper, compiler_options, errors) modules.update(scc_ir) return modules def compile_ir_to_c( groups: Groups, modules: ModuleIRs, result: BuildResult, mapper: genops.Mapper, compiler_options: CompilerOptions, ) -> Dict[Optional[str], List[Tuple[str, str]]]: """Compile a collection of ModuleIRs to C source text. Returns a dictionary mapping group names to a list of (file name, file text) pairs. """ source_paths = {source.module: result.graph[source.module].xpath for sources, _ in groups for source in sources} names = NameGenerator([[source.module for source in sources] for sources, _ in groups]) # Generate C code for each compilation group. Each group will be # compiled into a separate extension module. 
ctext = {} # type: Dict[Optional[str], List[Tuple[str, str]]] for group_sources, group_name in groups: group_modules = [(source.module, modules[source.module]) for source in group_sources if source.module in modules] if not group_modules: ctext[group_name] = [] continue literals = mapper.literals[group_name] generator = GroupGenerator( literals, group_modules, source_paths, group_name, mapper.group_map, names, compiler_options ) ctext[group_name] = generator.generate_c_for_modules() return ctext def get_ir_cache_name(id: str, path: str, options: Options) -> str: meta_path, _, _ = get_cache_names(id, path, options) return meta_path.replace('.meta.json', '.ir.json') def get_state_ir_cache_name(state: State) -> str: return get_ir_cache_name(state.id, state.xpath, state.options) def write_cache( modules: ModuleIRs, result: BuildResult, group_map: Dict[str, Optional[str]], ctext: Dict[Optional[str], List[Tuple[str, str]]], ) -> None: """Write out the cache information for modules. Each module has the following cache information written (which is in addition to the cache information written by mypy itself): * A serialized version of its mypyc IR, minus the bodies of functions. This allows code that depends on it to use these serialized data structures when compiling against it instead of needing to recompile it. (Compiling against a module requires access to both its mypy and mypyc data structures.) * The hash of the mypy metadata cache file for the module. This is used to ensure that the mypyc cache and the mypy cache are in sync and refer to the same version of the code. This is particularly important if mypyc crashes/errors/is stopped after mypy has written its cache but before mypyc has. * The hashes of all of the source file outputs for the group the module is in. This is so that the module will be recompiled if the source outputs are missing. 
""" hashes = {} for name, files in ctext.items(): hashes[name] = {file: compute_hash(data) for file, data in files} # Write out cache data for id, module in modules.items(): st = result.graph[id] meta_path, _, _ = get_cache_names(id, st.xpath, result.manager.options) # If the metadata isn't there, skip writing the cache. try: meta_data = result.manager.metastore.read(meta_path) except IOError: continue newpath = get_state_ir_cache_name(st) ir_data = { 'ir': module.serialize(), 'meta_hash': compute_hash(meta_data), 'src_hashes': hashes[group_map[id]], } result.manager.metastore.write(newpath, json.dumps(ir_data)) result.manager.metastore.commit() def load_scc_from_cache( scc: List[MypyFile], result: BuildResult, mapper: genops.Mapper, ctx: DeserMaps, ) -> ModuleIRs: """Load IR for an SCC of modules from the cache. Arguments and return are as compile_scc_to_ir. """ cache_data = { k.fullname: json.loads( result.manager.metastore.read(get_state_ir_cache_name(result.graph[k.fullname])) )['ir'] for k in scc } modules = deserialize_modules(cache_data, ctx) genops.load_type_map(mapper, scc, ctx) return modules def compile_modules_to_c( result: BuildResult, compiler_options: CompilerOptions, errors: Errors, groups: Groups, ) -> Tuple[ModuleIRs, List[FileContents]]: """Compile Python module(s) to the source of Python C extension modules. This generates the source code for the "shared library" module for each group. The shim modules are generated in mypyc.build. Each shared library module provides, for each module in its group, a PyCapsule containing an initialization function. Additionally, it provides a capsule containing an export table of pointers to all of the group's functions and static variables. Arguments: result: The BuildResult from the mypy front-end compiler_options: The compilation options errors: Where to report any errors encountered groups: The groups that we are compiling. See documentation of Groups type above. 
ops: Optionally, where to dump stringified ops for debugging. Returns the IR of the modules and a list containing the generated files for each group. """ # Construct a map from modules to what group they belong to group_map = {source.module: lib_name for group, lib_name in groups for source in group} mapper = genops.Mapper(group_map) modules = compile_modules_to_ir(result, mapper, compiler_options, errors) ctext = compile_ir_to_c(groups, modules, result, mapper, compiler_options) if errors.num_errors == 0: write_cache(modules, result, group_map, ctext) return modules, [ctext[name] for _, name in groups] def generate_function_declaration(fn: FuncIR, emitter: Emitter) -> None: emitter.context.declarations[emitter.native_function_name(fn.decl)] = HeaderDeclaration( '{};'.format(native_function_header(fn.decl, emitter)), needs_export=True) if fn.name != TOP_LEVEL_NAME: emitter.context.declarations[PREFIX + fn.cname(emitter.names)] = HeaderDeclaration( '{};'.format(wrapper_function_header(fn, emitter.names))) def pointerize(decl: str, name: str) -> str: """Given a C decl and its name, modify it to be a declaration to a pointer.""" # This doesn't work in general but does work for all our types... if '(' in decl: # Function pointer. Stick a * in front of the name and wrap it in parens. return decl.replace(name, '(*{})'.format(name)) else: # Non-function pointer. Just stick a * in front of the name. return decl.replace(name, '*{}'.format(name)) def group_dir(group_name: str) -> str: """Given a group name, return the relative directory path for it. """ return os.sep.join(group_name.split('.')[:-1]) class GroupGenerator: def __init__(self, literals: LiteralsMap, modules: List[Tuple[str, ModuleIR]], source_paths: Dict[str, str], group_name: Optional[str], group_map: Dict[str, Optional[str]], names: NameGenerator, compiler_options: CompilerOptions) -> None: """Generator for C source for a compilation group. 
The code for a compilation group contains an internal and an external .h file, and then one .c if not in multi_file mode or one .c file per module if in multi_file mode.) Arguments: literals: The literals declared in this group modules: (name, ir) pairs for each module in the group source_paths: Map from module names to source file paths group_name: The name of the group (or None if this is single-module compilation) group_map: A map of modules to their group names names: The name generator for the compilation multi_file: Whether to put each module in its own source file regardless of group structure. """ self.literals = literals self.modules = modules self.source_paths = source_paths self.context = EmitterContext(names, group_name, group_map) self.names = names # Initializations of globals to simple values that we can't # do statically because the windows loader is bad. self.simple_inits = [] # type: List[Tuple[str, str]] self.group_name = group_name self.use_shared_lib = group_name is not None self.compiler_options = compiler_options self.multi_file = compiler_options.multi_file @property def group_suffix(self) -> str: return '_' + exported_name(self.group_name) if self.group_name else '' @property def short_group_suffix(self) -> str: return '_' + exported_name(self.group_name.split('.')[-1]) if self.group_name else '' def generate_c_for_modules(self) -> List[Tuple[str, str]]: file_contents = [] multi_file = self.use_shared_lib and self.multi_file base_emitter = Emitter(self.context) # Optionally just include the runtime library c files to # reduce the number of compiler invocations needed if self.compiler_options.include_runtime_files: base_emitter.emit_line('#include "CPy.c"') base_emitter.emit_line('#include "getargs.c"') base_emitter.emit_line('#include "__native{}.h"'.format(self.short_group_suffix)) base_emitter.emit_line('#include "__native_internal{}.h"'.format(self.short_group_suffix)) emitter = base_emitter for (_, literal), identifier in 
self.literals.items(): if isinstance(literal, int): symbol = emitter.static_name(identifier, None) self.declare_global('CPyTagged ', symbol) else: self.declare_static_pyobject(identifier, emitter) for module_name, module in self.modules: if multi_file: emitter = Emitter(self.context) emitter.emit_line('#include "__native{}.h"'.format(self.short_group_suffix)) emitter.emit_line( '#include "__native_internal{}.h"'.format(self.short_group_suffix)) self.declare_module(module_name, emitter) self.declare_internal_globals(module_name, emitter) self.declare_imports(module.imports, emitter) for cl in module.classes: if cl.is_ext_class: generate_class(cl, module_name, emitter) # Generate Python extension module definitions and module initialization functions. self.generate_module_def(emitter, module_name, module) for fn in module.functions: emitter.emit_line() generate_native_function(fn, emitter, self.source_paths[module_name], module_name) if fn.name != TOP_LEVEL_NAME: emitter.emit_line() generate_wrapper_function( fn, emitter, self.source_paths[module_name], module_name) if multi_file: name = ('__native_{}.c'.format(emitter.names.private_name(module_name))) file_contents.append((name, ''.join(emitter.fragments))) # The external header file contains type declarations while # the internal contains declarations of functions and objects # (which are shared between shared libraries via dynamic # exports tables and not accessed directly.) 
ext_declarations = Emitter(self.context) ext_declarations.emit_line('#ifndef MYPYC_NATIVE{}_H'.format(self.group_suffix)) ext_declarations.emit_line('#define MYPYC_NATIVE{}_H'.format(self.group_suffix)) ext_declarations.emit_line('#include ') ext_declarations.emit_line('#include ') declarations = Emitter(self.context) declarations.emit_line('#ifndef MYPYC_NATIVE_INTERNAL{}_H'.format(self.group_suffix)) declarations.emit_line('#define MYPYC_NATIVE_INTERNAL{}_H'.format(self.group_suffix)) declarations.emit_line('#include ') declarations.emit_line('#include ') declarations.emit_line('#include "__native{}.h"'.format(self.short_group_suffix)) declarations.emit_line() declarations.emit_line('int CPyGlobalsInit(void);') declarations.emit_line() for module_name, module in self.modules: self.declare_finals(module_name, module.final_names, declarations) for cl in module.classes: generate_class_type_decl(cl, emitter, ext_declarations, declarations) for fn in module.functions: generate_function_declaration(fn, declarations) for lib in sorted(self.context.group_deps): elib = exported_name(lib) short_lib = exported_name(lib.split('.')[-1]) declarations.emit_lines( '#include <{}>'.format( os.path.join(group_dir(lib), "__native_{}.h".format(short_lib)) ), 'struct export_table_{} exports_{};'.format(elib, elib) ) sorted_decls = self.toposort_declarations() emitter = base_emitter self.generate_globals_init(emitter) emitter.emit_line() for declaration in sorted_decls: decls = ext_declarations if declaration.is_type else declarations if not declaration.is_type: decls.emit_lines( 'extern {}'.format(declaration.decl[0]), *declaration.decl[1:]) # If there is a definition, emit it. Otherwise repeat the declaration # (without an extern). 
if declaration.defn: emitter.emit_lines(*declaration.defn) else: emitter.emit_lines(*declaration.decl) else: decls.emit_lines(*declaration.decl) if self.group_name: self.generate_export_table(ext_declarations, emitter) self.generate_shared_lib_init(emitter) ext_declarations.emit_line('#endif') declarations.emit_line('#endif') output_dir = group_dir(self.group_name) if self.group_name else '' return file_contents + [ (os.path.join(output_dir, '__native{}.c'.format(self.short_group_suffix)), ''.join(emitter.fragments)), (os.path.join(output_dir, '__native_internal{}.h'.format(self.short_group_suffix)), ''.join(declarations.fragments)), (os.path.join(output_dir, '__native{}.h'.format(self.short_group_suffix)), ''.join(ext_declarations.fragments)), ] def generate_export_table(self, decl_emitter: Emitter, code_emitter: Emitter) -> None: """Generate the declaration and definition of the group's export struct. To avoid needing to deal with deeply platform specific issues involving dynamic library linking (and some possibly insurmountable issues involving cyclic dependencies), compiled code accesses functions and data in other compilation groups via an explicit "export struct". Each group declares a struct type that contains a pointer to every function and static variable it exports. It then populates this struct and stores a pointer to it in a capsule stored as an attribute named 'exports' on the group's shared library's python module. On load, a group's init function will import all of its dependencies' exports tables using the capsule mechanism and copy the contents into a local copy of the table (to eliminate the need for a pointer indirection when accessing it). Then, all calls to functions in another group and accesses to statics from another group are done indirectly via the export table. 
For example, a group containing a module b, where b contains a class B and a function bar, would declare an export table like: struct export_table_b { PyTypeObject **CPyType_B; PyObject *(*CPyDef_B)(CPyTagged cpy_r_x); CPyTagged (*CPyDef_B___foo)(PyObject *cpy_r_self, CPyTagged cpy_r_y); tuple_T2OI (*CPyDef_bar)(PyObject *cpy_r_x); char (*CPyDef___top_level__)(void); }; that would be initialized with: static struct export_table_b exports = { &CPyType_B, &CPyDef_B, &CPyDef_B___foo, &CPyDef_bar, &CPyDef___top_level__, }; To call `b.foo`, then, a function in another group would do `exports_b.CPyDef_bar(...)`. """ decls = decl_emitter.context.declarations decl_emitter.emit_lines( '', 'struct export_table{} {{'.format(self.group_suffix), ) for name, decl in decls.items(): if decl.needs_export: decl_emitter.emit_line(pointerize('\n'.join(decl.decl), name)) decl_emitter.emit_line('};') code_emitter.emit_lines( '', 'static struct export_table{} exports = {{'.format(self.group_suffix), ) for name, decl in decls.items(): if decl.needs_export: code_emitter.emit_line('&{},'.format(name)) code_emitter.emit_line('};') def generate_shared_lib_init(self, emitter: Emitter) -> None: """Generate the init function for a shared library. A shared library contains all of the actual code for a compilation group. The init function is responsible for creating Capsules that wrap pointers to the initialization function of all the real init functions for modules in this shared library as well as the export table containing all of the exported functions and values from all the modules. These capsules are stored in attributes of the shared library. 
""" assert self.group_name is not None emitter.emit_line() emitter.emit_lines( 'PyMODINIT_FUNC PyInit_{}(void)'.format( shared_lib_name(self.group_name).split('.')[-1]), '{', ('static PyModuleDef def = {{ PyModuleDef_HEAD_INIT, "{}", NULL, -1, NULL, NULL }};' .format(shared_lib_name(self.group_name))), 'int res;', 'PyObject *capsule;', 'PyObject *tmp;', 'static PyObject *module;', 'if (module) {', 'Py_INCREF(module);', 'return module;', '}', 'module = PyModule_Create(&def);', 'if (!module) {', 'goto fail;', '}', '', ) emitter.emit_lines( 'capsule = PyCapsule_New(&exports, "{}.exports", NULL);'.format( shared_lib_name(self.group_name)), 'if (!capsule) {', 'goto fail;', '}', 'res = PyObject_SetAttrString(module, "exports", capsule);', 'Py_DECREF(capsule);', 'if (res < 0) {', 'goto fail;', '}', '', ) for mod, _ in self.modules: name = exported_name(mod) emitter.emit_lines( 'extern PyObject *CPyInit_{}(void);'.format(name), 'capsule = PyCapsule_New((void *)CPyInit_{}, "{}.init_{}", NULL);'.format( name, shared_lib_name(self.group_name), name), 'if (!capsule) {', 'goto fail;', '}', 'res = PyObject_SetAttrString(module, "init_{}", capsule);'.format(name), 'Py_DECREF(capsule);', 'if (res < 0) {', 'goto fail;', '}', '', ) for group in sorted(self.context.group_deps): egroup = exported_name(group) emitter.emit_lines( 'tmp = PyImport_ImportModule("{}"); if (!tmp) goto fail; Py_DECREF(tmp);'.format( shared_lib_name(group)), 'struct export_table_{} *pexports_{} = PyCapsule_Import("{}.exports", 0);'.format( egroup, egroup, shared_lib_name(group)), 'if (!pexports_{}) {{'.format(egroup), 'goto fail;', '}', 'memcpy(&exports_{group}, pexports_{group}, sizeof(exports_{group}));'.format( group=egroup), '', ) emitter.emit_lines( 'return module;', 'fail:', 'Py_XDECREF(module);', 'return NULL;', '}', ) def generate_globals_init(self, emitter: Emitter) -> None: emitter.emit_lines( '', 'int CPyGlobalsInit(void)', '{', 'static int is_initialized = 0;', 'if (is_initialized) return 0;', '' ) 
def generate_globals_init(self, emitter: Emitter) -> None:
    """Emit CPyGlobalsInit(), which initializes static globals and literals.

    The generated C function is idempotent (a static flag makes repeat
    calls no-ops) and returns 0 on success, -1 if constructing any
    literal object failed.
    """
    emitter.emit_lines(
        '',
        'int CPyGlobalsInit(void)',
        '{',
        'static int is_initialized = 0;',
        'if (is_initialized) return 0;',
        ''
    )

    emitter.emit_line('CPy_Init();')
    for target, value in self.simple_inits:
        emitter.emit_line('{} = {};'.format(target, value))

    for (_, literal), identifier in self.literals.items():
        symbol = emitter.static_name(identifier, None)
        if isinstance(literal, int):
            # Ints get an unboxed (tagged) static; build the boxed object
            # into a prefixed local first and convert it below.
            actual_symbol = symbol
            symbol = INT_PREFIX + symbol
            emitter.emit_line(
                'PyObject * {} = PyLong_FromString("{}", NULL, 10);'.format(
                    symbol, str(literal))
            )
        elif isinstance(literal, float):
            emitter.emit_line(
                '{} = PyFloat_FromDouble({});'.format(symbol, str(literal))
            )
        elif isinstance(literal, complex):
            emitter.emit_line(
                '{} = PyComplex_FromDoubles({}, {});'.format(
                    symbol, str(literal.real), str(literal.imag))
            )
        elif isinstance(literal, str):
            emitter.emit_line(
                '{} = PyUnicode_FromStringAndSize({}, {});'.format(
                    symbol, *encode_as_c_string(literal))
            )
        elif isinstance(literal, bytes):
            emitter.emit_line(
                '{} = PyBytes_FromStringAndSize({}, {});'.format(
                    symbol, *encode_bytes_as_c_string(literal))
            )
        else:
            assert False, ('Literals must be integers, floating point numbers, or strings,',
                           'but the provided literal is of type {}'.format(type(literal)))
        emitter.emit_lines('if (unlikely({} == NULL))'.format(symbol),
                           ' return -1;')
        # Ints have an unboxed representation.
        if isinstance(literal, int):
            emitter.emit_line(
                '{} = CPyTagged_FromObject({});'.format(actual_symbol, symbol)
            )

    emitter.emit_lines(
        'is_initialized = 1;',
        'return 0;',
        '}',
    )
# This is separate from the *global* reference to the module that will # be populated when it is imported by a compiled module. We want that # reference to only be populated when the module has been successfully # imported, whereas this we want to have to stop a circular import. module_static = self.module_internal_static_name(module_name, emitter) emitter.emit_lines('if ({}) {{'.format(module_static), 'Py_INCREF({});'.format(module_static), 'return {};'.format(module_static), '}') emitter.emit_lines('{} = PyModule_Create(&{}module);'.format(module_static, module_prefix), 'if (unlikely({} == NULL))'.format(module_static), ' return NULL;') emitter.emit_line( 'PyObject *modname = PyObject_GetAttrString((PyObject *){}, "__name__");'.format( module_static)) module_globals = emitter.static_name('globals', module_name) emitter.emit_lines('{} = PyModule_GetDict({});'.format(module_globals, module_static), 'if (unlikely({} == NULL))'.format(module_globals), ' return NULL;') # HACK: Manually instantiate generated classes here for cl in module.classes: if cl.is_generated: type_struct = emitter.type_struct_name(cl) emitter.emit_lines( '{t} = (PyTypeObject *)CPyType_FromTemplate({t}_template, NULL, modname);'. 
format(t=type_struct)) emitter.emit_lines('if (unlikely(!{}))'.format(type_struct), ' return NULL;') emitter.emit_lines('if (CPyGlobalsInit() < 0)', ' return NULL;') self.generate_top_level_call(module, emitter) emitter.emit_lines('Py_DECREF(modname);') emitter.emit_line('return {};'.format(module_static)) emitter.emit_line('}') def generate_top_level_call(self, module: ModuleIR, emitter: Emitter) -> None: """Generate call to function representing module top level.""" # Optimization: we tend to put the top level last, so reverse iterate for fn in reversed(module.functions): if fn.name == TOP_LEVEL_NAME: emitter.emit_lines( 'char result = {}();'.format(emitter.native_function_name(fn.decl)), 'if (result == 2)', ' return NULL;', ) break def toposort_declarations(self) -> List[HeaderDeclaration]: """Topologically sort the declaration dict by dependencies. Declarations can require other declarations to come prior in C (such as declaring structs). In order to guarantee that the C output will compile the declarations will thus need to be properly ordered. This simple DFS guarantees that we have a proper ordering. This runs in O(V + E). 
""" result = [] marked_declarations = OrderedDict() # type: Dict[str, MarkedDeclaration] for k, v in self.context.declarations.items(): marked_declarations[k] = MarkedDeclaration(v, False) def _toposort_visit(name: str) -> None: decl = marked_declarations[name] if decl.mark: return for child in decl.declaration.dependencies: _toposort_visit(child) result.append(decl.declaration) decl.mark = True for name, marked_declaration in marked_declarations.items(): _toposort_visit(name) return result def declare_global(self, type_spaced: str, name: str, *, initializer: Optional[str] = None) -> None: if not initializer: defn = None else: defn = ['{}{} = {};'.format(type_spaced, name, initializer)] if name not in self.context.declarations: self.context.declarations[name] = HeaderDeclaration( '{}{};'.format(type_spaced, name), defn=defn, ) def declare_internal_globals(self, module_name: str, emitter: Emitter) -> None: static_name = emitter.static_name('globals', module_name) self.declare_global('PyObject *', static_name) def module_internal_static_name(self, module_name: str, emitter: Emitter) -> str: return emitter.static_name(module_name + '_internal', None, prefix=MODULE_PREFIX) def declare_module(self, module_name: str, emitter: Emitter) -> None: # We declare two globals for each module: # one used internally in the implementation of module init to cache results # and prevent infinite recursion in import cycles, and one used # by other modules to refer to it. 
internal_static_name = self.module_internal_static_name(module_name, emitter) self.declare_global('CPyModule *', internal_static_name, initializer='NULL') static_name = emitter.static_name(module_name, None, prefix=MODULE_PREFIX) self.declare_global('CPyModule *', static_name) self.simple_inits.append((static_name, 'Py_None')) def declare_imports(self, imps: Iterable[str], emitter: Emitter) -> None: for imp in imps: self.declare_module(imp, emitter) def declare_finals( self, module: str, final_names: Iterable[Tuple[str, RType]], emitter: Emitter) -> None: for name, typ in final_names: static_name = emitter.static_name(name, module) emitter.context.declarations[static_name] = HeaderDeclaration( '{}{};'.format(emitter.ctype_spaced(typ), static_name), [self.final_definition(module, name, typ, emitter)], needs_export=True) def final_definition( self, module: str, name: str, typ: RType, emitter: Emitter) -> str: static_name = emitter.static_name(name, module) # Here we rely on the fact that undefined value and error value are always the same if isinstance(typ, RTuple): # We need to inline because initializer must be static undefined = '{{ {} }}'.format(''.join(emitter.tuple_undefined_value_helper(typ))) else: undefined = emitter.c_undefined_value(typ) return '{}{} = {};'.format(emitter.ctype_spaced(typ), static_name, undefined) def declare_static_pyobject(self, identifier: str, emitter: Emitter) -> None: symbol = emitter.static_name(identifier, None) self.declare_global('PyObject *', symbol) def sort_classes(classes: List[Tuple[str, ClassIR]]) -> List[Tuple[str, ClassIR]]: mod_name = {ir: name for name, ir in classes} irs = [ir for _, ir in classes] deps = OrderedDict() # type: Dict[ClassIR, Set[ClassIR]] for ir in irs: if ir not in deps: deps[ir] = set() if ir.base: deps[ir].add(ir.base) deps[ir].update(ir.traits) sorted_irs = toposort(deps) return [(mod_name[ir], ir) for ir in sorted_irs] T = TypeVar('T') def toposort(deps: Dict[T, Set[T]]) -> List[T]: 
"""Topologically sort a dict from item to dependencies. This runs in O(V + E). """ result = [] visited = set() # type: Set[T] def visit(item: T) -> None: if item in visited: return for child in deps[item]: visit(child) result.append(item) visited.add(item) for item in deps: visit(item) return result mypy-0.761/mypyc/emitwrapper.py0000644€tŠÔÚ€2›s®0000003150613576752246022777 0ustar jukkaDROPBOX\Domain Users00000000000000"""Generate CPython API wrapper function for a native function.""" from mypyc.common import PREFIX, NATIVE_PREFIX, DUNDER_PREFIX from mypyc.emit import Emitter from mypyc.ops import ( ClassIR, FuncIR, RType, RuntimeArg, is_object_rprimitive, is_int_rprimitive, is_bool_rprimitive, object_rprimitive, FUNC_STATICMETHOD, ) from mypyc.namegen import NameGenerator from mypy.nodes import ARG_POS, ARG_OPT, ARG_NAMED_OPT, ARG_NAMED, ARG_STAR, ARG_STAR2 from typing import List, Optional def wrapper_function_header(fn: FuncIR, names: NameGenerator) -> str: return 'PyObject *{prefix}{name}(PyObject *self, PyObject *args, PyObject *kw)'.format( prefix=PREFIX, name=fn.cname(names)) def make_format_string(func_name: str, groups: List[List[RuntimeArg]]) -> str: # Construct the format string. Each group requires the previous # groups delimiters to be present first. main_format = '' if groups[ARG_STAR] or groups[ARG_STAR2]: main_format += '%' main_format += 'O' * len(groups[ARG_POS]) if groups[ARG_OPT] or groups[ARG_NAMED_OPT] or groups[ARG_NAMED]: main_format += '|' + 'O' * len(groups[ARG_OPT]) if groups[ARG_NAMED_OPT] or groups[ARG_NAMED]: main_format += '$' + 'O' * len(groups[ARG_NAMED_OPT]) if groups[ARG_NAMED]: main_format += '@' + 'O' * len(groups[ARG_NAMED]) return '{}:{}'.format(main_format, func_name) def generate_wrapper_function(fn: FuncIR, emitter: Emitter, source_path: str, module_name: str) -> None: """Generates a CPython-compatible wrapper function for a native function. 
def generate_wrapper_function(fn: FuncIR, emitter: Emitter,
                              source_path: str, module_name: str) -> None:
    """Generates a CPython-compatible wrapper function for a native function.

    The wrapper parses the PyObject * arguments, unboxes them, invokes
    the native function, and boxes the return value for CPython.
    """
    emitter.emit_line('{} {{'.format(wrapper_function_header(fn, emitter.names)))

    # Errors hit while processing arguments originate here rather than in
    # compiled IR, so emit a traceback frame unconditionally (even when
    # there is no `traceback_name`) to keep them debuggable.
    globals_static = emitter.static_name('globals', module_name)
    traceback_code = 'CPy_AddTraceback("%s", "%s", %d, %s);' % (
        source_path.replace("\\", "\\\\"), fn.traceback_name or fn.name,
        fn.line, globals_static)

    # For non-static methods, the first native argument is the self param.
    wrapper_args = list(fn.args)
    if fn.class_name and not fn.decl.kind == FUNC_STATICMETHOD:
        self_arg = wrapper_args.pop(0)
        emitter.emit_line('PyObject *obj_{} = self;'.format(self_arg.name))

    # CPyArg_ParseTupleAndKeywords requires the args grouped as:
    # required, optional, kwonly optional, kwonly required.
    groups = [[arg for arg in wrapper_args if arg.kind == k]
              for k in range(ARG_NAMED_OPT + 1)]
    reordered_args = (groups[ARG_POS] + groups[ARG_OPT]
                      + groups[ARG_NAMED_OPT] + groups[ARG_NAMED])

    arg_names = ''.join('"{}", '.format(arg.name) for arg in reordered_args)
    emitter.emit_line('static char *kwlist[] = {{{}0}};'.format(arg_names))
    for arg in wrapper_args:
        emitter.emit_line('PyObject *obj_{}{};'.format(
            arg.name, ' = NULL' if arg.optional else ''))

    # *args/**kwargs objects are owned by the wrapper and must be released
    # on every exit path.
    cleanups = ['CPy_DECREF(obj_{});'.format(arg.name)
                for arg in groups[ARG_STAR] + groups[ARG_STAR2]]

    arg_ptrs = []  # type: List[str]
    if groups[ARG_STAR] or groups[ARG_STAR2]:
        arg_ptrs += ['&obj_{}'.format(groups[ARG_STAR][0].name)
                     if groups[ARG_STAR] else 'NULL']
        arg_ptrs += ['&obj_{}'.format(groups[ARG_STAR2][0].name)
                     if groups[ARG_STAR2] else 'NULL']
    arg_ptrs += ['&obj_{}'.format(arg.name) for arg in reordered_args]

    emitter.emit_lines(
        'if (!CPyArg_ParseTupleAndKeywords(args, kw, "{}", kwlist{})) {{'.format(
            make_format_string(fn.name, groups), ''.join(', ' + n for n in arg_ptrs)),
        'return NULL;',
        '}')
    generate_wrapper_core(fn, emitter, groups[ARG_OPT] + groups[ARG_NAMED_OPT],
                          cleanups=cleanups,
                          traceback_code=traceback_code)
    emitter.emit_line('}')
""" input_args = ', '.join('PyObject *obj_{}'.format(arg.name) for arg in fn.args) name = '{}{}{}'.format(DUNDER_PREFIX, fn.name, cl.name_prefix(emitter.names)) emitter.emit_line('static PyObject *{name}({input_args}) {{'.format( name=name, input_args=input_args, )) generate_wrapper_core(fn, emitter) emitter.emit_line('}') return name RICHCOMPARE_OPS = { '__lt__': 'Py_LT', '__gt__': 'Py_GT', '__le__': 'Py_LE', '__ge__': 'Py_GE', '__eq__': 'Py_EQ', '__ne__': 'Py_NE', } def generate_richcompare_wrapper(cl: ClassIR, emitter: Emitter) -> Optional[str]: """Generates a wrapper for richcompare dunder methods.""" # Sort for determinism on Python 3.5 matches = sorted([name for name in RICHCOMPARE_OPS if cl.has_method(name)]) if not matches: return None name = '{}_RichCompare_{}'.format(DUNDER_PREFIX, cl.name_prefix(emitter.names)) emitter.emit_line( 'static PyObject *{name}(PyObject *obj_lhs, PyObject *obj_rhs, int op) {{'.format( name=name) ) emitter.emit_line('switch (op) {') for func in matches: emitter.emit_line('case {}: {{'.format(RICHCOMPARE_OPS[func])) method = cl.get_method(func) assert method is not None generate_wrapper_core(method, emitter, arg_names=['lhs', 'rhs']) emitter.emit_line('}') emitter.emit_line('}') emitter.emit_line('Py_INCREF(Py_NotImplemented);') emitter.emit_line('return Py_NotImplemented;') emitter.emit_line('}') return name def generate_get_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: """Generates a wrapper for native __get__ methods.""" name = '{}{}{}'.format(DUNDER_PREFIX, fn.name, cl.name_prefix(emitter.names)) emitter.emit_line( 'static PyObject *{name}(PyObject *self, PyObject *instance, PyObject *owner) {{'. format(name=name)) emitter.emit_line('instance = instance ? 
instance : Py_None;') emitter.emit_line('return {}{}(self, instance, owner);'.format( NATIVE_PREFIX, fn.cname(emitter.names))) emitter.emit_line('}') return name def generate_hash_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: """Generates a wrapper for native __hash__ methods.""" name = '{}{}{}'.format(DUNDER_PREFIX, fn.name, cl.name_prefix(emitter.names)) emitter.emit_line('static Py_ssize_t {name}(PyObject *self) {{'.format( name=name )) emitter.emit_line('{}retval = {}{}{}(self);'.format(emitter.ctype_spaced(fn.ret_type), emitter.get_group_prefix(fn.decl), NATIVE_PREFIX, fn.cname(emitter.names))) emitter.emit_error_check('retval', fn.ret_type, 'return -1;') if is_int_rprimitive(fn.ret_type): emitter.emit_line('Py_ssize_t val = CPyTagged_AsSsize_t(retval);') else: emitter.emit_line('Py_ssize_t val = PyLong_AsSsize_t(retval);') emitter.emit_dec_ref('retval', fn.ret_type) emitter.emit_line('if (PyErr_Occurred()) return -1;') # We can't return -1 from a hash function.. emitter.emit_line('if (val == -1) return -2;') emitter.emit_line('return val;') emitter.emit_line('}') return name def generate_bool_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: """Generates a wrapper for native __bool__ methods.""" name = '{}{}{}'.format(DUNDER_PREFIX, fn.name, cl.name_prefix(emitter.names)) emitter.emit_line('static int {name}(PyObject *self) {{'.format( name=name )) emitter.emit_line('{}val = {}{}(self);'.format(emitter.ctype_spaced(fn.ret_type), NATIVE_PREFIX, fn.cname(emitter.names))) emitter.emit_error_check('val', fn.ret_type, 'return -1;') # This wouldn't be that hard to fix but it seems unimportant and # getting error handling and unboxing right would be fiddly. (And # way easier to do in IR!) 
def generate_bool_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str:
    """Generates a wrapper for native __bool__ methods."""
    name = '{}{}{}'.format(DUNDER_PREFIX, fn.name, cl.name_prefix(emitter.names))
    emitter.emit_line('static int {name}(PyObject *self) {{'.format(name=name))
    emitter.emit_line('{}val = {}{}(self);'.format(
        emitter.ctype_spaced(fn.ret_type),
        NATIVE_PREFIX, fn.cname(emitter.names)))
    emitter.emit_error_check('val', fn.ret_type, 'return -1;')
    # Supporting other return types would need fiddly unboxing and error
    # handling here (far easier in IR), so only bool is accepted.
    assert is_bool_rprimitive(fn.ret_type), "Only bool return supported for __bool__"
    emitter.emit_line('return val;')
    emitter.emit_line('}')
    return name


def generate_wrapper_core(fn: FuncIR, emitter: Emitter,
                          optional_args: Optional[List[RuntimeArg]] = None,
                          arg_names: Optional[List[str]] = None,
                          cleanups: Optional[List[str]] = None,
                          traceback_code: Optional[str] = None) -> None:
    """Generates the core part of a wrapper function for a native function.

    Expects each argument to be available as PyObject *obj_{arg}. Checks
    and unboxes the arguments as needed, calls the native function, and
    boxes the result if necessary before returning it.
    """
    optional_args = optional_args or []
    cleanups = cleanups or []
    arg_names = arg_names or [arg.name for arg in fn.args]

    # With cleanups or a traceback frame we route all failures through a
    # shared 'fail' label.
    use_goto = bool(cleanups or traceback_code)
    error_code = 'goto fail;' if use_goto else 'return NULL;'

    for arg_name, arg in zip(arg_names, fn.args):
        # Suppress the argument check for *args/**kwargs, since we know it must be right.
        if arg.kind in (ARG_STAR, ARG_STAR2):
            typ = object_rprimitive
        else:
            typ = arg.type
        generate_arg_check(arg_name, typ, emitter, error_code, arg in optional_args)

    native_args = ', '.join('arg_{}'.format(arg) for arg in arg_names)
    if fn.ret_type.is_unboxed or use_goto:
        # TODO: The Py_RETURN macros return the correct PyObject * with reference count
        #       handling. Are they relevant?
        emitter.emit_line('{}retval = {}{}({});'.format(
            emitter.ctype_spaced(fn.ret_type), NATIVE_PREFIX,
            fn.cname(emitter.names), native_args))
        emitter.emit_lines(*cleanups)
        if fn.ret_type.is_unboxed:
            emitter.emit_error_check('retval', fn.ret_type, 'return NULL;')
            emitter.emit_box('retval', 'retbox', fn.ret_type, declare_dest=True)
        emitter.emit_line('return {};'.format(
            'retbox' if fn.ret_type.is_unboxed else 'retval'))
    else:
        # TODO: Tracebacks?
        emitter.emit_line('return {}{}({});'.format(
            NATIVE_PREFIX, fn.cname(emitter.names), native_args))

    if use_goto:
        emitter.emit_label('fail')
        emitter.emit_lines(*cleanups)
        if traceback_code:
            emitter.emit_lines(traceback_code)
        emitter.emit_lines('return NULL;')


def generate_arg_check(name: str, typ: RType, emitter: Emitter,
                       error_code: str, optional: bool = False) -> None:
    """Insert a runtime check for argument and unbox if necessary.

    The object is named PyObject *obj_{}. This is expected to generate
    a value named arg_{} (unboxed if necessary). For each primitive a
    runtime check ensures the correct type.
    """
    obj = 'obj_{}'.format(name)
    dest = 'arg_{}'.format(name)
    if typ.is_unboxed:
        # Borrow when unboxing to avoid reference count manipulation.
        emitter.emit_unbox(obj, dest, typ, error_code,
                           declare_dest=True, borrow=True, optional=optional)
    elif is_object_rprimitive(typ):
        # Any object is valid; just alias the PyObject pointer.
        if optional:
            emitter.emit_line('PyObject *{};'.format(dest))
            emitter.emit_line('if ({} == NULL) {{'.format(obj))
            emitter.emit_line('{} = {};'.format(dest, emitter.c_error_value(typ)))
            emitter.emit_lines('} else {', '{} = {}; '.format(dest, obj), '}')
        else:
            emitter.emit_line('PyObject *{} = {};'.format(dest, obj))
    else:
        emitter.emit_cast(obj, dest, typ, declare_dest=True, optional=optional)
        if optional:
            emitter.emit_line('if ({} != NULL && {} == NULL) {}'.format(
                obj, dest, error_code))
        else:
            emitter.emit_line('if ({} == NULL) {}'.format(dest, error_code))
self.num_warnings += 1 def new_messages(self) -> List[str]: return self._errors.new_messages() def flush_errors(self) -> None: for error in self.new_messages(): print(error) mypy-0.761/mypyc/exceptions.py0000644€tŠÔÚ€2›s®0000000663513576752246022626 0ustar jukkaDROPBOX\Domain Users00000000000000"""Transform that inserts error checks after opcodes. When initially building the IR, the code doesn't perform error checks for exceptions. This module is used to insert all required error checks afterwards. Each Op describes how it indicates an error condition (if at all). We need to split basic blocks on each error check since branches can only be placed at the end of a basic block. """ from typing import List, Optional from mypyc.ops import ( FuncIR, BasicBlock, LoadErrorValue, Return, Branch, RegisterOp, ERR_NEVER, ERR_MAGIC, ERR_FALSE, NO_TRACEBACK_LINE_NO, ) def insert_exception_handling(ir: FuncIR) -> None: # Generate error block if any ops may raise an exception. If an op # fails without its own error handler, we'll branch to this # block. The block just returns an error value. error_label = None for block in ir.blocks: can_raise = any(op.can_raise() for op in block.ops) if can_raise: error_label = add_handler_block(ir) break if error_label: ir.blocks = split_blocks_at_errors(ir.blocks, error_label, ir.traceback_name) def add_handler_block(ir: FuncIR) -> BasicBlock: block = BasicBlock() ir.blocks.append(block) op = LoadErrorValue(ir.ret_type) block.ops.append(op) ir.env.add_op(op) block.ops.append(Return(op)) return block def split_blocks_at_errors(blocks: List[BasicBlock], default_error_handler: BasicBlock, func_name: Optional[str]) -> List[BasicBlock]: new_blocks = [] # type: List[BasicBlock] # First split blocks on ops that may raise. for block in blocks: ops = block.ops block.ops = [] cur_block = block new_blocks.append(cur_block) # If the block has an error handler specified, use it. Otherwise # fall back to the default. 
error_label = block.error_handler or default_error_handler block.error_handler = None for op in ops: cur_block.ops.append(op) if isinstance(op, RegisterOp) and op.error_kind != ERR_NEVER: # Split new_block = BasicBlock() new_blocks.append(new_block) if op.error_kind == ERR_MAGIC: # Op returns an error value on error that depends on result RType. variant = Branch.IS_ERROR negated = False elif op.error_kind == ERR_FALSE: # Op returns a C false value on error. variant = Branch.BOOL_EXPR negated = True else: assert False, 'unknown error kind %d' % op.error_kind # Void ops can't generate errors since error is always # indicated by a special value stored in a register. assert not op.is_void, "void op generating errors?" branch = Branch(op, true_label=error_label, false_label=new_block, op=variant, line=op.line) branch.negated = negated if op.line != NO_TRACEBACK_LINE_NO and func_name is not None: branch.traceback_entry = (func_name, op.line) cur_block.ops.append(branch) cur_block = new_block return new_blocks mypy-0.761/mypyc/genops.py0000644€tŠÔÚ€2›s®0000076005413576752246021742 0ustar jukkaDROPBOX\Domain Users00000000000000"""Transform a mypy AST to the IR form (Intermediate Representation). 
For example, consider a function like this: def f(x: int) -> int: return x * 2 + 1 It would be translated to something that conceptually looks like this: r0 = 2 r1 = 1 r2 = x * r0 :: int r3 = r2 + r1 :: int return r3 """ from typing import ( TypeVar, Callable, Dict, List, Tuple, Optional, Union, Sequence, Set, Any, Iterable, cast ) from typing_extensions import overload, NoReturn from collections import OrderedDict from abc import abstractmethod import importlib.util import itertools from mypy.build import Graph from mypy.nodes import ( MypyFile, SymbolNode, Statement, FuncItem, FuncDef, ReturnStmt, AssignmentStmt, OpExpr, IntExpr, NameExpr, LDEF, Var, IfStmt, UnaryExpr, ComparisonExpr, WhileStmt, CallExpr, IndexExpr, Block, Expression, ListExpr, ExpressionStmt, MemberExpr, ForStmt, RefExpr, Lvalue, BreakStmt, ContinueStmt, ConditionalExpr, OperatorAssignmentStmt, TupleExpr, ClassDef, TypeInfo, Import, ImportFrom, ImportAll, DictExpr, StrExpr, CastExpr, TempNode, PassStmt, PromoteExpr, AssignmentExpr, AwaitExpr, BackquoteExpr, AssertStmt, BytesExpr, ComplexExpr, Decorator, DelStmt, DictionaryComprehension, EllipsisExpr, EnumCallExpr, ExecStmt, FloatExpr, GeneratorExpr, GlobalDecl, LambdaExpr, ListComprehension, SetComprehension, NamedTupleExpr, NewTypeExpr, NonlocalDecl, OverloadedFuncDef, PrintStmt, RaiseStmt, RevealExpr, SetExpr, SliceExpr, StarExpr, SuperExpr, TryStmt, TypeAliasExpr, TypeApplication, TypeVarExpr, TypedDictExpr, UnicodeExpr, WithStmt, YieldFromExpr, YieldExpr, GDEF, ARG_POS, ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, is_class_var, op_methods ) from mypy.types import ( Type, Instance, CallableType, NoneTyp, TupleType, UnionType, AnyType, TypeVarType, PartialType, TypeType, Overloaded, TypeOfAny, UninhabitedType, UnboundType, TypedDictType, LiteralType, get_proper_type, ) from mypy.visitor import ExpressionVisitor, StatementVisitor from mypy.checkexpr import map_actuals_to_formals from mypy.state import strict_optional_set from 
mypy.util import split_target from mypyc.common import ( ENV_ATTR_NAME, NEXT_LABEL_ATTR_NAME, TEMP_ATTR_NAME, LAMBDA_NAME, MAX_LITERAL_SHORT_INT, TOP_LEVEL_NAME, SELF_NAME, decorator_helper_name, FAST_ISINSTANCE_MAX_SUBCLASSES, PROPSET_PREFIX ) from mypyc.prebuildvisitor import PreBuildVisitor from mypyc.ops import ( BasicBlock, AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, AssignmentTargetAttr, AssignmentTargetTuple, Environment, Op, LoadInt, RType, Value, Register, Return, FuncIR, Assign, Branch, Goto, RuntimeArg, Call, Box, Unbox, Cast, RTuple, Unreachable, TupleGet, TupleSet, ClassIR, NonExtClassInfo, RInstance, ModuleIR, ModuleIRs, GetAttr, SetAttr, LoadStatic, InitStatic, MethodCall, INVALID_FUNC_DEF, int_rprimitive, float_rprimitive, bool_rprimitive, list_rprimitive, is_list_rprimitive, dict_rprimitive, set_rprimitive, str_rprimitive, tuple_rprimitive, none_rprimitive, is_none_rprimitive, object_rprimitive, exc_rtuple, PrimitiveOp, ControlOp, OpDescription, RegisterOp, is_object_rprimitive, LiteralsMap, FuncSignature, VTableAttr, VTableMethod, VTableEntries, NAMESPACE_TYPE, NAMESPACE_MODULE, RaiseStandardError, LoadErrorValue, NO_TRACEBACK_LINE_NO, FuncDecl, FUNC_NORMAL, FUNC_STATICMETHOD, FUNC_CLASSMETHOD, RUnion, is_optional_type, optional_value_type, all_concrete_classes, DeserMaps, ) from mypyc.ops_primitive import binary_ops, unary_ops, func_ops, method_ops, name_ref_ops from mypyc.ops_list import ( list_append_op, list_extend_op, list_len_op, new_list_op, to_list, list_pop_last ) from mypyc.ops_tuple import list_tuple_op, new_tuple_op from mypyc.ops_dict import ( new_dict_op, dict_get_item_op, dict_set_item_op, dict_update_in_display_op, ) from mypyc.ops_set import new_set_op, set_add_op, set_update_op from mypyc.ops_misc import ( none_op, none_object_op, true_op, false_op, iter_op, next_op, next_raw_op, check_stop_op, send_op, yield_from_except_op, coro_op, py_getattr_op, py_setattr_op, py_delattr_op, py_hasattr_op, py_call_op, 
    py_call_with_kwargs_op, py_method_call_op, fast_isinstance_op, bool_op, new_slice_op,
    not_implemented_op, type_op, pytype_from_template_op, import_op, get_module_dict_op,
    ellipsis_op, method_new_op, type_is_op, type_object_op, py_calc_meta_op,
    dataclass_sleight_of_hand,
)
from mypyc.ops_exc import (
    raise_exception_op, raise_exception_with_tb_op, reraise_exception_op,
    error_catch_op, restore_exc_info_op, exc_matches_op, get_exc_value_op,
    get_exc_info_op, keep_propagating_op, set_stop_iteration_value,
)
from mypyc.genops_for import ForGenerator, ForRange, ForList, ForIterable, ForEnumerate, ForZip
from mypyc.rt_subtype import is_runtime_subtype
from mypyc.subtype import is_subtype
from mypyc.sametype import is_same_type, is_same_method_signature
from mypyc.crash import catch_errors
from mypyc.options import CompilerOptions
from mypyc.errors import Errors

# A deferred code-generation thunk: called with no arguments, emits IR.
GenFunc = Callable[[], None]

# One entry used when building a dict: (key, value).
# NOTE(review): the key is Optional; the meaning of a None key is
# established by uses elsewhere in this file — confirm before relying on it.
DictEntry = Tuple[Optional[Value], Value]


class UnsupportedException(Exception):
    # Internal signal that a construct can't be compiled; raised/caught
    # elsewhere in this file.
    pass


# The stubs for callable contextmanagers are busted so cast it to the
# right type...
F = TypeVar('F', bound=Callable[..., Any])
strict_optional_dec = cast(Callable[[F], F], strict_optional_set(True))


def build_type_map(mapper: 'Mapper',
                   modules: List[MypyFile],
                   graph: Graph,
                   types: Dict[Expression, Type],
                   options: CompilerOptions,
                   errors: Errors) -> None:
    """Populate *mapper* with ClassIRs and FuncDecls for everything being compiled."""
    # Collect all classes defined in everything we are compiling
    classes = []
    for module in modules:
        module_classes = [node for node in module.defs if isinstance(node, ClassDef)]
        classes.extend([(module, cdef) for cdef in module_classes])

    # Collect all class mappings so that we can bind arbitrary class name
    # references even if there are import cycles.
    for module, cdef in classes:
        class_ir = ClassIR(cdef.name, module.fullname, is_trait(cdef),
                           is_abstract=cdef.info.is_abstract)
        class_ir.is_ext_class = is_extension_class(cdef)
        # If global optimizations are disabled, turn off tracking of class children
        if not options.global_opts:
            class_ir.children = None
        mapper.type_to_ir[cdef.info] = class_ir

    # Populate structural information in class IR for extension classes.
    for module, cdef in classes:
        with catch_errors(module.path, cdef.line):
            if mapper.type_to_ir[cdef.info].is_ext_class:
                prepare_class_def(module.path, module.fullname, cdef, errors, mapper)
            else:
                prepare_non_ext_class_def(module.path, module.fullname,
                                          cdef, errors, mapper)

    # Collect all the functions also. We collect from the symbol table
    # so that we can easily pick out the right copy of a function that
    # is conditionally defined.
    for module in modules:
        for func in get_module_func_defs(module):
            prepare_func_def(module.fullname, None, func, mapper)
            # TODO: what else?


def load_type_map(mapper: 'Mapper',
                  modules: List[MypyFile],
                  deser_ctx: DeserMaps) -> None:
    """Populate a Mapper with deserialized IR from a list of modules."""
    for module in modules:
        for name, node in module.names.items():
            if isinstance(node.node, TypeInfo):
                ir = deser_ctx.classes[node.node.fullname]
                mapper.type_to_ir[node.node] = ir
                mapper.func_to_decl[node.node] = ir.ctor

    for module in modules:
        for func in get_module_func_defs(module):
            mapper.func_to_decl[func] = deser_ctx.functions[func.fullname].decl


@strict_optional_dec  # Turn on strict optional for any type manipulations we do
def build_ir(modules: List[MypyFile],
             graph: Graph,
             types: Dict[Expression, Type],
             mapper: 'Mapper',
             options: CompilerOptions,
             errors: Errors) -> ModuleIRs:
    """Build a ModuleIR for each module and compute vtables for extension classes."""
    build_type_map(mapper, modules, graph, types, options, errors)

    result = OrderedDict()  # type: ModuleIRs

    # Generate IR for all modules.
    class_irs = []

    for module in modules:
        # First pass to determine free symbols.
    Supports a pretty limited range."""
    if isinstance(e, (StrExpr, IntExpr, FloatExpr)):
        return e.value
    elif isinstance(e, RefExpr) and e.fullname == 'builtins.True':
        return True
    elif isinstance(e, RefExpr) and e.fullname == 'builtins.False':
        return False
    elif isinstance(e, RefExpr) and e.fullname == 'builtins.None':
        return None
    # Anything else can't be converted; callers must check for this sentinel.
    return NotImplemented


def get_mypyc_attr_call(d: Expression) -> Optional[CallExpr]:
    """Check if an expression is a call to mypyc_attr and return it if so."""
    if (
        isinstance(d, CallExpr)
        and isinstance(d.callee, RefExpr)
        and d.callee.fullname == 'mypy_extensions.mypyc_attr'
    ):
        return d
    return None


def get_mypyc_attrs(stmt: Union[ClassDef, Decorator]) -> Dict[str, Any]:
    """Collect all the mypyc_attr attributes on a class definition or a function."""
    attrs = {}  # type: Dict[str, Any]
    for dec in stmt.decorators:
        d = get_mypyc_attr_call(dec)
        if d:
            for name, arg in zip(d.arg_names, d.args):
                if name is None:
                    # A bare positional string argument acts as a boolean flag.
                    if isinstance(arg, StrExpr):
                        attrs[arg.value] = True
                else:
                    attrs[name] = get_mypyc_attr_literal(arg)

    return attrs


def is_extension_class(cdef: ClassDef) -> bool:
    """Decide whether cdef can be compiled as a native extension class.

    Any decorator other than trait/dataclass/mypyc_attr, or a metaclass other
    than the whitelisted ABC/typing metaclasses, forces a non-extension class.
    """
    if any(
        not is_trait_decorator(d)
        and not is_dataclass_decorator(d)
        and not get_mypyc_attr_call(d)
        for d in cdef.decorators
    ):
        return False
    elif (cdef.info.metaclass_type and cdef.info.metaclass_type.type.fullname not in (
            'abc.ABCMeta', 'typing.TypingMeta', 'typing.GenericMeta')):
        return False
    return True


def get_func_def(op: Union[FuncDef, Decorator, OverloadedFuncDef]) -> FuncDef:
    """Unwrap overloads and decorators down to the underlying FuncDef."""
    if isinstance(op, OverloadedFuncDef):
        assert op.impl
        op = op.impl
    if isinstance(op, Decorator):
        op = op.func
    return op


def get_module_func_defs(module: MypyFile) -> Iterable[FuncDef]:
    """Collect all of the (non-method) functions declared in a module."""
    for name, node in module.names.items():
        # We need to filter out functions that are imported or
        # aliases.  The best way to do this seems to be by
        # checking that the fullname matches.
        if (isinstance(node.node, (FuncDef, Decorator, OverloadedFuncDef))
                and node.fullname == module.fullname + '.' + name):
            yield get_func_def(node.node)


def specialize_parent_vtable(cls: ClassIR, parent: ClassIR) -> VTableEntries:
    """Generate the part of a vtable corresponding to a parent class or trait."""
    updated = []
    for entry in parent.vtable_entries:
        if isinstance(entry, VTableMethod):
            # Find the original method corresponding to this vtable entry.
            # (This may not be the method in the entry, if it was overridden.)
            orig_parent_method = entry.cls.get_method(entry.name)
            assert orig_parent_method
            method_cls = cls.get_method_and_class(entry.name)
            if method_cls:
                child_method, defining_cls = method_cls
                # TODO: emit a wrapper for __init__ that raises or something
                if (is_same_method_signature(orig_parent_method.sig, child_method.sig)
                        or orig_parent_method.name == '__init__'):
                    entry = VTableMethod(entry.cls, entry.name, child_method,
                                         entry.shadow_method)
                else:
                    # Signatures differ: dispatch through the glue method instead.
                    entry = VTableMethod(entry.cls, entry.name,
                                         defining_cls.glue_methods[(entry.cls, entry.name)],
                                         entry.shadow_method)
        else:
            # If it is an attribute from a trait, we need to find out
            # the real class it got mixed in at and point to that.
            if parent.is_trait:
                # Point the entry at the class where the attribute actually
                # lives in this class's layout.
                _, origin_cls = cls.attr_details(entry.name)
                entry = VTableAttr(origin_cls, entry.name, entry.is_setter)
        updated.append(entry)
    return updated


def compute_vtable(cls: ClassIR) -> None:
    """Compute the vtable structure for a class."""
    # Already computed (also breaks recursion on repeated visits).
    if cls.vtable is not None:
        return

    if not cls.is_generated:
        cls.has_dict = any(x.inherits_python for x in cls.mro)

    for t in cls.mro[1:]:
        # Make sure all ancestors are processed first
        compute_vtable(t)

        # Merge attributes from traits into the class
        if not t.is_trait:
            continue
        for name, typ in t.attributes.items():
            if not cls.is_trait and not any(name in b.attributes for b in cls.base_mro):
                cls.attributes[name] = typ

    cls.vtable = {}
    if cls.base:
        assert cls.base.vtable is not None
        cls.vtable.update(cls.base.vtable)
        cls.vtable_entries = specialize_parent_vtable(cls, cls.base)

    # Include the vtable from the parent classes, but handle method overrides.
    entries = cls.vtable_entries

    # Traits need to have attributes in the vtable, since the
    # attributes can be at different places in different classes, but
    # regular classes can just directly get them.
    if cls.is_trait:
        # Traits also need to pull in vtable entries for non-trait
        # parent classes explicitly.
        for t in cls.mro:
            for attr in t.attributes:
                if attr in cls.vtable:
                    continue
                cls.vtable[attr] = len(entries)
                entries.append(VTableAttr(t, attr, is_setter=False))
                entries.append(VTableAttr(t, attr, is_setter=True))

    all_traits = [t for t in cls.mro if t.is_trait]

    for t in [cls] + cls.traits:
        # NOTE(review): itertools.chain() with a single iterable is a no-op
        # wrapper; a plain t.methods.values() would behave identically.
        for fn in itertools.chain(t.methods.values()):
            # TODO: don't generate a new entry when we overload without changing the type
            if fn == cls.get_method(fn.name):
                cls.vtable[fn.name] = len(entries)
                # If the class contains a glue method referring to itself, that is a
                # shadow glue method to support interpreted subclasses.
                shadow = cls.glue_methods.get((cls, fn.name))
                entries.append(VTableMethod(t, fn.name, fn, shadow))

    # Compute vtables for all of the traits that the class implements
    if not cls.is_trait:
        for trait in all_traits:
            compute_vtable(trait)
            cls.trait_vtables[trait] = specialize_parent_vtable(cls, trait)


class Mapper:
    """Keep track of mappings from mypy concepts to IR concepts.

    This state is shared across all modules being compiled in all
    compilation groups.
    """

    def __init__(self, group_map: Dict[str, Optional[str]]) -> None:
        self.group_map = group_map
        self.type_to_ir = {}  # type: Dict[TypeInfo, ClassIR]
        self.func_to_decl = {}  # type: Dict[SymbolNode, FuncDecl]
        # LiteralsMap maps literal values to a static name. Each
        # compilation group has its own LiteralsMap. (Since they can't
        # share literals.)
        self.literals = {
            v: OrderedDict() for v in group_map.values()
        }  # type: Dict[Optional[str], LiteralsMap]

    def type_to_rtype(self, typ: Optional[Type]) -> RType:
        """Map a mypy type to the RType used to represent it at runtime.

        Unknown/unsupported types fall back to object_rprimitive.
        """
        if typ is None:
            return object_rprimitive

        typ = get_proper_type(typ)
        if isinstance(typ, Instance):
            if typ.type.fullname == 'builtins.int':
                return int_rprimitive
            elif typ.type.fullname == 'builtins.float':
                return float_rprimitive
            elif typ.type.fullname == 'builtins.str':
                return str_rprimitive
            elif typ.type.fullname == 'builtins.bool':
                return bool_rprimitive
            elif typ.type.fullname == 'builtins.list':
                return list_rprimitive
            # Dict subclasses are at least somewhat common and we
            # specifically support them, so make sure that dict operations
            # get optimized on them.
            elif any(cls.fullname == 'builtins.dict' for cls in typ.type.mro):
                return dict_rprimitive
            elif typ.type.fullname == 'builtins.set':
                return set_rprimitive
            elif typ.type.fullname == 'builtins.tuple':
                return tuple_rprimitive  # Varying-length tuple
            elif typ.type in self.type_to_ir:
                # A class we are compiling: use its native instance type.
                return RInstance(self.type_to_ir[typ.type])
            else:
                return object_rprimitive
        elif isinstance(typ, TupleType):
            # Use our unboxed tuples for raw tuples but fall back to
            # being boxed for NamedTuple.
            if typ.partial_fallback.type.fullname == 'builtins.tuple':
                return RTuple([self.type_to_rtype(t) for t in typ.items])
            else:
                return tuple_rprimitive
        elif isinstance(typ, CallableType):
            return object_rprimitive
        elif isinstance(typ, NoneTyp):
            return none_rprimitive
        elif isinstance(typ, UnionType):
            return RUnion([self.type_to_rtype(item) for item in typ.items])
        elif isinstance(typ, AnyType):
            return object_rprimitive
        elif isinstance(typ, TypeType):
            return object_rprimitive
        elif isinstance(typ, TypeVarType):
            # Erase type variable to upper bound.
            # TODO: Erase to union if object has value restriction?
            return self.type_to_rtype(typ.upper_bound)
        elif isinstance(typ, PartialType):
            assert typ.var.type is not None
            return self.type_to_rtype(typ.var.type)
        elif isinstance(typ, Overloaded):
            return object_rprimitive
        elif isinstance(typ, TypedDictType):
            return dict_rprimitive
        elif isinstance(typ, LiteralType):
            return self.type_to_rtype(typ.fallback)
        elif isinstance(typ, (UninhabitedType, UnboundType)):
            # Sure, whatever!
            return object_rprimitive
        # I think we've covered everything that is supposed to
        # actually show up, so anything else is a bug somewhere.
        assert False, 'unexpected type %s' % type(typ)

    def get_arg_rtype(self, typ: Type, kind: int) -> RType:
        """Map one argument to its RType: *args is a tuple, **kwargs a dict."""
        if kind == ARG_STAR:
            return tuple_rprimitive
        elif kind == ARG_STAR2:
            return dict_rprimitive
        else:
            return self.type_to_rtype(typ)

    def fdef_to_sig(self, fdef: FuncDef) -> FuncSignature:
        """Compute the low-level (RType) signature for a function definition."""
        if isinstance(fdef.type, CallableType):
            arg_types = [self.get_arg_rtype(typ, kind)
                         for typ, kind in zip(fdef.type.arg_types, fdef.type.arg_kinds)]
            ret = self.type_to_rtype(fdef.type.ret_type)
        else:
            # Handle unannotated functions
            arg_types = [object_rprimitive for arg in fdef.arguments]
            ret = object_rprimitive

        args = [RuntimeArg(arg_name, arg_type, arg_kind)
                for arg_name, arg_kind, arg_type
                in zip(fdef.arg_names, fdef.arg_kinds, arg_types)]

        # We force certain dunder methods to return objects to support letting them
        # return NotImplemented. It also avoids some pointless boxing and unboxing,
        # since tp_richcompare needs an object anyways.
        if fdef.name in ('__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__'):
            ret = object_rprimitive

        return FuncSignature(args, ret)

    def literal_static_name(self, module: str,
                            value: Union[int, float, complex, str, bytes]) -> str:
        """Return (allocating on first use) the static name for a literal value."""
        # Literals are shared between modules in a compilation group
        # but not outside the group.
        literals = self.literals[self.group_map.get(module)]

        # Include type to distinguish between 1 and 1.0, and so on.
        key = (type(value), value)
        if key not in literals:
            if isinstance(value, str):
                prefix = 'unicode_'
            else:
                prefix = type(value).__name__ + '_'
            literals[key] = prefix + str(len(literals))
        return literals[key]


def prepare_func_def(module_name: str, class_name: Optional[str],
                     fdef: FuncDef, mapper: Mapper) -> FuncDecl:
    """Create the FuncDecl for a function and register it in *mapper*."""
    kind = FUNC_STATICMETHOD if fdef.is_static else (
        FUNC_CLASSMETHOD if fdef.is_class else FUNC_NORMAL)
    decl = FuncDecl(fdef.name, class_name, module_name, mapper.fdef_to_sig(fdef), kind)
    mapper.func_to_decl[fdef] = decl
    return decl


def prepare_method_def(ir: ClassIR, module_name: str, cdef: ClassDef, mapper: Mapper,
                       node: Union[FuncDef, Decorator]) -> None:
    """Register a method (plain or decorated) in the class IR's method table."""
    if isinstance(node, FuncDef):
        ir.method_decls[node.name] = prepare_func_def(module_name, cdef.name, node, mapper)
    elif isinstance(node, Decorator):
        # TODO: do something about abstract methods here. Currently, they are handled just like
        # normal methods.
        decl = prepare_func_def(module_name, cdef.name, node.func, mapper)
        if not node.decorators:
            ir.method_decls[node.name] = decl
        elif isinstance(node.decorators[0], MemberExpr) and node.decorators[0].name == 'setter':
            # Make property setter name different than getter name so there are no
            # name clashes when generating C code, and property lookup at the IR level
            # works correctly.
            decl.name = PROPSET_PREFIX + decl.name
            decl.is_prop_setter = True
            ir.method_decls[PROPSET_PREFIX + node.name] = decl

        if node.func.is_property:
            assert node.func.type
            decl.is_prop_getter = True
            ir.property_types[node.name] = decl.sig.ret_type


def is_valid_multipart_property_def(prop: OverloadedFuncDef) -> bool:
    """Check that a property overload is exactly a getter plus one @x.setter."""
    # Checks to ensure supported property decorator semantics
    if len(prop.items) == 2:
        getter = prop.items[0]
        setter = prop.items[1]
        if isinstance(getter, Decorator) and isinstance(setter, Decorator):
            if getter.func.is_property and len(setter.decorators) == 1:
                if isinstance(setter.decorators[0], MemberExpr):
                    if setter.decorators[0].name == "setter":
                        return True
    return False


def can_subclass_builtin(builtin_base: str) -> bool:
    """Whitelist of builtin bases a native class is allowed to subclass."""
    # BaseException and dict are special cased.
    return builtin_base in (
        ('builtins.Exception', 'builtins.LookupError', 'builtins.IndexError',
         'builtins.Warning', 'builtins.UserWarning', 'builtins.ValueError',
         'builtins.object', ))


def prepare_class_def(path: str, module_name: str, cdef: ClassDef,
                      errors: Errors, mapper: Mapper) -> None:
    """Populate structural information in an extension class's ClassIR.

    Fills in attributes, method declarations, the constructor decl, and
    base/trait/MRO information; reports unsupported constructs via *errors*.
    """
    ir = mapper.type_to_ir[cdef.info]
    info = cdef.info

    attrs = get_mypyc_attrs(cdef)
    if attrs.get("allow_interpreted_subclasses") is True:
        ir.allow_interpreted_subclasses = True

    # We sort the table for determinism here on Python 3.5
    for name, node in sorted(info.names.items()):
        # Currently all plugin generated methods are dummies and not included.
        if node.plugin_generated:
            continue

        if isinstance(node.node, Var):
            assert node.node.type, "Class member %s missing type" % name
            if not node.node.is_classvar and name != '__slots__':
                ir.attributes[name] = mapper.type_to_rtype(node.node.type)
        elif isinstance(node.node, (FuncDef, Decorator)):
            prepare_method_def(ir, module_name, cdef, mapper, node.node)
        elif isinstance(node.node, OverloadedFuncDef):
            # Handle case for property with both a getter and a setter
            if node.node.is_property:
                if is_valid_multipart_property_def(node.node):
                    for item in node.node.items:
                        prepare_method_def(ir, module_name, cdef, mapper, item)
                else:
                    errors.error("Unsupported property decorator semantics", path, cdef.line)

            # Handle case for regular function overload
            else:
                assert node.node.impl
                prepare_method_def(ir, module_name, cdef, mapper, node.node.impl)

    # Check for subclassing from builtin types
    for cls in info.mro:
        # Special case exceptions and dicts
        # XXX: How do we handle *other* things??
        if cls.fullname == 'builtins.BaseException':
            ir.builtin_base = 'PyBaseExceptionObject'
        elif cls.fullname == 'builtins.dict':
            ir.builtin_base = 'PyDictObject'
        elif cls.fullname.startswith('builtins.'):
            if not can_subclass_builtin(cls.fullname):
                # Note that if we try to subclass a C extension class that
                # isn't in builtins, bad things will happen and we won't
                # catch it here! But this should catch a lot of the most
                # common pitfalls.
                errors.error("Inheriting from most builtin types is unimplemented",
                             path, cdef.line)

    if ir.builtin_base:
        ir.attributes.clear()

    # Set up a constructor decl
    init_node = cdef.info['__init__'].node
    if not ir.is_trait and not ir.builtin_base and isinstance(init_node, FuncDef):
        init_sig = mapper.fdef_to_sig(init_node)

        defining_ir = mapper.type_to_ir.get(init_node.info)
        # If there is a nontrivial __init__ that wasn't defined in an
        # extension class, we need to make the constructor take *args,
        # **kwargs so it can call tp_init.
        if ((defining_ir is None or not defining_ir.is_ext_class
             or cdef.info['__init__'].plugin_generated)
                and init_node.info.fullname != 'builtins.object'):
            init_sig = FuncSignature(
                [init_sig.args[0],
                 RuntimeArg("args", tuple_rprimitive, ARG_STAR),
                 RuntimeArg("kwargs", dict_rprimitive, ARG_STAR2)],
                init_sig.ret_type)

        # The constructor takes __init__'s arguments minus self and
        # returns a new instance.
        ctor_sig = FuncSignature(init_sig.args[1:], RInstance(ir))
        ir.ctor = FuncDecl(cdef.name, None, module_name, ctor_sig)
        mapper.func_to_decl[cdef.info] = ir.ctor

    # Set up the parent class
    bases = [mapper.type_to_ir[base.type] for base in info.bases
             if base.type in mapper.type_to_ir]
    if not all(c.is_trait for c in bases[1:]):
        errors.error("Non-trait bases must appear first in parent list", path, cdef.line)
    ir.traits = [c for c in bases if c.is_trait]

    mro = []
    base_mro = []
    for cls in info.mro:
        if cls not in mapper.type_to_ir:
            if cls.fullname != 'builtins.object':
                ir.inherits_python = True
            continue
        base_ir = mapper.type_to_ir[cls]
        if not base_ir.is_trait:
            base_mro.append(base_ir)
        mro.append(base_ir)

        if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class:
            ir.inherits_python = True

    base_idx = 1 if not ir.is_trait else 0
    if len(base_mro) > base_idx:
        ir.base = base_mro[base_idx]
    ir.mro = mro
    ir.base_mro = base_mro

    for base in bases:
        if base.children is not None:
            base.children.append(ir)

    if is_dataclass(cdef):
        ir.is_augmented = True


def prepare_non_ext_class_def(path: str, module_name: str, cdef: ClassDef,
                              errors: Errors, mapper: Mapper) -> None:
    """Register the methods of a non-extension class and validate its bases."""
    ir = mapper.type_to_ir[cdef.info]
    info = cdef.info

    for name, node in info.names.items():
        if isinstance(node.node, (FuncDef, Decorator)):
            prepare_method_def(ir, module_name, cdef, mapper, node.node)
        elif isinstance(node.node, OverloadedFuncDef):
            # Handle case for property with both a getter and a setter
            if node.node.is_property:
                if not is_valid_multipart_property_def(node.node):
                    errors.error("Unsupported property decorator semantics", path, cdef.line)
                for item in node.node.items:
                    prepare_method_def(ir, module_name,
                                       cdef, mapper, item)
            # Handle case for regular function overload
            else:
                prepare_method_def(ir, module_name, cdef, mapper, get_func_def(node.node))

    if any(
        cls in mapper.type_to_ir and mapper.type_to_ir[cls].is_ext_class
        for cls in info.mro
    ):
        errors.error(
            "Non-extension classes may not inherit from extension classes", path, cdef.line)


def concrete_arg_kind(kind: int) -> int:
    """Find the concrete version of an arg kind that is being passed."""
    if kind == ARG_OPT:
        return ARG_POS
    elif kind == ARG_NAMED_OPT:
        return ARG_NAMED
    else:
        return kind


class FuncInfo(object):
    """Contains information about functions as they are generated."""
    def __init__(self,
                 fitem: FuncItem = INVALID_FUNC_DEF,
                 name: str = '',
                 class_name: Optional[str] = None,
                 namespace: str = '',
                 is_nested: bool = False,
                 contains_nested: bool = False,
                 is_decorated: bool = False,
                 in_non_ext: bool = False) -> None:
        self.fitem = fitem
        # Decorated functions are generated under a helper name.
        self.name = name if not is_decorated else decorator_helper_name(name)
        self.class_name = class_name
        self.ns = namespace
        # Callable classes implement the '__call__' method, and are used to represent functions
        # that are nested inside of other functions.
        self._callable_class = None  # type: Optional[ImplicitClass]
        # Environment classes are ClassIR instances that contain attributes representing the
        # variables in the environment of the function they correspond to. Environment classes are
        # generated for functions that contain nested functions.
        self._env_class = None  # type: Optional[ClassIR]
        # Generator classes implement the '__next__' method, and are used to represent generators
        # returned by generator functions.
        self._generator_class = None  # type: Optional[GeneratorClass]
        # Environment class registers are the local registers associated with instances of an
        # environment class, used for getting and setting attributes. curr_env_reg is the register
        # associated with the current environment.
        self._curr_env_reg = None  # type: Optional[Value]
        # These are flags denoting whether a given function is nested, contains a nested function,
        # is decorated, or is within a non-extension class.
        self.is_nested = is_nested
        self.contains_nested = contains_nested
        self.is_decorated = is_decorated
        self.in_non_ext = in_non_ext

        # TODO: add field for ret_type: RType = none_rprimitive

    def namespaced_name(self) -> str:
        """Name qualified with class name and namespace (non-empty parts joined by '_')."""
        return '_'.join(x for x in [self.name, self.class_name, self.ns] if x)

    @property
    def is_generator(self) -> bool:
        return self.fitem.is_generator or self.fitem.is_coroutine

    @property
    def callable_class(self) -> 'ImplicitClass':
        assert self._callable_class is not None
        return self._callable_class

    @callable_class.setter
    def callable_class(self, cls: 'ImplicitClass') -> None:
        self._callable_class = cls

    @property
    def env_class(self) -> ClassIR:
        assert self._env_class is not None
        return self._env_class

    @env_class.setter
    def env_class(self, ir: ClassIR) -> None:
        self._env_class = ir

    @property
    def generator_class(self) -> 'GeneratorClass':
        assert self._generator_class is not None
        return self._generator_class

    @generator_class.setter
    def generator_class(self, cls: 'GeneratorClass') -> None:
        self._generator_class = cls

    @property
    def curr_env_reg(self) -> Value:
        assert self._curr_env_reg is not None
        return self._curr_env_reg


class ImplicitClass(object):
    """Contains information regarding classes that are generated as a result of nested functions or
    generated functions, but not explicitly defined in the source code.
    """
    def __init__(self, ir: ClassIR) -> None:
        # The ClassIR instance associated with this class.
        self.ir = ir
        # The register associated with the 'self' instance for this generator class.
        self._self_reg = None  # type: Optional[Value]
        # Environment class registers are the local registers associated with instances of an
        # environment class, used for getting and setting attributes. curr_env_reg is the register
        # associated with the current environment.
        # prev_env_reg is the self.__mypyc_env__ field
        # associated with the previous environment.
        self._curr_env_reg = None  # type: Optional[Value]
        self._prev_env_reg = None  # type: Optional[Value]

    @property
    def self_reg(self) -> Value:
        assert self._self_reg is not None
        return self._self_reg

    @self_reg.setter
    def self_reg(self, reg: Value) -> None:
        self._self_reg = reg

    @property
    def curr_env_reg(self) -> Value:
        assert self._curr_env_reg is not None
        return self._curr_env_reg

    @curr_env_reg.setter
    def curr_env_reg(self, reg: Value) -> None:
        self._curr_env_reg = reg

    @property
    def prev_env_reg(self) -> Value:
        assert self._prev_env_reg is not None
        return self._prev_env_reg

    @prev_env_reg.setter
    def prev_env_reg(self, reg: Value) -> None:
        self._prev_env_reg = reg


class GeneratorClass(ImplicitClass):
    """State for the generated class that implements a generator's '__next__'."""
    def __init__(self, ir: ClassIR) -> None:
        super().__init__(ir)
        # This register holds the label number that the '__next__' function should go to the next
        # time it is called.
        self._next_label_reg = None  # type: Optional[Value]
        self._next_label_target = None  # type: Optional[AssignmentTarget]

        # These registers hold the error values for the generator object for the case that the
        # 'throw' function is called.
        self.exc_regs = None  # type: Optional[Tuple[Value, Value, Value]]

        # Holds the arg passed to send
        self.send_arg_reg = None  # type: Optional[Value]

        # The switch block is used to decide which instruction to go using the value held in the
        # next-label register.
        self.switch_block = BasicBlock()
        self.blocks = []  # type: List[BasicBlock]

    @property
    def next_label_reg(self) -> Value:
        assert self._next_label_reg is not None
        return self._next_label_reg

    @next_label_reg.setter
    def next_label_reg(self, reg: Value) -> None:
        self._next_label_reg = reg

    @property
    def next_label_target(self) -> AssignmentTarget:
        assert self._next_label_target is not None
        return self._next_label_target

    @next_label_target.setter
    def next_label_target(self, target: AssignmentTarget) -> None:
        self._next_label_target = target


# Infrastructure for special casing calls to builtin functions in a
# programmatic way.  Most special cases should be handled using the
# data driven "primitive ops" system, but certain operations require
# special handling that has access to the AST/IR directly and can make
# decisions/optimizations based on it.
#
# For example, we use specializers to statically emit the length of a
# fixed length tuple and to emit optimized code for any/all calls with
# generator comprehensions as the argument.
#
# Specializers are attempted before compiling the arguments to the
# function.  Specializers can return None to indicate that they failed
# and the call should be compiled normally.  Otherwise they should emit
# code for the call and return a value containing the result.
#
# Specializers take three arguments: the IRBuilder, the CallExpr being
# compiled, and the RefExpr that is the left hand side of the call.
#
# Specializers can operate on methods as well, and are keyed on the
# name and RType in that case.
# A specializer emits IR for a call directly, or returns None to decline.
Specializer = Callable[['IRBuilder', CallExpr, RefExpr], Optional[Value]]

# Registry of specializers, keyed on (callee name, receiver RType or None).
specializers = {}  # type: Dict[Tuple[str, Optional[RType]], Specializer]


def specialize_function(
        name: str, typ: Optional[RType] = None) -> Callable[[Specializer], Specializer]:
    """Decorator to register a function as being a specializer."""
    def wrapper(f: Specializer) -> Specializer:
        specializers[name, typ] = f
        return f
    return wrapper


class NonlocalControl:
    """Represents a stack frame of constructs that modify nonlocal control flow.

    The nonlocal control flow constructs are break, continue, and
    return, and their behavior is modified by a number of other
    constructs.  The most obvious is loop, which override where break
    and continue jump to, but also `except` (which needs to clear
    exc_info when left) and (eventually) finally blocks (which need to
    ensure that the finally block is always executed when leaving the
    try/except blocks).
    """

    @abstractmethod
    def gen_break(self, builder: 'IRBuilder', line: int) -> None: pass

    @abstractmethod
    def gen_continue(self, builder: 'IRBuilder', line: int) -> None: pass

    @abstractmethod
    def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: pass


class BaseNonlocalControl(NonlocalControl):
    """Outermost control frame: plain return; break/continue are errors."""

    def gen_break(self, builder: 'IRBuilder', line: int) -> None:
        assert False, "break outside of loop"

    def gen_continue(self, builder: 'IRBuilder', line: int) -> None:
        assert False, "continue outside of loop"

    def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None:
        builder.add(Return(value))


class LoopNonlocalControl(NonlocalControl):
    """Control frame for a loop: break/continue jump to the loop's blocks."""

    def __init__(self, outer: NonlocalControl,
                 continue_block: BasicBlock, break_block: BasicBlock) -> None:
        self.outer = outer
        self.continue_block = continue_block
        self.break_block = break_block

    def gen_break(self, builder: 'IRBuilder', line: int) -> None:
        builder.add(Goto(self.break_block))

    def gen_continue(self, builder: 'IRBuilder', line: int) -> None:
        builder.add(Goto(self.continue_block))

    def gen_return(self,
                   builder: 'IRBuilder', value: Value, line: int) -> None:
        # return is unaffected by loops; delegate to the enclosing frame.
        self.outer.gen_return(builder, value, line)


class GeneratorNonlocalControl(BaseNonlocalControl):
    """Control frame for generator bodies: return raises StopIteration."""

    def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None:
        # Assign an invalid next label number so that the next time __next__ is called, we jump to
        # the case in which StopIteration is raised.
        builder.assign(builder.fn_info.generator_class.next_label_target,
                       builder.add(LoadInt(-1)),
                       line)
        # Raise a StopIteration containing a field for the value that should be returned. Before
        # doing so, create a new block without an error handler set so that the implicitly thrown
        # StopIteration isn't caught by except blocks inside of the generator function.
        builder.error_handlers.append(None)
        builder.goto_new_block()
        # Skip creating a traceback frame when we raise here, because
        # we don't care about the traceback frame and it is kind of
        # expensive since raising StopIteration is an extremely common case.
        # Also we call a special internal function to set StopIteration instead of
        # using RaiseStandardError because the obvious thing doesn't work if the
        # value is a tuple (???).
        builder.primitive_op(set_stop_iteration_value, [value], NO_TRACEBACK_LINE_NO)
        builder.add(Unreachable())
        builder.error_handlers.pop()


class CleanupNonlocalControl(NonlocalControl):
    """Abstract nonlocal control that runs some cleanup code."""

    def __init__(self, outer: NonlocalControl) -> None:
        self.outer = outer

    @abstractmethod
    def gen_cleanup(self, builder: 'IRBuilder', line: int) -> None: ...
def gen_break(self, builder: 'IRBuilder', line: int) -> None: self.gen_cleanup(builder, line) self.outer.gen_break(builder, line) def gen_continue(self, builder: 'IRBuilder', line: int) -> None: self.gen_cleanup(builder, line) self.outer.gen_continue(builder, line) def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: self.gen_cleanup(builder, line) self.outer.gen_return(builder, value, line) class TryFinallyNonlocalControl(NonlocalControl): def __init__(self, target: BasicBlock) -> None: self.target = target self.ret_reg = None # type: Optional[Register] def gen_break(self, builder: 'IRBuilder', line: int) -> None: builder.error("break inside try/finally block is unimplemented", line) def gen_continue(self, builder: 'IRBuilder', line: int) -> None: builder.error("continue inside try/finally block is unimplemented", line) def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: if self.ret_reg is None: self.ret_reg = builder.alloc_temp(builder.ret_types[-1]) builder.add(Assign(self.ret_reg, value)) builder.add(Goto(self.target)) class ExceptNonlocalControl(CleanupNonlocalControl): """Nonlocal control for except blocks. Just makes sure that sys.exc_info always gets restored when we leave. This is super annoying. """ def __init__(self, outer: NonlocalControl, saved: Union[Value, AssignmentTarget]) -> None: super().__init__(outer) self.saved = saved def gen_cleanup(self, builder: 'IRBuilder', line: int) -> None: builder.primitive_op(restore_exc_info_op, [builder.read(self.saved)], line) class FinallyNonlocalControl(CleanupNonlocalControl): """Nonlocal control for finally blocks. Just makes sure that sys.exc_info always gets restored when we leave and the return register is decrefed if it isn't null. 
""" def __init__(self, outer: NonlocalControl, ret_reg: Optional[Value], saved: Value) -> None: super().__init__(outer) self.ret_reg = ret_reg self.saved = saved def gen_cleanup(self, builder: 'IRBuilder', line: int) -> None: # Do an error branch on the return value register, which # may be undefined. This will allow it to be properly # decrefed if it is not null. This is kind of a hack. if self.ret_reg: target = BasicBlock() builder.add(Branch(self.ret_reg, target, target, Branch.IS_ERROR)) builder.activate_block(target) # Restore the old exc_info target, cleanup = BasicBlock(), BasicBlock() builder.add(Branch(self.saved, target, cleanup, Branch.IS_ERROR)) builder.activate_block(cleanup) builder.primitive_op(restore_exc_info_op, [self.saved], line) builder.goto_and_activate(target) class IRBuilder(ExpressionVisitor[Value], StatementVisitor[None]): def __init__(self, current_module: str, types: Dict[Expression, Type], graph: Graph, errors: Errors, mapper: Mapper, pbv: PreBuildVisitor, options: CompilerOptions) -> None: self.current_module = current_module self.types = types self.graph = graph self.environment = Environment() self.environments = [self.environment] self.ret_types = [] # type: List[RType] self.blocks = [] # type: List[List[BasicBlock]] self.functions = [] # type: List[FuncIR] self.classes = [] # type: List[ClassIR] self.final_names = [] # type: List[Tuple[str, RType]] self.callable_class_names = set() # type: Set[str] self.options = options # These variables keep track of the number of lambdas, implicit indices, and implicit # iterators instantiated so we avoid name conflicts. The indices and iterators are # instantiated from for-loops. self.lambda_counter = 0 self.temp_counter = 0 # These variables are populated from the first-pass PreBuildVisitor. 
class IRBuilder(ExpressionVisitor[Value], StatementVisitor[None]):
    """AST visitor that builds mypyc IR for one module.

    Visits each top-level definition of a MypyFile and accumulates the
    generated FuncIRs/ClassIRs in self.functions / self.classes.
    """

    def __init__(self,
                 current_module: str,
                 types: Dict[Expression, Type],
                 graph: Graph,
                 errors: Errors,
                 mapper: Mapper,
                 pbv: PreBuildVisitor,
                 options: CompilerOptions) -> None:
        self.current_module = current_module
        self.types = types
        self.graph = graph
        self.environment = Environment()
        self.environments = [self.environment]
        self.ret_types = []  # type: List[RType]
        self.blocks = []  # type: List[List[BasicBlock]]
        self.functions = []  # type: List[FuncIR]
        self.classes = []  # type: List[ClassIR]
        self.final_names = []  # type: List[Tuple[str, RType]]
        self.callable_class_names = set()  # type: Set[str]
        self.options = options

        # These variables keep track of the number of lambdas, implicit indices, and implicit
        # iterators instantiated so we avoid name conflicts. The indices and iterators are
        # instantiated from for-loops.
        self.lambda_counter = 0
        self.temp_counter = 0

        # These variables are populated from the first-pass PreBuildVisitor.
        self.free_variables = pbv.free_variables
        self.prop_setters = pbv.prop_setters
        self.encapsulating_funcs = pbv.encapsulating_funcs
        self.nested_fitems = pbv.nested_funcs.keys()
        self.fdefs_to_decorators = pbv.funcs_to_decorators

        # This list operates similarly to a function call stack for nested functions. Whenever a
        # function definition begins to be generated, a FuncInfo instance is added to the stack,
        # and information about that function (e.g. whether it is nested, its environment class to
        # be generated) is stored in that FuncInfo instance. When the function is done being
        # generated, its corresponding FuncInfo is popped off the stack.
        self.fn_info = FuncInfo(INVALID_FUNC_DEF, '', '')
        self.fn_infos = [self.fn_info]  # type: List[FuncInfo]

        # This list operates as a stack of constructs that modify the
        # behavior of nonlocal control flow constructs.
        self.nonlocal_control = []  # type: List[NonlocalControl]
        # Stack of except handler entry blocks
        self.error_handlers = [None]  # type: List[Optional[BasicBlock]]

        self.errors = errors
        self.mapper = mapper
        # Notionally a list of all of the modules imported by the
        # module being compiled, but stored as an OrderedDict so we
        # can also do quick lookups.
        self.imports = OrderedDict()  # type: OrderedDict[str, None]

    def visit_mypy_file(self, mypyfile: MypyFile) -> None:
        """Generate IR for a whole module, including its synthetic top-level function."""
        if mypyfile.fullname in ('typing', 'abc'):
            # These module are special; their contents are currently all
            # built-in primitives.
            return

        self.module_path = mypyfile.path
        self.module_name = mypyfile.fullname

        classes = [node for node in mypyfile.defs if isinstance(node, ClassDef)]

        # Collect all classes.
        for cls in classes:
            ir = self.mapper.type_to_ir[cls.info]
            self.classes.append(ir)

        # NOTE(review): the extraction this file was recovered from stripped
        # angle-bracketed string literals; '<top level>' and '<module>' below
        # are the restored values (empty strings would produce nameless
        # FuncInfo/traceback entries).
        self.enter(FuncInfo(name='<top level>'))

        # Make sure we have a builtins import
        self.gen_import('builtins', -1)

        # Generate ops.
        for node in mypyfile.defs:
            self.accept(node)
        self.maybe_add_implicit_return()

        # Generate special function representing module top level.
        blocks, env, ret_type, _ = self.leave()
        sig = FuncSignature([], none_rprimitive)
        func_ir = FuncIR(FuncDecl(TOP_LEVEL_NAME, None, self.module_name, sig), blocks, env,
                         traceback_name="<module>")
        self.functions.append(func_ir)
    def handle_ext_method(self, cdef: ClassDef, fdef: FuncDef) -> None:
        # Perform the function of visit_method for methods inside extension classes.
        name = fdef.name
        class_ir = self.mapper.type_to_ir[cdef.info]
        func_ir, func_reg = self.gen_func_item(fdef, name, self.mapper.fdef_to_sig(fdef), cdef)
        self.functions.append(func_ir)

        if self.is_decorated(fdef):
            # Obtain the function name in order to construct the name of the helper function.
            _, _, name = fdef.fullname.rpartition('.')
            helper_name = decorator_helper_name(name)
            # Read the PyTypeObject representing the class, get the callable object
            # representing the non-decorated method
            typ = self.load_native_type_object(cdef.fullname)
            orig_func = self.py_get_attr(typ, helper_name, fdef.line)

            # Decorate the non-decorated method
            decorated_func = self.load_decorated_func(fdef, orig_func)

            # Set the callable object representing the decorated method as an attribute of the
            # extension class.
            self.primitive_op(py_setattr_op,
                              [typ, self.load_static_unicode(name), decorated_func],
                              fdef.line)

        if fdef.is_property:
            # If there is a property setter, it will be processed after the getter,
            # We populate the optional setter field with none for now.
            assert name not in class_ir.properties
            class_ir.properties[name] = (func_ir, None)

        elif fdef in self.prop_setters:
            # The respective property getter must have been processed already
            assert name in class_ir.properties
            getter_ir, _ = class_ir.properties[name]
            class_ir.properties[name] = (getter_ir, func_ir)

        class_ir.methods[func_ir.decl.name] = func_ir

        # If this overrides a parent class method with a different type, we need
        # to generate a glue method to mediate between them.
        for base in class_ir.mro[1:]:
            if (name in base.method_decls and name != '__init__'
                    and not is_same_method_signature(class_ir.method_decls[name].sig,
                                                     base.method_decls[name].sig)):
                # TODO: Support contravariant subtyping in the input argument for
                # property setters. Need to make a special glue method for handling this,
                # similar to gen_glue_property.
                f = self.gen_glue(base.method_decls[name].sig, func_ir, class_ir, base, fdef)
                class_ir.glue_methods[(base, name)] = f
                self.functions.append(f)

        # If the class allows interpreted children, create glue
        # methods that dispatch via the Python API. These will go in a
        # "shadow vtable" that will be assigned to interpreted
        # children.
        if class_ir.allow_interpreted_subclasses:
            f = self.gen_glue(
                func_ir.sig, func_ir, class_ir, class_ir, fdef, do_py_ops=True)
            class_ir.glue_methods[(class_ir, name)] = f
            self.functions.append(f)

    def handle_non_ext_method(
            self, non_ext: NonExtClassInfo, cdef: ClassDef, fdef: FuncDef) -> None:
        # Perform the function of visit_method for methods inside non-extension classes.
        name = fdef.name
        func_ir, func_reg = self.gen_func_item(fdef, name, self.mapper.fdef_to_sig(fdef), cdef)
        assert func_reg is not None
        self.functions.append(func_ir)

        if self.is_decorated(fdef):
            # The undecorated method is a generated callable class
            orig_func = func_reg
            func_reg = self.load_decorated_func(fdef, orig_func)

        # TODO: Support property setters in non-extension classes
        if fdef.is_property:
            prop = self.load_module_attr_by_fullname('builtins.property', fdef.line)
            func_reg = self.py_call(prop, [func_reg], fdef.line)

        elif self.mapper.func_to_decl[fdef].kind == FUNC_CLASSMETHOD:
            cls_meth = self.load_module_attr_by_fullname('builtins.classmethod', fdef.line)
            func_reg = self.py_call(cls_meth, [func_reg], fdef.line)

        elif self.mapper.func_to_decl[fdef].kind == FUNC_STATICMETHOD:
            stat_meth = self.load_module_attr_by_fullname('builtins.staticmethod', fdef.line)
            func_reg = self.py_call(stat_meth, [func_reg], fdef.line)

        self.add_to_non_ext_dict(non_ext, name, func_reg, fdef.line)

    def visit_method(
            self, cdef: ClassDef, non_ext: Optional[NonExtClassInfo], fdef: FuncDef) -> None:
        # Dispatch to the extension or non-extension handler for a method.
        if non_ext:
            self.handle_non_ext_method(non_ext, cdef, fdef)
        else:
            self.handle_ext_method(cdef, fdef)
""" return (isinstance(e, (StrExpr, BytesExpr, IntExpr, FloatExpr)) or (isinstance(e, UnaryExpr) and e.op == '-' and isinstance(e.expr, (IntExpr, FloatExpr))) or (isinstance(e, TupleExpr) and all(self.is_constant(e) for e in e.items)) or (isinstance(e, RefExpr) and e.kind == GDEF and (e.fullname in ('builtins.True', 'builtins.False', 'builtins.None') or (isinstance(e.node, Var) and e.node.is_final)))) def generate_attr_defaults(self, cdef: ClassDef) -> None: """Generate an initialization method for default attr values (from class vars)""" cls = self.mapper.type_to_ir[cdef.info] if cls.builtin_base: return # Pull out all assignments in classes in the mro so we can initialize them # TODO: Support nested statements default_assignments = [] for info in reversed(cdef.info.mro): if info not in self.mapper.type_to_ir: continue for stmt in info.defn.defs.body: if (isinstance(stmt, AssignmentStmt) and isinstance(stmt.lvalues[0], NameExpr) and not is_class_var(stmt.lvalues[0]) and not isinstance(stmt.rvalue, TempNode)): if stmt.lvalues[0].name == '__slots__': continue # Skip type annotated assignments in dataclasses if is_dataclass(cdef) and stmt.type: continue default_assignments.append(stmt) if not default_assignments: return self.enter(FuncInfo()) self.ret_types[-1] = bool_rprimitive rt_args = (RuntimeArg(SELF_NAME, RInstance(cls)),) self_var = self.read(self.add_self_to_env(cls), -1) for stmt in default_assignments: lvalue = stmt.lvalues[0] assert isinstance(lvalue, NameExpr) if not stmt.is_final_def and not self.is_constant(stmt.rvalue): self.warning('Unsupported default attribute value', stmt.rvalue.line) # If the attribute is initialized to None and type isn't optional, # don't initialize it to anything. 
attr_type = cls.attr_type(lvalue.name) if isinstance(stmt.rvalue, RefExpr) and stmt.rvalue.fullname == 'builtins.None': if (not is_optional_type(attr_type) and not is_object_rprimitive(attr_type) and not is_none_rprimitive(attr_type)): continue val = self.coerce(self.accept(stmt.rvalue), attr_type, stmt.line) self.add(SetAttr(self_var, lvalue.name, val, -1)) self.add(Return(self.primitive_op(true_op, [], -1))) blocks, env, ret_type, _ = self.leave() ir = FuncIR( FuncDecl('__mypyc_defaults_setup', cls.name, self.module_name, FuncSignature(rt_args, ret_type)), blocks, env) self.functions.append(ir) cls.methods[ir.name] = ir def finish_non_ext_dict(self, non_ext: NonExtClassInfo, line: int) -> None: # Add __annotations__ to the class dict. self.primitive_op(dict_set_item_op, [non_ext.dict, self.load_static_unicode('__annotations__'), non_ext.anns], -1) # We add a __doc__ attribute so if the non-extension class is decorated with the # dataclass decorator, dataclass will not try to look for __text_signature__. # https://github.com/python/cpython/blob/3.7/Lib/dataclasses.py#L957 filler_doc_str = 'mypyc filler docstring' self.add_to_non_ext_dict( non_ext, '__doc__', self.load_static_unicode(filler_doc_str), line) self.add_to_non_ext_dict( non_ext, '__module__', self.load_static_unicode(self.module_name), line) def load_non_ext_class(self, ir: ClassIR, non_ext: NonExtClassInfo, line: int) -> Value: cls_name = self.load_static_unicode(ir.name) self.finish_non_ext_dict(non_ext, line) class_type_obj = self.py_call(non_ext.metaclass, [cls_name, non_ext.bases, non_ext.dict], line) return class_type_obj def load_decorated_class(self, cdef: ClassDef, type_obj: Value) -> Value: """ Given a decorated ClassDef and a register containing a non-extension representation of the ClassDef created via the type constructor, applies the corresponding decorator functions on that decorated ClassDef and returns a register containing the decorated ClassDef. 
""" decorators = cdef.decorators dec_class = type_obj for d in reversed(decorators): decorator = d.accept(self) assert isinstance(decorator, Value) dec_class = self.py_call(decorator, [dec_class], dec_class.line) return dec_class def populate_non_ext_bases(self, cdef: ClassDef) -> Value: """ Populate the base-class tuple passed to the metaclass constructor for non-extension classes. """ ir = self.mapper.type_to_ir[cdef.info] bases = [] for cls in cdef.info.mro[1:]: if cls.fullname == 'builtins.object': continue # Add the current class to the base classes list of concrete subclasses if cls in self.mapper.type_to_ir: base_ir = self.mapper.type_to_ir[cls] if base_ir.children is not None: base_ir.children.append(ir) base = self.load_global_str(cls.name, cdef.line) bases.append(base) return self.primitive_op(new_tuple_op, bases, cdef.line) def add_to_non_ext_dict(self, non_ext: NonExtClassInfo, key: str, val: Value, line: int) -> None: # Add an attribute entry into the class dict of a non-extension class. key_unicode = self.load_static_unicode(key) self.primitive_op(dict_set_item_op, [non_ext.dict, key_unicode, val], line) def add_non_ext_class_attr(self, non_ext: NonExtClassInfo, lvalue: NameExpr, stmt: AssignmentStmt, cdef: ClassDef, attr_to_cache: List[Lvalue]) -> None: """ Add a class attribute to __annotations__ of a non-extension class. If the attribute is assigned to a value, it is also added to __dict__. """ # We populate __annotations__ because dataclasses uses it to determine # which attributes to compute on. # TODO: Maybe generate more precise types for annotations key = self.load_static_unicode(lvalue.name) typ = self.primitive_op(type_object_op, [], stmt.line) self.primitive_op(dict_set_item_op, [non_ext.anns, key, typ], stmt.line) # Only add the attribute to the __dict__ if the assignment is of the form: # x: type = value (don't add attributes of the form 'x: type' to the __dict__). 
if not isinstance(stmt.rvalue, TempNode): rvalue = self.accept(stmt.rvalue) self.add_to_non_ext_dict(non_ext, lvalue.name, rvalue, stmt.line) # We cache enum attributes to speed up enum attribute lookup since they # are final. if ( cdef.info.bases and cdef.info.bases[0].type.fullname == 'enum.Enum' # Skip "_order_", since Enum will remove it and lvalue.name != '_order_' ): attr_to_cache.append(lvalue) def find_non_ext_metaclass(self, cdef: ClassDef, bases: Value) -> Value: """Find the metaclass of a class from its defs and bases. """ if cdef.metaclass: declared_metaclass = self.accept(cdef.metaclass) else: declared_metaclass = self.primitive_op(type_object_op, [], cdef.line) return self.primitive_op(py_calc_meta_op, [declared_metaclass, bases], cdef.line) def setup_non_ext_dict(self, cdef: ClassDef, metaclass: Value, bases: Value) -> Value: """ Initialize the class dictionary for a non-extension class. This class dictionary is passed to the metaclass constructor. """ # Check if the metaclass defines a __prepare__ method, and if so, call it. 
    def setup_non_ext_dict(self, cdef: ClassDef, metaclass: Value, bases: Value) -> Value:
        """
        Initialize the class dictionary for a non-extension class. This class dictionary
        is passed to the metaclass constructor.
        """

        # Check if the metaclass defines a __prepare__ method, and if so, call it.
        has_prepare = self.primitive_op(py_hasattr_op,
                                        [metaclass,
                                         self.load_static_unicode('__prepare__')], cdef.line)

        non_ext_dict = self.alloc_temp(dict_rprimitive)

        true_block, false_block, exit_block, = BasicBlock(), BasicBlock(), BasicBlock()
        self.add_bool_branch(has_prepare, true_block, false_block)

        # metaclass.__prepare__(name, bases) supplies the namespace.
        self.activate_block(true_block)
        cls_name = self.load_static_unicode(cdef.name)
        prepare_meth = self.py_get_attr(metaclass, '__prepare__', cdef.line)
        prepare_dict = self.py_call(prepare_meth, [cls_name, bases], cdef.line)
        self.assign(non_ext_dict, prepare_dict, cdef.line)
        self.goto(exit_block)

        # No __prepare__: fall back to a plain empty dict.
        self.activate_block(false_block)
        self.assign(non_ext_dict, self.primitive_op(new_dict_op, [], cdef.line), cdef.line)
        self.goto(exit_block)
        self.activate_block(exit_block)

        return non_ext_dict

    def cache_class_attrs(self, attrs_to_cache: List[Lvalue], cdef: ClassDef) -> None:
        """Add class attributes to be cached to the global cache"""
        typ = self.load_native_type_object(cdef.fullname)
        for lval in attrs_to_cache:
            assert isinstance(lval, NameExpr)
            rval = self.py_get_attr(typ, lval.name, cdef.line)
            self.init_final_static(lval, rval, cdef.name)

    def dataclass_non_ext_info(self, cdef: ClassDef) -> Optional[NonExtClassInfo]:
        """Set up a NonExtClassInfo to track dataclass attributes.

        In addition to setting up a normal extension class for dataclasses,
        we also collect its class attributes like a non-extension class so
        that we can hand them to the dataclass decorator.
        """
        if is_dataclass(cdef):
            return NonExtClassInfo(
                self.primitive_op(new_dict_op, [], cdef.line),
                self.add(TupleSet([], cdef.line)),
                self.primitive_op(new_dict_op, [], cdef.line),
                self.primitive_op(type_object_op, [], cdef.line),
            )
        else:
            return None

    def dataclass_finalize(
            self, cdef: ClassDef, non_ext: NonExtClassInfo, type_obj: Value) -> None:
        """Generate code to finish instantiating a dataclass.

        This works by replacing all of the attributes on the class
        (which will be descriptors) with whatever they would be in a
        non-extension class, calling dataclass, then switching them back.

        The resulting class is an extension class and instances of it do not
        have a __dict__ (unless something else requires it).
        All methods written explicitly in the source are compiled and
        may be called through the vtable while the methods generated
        by dataclasses are interpreted and may not be.

        (If we just called dataclass without doing this, it would think that all
        of the descriptors for our attributes are default values and generate an
        incorrect constructor. We need to do the switch so that dataclass gets
        the appropriate defaults.)
        """
        self.finish_non_ext_dict(non_ext, cdef.line)
        dec = self.accept(next(d for d in cdef.decorators if is_dataclass_decorator(d)))
        self.primitive_op(
            dataclass_sleight_of_hand, [dec, type_obj, non_ext.dict, non_ext.anns],
            cdef.line)
    def visit_class_def(self, cdef: ClassDef) -> None:
        """Generate IR for a class definition (extension or non-extension)."""
        ir = self.mapper.type_to_ir[cdef.info]

        # We do this check here because the base field of parent
        # classes aren't necessarily populated yet at
        # prepare_class_def time.
        if any(ir.base_mro[i].base != ir.base_mro[i + 1] for i in range(len(ir.base_mro) - 1)):
            self.error("Non-trait MRO must be linear", cdef.line)

        if ir.allow_interpreted_subclasses:
            for parent in ir.mro:
                if not parent.allow_interpreted_subclasses:
                    self.error(
                        'Base class "{}" does not allow interpreted subclasses'.format(
                            parent.fullname), cdef.line)

        # Currently, we only create non-extension classes for classes that are
        # decorated or inherit from Enum. Classes decorated with @trait do not
        # apply here, and are handled in a different way.
        if ir.is_ext_class:
            # If the class is not decorated, generate an extension class for it.
            type_obj = self.allocate_class(cdef)  # type: Optional[Value]
            non_ext = None  # type: Optional[NonExtClassInfo]
            dataclass_non_ext = self.dataclass_non_ext_info(cdef)
        else:
            non_ext_bases = self.populate_non_ext_bases(cdef)
            non_ext_metaclass = self.find_non_ext_metaclass(cdef, non_ext_bases)
            non_ext_dict = self.setup_non_ext_dict(cdef, non_ext_metaclass, non_ext_bases)
            # We populate __annotations__ for non-extension classes
            # because dataclasses uses it to determine which attributes to compute on.
            # TODO: Maybe generate more precise types for annotations
            non_ext_anns = self.primitive_op(new_dict_op, [], cdef.line)
            non_ext = NonExtClassInfo(non_ext_dict, non_ext_bases, non_ext_anns,
                                      non_ext_metaclass)
            dataclass_non_ext = None
            type_obj = None

        attrs_to_cache = []  # type: List[Lvalue]

        for stmt in cdef.defs.body:
            if isinstance(stmt, OverloadedFuncDef) and stmt.is_property:
                if not ir.is_ext_class:
                    # properties with both getters and setters in non_extension
                    # classes not supported
                    self.error("Property setters not supported in non-extension classes",
                               stmt.line)
                for item in stmt.items:
                    with self.catch_errors(stmt.line):
                        self.visit_method(cdef, non_ext, get_func_def(item))
            elif isinstance(stmt, (FuncDef, Decorator, OverloadedFuncDef)):
                # Ignore plugin generated methods (since they have no
                # bodies to compile and will need to have the bodies
                # provided by some other mechanism.)
                if cdef.info.names[stmt.name].plugin_generated:
                    continue
                with self.catch_errors(stmt.line):
                    self.visit_method(cdef, non_ext, get_func_def(stmt))
            elif isinstance(stmt, PassStmt):
                continue
            elif isinstance(stmt, AssignmentStmt):
                if len(stmt.lvalues) != 1:
                    self.error("Multiple assignment in class bodies not supported", stmt.line)
                    continue
                lvalue = stmt.lvalues[0]
                if not isinstance(lvalue, NameExpr):
                    self.error("Only assignment to variables is supported in class bodies",
                               stmt.line)
                    continue

                # We want to collect class variables in a dictionary for both real
                # non-extension classes and fake dataclass ones.
                var_non_ext = non_ext or dataclass_non_ext
                if var_non_ext:
                    self.add_non_ext_class_attr(var_non_ext, lvalue, stmt, cdef, attrs_to_cache)
                    if non_ext:
                        continue

                # Variable declaration with no body
                if isinstance(stmt.rvalue, TempNode):
                    continue

                # Only treat marked class variables as class variables.
                if not (is_class_var(lvalue) or stmt.is_final_def):
                    continue

                typ = self.load_native_type_object(cdef.fullname)
                value = self.accept(stmt.rvalue)
                self.primitive_op(
                    py_setattr_op, [typ, self.load_static_unicode(lvalue.name), value], stmt.line)
                if self.non_function_scope() and stmt.is_final_def:
                    self.init_final_static(lvalue, value, cdef.name)
            elif isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr):
                # Docstring. Ignore
                pass
            else:
                self.error("Unsupported statement in class body", stmt.line)

        if not non_ext:  # That is, an extension class
            self.generate_attr_defaults(cdef)
            self.create_ne_from_eq(cdef)
            if dataclass_non_ext:
                assert type_obj
                self.dataclass_finalize(cdef, dataclass_non_ext, type_obj)
        else:
            # Dynamically create the class via the type constructor
            non_ext_class = self.load_non_ext_class(ir, non_ext, cdef.line)
            non_ext_class = self.load_decorated_class(cdef, non_ext_class)

            # Save the decorated class
            self.add(InitStatic(non_ext_class, cdef.name, self.module_name, NAMESPACE_TYPE))

            # Add the non-extension class to the dict
            self.primitive_op(dict_set_item_op,
                              [self.load_globals_dict(), self.load_static_unicode(cdef.name),
                               non_ext_class], cdef.line)

            # Cache any cachable class attributes
            self.cache_class_attrs(attrs_to_cache, cdef)

            # Set this attribute back to None until the next non-extension class is visited.
            self.non_ext_info = None

    def create_mypyc_attrs_tuple(self, ir: ClassIR, line: int) -> Value:
        # Build the tuple of attribute names stored as '__mypyc_attrs__'.
        attrs = [name for ancestor in ir.mro for name in ancestor.attributes]
        if ir.inherits_python:
            attrs.append('__dict__')
        return self.primitive_op(new_tuple_op,
                                 [self.load_static_unicode(attr) for attr in attrs],
                                 line)

    def allocate_class(self, cdef: ClassDef) -> Value:
        """Create the runtime type object for an extension class from its template."""
        # OK AND NOW THE FUN PART
        base_exprs = cdef.base_type_exprs + cdef.removed_base_type_exprs
        if base_exprs:
            bases = [self.accept(x) for x in base_exprs]
            tp_bases = self.primitive_op(new_tuple_op, bases, cdef.line)
        else:
            tp_bases = self.add(LoadErrorValue(object_rprimitive, is_borrowed=True))
        modname = self.load_static_unicode(self.module_name)
        template = self.add(LoadStatic(object_rprimitive, cdef.name + "_template",
                                       self.module_name, NAMESPACE_TYPE))
        # Create the class
        tp = self.primitive_op(pytype_from_template_op,
                               [template, tp_bases, modname], cdef.line)
        # Immediately fix up the trait vtables, before doing anything with the class.
        ir = self.mapper.type_to_ir[cdef.info]
        if not ir.is_trait and not ir.builtin_base:
            self.add(Call(
                FuncDecl(cdef.name + '_trait_vtable_setup',
                         None, self.module_name,
                         FuncSignature([], bool_rprimitive)), [], -1))

        # Populate a '__mypyc_attrs__' field containing the list of attrs
        self.primitive_op(py_setattr_op, [
            tp, self.load_static_unicode('__mypyc_attrs__'),
            self.create_mypyc_attrs_tuple(self.mapper.type_to_ir[cdef.info], cdef.line)],
            cdef.line)

        # Save the class
        self.add(InitStatic(tp, cdef.name, self.module_name, NAMESPACE_TYPE))

        # Add it to the dict
        self.primitive_op(dict_set_item_op,
                          [self.load_globals_dict(), self.load_static_unicode(cdef.name),
                           tp], cdef.line)

        return tp
    def gen_import(self, id: str, line: int) -> None:
        """Generate ops that import module 'id' if it isn't already loaded."""
        self.imports[id] = None

        needs_import, out = BasicBlock(), BasicBlock()
        first_load = self.load_module(id)
        comparison = self.binary_op(first_load, self.none_object(), 'is not', line)
        self.add_bool_branch(comparison, out, needs_import)

        self.activate_block(needs_import)
        value = self.primitive_op(import_op, [self.load_static_unicode(id)], line)
        self.add(InitStatic(value, id, namespace=NAMESPACE_MODULE))
        self.goto_and_activate(out)

    def visit_import(self, node: Import) -> None:
        if node.is_mypy_only:
            return
        globals = self.load_globals_dict()
        for node_id, as_name in node.ids:
            self.gen_import(node_id, node.line)

            # Update the globals dict with the appropriate module:
            # * For 'import foo.bar as baz' we add 'foo.bar' with the name 'baz'
            # * For 'import foo.bar' we add 'foo' with the name 'foo'
            # Typically we then ignore these entries and access things directly
            # via the module static, but we will use the globals version for modules
            # that mypy couldn't find, since it doesn't analyze module references
            # from those properly.

            # Miscompiling imports inside of functions, like below in import from.
            if as_name:
                name = as_name
                base = node_id
            else:
                base = name = node_id.split('.')[0]

            # Python 3.7 has a nice 'PyImport_GetModule' function that we can't use :(
            mod_dict = self.primitive_op(get_module_dict_op, [], node.line)
            obj = self.primitive_op(dict_get_item_op,
                                    [mod_dict, self.load_static_unicode(base)], node.line)
            self.translate_special_method_call(
                globals, '__setitem__', [self.load_static_unicode(name), obj],
                result_type=None, line=node.line)

    def visit_import_from(self, node: ImportFrom) -> None:
        if node.is_mypy_only:
            return

        module_state = self.graph[self.module_name]
        if module_state.ancestors is not None and module_state.ancestors:
            module_package = module_state.ancestors[0]
        else:
            module_package = ''

        # Resolve relative imports ('from . import x') against our package.
        id = importlib.util.resolve_name('.' * node.relative + node.id, module_package)

        self.gen_import(id, node.line)
        module = self.load_module(id)

        # Copy everything into our module's dict.
        # Note that we miscompile import from inside of functions here,
        # since that case *shouldn't* load it into the globals dict.
        # This probably doesn't matter much and the code runs basically right.
        globals = self.load_globals_dict()
        for name, maybe_as_name in node.names:
            # If one of the things we are importing is a module,
            # import it as a module also.
            fullname = id + '.' + name
            if fullname in self.graph or fullname in module_state.suppressed:
                self.gen_import(fullname, node.line)

            as_name = maybe_as_name or name
            obj = self.py_get_attr(module, name, node.line)
            self.translate_special_method_call(
                globals, '__setitem__', [self.load_static_unicode(as_name), obj],
                result_type=None, line=node.line)

    def visit_import_all(self, node: ImportAll) -> None:
        if node.is_mypy_only:
            return
        self.gen_import(node.id, node.line)

    def gen_glue(self, sig: FuncSignature, target: FuncIR,
                 cls: ClassIR, base: ClassIR, fdef: FuncItem,
                 *,
                 do_py_ops: bool = False
                 ) -> FuncIR:
        """Generate glue methods that mediate between different method types in subclasses.

        Works on both properties and methods. See gen_glue_methods below for more details.

        If do_py_ops is True, then the glue methods should use generic
        C API operations instead of direct calls, to enable generating
        "shadow" glue methods that work with interpreted subclasses.
        """
        if fdef.is_property:
            return self.gen_glue_property(sig, target, cls, base, fdef.line, do_py_ops)
        else:
            return self.gen_glue_method(sig, target, cls, base, fdef.line, do_py_ops)
    def gen_glue_method(self, sig: FuncSignature, target: FuncIR,
                        cls: ClassIR, base: ClassIR, line: int,
                        do_pycall: bool,
                        ) -> FuncIR:
        """Generate glue methods that mediate between different method types in subclasses.

        For example, if we have:

        class A:
            def f(self, x: int) -> object: ...

        then it is totally permissible to have a subclass

        class B(A):
            def f(self, x: object) -> int: ...

        since '(object) -> int' is a subtype of '(int) -> object' by the usual
        contra/co-variant function subtyping rules.

        The trickiness here is that int and object have different
        runtime representations in mypyc, so A.f and B.f have
        different signatures at the native C level. To deal with this,
        we need to generate glue methods that mediate between the
        different versions by coercing the arguments and return
        values.

        If do_pycall is True, then make the call using the C API
        instead of a native call.
        """
        self.enter(FuncInfo())
        self.ret_types[-1] = sig.ret_type

        rt_args = list(sig.args)
        if target.decl.kind == FUNC_NORMAL:
            rt_args[0] = RuntimeArg(sig.args[0].name, RInstance(cls))

        # The environment operates on Vars, so we make some up
        fake_vars = [(Var(arg.name), arg.type) for arg in rt_args]
        args = [self.read(self.environment.add_local_reg(var, type, is_arg=True), line)
                for var, type in fake_vars]
        arg_names = [arg.name for arg in rt_args]
        arg_kinds = [concrete_arg_kind(arg.kind) for arg in rt_args]

        if do_pycall:
            retval = self.py_method_call(
                args[0], target.name, args[1:], line, arg_kinds[1:], arg_names[1:])
        else:
            retval = self.call(target.decl, args, arg_kinds, arg_names, line)
        retval = self.coerce(retval, sig.ret_type, line)
        self.add(Return(retval))

        blocks, env, ret_type, _ = self.leave()
        return FuncIR(
            FuncDecl(target.name + '__' + base.name + '_glue',
                     cls.name, self.module_name,
                     FuncSignature(rt_args, ret_type),
                     target.decl.kind),
            blocks, env)

    def gen_glue_property(self, sig: FuncSignature, target: FuncIR, cls: ClassIR, base: ClassIR,
                          line: int,
                          do_pygetattr: bool) -> FuncIR:
        """Generate glue methods for properties that mediate between different subclass types.

        Similarly to methods, properties of derived types can be covariantly subtyped. Thus,
        properties also require glue. However, this only requires the return type to change.
        Further, instead of a method call, an attribute get is performed.

        If do_pygetattr is True, then get the attribute using the C
        API instead of a native call.
        """
        self.enter(FuncInfo())

        rt_arg = RuntimeArg(SELF_NAME, RInstance(cls))
        arg = self.read(self.add_self_to_env(cls), line)
        self.ret_types[-1] = sig.ret_type
        if do_pygetattr:
            retval = self.py_get_attr(arg, target.name, line)
        else:
            retval = self.add(GetAttr(arg, target.name, line))
        retbox = self.coerce(retval, sig.ret_type, line)
        self.add(Return(retbox))

        blocks, env, return_type, _ = self.leave()
        return FuncIR(
            FuncDecl(target.name + '__' + base.name + '_glue',
                     cls.name, self.module_name, FuncSignature([rt_arg], return_type)),
            blocks, env)

    def assign_if_null(self, target: AssignmentTargetRegister,
                       get_val: Callable[[], Value], line: int) -> None:
        """Generate blocks that assign get_val() to a register if it holds a NULL
        (error) value, leaving a non-error value untouched."""
        error_block, body_block = BasicBlock(), BasicBlock()
        self.add(Branch(target.register, error_block, body_block, Branch.IS_ERROR))
        self.activate_block(error_block)
        self.add(Assign(target.register, self.coerce(get_val(), target.register.type, line)))
        self.goto(body_block)
        self.activate_block(body_block)
""" self.enter(FuncInfo()) rt_args = (RuntimeArg("self", RInstance(cls)), RuntimeArg("rhs", object_rprimitive)) # The environment operates on Vars, so we make some up fake_vars = [(Var(arg.name), arg.type) for arg in rt_args] args = [self.read(self.environment.add_local_reg(var, type, is_arg=True), line) for var, type in fake_vars] # type: List[Value] self.ret_types[-1] = object_rprimitive # If __eq__ returns NotImplemented, then __ne__ should also not_implemented_block, regular_block = BasicBlock(), BasicBlock() eqval = self.add(MethodCall(args[0], '__eq__', [args[1]], line)) not_implemented = self.primitive_op(not_implemented_op, [], line) self.add(Branch( self.binary_op(eqval, not_implemented, 'is', line), not_implemented_block, regular_block, Branch.BOOL_EXPR)) self.activate_block(regular_block) retval = self.coerce(self.unary_op(eqval, 'not', line), object_rprimitive, line) self.add(Return(retval)) self.activate_block(not_implemented_block) self.add(Return(not_implemented)) blocks, env, ret_type, _ = self.leave() return FuncIR( FuncDecl('__ne__', cls.name, self.module_name, FuncSignature(rt_args, ret_type)), blocks, env) def create_ne_from_eq(self, cdef: ClassDef) -> None: cls = self.mapper.type_to_ir[cdef.info] if cls.has_method('__eq__') and not cls.has_method('__ne__'): f = self.gen_glue_ne_method(cls, cdef.line) cls.method_decls['__ne__'] = f.decl cls.methods['__ne__'] = f self.functions.append(f) def calculate_arg_defaults(self, fn_info: FuncInfo, env: Environment, func_reg: Optional[Value]) -> None: """Calculate default argument values and store them. They are stored in statics for top level functions and in the function objects for nested functions (while constants are still stored computed on demand). 
""" fitem = fn_info.fitem for arg in fitem.arguments: # Constant values don't get stored but just recomputed if arg.initializer and not self.is_constant(arg.initializer): value = self.coerce(self.accept(arg.initializer), env.lookup(arg.variable).type, arg.line) if not fn_info.is_nested: name = fitem.fullname + '.' + arg.variable.name self.add(InitStatic(value, name, self.module_name)) else: assert func_reg is not None self.add(SetAttr(func_reg, arg.variable.name, value, arg.line)) def gen_arg_defaults(self) -> None: """Generate blocks for arguments that have default values. If the passed value is an error value, then assign the default value to the argument. """ fitem = self.fn_info.fitem for arg in fitem.arguments: if arg.initializer: target = self.environment.lookup(arg.variable) def get_default() -> Value: assert arg.initializer is not None # If it is constant, don't bother storing it if self.is_constant(arg.initializer): return self.accept(arg.initializer) # Because gen_arg_defaults runs before calculate_arg_defaults, we # add the static/attribute to final_names/the class here. elif not self.fn_info.is_nested: name = fitem.fullname + '.' + arg.variable.name self.final_names.append((name, target.type)) return self.add(LoadStatic(target.type, name, self.module_name)) else: name = arg.variable.name self.fn_info.callable_class.ir.attributes[name] = target.type return self.add( GetAttr(self.fn_info.callable_class.self_reg, name, arg.line)) assert isinstance(target, AssignmentTargetRegister) self.assign_if_null(target, get_default, arg.initializer.line) def gen_func_item(self, fitem: FuncItem, name: str, sig: FuncSignature, cdef: Optional[ClassDef] = None, ) -> Tuple[FuncIR, Optional[Value]]: # TODO: do something about abstract methods. """Generates and returns the FuncIR for a given FuncDef. If the given FuncItem is a nested function, then we generate a callable class representing the function and use that instead of the actual function. 
        If the given FuncItem contains a nested function, then we generate an
        environment class so that inner nested functions can access the
        environment of the given FuncDef.

        Consider the following nested function.

            def a() -> None:
                def b() -> None:
                    def c() -> None:
                        return None
                    return None
                return None

        The classes generated would look something like the following.

                    has pointer to        +-------+
            +-------------------------->  | a_env |
            |                             +-------+
            |                                 ^
            |                                 | has pointer to
        +-------+     associated with     +-------+
        | b_obj |   ------------------->  | b_env |
        +-------+                         +-------+
                                              ^
                                              |
        +-------+      has pointer to         |
        | c_obj |   --------------------------+
        +-------+
        """
        func_reg = None  # type: Optional[Value]

        # We treat lambdas as always being nested because we always generate
        # a class for lambdas, no matter where they are. (It would probably also
        # work to special case toplevel lambdas and generate a non-class function.)
        is_nested = fitem in self.nested_fitems or isinstance(fitem, LambdaExpr)
        contains_nested = fitem in self.encapsulating_funcs.keys()
        is_decorated = fitem in self.fdefs_to_decorators
        in_non_ext = False
        class_name = None
        if cdef:
            ir = self.mapper.type_to_ir[cdef.info]
            in_non_ext = not ir.is_ext_class
            class_name = cdef.name

        self.enter(FuncInfo(fitem, name, class_name, self.gen_func_ns(),
                            is_nested, contains_nested, is_decorated, in_non_ext))

        # Functions that contain nested functions need an environment class to store variables that
        # are free in their nested functions. Generator functions need an environment class to
        # store a variable denoting the next instruction to be executed when the __next__ function
        # is called, along with all the variables inside the function itself.
        if self.fn_info.contains_nested or self.fn_info.is_generator:
            self.setup_env_class()

        if self.fn_info.is_nested or self.fn_info.in_non_ext:
            self.setup_callable_class()

        if self.fn_info.is_generator:
            # Do a first-pass and generate a function that just returns a generator object.
            self.gen_generator_func()
            blocks, env, ret_type, fn_info = self.leave()
            func_ir, func_reg = self.gen_func_ir(blocks, sig, env, fn_info, cdef)

            # Re-enter the FuncItem and visit the body of the function this time.
            self.enter(fn_info)
            self.setup_env_for_generator_class()
            self.load_outer_envs(self.fn_info.generator_class)
            if self.fn_info.is_nested and isinstance(fitem, FuncDef):
                self.setup_func_for_recursive_call(fitem, self.fn_info.generator_class)
            self.create_switch_for_generator_class()
            self.add_raise_exception_blocks_to_generator_class(fitem.line)
        else:
            self.load_env_registers()
            self.gen_arg_defaults()

        if self.fn_info.contains_nested and not self.fn_info.is_generator:
            self.finalize_env_class()

        self.ret_types[-1] = sig.ret_type

        # Add all variables and functions that are declared/defined within this
        # function and are referenced in functions nested within this one to this
        # function's environment class so the nested functions can reference
        # them even if they are declared after the nested function's definition.
        # Note that this is done before visiting the body of this function.
        env_for_func = self.fn_info  # type: Union[FuncInfo, ImplicitClass]
        if self.fn_info.is_generator:
            env_for_func = self.fn_info.generator_class
        elif self.fn_info.is_nested or self.fn_info.in_non_ext:
            env_for_func = self.fn_info.callable_class

        if self.fn_info.fitem in self.free_variables:
            # Sort the variables to keep things deterministic
            for var in sorted(self.free_variables[self.fn_info.fitem], key=lambda x: x.name):
                if isinstance(var, Var):
                    rtype = self.type_to_rtype(var.type)
                    self.add_var_to_env_class(var, rtype, env_for_func, reassign=False)

        if self.fn_info.fitem in self.encapsulating_funcs:
            for nested_fn in self.encapsulating_funcs[self.fn_info.fitem]:
                if isinstance(nested_fn, FuncDef):
                    # The return type is 'object' instead of an RInstance of the
                    # callable class because differently defined functions with
                    # the same name and signature across conditional blocks
                    # will generate different callable classes, so the callable
                    # class that gets instantiated must be generic.
                    self.add_var_to_env_class(nested_fn, object_rprimitive,
                                              env_for_func, reassign=False)

        self.accept(fitem.body)
        self.maybe_add_implicit_return()

        if self.fn_info.is_generator:
            self.populate_switch_for_generator_class()

        blocks, env, ret_type, fn_info = self.leave()

        if fn_info.is_generator:
            # For generators, the body just compiled becomes the helper that the
            # generator-class special methods (__next__, send, throw, ...) delegate to;
            # func_ir/func_reg were already produced in the first pass above.
            helper_fn_decl = self.add_helper_to_generator_class(blocks, sig, env, fn_info)
            self.add_next_to_generator_class(fn_info, helper_fn_decl, sig)
            self.add_send_to_generator_class(fn_info, helper_fn_decl, sig)
            self.add_iter_to_generator_class(fn_info)
            self.add_throw_to_generator_class(fn_info, helper_fn_decl, sig)
            self.add_close_to_generator_class(fn_info)
            if fitem.is_coroutine:
                self.add_await_to_generator_class(fn_info)

        else:
            func_ir, func_reg = self.gen_func_ir(blocks, sig, env, fn_info, cdef)

        self.calculate_arg_defaults(fn_info, env, func_reg)

        return (func_ir, func_reg)

    def gen_func_ir(self,
                    blocks: List[BasicBlock],
                    sig: FuncSignature,
                    env: Environment,
                    fn_info: FuncInfo,
                    cdef: Optional[ClassDef]) -> Tuple[FuncIR,
                                                       Optional[Value]]:
        """Generates the FuncIR for a function given the blocks, environment, and function info of
        a particular function and returns it. If the function is nested, also returns the register
        containing the instance of the corresponding callable class.
        """
        func_reg = None  # type: Optional[Value]
        if fn_info.is_nested or fn_info.in_non_ext:
            func_ir = self.add_call_to_callable_class(blocks, sig, env, fn_info)
            self.add_get_to_callable_class(fn_info)
            func_reg = self.instantiate_callable_class(fn_info)
        else:
            assert isinstance(fn_info.fitem, FuncDef)
            func_decl = self.mapper.func_to_decl[fn_info.fitem]
            if fn_info.is_decorated:
                # Decorated functions get a fresh decl under the (possibly class-qualified)
                # name so the decorated wrapper can shadow the original.
                class_name = None if cdef is None else cdef.name
                func_decl = FuncDecl(fn_info.name, class_name, self.module_name, sig,
                                     func_decl.kind,
                                     func_decl.is_prop_getter, func_decl.is_prop_setter)
                func_ir = FuncIR(func_decl, blocks, env, fn_info.fitem.line,
                                 traceback_name=fn_info.fitem.name)
            else:
                func_ir = FuncIR(func_decl, blocks, env,
                                 fn_info.fitem.line, traceback_name=fn_info.fitem.name)
        return (func_ir, func_reg)

    def load_decorated_func(self, fdef: FuncDef, orig_func_reg: Value) -> Value:
        """
        Given a decorated FuncDef and the register containing an instance of the callable class
        representing that FuncDef, applies the corresponding decorator functions on that decorated
        FuncDef and returns a register containing an instance of the callable class representing
        the decorated function.
        """
        if not self.is_decorated(fdef):
            # If there are no decorators associated with the function, then just return the
            # original function.
            return orig_func_reg

        decorators = self.fdefs_to_decorators[fdef]
        func_reg = orig_func_reg
        # Apply decorators innermost-first, as Python does.
        for d in reversed(decorators):
            decorator = d.accept(self)
            assert isinstance(decorator, Value)
            func_reg = self.py_call(decorator, [func_reg], func_reg.line)
        return func_reg

    def maybe_add_implicit_return(self) -> None:
        # Implicit 'return None' is only valid when None is a valid return value.
        if is_none_rprimitive(self.ret_types[-1]) or is_object_rprimitive(self.ret_types[-1]):
            self.add_implicit_return()
        else:
            self.add_implicit_unreachable()

    def visit_func_def(self, fdef: FuncDef) -> None:
        func_ir, func_reg = self.gen_func_item(fdef, fdef.name, self.mapper.fdef_to_sig(fdef))

        # If the function that was visited was a nested function, then either look it up in our
        # current environment or define it if it was not already defined.
        if func_reg:
            self.assign(self.get_func_target(fdef), func_reg, fdef.line)
        self.functions.append(func_ir)

    def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
        # Handle regular overload case
        assert o.impl
        self.accept(o.impl)

    def add_implicit_return(self) -> None:
        # Only emit the implicit return if the current block doesn't already
        # end in a control-flow op.
        block = self.blocks[-1][-1]
        if not block.ops or not isinstance(block.ops[-1], ControlOp):
            retval = self.coerce(self.none(), self.ret_types[-1], -1)
            self.nonlocal_control[-1].gen_return(self, retval, self.fn_info.fitem.line)

    def add_implicit_unreachable(self) -> None:
        block = self.blocks[-1][-1]
        if not block.ops or not isinstance(block.ops[-1], ControlOp):
            self.add(Unreachable())

    def visit_block(self, block: Block) -> None:
        if not block.is_unreachable:
            for stmt in block.body:
                self.accept(stmt)
        # Raise a RuntimeError if we hit a non-empty unreachable block.
        # Don't complain about empty unreachable blocks, since mypy inserts
        # those after `if MYPY`.
        elif block.body:
            self.add(RaiseStandardError(RaiseStandardError.RUNTIME_ERROR,
                                        'Reached allegedly unreachable code!',
                                        block.line))
            self.add(Unreachable())

    def visit_expression_stmt(self, stmt: ExpressionStmt) -> None:
        if isinstance(stmt.expr, StrExpr):
            # Docstring. Ignore
            return
        # ExpressionStmts do not need to be coerced like other Expressions.
        stmt.expr.accept(self)

    def visit_return_stmt(self, stmt: ReturnStmt) -> None:
        if stmt.expr:
            retval = self.accept(stmt.expr)
        else:
            retval = self.none()
        retval = self.coerce(retval, self.ret_types[-1], stmt.line)
        self.nonlocal_control[-1].gen_return(self, retval, stmt.line)

    def disallow_class_assignments(self, lvalues: List[Lvalue], line: int) -> None:
        # Some best-effort attempts to disallow assigning to class
        # variables that aren't marked ClassVar, since we blatantly
        # miscompile the interaction between instance and class
        # variables.
        for lvalue in lvalues:
            if (isinstance(lvalue, MemberExpr)
                    and isinstance(lvalue.expr, RefExpr)
                    and isinstance(lvalue.expr.node, TypeInfo)):
                var = lvalue.expr.node[lvalue.name].node
                if isinstance(var, Var) and not var.is_classvar:
                    self.error(
                        "Only class variables defined as ClassVar can be assigned to",
                        line)

    def non_function_scope(self) -> bool:
        # Currently the stack always has at least two items: dummy and top-level.
        return len(self.fn_infos) <= 2

    def init_final_static(self, lvalue: Lvalue, rvalue_reg: Value,
                          class_name: Optional[str] = None) -> None:
        """Store the value of a final definition in a static and record its name.

        Skipped when mypy already computed a constant final_value for the Var.
        """
        assert isinstance(lvalue, NameExpr)
        assert isinstance(lvalue.node, Var)
        if lvalue.node.final_value is None:
            if class_name is None:
                name = lvalue.name
            else:
                name = '{}.{}'.format(class_name, lvalue.name)
            assert name is not None, "Full name not set for variable"
            self.final_names.append((name, rvalue_reg.type))
            self.add(InitStatic(rvalue_reg, name, self.module_name))

    def load_final_static(self, fullname: str, typ: RType, line: int,
                          error_name: Optional[str] = None) -> Value:
        """Load a final name from a static, raising ValueError if it was never set."""
        if error_name is None:
            error_name = fullname
        ok_block, error_block = BasicBlock(), BasicBlock()
        split_name = split_target(self.graph, fullname)
        assert split_name is not None
        value = self.add(LoadStatic(typ, split_name[1], split_name[0], line=line))
        self.add(Branch(value, error_block, ok_block, Branch.IS_ERROR, rare=True))
        self.activate_block(error_block)
        self.add(RaiseStandardError(RaiseStandardError.VALUE_ERROR,
                                    'value for final name "{}" was not set'.format(error_name),
                                    line))
        self.add(Unreachable())
        self.activate_block(ok_block)
        return value

    def load_final_literal_value(self, val: Union[int, str, bytes, float, bool],
                                 line: int) -> Value:
        """Load value of a final name or class-level attribute."""
        if isinstance(val, bool):
            # Check bool first: bool is a subclass of int.
            if val:
                return self.primitive_op(true_op, [], line)
            else:
                return self.primitive_op(false_op, [], line)
        elif isinstance(val, int):
            # TODO: take care of negative integer initializers
            # (probably easier to fix this in mypy itself).
            if val > MAX_LITERAL_SHORT_INT:
                return self.load_static_int(val)
            return self.add(LoadInt(val))
        elif isinstance(val, float):
            return self.load_static_float(val)
        elif isinstance(val, str):
            return self.load_static_unicode(val)
        elif isinstance(val, bytes):
            return self.load_static_bytes(val)
        else:
            assert False, "Unsupported final literal value"

    def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None:
        assert len(stmt.lvalues) >= 1

        self.disallow_class_assignments(stmt.lvalues, stmt.line)

        lvalue = stmt.lvalues[0]
        if stmt.type and isinstance(stmt.rvalue, TempNode):
            # This is actually a variable annotation without initializer. Don't generate
            # an assignment but we need to call get_assignment_target since it adds a
            # name binding as a side effect.
            self.get_assignment_target(lvalue, stmt.line)
            return

        line = stmt.rvalue.line
        rvalue_reg = self.accept(stmt.rvalue)
        if self.non_function_scope() and stmt.is_final_def:
            self.init_final_static(lvalue, rvalue_reg)
        for lvalue in stmt.lvalues:
            target = self.get_assignment_target(lvalue)
            self.assign(target, rvalue_reg, line)

    def visit_operator_assignment_stmt(self, stmt: OperatorAssignmentStmt) -> None:
        """Operator assignment statement such as x += 1"""
        self.disallow_class_assignments([stmt.lvalue], stmt.line)
        target = self.get_assignment_target(stmt.lvalue)
        target_value = self.read(target, stmt.line)
        rreg = self.accept(stmt.rvalue)
        # the Python parser strips the '=' from operator assignment statements, so re-add it
        op = stmt.op + '='
        res = self.binary_op(target_value, rreg, op, stmt.line)
        # usually operator assignments are done in-place
        # but when target doesn't support that we need to manually assign
        self.assign(target, res, res.line)

    def get_assignment_target(self, lvalue: Lvalue,
                              line: int = -1) -> AssignmentTarget:
        if isinstance(lvalue, NameExpr):
            # If we are visiting a decorator, then the SymbolNode we really want to be looking at
            # is the function that is decorated, not the entire Decorator node itself.
            symbol = lvalue.node
            if isinstance(symbol, Decorator):
                symbol = symbol.func
            if symbol is None:
                # New semantic analyzer doesn't create ad-hoc Vars for special forms.
                assert lvalue.is_special_form
                symbol = Var(lvalue.name)
            if lvalue.kind == LDEF:
                if symbol not in self.environment.symtable:
                    # If the function is a generator function, then first define a new variable
                    # in the current function's environment class. Next, define a target that
                    # refers to the newly defined variable in that environment class. Add the
                    # target to the table containing class environment variables, as well as the
                    # current environment.
                    if self.fn_info.is_generator:
                        return self.add_var_to_env_class(symbol, self.node_type(lvalue),
                                                         self.fn_info.generator_class,
                                                         reassign=False)

                    # Otherwise define a new local variable.
                    return self.environment.add_local_reg(symbol, self.node_type(lvalue))
                else:
                    # Assign to a previously defined variable.
                    return self.environment.lookup(symbol)
            elif lvalue.kind == GDEF:
                globals_dict = self.load_globals_dict()
                name = self.load_static_unicode(lvalue.name)
                return AssignmentTargetIndex(globals_dict, name)
            else:
                assert False, lvalue.kind
        elif isinstance(lvalue, IndexExpr):
            # Indexed assignment x[y] = e
            base = self.accept(lvalue.base)
            index = self.accept(lvalue.index)
            return AssignmentTargetIndex(base, index)
        elif isinstance(lvalue, MemberExpr):
            # Attribute assignment x.y = e
            obj = self.accept(lvalue.expr)
            return AssignmentTargetAttr(obj, lvalue.name)
        elif isinstance(lvalue, TupleExpr):
            # Multiple assignment a, ..., b = e
            star_idx = None  # type: Optional[int]
            lvalues = []
            for idx, item in enumerate(lvalue.items):
                targ = self.get_assignment_target(item)
                lvalues.append(targ)
                if isinstance(item, StarExpr):
                    if star_idx is not None:
                        self.error("Two starred expressions in assignment", line)
                    star_idx = idx
            return AssignmentTargetTuple(lvalues, star_idx)
        elif isinstance(lvalue, StarExpr):
            return self.get_assignment_target(lvalue.expr)

        assert False, 'Unsupported lvalue: %r' % lvalue
    def read(self, target: Union[Value, AssignmentTarget], line: int = -1) -> Value:
        """Read the current value of an assignment target (or pass a Value through)."""
        if isinstance(target, Value):
            return target
        if isinstance(target, AssignmentTargetRegister):
            return target.register
        if isinstance(target, AssignmentTargetIndex):
            reg = self.gen_method_call(
                target.base, '__getitem__', [target.index], target.type, line)
            if reg is not None:
                return reg
            assert False, target.base.type
        if isinstance(target, AssignmentTargetAttr):
            # Native attribute access for extension classes; generic getattr otherwise.
            if isinstance(target.obj.type, RInstance) and target.obj.type.class_ir.is_ext_class:
                return self.add(GetAttr(target.obj, target.attr, line))
            else:
                return self.py_get_attr(target.obj, target.attr, line)

        assert False, 'Unsupported lvalue: %r' % target

    def assign(self, target: Union[Register, AssignmentTarget],
               rvalue_reg: Value, line: int) -> None:
        """Assign rvalue_reg to the given assignment target, coercing as needed."""
        if isinstance(target, Register):
            self.add(Assign(target, rvalue_reg))
        elif isinstance(target, AssignmentTargetRegister):
            rvalue_reg = self.coerce(rvalue_reg, target.type, line)
            self.add(Assign(target.register, rvalue_reg))
        elif isinstance(target, AssignmentTargetAttr):
            if isinstance(target.obj_type, RInstance):
                rvalue_reg = self.coerce(rvalue_reg, target.type, line)
                self.add(SetAttr(target.obj, target.attr, rvalue_reg, line))
            else:
                key = self.load_static_unicode(target.attr)
                boxed_reg = self.box(rvalue_reg)
                self.add(PrimitiveOp([target.obj, key, boxed_reg], py_setattr_op, line))
        elif isinstance(target, AssignmentTargetIndex):
            target_reg2 = self.gen_method_call(
                target.base, '__setitem__', [target.index, rvalue_reg], None, line)
            assert target_reg2 is not None, target.base.type
        elif isinstance(target, AssignmentTargetTuple):
            if isinstance(rvalue_reg.type, RTuple) and target.star_idx is None:
                # RTuple of known arity: unpack element-by-element with TupleGet.
                rtypes = rvalue_reg.type.types
                assert len(rtypes) == len(target.items)
                for i in range(len(rtypes)):
                    item_value = self.add(TupleGet(rvalue_reg, i, line))
                    self.assign(target.items[i], item_value, line)
            else:
                self.process_iterator_tuple_assignment(target, rvalue_reg, line)
        else:
            assert False, 'Unsupported assignment target'

    def process_iterator_tuple_assignment_helper(self,
                                                 litem: AssignmentTarget,
                                                 ritem: Value, line: int) -> None:
        error_block, ok_block = BasicBlock(), BasicBlock()
        self.add(Branch(ritem, error_block, ok_block, Branch.IS_ERROR))

        self.activate_block(error_block)
        self.add(RaiseStandardError(RaiseStandardError.VALUE_ERROR,
                                    'not enough values to unpack', line))
        self.add(Unreachable())

        self.activate_block(ok_block)
        self.assign(litem, ritem, line)

    def process_iterator_tuple_assignment(self,
                                          target: AssignmentTargetTuple,
                                          rvalue_reg: Value,
                                          line: int) -> None:

        iterator = self.primitive_op(iter_op, [rvalue_reg], line)

        # This may be the whole lvalue list if there is no starred value
        split_idx = target.star_idx if target.star_idx is not None else len(target.items)

        # Assign values before the first starred value
        for litem in target.items[:split_idx]:
            ritem = self.primitive_op(next_op, [iterator], line)
            error_block, ok_block = BasicBlock(), BasicBlock()
            self.add(Branch(ritem, error_block, ok_block, Branch.IS_ERROR))

            self.activate_block(error_block)
            self.add(RaiseStandardError(RaiseStandardError.VALUE_ERROR,
                                        'not enough values to unpack', line))
            self.add(Unreachable())

            self.activate_block(ok_block)
            self.assign(litem, ritem, line)

        # Assign the starred value and all values after it
        if target.star_idx is not None:
            post_star_vals = target.items[split_idx + 1:]
            # Collect the rest of the iterator into a list, then pop values for the
            # post-star targets off its end, leaving the starred value in the list.
            iter_list = self.primitive_op(to_list, [iterator], line)
            iter_list_len = self.primitive_op(list_len_op, [iter_list], line)
            post_star_len = self.add(LoadInt(len(post_star_vals)))
            condition = self.binary_op(post_star_len, iter_list_len, '<=', line)

            error_block, ok_block = BasicBlock(), BasicBlock()
            self.add(Branch(condition, ok_block, error_block, Branch.BOOL_EXPR))

            self.activate_block(error_block)
            self.add(RaiseStandardError(RaiseStandardError.VALUE_ERROR,
                                        'not enough values to unpack', line))
            self.add(Unreachable())

            self.activate_block(ok_block)

            for litem in reversed(post_star_vals):
                ritem = self.primitive_op(list_pop_last, [iter_list], line)
                self.assign(litem, ritem, line)

            # Assign the starred value
            self.assign(target.items[target.star_idx], iter_list, line)

        # There is no starred value, so check if there are extra values in rhs that
        # have not been assigned.
        else:
            extra = self.primitive_op(next_op, [iterator], line)
            error_block, ok_block = BasicBlock(), BasicBlock()
            self.add(Branch(extra, ok_block, error_block, Branch.IS_ERROR))

            self.activate_block(error_block)
            self.add(RaiseStandardError(RaiseStandardError.VALUE_ERROR,
                                        'too many values to unpack', line))
            self.add(Unreachable())

            self.activate_block(ok_block)

    def visit_if_stmt(self, stmt: IfStmt) -> None:
        if_body, next = BasicBlock(), BasicBlock()
        else_body = BasicBlock() if stmt.else_body else next

        # If statements are normalized
        assert len(stmt.expr) == 1

        self.process_conditional(stmt.expr[0], if_body, else_body)
        self.activate_block(if_body)
        self.accept(stmt.body[0])
        self.goto(next)
        if stmt.else_body:
            self.activate_block(else_body)
            self.accept(stmt.else_body)
            self.goto(next)
        self.activate_block(next)

    def push_loop_stack(self, continue_block: BasicBlock, break_block: BasicBlock) -> None:
        self.nonlocal_control.append(
            LoopNonlocalControl(self.nonlocal_control[-1], continue_block, break_block))

    def pop_loop_stack(self) -> None:
        self.nonlocal_control.pop()

    def visit_while_stmt(self, s: WhileStmt) -> None:
        body, next, top, else_block = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock()
        normal_loop_exit = else_block if s.else_body is not None else next

        self.push_loop_stack(top, next)

        # Split block so that we get a handle to the top of the loop.
        self.goto_and_activate(top)
        self.process_conditional(s.expr, body, normal_loop_exit)

        self.activate_block(body)
        self.accept(s.body)
        # Add branch to the top at the end of the body.
        self.goto(top)

        self.pop_loop_stack()

        if s.else_body is not None:
            self.activate_block(else_block)
            self.accept(s.else_body)
            self.goto(next)

        self.activate_block(next)

    def visit_for_stmt(self, s: ForStmt) -> None:
        def body() -> None:
            self.accept(s.body)

        def else_block() -> None:
            assert s.else_body is not None
            self.accept(s.else_body)

        self.for_loop_helper(s.index, s.expr, body,
                             else_block if s.else_body else None, s.line)

    def spill(self, value: Value) -> AssignmentTarget:
        """Moves a given Value instance into the generator class' environment class."""
        name = '{}{}'.format(TEMP_ATTR_NAME, self.temp_counter)
        self.temp_counter += 1
        target = self.add_var_to_env_class(Var(name), value.type, self.fn_info.generator_class)
        # Shouldn't be able to fail, so -1 for line
        self.assign(target, value, -1)
        return target

    def maybe_spill(self, value: Value) -> Union[Value, AssignmentTarget]:
        """
        Moves a given Value instance into the environment class for generator functions. For
        non-generator functions, leaves the Value instance as it is.

        Returns an AssignmentTarget associated with the Value for generator functions and the
        original Value itself for non-generator functions.
        """
        if self.fn_info.is_generator:
            return self.spill(value)
        return value

    def maybe_spill_assignable(self, value: Value) -> Union[Register, AssignmentTarget]:
        """
        Moves a given Value instance into the environment class for generator functions. For
        non-generator functions, allocate a temporary Register.

        Returns an AssignmentTarget associated with the Value for generator functions and an
        assignable Register for non-generator functions.
        """
        if self.fn_info.is_generator:
            return self.spill(value)

        if isinstance(value, Register):
            return value

        # Allocate a temporary register for the assignable value.
        reg = self.alloc_temp(value.type)
        self.assign(reg, value, -1)
        return reg

    def for_loop_helper(self, index: Lvalue, expr: Expression,
                        body_insts: GenFunc, else_insts: Optional[GenFunc],
                        line: int) -> None:
        """Generate IR for a loop.

        Args:
            index: the loop index Lvalue
            expr: the expression to iterate over
            body_insts: a function that generates the body of the loop
            else_insts: a function that generates the else block instructions
        """
        # Body of the loop
        body_block = BasicBlock()
        # Block that steps to the next item
        step_block = BasicBlock()
        # Block for the else clause, if we need it
        else_block = BasicBlock()
        # Block executed after the loop
        exit_block = BasicBlock()

        # Determine where we want to exit, if our condition check fails.
        normal_loop_exit = else_block if else_insts is not None else exit_block

        for_gen = self.make_for_loop_generator(index, expr, body_block, normal_loop_exit, line)

        self.push_loop_stack(step_block, exit_block)
        condition_block = self.goto_new_block()

        # Add loop condition check.
        for_gen.gen_condition()

        # Generate loop body.
        self.activate_block(body_block)
        for_gen.begin_body()
        body_insts()

        # We generate a separate step block (which might be empty).
        self.goto_and_activate(step_block)
        for_gen.gen_step()

        # Go back to loop condition.
        self.goto(condition_block)

        for_gen.add_cleanup(normal_loop_exit)
        self.pop_loop_stack()

        if else_insts is not None:
            self.activate_block(else_block)
            else_insts()
            self.goto(exit_block)

        self.activate_block(exit_block)

    def extract_int(self, e: Expression) -> Optional[int]:
        # Return the literal int value of an expression (including a negated
        # literal), or None if it isn't one.
        if isinstance(e, IntExpr):
            return e.value
        elif isinstance(e, UnaryExpr) and e.op == '-' and isinstance(e.expr, IntExpr):
            return -e.expr.value
        else:
            return None

    def make_for_loop_generator(self,
                                index: Lvalue,
                                expr: Expression,
                                body_block: BasicBlock,
                                loop_exit: BasicBlock,
                                line: int,
                                nested: bool = False) -> ForGenerator:
        """Return helper object for generating a for loop over an iterable.

        If "nested" is True, this is a nested iterator such as "e" in "enumerate(e)".
        """

        if is_list_rprimitive(self.node_type(expr)):
            # Special case "for x in <list>".
            expr_reg = self.accept(expr)
            target_list_type = get_proper_type(self.types[expr])
            assert isinstance(target_list_type, Instance)
            target_type = self.type_to_rtype(target_list_type.args[0])

            for_list = ForList(self, index, body_block, loop_exit, line, nested)
            for_list.init(expr_reg, target_type, reverse=False)
            return for_list

        if (isinstance(expr, CallExpr)
                and isinstance(expr.callee, RefExpr)):
            if (expr.callee.fullname == 'builtins.range'
                    and (len(expr.args) <= 2
                         or (len(expr.args) == 3
                             and self.extract_int(expr.args[2]) is not None))
                    and set(expr.arg_kinds) == {ARG_POS}):
                # Special case "for x in range(...)".
                # We support the 3 arg form but only for int literals, since it doesn't
                # seem worth the hassle of supporting dynamically determining which
                # direction of comparison to do.
                if len(expr.args) == 1:
                    start_reg = self.add(LoadInt(0))
                    end_reg = self.accept(expr.args[0])
                else:
                    start_reg = self.accept(expr.args[0])
                    end_reg = self.accept(expr.args[1])
                if len(expr.args) == 3:
                    step = self.extract_int(expr.args[2])
                    assert step is not None
                    if step == 0:
                        self.error("range() step can't be zero", expr.args[2].line)
                else:
                    step = 1

                for_range = ForRange(self, index, body_block, loop_exit, line, nested)
                for_range.init(start_reg, end_reg, step)
                return for_range

            elif (expr.callee.fullname == 'builtins.enumerate'
                    and len(expr.args) == 1
                    and expr.arg_kinds == [ARG_POS]
                    and isinstance(index, TupleExpr)
                    and len(index.items) == 2):
                # Special case "for i, x in enumerate(y)".
                lvalue1 = index.items[0]
                lvalue2 = index.items[1]
                for_enumerate = ForEnumerate(self, index, body_block, loop_exit, line,
                                             nested)
                for_enumerate.init(lvalue1, lvalue2, expr.args[0])
                return for_enumerate

            elif (expr.callee.fullname == 'builtins.zip'
                    and len(expr.args) >= 2
                    and set(expr.arg_kinds) == {ARG_POS}
                    and isinstance(index, TupleExpr)
                    and len(index.items) == len(expr.args)):
                # Special case "for x, y in zip(a, b)".
                for_zip = ForZip(self, index, body_block, loop_exit, line, nested)
                for_zip.init(index.items, expr.args)
                return for_zip

            if (expr.callee.fullname == 'builtins.reversed'
                    and len(expr.args) == 1
                    and expr.arg_kinds == [ARG_POS]
                    and is_list_rprimitive(self.node_type(expr.args[0]))):
                # Special case "for x in reversed(<list>)".
                expr_reg = self.accept(expr.args[0])
                target_list_type = get_proper_type(self.types[expr.args[0]])
                assert isinstance(target_list_type, Instance)
                target_type = self.type_to_rtype(target_list_type.args[0])

                for_list = ForList(self, index, body_block, loop_exit, line, nested)
                for_list.init(expr_reg, target_type, reverse=True)
                return for_list

        # Default to a generic for loop.
        expr_reg = self.accept(expr)
        for_obj = ForIterable(self, index, body_block, loop_exit, line, nested)
        item_type = self._analyze_iterable_item_type(expr)
        item_rtype = self.type_to_rtype(item_type)
        for_obj.init(expr_reg, item_rtype)
        return for_obj

    def _analyze_iterable_item_type(self, expr: Expression) -> Type:
        """Return the item type given by 'expr' in an iterable context."""
        # This logic is copied from mypy's TypeChecker.analyze_iterable_item_type.
        iterable = get_proper_type(self.types[expr])
        echk = self.graph[self.module_name].type_checker().expr_checker
        iterator = echk.check_method_call_by_name('__iter__', iterable, [], [], expr)[0]

        from mypy.join import join_types
        if isinstance(iterable, TupleType):
            joined = UninhabitedType()  # type: Type
            for item in iterable.items:
                joined = join_types(joined, item)
            return joined
        else:
            # Non-tuple iterable.
        return echk.check_method_call_by_name('__next__', iterator, [], [], expr)[0]

    def visit_break_stmt(self, node: BreakStmt) -> None:
        """Emit a 'break' by delegating to the innermost nonlocal-control handler."""
        self.nonlocal_control[-1].gen_break(self, node.line)

    def visit_continue_stmt(self, node: ContinueStmt) -> None:
        """Emit a 'continue' by delegating to the innermost nonlocal-control handler."""
        self.nonlocal_control[-1].gen_continue(self, node.line)

    def visit_unary_expr(self, expr: UnaryExpr) -> Value:
        """Generate IR for a unary operator expression."""
        return self.unary_op(self.accept(expr.expr), expr.op, expr.line)

    def visit_op_expr(self, expr: OpExpr) -> Value:
        """Generate IR for a binary operator expression.

        'and'/'or' need short-circuit evaluation, so they are handled separately.
        """
        if expr.op in ('and', 'or'):
            return self.shortcircuit_expr(expr)
        return self.binary_op(self.accept(expr.left), self.accept(expr.right), expr.op, expr.line)

    def translate_eq_cmp(self, lreg: Value, rreg: Value, expr_op: str, line: int) -> Optional[Value]:
        """Try to resolve an ==/!= comparison statically for native classes.

        Return None if the comparison cannot be resolved at compile time
        (caller then falls back to the generic binary op path).
        """
        ltype = lreg.type
        rtype = rreg.type
        # Only applies when both operands are the *same* native class instance type.
        if not (isinstance(ltype, RInstance) and ltype == rtype):
            return None

        class_ir = ltype.class_ir
        # Check whether any subclasses of the operand redefines __eq__
        # or it might be redefined in a Python parent class or by
        # dataclasses
        cmp_varies_at_runtime = (
            not class_ir.is_method_final('__eq__')
            or not class_ir.is_method_final('__ne__')
            or class_ir.inherits_python
            or class_ir.is_augmented
        )

        if cmp_varies_at_runtime:
            # We might need to call left.__eq__(right) or right.__eq__(left)
            # depending on which is the more specific type.
            return None

        if not class_ir.has_method('__eq__'):
            # There's no __eq__ defined, so just use object identity.
            identity_ref_op = 'is' if expr_op == '==' else 'is not'
            return self.binary_op(lreg, rreg, identity_ref_op, line)

        return self.gen_method_call(
            lreg,
            op_methods[expr_op],
            [rreg],
            ltype,
            line
        )

    def matching_primitive_op(self,
                              candidates: List[OpDescription],
                              args: List[Value],
                              line: int,
                              result_type: Optional[RType] = None) -> Optional[Value]:
        """Find and emit the best-matching primitive op for the given args.

        Return the result Value (coerced to result_type if given), or None if
        no candidate op matches the argument types.
        """
        # Find the highest-priority primitive op that matches.
        matching = None  # type: Optional[OpDescription]
        for desc in candidates:
            if len(desc.arg_types) != len(args):
                continue
            if all(is_subtype(actual.type, formal)
                   for actual, formal in zip(args, desc.arg_types)):
                if matching:
                    # Equal priorities would make the choice arbitrary.
                    assert matching.priority != desc.priority, 'Ambiguous:\n1) %s\n2) %s' % (
                        matching, desc)
                    if desc.priority > matching.priority:
                        matching = desc
                else:
                    matching = desc
        if matching:
            target = self.primitive_op(matching, args, line)
            if result_type and not is_runtime_subtype(target.type, result_type):
                if is_none_rprimitive(result_type):
                    # Special case None return. The actual result may actually be a bool
                    # and so we can't just coerce it.
                    target = self.none()
                else:
                    target = self.coerce(target, result_type, line)
            return target
        return None

    def binary_op(self, lreg: Value, rreg: Value, expr_op: str, line: int) -> Value:
        """Generate IR for a binary operation on two values."""
        # Special case == and != when we can resolve the method call statically.
        value = None
        if expr_op in ('==', '!='):
            value = self.translate_eq_cmp(lreg, rreg, expr_op, line)
        if value is not None:
            return value

        ops = binary_ops.get(expr_op, [])
        target = self.matching_primitive_op(ops, [lreg, rreg], line)
        assert target, 'Unsupported binary operation: %s' % expr_op
        return target

    def unary_op(self, lreg: Value, expr_op: str, line: int) -> Value:
        """Generate IR for a unary operation on a value."""
        ops = unary_ops.get(expr_op, [])
        target = self.matching_primitive_op(ops, [lreg], line)
        assert target, 'Unsupported unary operation: %s' % expr_op
        return target

    def visit_index_expr(self, expr: IndexExpr) -> Value:
        """Generate IR for an indexing expression (base[index])."""
        base = self.accept(expr.base)

        # Indexing an RTuple with a constant int can be done directly.
        if isinstance(base.type, RTuple) and isinstance(expr.index, IntExpr):
            return self.add(TupleGet(base, expr.index.value, expr.line))

        index_reg = self.accept(expr.index)
        return self.gen_method_call(
            base, '__getitem__', [index_reg], self.node_type(expr), expr.line)

    def visit_int_expr(self, expr: IntExpr) -> Value:
        """Generate IR for an integer literal."""
        if expr.value > MAX_LITERAL_SHORT_INT:
            # Too large for an inline LoadInt; load from a static.
            return self.load_static_int(expr.value)
        return self.add(LoadInt(expr.value))

    def visit_float_expr(self, expr: FloatExpr) -> Value:
        """Generate IR for a float literal (loaded from a static)."""
        return self.load_static_float(expr.value)

    def visit_complex_expr(self, expr: ComplexExpr) -> Value:
        """Generate IR for a complex literal (loaded from a static)."""
        return self.load_static_complex(expr.value)

    def visit_bytes_expr(self, expr: BytesExpr) -> Value:
        """Generate IR for a bytes literal."""
        # expr.value stores the literal in escaped string form; round-trip it
        # through the escape codecs to recover the raw bytes.
        value = bytes(expr.value, 'utf8').decode('unicode-escape').encode('raw-unicode-escape')
        return self.load_static_bytes(value)

    def is_native_module(self, module: str) -> bool:
        """Is the given module one compiled by mypyc?"""
        return module in self.mapper.group_map

    def is_native_ref_expr(self, expr: RefExpr) -> bool:
        """Does this reference resolve to a name in a mypyc-compiled module?"""
        if expr.node is None:
            return False
        if '.' in expr.node.fullname:
            return self.is_native_module(expr.node.fullname.rpartition('.')[0])
        return True

    def is_native_module_ref_expr(self, expr: RefExpr) -> bool:
        """Is this a module-level (global) reference to a native name?"""
        return self.is_native_ref_expr(expr) and expr.kind == GDEF

    def is_synthetic_type(self, typ: TypeInfo) -> bool:
        """Is a type something other than just a class we've created?"""
        return typ.is_named_tuple or typ.is_newtype or typ.typeddict_type is not None

    def is_decorated(self, fdef: FuncDef) -> bool:
        """Was this function definition decorated (tracked during prepass)?"""
        return fdef in self.fdefs_to_decorators

    def is_free_variable(self, symbol: SymbolNode) -> bool:
        """Is the symbol a free variable of the function currently being built?"""
        fitem = self.fn_info.fitem
        return fitem in self.free_variables and symbol in self.free_variables[fitem]

    def get_final_ref(self, expr: MemberExpr) -> Optional[Tuple[str, Var, bool]]:
        """Check if `expr` is a final attribute.

        This needs to be done differently for class and module attributes to
        correctly determine fully qualified name. Return a tuple that consists of
        the qualified name, the corresponding Var node, and a flag indicating
        whether the final name was defined in a compiled module. Return None
        if `expr` does not refer to a final attribute.
        """
        final_var = None
        if isinstance(expr.expr, RefExpr) and isinstance(expr.expr.node, TypeInfo):
            # a class attribute
            sym = expr.expr.node.get(expr.name)
            if sym and isinstance(sym.node, Var):
                # Enum attribute are treated as final since they are added to the global cache
                expr_fullname = expr.expr.node.bases[0].type.fullname
                is_final = sym.node.is_final or expr_fullname == 'enum.Enum'
                if is_final:
                    final_var = sym.node
                    fullname = '{}.{}'.format(sym.node.info.fullname, final_var.name)
                    native = self.is_native_module(expr.expr.node.module_name)
        elif self.is_module_member_expr(expr):
            # a module attribute
            if isinstance(expr.node, Var) and expr.node.is_final:
                final_var = expr.node
                fullname = expr.node.fullname
                native = self.is_native_ref_expr(expr)
        if final_var is not None:
            return fullname, final_var, native
        return None

    def emit_load_final(self, final_var: Var, fullname: str,
                        name: str, native: bool, typ: Type, line: int) -> Optional[Value]:
        """Emit code for loading value of a final name (if possible).

        Args:
            final_var: Var corresponding to the final name
            fullname: its qualified name
            name: shorter name to show in errors
            native: whether the name was defined in a compiled module
            typ: its type
            line: line number where loading occurs
        """
        if final_var.final_value is not None:  # this is safe even for non-native names
            return self.load_final_literal_value(final_var.final_value, line)
        elif native:
            return self.load_final_static(fullname, self.mapper.type_to_rtype(typ),
                                          line, name)
        else:
            return None

    def visit_name_expr(self, expr: NameExpr) -> Value:
        """Generate IR for reading a plain name (local, global, final, or module)."""
        assert expr.node, "RefExpr not resolved"
        fullname = expr.node.fullname
        if fullname in name_ref_ops:
            # Use special access op for this particular name.
            desc = name_ref_ops[fullname]
            assert desc.result_type is not None
            return self.add(PrimitiveOp([], desc, expr.line))

        if isinstance(expr.node, Var) and expr.node.is_final:
            # Final names can sometimes be loaded as constants/statics.
            value = self.emit_load_final(expr.node, fullname, expr.name,
                                         self.is_native_ref_expr(expr), self.types[expr],
                                         expr.line)
            if value is not None:
                return value

        if isinstance(expr.node, MypyFile) and expr.node.fullname in self.imports:
            return self.load_module(expr.node.fullname)

        # If the expression is locally defined, then read the result from the corresponding
        # assignment target and return it. Otherwise if the expression is a global, load it from
        # the globals dictionary.
        # Except for imports, that currently always happens in the global namespace.
        if expr.kind == LDEF and not (isinstance(expr.node, Var)
                                      and expr.node.is_suppressed_import):
            # Try to detect and error when we hit the irritating mypy bug
            # where a local variable is cast to None. (#5423)
            if (isinstance(expr.node, Var) and is_none_rprimitive(self.node_type(expr))
                    and expr.node.is_inferred):
                self.error(
                    "Local variable '{}' has inferred type None; add an annotation".format(
                        expr.node.name),
                    expr.node.line)

            # TODO: Behavior currently only defined for Var and FuncDef node types.
            return self.read(self.get_assignment_target(expr), expr.line)

        return self.load_global(expr)

    def is_module_member_expr(self, expr: MemberExpr) -> bool:
        """Is this an attribute access on a module object (mod.name)?"""
        return isinstance(expr.expr, RefExpr) and isinstance(expr.expr.node, MypyFile)

    def visit_member_expr(self, expr: MemberExpr) -> Value:
        """Generate IR for an attribute access (obj.attr)."""
        # First check if this is maybe a final attribute.
        final = self.get_final_ref(expr)
        if final is not None:
            fullname, final_var, native = final
            value = self.emit_load_final(final_var, fullname, final_var.name, native,
                                         self.types[expr], expr.line)
            if value is not None:
                return value

        if isinstance(expr.node, MypyFile) and expr.node.fullname in self.imports:
            return self.load_module(expr.node.fullname)

        obj = self.accept(expr.expr)
        return self.get_attr(obj, expr.name, self.node_type(expr), expr.line)

    def get_attr(self, obj: Value, attr: str, result_type: RType, line: int) -> Value:
        """Generate an attribute load, choosing the fastest available strategy.

        Native extension-class attributes use a direct GetAttr; unions are
        decomposed per item; everything else goes through the Python C API.
        """
        if (isinstance(obj.type, RInstance) and obj.type.class_ir.is_ext_class
                and obj.type.class_ir.has_attr(attr)):
            return self.add(GetAttr(obj, attr, line))
        elif isinstance(obj.type, RUnion):
            return self.union_get_attr(obj, obj.type, attr, result_type, line)
        else:
            return self.py_get_attr(obj, attr, line)

    def union_get_attr(self, obj: Value, rtype: RUnion, attr: str, result_type: RType,
                       line: int) -> Value:
        """Generate an attribute load from a value with a union type."""
        def get_item_attr(value: Value) -> Value:
            return self.get_attr(value, attr, result_type, line)

        return self.decompose_union_helper(obj, rtype, result_type, get_item_attr, line)

    def decompose_union_helper(self,
                               obj: Value,
                               rtype: RUnion,
                               result_type: RType,
                               process_item: Callable[[Value], Value],
                               line: int) -> Value:
        """Generate isinstance() + specialized operations for union items.

        Say, for Union[A, B] generate ops resembling this (pseudocode):

            if isinstance(obj, A):
                result = <process_item() applied to obj coerced to A>
            else:
                result = <process_item() applied to obj coerced to B>

        Args:
            obj: value with a union type
            rtype: the union type
            result_type: result of the operation
            process_item: callback to generate op for a single union item (arg is coerced
                to union item type)
            line: line number
        """
        # TODO: Optimize cases where a single operation can handle multiple union items
        #     (say a method is implemented in a common base class)

        # Split items into fast (native class) and rest.
        fast_items = []
        rest_items = []
        for item in rtype.items:
            if isinstance(item, RInstance):
                fast_items.append(item)
            else:
                # For everything but RInstance we fall back to C API
                rest_items.append(item)

        exit_block = BasicBlock()
        result = self.alloc_temp(result_type)

        for i, item in enumerate(fast_items):
            more_types = i < len(fast_items) - 1 or rest_items
            if more_types:
                # We are not at the final item so we need one more branch
                op = self.isinstance_native(obj, item.class_ir, line)
                true_block, false_block = BasicBlock(), BasicBlock()
                self.add_bool_branch(op, true_block, false_block)
                self.activate_block(true_block)

            coerced = self.coerce(obj, item, line)
            temp = process_item(coerced)
            temp2 = self.coerce(temp, result_type, line)
            self.add(Assign(result, temp2))
            self.goto(exit_block)

            if more_types:
                self.activate_block(false_block)

        if rest_items:
            # For everything else we use generic operation. Use force=True to drop the
            # union type.
            coerced = self.coerce(obj, object_rprimitive, line, force=True)
            temp = process_item(coerced)
            temp2 = self.coerce(temp, result_type, line)
            self.add(Assign(result, temp2))
            self.goto(exit_block)

        self.activate_block(exit_block)
        return result

    def isinstance_helper(self, obj: Value, class_irs: List[ClassIR], line: int) -> Value:
        """Fast path for isinstance() that checks against a list of native classes."""
        if not class_irs:
            return self.primitive_op(false_op, [], line)
        ret = self.isinstance_native(obj, class_irs[0], line)
        for class_ir in class_irs[1:]:
            # NOTE(review): 'other' captures the loop variable, but it is invoked
            # immediately by shortcircuit_helper within this iteration, so late
            # binding is not an issue here.
            def other() -> Value:
                return self.isinstance_native(obj, class_ir, line)
            ret = self.shortcircuit_helper('or', bool_rprimitive, lambda: ret, other, line)
        return ret

    def isinstance_native(self, obj: Value, class_ir: ClassIR, line: int) -> Value:
        """Fast isinstance() check for a native class.

        If there three or less concrete (non-trait) classes among the class and all
        its children, use even faster type comparison checks `type(obj) is typ`.
        """
        concrete = all_concrete_classes(class_ir)
        if concrete is None or len(concrete) > FAST_ISINSTANCE_MAX_SUBCLASSES + 1:
            return self.primitive_op(fast_isinstance_op,
                                     [obj, self.get_native_type(class_ir)],
                                     line)
        if not concrete:
            # There can't be any concrete instance that matches this.
            return self.primitive_op(false_op, [], line)
        type_obj = self.get_native_type(concrete[0])
        ret = self.primitive_op(type_is_op, [obj, type_obj], line)
        for c in concrete[1:]:
            # NOTE(review): 'other' captures the loop variable, but it is invoked
            # immediately within this iteration, so late binding is not an issue.
            def other() -> Value:
                return self.primitive_op(type_is_op, [obj, self.get_native_type(c)], line)
            ret = self.shortcircuit_helper('or', bool_rprimitive, lambda: ret, other, line)
        return ret

    def get_native_type(self, cls: ClassIR) -> Value:
        """Load the type object of a native class."""
        fullname = '%s.%s' % (cls.module_name, cls.name)
        return self.load_native_type_object(fullname)

    def py_get_attr(self, obj: Value, attr: str, line: int) -> Value:
        """Generate a generic attribute load via the Python C API (getattr)."""
        key = self.load_static_unicode(attr)
        return self.add(PrimitiveOp([obj, key], py_getattr_op, line))

    def py_call(self,
                function: Value,
                arg_values: List[Value],
                line: int,
                arg_kinds: Optional[List[int]] = None,
                arg_names: Optional[Sequence[Optional[str]]] = None) -> Value:
        """Use py_call_op or py_call_with_kwargs_op for function call."""
        # If all arguments are positional, we can use py_call_op.
        if (arg_kinds is None) or all(kind == ARG_POS for kind in arg_kinds):
            return self.primitive_op(py_call_op, [function] + arg_values, line)

        # Otherwise fallback to py_call_with_kwargs_op.
        assert arg_names is not None

        pos_arg_values = []
        kw_arg_key_value_pairs = []  # type: List[DictEntry]
        star_arg_values = []
        for value, kind, name in zip(arg_values, arg_kinds, arg_names):
            if kind == ARG_POS:
                pos_arg_values.append(value)
            elif kind == ARG_NAMED:
                assert name is not None
                key = self.load_static_unicode(name)
                kw_arg_key_value_pairs.append((key, value))
            elif kind == ARG_STAR:
                star_arg_values.append(value)
            elif kind == ARG_STAR2:
                # NOTE: mypy currently only supports a single ** arg, but python supports multiple.
                # This code supports multiple primarily to make the logic easier to follow.
                kw_arg_key_value_pairs.append((None, value))
            else:
                assert False, ("Argument kind should not be possible:", kind)

        if len(star_arg_values) == 0:
            # We can directly construct a tuple if there are no star args.
            pos_args_tuple = self.primitive_op(new_tuple_op, pos_arg_values, line)
        else:
            # Otherwise we construct a list and call extend it with the star args, since tuples
            # don't have an extend method.
            pos_args_list = self.primitive_op(new_list_op, pos_arg_values, line)
            for star_arg_value in star_arg_values:
                self.primitive_op(list_extend_op, [pos_args_list, star_arg_value], line)
            pos_args_tuple = self.primitive_op(list_tuple_op, [pos_args_list], line)

        kw_args_dict = self.make_dict(kw_arg_key_value_pairs, line)

        return self.primitive_op(
            py_call_with_kwargs_op, [function, pos_args_tuple, kw_args_dict], line)

    def py_method_call(self,
                       obj: Value,
                       method_name: str,
                       arg_values: List[Value],
                       line: int,
                       arg_kinds: Optional[List[int]],
                       arg_names: Optional[Sequence[Optional[str]]]) -> Value:
        """Generate a method call through the Python C API."""
        if (arg_kinds is None) or all(kind == ARG_POS for kind in arg_kinds):
            # All-positional calls have a direct primitive op.
            method_name_reg = self.load_static_unicode(method_name)
            return self.primitive_op(py_method_call_op, [obj, method_name_reg] + arg_values, line)
        else:
            # Otherwise fetch the bound method and do a generic call.
            method = self.py_get_attr(obj, method_name, line)
            return self.py_call(method, arg_values, line, arg_kinds=arg_kinds, arg_names=arg_names)

    def call(self, decl: FuncDecl, args: Sequence[Value],
             arg_kinds: List[int],
             arg_names: Sequence[Optional[str]],
             line: int) -> Value:
        """Generate a native call to a function with a known declaration."""
        # Normalize args to positionals.
        args = self.native_args_to_positional(
            args, arg_kinds, arg_names, decl.sig, line)
        return self.add(Call(decl, args, line))

    def visit_call_expr(self, expr: CallExpr) -> Value:
        """Generate IR for a call expression, dispatching on the callee form."""
        if isinstance(expr.analyzed, CastExpr):
            return self.translate_cast_expr(expr.analyzed)

        callee = expr.callee
        if isinstance(callee, IndexExpr) and isinstance(callee.analyzed, TypeApplication):
            callee = callee.analyzed.expr  # Unwrap type application

        if isinstance(callee, MemberExpr):
            return self.translate_method_call(expr, callee)
        elif isinstance(callee, SuperExpr):
            return self.translate_super_method_call(expr, callee)
        else:
            return self.translate_call(expr, callee)

    def translate_call(self, expr: CallExpr, callee: Expression) -> Value:
        """Generate IR for a non-method, non-super call."""
        # The common case of calls is refexprs
        if isinstance(callee, RefExpr):
            return self.translate_refexpr_call(expr, callee)

        function = self.accept(callee)
        args = [self.accept(arg) for arg in expr.args]
        return self.py_call(function, args, expr.line,
                            arg_kinds=expr.arg_kinds, arg_names=expr.arg_names)

    def translate_refexpr_call(self, expr: CallExpr, callee: RefExpr) -> Value:
        """Translate a non-method call."""

        # TODO: Allow special cases to have default args or named args. Currently they don't since
        # they check that everything in arg_kinds is ARG_POS.

        # If there is a specializer for this function, try calling it.
        if callee.fullname and (callee.fullname, None) in specializers:
            val = specializers[callee.fullname, None](self, expr, callee)
            if val is not None:
                return val

        # Gen the argument values
        arg_values = [self.accept(arg) for arg in expr.args]

        return self.call_refexpr_with_args(expr, callee, arg_values)

    def call_refexpr_with_args(
            self, expr: CallExpr, callee: RefExpr, arg_values: List[Value]) -> Value:
        """Generate the actual call for a refexpr callee with evaluated args."""
        # Handle data-driven special-cased primitive call ops.
        if callee.fullname is not None and expr.arg_kinds == [ARG_POS] * len(arg_values):
            ops = func_ops.get(callee.fullname, [])
            target = self.matching_primitive_op(ops, arg_values, expr.line, self.node_type(expr))
            if target:
                return target

        # Standard native call if signature and fullname are good and all arguments are positional
        # or named.
        callee_node = callee.node
        if isinstance(callee_node, OverloadedFuncDef):
            callee_node = callee_node.impl
        if (callee_node is not None
                and callee.fullname is not None
                and callee_node in self.mapper.func_to_decl
                and all(kind in (ARG_POS, ARG_NAMED) for kind in expr.arg_kinds)):
            decl = self.mapper.func_to_decl[callee_node]
            return self.call(decl, arg_values, expr.arg_kinds, expr.arg_names, expr.line)

        # Fall back to a Python call
        function = self.accept(callee)
        return self.py_call(function, arg_values, expr.line,
                            arg_kinds=expr.arg_kinds, arg_names=expr.arg_names)

    def translate_method_call(self, expr: CallExpr, callee: MemberExpr) -> Value:
        """Generate IR for an arbitrary call of form e.m(...).

        This can also deal with calls to module-level functions.
""" if self.is_native_ref_expr(callee): # Call to module-level native function or such return self.translate_call(expr, callee) elif ( isinstance(callee.expr, RefExpr) and isinstance(callee.expr.node, TypeInfo) and callee.expr.node in self.mapper.type_to_ir and self.mapper.type_to_ir[callee.expr.node].has_method(callee.name) ): # Call a method via the *class* assert isinstance(callee.expr.node, TypeInfo) ir = self.mapper.type_to_ir[callee.expr.node] decl = ir.method_decl(callee.name) args = [] arg_kinds, arg_names = expr.arg_kinds[:], expr.arg_names[:] # Add the class argument for class methods in extension classes if decl.kind == FUNC_CLASSMETHOD and ir.is_ext_class: args.append(self.load_native_type_object(callee.expr.node.fullname)) arg_kinds.insert(0, ARG_POS) arg_names.insert(0, None) args += [self.accept(arg) for arg in expr.args] if ir.is_ext_class: return self.call(decl, args, arg_kinds, arg_names, expr.line) else: obj = self.accept(callee.expr) return self.gen_method_call(obj, callee.name, args, self.node_type(expr), expr.line, expr.arg_kinds, expr.arg_names) elif self.is_module_member_expr(callee): # Fall back to a PyCall for non-native module calls function = self.accept(callee) args = [self.accept(arg) for arg in expr.args] return self.py_call(function, args, expr.line, arg_kinds=expr.arg_kinds, arg_names=expr.arg_names) else: receiver_typ = self.node_type(callee.expr) # If there is a specializer for this method name/type, try calling it. 
if (callee.name, receiver_typ) in specializers: val = specializers[callee.name, receiver_typ](self, expr, callee) if val is not None: return val obj = self.accept(callee.expr) args = [self.accept(arg) for arg in expr.args] return self.gen_method_call(obj, callee.name, args, self.node_type(expr), expr.line, expr.arg_kinds, expr.arg_names) def translate_super_method_call(self, expr: CallExpr, callee: SuperExpr) -> Value: if callee.info is None or callee.call.args: return self.translate_call(expr, callee) ir = self.mapper.type_to_ir[callee.info] # Search for the method in the mro, skipping ourselves. for base in ir.mro[1:]: if callee.name in base.method_decls: break else: return self.translate_call(expr, callee) decl = base.method_decl(callee.name) arg_values = [self.accept(arg) for arg in expr.args] arg_kinds, arg_names = expr.arg_kinds[:], expr.arg_names[:] if decl.kind != FUNC_STATICMETHOD: vself = next(iter(self.environment.indexes)) # grab first argument if decl.kind == FUNC_CLASSMETHOD: vself = self.primitive_op(type_op, [vself], expr.line) elif self.fn_info.is_generator: # For generator classes, the self target is the 6th value # in the symbol table (which is an ordered dict). This is sort # of ugly, but we can't search by name since the 'self' parameter # could be named anything, and it doesn't get added to the # environment indexes. self_targ = list(self.environment.symtable.values())[6] vself = self.read(self_targ, self.fn_info.fitem.line) arg_values.insert(0, vself) arg_kinds.insert(0, ARG_POS) arg_names.insert(0, None) return self.call(decl, arg_values, arg_kinds, arg_names, expr.line) def gen_method_call(self, base: Value, name: str, arg_values: List[Value], return_rtype: Optional[RType], line: int, arg_kinds: Optional[List[int]] = None, arg_names: Optional[List[Optional[str]]] = None) -> Value: # If arg_kinds contains values other than arg_pos and arg_named, then fallback to # Python method call. 
if (arg_kinds is not None and not all(kind in (ARG_POS, ARG_NAMED) for kind in arg_kinds)): return self.py_method_call(base, name, arg_values, base.line, arg_kinds, arg_names) # If the base type is one of ours, do a MethodCall if (isinstance(base.type, RInstance) and base.type.class_ir.is_ext_class and not base.type.class_ir.builtin_base): if base.type.class_ir.has_method(name): decl = base.type.class_ir.method_decl(name) if arg_kinds is None: assert arg_names is None, "arg_kinds not present but arg_names is" arg_kinds = [ARG_POS for _ in arg_values] arg_names = [None for _ in arg_values] else: assert arg_names is not None, "arg_kinds present but arg_names is not" # Normalize args to positionals. assert decl.bound_sig arg_values = self.native_args_to_positional( arg_values, arg_kinds, arg_names, decl.bound_sig, line) return self.add(MethodCall(base, name, arg_values, line)) elif base.type.class_ir.has_attr(name): function = self.add(GetAttr(base, name, line)) return self.py_call(function, arg_values, line, arg_kinds=arg_kinds, arg_names=arg_names) elif isinstance(base.type, RUnion): return self.union_method_call(base, base.type, name, arg_values, return_rtype, line, arg_kinds, arg_names) # Try to do a special-cased method call if not arg_kinds or arg_kinds == [ARG_POS] * len(arg_values): target = self.translate_special_method_call(base, name, arg_values, return_rtype, line) if target: return target # Fall back to Python method call return self.py_method_call(base, name, arg_values, line, arg_kinds, arg_names) def union_method_call(self, base: Value, obj_type: RUnion, name: str, arg_values: List[Value], return_rtype: Optional[RType], line: int, arg_kinds: Optional[List[int]], arg_names: Optional[List[Optional[str]]]) -> Value: # Union method call needs a return_rtype for the type of the output register. # If we don't have one, use object_rprimitive. 
return_rtype = return_rtype or object_rprimitive def call_union_item(value: Value) -> Value: return self.gen_method_call(value, name, arg_values, return_rtype, line, arg_kinds, arg_names) return self.decompose_union_helper(base, obj_type, return_rtype, call_union_item, line) def translate_cast_expr(self, expr: CastExpr) -> Value: src = self.accept(expr.expr) target_type = self.type_to_rtype(expr.type) return self.coerce(src, target_type, expr.line) def shortcircuit_helper(self, op: str, expr_type: RType, left: Callable[[], Value], right: Callable[[], Value], line: int) -> Value: # Having actual Phi nodes would be really nice here! target = self.alloc_temp(expr_type) # left_body takes the value of the left side, right_body the right left_body, right_body, next = BasicBlock(), BasicBlock(), BasicBlock() # true_body is taken if the left is true, false_body if it is false. # For 'and' the value is the right side if the left is true, and for 'or' # it is the right side if the left is false. true_body, false_body = ( (right_body, left_body) if op == 'and' else (left_body, right_body)) left_value = left() self.add_bool_branch(left_value, true_body, false_body) self.activate_block(left_body) left_coerced = self.coerce(left_value, expr_type, line) self.add(Assign(target, left_coerced)) self.goto(next) self.activate_block(right_body) right_value = right() right_coerced = self.coerce(right_value, expr_type, line) self.add(Assign(target, right_coerced)) self.goto(next) self.activate_block(next) return target def shortcircuit_expr(self, expr: OpExpr) -> Value: return self.shortcircuit_helper( expr.op, self.node_type(expr), lambda: self.accept(expr.left), lambda: self.accept(expr.right), expr.line ) def visit_conditional_expr(self, expr: ConditionalExpr) -> Value: if_body, else_body, next = BasicBlock(), BasicBlock(), BasicBlock() self.process_conditional(expr.cond, if_body, else_body) expr_type = self.node_type(expr) # Having actual Phi nodes would be really nice here! 
target = self.alloc_temp(expr_type) self.activate_block(if_body) true_value = self.accept(expr.if_expr) true_value = self.coerce(true_value, expr_type, expr.line) self.add(Assign(target, true_value)) self.goto(next) self.activate_block(else_body) false_value = self.accept(expr.else_expr) false_value = self.coerce(false_value, expr_type, expr.line) self.add(Assign(target, false_value)) self.goto(next) self.activate_block(next) return target def translate_special_method_call(self, base_reg: Value, name: str, args: List[Value], result_type: Optional[RType], line: int) -> Optional[Value]: """Translate a method call which is handled nongenerically. These are special in the sense that we have code generated specifically for them. They tend to be method calls which have equivalents in C that are more direct than calling with the PyObject api. Return None if no translation found; otherwise return the target register. """ ops = method_ops.get(name, []) return self.matching_primitive_op(ops, [base_reg] + args, line, result_type=result_type) def visit_list_expr(self, expr: ListExpr) -> Value: return self._visit_list_display(expr.items, expr.line) def _visit_list_display(self, items: List[Expression], line: int) -> Value: return self._visit_display( items, new_list_op, list_append_op, list_extend_op, line ) def _visit_display(self, items: List[Expression], constructor_op: OpDescription, append_op: OpDescription, extend_op: OpDescription, line: int ) -> Value: accepted_items = [] for item in items: if isinstance(item, StarExpr): accepted_items.append((True, self.accept(item.expr))) else: accepted_items.append((False, self.accept(item))) result = None # type: Union[Value, None] initial_items = [] for starred, value in accepted_items: if result is None and not starred and constructor_op.is_var_arg: initial_items.append(value) continue if result is None: result = self.primitive_op(constructor_op, initial_items, line) self.primitive_op(extend_op if starred else append_op, [result, 
value], line) if result is None: result = self.primitive_op(constructor_op, initial_items, line) return result def visit_tuple_expr(self, expr: TupleExpr) -> Value: if any(isinstance(item, StarExpr) for item in expr.items): # create a tuple of unknown length return self._visit_tuple_display(expr) # create an tuple of fixed length (RTuple) tuple_type = self.node_type(expr) # When handling NamedTuple et. al we might not have proper type info, # so make some up if we need it. types = (tuple_type.types if isinstance(tuple_type, RTuple) else [object_rprimitive] * len(expr.items)) items = [] for item_expr, item_type in zip(expr.items, types): reg = self.accept(item_expr) items.append(self.coerce(reg, item_type, item_expr.line)) return self.add(TupleSet(items, expr.line)) def _visit_tuple_display(self, expr: TupleExpr) -> Value: """Create a list, then turn it into a tuple.""" val_as_list = self._visit_list_display(expr.items, expr.line) return self.primitive_op(list_tuple_op, [val_as_list], expr.line) def visit_dict_expr(self, expr: DictExpr) -> Value: """First accepts all keys and values, then makes a dict out of them.""" key_value_pairs = [] for key_expr, value_expr in expr.items: key = self.accept(key_expr) if key_expr is not None else None value = self.accept(value_expr) key_value_pairs.append((key, value)) return self.make_dict(key_value_pairs, expr.line) def visit_set_expr(self, expr: SetExpr) -> Value: return self._visit_display( expr.items, new_set_op, set_add_op, set_update_op, expr.line ) def visit_str_expr(self, expr: StrExpr) -> Value: return self.load_static_unicode(expr.value) # Conditional expressions def process_conditional(self, e: Expression, true: BasicBlock, false: BasicBlock) -> None: if isinstance(e, OpExpr) and e.op in ['and', 'or']: if e.op == 'and': # Short circuit 'and' in a conditional context. 
new = BasicBlock() self.process_conditional(e.left, new, false) self.activate_block(new) self.process_conditional(e.right, true, false) else: # Short circuit 'or' in a conditional context. new = BasicBlock() self.process_conditional(e.left, true, new) self.activate_block(new) self.process_conditional(e.right, true, false) elif isinstance(e, UnaryExpr) and e.op == 'not': self.process_conditional(e.expr, false, true) # Catch-all for arbitrary expressions. else: reg = self.accept(e) self.add_bool_branch(reg, true, false) def visit_basic_comparison(self, op: str, left: Value, right: Value, line: int) -> Value: negate = False if op == 'is not': op, negate = 'is', True elif op == 'not in': op, negate = 'in', True target = self.binary_op(left, right, op, line) if negate: target = self.unary_op(target, 'not', line) return target def visit_comparison_expr(self, e: ComparisonExpr) -> Value: # TODO: Don't produce an expression when used in conditional context # All of the trickiness here is due to support for chained conditionals # (`e1 < e2 > e3`, etc). `e1 < e2 > e3` is approximately equivalent to # `e1 < e2 and e2 > e3` except that `e2` is only evaluated once. expr_type = self.node_type(e) # go(i, prev) generates code for `ei opi e{i+1} op{i+1} ... en`, # assuming that prev contains the value of `ei`. 
        def go(i: int, prev: Value) -> Value:
            # Last comparison in the chain: no short-circuit needed.
            if i == len(e.operators) - 1:
                return self.visit_basic_comparison(
                    e.operators[i], prev, self.accept(e.operands[i + 1]), e.line)

            next = self.accept(e.operands[i + 1])
            return self.shortcircuit_helper(
                'and', expr_type,
                lambda: self.visit_basic_comparison(
                    e.operators[i], prev, next, e.line),
                lambda: go(i + 1, next),
                e.line)

        return go(0, self.accept(e.operands[0]))

    def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None:
        """Branch to true/false on the truthiness of value.

        Uses specialized checks for ints, lists, native classes with
        __bool__, and optionals; falls back to the generic bool op.
        """
        if is_runtime_subtype(value.type, int_rprimitive):
            # Integers are truthy iff non-zero.
            zero = self.add(LoadInt(0))
            value = self.binary_op(value, zero, '!=', value.line)
        elif is_same_type(value.type, list_rprimitive):
            # Lists are truthy iff non-empty.
            length = self.primitive_op(list_len_op, [value], value.line)
            zero = self.add(LoadInt(0))
            value = self.binary_op(length, zero, '!=', value.line)
        elif (isinstance(value.type, RInstance) and value.type.class_ir.is_ext_class
                and value.type.class_ir.has_method('__bool__')):
            # Directly call the __bool__ method on classes that have it.
            value = self.gen_method_call(value, '__bool__', [], bool_rprimitive, value.line)
        else:
            value_type = optional_value_type(value.type)
            if value_type is not None:
                # Optional: first test against None, then (if needed) the payload.
                is_none = self.binary_op(value, self.none_object(), 'is not', value.line)
                branch = Branch(is_none, true, false, Branch.BOOL_EXPR)
                self.add(branch)
                always_truthy = False
                if isinstance(value_type, RInstance):
                    # check whether X.__bool__ is always just the default (object.__bool__)
                    if (not value_type.class_ir.has_method('__bool__')
                            and value_type.class_ir.is_method_final('__bool__')):
                        always_truthy = True

                if not always_truthy:
                    # Optional[X] where X may be falsey and requires a check
                    branch.true = self.new_block()
                    # unbox_or_cast instead of coerce because we want the
                    # type to change even if it is a subtype.
                    remaining = self.unbox_or_cast(value, value_type, value.line)
                    # Recurse with the unwrapped (non-optional) value.
                    self.add_bool_branch(remaining, true, false)
                return
            elif not is_same_type(value.type, bool_rprimitive):
                # Generic fallback: coerce to bool via the runtime.
                value = self.primitive_op(bool_op, [value], value.line)
        self.add(Branch(value, true, false, Branch.BOOL_EXPR))

    def visit_nonlocal_decl(self, o: NonlocalDecl) -> None:
        # Pure declaration -- nothing to generate.
        pass

    def visit_slice_expr(self, expr: SliceExpr) -> Value:
        """Build a slice object; omitted bounds become None."""
        def get_arg(arg: Optional[Expression]) -> Value:
            if arg is None:
                return self.none_object()
            else:
                return self.accept(arg)

        args = [get_arg(expr.begin_index),
                get_arg(expr.end_index),
                get_arg(expr.stride)]
        return self.primitive_op(new_slice_op, args, expr.line)

    def visit_raise_stmt(self, s: RaiseStmt) -> None:
        """Generate IR for 'raise' (bare re-raise or raise of a value)."""
        if s.expr is None:
            # Bare 'raise': re-raise the active exception without a new traceback entry.
            self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO)
            self.add(Unreachable())
            return

        exc = self.accept(s.expr)
        self.primitive_op(raise_exception_op, [exc], s.line)
        self.add(Unreachable())

    def visit_try_except(self,
                         body: GenFunc,
                         handlers: Sequence[
                             Tuple[Optional[Expression], Optional[Expression], GenFunc]],
                         else_body: Optional[GenFunc],
                         line: int) -> None:
        """Generalized try/except/else handling that takes functions to gen the bodies.

        The point of this is to also be able to support with."""
        assert handlers, "try needs except"

        except_entry, exit_block, cleanup_block = BasicBlock(), BasicBlock(), BasicBlock()
        double_except_block = BasicBlock()
        # If there is an else block, jump there after the try, otherwise just leave
        else_block = BasicBlock() if else_body else exit_block

        # Compile the try block with an error handler
        self.error_handlers.append(except_entry)
        self.goto_and_activate(BasicBlock())
        body()
        self.goto(else_block)
        self.error_handlers.pop()

        # The error handler catches the error and then checks it
        # against the except clauses. We compile the error handler
        # itself with an error handler so that it can properly restore
        # the *old* exc_info if an exception occurs.
        # The exception chaining will be done automatically when the
        # exception is raised, based on the exception in exc_info.
        self.error_handlers.append(double_except_block)
        self.activate_block(except_entry)
        # Save the old exc_info so it can be restored when we leave the handler.
        old_exc = self.maybe_spill(self.primitive_op(error_catch_op, [], line))
        # Compile the except blocks with the nonlocal control flow overridden to clear exc_info
        self.nonlocal_control.append(
            ExceptNonlocalControl(self.nonlocal_control[-1], old_exc))

        # Process the bodies
        for type, var, handler_body in handlers:
            next_block = None
            if type:
                # Typed clause: test the live exception against the clause type.
                next_block, body_block = BasicBlock(), BasicBlock()
                matches = self.primitive_op(exc_matches_op, [self.accept(type)], type.line)
                self.add(Branch(matches, body_block, next_block, Branch.BOOL_EXPR))
                self.activate_block(body_block)
            if var:
                # Bind the caught exception value to the 'as' target.
                target = self.get_assignment_target(var)
                self.assign(target, self.primitive_op(get_exc_value_op, [], var.line), var.line)
            handler_body()
            self.goto(cleanup_block)
            if next_block:
                self.activate_block(next_block)

        # Reraise the exception if needed (no clause matched).
        if next_block:
            self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO)
            self.add(Unreachable())

        self.nonlocal_control.pop()
        self.error_handlers.pop()

        # Cleanup for if we leave except through normal control flow:
        # restore the saved exc_info information and continue propagating
        # the exception if it exists.
        self.activate_block(cleanup_block)
        self.primitive_op(restore_exc_info_op, [self.read(old_exc)], line)
        self.goto(exit_block)

        # Cleanup for if we leave except through a raised exception:
        # restore the saved exc_info information and continue propagating
        # the exception.
        self.activate_block(double_except_block)
        self.primitive_op(restore_exc_info_op, [self.read(old_exc)], line)
        self.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO)
        self.add(Unreachable())

        # If present, compile the else body in the obvious way
        if else_body:
            self.activate_block(else_block)
            else_body()
            self.goto(exit_block)

        self.activate_block(exit_block)

    def visit_try_except_stmt(self, t: TryStmt) -> None:
        """Lower a try/except[/else] statement onto visit_try_except."""
        def body() -> None:
            self.accept(t.body)

        # Work around scoping woes
        def make_handler(body: Block) -> GenFunc:
            return lambda: self.accept(body)

        handlers = [(type, var, make_handler(body))
                    for type, var, body in zip(t.types, t.vars, t.handlers)]
        else_body = (lambda: self.accept(t.else_body)) if t.else_body else None
        self.visit_try_except(body, handlers, else_body, t.line)

    def try_finally_try(self, err_handler: BasicBlock, return_entry: BasicBlock,
                        main_entry: BasicBlock, try_body: GenFunc) -> Optional[Register]:
        """Compile the try part of a try/finally; returns the return-value register, if any."""
        # Compile the try block with an error handler
        control = TryFinallyNonlocalControl(return_entry)
        self.error_handlers.append(err_handler)

        self.nonlocal_control.append(control)
        self.goto_and_activate(BasicBlock())
        try_body()
        self.goto(main_entry)
        self.nonlocal_control.pop()
        self.error_handlers.pop()

        return control.ret_reg

    def try_finally_entry_blocks(self,
                                 err_handler: BasicBlock, return_entry: BasicBlock,
                                 main_entry: BasicBlock, finally_block: BasicBlock,
                                 ret_reg: Optional[Register]) -> Value:
        """Set up the three entry paths into a finally block.

        Returns the register holding the saved exception info (error value
        when entered without an active exception).
        """
        old_exc = self.alloc_temp(exc_rtuple)

        # Entry block for non-exceptional flow
        self.activate_block(main_entry)
        if ret_reg:
            # No return happened: mark the return register as undefined.
            self.add(Assign(ret_reg, self.add(LoadErrorValue(self.ret_types[-1]))))
        self.goto(return_entry)

        self.activate_block(return_entry)
        # No exception in flight: mark old_exc as undefined.
        self.add(Assign(old_exc, self.add(LoadErrorValue(exc_rtuple))))
        self.goto(finally_block)

        # Entry block for errors
        self.activate_block(err_handler)
        if ret_reg:
            self.add(Assign(ret_reg, self.add(LoadErrorValue(self.ret_types[-1]))))
        # Catch and save the active exception for the duration of the finally.
        self.add(Assign(old_exc, self.primitive_op(error_catch_op, [], -1)))
        self.goto(finally_block)

        return old_exc

    def try_finally_body(
            self, finally_block: BasicBlock, finally_body: GenFunc,
            ret_reg: Optional[Value], old_exc: Value) -> Tuple[BasicBlock,
                                                               'FinallyNonlocalControl']:
        """Compile the finally body itself, guarded by a cleanup error handler."""
        cleanup_block = BasicBlock()
        # Compile the finally block with the nonlocal control flow overridden to restore exc_info
        self.error_handlers.append(cleanup_block)
        finally_control = FinallyNonlocalControl(
            self.nonlocal_control[-1], ret_reg, old_exc)
        self.nonlocal_control.append(finally_control)
        self.activate_block(finally_block)
        finally_body()
        self.nonlocal_control.pop()

        return cleanup_block, finally_control

    def try_finally_resolve_control(self, cleanup_block: BasicBlock,
                                    finally_control: FinallyNonlocalControl,
                                    old_exc: Value, ret_reg: Optional[Value]) -> BasicBlock:
        """Resolve the control flow out of a finally block.

        This means returning if there was a return, propagating
        exceptions, break/continue (soon), or just continuing on.
        """
        reraise, rest = BasicBlock(), BasicBlock()
        self.add(Branch(old_exc, rest, reraise, Branch.IS_ERROR))

        # Reraise the exception if there was one
        self.activate_block(reraise)
        self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO)
        self.add(Unreachable())
        self.error_handlers.pop()

        # If there was a return, keep returning
        if ret_reg:
            self.activate_block(rest)
            return_block, rest = BasicBlock(), BasicBlock()
            self.add(Branch(ret_reg, rest, return_block, Branch.IS_ERROR))

            self.activate_block(return_block)
            self.nonlocal_control[-1].gen_return(self, ret_reg, -1)

        # TODO: handle break/continue
        self.activate_block(rest)
        out_block = BasicBlock()
        self.goto(out_block)

        # If there was an exception, restore again
        self.activate_block(cleanup_block)
        finally_control.gen_cleanup(self, -1)
        self.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO)
        self.add(Unreachable())

        return out_block

    def visit_try_finally_stmt(self, try_body: GenFunc, finally_body: GenFunc) -> None:
        """Generalized try/finally handling that takes functions to gen the
        bodies. The point of this is to also be able to support with."""
        # Finally is a big pain, because there are so many ways that
        # exits can occur. We emit 10+ basic blocks for every finally!

        err_handler, main_entry, return_entry, finally_block = (
            BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock())

        # Compile the body of the try
        ret_reg = self.try_finally_try(
            err_handler, return_entry, main_entry, try_body)

        # Set up the entry blocks for the finally statement
        old_exc = self.try_finally_entry_blocks(
            err_handler, return_entry, main_entry, finally_block, ret_reg)

        # Compile the body of the finally
        cleanup_block, finally_control = self.try_finally_body(
            finally_block, finally_body, ret_reg, old_exc)

        # Resolve the control flow out of the finally block
        out_block = self.try_finally_resolve_control(
            cleanup_block, finally_control, old_exc, ret_reg)

        self.activate_block(out_block)

    def visit_try_stmt(self, t: TryStmt) -> None:
        # Our compilation strategy for try/except/else/finally is to
        # treat try/except/else and try/finally as separate language
        # constructs that we compile separately. When we have a
        # try/except/else/finally, we treat the try/except/else as the
        # body of a try/finally block.
        if t.finally_body:
            def visit_try_body() -> None:
                if t.handlers:
                    self.visit_try_except_stmt(t)
                else:
                    self.accept(t.body)
            # Bind to a local so the lambda below doesn't capture Optional.
            body = t.finally_body

            self.visit_try_finally_stmt(visit_try_body, lambda: self.accept(body))
        else:
            self.visit_try_except_stmt(t)

    def get_sys_exc_info(self) -> List[Value]:
        """Return the three components of the current sys.exc_info() as values."""
        exc_info = self.primitive_op(get_exc_info_op, [], -1)
        return [self.add(TupleGet(exc_info, i, -1)) for i in range(3)]

    def visit_with(self, expr: Expression, target: Optional[Lvalue],
                   body: GenFunc, line: int) -> None:
        """Generate IR for a single context manager in a with statement."""
        # This is basically a straight transcription of the Python code in PEP 343.
        # I don't actually understand why a bunch of it is the way it is.
        # We could probably optimize the case where the manager is compiled by us,
        # but that is not our common case at all, so.
        mgr_v = self.accept(expr)
        typ = self.primitive_op(type_op, [mgr_v], line)
        # Look up __exit__/__enter__ on the type, per PEP 343.
        exit_ = self.maybe_spill(self.py_get_attr(typ, '__exit__', line))
        value = self.py_call(self.py_get_attr(typ, '__enter__', line), [mgr_v], line)
        mgr = self.maybe_spill(mgr_v)
        # exc tracks whether __exit__ should still be called in the finally.
        exc = self.maybe_spill_assignable(self.primitive_op(true_op, [], -1))

        def try_body() -> None:
            if target:
                self.assign(self.get_assignment_target(target), value, line)
            body()

        def except_body() -> None:
            # __exit__ handles the exception; if it returns false, re-raise.
            self.assign(exc, self.primitive_op(false_op, [], -1), line)
            out_block, reraise_block = BasicBlock(), BasicBlock()
            self.add_bool_branch(self.py_call(self.read(exit_),
                                              [self.read(mgr)] + self.get_sys_exc_info(), line),
                                 out_block, reraise_block)
            self.activate_block(reraise_block)
            self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO)
            self.add(Unreachable())
            self.activate_block(out_block)

        def finally_body() -> None:
            # Normal exit: call __exit__(None, None, None) unless already called.
            out_block, exit_block = BasicBlock(), BasicBlock()
            self.add(Branch(self.read(exc), exit_block, out_block, Branch.BOOL_EXPR))
            self.activate_block(exit_block)
            none = self.none_object()
            self.py_call(self.read(exit_), [self.read(mgr), none, none, none], line)
            self.goto_and_activate(out_block)

        self.visit_try_finally_stmt(
            lambda: self.visit_try_except(try_body, [(None, None, except_body)],
                                          None, line),
            finally_body)

    def visit_with_stmt(self, o: WithStmt) -> None:
        # Generate separate logic for each expr in it, left to right
        def generate(i: int) -> None:
            if i >= len(o.expr):
                self.accept(o.body)
            else:
                self.visit_with(o.expr[i], o.target[i], lambda: generate(i + 1), o.line)

        generate(0)

    def visit_lambda_expr(self, expr: LambdaExpr) -> Value:
        """Compile a lambda as an anonymous nested function and return its callable."""
        typ = get_proper_type(self.types[expr])
        assert isinstance(typ, CallableType)

        runtime_args = []
        for arg, arg_type in zip(expr.arguments, typ.arg_types):
            arg.variable.type = arg_type
            runtime_args.append(
                RuntimeArg(arg.variable.name, self.type_to_rtype(arg_type), arg.kind))
        ret_type = self.type_to_rtype(typ.ret_type)

        fsig = FuncSignature(runtime_args, ret_type)

        fname = \
            '{}{}'.format(LAMBDA_NAME, self.lambda_counter)
        # Each lambda gets a unique synthesized name.
        self.lambda_counter += 1
        func_ir, func_reg = self.gen_func_item(expr, fname, fsig)
        assert func_reg is not None

        self.functions.append(func_ir)
        return func_reg

    def visit_pass_stmt(self, o: PassStmt) -> None:
        pass

    def visit_global_decl(self, o: GlobalDecl) -> None:
        # Pure declaration -- no runtime effect
        pass

    def visit_assert_stmt(self, a: AssertStmt) -> None:
        """Generate IR for 'assert' (skipped entirely under --strip-asserts)."""
        if self.options.strip_asserts:
            return
        cond = self.accept(a.expr)
        ok_block, error_block = BasicBlock(), BasicBlock()
        self.add_bool_branch(cond, ok_block, error_block)
        self.activate_block(error_block)
        if a.msg is None:
            # Special case (for simpler generated code)
            self.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, None, a.line))
        elif isinstance(a.msg, StrExpr):
            # Another special case
            self.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, a.msg.value,
                                        a.line))
        else:
            # The general case -- explicitly construct an exception instance
            message = self.accept(a.msg)
            exc_type = self.load_module_attr_by_fullname('builtins.AssertionError', a.line)
            exc = self.py_call(exc_type, [message], a.line)
            self.primitive_op(raise_exception_op, [exc], a.line)
        self.add(Unreachable())
        self.activate_block(ok_block)

    def translate_list_comprehension(self, gen: GeneratorExpr) -> Value:
        """Compile a generator/list comprehension body into a new list value."""
        list_ops = self.primitive_op(new_list_op, [], gen.line)
        loop_params = list(zip(gen.indices, gen.sequences, gen.condlists))

        def gen_inner_stmts() -> None:
            e = self.accept(gen.left_expr)
            self.primitive_op(list_append_op, [list_ops, e], gen.line)

        self.comprehension_helper(loop_params, gen_inner_stmts, gen.line)
        return list_ops

    def visit_list_comprehension(self, o: ListComprehension) -> Value:
        return self.translate_list_comprehension(o.generator)

    def visit_set_comprehension(self, o: SetComprehension) -> Value:
        """Compile a set comprehension by accumulating into a fresh set."""
        gen = o.generator
        set_ops = self.primitive_op(new_set_op, [], o.line)
        loop_params = list(zip(gen.indices, gen.sequences, gen.condlists))

        def gen_inner_stmts() -> None:
            e = \
                self.accept(gen.left_expr)
            self.primitive_op(set_add_op, [set_ops, e], o.line)

        self.comprehension_helper(loop_params, gen_inner_stmts, o.line)
        return set_ops

    def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> Value:
        """Compile a dict comprehension by setting items into a fresh dict."""
        d = self.primitive_op(new_dict_op, [], o.line)
        loop_params = list(zip(o.indices, o.sequences, o.condlists))

        def gen_inner_stmts() -> None:
            k = self.accept(o.key)
            v = self.accept(o.value)
            self.primitive_op(dict_set_item_op, [d, k, v], o.line)

        self.comprehension_helper(loop_params, gen_inner_stmts, o.line)
        return d

    def visit_generator_expr(self, o: GeneratorExpr) -> Value:
        # Generator expressions are not compiled lazily; eagerly build a
        # list and return an iterator over it (with a warning).
        self.warning('Treating generator comprehension as list', o.line)
        return self.primitive_op(
            iter_op, [self.translate_list_comprehension(o)], o.line)

    def comprehension_helper(self,
                             loop_params: List[Tuple[Lvalue, Expression, List[Expression]]],
                             gen_inner_stmts: Callable[[], None],
                             line: int) -> None:
        """Helper function for list comprehensions.

        "loop_params" is a list of (index, expr, [conditions]) tuples defining nested loops:
            - "index" is the Lvalue indexing that loop;
            - "expr" is the expression for the object to be iterated over;
            - "conditions" is a list of conditions, evaluated in order with short-circuiting,
                that must all be true for the loop body to be executed
        "gen_inner_stmts" is a function to generate the IR for the body of the innermost loop
        """
        def handle_loop(loop_params: List[Tuple[Lvalue, Expression, List[Expression]]]) -> None:
            """Generate IR for a loop.

            Given a list of (index, expression, [conditions]) tuples, generate IR
            for the nested loops the list defines.
            """
            index, expr, conds = loop_params[0]
            self.for_loop_helper(index, expr,
                                 lambda: loop_contents(conds, loop_params[1:]),
                                 None, line)

        def loop_contents(
                conds: List[Expression],
                remaining_loop_params: List[Tuple[Lvalue, Expression, List[Expression]]],
        ) -> None:
            """Generate the body of the loop.

            "conds" is a list of conditions to be evaluated (in order, with
            short circuiting) to gate the body of the loop.
            "remaining_loop_params" is the parameters for any further nested
            loops; if it's empty we'll instead evaluate the "gen_inner_stmts"
            function.
            """
            # Check conditions, in order, short circuiting them.
            for cond in conds:
                cond_val = self.accept(cond)
                cont_block, rest_block = BasicBlock(), BasicBlock()
                # If the condition is true we'll skip the continue.
                self.add_bool_branch(cond_val, rest_block, cont_block)
                self.activate_block(cont_block)
                self.nonlocal_control[-1].gen_continue(self, cond.line)
                self.goto_and_activate(rest_block)

            if remaining_loop_params:
                # There's another nested level, so the body of this loop is another loop.
                return handle_loop(remaining_loop_params)
            else:
                # We finally reached the actual body of the generator.
                # Generate the IR for the inner loop body.
                gen_inner_stmts()

        handle_loop(loop_params)

    def visit_decorator(self, dec: Decorator) -> None:
        """Compile a decorated function and apply its decorators at runtime."""
        func_ir, func_reg = self.gen_func_item(dec.func, dec.func.name,
                                               self.mapper.fdef_to_sig(dec.func))

        if dec.func in self.nested_fitems:
            # Nested function: assign the decorated callable to its local target.
            assert func_reg is not None
            decorated_func = self.load_decorated_func(dec.func, func_reg)
            self.assign(self.get_func_target(dec.func), decorated_func, dec.func.line)
            func_reg = decorated_func
        else:
            # Obtain the function name in order to construct the name of the helper function.
            name = dec.func.fullname.split('.')[-1]
            helper_name = decorator_helper_name(name)

            # Load the callable object representing the non-decorated function, and decorate it.
            orig_func = self.load_global_str(helper_name, dec.line)
            decorated_func = self.load_decorated_func(dec.func, orig_func)

            # Set the callable object representing the decorated function as a global.
            self.primitive_op(dict_set_item_op,
                              [self.load_globals_dict(),
                               self.load_static_unicode(dec.func.name), decorated_func],
                              decorated_func.line)

        self.functions.append(func_ir)

    def visit_del_stmt(self, o: DelStmt) -> None:
        self.visit_del_item(self.get_assignment_target(o.expr), o.line)

    def visit_del_item(self, target: AssignmentTarget, line: int) -> None:
        """Generate IR to delete one assignment target (recursing into tuples)."""
        if isinstance(target, AssignmentTargetIndex):
            # del obj[index] -> obj.__delitem__(index)
            self.translate_special_method_call(
                target.base,
                '__delitem__',
                [target.index],
                result_type=None,
                line=line
            )
        elif isinstance(target, AssignmentTargetAttr):
            # del obj.attr -> PyObject_DelAttr
            key = self.load_static_unicode(target.attr)
            self.add(PrimitiveOp([target.obj, key], py_delattr_op, line))
        elif isinstance(target, AssignmentTargetRegister):
            # Delete a local by assigning an error value to it, which will
            # prompt the insertion of uninit checks.
            self.add(Assign(target.register,
                            self.add(LoadErrorValue(target.type, undefines=True))))
        elif isinstance(target, AssignmentTargetTuple):
            for subtarget in target.items:
                self.visit_del_item(subtarget, line)

    def visit_super_expr(self, o: SuperExpr) -> Value:
        """Compile super().name via a runtime call to builtins.super."""
        # self.warning('can not optimize super() expression', o.line)
        sup_val = self.load_module_attr_by_fullname('builtins.super', o.line)
        if o.call.args:
            # Explicit super(A, b): evaluate the given arguments.
            args = [self.accept(arg) for arg in o.call.args]
        else:
            # Zero-argument super(): reconstruct (type, self) implicitly.
            assert o.info is not None
            typ = self.load_native_type_object(o.info.fullname)
            ir = self.mapper.type_to_ir[o.info]
            iter_env = iter(self.environment.indexes)
            vself = next(iter_env)  # grab first argument
            if self.fn_info.is_generator:
                # grab sixth argument (see comment in translate_super_method_call)
                self_targ = list(self.environment.symtable.values())[6]
                vself = self.read(self_targ, self.fn_info.fitem.line)
            elif not ir.is_ext_class:
                vself = next(iter_env)  # second argument is self if non_extension class
            args = [typ, vself]
        res = self.py_call(sup_val, args, o.line)
        return self.py_get_attr(res, o.name, o.line)

    def visit_yield_expr(self, expr: YieldExpr) -> Value:
        if expr.expr:
            retval = self.accept(expr.expr)
        else:
            retval = self.none()
        return self.emit_yield(retval, expr.line)

    def emit_yield(self, val: Value, line: int) -> Value:
        """Emit the IR for a yield point; returns the value sent into the generator."""
        retval = self.coerce(val, self.ret_types[-1], line)

        cls = self.fn_info.generator_class
        # Create a new block for the instructions immediately following the yield expression, and
        # set the next label so that the next time '__next__' is called on the generator object,
        # the function continues at the new block.
        next_block = BasicBlock()
        next_label = len(cls.blocks)
        cls.blocks.append(next_block)
        self.assign(cls.next_label_target, self.add(LoadInt(next_label)), line)
        self.add(Return(retval))
        self.activate_block(next_block)

        self.add_raise_exception_blocks_to_generator_class(line)

        assert cls.send_arg_reg is not None
        return cls.send_arg_reg

    def handle_yield_from_and_await(self, o: Union[YieldFromExpr, AwaitExpr]) -> Value:
        """Generate IR for 'yield from' / 'await' delegation."""
        # This is basically an implementation of the code in PEP 380.

        # TODO: do we want to use the right types here?
        result = self.alloc_temp(object_rprimitive)
        to_yield_reg = self.alloc_temp(object_rprimitive)
        received_reg = self.alloc_temp(object_rprimitive)

        if isinstance(o, YieldFromExpr):
            iter_val = self.primitive_op(iter_op, [self.accept(o.expr)], o.line)
        else:
            # await: wrap via the coroutine protocol instead of plain iter().
            iter_val = self.primitive_op(coro_op, [self.accept(o.expr)], o.line)
        iter_reg = self.maybe_spill_assignable(iter_val)

        stop_block, main_block, done_block = BasicBlock(), BasicBlock(), BasicBlock()
        _y_init = self.primitive_op(next_raw_op, [self.read(iter_reg)], o.line)
        self.add(Branch(_y_init, stop_block, main_block, Branch.IS_ERROR))

        # Try extracting a return value from a StopIteration and return it.
        # If it wasn't, this reraises the exception.
        self.activate_block(stop_block)
        self.assign(result, self.primitive_op(check_stop_op, [], o.line), o.line)
        self.goto(done_block)

        self.activate_block(main_block)
        self.assign(to_yield_reg, _y_init, o.line)

        # OK Now the main loop!
        loop_block = BasicBlock()
        self.goto_and_activate(loop_block)

        def try_body() -> None:
            self.assign(received_reg, self.emit_yield(self.read(to_yield_reg), o.line), o.line)

        def except_body() -> None:
            # The body of the except is all implemented in a C function to
            # reduce how much code we need to generate. It returns a value
            # indicating whether to break or yield (or raise an exception).
            res = self.primitive_op(yield_from_except_op, [self.read(iter_reg)], o.line)
            to_stop = self.add(TupleGet(res, 0, o.line))
            val = self.add(TupleGet(res, 1, o.line))

            ok, stop = BasicBlock(), BasicBlock()
            self.add(Branch(to_stop, stop, ok, Branch.BOOL_EXPR))

            # The exception got swallowed. Continue, yielding the returned value
            self.activate_block(ok)
            self.assign(to_yield_reg, val, o.line)
            self.nonlocal_control[-1].gen_continue(self, o.line)

            # The exception was a StopIteration. Stop iterating.
            self.activate_block(stop)
            self.assign(result, val, o.line)
            self.nonlocal_control[-1].gen_break(self, o.line)

        def else_body() -> None:
            # Do a next() or a .send(). It will return NULL on exception
            # but it won't automatically propagate.
            _y = self.primitive_op(send_op, [self.read(iter_reg), self.read(received_reg)],
                                   o.line)
            ok, stop = BasicBlock(), BasicBlock()
            self.add(Branch(_y, stop, ok, Branch.IS_ERROR))

            # Everything's fine. Yield it.
            self.activate_block(ok)
            self.assign(to_yield_reg, _y, o.line)
            self.nonlocal_control[-1].gen_continue(self, o.line)

            # Try extracting a return value from a StopIteration and return it.
            # If it wasn't, this reraises the exception.
            self.activate_block(stop)
            self.assign(result, self.primitive_op(check_stop_op, [], o.line), o.line)
            self.nonlocal_control[-1].gen_break(self, o.line)

        self.push_loop_stack(loop_block, done_block)
        self.visit_try_except(try_body, [(None, None, except_body)], else_body, o.line)
        self.pop_loop_stack()

        self.goto_and_activate(done_block)
        return self.read(result)

    def visit_yield_from_expr(self, o: YieldFromExpr) -> Value:
        return self.handle_yield_from_and_await(o)

    def visit_ellipsis(self, o: EllipsisExpr) -> Value:
        return self.primitive_op(ellipsis_op, [], o.line)

    # Builtin function special cases

    @specialize_function('builtins.globals')
    def translate_globals(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]:
        # Special case builtins.globals
        if len(expr.args) == 0:
            return self.load_globals_dict()
        return None

    @specialize_function('builtins.len')
    def translate_len(
            self, expr: CallExpr, callee: RefExpr) -> Optional[Value]:
        # Special case builtins.len
        if (len(expr.args) == 1
                and expr.arg_kinds == [ARG_POS]):
            expr_rtype = self.node_type(expr.args[0])
            if isinstance(expr_rtype, RTuple):
                # len() of fixed-length tuple can be trivially determined statically,
                # though we still need to evaluate it.
                self.accept(expr.args[0])
                return self.add(LoadInt(len(expr_rtype.types)))
        return None

    # Special cases for things that consume iterators where we know we
    # can safely compile a generator into a list.
    @specialize_function('builtins.tuple')
    @specialize_function('builtins.set')
    @specialize_function('builtins.dict')
    @specialize_function('builtins.sum')
    @specialize_function('builtins.min')
    @specialize_function('builtins.max')
    @specialize_function('builtins.sorted')
    @specialize_function('collections.OrderedDict')
    @specialize_function('join', str_rprimitive)
    @specialize_function('extend', list_rprimitive)
    @specialize_function('update', dict_rprimitive)
    @specialize_function('update', set_rprimitive)
    def translate_safe_generator_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]:
        """Replace a generator-expression first argument with an eager list.

        Safe because these callees fully consume the iterable.
        """
        if (len(expr.args) > 0
                and expr.arg_kinds[0] == ARG_POS
                and isinstance(expr.args[0], GeneratorExpr)):
            if isinstance(callee, MemberExpr):
                return self.gen_method_call(
                    self.accept(callee.expr), callee.name,
                    ([self.translate_list_comprehension(expr.args[0])]
                     + [self.accept(arg) for arg in expr.args[1:]]),
                    self.node_type(expr), expr.line, expr.arg_kinds, expr.arg_names)
            else:
                return self.call_refexpr_with_args(
                    expr, callee,
                    ([self.translate_list_comprehension(expr.args[0])]
                     + [self.accept(arg) for arg in expr.args[1:]]))
        return None

    @specialize_function('builtins.any')
    def translate_any_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]:
        # any(genexpr): start at False, flip to True on first truthy element.
        if (len(expr.args) == 1
                and expr.arg_kinds == [ARG_POS]
                and isinstance(expr.args[0], GeneratorExpr)):
            return self.any_all_helper(expr.args[0], false_op, lambda x: x, true_op)
        return None

    @specialize_function('builtins.all')
    def translate_all_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]:
        # all(genexpr): start at True, flip to False on first falsy element.
        if (len(expr.args) == 1
                and expr.arg_kinds == [ARG_POS]
                and isinstance(expr.args[0], GeneratorExpr)):
            return self.any_all_helper(
                expr.args[0], true_op,
                lambda x: self.unary_op(x, 'not', expr.line),
                false_op)
        return None

    # Special case for 'dataclasses.field' and 'attr.Factory' function calls
    # because the results of such calls are typechecked by mypy using the types
    # of the arguments to their respective functions, resulting in attempted
    # coercions by mypyc that throw a runtime error.
    @specialize_function('dataclasses.field')
    @specialize_function('attr.Factory')
    def translate_dataclasses_field_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]:
        # Force the expression's type to Any so no coercion is attempted.
        self.types[expr] = AnyType(TypeOfAny.from_error)
        return None

    def any_all_helper(self, gen: GeneratorExpr,
                       initial_value_op: OpDescription,
                       modify: Callable[[Value], Value],
                       new_value_op: OpDescription) -> Value:
        """Shared short-circuiting loop for any()/all() over a generator expression."""
        retval = self.alloc_temp(bool_rprimitive)
        self.assign(retval, self.primitive_op(initial_value_op, [], -1), -1)
        loop_params = list(zip(gen.indices, gen.sequences, gen.condlists))
        true_block, false_block, exit_block = BasicBlock(), BasicBlock(), BasicBlock()

        def gen_inner_stmts() -> None:
            comparison = modify(self.accept(gen.left_expr))
            self.add_bool_branch(comparison, true_block, false_block)
            self.activate_block(true_block)
            # Deciding element found: set the result and bail out of the loop.
            self.assign(retval, self.primitive_op(new_value_op, [], -1), -1)
            self.goto(exit_block)
            self.activate_block(false_block)

        self.comprehension_helper(loop_params, gen_inner_stmts, gen.line)
        self.goto_and_activate(exit_block)

        return retval

    # Special case for calling next() on a generator expression, an
    # idiom that shows up some in mypy.
    #
    # For example, next(x for x in l if x.id == 12, None) will
    # generate code that searches l for an element where x.id == 12
    # and produce the first such object, or None if no such element
    # exists.
    @specialize_function('builtins.next')
    def translate_next_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]:
        if not (expr.arg_kinds in ([ARG_POS], [ARG_POS, ARG_POS])
                and isinstance(expr.args[0], GeneratorExpr)):
            return None

        gen = expr.args[0]

        retval = self.alloc_temp(self.node_type(expr))
        default_val = None
        if len(expr.args) > 1:
            default_val = self.accept(expr.args[1])

        exit_block = BasicBlock()

        def gen_inner_stmts() -> None:
            # next takes the first element of the generator, so if
            # something gets produced, we are done.
            self.assign(retval, self.accept(gen.left_expr), gen.left_expr.line)
            self.goto(exit_block)

        loop_params = list(zip(gen.indices, gen.sequences, gen.condlists))
        self.comprehension_helper(loop_params, gen_inner_stmts, gen.line)

        # Now we need the case for when nothing got hit. If there was
        # a default value, we produce it, and otherwise we raise
        # StopIteration.
        if default_val:
            self.assign(retval, default_val, gen.left_expr.line)
            self.goto(exit_block)
        else:
            self.add(RaiseStandardError(RaiseStandardError.STOP_ITERATION, None, expr.line))
            self.add(Unreachable())

        self.activate_block(exit_block)
        return retval

    @specialize_function('builtins.isinstance')
    def translate_isinstance(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]:
        # Special case builtins.isinstance: when all class arguments are
        # native classes, use the fast native isinstance helper.
        if (len(expr.args) == 2
                and expr.arg_kinds == [ARG_POS, ARG_POS]
                and isinstance(expr.args[1], (RefExpr, TupleExpr))):
            irs = self.flatten_classes(expr.args[1])
            if irs is not None:
                return self.isinstance_helper(self.accept(expr.args[0]), irs, expr.line)
        return None

    def flatten_classes(self, arg: Union[RefExpr, TupleExpr]) -> Optional[List[ClassIR]]:
        """Flatten classes in isinstance(obj, (A, (B, C))).

        If at least one item is not a reference to a native class, return None.
        """
        if isinstance(arg, RefExpr):
            # Single name: must resolve to a native (this-module) class.
            if isinstance(arg.node, TypeInfo) and self.is_native_module_ref_expr(arg):
                ir = self.mapper.type_to_ir.get(arg.node)
                if ir:
                    return [ir]
            return None
        else:
            # Tuple: recursively flatten each element.
            res = []  # type: List[ClassIR]
            for item in arg.items:
                if isinstance(item, (RefExpr, TupleExpr)):
                    item_part = self.flatten_classes(item)
                    if item_part is None:
                        return None
                    res.extend(item_part)
                else:
                    return None
            return res

    def visit_await_expr(self, o: AwaitExpr) -> Value:
        return self.handle_yield_from_and_await(o)

    # Unimplemented constructs
    def visit_assignment_expr(self, o: AssignmentExpr) -> Value:
        self.bail("I Am The Walrus (unimplemented)", o.line)

    # Unimplemented constructs that shouldn't come up because they are py2 only
    def visit_backquote_expr(self, o: BackquoteExpr) -> Value:
        self.bail("Python 2 features are unsupported", o.line)

    def visit_exec_stmt(self, o: ExecStmt) -> None:
        self.bail("Python 2 features are unsupported", o.line)

    def visit_print_stmt(self, o: PrintStmt) -> None:
        self.bail("Python 2 features are unsupported", o.line)

    def visit_unicode_expr(self, o: UnicodeExpr) -> Value:
        self.bail("Python 2 features are unsupported", o.line)

    # Constructs that shouldn't ever show up
    def visit_enum_call_expr(self, o: EnumCallExpr) -> Value:
        assert False, "can't compile analysis-only expressions"

    def visit__promote_expr(self, o: PromoteExpr) -> Value:
        assert False, "can't compile analysis-only expressions"

    def visit_namedtuple_expr(self, o: NamedTupleExpr) -> Value:
        assert False, "can't compile analysis-only expressions"

    def visit_newtype_expr(self, o: NewTypeExpr) -> Value:
        assert False, "can't compile analysis-only expressions"

    def visit_temp_node(self, o: TempNode) -> Value:
        assert False, "can't compile analysis-only expressions"

    def visit_type_alias_expr(self, o: TypeAliasExpr) -> Value:
        assert False, "can't compile analysis-only expressions"

    def visit_type_application(self, o: TypeApplication) -> Value:
        assert False, "can't compile analysis-only expressions"

    def \
def visit_typeddict_expr(self, o: TypedDictExpr) -> Value:
    assert False, "can't compile analysis-only expressions"

def visit_reveal_expr(self, o: RevealExpr) -> Value:
    assert False, "can't compile analysis-only expressions"

def visit_var(self, o: Var) -> None:
    assert False, "can't compile Var; should have been handled already?"

def visit_cast_expr(self, o: CastExpr) -> Value:
    assert False, "CastExpr should have been handled in CallExpr"

def visit_star_expr(self, o: StarExpr) -> Value:
    assert False, "should have been handled in Tuple/List/Set/DictExpr or CallExpr"

# Helpers

def enter(self, fn_info: FuncInfo) -> None:
    """Push a fresh per-function compilation context (environment, blocks, etc.)."""
    self.environment = Environment(fn_info.name)
    self.environments.append(self.environment)
    self.fn_info = fn_info
    self.fn_infos.append(self.fn_info)
    self.ret_types.append(none_rprimitive)
    self.error_handlers.append(None)
    if fn_info.is_generator:
        self.nonlocal_control.append(GeneratorNonlocalControl())
    else:
        self.nonlocal_control.append(BaseNonlocalControl())
    self.blocks.append([])
    self.new_block()

def activate_block(self, block: BasicBlock) -> None:
    # A new block may only start once the previous one ended in a control op.
    if self.blocks[-1]:
        assert isinstance(self.blocks[-1][-1].ops[-1], ControlOp)
    block.error_handler = self.error_handlers[-1]
    self.blocks[-1].append(block)

def goto_and_activate(self, block: BasicBlock) -> None:
    self.goto(block)
    self.activate_block(block)

def new_block(self) -> BasicBlock:
    block = BasicBlock()
    self.activate_block(block)
    return block

def goto_new_block(self) -> BasicBlock:
    block = BasicBlock()
    self.goto_and_activate(block)
    return block

def leave(self) -> Tuple[List[BasicBlock], Environment, RType, FuncInfo]:
    """Pop the current per-function context and restore the enclosing one."""
    blocks = self.blocks.pop()
    env = self.environments.pop()
    ret_type = self.ret_types.pop()
    fn_info = self.fn_infos.pop()
    self.error_handlers.pop()
    self.nonlocal_control.pop()
    self.environment = self.environments[-1]
    self.fn_info = self.fn_infos[-1]
    return blocks, env, ret_type, fn_info

def add(self, op: Op) -> Value:
    """Append an op to the active block and register it in the environment."""
    if self.blocks[-1][-1].ops:
        assert not isinstance(self.blocks[-1][-1].ops[-1], ControlOp), (
            "Can't add to finished block")

    self.blocks[-1][-1].ops.append(op)
    if isinstance(op, RegisterOp):
        self.environment.add_op(op)
    return op
def goto(self, target: BasicBlock) -> None:
    # Emit a Goto only if the active block is not already terminated.
    if not self.blocks[-1][-1].ops or not isinstance(self.blocks[-1][-1].ops[-1], ControlOp):
        self.add(Goto(target))

def primitive_op(self, desc: OpDescription, args: List[Value], line: int) -> Value:
    """Add a primitive op, coercing each argument to its formal type first."""
    assert desc.result_type is not None
    coerced = []
    for i, arg in enumerate(args):
        formal_type = self.op_arg_type(desc, i)
        arg = self.coerce(arg, formal_type, line)
        coerced.append(arg)
    target = self.add(PrimitiveOp(coerced, desc, line))
    return target

def op_arg_type(self, desc: OpDescription, n: int) -> RType:
    # For var-arg ops the last declared type covers all trailing arguments.
    if n >= len(desc.arg_types):
        assert desc.is_var_arg
        return desc.arg_types[-1]
    return desc.arg_types[n]

@overload
def accept(self, node: Expression) -> Value: ...

@overload
def accept(self, node: Statement) -> None: ...

def accept(self, node: Union[Statement, Expression]) -> Optional[Value]:
    """Compile a node; expressions yield a Value, statements yield None."""
    with self.catch_errors(node.line):
        if isinstance(node, Expression):
            try:
                res = node.accept(self)
                res = self.coerce(res, self.node_type(node), node.line)
            # If we hit an error during compilation, we want to
            # keep trying, so we can produce more error
            # messages. Generate a temp of the right type to keep
            # from causing more downstream trouble.
            except UnsupportedException:
                res = self.alloc_temp(self.node_type(node))
            return res
        else:
            try:
                node.accept(self)
            except UnsupportedException:
                pass
            return None
def alloc_temp(self, type: RType) -> Register:
    return self.environment.add_temp(type)

def type_to_rtype(self, typ: Optional[Type]) -> RType:
    return self.mapper.type_to_rtype(typ)

def node_type(self, node: Expression) -> RType:
    """Return the IR type inferred for an expression node."""
    if isinstance(node, IntExpr):
        # TODO: Don't special case IntExpr
        return int_rprimitive
    if node not in self.types:
        return object_rprimitive
    mypy_type = self.types[node]
    return self.type_to_rtype(mypy_type)

def box(self, src: Value) -> Value:
    if src.type.is_unboxed:
        return self.add(Box(src))
    else:
        return src

def unbox_or_cast(self, src: Value, target_type: RType, line: int) -> Value:
    if target_type.is_unboxed:
        return self.add(Unbox(src, target_type, line))
    else:
        return self.add(Cast(src, target_type, line))

def box_expr(self, expr: Expression) -> Value:
    return self.box(self.accept(expr))

def make_dict(self, key_value_pairs: Sequence[DictEntry], line: int) -> Value:
    """Build a dict from (key, value) pairs; a None key means '**value' unpacking."""
    result = None  # type: Union[Value, None]
    initial_items = []  # type: List[Value]
    for key, value in key_value_pairs:
        if key is not None:
            # key:value
            if result is None:
                # Still batching plain items for a single new_dict_op call.
                initial_items.extend((key, value))
                continue
            self.translate_special_method_call(
                result, '__setitem__', [key, value], result_type=None, line=line)
        else:
            # **value
            if result is None:
                result = self.primitive_op(new_dict_op, initial_items, line)
            self.primitive_op(
                dict_update_in_display_op, [result, value], line=line)

    if result is None:
        result = self.primitive_op(new_dict_op, initial_items, line)

    return result

def none(self) -> Value:
    return self.add(PrimitiveOp([], none_op, line=-1))

def none_object(self) -> Value:
    return self.add(PrimitiveOp([], none_object_op, line=-1))

def load_outer_env(self, base: Value, outer_env: Environment) -> Value:
    """Loads the environment class for a given base into a register.

    Additionally, iterates through all of the SymbolNode and AssignmentTarget
    instances of the environment at the given index's symtable, and adds those
    instances to the environment of the current environment. This is done so
    that the current environment can access outer environment variables without
    having to reload all of the environment registers.

    Returns the register where the environment class was loaded.
    """
    env = self.add(GetAttr(base, ENV_ATTR_NAME, self.fn_info.fitem.line))
    assert isinstance(env.type, RInstance), '{} must be of type RInstance'.format(env)

    for symbol, target in outer_env.symtable.items():
        env.type.class_ir.attributes[symbol.name] = target.type
        symbol_target = AssignmentTargetAttr(env, symbol.name)
        self.environment.add_target(symbol, symbol_target)

    return env
def load_outer_envs(self, base: ImplicitClass) -> None:
    """Chain-load every enclosing environment class into registers."""
    index = len(self.environments) - 2

    # Load the first outer environment. This one is special because it gets saved in the
    # FuncInfo instance's prev_env_reg field.
    if index > 1:
        # outer_env = self.fn_infos[index].environment
        outer_env = self.environments[index]
        if isinstance(base, GeneratorClass):
            base.prev_env_reg = self.load_outer_env(base.curr_env_reg, outer_env)
        else:
            base.prev_env_reg = self.load_outer_env(base.self_reg, outer_env)
        env_reg = base.prev_env_reg
        index -= 1

    # Load the remaining outer environments into registers.
    while index > 1:
        # outer_env = self.fn_infos[index].environment
        outer_env = self.environments[index]
        env_reg = self.load_outer_env(env_reg, outer_env)
        index -= 1

def load_env_registers(self) -> None:
    """Loads the registers for the current FuncItem being visited.

    Adds the arguments of the FuncItem to the environment. If the FuncItem is
    nested inside of another function, then this also loads all of the outer
    environments of the FuncItem into registers so that they can be used when
    accessing free variables.
    """
    self.add_args_to_env(local=True)

    fn_info = self.fn_info
    fitem = fn_info.fitem
    if fn_info.is_nested:
        self.load_outer_envs(fn_info.callable_class)
        # If this is a FuncDef, then make sure to load the FuncDef into its own environment
        # class so that the function can be called recursively.
        if isinstance(fitem, FuncDef):
            self.setup_func_for_recursive_call(fitem, fn_info.callable_class)
""" self.add_args_to_env(local=True) fn_info = self.fn_info fitem = fn_info.fitem if fn_info.is_nested: self.load_outer_envs(fn_info.callable_class) # If this is a FuncDef, then make sure to load the FuncDef into its own environment # class so that the function can be called recursively. if isinstance(fitem, FuncDef): self.setup_func_for_recursive_call(fitem, fn_info.callable_class) def add_var_to_env_class(self, var: SymbolNode, rtype: RType, base: Union[FuncInfo, ImplicitClass], reassign: bool = False) -> AssignmentTarget: # First, define the variable name as an attribute of the environment class, and then # construct a target for that attribute. self.fn_info.env_class.attributes[var.name] = rtype attr_target = AssignmentTargetAttr(base.curr_env_reg, var.name) if reassign: # Read the local definition of the variable, and set the corresponding attribute of # the environment class' variable to be that value. reg = self.read(self.environment.lookup(var), self.fn_info.fitem.line) self.add(SetAttr(base.curr_env_reg, var.name, reg, self.fn_info.fitem.line)) # Override the local definition of the variable to instead point at the variable in # the environment class. return self.environment.add_target(var, attr_target) def setup_func_for_recursive_call(self, fdef: FuncDef, base: ImplicitClass) -> None: """ Adds the instance of the callable class representing the given FuncDef to a register in the environment so that the function can be called recursively. Note that this needs to be done only for nested functions. """ # First, set the attribute of the environment class so that GetAttr can be called on it. prev_env = self.fn_infos[-2].env_class prev_env.attributes[fdef.name] = self.type_to_rtype(fdef.type) if isinstance(base, GeneratorClass): # If we are dealing with a generator class, then we need to first get the register # holding the current environment class, and load the previous environment class from # there. 
def setup_env_for_generator_class(self) -> None:
    """Populates the environment for a generator class."""
    fitem = self.fn_info.fitem
    cls = self.fn_info.generator_class
    self_target = self.add_self_to_env(cls.ir)

    # Add the type, value, and traceback variables to the environment.
    exc_type = self.environment.add_local(Var('type'), object_rprimitive, is_arg=True)
    exc_val = self.environment.add_local(Var('value'), object_rprimitive, is_arg=True)
    exc_tb = self.environment.add_local(Var('traceback'), object_rprimitive, is_arg=True)
    # TODO: Use the right type here instead of object?
    exc_arg = self.environment.add_local(Var('arg'), object_rprimitive, is_arg=True)

    cls.exc_regs = (exc_type, exc_val, exc_tb)
    cls.send_arg_reg = exc_arg

    cls.self_reg = self.read(self_target, fitem.line)
    cls.curr_env_reg = self.load_outer_env(cls.self_reg, self.environment)

    # Define a variable representing the label to go to the next time the '__next__' function
    # of the generator is called, and add it as an attribute to the environment class.
    cls.next_label_target = self.add_var_to_env_class(Var(NEXT_LABEL_ATTR_NAME),
                                                      int_rprimitive,
                                                      cls,
                                                      reassign=False)

    # Add arguments from the original generator function to the generator class' environment.
    self.add_args_to_env(local=False, base=cls, reassign=False)

    # Set the next label register for the generator class.
    cls.next_label_reg = self.read(cls.next_label_target, fitem.line)
def add_args_to_env(self,
                    local: bool = True,
                    base: Optional[Union[FuncInfo, ImplicitClass]] = None,
                    reassign: bool = True) -> None:
    """Add the current function's arguments to its environment.

    With local=True the arguments become plain local registers; otherwise any
    argument that is a free variable (or any argument of a generator) is
    promoted onto the environment class given by 'base'.
    """
    fn_info = self.fn_info
    if local:
        for arg in fn_info.fitem.arguments:
            rtype = self.type_to_rtype(arg.variable.type)
            self.environment.add_local_reg(arg.variable, rtype, is_arg=True)
    else:
        for arg in fn_info.fitem.arguments:
            if self.is_free_variable(arg.variable) or fn_info.is_generator:
                rtype = self.type_to_rtype(arg.variable.type)
                assert base is not None, 'base cannot be None for adding nonlocal args'
                self.add_var_to_env_class(arg.variable, rtype, base, reassign=reassign)

def gen_func_ns(self) -> str:
    """Generates a namespace for a nested function using its outer function names."""
    # Skip the synthetic '<top level>' FuncInfo so module-level scope does not
    # leak into the namespace. (The literal appears to have been stripped by a
    # tag-eating extraction in the previous revision, leaving a redundant
    # comparison against ''.)
    return '_'.join(info.name + ('' if not info.class_name else '_' + info.class_name)
                    for info in self.fn_infos
                    if info.name and info.name != '<top level>')

def setup_callable_class(self) -> None:
    """Generates a callable class representing a nested function or a function within a
    non-extension class and sets up the 'self' variable for that class.

    This takes the most recently visited function and returns a ClassIR to represent that
    function. Each callable class contains an environment attribute which points to another
    ClassIR representing the environment class where some of its variables can be accessed.

    Note that its '__call__' method is not yet implemented, and is implemented in the
    add_call_to_callable_class function.

    Returns a newly constructed ClassIR representing the callable class for the nested
    function.
    """
    # Check to see that the name has not already been taken. If so, rename the class. We allow
    # multiple uses of the same function name because this is valid in if-else blocks. Example:
    #     if True:
    #         def foo():           ---->    foo_obj()
    #             return True
    #     else:
    #         def foo():           ---->    foo_obj_0()
    #             return False
    name = base_name = '{}_obj'.format(self.fn_info.namespaced_name())
    count = 0
    while name in self.callable_class_names:
        name = base_name + '_' + str(count)
        count += 1
    self.callable_class_names.add(name)

    # Define the actual callable class ClassIR, and set its environment to point at the
    # previously defined environment class.
    callable_class_ir = ClassIR(name, self.module_name, is_generated=True)

    # The functools @wraps decorator attempts to call setattr on nested functions, so
    # we create a dict for these nested functions.
    # https://github.com/python/cpython/blob/3.7/Lib/functools.py#L58
    if self.fn_info.is_nested:
        callable_class_ir.has_dict = True

    # If the enclosing class doesn't contain nested (which will happen if
    # this is a toplevel lambda), don't set up an environment.
    if self.fn_infos[-2].contains_nested:
        callable_class_ir.attributes[ENV_ATTR_NAME] = RInstance(self.fn_infos[-2].env_class)
    callable_class_ir.mro = [callable_class_ir]
    self.fn_info.callable_class = ImplicitClass(callable_class_ir)
    self.classes.append(callable_class_ir)

    # Add a 'self' variable to the callable class' environment, and store that variable in a
    # register to be accessed later.
    self_target = self.add_self_to_env(callable_class_ir)
    self.fn_info.callable_class.self_reg = self.read(self_target, self.fn_info.fitem.line)
def add_call_to_callable_class(self,
                               blocks: List[BasicBlock],
                               sig: FuncSignature,
                               env: Environment,
                               fn_info: FuncInfo) -> FuncIR:
    """Generates a '__call__' method for a callable class representing a nested function.

    This takes the blocks, signature, and environment associated with a function definition and
    uses those to build the '__call__' method of a given callable class, used to represent that
    function.

    Note that a 'self' parameter is added to its list of arguments, as the nested function
    becomes a class method.
    """
    sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),) + sig.args, sig.ret_type)
    call_fn_decl = FuncDecl('__call__', fn_info.callable_class.ir.name, self.module_name, sig)
    call_fn_ir = FuncIR(call_fn_decl, blocks, env,
                        fn_info.fitem.line, traceback_name=fn_info.fitem.name)
    fn_info.callable_class.ir.methods['__call__'] = call_fn_ir
    return call_fn_ir
""" sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),) + sig.args, sig.ret_type) call_fn_decl = FuncDecl('__call__', fn_info.callable_class.ir.name, self.module_name, sig) call_fn_ir = FuncIR(call_fn_decl, blocks, env, fn_info.fitem.line, traceback_name=fn_info.fitem.name) fn_info.callable_class.ir.methods['__call__'] = call_fn_ir return call_fn_ir def add_get_to_callable_class(self, fn_info: FuncInfo) -> None: """Generates the '__get__' method for a callable class.""" line = fn_info.fitem.line self.enter(fn_info) vself = self.read(self.environment.add_local_reg(Var(SELF_NAME), object_rprimitive, True)) instance = self.environment.add_local_reg(Var('instance'), object_rprimitive, True) self.environment.add_local_reg(Var('owner'), object_rprimitive, True) # If accessed through the class, just return the callable # object. If accessed through an object, create a new bound # instance method object. instance_block, class_block = BasicBlock(), BasicBlock() comparison = self.binary_op(self.read(instance), self.none_object(), 'is', line) self.add_bool_branch(comparison, class_block, instance_block) self.activate_block(class_block) self.add(Return(vself)) self.activate_block(instance_block) self.add(Return(self.primitive_op(method_new_op, [vself, self.read(instance)], line))) blocks, env, _, fn_info = self.leave() sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive), RuntimeArg('instance', object_rprimitive), RuntimeArg('owner', object_rprimitive)), object_rprimitive) get_fn_decl = FuncDecl('__get__', fn_info.callable_class.ir.name, self.module_name, sig) get_fn_ir = FuncIR(get_fn_decl, blocks, env) fn_info.callable_class.ir.methods['__get__'] = get_fn_ir self.functions.append(get_fn_ir) def instantiate_callable_class(self, fn_info: FuncInfo) -> Value: """ Assigns a callable class to a register named after the given function definition. 
def setup_env_class(self) -> ClassIR:
    """Generates a class representing a function environment.

    Note that the variables in the function environment are not actually populated here. This
    is because when the environment class is generated, the function environment has not yet
    been visited. This behavior is allowed so that when the compiler visits nested functions,
    it can use the returned ClassIR instance to figure out free variables it needs to access.
    The remaining attributes of the environment class are populated when the environment
    registers are loaded.

    Returns a ClassIR representing an environment for a function containing a nested function.
    """
    env_class = ClassIR('{}_env'.format(self.fn_info.namespaced_name()),
                        self.module_name, is_generated=True)
    env_class.attributes[SELF_NAME] = RInstance(env_class)
    if self.fn_info.is_nested:
        # If the function is nested, its environment class must contain an environment
        # attribute pointing to its encapsulating functions' environment class.
        env_class.attributes[ENV_ATTR_NAME] = RInstance(self.fn_infos[-2].env_class)
    env_class.mro = [env_class]
    self.fn_info.env_class = env_class
    self.classes.append(env_class)
    return env_class
""" env_class = ClassIR('{}_env'.format(self.fn_info.namespaced_name()), self.module_name, is_generated=True) env_class.attributes[SELF_NAME] = RInstance(env_class) if self.fn_info.is_nested: # If the function is nested, its environment class must contain an environment # attribute pointing to its encapsulating functions' environment class. env_class.attributes[ENV_ATTR_NAME] = RInstance(self.fn_infos[-2].env_class) env_class.mro = [env_class] self.fn_info.env_class = env_class self.classes.append(env_class) return env_class def finalize_env_class(self) -> None: """Generates, instantiates, and sets up the environment of an environment class.""" self.instantiate_env_class() # Iterate through the function arguments and replace local definitions (using registers) # that were previously added to the environment with references to the function's # environment class. if self.fn_info.is_nested: self.add_args_to_env(local=False, base=self.fn_info.callable_class) else: self.add_args_to_env(local=False, base=self.fn_info) def instantiate_env_class(self) -> Value: """Assigns an environment class to a register named after the given function definition.""" curr_env_reg = self.add(Call(self.fn_info.env_class.ctor, [], self.fn_info.fitem.line)) if self.fn_info.is_nested: self.fn_info.callable_class._curr_env_reg = curr_env_reg self.add(SetAttr(curr_env_reg, ENV_ATTR_NAME, self.fn_info.callable_class.prev_env_reg, self.fn_info.fitem.line)) else: self.fn_info._curr_env_reg = curr_env_reg return curr_env_reg def gen_generator_func(self) -> None: self.setup_generator_class() self.load_env_registers() self.gen_arg_defaults() self.finalize_env_class() self.add(Return(self.instantiate_generator_class())) def setup_generator_class(self) -> ClassIR: name = '{}_gen'.format(self.fn_info.namespaced_name()) generator_class_ir = ClassIR(name, self.module_name, is_generated=True) generator_class_ir.attributes[ENV_ATTR_NAME] = RInstance(self.fn_info.env_class) generator_class_ir.mro = 
def add_helper_to_generator_class(self,
                                  blocks: List[BasicBlock],
                                  sig: FuncSignature,
                                  env: Environment,
                                  fn_info: FuncInfo) -> FuncDecl:
    """Generates a helper method for a generator class, called by '__next__' and 'throw'."""
    sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),
                         RuntimeArg('type', object_rprimitive),
                         RuntimeArg('value', object_rprimitive),
                         RuntimeArg('traceback', object_rprimitive),
                         RuntimeArg('arg', object_rprimitive)
                         ), sig.ret_type)
    helper_fn_decl = FuncDecl('__mypyc_generator_helper__', fn_info.generator_class.ir.name,
                              self.module_name, sig)
    helper_fn_ir = FuncIR(helper_fn_decl, blocks, env,
                          fn_info.fitem.line, traceback_name=fn_info.fitem.name)
    fn_info.generator_class.ir.methods['__mypyc_generator_helper__'] = helper_fn_ir
    self.functions.append(helper_fn_ir)
    return helper_fn_decl

def add_iter_to_generator_class(self, fn_info: FuncInfo) -> None:
    """Generates the '__iter__' method for a generator class."""
    self.enter(fn_info)
    self_target = self.add_self_to_env(fn_info.generator_class.ir)
    self.add(Return(self.read(self_target, fn_info.fitem.line)))
    blocks, env, _, fn_info = self.leave()

    # Next, add the actual function as a method of the generator class.
    sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), object_rprimitive)
    iter_fn_decl = FuncDecl('__iter__', fn_info.generator_class.ir.name, self.module_name, sig)
    iter_fn_ir = FuncIR(iter_fn_decl, blocks, env)
    fn_info.generator_class.ir.methods['__iter__'] = iter_fn_ir
    self.functions.append(iter_fn_ir)
def add_next_to_generator_class(self,
                                fn_info: FuncInfo,
                                fn_decl: FuncDecl,
                                sig: FuncSignature) -> None:
    """Generates the '__next__' method for a generator class."""
    self.enter(fn_info)
    self_reg = self.read(self.add_self_to_env(fn_info.generator_class.ir))
    none_reg = self.none_object()

    # Call the helper function with error flags set to Py_None, and return that result.
    result = self.add(Call(fn_decl, [self_reg, none_reg, none_reg, none_reg, none_reg],
                           fn_info.fitem.line))
    self.add(Return(result))
    blocks, env, _, fn_info = self.leave()

    sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), sig.ret_type)
    next_fn_decl = FuncDecl('__next__', fn_info.generator_class.ir.name, self.module_name, sig)
    next_fn_ir = FuncIR(next_fn_decl, blocks, env)
    fn_info.generator_class.ir.methods['__next__'] = next_fn_ir
    self.functions.append(next_fn_ir)

def add_send_to_generator_class(self,
                                fn_info: FuncInfo,
                                fn_decl: FuncDecl,
                                sig: FuncSignature) -> None:
    """Generates the 'send' method for a generator class."""
    # FIXME: this is basically the same as add_next...
    self.enter(fn_info)
    self_reg = self.read(self.add_self_to_env(fn_info.generator_class.ir))
    arg = self.environment.add_local_reg(Var('arg'), object_rprimitive, True)
    none_reg = self.none_object()

    # Call the helper function with error flags set to Py_None, and return that result.
    result = self.add(Call(fn_decl, [self_reg, none_reg, none_reg, none_reg, self.read(arg)],
                           fn_info.fitem.line))
    self.add(Return(result))
    blocks, env, _, fn_info = self.leave()

    sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),
                         RuntimeArg('arg', object_rprimitive),), sig.ret_type)
    next_fn_decl = FuncDecl('send', fn_info.generator_class.ir.name, self.module_name, sig)
    next_fn_ir = FuncIR(next_fn_decl, blocks, env)
    fn_info.generator_class.ir.methods['send'] = next_fn_ir
    self.functions.append(next_fn_ir)
def add_throw_to_generator_class(self,
                                 fn_info: FuncInfo,
                                 fn_decl: FuncDecl,
                                 sig: FuncSignature) -> None:
    """Generates the 'throw' method for a generator class."""
    self.enter(fn_info)
    self_reg = self.read(self.add_self_to_env(fn_info.generator_class.ir))

    # Add the type, value, and traceback variables to the environment.
    typ = self.environment.add_local_reg(Var('type'), object_rprimitive, True)
    val = self.environment.add_local_reg(Var('value'), object_rprimitive, True)
    tb = self.environment.add_local_reg(Var('traceback'), object_rprimitive, True)

    # Because the value and traceback arguments are optional and hence can be NULL if not
    # passed in, we have to assign them Py_None if they are not passed in.
    none_reg = self.none_object()
    self.assign_if_null(val, lambda: none_reg, self.fn_info.fitem.line)
    self.assign_if_null(tb, lambda: none_reg, self.fn_info.fitem.line)

    # Call the helper function using the arguments passed in, and return that result.
    result = self.add(Call(fn_decl,
                           [self_reg, self.read(typ), self.read(val), self.read(tb), none_reg],
                           fn_info.fitem.line))
    self.add(Return(result))
    blocks, env, _, fn_info = self.leave()

    # Create the FuncSignature for the throw function. Note that the value and traceback
    # fields are optional, and are assigned to if they are not passed in inside the body
    # of the throw function.
    sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),
                         RuntimeArg('type', object_rprimitive),
                         RuntimeArg('value', object_rprimitive, ARG_OPT),
                         RuntimeArg('traceback', object_rprimitive, ARG_OPT)),
                        sig.ret_type)

    throw_fn_decl = FuncDecl('throw', fn_info.generator_class.ir.name, self.module_name, sig)
    throw_fn_ir = FuncIR(throw_fn_decl, blocks, env)
    fn_info.generator_class.ir.methods['throw'] = throw_fn_ir
    self.functions.append(throw_fn_ir)
def add_close_to_generator_class(self, fn_info: FuncInfo) -> None:
    """Generates the 'close' method for a generator class.

    (The docstring previously said '__close__', but the method registered below
    is the plain 'close' required by the generator protocol.)
    """
    # TODO: Currently this method just triggers a runtime error,
    # we should fill this out eventually.
    self.enter(fn_info)
    self.add_self_to_env(fn_info.generator_class.ir)
    # Fixed typo in the user-visible error message: 'uimplemented' -> 'unimplemented'.
    self.add(RaiseStandardError(RaiseStandardError.RUNTIME_ERROR,
                                'close method on generator classes unimplemented',
                                fn_info.fitem.line))
    self.add(Unreachable())
    blocks, env, _, fn_info = self.leave()

    # Next, add the actual function as a method of the generator class.
    sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), object_rprimitive)
    close_fn_decl = FuncDecl('close', fn_info.generator_class.ir.name, self.module_name, sig)
    close_fn_ir = FuncIR(close_fn_decl, blocks, env)
    fn_info.generator_class.ir.methods['close'] = close_fn_ir
    self.functions.append(close_fn_ir)

def add_await_to_generator_class(self, fn_info: FuncInfo) -> None:
    """Generates the '__await__' method for a generator class."""
    self.enter(fn_info)
    self_target = self.add_self_to_env(fn_info.generator_class.ir)
    self.add(Return(self.read(self_target, fn_info.fitem.line)))
    blocks, env, _, fn_info = self.leave()

    # Next, add the actual function as a method of the generator class.
    sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), object_rprimitive)
    await_fn_decl = FuncDecl('__await__', fn_info.generator_class.ir.name,
                             self.module_name, sig)
    await_fn_ir = FuncIR(await_fn_decl, blocks, env)
    fn_info.generator_class.ir.methods['__await__'] = await_fn_ir
    self.functions.append(await_fn_ir)
def create_switch_for_generator_class(self) -> None:
    # Jump to the resume-dispatch switch; the new block becomes the next resume point.
    self.add(Goto(self.fn_info.generator_class.switch_block))
    self.fn_info.generator_class.blocks.append(self.new_block())

def populate_switch_for_generator_class(self) -> None:
    """Fill in the dispatch switch that resumes a generator at its saved label."""
    cls = self.fn_info.generator_class
    line = self.fn_info.fitem.line

    self.activate_block(cls.switch_block)
    for label, true_block in enumerate(cls.blocks):
        false_block = BasicBlock()
        comparison = self.binary_op(cls.next_label_reg, self.add(LoadInt(label)), '==', line)
        self.add_bool_branch(comparison, true_block, false_block)
        self.activate_block(false_block)

    # No label matched: the generator is exhausted.
    self.add(RaiseStandardError(RaiseStandardError.STOP_ITERATION, None, line))
    self.add(Unreachable())

def instantiate_generator_class(self) -> Value:
    fitem = self.fn_info.fitem
    generator_reg = self.add(Call(self.fn_info.generator_class.ir.ctor, [], fitem.line))

    # Get the current environment register. If the current function is nested, then the
    # generator class gets instantiated from the callable class' '__call__' method, and hence
    # we use the callable class' environment register. Otherwise, we use the original
    # function's environment register.
    if self.fn_info.is_nested:
        curr_env_reg = self.fn_info.callable_class.curr_env_reg
    else:
        curr_env_reg = self.fn_info.curr_env_reg

    # Set the generator class' environment attribute to point at the environment class
    # defined in the current scope.
    self.add(SetAttr(generator_reg, ENV_ATTR_NAME, curr_env_reg, fitem.line))

    # Set the generator class' environment class' NEXT_LABEL_ATTR_NAME attribute to 0.
    zero_reg = self.add(LoadInt(0))
    self.add(SetAttr(curr_env_reg, NEXT_LABEL_ATTR_NAME, zero_reg, fitem.line))
    return generator_reg
def add_raise_exception_blocks_to_generator_class(self, line: int) -> None:
    """
    Generates blocks to check if error flags are set while calling the helper method for
    generator functions, and raises an exception if those flags are set.
    """
    cls = self.fn_info.generator_class
    assert cls.exc_regs is not None
    exc_type, exc_val, exc_tb = cls.exc_regs

    # Check to see if an exception was raised.
    error_block = BasicBlock()
    ok_block = BasicBlock()
    comparison = self.binary_op(exc_type, self.none_object(), 'is not', line)
    self.add_bool_branch(comparison, error_block, ok_block)

    self.activate_block(error_block)
    self.primitive_op(raise_exception_with_tb_op, [exc_type, exc_val, exc_tb], line)
    self.add(Unreachable())
    self.goto_and_activate(ok_block)

def add_self_to_env(self, cls: ClassIR) -> AssignmentTargetRegister:
    return self.environment.add_local_reg(Var(SELF_NAME),
                                          RInstance(cls),
                                          is_arg=True)

def is_builtin_ref_expr(self, expr: RefExpr) -> bool:
    assert expr.node, "RefExpr not resolved"
    return '.' in expr.node.fullname and expr.node.fullname.split('.')[0] == 'builtins'

def load_global(self, expr: NameExpr) -> Value:
    """Loads a Python-level global.

    This takes a NameExpr and uses its name as a key to retrieve the corresponding PyObject *
    from the _globals dictionary in the C-generated code.
    """
    # If the global is from 'builtins', turn it into a module attr load instead
    if self.is_builtin_ref_expr(expr):
        assert expr.node, "RefExpr not resolved"
        return self.load_module_attr_by_fullname(expr.node.fullname, expr.line)
    if (self.is_native_module_ref_expr(expr) and isinstance(expr.node, TypeInfo)
            and not self.is_synthetic_type(expr.node)):
        assert expr.fullname is not None
        return self.load_native_type_object(expr.fullname)
    return self.load_global_str(expr.name, expr.line)
""" # If the global is from 'builtins', turn it into a module attr load instead if self.is_builtin_ref_expr(expr): assert expr.node, "RefExpr not resolved" return self.load_module_attr_by_fullname(expr.node.fullname, expr.line) if (self.is_native_module_ref_expr(expr) and isinstance(expr.node, TypeInfo) and not self.is_synthetic_type(expr.node)): assert expr.fullname is not None return self.load_native_type_object(expr.fullname) return self.load_global_str(expr.name, expr.line) def load_global_str(self, name: str, line: int) -> Value: _globals = self.load_globals_dict() reg = self.load_static_unicode(name) return self.primitive_op(dict_get_item_op, [_globals, reg], line) def load_globals_dict(self) -> Value: return self.add(LoadStatic(dict_rprimitive, 'globals', self.module_name)) def literal_static_name(self, value: Union[int, float, complex, str, bytes]) -> str: return self.mapper.literal_static_name(self.current_module, value) def load_static_int(self, value: int) -> Value: """Loads a static integer Python 'int' object into a register.""" static_symbol = self.literal_static_name(value) return self.add(LoadStatic(int_rprimitive, static_symbol, ann=value)) def load_static_float(self, value: float) -> Value: """Loads a static float value into a register.""" static_symbol = self.literal_static_name(value) return self.add(LoadStatic(float_rprimitive, static_symbol, ann=value)) def load_static_bytes(self, value: bytes) -> Value: """Loads a static bytes value into a register.""" static_symbol = self.literal_static_name(value) return self.add(LoadStatic(object_rprimitive, static_symbol, ann=value)) def load_static_complex(self, value: complex) -> Value: """Loads a static complex value into a register.""" static_symbol = self.literal_static_name(value) return self.add(LoadStatic(object_rprimitive, static_symbol, ann=value)) def load_static_unicode(self, value: str) -> Value: """Loads a static unicode value into a register. 
This is useful for more than just unicode literals; for example, method calls also require a PyObject * form for the name of the method. """ static_symbol = self.literal_static_name(value) return self.add(LoadStatic(str_rprimitive, static_symbol, ann=value)) def load_module(self, name: str) -> Value: return self.add(LoadStatic(object_rprimitive, name, namespace=NAMESPACE_MODULE)) def load_module_attr_by_fullname(self, fullname: str, line: int) -> Value: module, _, name = fullname.rpartition('.') left = self.load_module(module) return self.py_get_attr(left, name, line) def load_native_type_object(self, fullname: str) -> Value: module, name = fullname.rsplit('.', 1) return self.add(LoadStatic(object_rprimitive, name, module, NAMESPACE_TYPE)) def coerce(self, src: Value, target_type: RType, line: int, force: bool = False) -> Value: """Generate a coercion/cast from one type to other (only if needed). For example, int -> object boxes the source int; int -> int emits nothing; object -> int unboxes the object. All conversions preserve object value. If force is true, always generate an op (even if it is just an assignment) so that the result will have exactly target_type as the type. Returns the register with the converted value (may be same as src). """ if src.type.is_unboxed and not target_type.is_unboxed: return self.box(src) if ((src.type.is_unboxed and target_type.is_unboxed) and not is_runtime_subtype(src.type, target_type)): # To go from one unboxed type to another, we go through a boxed # in-between value, for simplicity. 
            tmp = self.box(src)
            return self.unbox_or_cast(tmp, target_type, line)
        if ((not src.type.is_unboxed and target_type.is_unboxed)
                or not is_subtype(src.type, target_type)):
            return self.unbox_or_cast(src, target_type, line)
        elif force:
            # No conversion needed, but the caller asked for a fresh register
            # whose static type is exactly target_type.
            tmp = self.alloc_temp(target_type)
            self.add(Assign(tmp, src))
            return tmp
        return src

    def native_args_to_positional(self,
                                  args: Sequence[Value],
                                  arg_kinds: List[int],
                                  arg_names: Sequence[Optional[str]],
                                  sig: FuncSignature,
                                  line: int) -> List[Value]:
        """Prepare arguments for a native call.

        Given args/kinds/names and a target signature for a native call, map
        keyword arguments to their appropriate place in the argument list,
        fill in error values for unspecified default arguments,
        package arguments that will go into *args/**kwargs into a tuple/dict,
        and coerce arguments to the appropriate type.
        """

        sig_arg_kinds = [arg.kind for arg in sig.args]
        sig_arg_names = [arg.name for arg in sig.args]
        formal_to_actual = map_actuals_to_formals(arg_kinds,
                                                  arg_names,
                                                  sig_arg_kinds,
                                                  sig_arg_names,
                                                  lambda n: AnyType(TypeOfAny.special_form))

        # Flatten out the arguments, loading error values for default
        # arguments, constructing tuples/dicts for star args, and
        # coercing everything to the expected type.
        output_args = []
        for lst, arg in zip(formal_to_actual, sig.args):
            output_arg = None
            if arg.kind == ARG_STAR:
                # Collect all actuals mapped to *args into one tuple.
                output_arg = self.primitive_op(new_tuple_op, [args[i] for i in lst], line)
            elif arg.kind == ARG_STAR2:
                # Collect all actuals mapped to **kwargs into one dict.
                dict_entries = [(self.load_static_unicode(cast(str, arg_names[i])), args[i])
                                for i in lst]
                output_arg = self.make_dict(dict_entries, line)
            elif not lst:
                # Unspecified default argument: pass a borrowed error value
                # so the callee fills in the default.
                output_arg = self.add(LoadErrorValue(arg.type, is_borrowed=True))
            else:
                output_arg = args[lst[0]]
            output_args.append(self.coerce(output_arg, arg.type, line))

        return output_args

    def get_func_target(self, fdef: FuncDef) -> AssignmentTarget:
        """
        Given a FuncDef, return the target associated with the instance of its callable class.
If the function was not already defined somewhere, then define it and add it to the current environment. """ if fdef.original_def: # Get the target associated with the previously defined FuncDef. return self.environment.lookup(fdef.original_def) if self.fn_info.is_generator or self.fn_info.contains_nested: return self.environment.lookup(fdef) return self.environment.add_local_reg(fdef, object_rprimitive) # Lacks a good type because there wasn't a reasonable type in 3.5 :( def catch_errors(self, line: int) -> Any: return catch_errors(self.module_path, line) def warning(self, msg: str, line: int) -> None: self.errors.warning(msg, self.module_path, line) def error(self, msg: str, line: int) -> None: self.errors.error(msg, self.module_path, line) def bail(self, msg: str, line: int) -> 'NoReturn': """Reports an error and aborts compilation up until the last accept() call (accept() catches the UnsupportedException and keeps on processing. This allows errors to be non-blocking without always needing to write handling for them. """ self.error(msg, line) raise UnsupportedException() mypy-0.761/mypyc/genops_for.py0000644€tŠÔÚ€2›s®0000003353413576752246022604 0ustar jukkaDROPBOX\Domain Users00000000000000"""Helpers for generating for loops. We special case certain kinds for loops such as "for x in range(...)" for better efficiency. Each for loop generator class below deals one such special case. 
""" from typing import Union, List from typing_extensions import TYPE_CHECKING from mypy.nodes import Lvalue, Expression from mypyc.ops import ( Value, BasicBlock, is_short_int_rprimitive, LoadInt, RType, PrimitiveOp, Branch, Register, AssignmentTarget ) from mypyc.ops_int import unsafe_short_add from mypyc.ops_list import list_len_op, list_get_item_unsafe_op from mypyc.ops_misc import iter_op, next_op from mypyc.ops_exc import no_err_occurred_op if TYPE_CHECKING: import mypyc.genops class ForGenerator: """Abstract base class for generating for loops.""" def __init__(self, builder: 'mypyc.genops.IRBuilder', index: Lvalue, body_block: BasicBlock, loop_exit: BasicBlock, line: int, nested: bool) -> None: self.builder = builder self.index = index self.body_block = body_block self.line = line # Some for loops need a cleanup block that we execute at exit. We # create a cleanup block if needed. However, if we are generating a for # loop for a nested iterator, such as "e" in "enumerate(e)", the # outermost generator should generate the cleanup block -- we don't # need to do it here. if self.need_cleanup() and not nested: # Create a new block to handle cleanup after loop exit. self.loop_exit = BasicBlock() else: # Just use the existing loop exit block. self.loop_exit = loop_exit def need_cleanup(self) -> bool: """If this returns true, we need post-loop cleanup.""" return False def add_cleanup(self, exit_block: BasicBlock) -> None: """Add post-loop cleanup, if needed.""" if self.need_cleanup(): self.builder.activate_block(self.loop_exit) self.gen_cleanup() self.builder.goto(exit_block) def gen_condition(self) -> None: """Generate check for loop exit (e.g. 
exhaustion of iteration).""" def begin_body(self) -> None: """Generate ops at the beginning of the body (if needed).""" def gen_step(self) -> None: """Generate stepping to the next item (if needed).""" def gen_cleanup(self) -> None: """Generate post-loop cleanup (if needed).""" class ForIterable(ForGenerator): """Generate IR for a for loop over an arbitrary iterable (the normal case).""" def need_cleanup(self) -> bool: # Create a new cleanup block for when the loop is finished. return True def init(self, expr_reg: Value, target_type: RType) -> None: # Define targets to contain the expression, along with the iterator that will be used # for the for-loop. If we are inside of a generator function, spill these into the # environment class. builder = self.builder iter_reg = builder.primitive_op(iter_op, [expr_reg], self.line) builder.maybe_spill(expr_reg) self.iter_target = builder.maybe_spill(iter_reg) self.target_type = target_type def gen_condition(self) -> None: # We call __next__ on the iterator and check to see if the return value # is NULL, which signals either the end of the Iterable being traversed # or an exception being raised. Note that Branch.IS_ERROR checks only # for NULL (an exception does not necessarily have to be raised). builder = self.builder line = self.line self.next_reg = builder.primitive_op(next_op, [builder.read(self.iter_target, line)], line) builder.add(Branch(self.next_reg, self.loop_exit, self.body_block, Branch.IS_ERROR)) def begin_body(self) -> None: # Assign the value obtained from __next__ to the # lvalue so that it can be referenced by code in the body of the loop. builder = self.builder line = self.line # We unbox here so that iterating with tuple unpacking generates a tuple based # unpack instead of an iterator based one. 
next_reg = builder.unbox_or_cast(self.next_reg, self.target_type, line) builder.assign(builder.get_assignment_target(self.index), next_reg, line) def gen_step(self) -> None: # Nothing to do here, since we get the next item as part of gen_condition(). pass def gen_cleanup(self) -> None: # We set the branch to go here if the conditional evaluates to true. If # an exception was raised during the loop, then err_reg wil be set to # True. If no_err_occurred_op returns False, then the exception will be # propagated using the ERR_FALSE flag. self.builder.primitive_op(no_err_occurred_op, [], self.line) # TODO: Generalize to support other sequences (tuples at least) with # different length and indexing ops. class ForList(ForGenerator): """Generate optimized IR for a for loop over a list. Supports iterating in both forward and reverse.""" def init(self, expr_reg: Value, target_type: RType, reverse: bool) -> None: builder = self.builder self.reverse = reverse # Define target to contain the expression, along with the index that will be used # for the for-loop. If we are inside of a generator function, spill these into the # environment class. self.expr_target = builder.maybe_spill(expr_reg) if not reverse: index_reg = builder.add(LoadInt(0)) else: index_reg = builder.binary_op(self.load_len(), builder.add(LoadInt(1)), '-', self.line) self.index_target = builder.maybe_spill_assignable(index_reg) self.target_type = target_type def load_len(self) -> Value: return self.builder.add(PrimitiveOp([self.builder.read(self.expr_target, self.line)], list_len_op, self.line)) def gen_condition(self) -> None: builder = self.builder line = self.line if self.reverse: # If we are iterating in reverse order, we obviously need # to check that the index is still positive. Somewhat less # obviously we still need to check against the length, # since it could shrink out from under us. 
            comparison = builder.binary_op(builder.read(self.index_target, line),
                                           builder.add(LoadInt(0)), '>=', line)
            second_check = BasicBlock()
            builder.add_bool_branch(comparison, second_check, self.loop_exit)
            builder.activate_block(second_check)
        # For compatibility with python semantics we recalculate the length
        # at every iteration.
        len_reg = self.load_len()
        comparison = builder.binary_op(builder.read(self.index_target, line), len_reg, '<', line)
        builder.add_bool_branch(comparison, self.body_block, self.loop_exit)

    def begin_body(self) -> None:
        builder = self.builder
        line = self.line
        # Read the next list item.
        value_box = builder.primitive_op(
            list_get_item_unsafe_op,
            [builder.read(self.expr_target, line), builder.read(self.index_target, line)],
            line)
        assert value_box
        # We coerce to the type of list elements here so that
        # iterating with tuple unpacking generates a tuple based
        # unpack instead of an iterator based one.
        builder.assign(builder.get_assignment_target(self.index),
                       builder.unbox_or_cast(value_box, self.target_type, line), line)

    def gen_step(self) -> None:
        # Step to the next item.
        builder = self.builder
        line = self.line
        step = 1 if not self.reverse else -1
        builder.assign(self.index_target,
                       builder.primitive_op(
                           unsafe_short_add,
                           [builder.read(self.index_target, line),
                            builder.add(LoadInt(step))], line), line)


class ForRange(ForGenerator):
    """Generate optimized IR for a for loop over an integer range."""

    # TODO: Use a separate register for the index to allow safe index mutation.

    def init(self, start_reg: Value, end_reg: Value, step: int) -> None:
        builder = self.builder
        self.start_reg = start_reg
        self.end_reg = end_reg
        self.step = step
        self.end_target = builder.maybe_spill(end_reg)
        self.index_reg = builder.maybe_spill_assignable(start_reg)
        # Initialize the loop index to the range's start value (spilled
        # above). Assert that the index target is assignable.
        self.index_target = builder.get_assignment_target(
            self.index)  # type: Union[Register, AssignmentTarget]
        builder.assign(self.index_target, builder.read(self.index_reg, self.line), self.line)

    def gen_condition(self) -> None:
        builder = self.builder
        line = self.line
        # Add loop condition check. The comparison direction depends on
        # the sign of the step.
        cmp = '<' if self.step > 0 else '>'
        comparison = builder.binary_op(builder.read(self.index_target, line),
                                       builder.read(self.end_target, line), cmp, line)
        builder.add_bool_branch(comparison, self.body_block, self.loop_exit)

    def gen_step(self) -> None:
        builder = self.builder
        line = self.line

        # Increment index register. If the range is known to fit in short ints, use
        # short ints.
        if (is_short_int_rprimitive(self.start_reg.type)
                and is_short_int_rprimitive(self.end_reg.type)):
            new_val = builder.primitive_op(
                unsafe_short_add, [builder.read(self.index_reg, line),
                                   builder.add(LoadInt(self.step))], line)
        else:
            new_val = builder.binary_op(
                builder.read(self.index_reg, line), builder.add(LoadInt(self.step)), '+', line)
        builder.assign(self.index_reg, new_val, line)
        builder.assign(self.index_target, new_val, line)


class ForInfiniteCounter(ForGenerator):
    """Generate optimized IR for a for loop counting from 0 to infinity."""

    def init(self) -> None:
        builder = self.builder
        # Create a register to store the state of the loop index and
        # initialize this register along with the loop index to 0.
        zero = builder.add(LoadInt(0))
        self.index_reg = builder.maybe_spill_assignable(zero)
        self.index_target = builder.get_assignment_target(
            self.index)  # type: Union[Register, AssignmentTarget]
        builder.assign(self.index_target, zero, self.line)

    def gen_step(self) -> None:
        builder = self.builder
        line = self.line
        # We can safely assume that the integer is short, since we are not going to wrap
        # around a 63-bit integer.
        # NOTE: This would be questionable if short ints could be 32 bits.
        new_val = builder.primitive_op(
            unsafe_short_add, [builder.read(self.index_reg, line),
                               builder.add(LoadInt(1))], line)
        builder.assign(self.index_reg, new_val, line)
        builder.assign(self.index_target, new_val, line)


class ForEnumerate(ForGenerator):
    """Generate optimized IR for a for loop of form "for i, x in enumerate(it)"."""

    def need_cleanup(self) -> bool:
        # The wrapped for loop might need cleanup. This might generate a
        # redundant cleanup block, but that's okay.
        return True

    def init(self, index1: Lvalue, index2: Lvalue, expr: Expression) -> None:
        # Count from 0 to infinity (for the index lvalue).
        self.index_gen = ForInfiniteCounter(
            self.builder,
            index1,
            self.body_block,
            self.loop_exit,
            self.line, nested=True)
        self.index_gen.init()
        # Iterate over the actual iterable.
        self.main_gen = self.builder.make_for_loop_generator(
            index2,
            expr,
            self.body_block,
            self.loop_exit,
            self.line, nested=True)

    def gen_condition(self) -> None:
        # No need for a check for the index generator, since it's unconditional.
        self.main_gen.gen_condition()

    def begin_body(self) -> None:
        # Delegate to both wrapped generators, index first.
        self.index_gen.begin_body()
        self.main_gen.begin_body()

    def gen_step(self) -> None:
        self.index_gen.gen_step()
        self.main_gen.gen_step()

    def gen_cleanup(self) -> None:
        self.index_gen.gen_cleanup()
        self.main_gen.gen_cleanup()


class ForZip(ForGenerator):
    """Generate IR for a for loop of form `for x, ... in zip(a, ...)`."""

    def need_cleanup(self) -> bool:
        # The wrapped for loops might need cleanup. We might generate a
        # redundant cleanup block, but that's okay.
        return True

    def init(self, indexes: List[Lvalue], exprs: List[Expression]) -> None:
        assert len(indexes) == len(exprs)
        # Condition check will require multiple basic blocks, since there will be
        # multiple conditions to check.
self.cond_blocks = [BasicBlock() for _ in range(len(indexes) - 1)] + [self.body_block] self.gens = [] # type: List[ForGenerator] for index, expr, next_block in zip(indexes, exprs, self.cond_blocks): gen = self.builder.make_for_loop_generator( index, expr, next_block, self.loop_exit, self.line, nested=True) self.gens.append(gen) def gen_condition(self) -> None: for i, gen in enumerate(self.gens): gen.gen_condition() if i < len(self.gens) - 1: self.builder.activate_block(self.cond_blocks[i]) def begin_body(self) -> None: for gen in self.gens: gen.begin_body() def gen_step(self) -> None: for gen in self.gens: gen.gen_step() def gen_cleanup(self) -> None: for gen in self.gens: gen.gen_cleanup() mypy-0.761/mypyc/lib-rt/0000755€tŠÔÚ€2›s®0000000000013576752267021255 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypyc/lib-rt/CPy.c0000644€tŠÔÚ€2›s®0000000135713576752246022117 0ustar jukkaDROPBOX\Domain Users00000000000000#include #include #include #include #include "CPy.h" // TODO: Currently only the things that *need* to be defined a single time // instead of copied into every module live here. This is silly, and most // of the code in CPy.h and pythonsupport.h should move here. struct ExcDummyStruct _CPy_ExcDummyStruct = { PyObject_HEAD_INIT(NULL) }; PyObject *_CPy_ExcDummy = (PyObject *)&_CPy_ExcDummyStruct; // Because its dynamic linker is more restricted than linux/OS X, // Windows doesn't allow initializing globals with values from // other dynamic libraries. This means we need to initialize // things at load time. 
void CPy_Init(void) { _CPy_ExcDummyStruct.ob_base.ob_type = &PyBaseObject_Type; } mypy-0.761/mypyc/lib-rt/CPy.h0000644€tŠÔÚ€2›s®0000013412713576752246022126 0ustar jukkaDROPBOX\Domain Users00000000000000#ifndef CPY_CPY_H #define CPY_CPY_H #include #include #include #include #include #include "pythonsupport.h" #include "mypyc_util.h" #ifdef __cplusplus extern "C" { #endif #if 0 } // why isn't emacs smart enough to not indent this #endif /* We use intentionally non-inlined decrefs since it pretty * substantially speeds up compile time while only causing a ~1% * performance degradation. We have our own copies both to avoid the * null check in Py_DecRef and to avoid making an indirect PIC * call. */ CPy_NOINLINE static void CPy_DecRef(PyObject *p) { CPy_DECREF(p); } CPy_NOINLINE static void CPy_XDecRef(PyObject *p) { CPy_XDECREF(p); } // Naming conventions: // // Tagged: tagged int // Long: tagged long int (pointer) // Short: tagged short int (unboxed) // Ssize_t: A Py_ssize_t, which ought to be the same width as pointers // Object: CPython object (PyObject *) static void CPyDebug_Print(const char *msg) { printf("%s\n", msg); fflush(stdout); } // Search backwards through the trait part of a vtable (which sits *before* // the start of the vtable proper) looking for the subvtable describing a trait // implementation. We don't do any bounds checking so we'd better be pretty sure // we know that it is there. static inline CPyVTableItem *CPy_FindTraitVtable(PyTypeObject *trait, CPyVTableItem *vtable) { int i; for (i = -2; ; i -= 2) { if ((PyTypeObject *)vtable[i] == trait) { return (CPyVTableItem *)vtable[i + 1]; } } } static bool _CPy_IsSafeMetaClass(PyTypeObject *metaclass) { // mypyc classes can't work with metaclasses in // general. Through some various nasty hacks we *do* // manage to work with TypingMeta and its friends. 
if (metaclass == &PyType_Type) return true; PyObject *module = PyObject_GetAttrString((PyObject *)metaclass, "__module__"); if (!module) { PyErr_Clear(); return false; } bool matches = false; if (PyUnicode_CompareWithASCIIString(module, "typing") == 0 && (strcmp(metaclass->tp_name, "TypingMeta") == 0 || strcmp(metaclass->tp_name, "GenericMeta") == 0)) { matches = true; } else if (PyUnicode_CompareWithASCIIString(module, "abc") == 0 && strcmp(metaclass->tp_name, "ABCMeta") == 0) { matches = true; } Py_DECREF(module); return matches; } // Create a heap type based on a template non-heap type. // This is super hacky and maybe we should suck it up and use PyType_FromSpec instead. // We allow bases to be NULL to represent just inheriting from object. // We don't support NULL bases and a non-type metaclass. static PyObject *CPyType_FromTemplate(PyTypeObject *template_, PyObject *orig_bases, PyObject *modname) { PyHeapTypeObject *t = NULL; PyTypeObject *dummy_class = NULL; PyObject *name = NULL; PyObject *bases = NULL; PyObject *slots; // If the type of the class (the metaclass) is NULL, we default it // to being type. (This allows us to avoid needing to initialize // it explicitly on windows.) if (!Py_TYPE(template_)) { Py_TYPE(template_) = &PyType_Type; } PyTypeObject *metaclass = Py_TYPE(template_); if (orig_bases) { bases = update_bases(orig_bases); // update_bases doesn't increment the refcount if nothing changes, // so we do it to make sure we have distinct "references" to both if (bases == orig_bases) Py_INCREF(bases); // Find the appropriate metaclass from our base classes. We // care about this because Generic uses a metaclass prior to // Python 3.7. 
metaclass = _PyType_CalculateMetaclass(metaclass, bases); if (!metaclass) goto error; if (!_CPy_IsSafeMetaClass(metaclass)) { PyErr_SetString(PyExc_TypeError, "mypyc classes can't have a metaclass"); goto error; } } name = PyUnicode_FromString(template_->tp_name); if (!name) goto error; // If there is a metaclass other than type, we would like to call // its __new__ function. Unfortunately there doesn't seem to be a // good way to mix a C extension class and creating it via a // metaclass. We need to do it anyways, though, in order to // support subclassing Generic[T] prior to Python 3.7. // // We solve this with a kind of atrocious hack: create a parallel // class using the metaclass, determine the bases of the real // class by pulling them out of the parallel class, creating the // real class, and then merging its dict back into the original // class. There are lots of cases where this won't really work, // but for the case of GenericMeta setting a bunch of properties // on the class we should be fine. if (metaclass != &PyType_Type) { assert(bases && "non-type metaclasses require non-NULL bases"); PyObject *ns = PyDict_New(); if (!ns) goto error; if (bases != orig_bases) { if (PyDict_SetItemString(ns, "__orig_bases__", orig_bases) < 0) goto error; } dummy_class = (PyTypeObject *)PyObject_CallFunctionObjArgs( (PyObject *)metaclass, name, bases, ns, NULL); Py_DECREF(ns); if (!dummy_class) goto error; Py_DECREF(bases); bases = dummy_class->tp_bases; Py_INCREF(bases); } // Allocate the type and then copy the main stuff in. 
t = (PyHeapTypeObject*)PyType_GenericAlloc(&PyType_Type, 0); if (!t) goto error; memcpy((char *)t + sizeof(PyVarObject), (char *)template_ + sizeof(PyVarObject), sizeof(PyTypeObject) - sizeof(PyVarObject)); if (bases != orig_bases) { if (PyObject_SetAttrString((PyObject *)t, "__orig_bases__", orig_bases) < 0) goto error; } // Having tp_base set is I think required for stuff to get // inherited in PyType_Ready, which we needed for subclassing // BaseException. XXX: Taking the first element is wrong I think though. if (bases) { t->ht_type.tp_base = (PyTypeObject *)PyTuple_GET_ITEM(bases, 0); Py_INCREF((PyObject *)t->ht_type.tp_base); } t->ht_name = name; Py_INCREF(name); t->ht_qualname = name; t->ht_type.tp_bases = bases; // references stolen so NULL these out bases = name = NULL; if (PyType_Ready((PyTypeObject *)t) < 0) goto error; assert(t->ht_type.tp_base != NULL); // XXX: This is a terrible hack to work around a cpython check on // the mro. It was needed for mypy.stats. I need to investigate // what is actually going on here. Py_INCREF(metaclass); Py_TYPE(t) = metaclass; if (dummy_class) { if (PyDict_Merge(t->ht_type.tp_dict, dummy_class->tp_dict, 0) != 0) goto error; // This is the *really* tasteless bit. GenericMeta's __new__ // in certain versions of typing sets _gorg to point back to // the class. We need to override it to keep it from pointing // to the proxy. 
if (PyDict_SetItemString(t->ht_type.tp_dict, "_gorg", (PyObject *)t) < 0) goto error; } // Reject anything that would give us a nontrivial __slots__, // because the layout will conflict slots = PyObject_GetAttrString((PyObject *)t, "__slots__"); if (slots) { // don't fail on an empty __slots__ int is_true = PyObject_IsTrue(slots); Py_DECREF(slots); if (is_true > 0) PyErr_SetString(PyExc_TypeError, "mypyc classes can't have __slots__"); if (is_true != 0) goto error; } else { PyErr_Clear(); } if (PyObject_SetAttrString((PyObject *)t, "__module__", modname) < 0) goto error; if (init_subclass((PyTypeObject *)t, NULL)) goto error; Py_XDECREF(dummy_class); return (PyObject *)t; error: Py_XDECREF(t); Py_XDECREF(bases); Py_XDECREF(dummy_class); Py_XDECREF(name); return NULL; } // Get attribute value using vtable (may return an undefined value) #define CPY_GET_ATTR(obj, type, vtable_index, object_type, attr_type) \ ((attr_type (*)(object_type *))((object_type *)obj)->vtable[vtable_index])((object_type *)obj) #define CPY_GET_ATTR_TRAIT(obj, trait, vtable_index, object_type, attr_type) \ ((attr_type (*)(object_type *))(CPy_FindTraitVtable(trait, ((object_type *)obj)->vtable))[vtable_index])((object_type *)obj) // Set attribute value using vtable #define CPY_SET_ATTR(obj, type, vtable_index, value, object_type, attr_type) \ ((bool (*)(object_type *, attr_type))((object_type *)obj)->vtable[vtable_index])( \ (object_type *)obj, value) #define CPY_SET_ATTR_TRAIT(obj, trait, vtable_index, value, object_type, attr_type) \ ((bool (*)(object_type *, attr_type))(CPy_FindTraitVtable(trait, ((object_type *)obj)->vtable))[vtable_index])( \ (object_type *)obj, value) #define CPY_GET_METHOD(obj, type, vtable_index, object_type, method_type) \ ((method_type)(((object_type *)obj)->vtable[vtable_index])) #define CPY_GET_METHOD_TRAIT(obj, trait, vtable_index, object_type, method_type) \ ((method_type)(CPy_FindTraitVtable(trait, ((object_type *)obj)->vtable)[vtable_index])) static void 
CPyError_OutOfMemory(void) {
    fprintf(stderr, "fatal: out of memory\n");
    fflush(stderr);
    abort();
}

// Tagged integer helpers. A CPyTagged is either a "short" int stored
// shifted left by one (low bit clear), or a pointer to a PyObject long
// with the low bit (CPY_INT_TAG) set.

static inline int CPyTagged_CheckLong(CPyTagged x) {
    return x & CPY_INT_TAG;
}

static inline int CPyTagged_CheckShort(CPyTagged x) {
    return !CPyTagged_CheckLong(x);
}

static inline Py_ssize_t CPyTagged_ShortAsSsize_t(CPyTagged x) {
    // NOTE: Assume that we sign extend.
    return (Py_ssize_t)x >> 1;
}

static inline PyObject *CPyTagged_LongAsObject(CPyTagged x) {
    // NOTE: Assume target is not a short int.
    return (PyObject *)(x & ~CPY_INT_TAG);
}

static inline bool CPyTagged_TooBig(Py_ssize_t value) {
    // Micro-optimized for the common case where it fits.
    return (size_t)value > CPY_TAGGED_MAX
        && (value >= 0 || value < CPY_TAGGED_MIN);
}

static CPyTagged CPyTagged_FromSsize_t(Py_ssize_t value) {
    // We use a Python object if the value shifted left by 1 is too
    // large for Py_ssize_t
    if (CPyTagged_TooBig(value)) {
        PyObject *object = PyLong_FromSsize_t(value);
        return ((CPyTagged)object) | CPY_INT_TAG;
    } else {
        return value << 1;
    }
}

// Convert a Python int to a tagged int. On the boxed path this takes a
// new reference to 'object' (the caller keeps its own reference).
static CPyTagged CPyTagged_FromObject(PyObject *object) {
    int overflow;
    // The overflow check knows about CPyTagged's width
    Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow);
    if (overflow != 0) {
        Py_INCREF(object);
        return ((CPyTagged)object) | CPY_INT_TAG;
    } else {
        return value << 1;
    }
}

// Like CPyTagged_FromObject, but steals the caller's reference to 'object'.
static CPyTagged CPyTagged_StealFromObject(PyObject *object) {
    int overflow;
    // The overflow check knows about CPyTagged's width
    Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow);
    if (overflow != 0) {
        return ((CPyTagged)object) | CPY_INT_TAG;
    } else {
        Py_DECREF(object);
        return value << 1;
    }
}

// Like CPyTagged_FromObject, but borrows 'object': no refcount changes
// on either path.
static CPyTagged CPyTagged_BorrowFromObject(PyObject *object) {
    int overflow;
    // The overflow check knows about CPyTagged's width
    Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow);
    if (overflow != 0) {
        return ((CPyTagged)object) | CPY_INT_TAG;
    } else {
        return value << 1;
    }
}

// Box a tagged int into a PyObject *, returning a new reference.
static PyObject *CPyTagged_AsObject(CPyTagged x) {
    PyObject *value;
    if (CPyTagged_CheckLong(x)) {
        value = CPyTagged_LongAsObject(x);
        Py_INCREF(value);
    } else {
        value = PyLong_FromSsize_t(CPyTagged_ShortAsSsize_t(x));
        if (value == NULL) {
            CPyError_OutOfMemory();
        }
    }
    return value;
}

// Like CPyTagged_AsObject, but steals the reference held in 'x' on the
// boxed path instead of incrementing it.
static PyObject *CPyTagged_StealAsObject(CPyTagged x) {
    PyObject *value;
    if (CPyTagged_CheckLong(x)) {
        value = CPyTagged_LongAsObject(x);
    } else {
        value = PyLong_FromSsize_t(CPyTagged_ShortAsSsize_t(x));
        if (value == NULL) {
            CPyError_OutOfMemory();
        }
    }
    return value;
}

static Py_ssize_t CPyTagged_AsSsize_t(CPyTagged x) {
    if (CPyTagged_CheckShort(x)) {
        return CPyTagged_ShortAsSsize_t(x);
    } else {
        return PyLong_AsSsize_t(CPyTagged_LongAsObject(x));
    }
}

CPy_NOINLINE static void CPyTagged_IncRef(CPyTagged x) {
    if (CPyTagged_CheckLong(x)) {
        Py_INCREF(CPyTagged_LongAsObject(x));
    }
}

CPy_NOINLINE static void CPyTagged_DecRef(CPyTagged x) {
    if (CPyTagged_CheckLong(x)) {
        Py_DECREF(CPyTagged_LongAsObject(x));
    }
}

CPy_NOINLINE static void CPyTagged_XDecRef(CPyTagged x) {
    if (CPyTagged_CheckLong(x)) {
        Py_XDECREF(CPyTagged_LongAsObject(x));
    }
}

static inline bool CPyTagged_IsAddOverflow(CPyTagged sum, CPyTagged left, CPyTagged right) {
    // This check was copied from some of my old code I believe that it works :-)
    return (Py_ssize_t)(sum ^ left) < 0 && (Py_ssize_t)(sum ^ right) < 0;
}

static CPyTagged CPyTagged_Negate(CPyTagged num) {
    if (CPyTagged_CheckShort(num)
            && num != (CPyTagged) ((Py_ssize_t)1 << (CPY_INT_BITS - 1))) {
        // The only possibility of an overflow error happening when negating a short is if we
        // attempt to negate the most negative number.
        return -num;
    }
    PyObject *num_obj = CPyTagged_AsObject(num);
    PyObject *result = PyNumber_Negative(num_obj);
    if (result == NULL) {
        CPyError_OutOfMemory();
    }
    Py_DECREF(num_obj);
    return CPyTagged_StealFromObject(result);
}

static CPyTagged CPyTagged_Add(CPyTagged left, CPyTagged right) {
    // TODO: Use clang/gcc extension __builtin_saddll_overflow instead.
/* ==== Tagged integer arithmetic ====
 * A CPyTagged is a Python int packed into one machine word: "short"
 * values pass CPyTagged_CheckShort() and hold the number inline, other
 * values are tagged pointers to a boxed Python int object.
 * NOTE(review): the exact encoding (tag bit, shift) is defined earlier
 * in this header -- confirm there. */

/* Tail of CPyTagged_Add (function opened above): add the tagged words
 * directly when both are short and the sum does not overflow, otherwise
 * fall back to the boxed number protocol. */
if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) {
    CPyTagged sum = left + right;
    if (!CPyTagged_IsAddOverflow(sum, left, right)) {
        return sum;
    }
}
PyObject *left_obj = CPyTagged_AsObject(left);
PyObject *right_obj = CPyTagged_AsObject(right);
PyObject *result = PyNumber_Add(left_obj, right_obj);
// Adding two ints can only fail on memory exhaustion, so treat any
// failure as fatal rather than propagating an exception.
if (result == NULL) {
    CPyError_OutOfMemory();
}
Py_DECREF(left_obj);
Py_DECREF(right_obj);
return CPyTagged_StealFromObject(result);
}

// True if the word-level subtraction `diff = left - right` overflowed.
static inline bool CPyTagged_IsSubtractOverflow(CPyTagged diff, CPyTagged left, CPyTagged right) {
    // This check was copied from some of my old code I believe that it works :-)
    return (Py_ssize_t)(diff ^ left) < 0 && (Py_ssize_t)(diff ^ right) >= 0;
}

// Subtract two tagged ints; fast inline path for short operands,
// boxed PyNumber_Subtract fallback otherwise.
static CPyTagged CPyTagged_Subtract(CPyTagged left, CPyTagged right) {
    // TODO: Use clang/gcc extension __builtin_saddll_overflow instead.
    if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) {
        CPyTagged diff = left - right;
        if (!CPyTagged_IsSubtractOverflow(diff, left, right)) {
            return diff;
        }
    }
    PyObject *left_obj = CPyTagged_AsObject(left);
    PyObject *right_obj = CPyTagged_AsObject(right);
    PyObject *result = PyNumber_Subtract(left_obj, right_obj);
    if (result == NULL) {
        CPyError_OutOfMemory();
    }
    Py_DECREF(left_obj);
    Py_DECREF(right_obj);
    return CPyTagged_StealFromObject(result);
}

// Conservative multiply-overflow predicate: may report overflow for
// products that would actually fit, never the reverse.
static inline bool CPyTagged_IsMultiplyOverflow(CPyTagged left, CPyTagged right) {
    // This is conservative -- return false only in a small number of all non-overflow cases
    return left >= (1U << (CPY_INT_BITS/2 - 1)) || right >= (1U << (CPY_INT_BITS/2 - 1));
}

// Multiply two tagged ints with a short-int fast path.
static CPyTagged CPyTagged_Multiply(CPyTagged left, CPyTagged right) {
    // TODO: Consider using some clang/gcc extension
    if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) {
        if (!CPyTagged_IsMultiplyOverflow(left, right)) {
            // One operand keeps its tag-shift, the other is untagged, so
            // the product is correctly tagged.
            return left * CPyTagged_ShortAsSsize_t(right);
        }
    }
    PyObject *left_obj = CPyTagged_AsObject(left);
    PyObject *right_obj = CPyTagged_AsObject(right);
    PyObject *result = PyNumber_Multiply(left_obj, right_obj);
    if (result == NULL) {
        CPyError_OutOfMemory();
    }
    Py_DECREF(left_obj);
    Py_DECREF(right_obj);
    return CPyTagged_StealFromObject(result);
}

// True if a hardware divide of these tagged words could fault
// (divide by zero, or INT_MIN / -1 style overflow).
static inline bool CPyTagged_MaybeFloorDivideFault(CPyTagged left, CPyTagged right) {
    return right == 0 || left == -((size_t)1 << (CPY_INT_BITS-1));
}

// Python floor division (rounds toward negative infinity) on tagged ints.
static CPyTagged CPyTagged_FloorDivide(CPyTagged left, CPyTagged right) {
    if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)
        && !CPyTagged_MaybeFloorDivideFault(left, right)) {
        // C division truncates toward zero; mask the low bit to keep the
        // result tagged, then adjust when the signs differ.
        Py_ssize_t result = ((Py_ssize_t)left / CPyTagged_ShortAsSsize_t(right)) & ~1;
        if (((Py_ssize_t)left < 0) != (((Py_ssize_t)right) < 0)) {
            if (result / 2 * right != left) {
                // Round down
                result -= 2;
            }
        }
        return result;
    }
    PyObject *left_obj = CPyTagged_AsObject(left);
    PyObject *right_obj = CPyTagged_AsObject(right);
    PyObject *result = PyNumber_FloorDivide(left_obj, right_obj);
    Py_DECREF(left_obj);
    Py_DECREF(right_obj);
    // Handle exceptions honestly because it could be ZeroDivisionError
    if (result == NULL) {
        return CPY_INT_TAG;
    } else {
        return CPyTagged_StealFromObject(result);
    }
}

static inline bool CPyTagged_MaybeRemainderFault(CPyTagged left, CPyTagged right) {
    // Division/modulus can fault when dividing INT_MIN by -1, but we
    // do our mods on still-tagged integers with the low-bit clear, so
    // -1 is actually represented as -2 and can't overflow.
    // Mod by 0 can still fault though.
    return right == 0;
}

// Python modulus (result takes the sign of the divisor) on tagged ints.
static CPyTagged CPyTagged_Remainder(CPyTagged left, CPyTagged right) {
    if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)
        && !CPyTagged_MaybeRemainderFault(left, right)) {
        Py_ssize_t result = (Py_ssize_t)left % (Py_ssize_t)right;
        // C % matches the sign of the dividend; shift into Python's
        // convention when the operands' signs differ.
        if (((Py_ssize_t)right < 0) != ((Py_ssize_t)left < 0) && result != 0) {
            result += right;
        }
        return result;
    }
    PyObject *left_obj = CPyTagged_AsObject(left);
    PyObject *right_obj = CPyTagged_AsObject(right);
    PyObject *result = PyNumber_Remainder(left_obj, right_obj);
    Py_DECREF(left_obj);
    Py_DECREF(right_obj);
    // Handle exceptions honestly because it could be ZeroDivisionError
    if (result == NULL) {
        return CPY_INT_TAG;
    } else {
        return CPyTagged_StealFromObject(result);
    }
}

// Slow-path equality: only called (see CPyTagged_IsEq below) when
// `left` is boxed. A short `right` can never equal a boxed `left`.
static bool CPyTagged_IsEq_(CPyTagged left, CPyTagged right) {
    if (CPyTagged_CheckShort(right)) {
        return false;
    } else {
        int result = PyObject_RichCompareBool(CPyTagged_LongAsObject(left),
                                              CPyTagged_LongAsObject(right), Py_EQ);
        // Comparing two ints shouldn't raise; treat failure as fatal.
        if (result == -1) {
            CPyError_OutOfMemory();
        }
        return result;
    }
}

// left == right for tagged ints; identical words compare equal directly.
static inline bool CPyTagged_IsEq(CPyTagged left, CPyTagged right) {
    if (CPyTagged_CheckShort(left)) {
        return left == right;
    } else {
        return CPyTagged_IsEq_(left, right);
    }
}

// left != right for tagged ints.
static inline bool CPyTagged_IsNe(CPyTagged left, CPyTagged right) {
    if (CPyTagged_CheckShort(left)) {
        return left != right;
    } else {
        return !CPyTagged_IsEq_(left, right);
    }
}

// Slow-path less-than via the boxed rich-comparison protocol.
static bool CPyTagged_IsLt_(CPyTagged left, CPyTagged right) {
    PyObject *left_obj = CPyTagged_AsObject(left);
    PyObject *right_obj = CPyTagged_AsObject(right);
    int result = PyObject_RichCompareBool(left_obj, right_obj, Py_LT);
    Py_DECREF(left_obj);
    Py_DECREF(right_obj);
    if (result == -1) {
        CPyError_OutOfMemory();
    }
    return result;
}

// left < right; tagged comparison is order-preserving for short ints.
static inline bool CPyTagged_IsLt(CPyTagged left, CPyTagged right) {
    if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) {
        return (Py_ssize_t)left < (Py_ssize_t)right;
    } else {
        return CPyTagged_IsLt_(left, right);
    }
}

// left >= right == !(left < right).
static inline bool CPyTagged_IsGe(CPyTagged left, CPyTagged right) {
    if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) {
        return (Py_ssize_t)left >= (Py_ssize_t)right;
    } else {
        return !CPyTagged_IsLt_(left, right);
    }
}

// left > right == right < left.
static inline bool CPyTagged_IsGt(CPyTagged left, CPyTagged right) {
    if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) {
        return (Py_ssize_t)left > (Py_ssize_t)right;
    } else {
        return CPyTagged_IsLt_(right, left);
    }
}

// left <= right == !(right < left).
static inline bool CPyTagged_IsLe(CPyTagged left, CPyTagged right) {
    if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) {
        return (Py_ssize_t)left <= (Py_ssize_t)right;
    } else {
        return !CPyTagged_IsLt_(right, left);
    }
}

// id(o) as a tagged int (the object's address).
static CPyTagged CPyTagged_Id(PyObject *o) {
    return CPyTagged_FromSsize_t((Py_ssize_t)o);
}

/* ==== List / set / tuple primitives ==== */

// list[index] with no bounds or tag checking; caller guarantees a short,
// in-range, non-negative index. Returns a new reference.
static PyObject *CPyList_GetItemUnsafe(PyObject *list, CPyTagged index) {
    Py_ssize_t n = CPyTagged_ShortAsSsize_t(index);
    PyObject *result = PyList_GET_ITEM(list, n);
    Py_INCREF(result);
    return result;
}

// list[index] for an index known to be short; supports negative
// indexing and raises IndexError when out of range. New reference.
static PyObject *CPyList_GetItemShort(PyObject *list, CPyTagged index) {
    Py_ssize_t n = CPyTagged_ShortAsSsize_t(index);
    Py_ssize_t size = PyList_GET_SIZE(list);
    if (n >= 0) {
        if (n >= size) {
            PyErr_SetString(PyExc_IndexError, "list index out of range");
            return NULL;
        }
    } else {
        n += size;
        if (n < 0) {
            PyErr_SetString(PyExc_IndexError, "list index out of range");
            return NULL;
        }
    }
    PyObject *result = PyList_GET_ITEM(list, n);
    Py_INCREF(result);
    return result;
}

// list[index] for an arbitrary tagged index. A boxed index is by
// definition out of the addressable range, so it is an IndexError.
static PyObject *CPyList_GetItem(PyObject *list, CPyTagged index) {
    if (CPyTagged_CheckShort(index)) {
        Py_ssize_t n = CPyTagged_ShortAsSsize_t(index);
        Py_ssize_t size = PyList_GET_SIZE(list);
        if (n >= 0) {
            if (n >= size) {
                PyErr_SetString(PyExc_IndexError, "list index out of range");
                return NULL;
            }
        } else {
            n += size;
            if (n < 0) {
                PyErr_SetString(PyExc_IndexError, "list index out of range");
                return NULL;
            }
        }
        PyObject *result = PyList_GET_ITEM(list, n);
        Py_INCREF(result);
        return result;
    } else {
        PyErr_SetString(PyExc_IndexError, "list index out of range");
        return NULL;
    }
}

// list[index] = value. Steals the reference to `value`; returns false
// with IndexError set on a bad index.
static bool CPyList_SetItem(PyObject *list, CPyTagged index, PyObject *value) {
    if (CPyTagged_CheckShort(index)) {
        Py_ssize_t n = CPyTagged_ShortAsSsize_t(index);
        Py_ssize_t size = PyList_GET_SIZE(list);
        if (n >= 0) {
            if (n >= size) {
                PyErr_SetString(PyExc_IndexError, "list assignment index out of range");
                return false;
            }
        } else {
            n += size;
            if (n < 0) {
                PyErr_SetString(PyExc_IndexError, "list assignment index out of range");
                return false;
            }
        }
        // PyList_SET_ITEM doesn't decref the old element, so we do
        Py_DECREF(PyList_GET_ITEM(list, n));
        // N.B: Steals reference
        PyList_SET_ITEM(list, n, value);
        return true;
    } else {
        PyErr_SetString(PyExc_IndexError, "list assignment index out of range");
        return false;
    }
}

// list.pop() -- remove and return the last element.
static PyObject *CPyList_PopLast(PyObject *obj) {
    // I tried a specalized version of pop_impl for just removing the
    // last element and it wasn't any faster in microbenchmarks than
    // the generic one so I ditched it.
    return list_pop_impl((PyListObject *)obj, -1);
}

// list.pop(index) for a tagged index.
static PyObject *CPyList_Pop(PyObject *obj, CPyTagged index) {
    if (CPyTagged_CheckShort(index)) {
        Py_ssize_t n = CPyTagged_ShortAsSsize_t(index);
        return list_pop_impl((PyListObject *)obj, n);
    } else {
        PyErr_SetString(PyExc_IndexError, "pop index out of range");
        return NULL;
    }
}

// list.count(value) as a tagged int.
static CPyTagged CPyList_Count(PyObject *obj, PyObject *value) {
    return list_count((PyListObject *)obj, value);
}

// set.remove(key): true on success, false with KeyError (or the
// error raised by PySet_Discard) otherwise.
static bool CPySet_Remove(PyObject *set, PyObject *key) {
    int success = PySet_Discard(set, key);
    if (success == 1) {
        return true;
    }
    if (success == 0) {
        _PyErr_SetKeyError(key);
    }
    return false;
}

// tuple[index] with negative-index support; new reference.
static PyObject *CPySequenceTuple_GetItem(PyObject *tuple, CPyTagged index) {
    if (CPyTagged_CheckShort(index)) {
        Py_ssize_t n = CPyTagged_ShortAsSsize_t(index);
        Py_ssize_t size = PyTuple_GET_SIZE(tuple);
        if (n >= 0) {
            if (n >= size) {
                PyErr_SetString(PyExc_IndexError, "tuple index out of range");
                return NULL;
            }
        } else {
            n += size;
            if (n < 0) {
                PyErr_SetString(PyExc_IndexError, "tuple index out of range");
                return NULL;
            }
        }
        PyObject *result = PyTuple_GET_ITEM(tuple, n);
        Py_INCREF(result);
        return result;
    } else {
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
        return NULL;
    }
}

// hash(o) as a tagged int; CPY_INT_TAG signals an error.
static CPyTagged CPyObject_Hash(PyObject *o) {
    Py_hash_t h = PyObject_Hash(o);
    if (h == -1) {
        return CPY_INT_TAG;
    } else {
        // This is tragically annoying. The range of hash values in
        // 64-bit python covers 64-bits, and our short integers only
        // cover 63. This means that half the time we are boxing the
        // result for basically no good reason. To add insult to
        // injury it is probably about to be immediately unboxed by a
        // tp_hash wrapper.
        return CPyTagged_FromSsize_t(h);
    }
}

// len(obj) as a tagged int; CPY_INT_TAG signals an error.
static inline CPyTagged CPyObject_Size(PyObject *obj) {
    Py_ssize_t s = PyObject_Size(obj);
    if (s < 0) {
        return CPY_INT_TAG;
    } else {
        // Technically __len__ could return a really big number, so we
        // should allow this to produce a boxed int. In practice it
        // shouldn't ever if the data structure actually contains all
        // the elements, but...
        return CPyTagged_FromSsize_t(s);
    }
}

// Convert an object-or-NULL result into a 0/-1 status, dropping the
// reference when present.
static inline int CPy_ObjectToStatus(PyObject *obj) {
    if (obj) {
        Py_DECREF(obj);
        return 0;
    } else {
        return -1;
    }
}

// dict subclasses like defaultdict override things in interesting
// ways, so we don't want to just directly use the dict methods. Not
// sure if it is actually worth doing all this stuff, but it saves
// some indirections.

// dict[key]: raises KeyError when missing. New reference.
static PyObject *CPyDict_GetItem(PyObject *dict, PyObject *key) {
    if (PyDict_CheckExact(dict)) {
        PyObject *res = PyDict_GetItemWithError(dict, key);
        if (!res) {
            // PyDict_GetItemWithError returns NULL both for "missing"
            // and for a real error; only set KeyError for the former.
            if (!PyErr_Occurred()) {
                PyErr_SetObject(PyExc_KeyError, key);
            }
        } else {
            Py_INCREF(res);
        }
        return res;
    } else {
        return PyObject_GetItem(dict, key);
    }
}

// Build a dict literal from `size` interleaved (key, value) vararg
// pairs (borrowed references). Returns a new dict or NULL.
static PyObject *CPyDict_Build(Py_ssize_t size, ...) {
    Py_ssize_t i;
    PyObject *res = _PyDict_NewPresized(size);
    if (res == NULL) {
        return NULL;
    }
    va_list args;
    va_start(args, size);
    for (i = 0; i < size; i++) {
        PyObject *key = va_arg(args, PyObject *);
        PyObject *value = va_arg(args, PyObject *);
        // NOTE(review): this early return skips va_end(args); harmless
        // on common ABIs but technically required by C -- consider
        // calling va_end before returning.
        if (PyDict_SetItem(res, key, value)) {
            Py_DECREF(res);
            return NULL;
        }
    }
    va_end(args);
    return res;
}

// dict.get(key, fallback). New reference.
static PyObject *CPyDict_Get(PyObject *dict, PyObject *key, PyObject *fallback) {
    // We are dodgily assuming that get on a subclass doesn't have
    // different behavior.
    PyObject *res = PyDict_GetItemWithError(dict, key);
    if (!res) {
        if (PyErr_Occurred()) {
            return NULL;
        }
        res = fallback;
    }
    Py_INCREF(res);
    return res;
}

// dict[key] = value, dispatching on exact dict vs subclass.
static int CPyDict_SetItem(PyObject *dict, PyObject *key, PyObject *value) {
    if (PyDict_CheckExact(dict)) {
        return PyDict_SetItem(dict, key, value);
    } else {
        return PyObject_SetItem(dict, key, value);
    }
}

// dict.update(stuff) through the (possibly overridden) method.
static int CPyDict_UpdateGeneral(PyObject *dict, PyObject *stuff) {
    _Py_IDENTIFIER(update);
    PyObject *res = _PyObject_CallMethodIdObjArgs(dict, &PyId_update, stuff, NULL);
    return CPy_ObjectToStatus(res);
}

// Update used inside a dict display ({**a, **b}); mirrors CPython's
// ceval behavior of turning AttributeError into a TypeError.
static int CPyDict_UpdateInDisplay(PyObject *dict, PyObject *stuff) {
    // from https://github.com/python/cpython/blob/55d035113dfb1bd90495c8571758f504ae8d4802/Python/ceval.c#L2710
    int ret = PyDict_Update(dict, stuff);
    if (ret < 0) {
        if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
            PyErr_Format(PyExc_TypeError, "'%.200s' object is not a mapping",
                         stuff->ob_type->tp_name);
        }
    }
    return ret;
}

// dict.update(stuff) fast path for exact dicts.
static int CPyDict_Update(PyObject *dict, PyObject *stuff) {
    if (PyDict_CheckExact(dict)) {
        return PyDict_Update(dict, stuff);
    } else {
        return CPyDict_UpdateGeneral(dict, stuff);
    }
}

// Update from any iterable: mappings (anything with .keys) via
// PyDict_Update, otherwise as a sequence of (key, value) pairs.
static int CPyDict_UpdateFromAny(PyObject *dict, PyObject *stuff) {
    if (PyDict_CheckExact(dict)) {
        // Argh this sucks
        _Py_IDENTIFIER(keys);
        if (PyDict_Check(stuff) || _PyObject_HasAttrId(stuff, &PyId_keys)) {
            return PyDict_Update(dict, stuff);
        } else {
            return PyDict_MergeFromSeq2(dict, stuff, 1);
        }
    } else {
        return CPyDict_UpdateGeneral(dict, stuff);
    }
}

// dict(obj): copy a dict, or build one from a mapping / pair sequence.
static PyObject *CPyDict_FromAny(PyObject *obj) {
    if (PyDict_Check(obj)) {
        return PyDict_Copy(obj);
    } else {
        int res;
        PyObject *dict = PyDict_New();
        if (!dict) {
            return NULL;
        }
        _Py_IDENTIFIER(keys);
        if (_PyObject_HasAttrId(obj, &PyId_keys)) {
            res = PyDict_Update(dict, obj);
        } else {
            res = PyDict_MergeFromSeq2(dict, obj, 1);
        }
        if (res < 0) {
            Py_DECREF(dict);
            return NULL;
        }
        return dict;
    }
}

// next(iter) via the tp_iternext slot directly; NULL means exhausted
// or error (caller must distinguish via PyErr_Occurred).
static PyObject *CPyIter_Next(PyObject *iter) {
    return (*iter->ob_type->tp_iternext)(iter);
}

// Extract the value carried by a pending StopIteration, or NULL.
static PyObject *CPy_FetchStopIterationValue(void) {
    PyObject *val = NULL;
    _PyGen_FetchStopIterationValue(&val);
    return val;
}

static PyObject *CPyIter_Send(PyObject *iter, PyObject *val) {
    // Do a send, or a next if second arg is None.
    // (This behavior is to match the PEP 380 spec for yield from.)
    _Py_IDENTIFIER(send);
    if (val == Py_None) {
        return CPyIter_Next(iter);
    } else {
        return _PyObject_CallMethodIdObjArgs(iter, &PyId_send, val, NULL);
    }
}

static PyObject *CPy_GetCoro(PyObject *obj) {
    // If the type has an __await__ method, call it,
    // otherwise, fallback to calling __iter__.
    PyAsyncMethods* async_struct = obj->ob_type->tp_as_async;
    if (async_struct != NULL && async_struct->am_await != NULL) {
        return (async_struct->am_await)(obj);
    } else {
        // TODO: We should check that the type is a generator decorated with
        // asyncio.coroutine
        return PyObject_GetIter(obj);
    }
}

// getattr(v, name, defl): swallow AttributeError only, propagate
// anything else. New reference.
static PyObject *CPyObject_GetAttr3(PyObject *v, PyObject *name, PyObject *defl) {
    PyObject *result = PyObject_GetAttr(v, name);
    if (!result && PyErr_ExceptionMatches(PyExc_AttributeError)) {
        PyErr_Clear();
        Py_INCREF(defl);
        result = defl;
    }
    return result;
}

// mypy lets ints silently coerce to floats, so a mypyc runtime float
// might be an int also
static inline bool CPyFloat_Check(PyObject *o) {
    return PyFloat_Check(o) || PyLong_Check(o);
}

// int(x) for a runtime "float" that may already be an int (see above).
static PyObject *CPyLong_FromFloat(PyObject *o) {
    if (PyLong_Check(o)) {
        CPy_INCREF(o);
        return o;
    } else {
        return PyLong_FromDouble(PyFloat_AS_DOUBLE(o));
    }
}

// Construct a nicely formatted type name based on __module__ and __name__.
// Returns NULL on failure (an exception may or may not be set).
static PyObject *CPy_GetTypeName(PyObject *type) {
    PyObject *module = NULL, *name = NULL;
    PyObject *full = NULL;
    module = PyObject_GetAttrString(type, "__module__");
    if (!module || !PyUnicode_Check(module)) {
        goto out;
    }
    name = PyObject_GetAttrString(type, "__qualname__");
    if (!name || !PyUnicode_Check(name)) {
        goto out;
    }
    // "builtins.int" reads better as just "int".
    if (PyUnicode_CompareWithASCIIString(module, "builtins") == 0) {
        Py_INCREF(name);
        full = name;
    } else {
        full = PyUnicode_FromFormat("%U.%U", module, name);
    }
out:
    Py_XDECREF(module);
    Py_XDECREF(name);
    return full;
}

// Get the type of a value as a string, expanding tuples to include
// all the element types.
// Render a human-readable type description for `value`, used by
// CPy_TypeError below. None prints as "None"; tuples expand to the
// element types ("tuple[int, str]"), truncated past 10 items.
// Returns a new str reference, or NULL on failure.
static PyObject *CPy_FormatTypeName(PyObject *value) {
    if (value == Py_None) {
        return PyUnicode_FromString("None");
    }

    if (!PyTuple_CheckExact(value)) {
        return CPy_GetTypeName((PyObject *)Py_TYPE(value));
    }

    if (PyTuple_GET_SIZE(value) > 10) {
        // Fix: PyTuple_GET_SIZE is Py_ssize_t, so the varargs slot must
        // be read with %zd, not %d (undefined behavior on LP64 otherwise).
        return PyUnicode_FromFormat("tuple[<%zd items>]", PyTuple_GET_SIZE(value));
    }

    // Most of the logic is all for tuples, which is the only interesting case
    PyObject *output = PyUnicode_FromString("tuple[");
    if (!output) {
        return NULL;
    }
    /* This is quadratic but if that ever matters something is really weird. */
    int i;
    for (i = 0; i < PyTuple_GET_SIZE(value); i++) {
        PyObject *s = CPy_FormatTypeName(PyTuple_GET_ITEM(value, i));
        if (!s) {
            Py_DECREF(output);
            return NULL;
        }
        PyObject *next = PyUnicode_FromFormat("%U%U%s", output, s,
                                              i + 1 == PyTuple_GET_SIZE(value) ? "]" : ", ");
        Py_DECREF(output);
        Py_DECREF(s);
        if (!next) {
            return NULL;
        }
        output = next;
    }
    return output;
}

// Raise a TypeError of the form "<expected> object expected; got <type>".
// Falls back to a message without the actual type if formatting fails.
static void CPy_TypeError(const char *expected, PyObject *value) {
    PyObject *out = CPy_FormatTypeName(value);
    if (out) {
        PyErr_Format(PyExc_TypeError, "%s object expected; got %U", expected, out);
        Py_DECREF(out);
    } else {
        PyErr_Format(PyExc_TypeError, "%s object expected; and errored formatting real type!",
                     expected);
    }
}

// These functions are basically exactly PyCode_NewEmpty and
// _PyTraceback_Add which are available in all the versions we support.
// We're continuing to use them because we'll probably optimize them later.
// Build a minimal, empty code object carrying just a filename, function
// name and line number, for synthesizing traceback entries.
// Returns a new reference or NULL on failure.
static PyCodeObject *CPy_CreateCodeObject(const char *filename, const char *funcname, int line) {
    PyObject *filename_obj = PyUnicode_FromString(filename);
    PyObject *funcname_obj = PyUnicode_FromString(funcname);
    PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0);
    PyObject *empty_tuple = PyTuple_New(0);
    PyCodeObject *code_obj = NULL;
    if (filename_obj == NULL || funcname_obj == NULL || empty_bytes == NULL
        || empty_tuple == NULL) {
        goto Error;
    }
    code_obj = PyCode_New(0, 0, 0, 0, 0,
                          empty_bytes, empty_tuple, empty_tuple, empty_tuple,
                          empty_tuple, empty_tuple, filename_obj, funcname_obj,
                          line, empty_bytes);
Error:
    // Error and success paths share the cleanup: PyCode_New took its own
    // references, so ours are released unconditionally.
    Py_XDECREF(empty_bytes);
    Py_XDECREF(empty_tuple);
    Py_XDECREF(filename_obj);
    Py_XDECREF(funcname_obj);
    return code_obj;
}

// Append a synthetic frame (filename/funcname/line) to the traceback of
// the currently pending exception, like _PyTraceback_Add.
static void CPy_AddTraceback(const char *filename, const char *funcname, int line,
                             PyObject *globals) {
    PyObject *exc, *val, *tb;
    PyThreadState *thread_state = PyThreadState_GET();
    PyFrameObject *frame_obj;

    // We need to save off the exception state because in 3.8,
    // PyFrame_New fails if there is an error set and it fails to look
    // up builtins in the globals. (_PyTraceback_Add documents that it
    // needs to do it because it decodes the filename according to the
    // FS encoding, which could have a decoder in Python. We don't do
    // that so *that* doesn't apply to us.)
    PyErr_Fetch(&exc, &val, &tb);
    PyCodeObject *code_obj = CPy_CreateCodeObject(filename, funcname, line);
    if (code_obj == NULL) {
        goto error;
    }
    frame_obj = PyFrame_New(thread_state, code_obj, globals, 0);
    if (frame_obj == NULL) {
        Py_DECREF(code_obj);
        goto error;
    }
    frame_obj->f_lineno = line;
    // Restore the saved exception before recording it against the frame.
    PyErr_Restore(exc, val, tb);
    PyTraceBack_Here(frame_obj);
    Py_DECREF(code_obj);
    Py_DECREF(frame_obj);
    return;

error:
    // Chain whatever went wrong here onto the original exception.
    _PyErr_ChainExceptions(exc, val, tb);
}

// mypyc is not very good at dealing with refcount management of
// pointers that might be NULL. As a workaround for this, the
// exception APIs that might want to return NULL pointers instead
// return properly refcounted pointers to this dummy object.
struct ExcDummyStruct { PyObject_HEAD };
extern struct ExcDummyStruct _CPy_ExcDummyStruct;
extern PyObject *_CPy_ExcDummy;

// Replace a NULL slot with a new reference to the dummy sentinel.
static inline void _CPy_ToDummy(PyObject **p) {
    if (*p == NULL) {
        Py_INCREF(_CPy_ExcDummy);
        *p = _CPy_ExcDummy;
    }
}

// Inverse of _CPy_ToDummy: dummy maps back to NULL, anything else is
// returned as a new reference.
static inline PyObject *_CPy_FromDummy(PyObject *p) {
    if (p == _CPy_ExcDummy) return NULL;
    Py_INCREF(p);
    return p;
}

// Enter an exception handler: return the previous sys.exc_info() triple
// (dummified) through the out-params and install the pending exception
// as the active one.
static void CPy_CatchError(PyObject **p_type, PyObject **p_value, PyObject **p_traceback) {
    // We need to return the existing sys.exc_info() information, so
    // that it can be restored when we finish handling the error we
    // are catching now. Grab that triple and convert NULL values to
    // the ExcDummy object in order to simplify refcount handling in
    // generated code.
    PyErr_GetExcInfo(p_type, p_value, p_traceback);
    _CPy_ToDummy(p_type);
    _CPy_ToDummy(p_value);
    _CPy_ToDummy(p_traceback);

    if (!PyErr_Occurred()) {
        PyErr_SetString(PyExc_RuntimeError, "CPy_CatchError called with no error!");
    }

    // Retrieve the error info and normalize it so that it looks like
    // what python code needs it to be.
    PyObject *type, *value, *traceback;
    PyErr_Fetch(&type, &value, &traceback);
    // Could we avoid always normalizing?
    PyErr_NormalizeException(&type, &value, &traceback);
    if (traceback != NULL) {
        PyException_SetTraceback(value, traceback);
    }
    // Indicate that we are now handling this exception by stashing it
    // in sys.exc_info(). mypyc routines that need access to the
    // exception will read it out of there.
    PyErr_SetExcInfo(type, value, traceback);
    // Clear the error indicator, since the exception isn't
    // propagating anymore.
    PyErr_Clear();
}

// Leave an exception handler: reinstate the triple saved by
// CPy_CatchError (dummies become NULL again).
static void CPy_RestoreExcInfo(PyObject *type, PyObject *value, PyObject *traceback) {
    // PyErr_SetExcInfo steals the references to the values passed to it.
    PyErr_SetExcInfo(_CPy_FromDummy(type), _CPy_FromDummy(value), _CPy_FromDummy(traceback));
}

// `raise exc`: instantiate when given a class, otherwise raise the
// instance with its own type.
static void CPy_Raise(PyObject *exc) {
    if (PyObject_IsInstance(exc, (PyObject *)&PyType_Type)) {
        PyObject *obj = PyObject_CallFunctionObjArgs(exc, NULL);
        if (!obj)
            return;
        PyErr_SetObject(exc, obj);
        Py_DECREF(obj);
    } else {
        PyErr_SetObject((PyObject *)Py_TYPE(exc), exc);
    }
}

// Bare `raise`: re-raise the currently handled exception.
// PyErr_GetExcInfo returns new references, which PyErr_Restore steals.
static void CPy_Reraise(void) {
    PyObject *p_type, *p_value, *p_traceback;
    PyErr_GetExcInfo(&p_type, &p_value, &p_traceback);
    PyErr_Restore(p_type, p_value, p_traceback);
}

static void CPyErr_SetObjectAndTraceback(PyObject *type, PyObject *value, PyObject *traceback) {
    // Set the value and traceback of an error. Because calling
    // PyErr_Restore takes away a reference to each object passed in
    // as an argument, we manually increase the reference count of
    // each argument before calling it.
    Py_INCREF(type);
    Py_INCREF(value);
    Py_INCREF(traceback);
    PyErr_Restore(type, value, traceback);
}

// We want to avoid the public PyErr_GetExcInfo API for these because
// it requires a bunch of spurious refcount traffic on the parts of
// the triple we don't care about. Unfortunately the layout of the
// data structure changed in 3.7 so we need to handle that.
// Accessor for the interpreter's handled-exception state; the struct
// holding exc_type/exc_value moved in 3.7 (PEP 3110 cleanup).
#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 7
#define CPy_ExcState() PyThreadState_GET()->exc_info
#else
#define CPy_ExcState() PyThreadState_GET()
#endif

// Does the currently *handled* exception (not a pending one) match `type`?
static bool CPy_ExceptionMatches(PyObject *type) {
    return PyErr_GivenExceptionMatches(CPy_ExcState()->exc_type, type);
}

// New reference to the currently handled exception value.
static PyObject *CPy_GetExcValue(void) {
    PyObject *exc = CPy_ExcState()->exc_value;
    Py_INCREF(exc);
    return exc;
}

// Replace a NULL slot with a new reference to None.
static inline void _CPy_ToNone(PyObject **p) {
    if (*p == NULL) {
        Py_INCREF(Py_None);
        *p = Py_None;
    }
}

// sys.exc_info() with NULLs normalized to None, matching what Python
// code expects to see.
static void CPy_GetExcInfo(PyObject **p_type, PyObject **p_value, PyObject **p_traceback) {
    PyErr_GetExcInfo(p_type, p_value, p_traceback);
    _CPy_ToNone(p_type);
    _CPy_ToNone(p_value);
    _CPy_ToNone(p_traceback);
}

void CPy_Init(void);

// A somewhat hairy implementation of specifically most of the error handling
// in `yield from` error handling. The point here is to reduce code size.
//
// This implements most of the bodies of the `except` blocks in the
// pseudocode in PEP 380.
//
// Returns true (1) if a StopIteration was received and we should return.
// Returns false (0) if a value should be yielded.
// In both cases the value is stored in outp.
// Signals an error (2) if an exception should be propagated.
// See the contract in the comment immediately above: 1 = StopIteration
// (return *outp), 0 = yield *outp, 2 = propagate the current exception.
static int CPy_YieldFromErrorHandle(PyObject *iter, PyObject **outp) {
    _Py_IDENTIFIER(close);
    _Py_IDENTIFIER(throw);
    PyObject *exc_type = CPy_ExcState()->exc_type;
    PyObject *type, *value, *traceback;
    PyObject *_m;
    PyObject *res;
    *outp = NULL;

    if (PyErr_GivenExceptionMatches(exc_type, PyExc_GeneratorExit)) {
        // GeneratorExit: close the subiterator (if it has close) and
        // let the exception propagate (PEP 380).
        _m = _PyObject_GetAttrId(iter, &PyId_close);
        if (_m) {
            res = PyObject_CallFunctionObjArgs(_m, NULL);
            Py_DECREF(_m);
            if (!res)
                return 2;
            Py_DECREF(res);
        } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
            // No close() method: fine, just propagate.
            PyErr_Clear();
        } else {
            return 2;
        }
    } else {
        // Any other exception: forward it via throw() when available.
        _m = _PyObject_GetAttrId(iter, &PyId_throw);
        if (_m) {
            CPy_GetExcInfo(&type, &value, &traceback);
            res = PyObject_CallFunctionObjArgs(_m, type, value, traceback, NULL);
            Py_DECREF(type);
            Py_DECREF(value);
            Py_DECREF(traceback);
            Py_DECREF(_m);
            if (res) {
                // throw() yielded another value.
                *outp = res;
                return 0;
            } else {
                // throw() raised; StopIteration means the subgenerator
                // finished and carries the return value.
                res = CPy_FetchStopIterationValue();
                if (res) {
                    *outp = res;
                    return 1;
                }
            }
        } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
            PyErr_Clear();
        } else {
            return 2;
        }
    }

    CPy_Reraise();
    return 2;
}

// setattr(obj, key, value) for every item of `dict`; -1 on failure.
static int _CPy_UpdateObjFromDict(PyObject *obj, PyObject *dict) {
    Py_ssize_t pos = 0;
    PyObject *key, *value;
    while (PyDict_Next(dict, &pos, &key, &value)) {
        if (PyObject_SetAttr(obj, key, value) != 0) {
            return -1;
        }
    }
    return 0;
}

// Support for pickling; reusable getstate and setstate functions

// __setstate__: restore attributes from the state dict.
static PyObject *
CPyPickle_SetState(PyObject *obj, PyObject *state) {
    if (_CPy_UpdateObjFromDict(obj, state) != 0) {
        return NULL;
    }
    Py_RETURN_NONE;
}

// __getstate__: snapshot the attributes listed in the class's
// __mypyc_attrs__ tuple into a new dict. Returns NULL on error.
static PyObject *
CPyPickle_GetState(PyObject *obj) {
    PyObject *attrs = NULL, *state = NULL;

    attrs = PyObject_GetAttrString((PyObject *)Py_TYPE(obj), "__mypyc_attrs__");
    if (!attrs) {
        goto fail;
    }
    if (!PyTuple_Check(attrs)) {
        PyErr_SetString(PyExc_TypeError, "__mypyc_attrs__ is not a tuple");
        goto fail;
    }
    state = PyDict_New();
    if (!state) {
        goto fail;
    }

    // Collect all the values of attributes in __mypyc_attrs__
    // Attributes that are missing we just ignore
    int i;
    for (i = 0; i < PyTuple_GET_SIZE(attrs); i++) {
        PyObject *key = PyTuple_GET_ITEM(attrs, i);
        PyObject *value = PyObject_GetAttr(obj, key);
        if (!value) {
            if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
                PyErr_Clear();
                continue;
            }
            goto fail;
        }
        int result = PyDict_SetItem(state, key, value);
        Py_DECREF(value);
        if (result != 0) {
            goto fail;
        }
    }

    Py_DECREF(attrs);
    return state;

fail:
    Py_XDECREF(attrs);
    Py_XDECREF(state);
    return NULL;
}

/* Support for our partial built-in support for dataclasses.
 *
 * Take a class we want to make a dataclass, remove any descriptors
 * for annotated attributes, swap in the actual values of the class
 * variables, invoke dataclass, and then restore all of the
 * descriptors.
 *
 * The purpose of all this is that dataclasses uses the values of
 * class variables to drive which attributes are required and what the
 * default values/factories are for optional attributes. This means
 * that the class dict needs to contain those values instead of getset
 * descriptors for the attributes when we invoke dataclass.
 *
 * We need to remove descriptors for attributes even when there is no
 * default value for them, or else dataclass will think the descriptor
 * is the default value. We remove only the attributes, since we don't
 * want dataclasses to try generating functions when they are already
 * implemented.
* * Args: * dataclass_dec: The decorator to apply * tp: The class we are making a dataclass * dict: The dictionary containing values that dataclasses needs * annotations: The type annotation dictionary */ static int CPyDataclass_SleightOfHand(PyObject *dataclass_dec, PyObject *tp, PyObject *dict, PyObject *annotations) { PyTypeObject *ttp = (PyTypeObject *)tp; Py_ssize_t pos; PyObject *res; /* Make a copy of the original class __dict__ */ PyObject *orig_dict = PyDict_Copy(ttp->tp_dict); if (!orig_dict) { goto fail; } /* Delete anything that had an annotation */ pos = 0; PyObject *key; while (PyDict_Next(annotations, &pos, &key, NULL)) { if (PyObject_DelAttr(tp, key) != 0) { goto fail; } } /* Copy in all the attributes that we want dataclass to see */ if (_CPy_UpdateObjFromDict(tp, dict) != 0) { goto fail; } /* Run the @dataclass descriptor */ res = PyObject_CallFunctionObjArgs(dataclass_dec, tp, NULL); if (!res) { goto fail; } Py_DECREF(res); /* Copy back the original contents of the dict */ if (_CPy_UpdateObjFromDict(tp, orig_dict) != 0) { goto fail; } Py_DECREF(orig_dict); return 1; fail: Py_XDECREF(orig_dict); return 0; } int CPyArg_ParseTupleAndKeywords(PyObject *, PyObject *, const char *, char **, ...); #ifdef __cplusplus } #endif #endif // CPY_CPY_H mypy-0.761/mypyc/lib-rt/getargs.c0000644€tŠÔÚ€2›s®0000014763313576752246023070 0ustar jukkaDROPBOX\Domain Users00000000000000/* getargs implementation copied from Python 3.8 and stripped down to only include * the functions we need. * We also add support for required kwonly args and accepting *args / **kwargs. * A good idea would be to also vendor in the Fast versions and get our stuff * working with *that*. * Another probably good idea is to strip out all the formatting stuff we don't need * and then add in custom stuff that we do need. * * DOCUMENTATION OF THE EXTENSIONS: * - Arguments given after a @ format specify are required keyword-only arguments. * The | and $ specifiers must both appear before @. 
* - If the first character of a format string is %, then the function can support * *args and **kwargs. On seeing a %, the parser will consume two arguments, * which should be pointers to variables to store the *args and **kwargs, respectively. * Either pointer can be NULL, in which case the function doesn't take that * variety of vararg. * Unlike most format specifiers, the caller takes ownership of these objects * and is responsible for decrefing them. */ #include "Python.h" #include "pythonsupport.h" #include #include #define _PyTuple_CAST(op) (assert(PyTuple_Check(op)), (PyTupleObject *)(op)) #define _PyTuple_ITEMS(op) (_PyTuple_CAST(op)->ob_item) #ifndef PyDict_GET_SIZE #define PyDict_GET_SIZE(d) PyDict_Size(d) #endif #ifdef __cplusplus extern "C" { #endif int CPyArg_ParseTupleAndKeywords(PyObject *, PyObject *, const char *, char **, ...); int CPyArg_VaParseTupleAndKeywords(PyObject *, PyObject *, const char *, char **, va_list); #define FLAG_COMPAT 1 #define FLAG_SIZE_T 2 typedef int (*destr_t)(PyObject *, void *); /* Keep track of "objects" that have been allocated or initialized and which will need to be deallocated or cleaned up somehow if overall parsing fails. 
*/ typedef struct { void *item; destr_t destructor; } freelistentry_t; typedef struct { freelistentry_t *entries; int first_available; int entries_malloced; } freelist_t; #define STATIC_FREELIST_ENTRIES 8 /* Forward */ static void seterror(Py_ssize_t, const char *, int *, const char *, const char *); static const char *convertitem(PyObject *, const char **, va_list *, int, int *, char *, size_t, freelist_t *); static const char *converttuple(PyObject *, const char **, va_list *, int, int *, char *, size_t, int, freelist_t *); static const char *convertsimple(PyObject *, const char **, va_list *, int, char *, size_t, freelist_t *); static Py_ssize_t convertbuffer(PyObject *, const void **p, const char **); static int getbuffer(PyObject *, Py_buffer *, const char**); static int vgetargskeywords(PyObject *, PyObject *, const char *, char **, va_list *, int); static const char *skipitem(const char **, va_list *, int); /* Handle cleanup of allocated memory in case of exception */ static int cleanup_ptr(PyObject *self, void *ptr) { if (ptr) { PyMem_FREE(ptr); } return 0; } static int cleanup_buffer(PyObject *self, void *ptr) { Py_buffer *buf = (Py_buffer *)ptr; if (buf) { PyBuffer_Release(buf); } return 0; } static int addcleanup(void *ptr, freelist_t *freelist, destr_t destructor) { int index; index = freelist->first_available; freelist->first_available += 1; freelist->entries[index].item = ptr; freelist->entries[index].destructor = destructor; return 0; } static int cleanreturn(int retval, freelist_t *freelist) { int index; if (retval == 0) { /* A failure occurred, therefore execute all of the cleanup functions. 
*/ for (index = 0; index < freelist->first_available; ++index) { freelist->entries[index].destructor(NULL, freelist->entries[index].item); } } if (freelist->entries_malloced) PyMem_FREE(freelist->entries); return retval; } static void seterror(Py_ssize_t iarg, const char *msg, int *levels, const char *fname, const char *message) { char buf[512]; int i; char *p = buf; if (PyErr_Occurred()) return; else if (message == NULL) { if (fname != NULL) { PyOS_snprintf(p, sizeof(buf), "%.200s() ", fname); p += strlen(p); } if (iarg != 0) { PyOS_snprintf(p, sizeof(buf) - (p - buf), "argument %" PY_FORMAT_SIZE_T "d", iarg); i = 0; p += strlen(p); while (i < 32 && levels[i] > 0 && (int)(p-buf) < 220) { PyOS_snprintf(p, sizeof(buf) - (p - buf), ", item %d", levels[i]-1); p += strlen(p); i++; } } else { PyOS_snprintf(p, sizeof(buf) - (p - buf), "argument"); p += strlen(p); } PyOS_snprintf(p, sizeof(buf) - (p - buf), " %.256s", msg); message = buf; } if (msg[0] == '(') { PyErr_SetString(PyExc_SystemError, message); } else { PyErr_SetString(PyExc_TypeError, message); } } /* Convert a tuple argument. On entry, *p_format points to the character _after_ the opening '('. On successful exit, *p_format points to the closing ')'. If successful: *p_format and *p_va are updated, *levels and *msgbuf are untouched, and NULL is returned. If the argument is invalid: *p_format is unchanged, *p_va is undefined, *levels is a 0-terminated list of item numbers, *msgbuf contains an error message, whose format is: "must be , not ", where: is the name of the expected type, and is the name of the actual type, and msgbuf is returned. 
*/

// See the contract in the comment block ending just above.
static const char *
converttuple(PyObject *arg, const char **p_format, va_list *p_va, int flags,
             int *levels, char *msgbuf, size_t bufsize, int toplevel,
             freelist_t *freelist)
{
    int level = 0;
    int n = 0;
    const char *format = *p_format;
    int i;
    Py_ssize_t len;

    // First pass: count the items (n) expected at this nesting level by
    // scanning the format string up to the matching ')'.
    for (;;) {
        int c = *format++;
        if (c == '(') {
            if (level == 0)
                n++;
            level++;
        }
        else if (c == ')') {
            if (level == 0)
                break;
            level--;
        }
        else if (c == ':' || c == ';' || c == '\0')
            break;
        else if (level == 0 && Py_ISALPHA(Py_CHARMASK(c)))
            n++;
    }

    // bytes is explicitly rejected even though it is a sequence.
    if (!PySequence_Check(arg) || PyBytes_Check(arg)) {
        levels[0] = 0;
        PyOS_snprintf(msgbuf, bufsize,
                      toplevel ? "expected %d arguments, not %.50s" :
                      "must be %d-item sequence, not %.50s",
                      n,
                      arg == Py_None ? "None" : arg->ob_type->tp_name);
        return msgbuf;
    }

    len = PySequence_Size(arg);
    if (len != n) {
        levels[0] = 0;
        if (toplevel) {
            PyOS_snprintf(msgbuf, bufsize,
                          "expected %d argument%s, not %" PY_FORMAT_SIZE_T "d",
                          n,
                          n == 1 ? "" : "s",
                          len);
        }
        else {
            PyOS_snprintf(msgbuf, bufsize,
                          "must be sequence of length %d, "
                          "not %" PY_FORMAT_SIZE_T "d",
                          n, len);
        }
        return msgbuf;
    }

    // Second pass: convert each element recursively, recording the
    // failing element's 1-based position in levels[] for error messages.
    format = *p_format;
    for (i = 0; i < n; i++) {
        const char *msg;
        PyObject *item;
        item = PySequence_GetItem(arg, i);
        if (item == NULL) {
            PyErr_Clear();
            levels[0] = i+1;
            levels[1] = 0;
            strncpy(msgbuf, "is not retrievable", bufsize);
            return msgbuf;
        }
        msg = convertitem(item, &format, p_va, flags, levels+1,
                          msgbuf, bufsize, freelist);
        /* PySequence_GetItem calls tp->sq_item, which INCREFs */
        Py_XDECREF(item);
        if (msg != NULL) {
            levels[0] = i+1;
            return msg;
        }
    }

    *p_format = format;
    return NULL;
}


/* Convert a single item.
*/

// Dispatch one format unit: '(' starts a nested tuple conversion,
// anything else is a simple unit. Returns NULL on success or an error
// message (msgbuf) on failure, advancing *p_format only on success.
static const char *
convertitem(PyObject *arg, const char **p_format, va_list *p_va, int flags,
            int *levels, char *msgbuf, size_t bufsize, freelist_t *freelist)
{
    const char *msg;
    const char *format = *p_format;

    if (*format == '(' /* ')' */) {
        format++;
        msg = converttuple(arg, &format, p_va, flags, levels, msgbuf,
                           bufsize, 0, freelist);
        if (msg == NULL)
            format++;
    }
    else {
        msg = convertsimple(arg, &format, p_va, flags,
                            msgbuf, bufsize, freelist);
        if (msg != NULL)
            levels[0] = 0;
    }
    if (msg == NULL)
        *p_format = format;
    return msg;
}


/* Format an error message generated by convertsimple().
   Writes "must be <expected>, not <actual>" into msgbuf (or the
   expected text verbatim when it is parenthesized) and returns msgbuf. */
static const char *
converterr(const char *expected, PyObject *arg, char *msgbuf, size_t bufsize)
{
    assert(expected != NULL);
    assert(arg != NULL);
    if (expected[0] == '(') {
        PyOS_snprintf(msgbuf, bufsize,
                      "%.100s", expected);
    }
    else {
        PyOS_snprintf(msgbuf, bufsize,
                      "must be %.50s, not %.50s", expected,
                      arg == Py_None ? "None" : arg->ob_type->tp_name);
    }
    return msgbuf;
}

#define CONV_UNICODE "(unicode conversion error)"

/* Explicitly check for float arguments when integers are expected.
   Return 1 for error, 0 if ok.
   XXX Should be removed after the end of the deprecation period in
   _PyLong_FromNbIndexOrNbInt. */
static int
float_argument_error(PyObject *arg)
{
    if (PyFloat_Check(arg)) {
        PyErr_SetString(PyExc_TypeError,
                        "integer argument expected, got float" );
        return 1;
    }
    else
        return 0;
}

/* Convert a non-tuple argument.  Return NULL if conversion went OK,
   or a string with a message describing the failure.  The message is
   formatted as "must be <desired type>, not <actual type>".
   When failing, an exception may or may not have been raised.
   Don't call if a tuple is expected.

   When you add new format codes, please don't forget poor
   skipitem() below.
*/ static const char * convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, char *msgbuf, size_t bufsize, freelist_t *freelist) { /* For # codes */ #define FETCH_SIZE int *q=NULL;Py_ssize_t *q2=NULL;\ if (flags & FLAG_SIZE_T) q2=va_arg(*p_va, Py_ssize_t*); \ else { \ if (PyErr_WarnEx(PyExc_DeprecationWarning, \ "PY_SSIZE_T_CLEAN will be required for '#' formats", 1)) { \ return NULL; \ } \ q=va_arg(*p_va, int*); \ } #define STORE_SIZE(s) \ if (flags & FLAG_SIZE_T) \ *q2=s; \ else { \ if (INT_MAX < s) { \ PyErr_SetString(PyExc_OverflowError, \ "size does not fit in an int"); \ return converterr("", arg, msgbuf, bufsize); \ } \ *q = (int)s; \ } #define BUFFER_LEN ((flags & FLAG_SIZE_T) ? *q2:*q) #define RETURN_ERR_OCCURRED return msgbuf const char *format = *p_format; char c = *format++; const char *sarg; switch (c) { case 'b': { /* unsigned byte -- very short int */ char *p = va_arg(*p_va, char *); long ival; if (float_argument_error(arg)) RETURN_ERR_OCCURRED; ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else if (ival < 0) { PyErr_SetString(PyExc_OverflowError, "unsigned byte integer is less than minimum"); RETURN_ERR_OCCURRED; } else if (ival > UCHAR_MAX) { PyErr_SetString(PyExc_OverflowError, "unsigned byte integer is greater than maximum"); RETURN_ERR_OCCURRED; } else *p = (unsigned char) ival; break; } case 'B': {/* byte sized bitfield - both signed and unsigned values allowed */ char *p = va_arg(*p_va, char *); long ival; if (float_argument_error(arg)) RETURN_ERR_OCCURRED; ival = PyLong_AsUnsignedLongMask(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = (unsigned char) ival; break; } case 'h': {/* signed short int */ short *p = va_arg(*p_va, short *); long ival; if (float_argument_error(arg)) RETURN_ERR_OCCURRED; ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else if (ival < SHRT_MIN) { PyErr_SetString(PyExc_OverflowError, "signed short 
integer is less than minimum"); RETURN_ERR_OCCURRED; } else if (ival > SHRT_MAX) { PyErr_SetString(PyExc_OverflowError, "signed short integer is greater than maximum"); RETURN_ERR_OCCURRED; } else *p = (short) ival; break; } case 'H': { /* short int sized bitfield, both signed and unsigned allowed */ unsigned short *p = va_arg(*p_va, unsigned short *); long ival; if (float_argument_error(arg)) RETURN_ERR_OCCURRED; ival = PyLong_AsUnsignedLongMask(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = (unsigned short) ival; break; } case 'i': {/* signed int */ int *p = va_arg(*p_va, int *); long ival; if (float_argument_error(arg)) RETURN_ERR_OCCURRED; ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else if (ival > INT_MAX) { PyErr_SetString(PyExc_OverflowError, "signed integer is greater than maximum"); RETURN_ERR_OCCURRED; } else if (ival < INT_MIN) { PyErr_SetString(PyExc_OverflowError, "signed integer is less than minimum"); RETURN_ERR_OCCURRED; } else *p = ival; break; } case 'I': { /* int sized bitfield, both signed and unsigned allowed */ unsigned int *p = va_arg(*p_va, unsigned int *); unsigned int ival; if (float_argument_error(arg)) RETURN_ERR_OCCURRED; ival = (unsigned int)PyLong_AsUnsignedLongMask(arg); if (ival == (unsigned int)-1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = ival; break; } case 'n': /* Py_ssize_t */ { PyObject *iobj; Py_ssize_t *p = va_arg(*p_va, Py_ssize_t *); Py_ssize_t ival = -1; if (float_argument_error(arg)) RETURN_ERR_OCCURRED; iobj = PyNumber_Index(arg); if (iobj != NULL) { ival = PyLong_AsSsize_t(iobj); Py_DECREF(iobj); } if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; *p = ival; break; } case 'l': {/* long int */ long *p = va_arg(*p_va, long *); long ival; if (float_argument_error(arg)) RETURN_ERR_OCCURRED; ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = ival; break; } case 'k': { /* long sized bitfield */ 
unsigned long *p = va_arg(*p_va, unsigned long *); unsigned long ival; if (PyLong_Check(arg)) ival = PyLong_AsUnsignedLongMask(arg); else return converterr("int", arg, msgbuf, bufsize); *p = ival; break; } case 'L': {/* long long */ long long *p = va_arg( *p_va, long long * ); long long ival; if (float_argument_error(arg)) RETURN_ERR_OCCURRED; ival = PyLong_AsLongLong(arg); if (ival == (long long)-1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = ival; break; } case 'K': { /* long long sized bitfield */ unsigned long long *p = va_arg(*p_va, unsigned long long *); unsigned long long ival; if (PyLong_Check(arg)) ival = PyLong_AsUnsignedLongLongMask(arg); else return converterr("int", arg, msgbuf, bufsize); *p = ival; break; } case 'f': {/* float */ float *p = va_arg(*p_va, float *); double dval = PyFloat_AsDouble(arg); if (PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = (float) dval; break; } case 'd': {/* double */ double *p = va_arg(*p_va, double *); double dval = PyFloat_AsDouble(arg); if (PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = dval; break; } case 'D': {/* complex double */ Py_complex *p = va_arg(*p_va, Py_complex *); Py_complex cval; cval = PyComplex_AsCComplex(arg); if (PyErr_Occurred()) RETURN_ERR_OCCURRED; else *p = cval; break; } case 'c': {/* char */ char *p = va_arg(*p_va, char *); if (PyBytes_Check(arg) && PyBytes_Size(arg) == 1) *p = PyBytes_AS_STRING(arg)[0]; else if (PyByteArray_Check(arg) && PyByteArray_Size(arg) == 1) *p = PyByteArray_AS_STRING(arg)[0]; else return converterr("a byte string of length 1", arg, msgbuf, bufsize); break; } case 'C': {/* unicode char */ int *p = va_arg(*p_va, int *); int kind; void *data; if (!PyUnicode_Check(arg)) return converterr("a unicode character", arg, msgbuf, bufsize); if (PyUnicode_READY(arg)) RETURN_ERR_OCCURRED; if (PyUnicode_GET_LENGTH(arg) != 1) return converterr("a unicode character", arg, msgbuf, bufsize); kind = PyUnicode_KIND(arg); data = PyUnicode_DATA(arg); *p = PyUnicode_READ(kind, data, 
0); break; } case 'p': {/* boolean *p*redicate */ int *p = va_arg(*p_va, int *); int val = PyObject_IsTrue(arg); if (val > 0) *p = 1; else if (val == 0) *p = 0; else RETURN_ERR_OCCURRED; break; } /* XXX WAAAAH! 's', 'y', 'z', 'u', 'Z', 'e', 'w' codes all need to be cleaned up! */ case 'y': {/* any bytes-like object */ void **p = (void **)va_arg(*p_va, char **); const char *buf; Py_ssize_t count; if (*format == '*') { if (getbuffer(arg, (Py_buffer*)p, &buf) < 0) return converterr(buf, arg, msgbuf, bufsize); format++; if (addcleanup(p, freelist, cleanup_buffer)) { return converterr( "(cleanup problem)", arg, msgbuf, bufsize); } break; } count = convertbuffer(arg, (const void **)p, &buf); if (count < 0) return converterr(buf, arg, msgbuf, bufsize); if (*format == '#') { FETCH_SIZE; STORE_SIZE(count); format++; } else { if (strlen(*p) != (size_t)count) { PyErr_SetString(PyExc_ValueError, "embedded null byte"); RETURN_ERR_OCCURRED; } } break; } case 's': /* text string or bytes-like object */ case 'z': /* text string, bytes-like object or None */ { if (*format == '*') { /* "s*" or "z*" */ Py_buffer *p = (Py_buffer *)va_arg(*p_va, Py_buffer *); if (c == 'z' && arg == Py_None) PyBuffer_FillInfo(p, NULL, NULL, 0, 1, 0); else if (PyUnicode_Check(arg)) { Py_ssize_t len; sarg = PyUnicode_AsUTF8AndSize(arg, &len); if (sarg == NULL) return converterr(CONV_UNICODE, arg, msgbuf, bufsize); PyBuffer_FillInfo(p, arg, (void *)sarg, len, 1, 0); } else { /* any bytes-like object */ const char *buf; if (getbuffer(arg, p, &buf) < 0) return converterr(buf, arg, msgbuf, bufsize); } if (addcleanup(p, freelist, cleanup_buffer)) { return converterr( "(cleanup problem)", arg, msgbuf, bufsize); } format++; } else if (*format == '#') { /* a string or read-only bytes-like object */ /* "s#" or "z#" */ const void **p = (const void **)va_arg(*p_va, const char **); FETCH_SIZE; if (c == 'z' && arg == Py_None) { *p = NULL; STORE_SIZE(0); } else if (PyUnicode_Check(arg)) { Py_ssize_t len; sarg = 
PyUnicode_AsUTF8AndSize(arg, &len); if (sarg == NULL) return converterr(CONV_UNICODE, arg, msgbuf, bufsize); *p = sarg; STORE_SIZE(len); } else { /* read-only bytes-like object */ /* XXX Really? */ const char *buf; Py_ssize_t count = convertbuffer(arg, p, &buf); if (count < 0) return converterr(buf, arg, msgbuf, bufsize); STORE_SIZE(count); } format++; } else { /* "s" or "z" */ const char **p = va_arg(*p_va, const char **); Py_ssize_t len; sarg = NULL; if (c == 'z' && arg == Py_None) *p = NULL; else if (PyUnicode_Check(arg)) { sarg = PyUnicode_AsUTF8AndSize(arg, &len); if (sarg == NULL) return converterr(CONV_UNICODE, arg, msgbuf, bufsize); if (strlen(sarg) != (size_t)len) { PyErr_SetString(PyExc_ValueError, "embedded null character"); RETURN_ERR_OCCURRED; } *p = sarg; } else return converterr(c == 'z' ? "str or None" : "str", arg, msgbuf, bufsize); } break; } case 'u': /* raw unicode buffer (Py_UNICODE *) */ case 'Z': /* raw unicode buffer or None */ { Py_UNICODE **p = va_arg(*p_va, Py_UNICODE **); if (*format == '#') { /* "u#" or "Z#" */ FETCH_SIZE; if (c == 'Z' && arg == Py_None) { *p = NULL; STORE_SIZE(0); } else if (PyUnicode_Check(arg)) { Py_ssize_t len; *p = PyUnicode_AsUnicodeAndSize(arg, &len); if (*p == NULL) RETURN_ERR_OCCURRED; STORE_SIZE(len); } else return converterr(c == 'Z' ? "str or None" : "str", arg, msgbuf, bufsize); format++; } else { /* "u" or "Z" */ if (c == 'Z' && arg == Py_None) *p = NULL; else if (PyUnicode_Check(arg)) { Py_ssize_t len; *p = PyUnicode_AsUnicodeAndSize(arg, &len); if (*p == NULL) RETURN_ERR_OCCURRED; if (wcslen(*p) != (size_t)len) { PyErr_SetString(PyExc_ValueError, "embedded null character"); RETURN_ERR_OCCURRED; } } else return converterr(c == 'Z' ? 
"str or None" : "str", arg, msgbuf, bufsize); } break; } case 'e': {/* encoded string */ char **buffer; const char *encoding; PyObject *s; int recode_strings; Py_ssize_t size; const char *ptr; /* Get 'e' parameter: the encoding name */ encoding = (const char *)va_arg(*p_va, const char *); if (encoding == NULL) encoding = PyUnicode_GetDefaultEncoding(); /* Get output buffer parameter: 's' (recode all objects via Unicode) or 't' (only recode non-string objects) */ if (*format == 's') recode_strings = 1; else if (*format == 't') recode_strings = 0; else return converterr( "(unknown parser marker combination)", arg, msgbuf, bufsize); buffer = (char **)va_arg(*p_va, char **); format++; if (buffer == NULL) return converterr("(buffer is NULL)", arg, msgbuf, bufsize); /* Encode object */ if (!recode_strings && (PyBytes_Check(arg) || PyByteArray_Check(arg))) { s = arg; Py_INCREF(s); if (PyBytes_Check(arg)) { size = PyBytes_GET_SIZE(s); ptr = PyBytes_AS_STRING(s); } else { size = PyByteArray_GET_SIZE(s); ptr = PyByteArray_AS_STRING(s); } } else if (PyUnicode_Check(arg)) { /* Encode object; use default error handling */ s = PyUnicode_AsEncodedString(arg, encoding, NULL); if (s == NULL) return converterr("(encoding failed)", arg, msgbuf, bufsize); assert(PyBytes_Check(s)); size = PyBytes_GET_SIZE(s); ptr = PyBytes_AS_STRING(s); if (ptr == NULL) ptr = ""; } else { return converterr( recode_strings ? 
"str" : "str, bytes or bytearray", arg, msgbuf, bufsize); } /* Write output; output is guaranteed to be 0-terminated */ if (*format == '#') { /* Using buffer length parameter '#': - if *buffer is NULL, a new buffer of the needed size is allocated and the data copied into it; *buffer is updated to point to the new buffer; the caller is responsible for PyMem_Free()ing it after usage - if *buffer is not NULL, the data is copied to *buffer; *buffer_len has to be set to the size of the buffer on input; buffer overflow is signalled with an error; buffer has to provide enough room for the encoded string plus the trailing 0-byte - in both cases, *buffer_len is updated to the size of the buffer /excluding/ the trailing 0-byte */ FETCH_SIZE; format++; if (q == NULL && q2 == NULL) { Py_DECREF(s); return converterr( "(buffer_len is NULL)", arg, msgbuf, bufsize); } if (*buffer == NULL) { *buffer = PyMem_NEW(char, size + 1); if (*buffer == NULL) { Py_DECREF(s); PyErr_NoMemory(); RETURN_ERR_OCCURRED; } if (addcleanup(*buffer, freelist, cleanup_ptr)) { Py_DECREF(s); return converterr( "(cleanup problem)", arg, msgbuf, bufsize); } } else { if (size + 1 > BUFFER_LEN) { Py_DECREF(s); PyErr_Format(PyExc_ValueError, "encoded string too long " "(%zd, maximum length %zd)", (Py_ssize_t)size, (Py_ssize_t)(BUFFER_LEN-1)); RETURN_ERR_OCCURRED; } } memcpy(*buffer, ptr, size+1); STORE_SIZE(size); } else { /* Using a 0-terminated buffer: - the encoded string has to be 0-terminated for this variant to work; if it is not, an error raised - a new buffer of the needed size is allocated and the data copied into it; *buffer is updated to point to the new buffer; the caller is responsible for PyMem_Free()ing it after usage */ if ((Py_ssize_t)strlen(ptr) != size) { Py_DECREF(s); return converterr( "encoded string without null bytes", arg, msgbuf, bufsize); } *buffer = PyMem_NEW(char, size + 1); if (*buffer == NULL) { Py_DECREF(s); PyErr_NoMemory(); RETURN_ERR_OCCURRED; } if (addcleanup(*buffer, 
freelist, cleanup_ptr)) { Py_DECREF(s); return converterr("(cleanup problem)", arg, msgbuf, bufsize); } memcpy(*buffer, ptr, size+1); } Py_DECREF(s); break; } case 'S': { /* PyBytes object */ PyObject **p = va_arg(*p_va, PyObject **); if (PyBytes_Check(arg)) *p = arg; else return converterr("bytes", arg, msgbuf, bufsize); break; } case 'Y': { /* PyByteArray object */ PyObject **p = va_arg(*p_va, PyObject **); if (PyByteArray_Check(arg)) *p = arg; else return converterr("bytearray", arg, msgbuf, bufsize); break; } case 'U': { /* PyUnicode object */ PyObject **p = va_arg(*p_va, PyObject **); if (PyUnicode_Check(arg)) { if (PyUnicode_READY(arg) == -1) RETURN_ERR_OCCURRED; *p = arg; } else return converterr("str", arg, msgbuf, bufsize); break; } case 'O': { /* object */ PyTypeObject *type; PyObject **p; if (*format == '!') { type = va_arg(*p_va, PyTypeObject*); p = va_arg(*p_va, PyObject **); format++; if (PyType_IsSubtype(arg->ob_type, type)) *p = arg; else return converterr(type->tp_name, arg, msgbuf, bufsize); } else if (*format == '&') { typedef int (*converter)(PyObject *, void *); converter convert = va_arg(*p_va, converter); void *addr = va_arg(*p_va, void *); int res; format++; if (! (res = (*convert)(arg, addr))) return converterr("(unspecified)", arg, msgbuf, bufsize); if (res == Py_CLEANUP_SUPPORTED && addcleanup(addr, freelist, convert) == -1) return converterr("(cleanup problem)", arg, msgbuf, bufsize); } else { p = va_arg(*p_va, PyObject **); *p = arg; } break; } case 'w': { /* "w*": memory buffer, read-write access */ void **p = va_arg(*p_va, void **); if (*format != '*') return converterr( "(invalid use of 'w' format character)", arg, msgbuf, bufsize); format++; /* Caller is interested in Py_buffer, and the object supports it directly. 
*/ if (PyObject_GetBuffer(arg, (Py_buffer*)p, PyBUF_WRITABLE) < 0) { PyErr_Clear(); return converterr("read-write bytes-like object", arg, msgbuf, bufsize); } if (!PyBuffer_IsContiguous((Py_buffer*)p, 'C')) { PyBuffer_Release((Py_buffer*)p); return converterr("contiguous buffer", arg, msgbuf, bufsize); } if (addcleanup(p, freelist, cleanup_buffer)) { return converterr( "(cleanup problem)", arg, msgbuf, bufsize); } break; } default: return converterr("(impossible)", arg, msgbuf, bufsize); } *p_format = format; return NULL; #undef FETCH_SIZE #undef STORE_SIZE #undef BUFFER_LEN #undef RETURN_ERR_OCCURRED } static Py_ssize_t convertbuffer(PyObject *arg, const void **p, const char **errmsg) { PyBufferProcs *pb = Py_TYPE(arg)->tp_as_buffer; Py_ssize_t count; Py_buffer view; *errmsg = NULL; *p = NULL; if (pb != NULL && pb->bf_releasebuffer != NULL) { *errmsg = "read-only bytes-like object"; return -1; } if (getbuffer(arg, &view, errmsg) < 0) return -1; count = view.len; *p = view.buf; PyBuffer_Release(&view); return count; } static int getbuffer(PyObject *arg, Py_buffer *view, const char **errmsg) { if (PyObject_GetBuffer(arg, view, PyBUF_SIMPLE) != 0) { *errmsg = "bytes-like object"; return -1; } if (!PyBuffer_IsContiguous(view, 'C')) { PyBuffer_Release(view); *errmsg = "contiguous buffer"; return -1; } return 0; } /* Support for keyword arguments donated by Geoff Philbrick */ /* Return false (0) for error, else true. */ int CPyArg_ParseTupleAndKeywords(PyObject *args, PyObject *keywords, const char *format, char **kwlist, ...) 
{ int retval; va_list va; if ((args == NULL || !PyTuple_Check(args)) || (keywords != NULL && !PyDict_Check(keywords)) || format == NULL || kwlist == NULL) { PyErr_BadInternalCall(); return 0; } va_start(va, kwlist); retval = vgetargskeywords(args, keywords, format, kwlist, &va, FLAG_SIZE_T); va_end(va); return retval; } int CPyArg_VaParseTupleAndKeywords(PyObject *args, PyObject *keywords, const char *format, char **kwlist, va_list va) { int retval; va_list lva; if ((args == NULL || !PyTuple_Check(args)) || (keywords != NULL && !PyDict_Check(keywords)) || format == NULL || kwlist == NULL) { PyErr_BadInternalCall(); return 0; } va_copy(lva, va); retval = vgetargskeywords(args, keywords, format, kwlist, &lva, FLAG_SIZE_T); va_end(lva); return retval; } #define IS_END_OF_FORMAT(c) (c == '\0' || c == ';' || c == ':') static int vgetargskeywords(PyObject *args, PyObject *kwargs, const char *format, char **kwlist, va_list *p_va, int flags) { char msgbuf[512]; int levels[32]; const char *fname, *msg, *custom_msg; int min = INT_MAX; int max = INT_MAX; int required_kwonly_start = INT_MAX; int has_required_kws = 0; int i, pos, len; int skip = 0; Py_ssize_t nargs, nkwargs; PyObject *current_arg; freelistentry_t static_entries[STATIC_FREELIST_ENTRIES]; freelist_t freelist; int bound_pos_args; PyObject **p_args = NULL, **p_kwargs = NULL; freelist.entries = static_entries; freelist.first_available = 0; freelist.entries_malloced = 0; assert(args != NULL && PyTuple_Check(args)); assert(kwargs == NULL || PyDict_Check(kwargs)); assert(format != NULL); assert(kwlist != NULL); assert(p_va != NULL); /* grab the function name or custom error msg first (mutually exclusive) */ fname = strchr(format, ':'); if (fname) { fname++; custom_msg = NULL; } else { custom_msg = strchr(format,';'); if (custom_msg) custom_msg++; } /* scan kwlist and count the number of positional-only parameters */ for (pos = 0; kwlist[pos] && !*kwlist[pos]; pos++) { } /* scan kwlist and get greatest possible nbr of 
args */ for (len = pos; kwlist[len]; len++) { if (!*kwlist[len]) { PyErr_SetString(PyExc_SystemError, "Empty keyword parameter name"); return cleanreturn(0, &freelist); } } if (*format == '%') { p_args = va_arg(*p_va, PyObject **); p_kwargs = va_arg(*p_va, PyObject **); format++; } if (len > STATIC_FREELIST_ENTRIES) { freelist.entries = PyMem_NEW(freelistentry_t, len); if (freelist.entries == NULL) { PyErr_NoMemory(); return 0; } freelist.entries_malloced = 1; } nargs = PyTuple_GET_SIZE(args); nkwargs = (kwargs == NULL) ? 0 : PyDict_GET_SIZE(kwargs); if (nargs + nkwargs > len && !p_args && !p_kwargs) { /* Adding "keyword" (when nargs == 0) prevents producing wrong error messages in some special cases (see bpo-31229). */ PyErr_Format(PyExc_TypeError, "%.200s%s takes at most %d %sargument%s (%zd given)", (fname == NULL) ? "function" : fname, (fname == NULL) ? "" : "()", len, (nargs == 0) ? "keyword " : "", (len == 1) ? "" : "s", nargs + nkwargs); return cleanreturn(0, &freelist); } /* convert tuple args and keyword args in same loop, using kwlist to drive process */ for (i = 0; i < len; i++) { if (*format == '|') { if (min != INT_MAX) { PyErr_SetString(PyExc_SystemError, "Invalid format string (| specified twice)"); return cleanreturn(0, &freelist); } min = i; format++; if (max != INT_MAX) { PyErr_SetString(PyExc_SystemError, "Invalid format string ($ before |)"); return cleanreturn(0, &freelist); } /* If there are optional args, figure out whether we have * required keyword arguments so that we don't bail without * enforcing them. 
*/ has_required_kws = strchr(format, '@') != NULL; } if (*format == '$') { if (max != INT_MAX) { PyErr_SetString(PyExc_SystemError, "Invalid format string ($ specified twice)"); return cleanreturn(0, &freelist); } max = i; format++; if (max < pos) { PyErr_SetString(PyExc_SystemError, "Empty parameter name after $"); return cleanreturn(0, &freelist); } if (skip) { /* Now we know the minimal and the maximal numbers of * positional arguments and can raise an exception with * informative message (see below). */ break; } if (max < nargs && !p_args) { if (max == 0) { PyErr_Format(PyExc_TypeError, "%.200s%s takes no positional arguments", (fname == NULL) ? "function" : fname, (fname == NULL) ? "" : "()"); } else { PyErr_Format(PyExc_TypeError, "%.200s%s takes %s %d positional argument%s" " (%zd given)", (fname == NULL) ? "function" : fname, (fname == NULL) ? "" : "()", (min < max) ? "at most" : "exactly", max, max == 1 ? "" : "s", nargs); } return cleanreturn(0, &freelist); } } if (*format == '@') { if (min == INT_MAX && max == INT_MAX) { PyErr_SetString(PyExc_SystemError, "Invalid format string " "(@ without preceding | and $)"); return cleanreturn(0, &freelist); } if (required_kwonly_start != INT_MAX) { PyErr_SetString(PyExc_SystemError, "Invalid format string (@ specified twice)"); return cleanreturn(0, &freelist); } required_kwonly_start = i; format++; } if (IS_END_OF_FORMAT(*format)) { PyErr_Format(PyExc_SystemError, "More keyword list entries (%d) than " "format specifiers (%d)", len, i); return cleanreturn(0, &freelist); } if (!skip) { if (i < nargs && i < max) { current_arg = PyTuple_GET_ITEM(args, i); } else if (nkwargs && i >= pos) { current_arg = _PyDict_GetItemStringWithError(kwargs, kwlist[i]); if (current_arg) { --nkwargs; } else if (PyErr_Occurred()) { return cleanreturn(0, &freelist); } } else { current_arg = NULL; } if (current_arg) { msg = convertitem(current_arg, &format, p_va, flags, levels, msgbuf, sizeof(msgbuf), &freelist); if (msg) { seterror(i+1, 
msg, levels, fname, custom_msg); return cleanreturn(0, &freelist); } continue; } if (i < min || i >= required_kwonly_start) { if (i < pos) { assert (min == INT_MAX); assert (max == INT_MAX); skip = 1; /* At that moment we still don't know the minimal and * the maximal numbers of positional arguments. Raising * an exception is deferred until we encounter | and $ * or the end of the format. */ } else { if (i >= max) { PyErr_Format(PyExc_TypeError, "%.200s%s missing required " "keyword-only argument '%s'", (fname == NULL) ? "function" : fname, (fname == NULL) ? "" : "()", kwlist[i]); } else { PyErr_Format(PyExc_TypeError, "%.200s%s missing required " "argument '%s' (pos %d)", (fname == NULL) ? "function" : fname, (fname == NULL) ? "" : "()", kwlist[i], i+1); } return cleanreturn(0, &freelist); } } /* current code reports success when all required args * fulfilled and no keyword args left, with no further * validation. XXX Maybe skip this in debug build ? */ if (!nkwargs && !skip && !has_required_kws && !p_args && !p_kwargs) { return cleanreturn(1, &freelist); } } /* We are into optional args, skip through to any remaining * keyword args */ msg = skipitem(&format, p_va, flags); if (msg) { PyErr_Format(PyExc_SystemError, "%s: '%s'", msg, format); return cleanreturn(0, &freelist); } } if (skip) { PyErr_Format(PyExc_TypeError, "%.200s%s takes %s %d positional argument%s" " (%zd given)", (fname == NULL) ? "function" : fname, (fname == NULL) ? "" : "()", (Py_MIN(pos, min) < i) ? "at least" : "exactly", Py_MIN(pos, min), Py_MIN(pos, min) == 1 ? 
"" : "s", nargs); return cleanreturn(0, &freelist); } if (!IS_END_OF_FORMAT(*format) && (*format != '|') && (*format != '$') && (*format != '@')) { PyErr_Format(PyExc_SystemError, "more argument specifiers than keyword list entries " "(remaining format:'%s')", format); return cleanreturn(0, &freelist); } bound_pos_args = Py_MIN(nargs, Py_MIN(max, len)); if (p_args) { *p_args = PyTuple_GetSlice(args, bound_pos_args, nargs); if (!*p_args) { return cleanreturn(0, &freelist); } } if (p_kwargs) { /* This unfortunately needs to be special cased because if len is 0 then we * never go through the main loop. */ if (nargs > 0 && len == 0 && !p_args) { PyErr_Format(PyExc_TypeError, "%.200s%s takes no positional arguments", (fname == NULL) ? "function" : fname, (fname == NULL) ? "" : "()"); return cleanreturn(0, &freelist); } *p_kwargs = PyDict_New(); if (!*p_kwargs) { goto latefail; } } if (nkwargs > 0) { PyObject *key, *value; Py_ssize_t j; /* make sure there are no arguments given by name and position */ for (i = pos; i < bound_pos_args && i < len; i++) { current_arg = _PyDict_GetItemStringWithError(kwargs, kwlist[i]); if (current_arg) { /* arg present in tuple and in dict */ PyErr_Format(PyExc_TypeError, "argument for %.200s%s given by name ('%s') " "and position (%d)", (fname == NULL) ? "function" : fname, (fname == NULL) ? "" : "()", kwlist[i], i+1); goto latefail; } else if (PyErr_Occurred()) { goto latefail; } } /* make sure there are no extraneous keyword arguments */ j = 0; while (PyDict_Next(kwargs, &j, &key, &value)) { int match = 0; if (!PyUnicode_Check(key)) { PyErr_SetString(PyExc_TypeError, "keywords must be strings"); goto latefail; } for (i = pos; i < len; i++) { if (CPyUnicode_EqualToASCIIString(key, kwlist[i])) { match = 1; break; } } if (!match) { if (!p_kwargs) { PyErr_Format(PyExc_TypeError, "'%U' is an invalid keyword " "argument for %.200s%s", key, (fname == NULL) ? "this function" : fname, (fname == NULL) ? 
"" : "()"); goto latefail; } else { if (PyDict_SetItem(*p_kwargs, key, value) < 0) { goto latefail; } } } } } return cleanreturn(1, &freelist); /* Handle failures that have happened after we have tried to * create *args and **kwargs, if they exist. */ latefail: if (p_args) { Py_XDECREF(*p_args); } if (p_kwargs) { Py_XDECREF(*p_kwargs); } return cleanreturn(0, &freelist); } static const char * skipitem(const char **p_format, va_list *p_va, int flags) { const char *format = *p_format; char c = *format++; switch (c) { /* * codes that take a single data pointer as an argument * (the type of the pointer is irrelevant) */ case 'b': /* byte -- very short int */ case 'B': /* byte as bitfield */ case 'h': /* short int */ case 'H': /* short int as bitfield */ case 'i': /* int */ case 'I': /* int sized bitfield */ case 'l': /* long int */ case 'k': /* long int sized bitfield */ case 'L': /* long long */ case 'K': /* long long sized bitfield */ case 'n': /* Py_ssize_t */ case 'f': /* float */ case 'd': /* double */ case 'D': /* complex double */ case 'c': /* char */ case 'C': /* unicode char */ case 'p': /* boolean predicate */ case 'S': /* string object */ case 'Y': /* string object */ case 'U': /* unicode string object */ { if (p_va != NULL) { (void) va_arg(*p_va, void *); } break; } /* string codes */ case 'e': /* string with encoding */ { if (p_va != NULL) { (void) va_arg(*p_va, const char *); } if (!(*format == 's' || *format == 't')) /* after 'e', only 's' and 't' is allowed */ goto err; format++; } /* fall through */ case 's': /* string */ case 'z': /* string or None */ case 'y': /* bytes */ case 'u': /* unicode string */ case 'Z': /* unicode string or None */ case 'w': /* buffer, read-write */ { if (p_va != NULL) { (void) va_arg(*p_va, char **); } if (*format == '#') { if (p_va != NULL) { if (flags & FLAG_SIZE_T) (void) va_arg(*p_va, Py_ssize_t *); else { if (PyErr_WarnEx(PyExc_DeprecationWarning, "PY_SSIZE_T_CLEAN will be required for '#' formats", 1)) { return NULL; 
} (void) va_arg(*p_va, int *); } } format++; } else if ((c == 's' || c == 'z' || c == 'y' || c == 'w') && *format == '*') { format++; } break; } case 'O': /* object */ { if (*format == '!') { format++; if (p_va != NULL) { (void) va_arg(*p_va, PyTypeObject*); (void) va_arg(*p_va, PyObject **); } } else if (*format == '&') { typedef int (*converter)(PyObject *, void *); if (p_va != NULL) { (void) va_arg(*p_va, converter); (void) va_arg(*p_va, void *); } format++; } else { if (p_va != NULL) { (void) va_arg(*p_va, PyObject **); } } break; } case '(': /* bypass tuple, not handled at all previously */ { const char *msg; for (;;) { if (*format==')') break; if (IS_END_OF_FORMAT(*format)) return "Unmatched left paren in format " "string"; msg = skipitem(&format, p_va, flags); if (msg) return msg; } format++; break; } case ')': return "Unmatched right paren in format string"; default: err: return "impossible"; } *p_format = format; return NULL; } #ifdef __cplusplus }; #endif mypy-0.761/mypyc/lib-rt/module_shim.tmpl0000644€tŠÔÚ€2›s®0000000101213576752246024447 0ustar jukkaDROPBOX\Domain Users00000000000000#include PyMODINIT_FUNC PyInit_{modname}(void) {{ PyObject *tmp; if (!(tmp = PyImport_ImportModule("{libname}"))) return NULL; Py_DECREF(tmp); void *init_func = PyCapsule_Import("{libname}.init_{full_modname}", 0); if (!init_func) {{ return NULL; }} return ((PyObject *(*)(void))init_func)(); }} // distutils sometimes spuriously tells cl to export CPyInit___init__, // so provide that so it chills out PyMODINIT_FUNC PyInit___init__(void) {{ return PyInit_{modname}(); }} mypy-0.761/mypyc/lib-rt/mypyc_util.h0000644€tŠÔÚ€2›s®0000000276113576752246023627 0ustar jukkaDROPBOX\Domain Users00000000000000#ifndef MYPYC_UTIL_H #define MYPYC_UTIL_H #include #include #include #if defined(__clang__) || defined(__GNUC__) #define likely(x) __builtin_expect((x),1) #define unlikely(x) __builtin_expect((x),0) #define CPy_Unreachable() __builtin_unreachable() #else #define likely(x) (x) #define 
unlikely(x) (x) #define CPy_Unreachable() abort() #endif #if defined(__clang__) || defined(__GNUC__) #define CPy_NOINLINE __attribute__((noinline)) #elif defined(_MSC_VER) #define CPy_NOINLINE __declspec(noinline) #else #define CPy_NOINLINE #endif // INCREF and DECREF that assert the pointer is not NULL. // asserts are disabled in release builds so there shouldn't be a perf hit. // I'm honestly kind of surprised that this isn't done by default. #define CPy_INCREF(p) do { assert(p); Py_INCREF(p); } while (0) #define CPy_DECREF(p) do { assert(p); Py_DECREF(p); } while (0) // Here just for consistency #define CPy_XDECREF(p) Py_XDECREF(p) typedef size_t CPyTagged; #define CPY_INT_BITS (CHAR_BIT * sizeof(CPyTagged)) #define CPY_TAGGED_MAX (((Py_ssize_t)1 << (CPY_INT_BITS - 2)) - 1) #define CPY_TAGGED_MIN (-((Py_ssize_t)1 << (CPY_INT_BITS - 2))) #define CPY_TAGGED_ABS_MIN (0-(size_t)CPY_TAGGED_MIN) typedef PyObject CPyModule; #define CPY_INT_TAG 1 typedef void (*CPyVTableItem)(void); static inline CPyTagged CPyTagged_ShortFromInt(int x) { return x << 1; } static inline CPyTagged CPyTagged_ShortFromSsize_t(Py_ssize_t x) { return x << 1; } #endif mypy-0.761/mypyc/lib-rt/pythonsupport.h0000644€tŠÔÚ€2›s®0000002474513576752246024415 0ustar jukkaDROPBOX\Domain Users00000000000000// Collects code that was copied in from cpython, for a couple of different reasons: // * We wanted to modify it to produce a more efficient version for our uses // * We needed to call it and it was static :( // * We wanted to call it and needed to backport it #ifndef CPY_PYTHONSUPPORT_H #define CPY_PYTHONSUPPORT_H #include #include #include #include #include "mypyc_util.h" #ifdef __cplusplus extern "C" { #endif #if 0 } // why isn't emacs smart enough to not indent this #endif ///////////////////////////////////////// // Adapted from bltinmodule.c in Python 3.7.0 #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 7 _Py_IDENTIFIER(__mro_entries__); static PyObject* update_bases(PyObject *bases) { 
Py_ssize_t i, j; PyObject *base, *meth, *new_base, *result, *new_bases = NULL; PyObject *stack[1] = {bases}; assert(PyTuple_Check(bases)); Py_ssize_t nargs = PyTuple_GET_SIZE(bases); for (i = 0; i < nargs; i++) { base = PyTuple_GET_ITEM(bases, i); if (PyType_Check(base)) { if (new_bases) { /* If we already have made a replacement, then we append every normal base, otherwise just skip it. */ if (PyList_Append(new_bases, base) < 0) { goto error; } } continue; } if (_PyObject_LookupAttrId(base, &PyId___mro_entries__, &meth) < 0) { goto error; } if (!meth) { if (new_bases) { if (PyList_Append(new_bases, base) < 0) { goto error; } } continue; } new_base = _PyObject_FastCall(meth, stack, 1); Py_DECREF(meth); if (!new_base) { goto error; } if (!PyTuple_Check(new_base)) { PyErr_SetString(PyExc_TypeError, "__mro_entries__ must return a tuple"); Py_DECREF(new_base); goto error; } if (!new_bases) { /* If this is a first successful replacement, create new_bases list and copy previously encountered bases. 
*/ if (!(new_bases = PyList_New(i))) { goto error; } for (j = 0; j < i; j++) { base = PyTuple_GET_ITEM(bases, j); PyList_SET_ITEM(new_bases, j, base); Py_INCREF(base); } } j = PyList_GET_SIZE(new_bases); if (PyList_SetSlice(new_bases, j, j, new_base) < 0) { goto error; } Py_DECREF(new_base); } if (!new_bases) { return bases; } result = PyList_AsTuple(new_bases); Py_DECREF(new_bases); return result; error: Py_XDECREF(new_bases); return NULL; } #else static PyObject* update_bases(PyObject *bases) { return bases; } #endif // From Python 3.7's typeobject.c #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 6 _Py_IDENTIFIER(__init_subclass__); static int init_subclass(PyTypeObject *type, PyObject *kwds) { PyObject *super, *func, *result; PyObject *args[2] = {(PyObject *)type, (PyObject *)type}; super = _PyObject_FastCall((PyObject *)&PySuper_Type, args, 2); if (super == NULL) { return -1; } func = _PyObject_GetAttrId(super, &PyId___init_subclass__); Py_DECREF(super); if (func == NULL) { return -1; } result = _PyObject_FastCallDict(func, NULL, 0, kwds); Py_DECREF(func); if (result == NULL) { return -1; } Py_DECREF(result); return 0; } #else static int init_subclass(PyTypeObject *type, PyObject *kwds) { return 0; } #endif // Adapted from longobject.c in Python 3.7.0 /* This function adapted from PyLong_AsLongLongAndOverflow, but with * some safety checks removed and specialized to only work for objects * that are already longs. * About half of the win this provides, though, just comes from being * able to inline the function, which in addition to saving function call * overhead allows the out-parameter overflow flag to be collapsed into * control flow. * Additionally, we check against the possible range of CPyTagged, not of * Py_ssize_t. 
*/ static inline Py_ssize_t CPyLong_AsSsize_tAndOverflow(PyObject *vv, int *overflow) { /* This version by Tim Peters */ PyLongObject *v = (PyLongObject *)vv; size_t x, prev; Py_ssize_t res; Py_ssize_t i; int sign; *overflow = 0; res = -1; i = Py_SIZE(v); if (likely(i == 1)) { res = v->ob_digit[0]; } else if (likely(i == 0)) { res = 0; } else if (i == -1) { res = -(sdigit)v->ob_digit[0]; } else { sign = 1; x = 0; if (i < 0) { sign = -1; i = -(i); } while (--i >= 0) { prev = x; x = (x << PyLong_SHIFT) + v->ob_digit[i]; if ((x >> PyLong_SHIFT) != prev) { *overflow = sign; goto exit; } } /* Haven't lost any bits, but casting to long requires extra * care (see comment above). */ if (x <= (size_t)CPY_TAGGED_MAX) { res = (Py_ssize_t)x * sign; } else if (sign < 0 && x == CPY_TAGGED_ABS_MIN) { res = CPY_TAGGED_MIN; } else { *overflow = sign; /* res is already set to -1 */ } } exit: return res; } // Adapted from listobject.c in Python 3.7.0 static int list_resize(PyListObject *self, Py_ssize_t newsize) { PyObject **items; size_t new_allocated, num_allocated_bytes; Py_ssize_t allocated = self->allocated; /* Bypass realloc() when a previous overallocation is large enough to accommodate the newsize. If the newsize falls lower than half the allocated size, then proceed with the realloc() to shrink the list. */ if (allocated >= newsize && newsize >= (allocated >> 1)) { assert(self->ob_item != NULL || newsize == 0); Py_SIZE(self) = newsize; return 0; } /* This over-allocates proportional to the list size, making room * for additional growth. The over-allocation is mild, but is * enough to give linear-time amortized behavior over a long * sequence of appends() in the presence of a poorly-performing * system realloc(). * The growth pattern is: 0, 4, 8, 16, 25, 35, 46, 58, 72, 88, ... * Note: new_allocated won't overflow because the largest possible value * is PY_SSIZE_T_MAX * (9 / 8) + 6 which always fits in a size_t. 
*/ new_allocated = (size_t)newsize + (newsize >> 3) + (newsize < 9 ? 3 : 6); if (new_allocated > (size_t)PY_SSIZE_T_MAX / sizeof(PyObject *)) { PyErr_NoMemory(); return -1; } if (newsize == 0) new_allocated = 0; num_allocated_bytes = new_allocated * sizeof(PyObject *); items = (PyObject **)PyMem_Realloc(self->ob_item, num_allocated_bytes); if (items == NULL) { PyErr_NoMemory(); return -1; } self->ob_item = items; Py_SIZE(self) = newsize; self->allocated = new_allocated; return 0; } // Changed to use PyList_SetSlice instead of the internal list_ass_slice static PyObject * list_pop_impl(PyListObject *self, Py_ssize_t index) { PyObject *v; int status; if (Py_SIZE(self) == 0) { /* Special-case most common failure cause */ PyErr_SetString(PyExc_IndexError, "pop from empty list"); return NULL; } if (index < 0) index += Py_SIZE(self); if (index < 0 || index >= Py_SIZE(self)) { PyErr_SetString(PyExc_IndexError, "pop index out of range"); return NULL; } v = self->ob_item[index]; if (index == Py_SIZE(self) - 1) { status = list_resize(self, Py_SIZE(self) - 1); if (status >= 0) return v; /* and v now owns the reference the list had */ else return NULL; } Py_INCREF(v); status = PyList_SetSlice((PyObject *)self, index, index+1, (PyObject *)NULL); if (status < 0) { Py_DECREF(v); return NULL; } return v; } // Tweaked to directly use CPyTagged static CPyTagged list_count(PyListObject *self, PyObject *value) { Py_ssize_t count = 0; Py_ssize_t i; for (i = 0; i < Py_SIZE(self); i++) { int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ); if (cmp > 0) count++; else if (cmp < 0) return CPY_INT_TAG; } return CPyTagged_ShortFromSsize_t(count); } #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 8 static PyObject * _PyDict_GetItemStringWithError(PyObject *v, const char *key) { PyObject *kv, *rv; kv = PyUnicode_FromString(key); if (kv == NULL) { return NULL; } rv = PyDict_GetItemWithError(v, kv); Py_DECREF(kv); return rv; } #endif #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 
6 /* _PyUnicode_EqualToASCIIString got added in 3.5.3 (argh!) so we can't actually know * whether it will be precent at runtime, so we just assume we don't have it in 3.5. */ #define CPyUnicode_EqualToASCIIString(x, y) (PyUnicode_CompareWithASCIIString((x), (y)) == 0) #elif PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 6 #define CPyUnicode_EqualToASCIIString(x, y) _PyUnicode_EqualToASCIIString(x, y) #endif // Adapted from genobject.c in Python 3.7.2 // Copied because it wasn't in 3.5.2 and it is undocumented anyways. /* * Set StopIteration with specified value. Value can be arbitrary object * or NULL. * * Returns 0 if StopIteration is set and -1 if any other exception is set. */ static int CPyGen_SetStopIterationValue(PyObject *value) { PyObject *e; if (value == NULL || (!PyTuple_Check(value) && !PyExceptionInstance_Check(value))) { /* Delay exception instantiation if we can */ PyErr_SetObject(PyExc_StopIteration, value); return 0; } /* Construct an exception instance manually with * PyObject_CallFunctionObjArgs and pass it to PyErr_SetObject. * * We do this to handle a situation when "value" is a tuple, in which * case PyErr_SetObject would set the value of StopIteration to * the first element of the tuple. * * (See PyErr_SetObject/_PyErr_CreateException code for details.) */ e = PyObject_CallFunctionObjArgs(PyExc_StopIteration, value, NULL); if (e == NULL) { return -1; } PyErr_SetObject(PyExc_StopIteration, e); Py_DECREF(e); return 0; } #ifdef __cplusplus } #endif #endif mypy-0.761/mypyc/namegen.py0000644€tŠÔÚ€2›s®0000001047613576752246022055 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Dict, Tuple, Set, Optional, Iterable class NameGenerator: """Utility for generating distinct C names from Python names. Since C names can't use '.' (or unicode), some care is required to make C names generated from Python names unique. Also, we want to avoid generating overly long C names since they make the generated code harder to read. 
Note that we don't restrict ourselves to a 32-character distinguishing prefix guaranteed by the C standard since all the compilers we care about at the moment support longer names without issues. For names that are exported in a shared library (not static) use exported_name() instead. Summary of the approach: * Generate a unique name prefix from suffix of fully-qualified module name used for static names. If only compiling a single module, this can be empty. For example, if the modules are 'foo.bar' and 'foo.baz', the prefixes can be 'bar_' and 'baz_', respectively. If the modules are 'bar.foo' and 'baz.foo', the prefixes will be 'bar_foo_' and 'baz_foo_'. * Replace '.' in the Python name with '___' in the C name. (And replace the unlikely but possible '___' with '___3_'. This collides '___' with '.3_', but this is OK because names may not start with a digit.) The generated should be internal to a build and thus the mapping is arbitrary. Just generating names '1', '2', ... would be correct, though not very usable. """ def __init__(self, groups: Iterable[List[str]]) -> None: """Initialize with a list of modules in each compilation group. The names of modules are used to shorten names referring to modules, for convenience. Arbitrary module names are supported for generated names, but uncompiled modules will use long names. """ self.module_map = {} # type: Dict[str, str] for names in groups: self.module_map.update(make_module_translation_map(names)) self.translations = {} # type: Dict[Tuple[str, str], str] self.used_names = set() # type: Set[str] def private_name(self, module: str, partial_name: Optional[str] = None) -> str: """Return a C name usable for a static definition. Return a distinct result for each (module, partial_name) pair. The caller should add a suitable prefix to the name to avoid conflicts with other C names. Only ensure that the results of this function are unique, not that they aren't overlapping with arbitrary names. 
If a name is not specific to any module, the module argument can be an empty string. """ # TODO: Support unicode if partial_name is None: return exported_name(self.module_map[module].rstrip('.')) if (module, partial_name) in self.translations: return self.translations[module, partial_name] if module in self.module_map: module_prefix = self.module_map[module] elif module: module_prefix = module + '.' else: module_prefix = '' actual = exported_name('{}{}'.format(module_prefix, partial_name)) self.translations[module, partial_name] = actual return actual def exported_name(fullname: str) -> str: """Return a C name usable for an exported definition. This is like private_name(), but the output only depends on the 'fullname' argument, so the names are distinct across multiple builds. """ # TODO: Support unicode return fullname.replace('___', '___3_').replace('.', '___') def make_module_translation_map(names: List[str]) -> Dict[str, str]: num_instances = {} # type: Dict[str, int] for name in names: for suffix in candidate_suffixes(name): num_instances[suffix] = num_instances.get(suffix, 0) + 1 result = {} for name in names: for suffix in candidate_suffixes(name): if num_instances[suffix] == 1: result[name] = suffix break else: assert False, names return result def candidate_suffixes(fullname: str) -> List[str]: components = fullname.split('.') result = [''] for i in range(len(components)): result.append('.'.join(components[-i - 1:]) + '.') return result mypy-0.761/mypyc/ops.py0000644€tŠÔÚ€2›s®0000022626113576752246021245 0ustar jukkaDROPBOX\Domain Users00000000000000"""Representation of low-level opcodes for compiler intermediate representation (IR). Opcodes operate on abstract registers in a register machine. Each register has a type and a name, specified in an environment. A register can hold various things: - local variables - intermediate values of expressions - condition flags (true/false) - literals (integer literals, True, False, etc.) 
""" from abc import abstractmethod from typing import ( List, Sequence, Dict, Generic, TypeVar, Optional, Any, NamedTuple, Tuple, Callable, Union, Iterable, Set ) from typing_extensions import Final, Type, ClassVar from collections import OrderedDict from mypy.nodes import ARG_NAMED_OPT, ARG_OPT, ARG_POS, Block, FuncDef, SymbolNode from mypyc.common import PROPSET_PREFIX from mypy_extensions import trait from mypyc.namegen import NameGenerator, exported_name T = TypeVar('T') JsonDict = Dict[str, Any] class RType: """Abstract base class for runtime types (erased, only concrete; no generics).""" name = None # type: str is_unboxed = False c_undefined = None # type: str is_refcounted = True # If unboxed: does the unboxed version use reference counting? _ctype = None # type: str # C type; use Emitter.ctype() to access @abstractmethod def accept(self, visitor: 'RTypeVisitor[T]') -> T: raise NotImplementedError def short_name(self) -> str: return short_name(self.name) def __str__(self) -> str: return short_name(self.name) def __repr__(self) -> str: return '<%s>' % self.__class__.__name__ def __eq__(self, other: object) -> bool: return isinstance(other, RType) and other.name == self.name def __hash__(self) -> int: return hash(self.name) def serialize(self) -> Union[JsonDict, str]: raise NotImplementedError('Cannot serialize {} instance'.format(self.__class__.__name__)) # We do a three-pass deserialization scheme in order to resolve name # references. # 1. Create an empty ClassIR for each class in an SCC. # 2. Deserialize all of the functions, which can contain references # to ClassIRs in their types # 3. Deserialize all of the classes, which contain lots of references # to the functions they contain. (And to other classes.) # # Note that this approach differs from how we deserialize ASTs in mypy itself, # where everything is deserialized in one pass then a second pass cleans up # 'cross_refs'. 
We don't follow that approach here because it seems to be more # code for not a lot of gain since it is easy in mypyc to identify all the objects # we might need to reference. # # Because of these references, we need to maintain maps from class # names to ClassIRs and func names to FuncIRs. # # These are tracked in a DeserMaps which is passed to every # deserialization function. # # (Serialization and deserialization *will* be used for incremental # compilation but so far it is not hooked up to anything.) DeserMaps = NamedTuple('DeserMaps', [('classes', Dict[str, 'ClassIR']), ('functions', Dict[str, 'FuncIR'])]) def deserialize_type(data: Union[JsonDict, str], ctx: DeserMaps) -> 'RType': """Deserialize a JSON-serialized RType. Arguments: data: The decoded JSON of the serialized type ctx: The deserialization maps to use """ # Since there are so few types, we just case on them directly. If # more get added we should switch to a system like mypy.types # uses. if isinstance(data, str): if data in ctx.classes: return RInstance(ctx.classes[data]) elif data in RPrimitive.primitive_map: return RPrimitive.primitive_map[data] elif data == "void": return RVoid() else: assert False, "Can't find class {}".format(data) elif data['.class'] == 'RTuple': return RTuple.deserialize(data, ctx) elif data['.class'] == 'RUnion': return RUnion.deserialize(data, ctx) raise NotImplementedError('unexpected .class {}'.format(data['.class'])) class RTypeVisitor(Generic[T]): @abstractmethod def visit_rprimitive(self, typ: 'RPrimitive') -> T: raise NotImplementedError @abstractmethod def visit_rinstance(self, typ: 'RInstance') -> T: raise NotImplementedError @abstractmethod def visit_runion(self, typ: 'RUnion') -> T: raise NotImplementedError @abstractmethod def visit_rtuple(self, typ: 'RTuple') -> T: raise NotImplementedError @abstractmethod def visit_rvoid(self, typ: 'RVoid') -> T: raise NotImplementedError class RVoid(RType): """void""" is_unboxed = False name = 'void' ctype = 'void' def 
accept(self, visitor: 'RTypeVisitor[T]') -> T: return visitor.visit_rvoid(self) def serialize(self) -> str: return 'void' void_rtype = RVoid() # type: Final class RPrimitive(RType): """Primitive type such as 'object' or 'int'. These often have custom ops associated with them. """ # Map from primitive names to primitive types and is used by deserialization primitive_map = {} # type: ClassVar[Dict[str, RPrimitive]] def __init__(self, name: str, is_unboxed: bool, is_refcounted: bool, ctype: str = 'PyObject *') -> None: RPrimitive.primitive_map[name] = self self.name = name self.is_unboxed = is_unboxed self._ctype = ctype self.is_refcounted = is_refcounted if ctype == 'CPyTagged': self.c_undefined = 'CPY_INT_TAG' elif ctype == 'PyObject *': self.c_undefined = 'NULL' elif ctype == 'char': self.c_undefined = '2' else: assert False, 'Unrecognized ctype: %r' % ctype def accept(self, visitor: 'RTypeVisitor[T]') -> T: return visitor.visit_rprimitive(self) def serialize(self) -> str: return self.name def __repr__(self) -> str: return '' % self.name # Used to represent arbitrary objects and dynamically typed values object_rprimitive = RPrimitive('builtins.object', is_unboxed=False, is_refcounted=True) # type: Final int_rprimitive = RPrimitive('builtins.int', is_unboxed=True, is_refcounted=True, ctype='CPyTagged') # type: Final short_int_rprimitive = RPrimitive('short_int', is_unboxed=True, is_refcounted=False, ctype='CPyTagged') # type: Final float_rprimitive = RPrimitive('builtins.float', is_unboxed=False, is_refcounted=True) # type: Final bool_rprimitive = RPrimitive('builtins.bool', is_unboxed=True, is_refcounted=False, ctype='char') # type: Final none_rprimitive = RPrimitive('builtins.None', is_unboxed=True, is_refcounted=False, ctype='char') # type: Final list_rprimitive = RPrimitive('builtins.list', is_unboxed=False, is_refcounted=True) # type: Final dict_rprimitive = RPrimitive('builtins.dict', is_unboxed=False, is_refcounted=True) # type: Final set_rprimitive = 
RPrimitive('builtins.set', is_unboxed=False, is_refcounted=True) # type: Final # At the C layer, str is refered to as unicode (PyUnicode) str_rprimitive = RPrimitive('builtins.str', is_unboxed=False, is_refcounted=True) # type: Final # Tuple of an arbitrary length (corresponds to Tuple[t, ...], with explicit '...') tuple_rprimitive = RPrimitive('builtins.tuple', is_unboxed=False, is_refcounted=True) # type: Final def is_int_rprimitive(rtype: RType) -> bool: return rtype is int_rprimitive def is_short_int_rprimitive(rtype: RType) -> bool: return rtype is short_int_rprimitive def is_float_rprimitive(rtype: RType) -> bool: return isinstance(rtype, RPrimitive) and rtype.name == 'builtins.float' def is_bool_rprimitive(rtype: RType) -> bool: return isinstance(rtype, RPrimitive) and rtype.name == 'builtins.bool' def is_object_rprimitive(rtype: RType) -> bool: return isinstance(rtype, RPrimitive) and rtype.name == 'builtins.object' def is_none_rprimitive(rtype: RType) -> bool: return isinstance(rtype, RPrimitive) and rtype.name == 'builtins.None' def is_list_rprimitive(rtype: RType) -> bool: return isinstance(rtype, RPrimitive) and rtype.name == 'builtins.list' def is_dict_rprimitive(rtype: RType) -> bool: return isinstance(rtype, RPrimitive) and rtype.name == 'builtins.dict' def is_set_rprimitive(rtype: RType) -> bool: return isinstance(rtype, RPrimitive) and rtype.name == 'builtins.set' def is_str_rprimitive(rtype: RType) -> bool: return isinstance(rtype, RPrimitive) and rtype.name == 'builtins.str' def is_tuple_rprimitive(rtype: RType) -> bool: return isinstance(rtype, RPrimitive) and rtype.name == 'builtins.tuple' class TupleNameVisitor(RTypeVisitor[str]): """Produce a tuple name based on the concrete representations of types.""" def visit_rinstance(self, t: 'RInstance') -> str: return "O" def visit_runion(self, t: 'RUnion') -> str: return "O" def visit_rprimitive(self, t: 'RPrimitive') -> str: if t._ctype == 'CPyTagged': return 'I' elif t._ctype == 'char': return 'C' 
assert not t.is_unboxed, "{} unexpected unboxed type".format(t) return 'O' def visit_rtuple(self, t: 'RTuple') -> str: parts = [elem.accept(self) for elem in t.types] return 'T{}{}'.format(len(parts), ''.join(parts)) def visit_rvoid(self, t: 'RVoid') -> str: assert False, "rvoid in tuple?" class RTuple(RType): """Fixed-length unboxed tuple (represented as a C struct).""" is_unboxed = True def __init__(self, types: List[RType]) -> None: self.name = 'tuple' self.types = tuple(types) self.is_refcounted = any(t.is_refcounted for t in self.types) # Generate a unique id which is used in naming corresponding C identifiers. # This is necessary since C does not have anonymous structural type equivalence # in the same way python can just assign a Tuple[int, bool] to a Tuple[int, bool]. self.unique_id = self.accept(TupleNameVisitor()) # Nominally the max c length is 31 chars, but I'm not honestly worried about this. self.struct_name = 'tuple_{}'.format(self.unique_id) self._ctype = '{}'.format(self.struct_name) def accept(self, visitor: 'RTypeVisitor[T]') -> T: return visitor.visit_rtuple(self) def __str__(self) -> str: return 'tuple[%s]' % ', '.join(str(typ) for typ in self.types) def __repr__(self) -> str: return '' % ', '.join(repr(typ) for typ in self.types) def __eq__(self, other: object) -> bool: return isinstance(other, RTuple) and self.types == other.types def __hash__(self) -> int: return hash((self.name, self.types)) def serialize(self) -> JsonDict: types = [x.serialize() for x in self.types] return {'.class': 'RTuple', 'types': types} @classmethod def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'RTuple': types = [deserialize_type(t, ctx) for t in data['types']] return RTuple(types) exc_rtuple = RTuple([object_rprimitive, object_rprimitive, object_rprimitive]) class RInstance(RType): """Instance of user-defined class (compiled to C extension class).""" is_unboxed = False def __init__(self, class_ir: 'ClassIR') -> None: # name is used for formatting the name 
in messages and debug output # so we want the fullname for precision. self.name = class_ir.fullname self.class_ir = class_ir self._ctype = 'PyObject *' def accept(self, visitor: 'RTypeVisitor[T]') -> T: return visitor.visit_rinstance(self) def struct_name(self, names: NameGenerator) -> str: return self.class_ir.struct_name(names) def getter_index(self, name: str) -> int: return self.class_ir.vtable_entry(name) def setter_index(self, name: str) -> int: return self.getter_index(name) + 1 def method_index(self, name: str) -> int: return self.class_ir.vtable_entry(name) def attr_type(self, name: str) -> RType: return self.class_ir.attr_type(name) def __repr__(self) -> str: return '' % self.name def serialize(self) -> str: return self.name class RUnion(RType): """union[x, ..., y]""" is_unboxed = False def __init__(self, items: List[RType]) -> None: self.name = 'union' self.items = items self.items_set = frozenset(items) self._ctype = 'PyObject *' def accept(self, visitor: 'RTypeVisitor[T]') -> T: return visitor.visit_runion(self) def __repr__(self) -> str: return '' % ', '.join(str(item) for item in self.items) def __str__(self) -> str: return 'union[%s]' % ', '.join(str(item) for item in self.items) # We compare based on the set because order in a union doesn't matter def __eq__(self, other: object) -> bool: return isinstance(other, RUnion) and self.items_set == other.items_set def __hash__(self) -> int: return hash(('union', self.items_set)) def serialize(self) -> JsonDict: types = [x.serialize() for x in self.items] return {'.class': 'RUnion', 'types': types} @classmethod def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'RUnion': types = [deserialize_type(t, ctx) for t in data['types']] return RUnion(types) def optional_value_type(rtype: RType) -> Optional[RType]: if isinstance(rtype, RUnion) and len(rtype.items) == 2: if rtype.items[0] == none_rprimitive: return rtype.items[1] elif rtype.items[1] == none_rprimitive: return rtype.items[0] return None def 
is_optional_type(rtype: RType) -> bool: return optional_value_type(rtype) is not None class AssignmentTarget(object): type = None # type: RType @abstractmethod def to_str(self, env: 'Environment') -> str: raise NotImplementedError class AssignmentTargetRegister(AssignmentTarget): """Register as assignment target""" def __init__(self, register: 'Register') -> None: self.register = register self.type = register.type def to_str(self, env: 'Environment') -> str: return self.register.name class AssignmentTargetIndex(AssignmentTarget): """base[index] as assignment target""" def __init__(self, base: 'Value', index: 'Value') -> None: self.base = base self.index = index # TODO: This won't be right for user-defined classes. Store the # lvalue type in mypy and remove this special case. self.type = object_rprimitive def to_str(self, env: 'Environment') -> str: return '{}[{}]'.format(self.base.name, self.index.name) class AssignmentTargetAttr(AssignmentTarget): """obj.attr as assignment target""" def __init__(self, obj: 'Value', attr: str) -> None: self.obj = obj self.attr = attr if isinstance(obj.type, RInstance) and obj.type.class_ir.has_attr(attr): self.obj_type = obj.type # type: RType self.type = obj.type.attr_type(attr) else: self.obj_type = object_rprimitive self.type = object_rprimitive def to_str(self, env: 'Environment') -> str: return '{}.{}'.format(self.obj.to_str(env), self.attr) class AssignmentTargetTuple(AssignmentTarget): """x, ..., y as assignment target""" def __init__(self, items: List[AssignmentTarget], star_idx: Optional[int] = None) -> None: self.items = items self.star_idx = star_idx # The shouldn't be relevant, but provide it just in case. 
self.type = object_rprimitive def to_str(self, env: 'Environment') -> str: return '({})'.format(', '.join(item.to_str(env) for item in self.items)) class Environment: """Maintain the register symbol table and manage temp generation""" def __init__(self, name: Optional[str] = None) -> None: self.name = name self.indexes = OrderedDict() # type: Dict[Value, int] self.symtable = OrderedDict() # type: OrderedDict[SymbolNode, AssignmentTarget] self.temp_index = 0 self.names = {} # type: Dict[str, int] self.vars_needing_init = set() # type: Set[Value] def regs(self) -> Iterable['Value']: return self.indexes.keys() def add(self, reg: 'Value', name: str) -> None: # Ensure uniqueness of variable names in this environment. # This is needed for things like list comprehensions, which are their own scope-- # if we don't do this and two comprehensions use the same variable, we'd try to # declare that variable twice. unique_name = name while unique_name in self.names: unique_name = name + str(self.names[name]) self.names[name] += 1 self.names[unique_name] = 0 reg.name = unique_name self.indexes[reg] = len(self.indexes) def add_local(self, symbol: SymbolNode, typ: RType, is_arg: bool = False) -> 'Register': assert isinstance(symbol, SymbolNode) reg = Register(typ, symbol.line, is_arg=is_arg) self.symtable[symbol] = AssignmentTargetRegister(reg) self.add(reg, symbol.name) return reg def add_local_reg(self, symbol: SymbolNode, typ: RType, is_arg: bool = False) -> AssignmentTargetRegister: self.add_local(symbol, typ, is_arg) target = self.symtable[symbol] assert isinstance(target, AssignmentTargetRegister) return target def add_target(self, symbol: SymbolNode, target: AssignmentTarget) -> AssignmentTarget: self.symtable[symbol] = target return target def lookup(self, symbol: SymbolNode) -> AssignmentTarget: return self.symtable[symbol] def add_temp(self, typ: RType, is_arg: bool = False) -> 'Register': assert isinstance(typ, RType) reg = Register(typ, is_arg=is_arg) self.add(reg, 
'r%d' % self.temp_index) self.temp_index += 1 return reg def add_op(self, reg: 'RegisterOp') -> None: if reg.is_void: return self.add(reg, 'r%d' % self.temp_index) self.temp_index += 1 def format(self, fmt: str, *args: Any) -> str: result = [] i = 0 arglist = list(args) while i < len(fmt): n = fmt.find('%', i) if n < 0: n = len(fmt) result.append(fmt[i:n]) if n < len(fmt): typespec = fmt[n + 1] arg = arglist.pop(0) if typespec == 'r': result.append(arg.name) elif typespec == 'd': result.append('%d' % arg) elif typespec == 'f': result.append('%f' % arg) elif typespec == 'l': if isinstance(arg, BasicBlock): arg = arg.label result.append('L%s' % arg) elif typespec == 's': result.append(str(arg)) else: raise ValueError('Invalid format sequence %{}'.format(typespec)) i = n + 2 else: i = n return ''.join(result) def to_lines(self) -> List[str]: result = [] i = 0 regs = list(self.regs()) while i < len(regs): i0 = i group = [regs[i0].name] while i + 1 < len(regs) and regs[i + 1].type == regs[i0].type: i += 1 group.append(regs[i].name) i += 1 result.append('%s :: %s' % (', '.join(group), regs[i0].type)) return result class BasicBlock: """Basic IR block. Ends with a jump, branch, or return. When building the IR, ops that raise exceptions can be included in the middle of a basic block, but the exceptions aren't checked. Afterwards we perform a transform that inserts explicit checks for all error conditions and splits basic blocks accordingly to preserve the invariant that a jump, branch or return can only ever appear as the final op in a block. Manually inserting error checking ops would be boring and error-prone. BasicBlocks have an error_handler attribute that determines where to jump if an error occurs. If none is specified, an error will propagate up out of the function. This is compiled away by the `exceptions` module. Block labels are used for pretty printing and emitting C code, and get filled in by those passes. 
Ops that may terminate the program aren't treated as exits. """ def __init__(self, label: int = -1) -> None: self.label = label self.ops = [] # type: List[Op] self.error_handler = None # type: Optional[BasicBlock] # Never generates an exception ERR_NEVER = 0 # type: Final # Generates magic value (c_error_value) based on target RType on exception ERR_MAGIC = 1 # type: Final # Generates false (bool) on exception ERR_FALSE = 2 # type: Final # Hack: using this line number for an op will supress it in tracebacks NO_TRACEBACK_LINE_NO = -10000 class Value: # Source line number line = -1 name = '?' type = void_rtype # type: RType is_borrowed = False def __init__(self, line: int) -> None: self.line = line @property def is_void(self) -> bool: return isinstance(self.type, RVoid) @abstractmethod def to_str(self, env: Environment) -> str: raise NotImplementedError class Register(Value): def __init__(self, type: RType, line: int = -1, is_arg: bool = False, name: str = '') -> None: super().__init__(line) self.name = name self.type = type self.is_arg = is_arg self.is_borrowed = is_arg def to_str(self, env: Environment) -> str: return self.name @property def is_void(self) -> bool: return False class Op(Value): def __init__(self, line: int) -> None: super().__init__(line) def can_raise(self) -> bool: # Override this is if Op may raise an exception. Note that currently the fact that # only RegisterOps may raise an exception in hard coded in some places. return False @abstractmethod def sources(self) -> List[Value]: pass def stolen(self) -> List[Value]: """Return arguments that have a reference count stolen by this op""" return [] def unique_sources(self) -> List[Value]: result = [] # type: List[Value] for reg in self.sources(): if reg not in result: result.append(reg) return result @abstractmethod def accept(self, visitor: 'OpVisitor[T]') -> T: pass class ControlOp(Op): # Basically just for hierarchy organization. # We could plausibly have a targets() method if we wanted. 
pass class Goto(ControlOp): """Unconditional jump.""" error_kind = ERR_NEVER def __init__(self, label: BasicBlock, line: int = -1) -> None: super().__init__(line) self.label = label def __repr__(self) -> str: return '' % self.label.label def sources(self) -> List[Value]: return [] def to_str(self, env: Environment) -> str: return env.format('goto %l', self.label) def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_goto(self) class Branch(ControlOp): """if [not] r1 goto 1 else goto 2""" # Branch ops must *not* raise an exception. If a comparison, for example, can raise an # exception, it needs to split into two opcodes and only the first one may fail. error_kind = ERR_NEVER BOOL_EXPR = 100 # type: Final IS_ERROR = 101 # type: Final op_names = { BOOL_EXPR: ('%r', 'bool'), IS_ERROR: ('is_error(%r)', ''), } # type: Final def __init__(self, left: Value, true_label: BasicBlock, false_label: BasicBlock, op: int, line: int = -1, *, rare: bool = False) -> None: super().__init__(line) self.left = left self.true = true_label self.false = false_label self.op = op self.negated = False # If not None, the true label should generate a traceback entry (func name, line number) self.traceback_entry = None # type: Optional[Tuple[str, int]] self.rare = rare def sources(self) -> List[Value]: return [self.left] def to_str(self, env: Environment) -> str: fmt, typ = self.op_names[self.op] if self.negated: fmt = 'not {}'.format(fmt) cond = env.format(fmt, self.left) tb = '' if self.traceback_entry: tb = ' (error at %s:%d)' % self.traceback_entry fmt = 'if {} goto %l{} else goto %l'.format(cond, tb) if typ: fmt += ' :: {}'.format(typ) return env.format(fmt, self.true, self.false) def invert(self) -> None: self.negated = not self.negated def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_branch(self) class Return(ControlOp): error_kind = ERR_NEVER def __init__(self, reg: Value, line: int = -1) -> None: super().__init__(line) self.reg = reg def sources(self) 
-> List[Value]: return [self.reg] def stolen(self) -> List[Value]: return [self.reg] def to_str(self, env: Environment) -> str: return env.format('return %r', self.reg) def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_return(self) class Unreachable(ControlOp): """Added to the end of non-None returning functions. Mypy statically guarantees that the end of the function is not unreachable if there is not a return statement. This prevents the block formatter from being confused due to lack of a leave and also leaves a nifty note in the IR. It is not generally processed by visitors. """ error_kind = ERR_NEVER def __init__(self, line: int = -1) -> None: super().__init__(line) def to_str(self, env: Environment) -> str: return "unreachable" def sources(self) -> List[Value]: return [] def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_unreachable(self) class RegisterOp(Op): """An operation that can be written as r1 = f(r2, ..., rn). Takes some registers, performs an operation and generates an output. Doesn't do any control flow, but can raise an error. 
    """

    # Can this raise exception and how is it signalled; one of ERR_*
    error_kind = -1

    _type = None  # type: Optional[RType]

    def __init__(self, line: int) -> None:
        super().__init__(line)
        # Subclasses must override error_kind at the class level.
        assert self.error_kind != -1, 'error_kind not defined'

    def can_raise(self) -> bool:
        return self.error_kind != ERR_NEVER


class IncRef(RegisterOp):
    """inc_ref r"""

    error_kind = ERR_NEVER

    def __init__(self, src: Value, line: int = -1) -> None:
        # Only refcounted values can be inc/dec-refed.
        assert src.type.is_refcounted
        super().__init__(line)
        self.src = src

    def to_str(self, env: Environment) -> str:
        s = env.format('inc_ref %r', self.src)
        # Annotate bool/int ops since their refcounting is non-obvious.
        if is_bool_rprimitive(self.src.type) or is_int_rprimitive(self.src.type):
            s += ' :: {}'.format(short_name(self.src.type.name))
        return s

    def sources(self) -> List[Value]:
        return [self.src]

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_inc_ref(self)


class DecRef(RegisterOp):
    """dec_ref r

    The is_xdec flag says to use an XDECREF, which checks if the
    pointer is NULL first.
    """

    error_kind = ERR_NEVER

    def __init__(self, src: Value, is_xdec: bool = False, line: int = -1) -> None:
        assert src.type.is_refcounted
        super().__init__(line)
        self.src = src
        self.is_xdec = is_xdec

    def __repr__(self) -> str:
        return '<%sDecRef %r>' % ('X' if self.is_xdec else '', self.src)

    def to_str(self, env: Environment) -> str:
        s = env.format('%sdec_ref %r', 'x' if self.is_xdec else '', self.src)
        if is_bool_rprimitive(self.src.type) or is_int_rprimitive(self.src.type):
            s += ' :: {}'.format(short_name(self.src.type.name))
        return s

    def sources(self) -> List[Value]:
        return [self.src]

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_dec_ref(self)


class Call(RegisterOp):
    """Native call f(arg, ...)

    The call target can be a module-level function or a class.
""" error_kind = ERR_MAGIC def __init__(self, fn: 'FuncDecl', args: Sequence[Value], line: int) -> None: super().__init__(line) self.fn = fn self.args = list(args) self.type = fn.sig.ret_type def to_str(self, env: Environment) -> str: args = ', '.join(env.format('%r', arg) for arg in self.args) # TODO: Display long name? short_name = self.fn.shortname s = '%s(%s)' % (short_name, args) if not self.is_void: s = env.format('%r = ', self) + s return s def sources(self) -> List[Value]: return list(self.args[:]) def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_call(self) class MethodCall(RegisterOp): """Native method call obj.m(arg, ...) """ error_kind = ERR_MAGIC def __init__(self, obj: Value, method: str, args: List[Value], line: int = -1) -> None: super().__init__(line) self.obj = obj self.method = method self.args = args assert isinstance(obj.type, RInstance), "Methods can only be called on instances" self.receiver_type = obj.type method_ir = self.receiver_type.class_ir.method_sig(method) assert method_ir is not None, "{} doesn't have method {}".format( self.receiver_type.name, method) self.type = method_ir.ret_type def to_str(self, env: Environment) -> str: args = ', '.join(env.format('%r', arg) for arg in self.args) s = env.format('%r.%s(%s)', self.obj, self.method, args) if not self.is_void: s = env.format('%r = ', self) + s return s def sources(self) -> List[Value]: return self.args[:] + [self.obj] def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_method_call(self) @trait class EmitterInterface(): @abstractmethod def reg(self, name: Value) -> str: raise NotImplementedError @abstractmethod def c_error_value(self, rtype: RType) -> str: raise NotImplementedError @abstractmethod def temp_name(self) -> str: raise NotImplementedError @abstractmethod def emit_line(self, line: str) -> None: raise NotImplementedError @abstractmethod def emit_lines(self, *lines: str) -> None: raise NotImplementedError @abstractmethod def 
emit_declaration(self, line: str) -> None: raise NotImplementedError EmitCallback = Callable[[EmitterInterface, List[str], str], None] # True steals all arguments, False steals none, a list steals those in matching positions StealsDescription = Union[bool, List[bool]] OpDescription = NamedTuple( 'OpDescription', [('name', str), ('arg_types', List[RType]), ('result_type', Optional[RType]), ('is_var_arg', bool), ('error_kind', int), ('format_str', str), ('emit', EmitCallback), ('steals', StealsDescription), ('is_borrowed', bool), ('priority', int)]) # To resolve ambiguities, highest priority wins class PrimitiveOp(RegisterOp): """reg = op(reg, ...) These are register-based primitive operations that work on specific operand types. The details of the operation are defined by the 'desc' attribute. The mypyc.ops_* modules define the supported operations. mypyc.genops uses the descriptions to look for suitable primitive ops. """ def __init__(self, args: List[Value], desc: OpDescription, line: int) -> None: if not desc.is_var_arg: assert len(args) == len(desc.arg_types) self.error_kind = desc.error_kind super().__init__(line) self.args = args self.desc = desc if desc.result_type is None: assert desc.error_kind == ERR_FALSE # TODO: No-value ops not supported yet self.type = bool_rprimitive else: self.type = desc.result_type self.is_borrowed = desc.is_borrowed def sources(self) -> List[Value]: return list(self.args) def stolen(self) -> List[Value]: if isinstance(self.desc.steals, list): assert len(self.desc.steals) == len(self.args) return [arg for arg, steal in zip(self.args, self.desc.steals) if steal] else: return [] if not self.desc.steals else self.sources() def __repr__(self) -> str: return '' % (self.desc.name, self.args) def to_str(self, env: Environment) -> str: params = {} # type: Dict[str, Any] if not self.is_void: params['dest'] = env.format('%r', self) args = [env.format('%r', arg) for arg in self.args] params['args'] = args params['comma_args'] = ', 
'.join(args) params['colon_args'] = ', '.join( '{}: {}'.format(k, v) for k, v in zip(args[::2], args[1::2]) ) return self.desc.format_str.format(**params).strip() def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_primitive_op(self) class Assign(Op): """dest = int""" error_kind = ERR_NEVER def __init__(self, dest: Register, src: Value, line: int = -1) -> None: super().__init__(line) self.src = src self.dest = dest def sources(self) -> List[Value]: return [self.src] def stolen(self) -> List[Value]: return [self.src] def to_str(self, env: Environment) -> str: return env.format('%r = %r', self.dest, self.src) def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_assign(self) class LoadInt(RegisterOp): """dest = int""" error_kind = ERR_NEVER def __init__(self, value: int, line: int = -1) -> None: super().__init__(line) self.value = value self.type = short_int_rprimitive def sources(self) -> List[Value]: return [] def to_str(self, env: Environment) -> str: return env.format('%r = %d', self, self.value) def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_load_int(self) class LoadErrorValue(RegisterOp): """dest = """ error_kind = ERR_NEVER def __init__(self, rtype: RType, line: int = -1, is_borrowed: bool = False, undefines: bool = False) -> None: super().__init__(line) self.type = rtype self.is_borrowed = is_borrowed # Undefines is true if this should viewed by the definedness # analysis pass as making the register it is assigned to # undefined (and thus checks should be added on uses). 
self.undefines = undefines def sources(self) -> List[Value]: return [] def to_str(self, env: Environment) -> str: return env.format('%r = :: %s', self, self.type) def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_load_error_value(self) class GetAttr(RegisterOp): """dest = obj.attr (for a native object)""" error_kind = ERR_MAGIC def __init__(self, obj: Value, attr: str, line: int) -> None: super().__init__(line) self.obj = obj self.attr = attr assert isinstance(obj.type, RInstance), 'Attribute access not supported: %s' % obj.type self.class_type = obj.type self.type = obj.type.attr_type(attr) def sources(self) -> List[Value]: return [self.obj] def to_str(self, env: Environment) -> str: return env.format('%r = %r.%s', self, self.obj, self.attr) def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_get_attr(self) class SetAttr(RegisterOp): """obj.attr = src (for a native object) Steals the reference to src. """ error_kind = ERR_FALSE def __init__(self, obj: Value, attr: str, src: Value, line: int) -> None: super().__init__(line) self.obj = obj self.attr = attr self.src = src assert isinstance(obj.type, RInstance), 'Attribute access not supported: %s' % obj.type self.class_type = obj.type self.type = bool_rprimitive def sources(self) -> List[Value]: return [self.obj, self.src] def stolen(self) -> List[Value]: return [self.src] def to_str(self, env: Environment) -> str: return env.format('%r.%s = %r; %r = is_error', self.obj, self.attr, self.src, self) def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_set_attr(self) NAMESPACE_STATIC = 'static' # type: Final # Default name space for statics, variables NAMESPACE_TYPE = 'type' # type: Final # Static namespace for pointers to native type objects NAMESPACE_MODULE = 'module' # type: Final # Namespace for modules class LoadStatic(RegisterOp): """dest = name :: static Load a C static variable/pointer. The namespace for statics is shared for the entire compilation group. 
    You can optionally provide a module name and a sub-namespace identifier for
    additional namespacing to avoid name conflicts. The static namespace does not
    overlap with other C names, since the final C name will get a prefix, so
    conflicts only must be avoided with other statics.
    """

    error_kind = ERR_NEVER
    is_borrowed = True

    def __init__(self,
                 type: RType,
                 identifier: str,
                 module_name: Optional[str] = None,
                 namespace: str = NAMESPACE_STATIC,
                 line: int = -1,
                 ann: object = None) -> None:
        super().__init__(line)
        self.identifier = identifier
        self.module_name = module_name
        self.namespace = namespace
        self.type = type
        self.ann = ann  # An object to pretty print with the load

    def sources(self) -> List[Value]:
        return []

    def to_str(self, env: Environment) -> str:
        ann = ' ({})'.format(repr(self.ann)) if self.ann else ''
        name = self.identifier
        if self.module_name is not None:
            name = '{}.{}'.format(self.module_name, name)
        return env.format('%r = %s :: %s%s', self, name, self.namespace, ann)

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_load_static(self)


class InitStatic(RegisterOp):
    """static = value :: static

    Initialize a C static variable/pointer. See everything in LoadStatic.
    """

    error_kind = ERR_NEVER

    def __init__(self,
                 value: Value,
                 identifier: str,
                 module_name: Optional[str] = None,
                 namespace: str = NAMESPACE_STATIC,
                 line: int = -1) -> None:
        super().__init__(line)
        self.identifier = identifier
        self.module_name = module_name
        self.namespace = namespace
        self.value = value

    def sources(self) -> List[Value]:
        return [self.value]

    def to_str(self, env: Environment) -> str:
        name = self.identifier
        if self.module_name is not None:
            name = '{}.{}'.format(self.module_name, name)
        return env.format('%s = %r :: %s', name, self.value, self.namespace)

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_init_static(self)


class TupleSet(RegisterOp):
    """dest = (reg, ...)
    (for fixed-length tuple)"""

    error_kind = ERR_NEVER

    def __init__(self, items: List[Value], line: int) -> None:
        super().__init__(line)
        self.items = items
        # Don't keep track of the fact that an int is short after it
        # is put into a tuple, since we don't properly implement
        # runtime subtyping for tuples.
        self.tuple_type = RTuple(
            [arg.type if not is_short_int_rprimitive(arg.type) else int_rprimitive
             for arg in items])
        self.type = self.tuple_type

    def sources(self) -> List[Value]:
        return self.items[:]

    def to_str(self, env: Environment) -> str:
        item_str = ', '.join(env.format('%r', item) for item in self.items)
        return env.format('%r = (%s)', self, item_str)

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_tuple_set(self)


class TupleGet(RegisterOp):
    """dest = src[n] (for fixed-length tuple)"""

    error_kind = ERR_NEVER

    def __init__(self, src: Value, index: int, line: int) -> None:
        super().__init__(line)
        self.src = src
        self.index = index
        assert isinstance(src.type, RTuple), "TupleGet only operates on tuples"
        # The result type is the statically-known item type at that index.
        self.type = src.type.types[index]

    def sources(self) -> List[Value]:
        return [self.src]

    def to_str(self, env: Environment) -> str:
        return env.format('%r = %r[%d]', self, self.src, self.index)

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_tuple_get(self)


class Cast(RegisterOp):
    """dest = cast(type, src)

    Perform a runtime type check (no representation or value conversion).

    DO NOT increment reference counts.
    """

    error_kind = ERR_MAGIC

    def __init__(self, src: Value, typ: RType, line: int) -> None:
        super().__init__(line)
        self.src = src
        self.type = typ

    def sources(self) -> List[Value]:
        return [self.src]

    def stolen(self) -> List[Value]:
        # The cast result reuses src's reference.
        return [self.src]

    def to_str(self, env: Environment) -> str:
        return env.format('%r = cast(%s, %r)', self, self.type, self.src)

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_cast(self)


class Box(RegisterOp):
    """dest = box(type, src)

    This converts from a potentially unboxed representation to a straight Python
    object. Only supported for types with an unboxed representation.
    """

    error_kind = ERR_NEVER

    def __init__(self, src: Value, line: int = -1) -> None:
        super().__init__(line)
        self.src = src
        self.type = object_rprimitive
        # When we box None and bool values, we produce a borrowed result
        if is_none_rprimitive(self.src.type) or is_bool_rprimitive(self.src.type):
            self.is_borrowed = True

    def sources(self) -> List[Value]:
        return [self.src]

    def stolen(self) -> List[Value]:
        return [self.src]

    def to_str(self, env: Environment) -> str:
        return env.format('%r = box(%s, %r)', self, self.src.type, self.src)

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_box(self)


class Unbox(RegisterOp):
    """dest = unbox(type, src)

    This is similar to a cast, but it also changes to a (potentially) unboxed runtime
    representation. Only supported for types with an unboxed representation.
    """

    error_kind = ERR_MAGIC

    def __init__(self, src: Value, typ: RType, line: int) -> None:
        super().__init__(line)
        self.src = src
        self.type = typ

    def sources(self) -> List[Value]:
        return [self.src]

    def to_str(self, env: Environment) -> str:
        return env.format('%r = unbox(%s, %r)', self, self.type, self.src)

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_unbox(self)


class RaiseStandardError(RegisterOp):
    """Raise built-in exception with an optional error string.
    We have a separate opcode for this for convenience and to generate
    smaller, more idiomatic C code.
    """

    # TODO: Make it more explicit at IR level that this always raises

    error_kind = ERR_FALSE

    VALUE_ERROR = 'ValueError'  # type: Final
    ASSERTION_ERROR = 'AssertionError'  # type: Final
    STOP_ITERATION = 'StopIteration'  # type: Final
    UNBOUND_LOCAL_ERROR = 'UnboundLocalError'  # type: Final
    RUNTIME_ERROR = 'RuntimeError'  # type: Final

    def __init__(self, class_name: str, value: Optional[Union[str, Value]], line: int) -> None:
        super().__init__(line)
        self.class_name = class_name
        # value may be a literal message (str) or a runtime Value, or None.
        self.value = value
        self.type = bool_rprimitive

    def to_str(self, env: Environment) -> str:
        if self.value is not None:
            if isinstance(self.value, str):
                return 'raise %s(%r)' % (self.class_name, self.value)
            elif isinstance(self.value, Value):
                return env.format('raise %s(%r)', self.class_name, self.value)
            else:
                assert False, 'value type must be either str or Value'
        else:
            return 'raise %s' % self.class_name

    def sources(self) -> List[Value]:
        return []

    def accept(self, visitor: 'OpVisitor[T]') -> T:
        return visitor.visit_raise_standard_error(self)


class RuntimeArg:
    """Representation of a function argument in the IR (name, type, arg kind)."""

    def __init__(self, name: str, typ: RType, kind: int = ARG_POS) -> None:
        self.name = name
        self.type = typ
        self.kind = kind

    @property
    def optional(self) -> bool:
        return self.kind == ARG_OPT or self.kind == ARG_NAMED_OPT

    def __repr__(self) -> str:
        return 'RuntimeArg(name=%s, type=%s, optional=%r)' % (self.name, self.type, self.optional)

    def serialize(self) -> JsonDict:
        return {'name': self.name, 'type': self.type.serialize(), 'kind': self.kind}

    @classmethod
    def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'RuntimeArg':
        return RuntimeArg(
            data['name'],
            deserialize_type(data['type'], ctx),
            data['kind'],
        )


class FuncSignature:
    # TODO: track if method?
    def __init__(self, args: Sequence[RuntimeArg], ret_type: RType) -> None:
        self.args = tuple(args)
        self.ret_type = ret_type

    def __repr__(self) -> str:
        return 'FuncSignature(args=%r, ret=%r)' % (self.args, self.ret_type)

    def serialize(self) -> JsonDict:
        return {'args': [t.serialize() for t in self.args], 'ret_type': self.ret_type.serialize()}

    @classmethod
    def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'FuncSignature':
        return FuncSignature(
            [RuntimeArg.deserialize(arg, ctx) for arg in data['args']],
            deserialize_type(data['ret_type'], ctx),
        )


FUNC_NORMAL = 0  # type: Final
FUNC_STATICMETHOD = 1  # type: Final
FUNC_CLASSMETHOD = 2  # type: Final


class FuncDecl:
    """Declaration of a function: name, location, signature and kind."""

    def __init__(self,
                 name: str,
                 class_name: Optional[str],
                 module_name: str,
                 sig: FuncSignature,
                 kind: int = FUNC_NORMAL,
                 is_prop_setter: bool = False,
                 is_prop_getter: bool = False) -> None:
        self.name = name
        self.class_name = class_name
        self.module_name = module_name
        self.sig = sig
        self.kind = kind
        self.is_prop_setter = is_prop_setter
        self.is_prop_getter = is_prop_getter
        if class_name is None:
            self.bound_sig = None  # type: Optional[FuncSignature]
        else:
            # The bound signature drops the receiver (except for static methods).
            if kind == FUNC_STATICMETHOD:
                self.bound_sig = sig
            else:
                self.bound_sig = FuncSignature(sig.args[1:], sig.ret_type)

    @staticmethod
    def compute_shortname(class_name: Optional[str], name: str) -> str:
        return class_name + '.' + name if class_name else name

    @property
    def shortname(self) -> str:
        return FuncDecl.compute_shortname(self.class_name, self.name)

    @property
    def fullname(self) -> str:
        return self.module_name + '.' + self.shortname

    def cname(self, names: NameGenerator) -> str:
        return names.private_name(self.module_name, self.shortname)

    def serialize(self) -> JsonDict:
        return {
            'name': self.name,
            'class_name': self.class_name,
            'module_name': self.module_name,
            'sig': self.sig.serialize(),
            'kind': self.kind,
            'is_prop_setter': self.is_prop_setter,
            'is_prop_getter': self.is_prop_getter,
        }

    @staticmethod
    def get_name_from_json(f: JsonDict) -> str:
        return f['module_name'] + '.' + FuncDecl.compute_shortname(f['class_name'], f['name'])

    @classmethod
    def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'FuncDecl':
        return FuncDecl(
            data['name'],
            data['class_name'],
            data['module_name'],
            FuncSignature.deserialize(data['sig'], ctx),
            data['kind'],
            data['is_prop_setter'],
            data['is_prop_getter'],
        )


class FuncIR:
    """Intermediate representation of a function with contextual information."""

    def __init__(self,
                 decl: FuncDecl,
                 blocks: List[BasicBlock],
                 env: Environment,
                 line: int = -1,
                 traceback_name: Optional[str] = None) -> None:
        self.decl = decl
        self.blocks = blocks
        self.env = env
        self.line = line
        # The name that should be displayed for tracebacks that
        # include this function. Function will be omitted from
        # tracebacks if None.
        self.traceback_name = traceback_name

    # Convenience accessors that delegate to the declaration.

    @property
    def args(self) -> Sequence[RuntimeArg]:
        return self.decl.sig.args

    @property
    def ret_type(self) -> RType:
        return self.decl.sig.ret_type

    @property
    def class_name(self) -> Optional[str]:
        return self.decl.class_name

    @property
    def sig(self) -> FuncSignature:
        return self.decl.sig

    @property
    def name(self) -> str:
        return self.decl.name

    @property
    def fullname(self) -> str:
        return self.decl.fullname

    def cname(self, names: NameGenerator) -> str:
        return self.decl.cname(names)

    def __str__(self) -> str:
        return '\n'.join(format_func(self))

    def serialize(self) -> JsonDict:
        # We don't include blocks or env in the serialized version
        return {
            'decl': self.decl.serialize(),
            'line': self.line,
            'traceback_name': self.traceback_name,
        }

    @classmethod
    def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'FuncIR':
        # Blocks and environment are intentionally left empty (not serialized).
        return FuncIR(
            FuncDecl.deserialize(data['decl'], ctx),
            [],
            Environment(),
            data['line'],
            data['traceback_name'],
        )


INVALID_FUNC_DEF = FuncDef('', [], Block([]))  # type: Final


# Some notes on the vtable layout: Each concrete class has a vtable
# that contains function pointers for its methods. So that subclasses
# may be efficiently used when their parent class is expected, the
# layout of child vtables must be an extension of their base class's
# vtable.
#
# This makes multiple inheritance tricky, since obviously we cannot be
# an extension of multiple parent classes. We solve this by requiring
# all but one parent to be "traits", which we can operate on in a
# somewhat less efficient way. For each trait implemented by a class,
# we generate a separate vtable for the methods in that trait.
# We then store an array of (trait type, trait vtable) pointers alongside
# a class's main vtable. When we want to call a trait method, we
# (at runtime!) search the array of trait vtables to find the correct one,
# then call through it.
# Trait vtables additionally need entries for attribute getters and setters, # since they can't always be in the same location. # # To keep down the number of indirections necessary, we store the # array of trait vtables in the memory *before* the class vtable, and # search it backwards. (This is a trick we can only do once---there # are only two directions to store data in---but I don't think we'll # need it again.) # There are some tricks we could try in the future to store the trait # vtables inline in the trait table (which would cut down one indirection), # but this seems good enough for now. # # As an example: # Imagine that we have a class B that inherits from a concrete class A # and traits T1 and T2, and that A has methods foo() and # bar() and B overrides bar() with a more specific type. # Then B's vtable will look something like: # # T1 type object # ptr to B's T1 trait vtable # T2 type object # ptr to B's T2 trait vtable # -> | A.foo # | Glue function that converts between A.bar's type and B.bar # B.bar # B.baz # # The arrow points to the "start" of the vtable (what vtable pointers # point to) and the bars indicate which parts correspond to the parent # class A's vtable layout. # # Classes that allow interpreted code to subclass them also have a # "shadow vtable" that contains implementations that delegate to # making a pycall, so that overridden methods in interpreted children # will be called. (A better strategy could dynamically generate these # vtables based on which methods are overridden in the children.) # Descriptions of method and attribute entries in class vtables. # The 'cls' field is the class that the method/attr was defined in, # which might be a parent class. # The 'shadow_method', if present, contains the method that should be # placed in the class's shadow vtable (if it has one). 
VTableMethod = NamedTuple(
    'VTableMethod', [('cls', 'ClassIR'),
                     ('name', str),
                     ('method', FuncIR),
                     ('shadow_method', Optional[FuncIR])])


VTableAttr = NamedTuple(
    'VTableAttr', [('cls', 'ClassIR'),
                   ('name', str),
                   ('is_setter', bool)])


VTableEntry = Union[VTableMethod, VTableAttr]
VTableEntries = List[VTableEntry]


def serialize_vtable_entry(entry: VTableEntry) -> JsonDict:
    """Serialize one vtable entry; '.class' tags which variant it is."""
    if isinstance(entry, VTableMethod):
        return {
            '.class': 'VTableMethod',
            'cls': entry.cls.fullname,
            'name': entry.name,
            # Methods are serialized as fullname references, not inline.
            'method': entry.method.decl.fullname,
            'shadow_method': entry.shadow_method.decl.fullname if entry.shadow_method else None,
        }
    else:
        return {
            '.class': 'VTableAttr',
            'cls': entry.cls.fullname,
            'name': entry.name,
            'is_setter': entry.is_setter,
        }


def serialize_vtable(vtable: VTableEntries) -> List[JsonDict]:
    return [serialize_vtable_entry(v) for v in vtable]


def deserialize_vtable_entry(data: JsonDict, ctx: DeserMaps) -> VTableEntry:
    """Inverse of serialize_vtable_entry; resolves name references via ctx."""
    if data['.class'] == 'VTableMethod':
        return VTableMethod(
            ctx.classes[data['cls']], data['name'], ctx.functions[data['method']],
            ctx.functions[data['shadow_method']] if data['shadow_method'] else None)
    elif data['.class'] == 'VTableAttr':
        return VTableAttr(ctx.classes[data['cls']], data['name'], data['is_setter'])
    assert False, "Bogus vtable .class: %s" % data['.class']


def deserialize_vtable(data: List[JsonDict], ctx: DeserMaps) -> VTableEntries:
    return [deserialize_vtable_entry(x, ctx) for x in data]


class ClassIR:
    """Intermediate representation of a class.

    This also describes the runtime structure of native instances.
    """

    def __init__(self, name: str, module_name: str, is_trait: bool = False,
                 is_generated: bool = False, is_abstract: bool = False,
                 is_ext_class: bool = True) -> None:
        self.name = name
        self.module_name = module_name
        self.is_trait = is_trait
        self.is_generated = is_generated
        self.is_abstract = is_abstract
        self.is_ext_class = is_ext_class
        # An augmented class has additional methods separate from what mypyc generates.
# Right now the only one is dataclasses. self.is_augmented = False self.inherits_python = False self.has_dict = False # Do we allow interpreted subclasses? Derived from a mypyc_attr. self.allow_interpreted_subclasses = False # If this a subclass of some built-in python class, the name # of the object for that class. We currently only support this # in a few ad-hoc cases. self.builtin_base = None # type: Optional[str] # Default empty ctor self.ctor = FuncDecl(name, None, module_name, FuncSignature([], RInstance(self))) self.attributes = OrderedDict() # type: OrderedDict[str, RType] # We populate method_types with the signatures of every method before # we generate methods, and we rely on this information being present. self.method_decls = OrderedDict() # type: OrderedDict[str, FuncDecl] # Map of methods that are actually present in an extension class self.methods = OrderedDict() # type: OrderedDict[str, FuncIR] # Glue methods for boxing/unboxing when a class changes the type # while overriding a method. Maps from (parent class overrided, method) # to IR of glue method. self.glue_methods = OrderedDict() # type: Dict[Tuple[ClassIR, str], FuncIR] # Properties are accessed like attributes, but have behavior like method calls. # They don't belong in the methods dictionary, since we don't want to expose them to # Python's method API. But we want to put them into our own vtable as methods, so that # they are properly handled and overridden. The property dictionary values are a tuple # containing a property getter and an optional property setter. self.properties = OrderedDict() # type: OrderedDict[str, Tuple[FuncIR, Optional[FuncIR]]] # We generate these in prepare_class_def so that we have access to them when generating # other methods and properties that rely on these types. 
        self.property_types = OrderedDict()  # type: OrderedDict[str, RType]

        self.vtable = None  # type: Optional[Dict[str, int]]
        self.vtable_entries = []  # type: VTableEntries
        self.trait_vtables = OrderedDict()  # type: OrderedDict[ClassIR, VTableEntries]
        # N.B: base might not actually quite be the direct base.
        # It is the nearest concrete base, but we allow a trait in between.
        self.base = None  # type: Optional[ClassIR]
        self.traits = []  # type: List[ClassIR]
        # Supply a working mro for most generated classes. Real classes will need to
        # fix it up.
        self.mro = [self]  # type: List[ClassIR]
        # base_mro is the chain of concrete (non-trait) ancestors
        self.base_mro = [self]  # type: List[ClassIR]

        # Direct subclasses of this class (use subclasses() to also incude non-direct ones)
        # None if separate compilation prevents this from working
        self.children = []  # type: Optional[List[ClassIR]]

    @property
    def fullname(self) -> str:
        return "{}.{}".format(self.module_name, self.name)

    def real_base(self) -> Optional['ClassIR']:
        """Return the actual concrete base class, if there is one."""
        if len(self.mro) > 1 and not self.mro[1].is_trait:
            return self.mro[1]
        return None

    def vtable_entry(self, name: str) -> int:
        assert self.vtable is not None, "vtable not computed yet"
        assert name in self.vtable, '%r has no attribute %r' % (self.name, name)
        return self.vtable[name]

    def attr_details(self, name: str) -> Tuple[RType, 'ClassIR']:
        # Search the MRO; attributes and properties share the name space.
        for ir in self.mro:
            if name in ir.attributes:
                return ir.attributes[name], ir
            if name in ir.property_types:
                return ir.property_types[name], ir
        raise KeyError('%r has no attribute %r' % (self.name, name))

    def attr_type(self, name: str) -> RType:
        return self.attr_details(name)[0]

    def method_decl(self, name: str) -> FuncDecl:
        for ir in self.mro:
            if name in ir.method_decls:
                return ir.method_decls[name]
        raise KeyError('%r has no attribute %r' % (self.name, name))

    def method_sig(self, name: str) -> FuncSignature:
        return self.method_decl(name).sig

    def has_method(self, name: str) -> bool:
        try:
            self.method_decl(name)
        except KeyError:
            return False
        return True

    def is_method_final(self, name: str) -> bool:
        # A method is final if no (transitive) subclass overrides it.
        subs = self.subclasses()
        if subs is None:
            # TODO: Look at the final attribute!
            return False

        if self.has_method(name):
            method_decl = self.method_decl(name)
            for subc in subs:
                if subc.method_decl(name) != method_decl:
                    return False
            return True
        else:
            return not any(subc.has_method(name) for subc in subs)

    def has_attr(self, name: str) -> bool:
        try:
            self.attr_type(name)
        except KeyError:
            return False
        return True

    def name_prefix(self, names: NameGenerator) -> str:
        return names.private_name(self.module_name, self.name)

    def struct_name(self, names: NameGenerator) -> str:
        return '{}Object'.format(exported_name(self.fullname))

    def get_method_and_class(self, name: str) -> Optional[Tuple[FuncIR, 'ClassIR']]:
        for ir in self.mro:
            if name in ir.methods:
                return ir.methods[name], ir

        return None

    def get_method(self, name: str) -> Optional[FuncIR]:
        res = self.get_method_and_class(name)
        return res[0] if res else None

    def subclasses(self) -> Optional[Set['ClassIR']]:
        """Return all subclassses of this class, both direct and indirect.

        Return None if it is impossible to identify all subclasses, for example
        because we are performing separate compilation.
        """
        if self.children is None or self.allow_interpreted_subclasses:
            return None
        result = set(self.children)
        for child in self.children:
            if child.children:
                child_subs = child.subclasses()
                if child_subs is None:
                    return None
                result.update(child_subs)
        return result

    def concrete_subclasses(self) -> Optional[List['ClassIR']]:
        """Return all concrete (i.e. non-trait and non-abstract) subclasses.

        Include both direct and indirect subclasses. Place classes with no children first.
        """
        subs = self.subclasses()
        if subs is None:
            return None
        concrete = {c for c in subs if not (c.is_trait or c.is_abstract)}
        # We place classes with no children first because they are more likely
        # to appear in various isinstance() checks.
        # We then sort leafs by name
        # to get stable order.
        return sorted(concrete, key=lambda c: (len(c.children or []), c.name))

    def serialize(self) -> JsonDict:
        return {
            'name': self.name,
            'module_name': self.module_name,
            'is_trait': self.is_trait,
            'is_ext_class': self.is_ext_class,
            'is_abstract': self.is_abstract,
            'is_generated': self.is_generated,
            'is_augmented': self.is_augmented,
            'inherits_python': self.inherits_python,
            'has_dict': self.has_dict,
            'allow_interpreted_subclasses': self.allow_interpreted_subclasses,
            'builtin_base': self.builtin_base,
            'ctor': self.ctor.serialize(),
            # We serialize dicts as lists to ensure order is preserved
            'attributes': [(k, t.serialize()) for k, t in self.attributes.items()],
            # We try to serialize a name reference, but if the decl isn't in methods
            # then we can't be sure that will work so we serialize the whole decl.
            'method_decls': [(k, d.fullname if k in self.methods else d.serialize())
                             for k, d in self.method_decls.items()],
            # We serialize method fullnames out and put methods in a separate dict
            'methods': [(k, m.fullname) for k, m in self.methods.items()],
            'glue_methods': [
                ((cir.fullname, k), m.fullname)
                for (cir, k), m in self.glue_methods.items()
            ],
            # We serialize properties and property_types separately out of an
            # abundance of caution about preserving dict ordering...
'property_types': [(k, t.serialize()) for k, t in self.property_types.items()], 'properties': list(self.properties), 'vtable': self.vtable, 'vtable_entries': serialize_vtable(self.vtable_entries), 'trait_vtables': [ (cir.fullname, serialize_vtable(v)) for cir, v in self.trait_vtables.items() ], # References to class IRs are all just names 'base': self.base.fullname if self.base else None, 'traits': [cir.fullname for cir in self.traits], 'mro': [cir.fullname for cir in self.mro], 'base_mro': [cir.fullname for cir in self.base_mro], 'children': [ cir.fullname for cir in self.children ] if self.children is not None else None, } @classmethod def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'ClassIR': fullname = data['module_name'] + '.' + data['name'] assert fullname in ctx.classes, "Class %s not in deser class map" % fullname ir = ctx.classes[fullname] ir.is_trait = data['is_trait'] ir.is_generated = data['is_generated'] ir.is_abstract = data['is_abstract'] ir.is_ext_class = data['is_ext_class'] ir.is_augmented = data['is_augmented'] ir.inherits_python = data['inherits_python'] ir.has_dict = data['has_dict'] ir.allow_interpreted_subclasses = data['allow_interpreted_subclasses'] ir.builtin_base = data['builtin_base'] ir.ctor = FuncDecl.deserialize(data['ctor'], ctx) ir.attributes = OrderedDict( (k, deserialize_type(t, ctx)) for k, t in data['attributes'] ) ir.method_decls = OrderedDict((k, ctx.functions[v].decl if isinstance(v, str) else FuncDecl.deserialize(v, ctx)) for k, v in data['method_decls']) ir.methods = OrderedDict((k, ctx.functions[v]) for k, v in data['methods']) ir.glue_methods = OrderedDict( ((ctx.classes[c], k), ctx.functions[v]) for (c, k), v in data['glue_methods'] ) ir.property_types = OrderedDict( (k, deserialize_type(t, ctx)) for k, t in data['property_types'] ) ir.properties = OrderedDict( (k, (ir.methods[k], ir.methods.get(PROPSET_PREFIX + k))) for k in data['properties'] ) ir.vtable = data['vtable'] ir.vtable_entries = 
deserialize_vtable(data['vtable_entries'], ctx) ir.trait_vtables = OrderedDict( (ctx.classes[k], deserialize_vtable(v, ctx)) for k, v in data['trait_vtables'] ) base = data['base'] ir.base = ctx.classes[base] if base else None ir.traits = [ctx.classes[s] for s in data['traits']] ir.mro = [ctx.classes[s] for s in data['mro']] ir.base_mro = [ctx.classes[s] for s in data['base_mro']] ir.children = data['children'] and [ctx.classes[s] for s in data['children']] return ir class NonExtClassInfo: """Information needed to construct a non-extension class. Includes the class dictionary, a tuple of base classes, the class annotations dictionary, and the metaclass. """ def __init__(self, dict: Value, bases: Value, anns: Value, metaclass: Value) -> None: self.dict = dict self.bases = bases self.anns = anns self.metaclass = metaclass LiteralsMap = Dict[Tuple[Type[object], Union[int, float, str, bytes, complex]], str] class ModuleIR: """Intermediate representation of a module.""" def __init__( self, fullname: str, imports: List[str], functions: List[FuncIR], classes: List[ClassIR], final_names: List[Tuple[str, RType]]) -> None: self.fullname = fullname self.imports = imports[:] self.functions = functions self.classes = classes self.final_names = final_names def serialize(self) -> JsonDict: return { 'fullname': self.fullname, 'imports': self.imports, 'functions': [f.serialize() for f in self.functions], 'classes': [c.serialize() for c in self.classes], 'final_names': [(k, t.serialize()) for k, t in self.final_names], } @classmethod def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'ModuleIR': return ModuleIR( data['fullname'], data['imports'], [ctx.functions[FuncDecl.get_name_from_json(f['decl'])] for f in data['functions']], [ClassIR.deserialize(c, ctx) for c in data['classes']], [(k, deserialize_type(t, ctx)) for k, t in data['final_names']], ) def deserialize_modules(data: Dict[str, JsonDict], ctx: DeserMaps) -> Dict[str, ModuleIR]: """Deserialize a collection of modules. 
The modules can contain dependencies on each other. Arguments: data: A dict containing the modules to deserialize. ctx: The deserialization maps to use and to populate. They are populated with information from the deserialized modules and as a precondition must have been populated by deserializing any dependencies of the modules being deserialized (outside of dependencies between the modules themselves). Returns a map containing the deserialized modules. """ for mod in data.values(): # First create ClassIRs for every class so that we can construct types and whatnot for cls in mod['classes']: ir = ClassIR(cls['name'], cls['module_name']) assert ir.fullname not in ctx.classes, "Class %s already in map" % ir.fullname ctx.classes[ir.fullname] = ir for mod in data.values(): # Then deserialize all of the functions so that methods are available # to the class deserialization. for method in mod['functions']: func = FuncIR.deserialize(method, ctx) assert func.decl.fullname not in ctx.functions, ( "Method %s already in map" % func.decl.fullname) ctx.functions[func.decl.fullname] = func return {k: ModuleIR.deserialize(v, ctx) for k, v in data.items()} # ModulesIRs should also always be an *OrderedDict*, but if we # declared it that way we would need to put it in quotes everywhere... 
ModuleIRs = Dict[str, ModuleIR]


@trait
class OpVisitor(Generic[T]):
    """Generic visitor over ops (uses the visitor design pattern)."""

    @abstractmethod
    def visit_goto(self, op: Goto) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_branch(self, op: Branch) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_return(self, op: Return) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_unreachable(self, op: Unreachable) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_primitive_op(self, op: PrimitiveOp) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_assign(self, op: Assign) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_load_int(self, op: LoadInt) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_load_error_value(self, op: LoadErrorValue) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_get_attr(self, op: GetAttr) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_set_attr(self, op: SetAttr) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_load_static(self, op: LoadStatic) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_init_static(self, op: InitStatic) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_tuple_get(self, op: TupleGet) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_tuple_set(self, op: TupleSet) -> T:
        raise NotImplementedError

    # NOTE(review): deliberately not abstract, unlike the other visit methods —
    # presumably because IncRef/DecRef only exist after refcount insertion, so
    # earlier passes never see them. Confirm before relying on this.
    def visit_inc_ref(self, op: IncRef) -> T:
        raise NotImplementedError

    def visit_dec_ref(self, op: DecRef) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_call(self, op: Call) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_method_call(self, op: MethodCall) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_cast(self, op: Cast) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_box(self, op: Box) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_unbox(self, op: Unbox) -> T:
        raise NotImplementedError

    @abstractmethod
    def visit_raise_standard_error(self, op: RaiseStandardError) -> T:
        raise NotImplementedError


def format_blocks(blocks: List[BasicBlock],
                  env: Environment) -> List[str]:
    """Format a list of basic blocks into human-readable IR listing lines."""
    # First label all of the blocks
    for i, block in enumerate(blocks):
        block.label = i

    # Map each error handler block to the blocks it handles for.
    handler_map = {}  # type: Dict[BasicBlock, List[BasicBlock]]
    for b in blocks:
        if b.error_handler:
            handler_map.setdefault(b.error_handler, []).append(b)

    lines = []
    for i, block in enumerate(blocks):
        # (Fixed: removed a stray no-op statement `i == len(blocks) - 1` here,
        # whose result was discarded.)
        handler_msg = ''
        if block in handler_map:
            labels = sorted(env.format('%l', b.label) for b in handler_map[block])
            handler_msg = ' (handler for {})'.format(', '.join(labels))

        lines.append(env.format('%l:%s', block.label, handler_msg))
        ops = block.ops
        if (isinstance(ops[-1], Goto) and i + 1 < len(blocks)
                and ops[-1].label == blocks[i + 1]):
            # Hide the last goto if it just goes to the next basic block.
            ops = ops[:-1]
        for op in ops:
            line = '    ' + op.to_str(env)
            lines.append(line)

        if not isinstance(block.ops[-1], (Goto, Branch, Return, Unreachable)):
            # Each basic block needs to exit somewhere.
            lines.append('    [MISSING BLOCK EXIT OPCODE]')
    return lines


def format_func(fn: FuncIR) -> List[str]:
    """Format a function IR (header, environment, blocks) as listing lines."""
    lines = []
    cls_prefix = fn.class_name + '.' if fn.class_name else ''
    lines.append('def {}{}({}):'.format(cls_prefix, fn.name,
                                        ', '.join(arg.name for arg in fn.args)))
    for line in fn.env.to_lines():
        lines.append('    ' + line)
    code = format_blocks(fn.blocks, fn.env)
    lines.extend(code)
    return lines


def format_modules(modules: ModuleIRs) -> List[str]:
    """Format every function of every module as one flat listing."""
    ops = []
    for module in modules.values():
        for fn in module.functions:
            ops.extend(format_func(fn))
            ops.append('')
    return ops


def all_concrete_classes(class_ir: ClassIR) -> Optional[List[ClassIR]]:
    """Return all concrete classes among the class itself and its subclasses."""
    concrete = class_ir.concrete_subclasses()
    if concrete is None:
        return None
    if not (class_ir.is_abstract or class_ir.is_trait):
        concrete.append(class_ir)
    return concrete


def short_name(name: str) -> str:
    """Strip a leading 'builtins.' prefix from a fully qualified name."""
    if name.startswith('builtins.'):
        return name[9:]
    return name


# Import ops_primitive that will set up global primitives tables.
import mypyc.ops_primitive  # noqa
mypy-0.761/mypyc/ops_dict.py0000644€tŠÔÚ€2›s®0000000732313576752246022244 0ustar jukkaDROPBOX\Domain Users00000000000000
"""Primitive dict ops."""

from typing import List

from mypyc.ops import (
    EmitterInterface, dict_rprimitive, object_rprimitive, bool_rprimitive, int_rprimitive,
    ERR_FALSE, ERR_MAGIC, ERR_NEVER,
)
from mypyc.ops_primitive import (
    name_ref_op, method_op, binary_op, func_op, custom_op,
    simple_emit, negative_int_emit, call_emit, call_negative_bool_emit,
)


# Reference to the 'dict' type object itself (borrowed, never fails).
name_ref_op('builtins.dict',
            result_type=object_rprimitive,
            error_kind=ERR_NEVER,
            emit=simple_emit('{dest} = (PyObject *)&PyDict_Type;'),
            is_borrowed=True)

dict_get_item_op = method_op(
    name='__getitem__',
    arg_types=[dict_rprimitive, object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyDict_GetItem'))


dict_set_item_op = method_op(
    name='__setitem__',
    arg_types=[dict_rprimitive, object_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('CPyDict_SetItem'))


# Note: argument order is (item, dict); PyDict_Contains takes (dict, item).
binary_op(op='in',
          arg_types=[object_rprimitive, dict_rprimitive],
          result_type=bool_rprimitive,
          error_kind=ERR_MAGIC,
          format_str='{dest} = {args[0]} in {args[1]} :: dict',
          emit=negative_int_emit('{dest} = PyDict_Contains({args[1]}, {args[0]});'))

# dict-to-dict update (higher priority than the generic object variant below).
dict_update_op = method_op(
    name='update',
    arg_types=[dict_rprimitive, dict_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('CPyDict_Update'),
    priority=2)

# Variant used when evaluating a dict display ({**a, **b}).
dict_update_in_display_op = custom_op(
    arg_types=[dict_rprimitive, dict_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('CPyDict_UpdateInDisplay'),
    format_str='{dest} = {args[0]}.update({args[1]}) (display) :: dict',)

method_op(
    name='update',
    arg_types=[dict_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=simple_emit('{dest} = CPyDict_UpdateFromAny({args[0]}, {args[1]}) != -1;'))

method_op(
    name='get',
    arg_types=[dict_rprimitive, object_rprimitive, object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyDict_Get'))

# One-argument get() defaults the missing value to None.
method_op(
    name='get',
    arg_types=[dict_rprimitive, object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=simple_emit('{dest} = CPyDict_Get({args[0]}, {args[1]}, Py_None);'))


def emit_new_dict(emitter: EmitterInterface, args: List[str], dest: str) -> None:
    # Empty display uses the plain constructor; otherwise CPyDict_Build
    # takes the number of key/value pairs followed by the flattened args.
    if not args:
        emitter.emit_line('%s = PyDict_New();' % (dest,))
        return

    emitter.emit_line('%s = CPyDict_Build(%s, %s);' % (dest, len(args) // 2, ', '.join(args)))


# Construct a dict from a literal display: {k1: v1, k2: v2, ...}.
new_dict_op = custom_op(
    name='builtins.dict',
    arg_types=[object_rprimitive],
    is_var_arg=True,
    result_type=dict_rprimitive,
    format_str='{dest} = {{{colon_args}}}',
    error_kind=ERR_MAGIC,
    emit=emit_new_dict)

func_op(
    name='builtins.dict',
    arg_types=[dict_rprimitive],
    result_type=dict_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PyDict_Copy'),
    priority=2)

func_op(
    name='builtins.dict',
    arg_types=[object_rprimitive],
    result_type=dict_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyDict_FromAny'))


def emit_len(emitter: EmitterInterface, args: List[str], dest: str) -> None:
    # PyDict_Size returns a C Py_ssize_t; convert it to a tagged short int.
    temp = emitter.temp_name()
    emitter.emit_declaration('Py_ssize_t %s;' % temp)
    emitter.emit_line('%s = PyDict_Size(%s);' % (temp, args[0]))
    emitter.emit_line('%s = CPyTagged_ShortFromSsize_t(%s);' % (dest, temp))


func_op(name='builtins.len',
        arg_types=[dict_rprimitive],
        result_type=int_rprimitive,
        error_kind=ERR_NEVER,
        emit=emit_len)
mypy-0.761/mypyc/ops_exc.py0000644€tŠÔÚ€2›s®0000000700513576752246022075 0ustar jukkaDROPBOX\Domain Users00000000000000
"""Exception-related primitive ops."""

from mypyc.ops import (
    bool_rprimitive, object_rprimitive, void_rtype, exc_rtuple,
    ERR_NEVER, ERR_FALSE
)
from mypyc.ops_primitive import (
    simple_emit, custom_op,
)


# TODO: Making this raise conditionally is kind of hokey.
# Raise the given exception object; always "fails" ({dest} = 0) so the
# error-propagation machinery takes over.
raise_exception_op = custom_op(
    arg_types=[object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    format_str='raise_exception({args[0]}); {dest} = 0',
    emit=simple_emit('CPy_Raise({args[0]}); {dest} = 0;'))

# Raise StopIteration carrying a return value (for generator returns).
set_stop_iteration_value = custom_op(
    arg_types=[object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    format_str='set_stop_iteration_value({args[0]}); {dest} = 0',
    emit=simple_emit('CPyGen_SetStopIterationValue({args[0]}); {dest} = 0;'))

# Raise an exception with an explicit (type, value, traceback) triple.
raise_exception_with_tb_op = custom_op(
    arg_types=[object_rprimitive, object_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    format_str='raise_exception_with_tb({args[0]}, {args[1]}, {args[2]}); {dest} = 0',
    emit=simple_emit('CPyErr_SetObjectAndTraceback({args[0]}, {args[1]}, {args[2]}); {dest} = 0;'))

# Re-raise the currently handled exception (bare `raise`).
reraise_exception_op = custom_op(
    arg_types=[],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    format_str='reraise_exc; {dest} = 0',
    emit=simple_emit('CPy_Reraise(); {dest} = 0;'))

# True iff no exception is currently set.
no_err_occurred_op = custom_op(
    arg_types=[],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    format_str='{dest} = no_err_occurred',
    emit=simple_emit('{dest} = (PyErr_Occurred() == NULL);'))

# Debug assertion that an exception is pending.
# NOTE(review): the identifier misspells "occurred"; kept as-is since
# renaming it would break external references.
assert_err_occured_op = custom_op(
    arg_types=[],
    result_type=void_rtype,
    error_kind=ERR_NEVER,
    format_str='assert_err_occurred',
    emit=simple_emit('assert(PyErr_Occurred() != NULL && "failure w/o err!");'))

# Unconditionally signal failure so the current exception keeps propagating.
keep_propagating_op = custom_op(
    arg_types=[],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    format_str='{dest} = keep_propagating',
    emit=simple_emit('{dest} = 0;'))

# Catches a propagating exception and makes it the "currently
# handled exception" (by sticking it into sys.exc_info()). Returns the
# exception that was previously being handled, which must be restored
# later.
error_catch_op = custom_op(
    arg_types=[],
    result_type=exc_rtuple,
    error_kind=ERR_NEVER,
    format_str='{dest} = error_catch',
    emit=simple_emit('CPy_CatchError(&{dest}.f0, &{dest}.f1, &{dest}.f2);'))

# Restore an old "currently handled exception" returned from
# error_catch (by sticking it into sys.exc_info())
restore_exc_info_op = custom_op(
    arg_types=[exc_rtuple],
    result_type=void_rtype,
    error_kind=ERR_NEVER,
    format_str='restore_exc_info {args[0]}',
    emit=simple_emit('CPy_RestoreExcInfo({args[0]}.f0, {args[0]}.f1, {args[0]}.f2);'))

# Checks whether the exception currently being handled matches a particular type.
exc_matches_op = custom_op(
    arg_types=[object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_NEVER,
    format_str='{dest} = exc_matches {args[0]}',
    emit=simple_emit('{dest} = CPy_ExceptionMatches({args[0]});'))

# Get the value of the exception currently being handled.
get_exc_value_op = custom_op(
    arg_types=[],
    result_type=object_rprimitive,
    error_kind=ERR_NEVER,
    format_str='{dest} = get_exc_value',
    emit=simple_emit('{dest} = CPy_GetExcValue();'))

# Get the full (type, value, traceback) triple of the handled exception.
get_exc_info_op = custom_op(
    arg_types=[],
    result_type=exc_rtuple,
    error_kind=ERR_NEVER,
    format_str='{dest} = get_exc_info',
    emit=simple_emit('CPy_GetExcInfo(&{dest}.f0, &{dest}.f1, &{dest}.f2);'))
mypy-0.761/mypyc/ops_int.py0000644€tŠÔÚ€2›s®0000000707713576752246022115 0ustar jukkaDROPBOX\Domain Users00000000000000
from mypyc.ops import (
    int_rprimitive, bool_rprimitive, float_rprimitive, object_rprimitive,
    short_int_rprimitive, RType, OpDescription, ERR_NEVER, ERR_MAGIC,
)
from mypyc.ops_primitive import (
    name_ref_op, binary_op, unary_op, func_op, custom_op,
    simple_emit, call_emit,
)

# These int constructors produce object_rprimitives that then need to be unboxed
# I guess unboxing ourselves would save a check and branch though?
# For ordinary calls to int() we use a name_ref to the type
name_ref_op('builtins.int',
            result_type=object_rprimitive,
            error_kind=ERR_NEVER,
            emit=simple_emit('{dest} = (PyObject *)&PyLong_Type;'),
            is_borrowed=True)

# Convert from a float. We could do a bit better directly.
func_op(
    name='builtins.int',
    arg_types=[float_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyLong_FromFloat'),
    priority=1)


def int_binary_op(op: str, c_func_name: str,
                  result_type: RType = int_rprimitive,
                  error_kind: int = ERR_NEVER) -> None:
    """Register a tagged-int binary op backed by a CPyTagged_* C function."""
    binary_op(op=op,
              arg_types=[int_rprimitive, int_rprimitive],
              result_type=result_type,
              error_kind=error_kind,
              format_str='{dest} = {args[0]} %s {args[1]} :: int' % op,
              emit=call_emit(c_func_name))


def int_compare_op(op: str, c_func_name: str) -> None:
    """Register an int comparison plus a fast path for two short ints."""
    int_binary_op(op, c_func_name, bool_rprimitive)
    # Generate a straight compare if we know both sides are short
    binary_op(op=op,
              arg_types=[short_int_rprimitive, short_int_rprimitive],
              result_type=bool_rprimitive,
              error_kind=ERR_NEVER,
              format_str='{dest} = {args[0]} %s {args[1]} :: short_int' % op,
              emit=simple_emit(
                  '{dest} = (Py_ssize_t){args[0]} %s (Py_ssize_t){args[1]};' % op),
              priority=2)


int_binary_op('+', 'CPyTagged_Add')
int_binary_op('-', 'CPyTagged_Subtract')
int_binary_op('*', 'CPyTagged_Multiply')
# Divide and remainder we honestly propagate errors from because they
# can raise ZeroDivisionError
int_binary_op('//', 'CPyTagged_FloorDivide', error_kind=ERR_MAGIC)
int_binary_op('%', 'CPyTagged_Remainder', error_kind=ERR_MAGIC)

# this should work because assignment operators are parsed differently
# and the code in genops that handles it does the assignment
# regardless of whether or not the operator works in place anyway
int_binary_op('+=', 'CPyTagged_Add')
int_binary_op('-=', 'CPyTagged_Subtract')
int_binary_op('*=', 'CPyTagged_Multiply')
int_binary_op('//=', 'CPyTagged_FloorDivide', error_kind=ERR_MAGIC)
int_binary_op('%=', 'CPyTagged_Remainder', error_kind=ERR_MAGIC)

int_compare_op('==', 'CPyTagged_IsEq')
int_compare_op('!=', 'CPyTagged_IsNe')
int_compare_op('<', 'CPyTagged_IsLt')
int_compare_op('<=', 'CPyTagged_IsLe')
int_compare_op('>', 'CPyTagged_IsGt')
int_compare_op('>=', 'CPyTagged_IsGe')

# Add two short ints with no overflow check (caller must guarantee safety).
unsafe_short_add = custom_op(
    arg_types=[int_rprimitive, int_rprimitive],
    result_type=short_int_rprimitive,
    error_kind=ERR_NEVER,
    format_str='{dest} = {args[0]} + {args[1]} :: short_int',
    emit=simple_emit('{dest} = {args[0]} + {args[1]};'))


def int_unary_op(op: str, c_func_name: str) -> OpDescription:
    """Register a tagged-int unary op backed by a CPyTagged_* C function."""
    return unary_op(op=op,
                    arg_type=int_rprimitive,
                    result_type=int_rprimitive,
                    error_kind=ERR_NEVER,
                    format_str='{dest} = %s{args[0]} :: int' % op,
                    emit=call_emit(c_func_name))


int_neg_op = int_unary_op('-', 'CPyTagged_Negate')
mypy-0.761/mypyc/ops_list.py0000644€tŠÔÚ€2›s®0000001246013576752246022272 0ustar jukkaDROPBOX\Domain Users00000000000000
"""List primitive ops."""

from typing import List

from mypyc.ops import (
    int_rprimitive, short_int_rprimitive, list_rprimitive, object_rprimitive,
    bool_rprimitive, ERR_MAGIC, ERR_NEVER, ERR_FALSE, EmitterInterface,
)
from mypyc.ops_primitive import (
    name_ref_op, binary_op, func_op, method_op, custom_op,
    simple_emit, call_emit, call_negative_bool_emit,
)


# Reference to the 'list' type object itself (borrowed, never fails).
name_ref_op('builtins.list',
            result_type=object_rprimitive,
            error_kind=ERR_NEVER,
            emit=simple_emit('{dest} = (PyObject *)&PyList_Type;'),
            is_borrowed=True)

to_list = func_op(
    name='builtins.list',
    arg_types=[object_rprimitive],
    result_type=list_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PySequence_List'))


def emit_new(emitter: EmitterInterface, args: List[str], dest: str) -> None:
    # TODO: This would be better split into multiple smaller ops.
    emitter.emit_line('%s = PyList_New(%d); ' % (dest, len(args)))
    emitter.emit_line('if (likely(%s != NULL)) {' % dest)
    for i, arg in enumerate(args):
        # PyList_SET_ITEM steals the reference (op is registered with steals=True).
        emitter.emit_line('PyList_SET_ITEM(%s, %s, %s);' % (dest, i, arg))
    emitter.emit_line('}')


# Construct a list from a literal display: [e1, e2, ...].
new_list_op = custom_op(arg_types=[object_rprimitive],
                        result_type=list_rprimitive,
                        is_var_arg=True,
                        error_kind=ERR_MAGIC,
                        steals=True,
                        format_str='{dest} = [{comma_args}]',
                        emit=emit_new)

list_get_item_op = method_op(
    name='__getitem__',
    arg_types=[list_rprimitive, int_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyList_GetItem'))

# Version with no int bounds check for when it is known to be short
method_op(
    name='__getitem__',
    arg_types=[list_rprimitive, short_int_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyList_GetItemShort'),
    priority=2)

# This is unsafe because it assumes that the index is a non-negative short integer
# that is in-bounds for the list.
list_get_item_unsafe_op = custom_op(
    name='__getitem__',
    arg_types=[list_rprimitive, short_int_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_NEVER,
    format_str='{dest} = {args[0]}[{args[1]}] :: unsafe list',
    emit=simple_emit('{dest} = CPyList_GetItemUnsafe({args[0]}, {args[1]});'))

list_set_item_op = method_op(
    name='__setitem__',
    arg_types=[list_rprimitive, int_rprimitive, object_rprimitive],
    steals=[False, False, True],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_emit('CPyList_SetItem'))

list_append_op = method_op(
    name='append',
    arg_types=[list_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('PyList_Append'))

list_extend_op = method_op(
    name='extend',
    arg_types=[list_rprimitive, object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=simple_emit('{dest} = _PyList_Extend((PyListObject *) {args[0]}, {args[1]});'))

list_pop_last = method_op(
    name='pop',
    arg_types=[list_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyList_PopLast'))

list_pop = method_op(
    name='pop',
    arg_types=[list_rprimitive, int_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyList_Pop'))

method_op(
    name='count',
    arg_types=[list_rprimitive, object_rprimitive],
    result_type=short_int_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyList_Count'))


def emit_multiply_helper(emitter: EmitterInterface, dest: str, lst: str, num: str) -> None:
    # Convert the tagged int to Py_ssize_t, then delegate to PySequence_Repeat.
    temp = emitter.temp_name()
    emitter.emit_declaration('Py_ssize_t %s;' % temp)
    emitter.emit_lines(
        "%s = CPyTagged_AsSsize_t(%s);" % (temp, num),
        "if (%s == -1 && PyErr_Occurred())" % temp,
        "    CPyError_OutOfMemory();",
        "%s = PySequence_Repeat(%s, %s);" % (dest, lst, temp))


def emit_multiply(emitter: EmitterInterface, args: List[str], dest: str) -> None:
    # list * int
    emit_multiply_helper(emitter, dest, args[0], args[1])


def emit_multiply_reversed(emitter: EmitterInterface, args: List[str], dest: str) -> None:
    # int * list
    emit_multiply_helper(emitter, dest, args[1], args[0])


binary_op(op='*',
          arg_types=[list_rprimitive, int_rprimitive],
          result_type=list_rprimitive,
          error_kind=ERR_MAGIC,
          format_str='{dest} = {args[0]} * {args[1]} :: list',
          emit=emit_multiply)

binary_op(op='*',
          arg_types=[int_rprimitive, list_rprimitive],
          result_type=list_rprimitive,
          error_kind=ERR_MAGIC,
          format_str='{dest} = {args[0]} * {args[1]} :: list',
          emit=emit_multiply_reversed)


def emit_len(emitter: EmitterInterface, args: List[str], dest: str) -> None:
    # PyList_GET_SIZE is a macro read; convert the result to a tagged short int.
    temp = emitter.temp_name()
    emitter.emit_declaration('Py_ssize_t %s;' % temp)
    emitter.emit_line('%s = PyList_GET_SIZE(%s);' % (temp, args[0]))
    emitter.emit_line('%s = CPyTagged_ShortFromSsize_t(%s);' % (dest, temp))


list_len_op = func_op(name='builtins.len',
                      arg_types=[list_rprimitive],
                      result_type=short_int_rprimitive,
                      error_kind=ERR_NEVER,
                      emit=emit_len)
mypy-0.761/mypyc/ops_misc.py0000644€tŠÔÚ€2›s®0000003650713576752246022252 0ustar jukkaDROPBOX\Domain 
Users00000000000000
"""Miscellaneous primitive ops."""

from mypyc.ops import (
    RTuple, none_rprimitive, bool_rprimitive, object_rprimitive, str_rprimitive,
    int_rprimitive, dict_rprimitive,
    ERR_NEVER, ERR_MAGIC, ERR_FALSE
)
from mypyc.ops_primitive import (
    name_ref_op, simple_emit, binary_op, unary_op, func_op, method_op,
    custom_op, negative_int_emit,
    call_emit, name_emit, call_negative_bool_emit, call_negative_magic_emit,
)


# Load the boxed None singleton (borrowed reference).
none_object_op = custom_op(result_type=object_rprimitive,
                           arg_types=[],
                           error_kind=ERR_NEVER,
                           format_str='{dest} = builtins.None :: object',
                           emit=name_emit('Py_None'),
                           is_borrowed=True)

# Unboxed None is represented by the value 1.
none_op = name_ref_op('builtins.None',
                      result_type=none_rprimitive,
                      error_kind=ERR_NEVER,
                      emit=simple_emit('{dest} = 1; /* None */'))

true_op = name_ref_op('builtins.True',
                      result_type=bool_rprimitive,
                      error_kind=ERR_NEVER,
                      emit=simple_emit('{dest} = 1;'))

false_op = name_ref_op('builtins.False',
                       result_type=bool_rprimitive,
                       error_kind=ERR_NEVER,
                       emit=simple_emit('{dest} = 0;'))

ellipsis_op = custom_op(name='...',
                        arg_types=[],
                        result_type=object_rprimitive,
                        error_kind=ERR_NEVER,
                        emit=name_emit('Py_Ellipsis'),
                        is_borrowed=True)

not_implemented_op = name_ref_op(name='builtins.NotImplemented',
                                 result_type=object_rprimitive,
                                 error_kind=ERR_NEVER,
                                 emit=name_emit('Py_NotImplemented'),
                                 is_borrowed=True)

func_op(name='builtins.id',
        arg_types=[object_rprimitive],
        result_type=int_rprimitive,
        error_kind=ERR_NEVER,
        emit=call_emit('CPyTagged_Id'))

iter_op = func_op(name='builtins.iter',
                  arg_types=[object_rprimitive],
                  result_type=object_rprimitive,
                  error_kind=ERR_MAGIC,
                  emit=call_emit('PyObject_GetIter'))

coro_op = custom_op(name='get_coroutine_obj',
                    arg_types=[object_rprimitive],
                    result_type=object_rprimitive,
                    error_kind=ERR_MAGIC,
                    emit=call_emit('CPy_GetCoro'))

# Although the error_kind is set to be ERR_NEVER, this can actually
# return NULL, and thus it must be checked using Branch.IS_ERROR.
next_op = custom_op(name='next',
                    arg_types=[object_rprimitive],
                    result_type=object_rprimitive,
                    error_kind=ERR_NEVER,
                    emit=call_emit('PyIter_Next'))

# Do a next, don't swallow StopIteration, but also don't propagate an
# error. (N.B: This can still return NULL without an error to
# represent an implicit StopIteration, but if StopIteration is
# *explicitly* raised this will not swallow it.)
# Can return NULL: see next_op.
next_raw_op = custom_op(name='next',
                        arg_types=[object_rprimitive],
                        result_type=object_rprimitive,
                        error_kind=ERR_NEVER,
                        emit=call_emit('CPyIter_Next'))

# Do a send, or a next if second arg is None.
# (This behavior is to match the PEP 380 spec for yield from.)
# Like next_raw_op, don't swallow StopIteration,
# but also don't propagate an error.
# Can return NULL: see next_op.
send_op = custom_op(name='send',
                    arg_types=[object_rprimitive, object_rprimitive],
                    result_type=object_rprimitive,
                    error_kind=ERR_NEVER,
                    emit=call_emit('CPyIter_Send'))

# This is sort of unfortunate but oh well: yield_from_except performs most of the
# error handling logic in `yield from` operations. It returns a bool and a value.
# If the bool is true, then a StopIteration was received and we should return.
# If the bool is false, then the value should be yielded.
# The normal case is probably that it signals an exception, which gets
# propagated.
yield_from_rtuple = RTuple([bool_rprimitive, object_rprimitive])

yield_from_except_op = custom_op(
    name='yield_from_except',
    arg_types=[object_rprimitive],
    result_type=yield_from_rtuple,
    error_kind=ERR_MAGIC,
    emit=simple_emit('{dest}.f0 = CPy_YieldFromErrorHandle({args[0]}, &{dest}.f1);'))

method_new_op = custom_op(name='method_new',
                          arg_types=[object_rprimitive, object_rprimitive],
                          result_type=object_rprimitive,
                          error_kind=ERR_MAGIC,
                          emit=call_emit('PyMethod_New'))

# Check if the current exception is a StopIteration and return its value if so.
# Treats "no exception" as StopIteration with a None value.
# If it is a different exception, re-raise it.
check_stop_op = custom_op(name='check_stop_iteration',
                          arg_types=[],
                          result_type=object_rprimitive,
                          error_kind=ERR_MAGIC,
                          emit=call_emit('CPy_FetchStopIterationValue'))


#
# Fallback primitive operations that operate on 'object' operands
#

for op, opid in [('==', 'Py_EQ'),
                 ('!=', 'Py_NE'),
                 ('<', 'Py_LT'),
                 ('<=', 'Py_LE'),
                 ('>', 'Py_GT'),
                 ('>=', 'Py_GE')]:
    # The result type is 'object' since that's what PyObject_RichCompare returns.
    binary_op(op=op,
              arg_types=[object_rprimitive, object_rprimitive],
              result_type=object_rprimitive,
              error_kind=ERR_MAGIC,
              emit=simple_emit('{dest} = PyObject_RichCompare({args[0]}, {args[1]}, %s);' % opid),
              priority=0)

for op, funcname in [('+', 'PyNumber_Add'),
                     ('-', 'PyNumber_Subtract'),
                     ('*', 'PyNumber_Multiply'),
                     ('//', 'PyNumber_FloorDivide'),
                     ('/', 'PyNumber_TrueDivide'),
                     ('%', 'PyNumber_Remainder'),
                     ('<<', 'PyNumber_Lshift'),
                     ('>>', 'PyNumber_Rshift'),
                     ('&', 'PyNumber_And'),
                     ('^', 'PyNumber_Xor'),
                     ('|', 'PyNumber_Or')]:
    binary_op(op=op,
              arg_types=[object_rprimitive, object_rprimitive],
              result_type=object_rprimitive,
              error_kind=ERR_MAGIC,
              emit=call_emit(funcname),
              priority=0)

for op, funcname in [('+=', 'PyNumber_InPlaceAdd'),
                     ('-=', 'PyNumber_InPlaceSubtract'),
                     ('*=', 'PyNumber_InPlaceMultiply'),
                     ('@=', 'PyNumber_InPlaceMatrixMultiply'),
                     ('//=', 'PyNumber_InPlaceFloorDivide'),
                     ('/=', 'PyNumber_InPlaceTrueDivide'),
                     ('%=', 'PyNumber_InPlaceRemainder'),
                     ('<<=', 'PyNumber_InPlaceLshift'),
                     ('>>=', 'PyNumber_InPlaceRshift'),
                     ('&=', 'PyNumber_InPlaceAnd'),
                     ('^=', 'PyNumber_InPlaceXor'),
                     ('|=', 'PyNumber_InPlaceOr')]:
    binary_op(op=op,
              arg_types=[object_rprimitive, object_rprimitive],
              result_type=object_rprimitive,
              error_kind=ERR_MAGIC,
              emit=simple_emit('{dest} = %s({args[0]}, {args[1]});' % funcname),
              priority=0)

binary_op(op='**',
          arg_types=[object_rprimitive, object_rprimitive],
          result_type=object_rprimitive,
          error_kind=ERR_MAGIC,
          emit=simple_emit('{dest} = PyNumber_Power({args[0]}, {args[1]}, Py_None);'),
          priority=0)

# Note: argument order is (item, container); PySequence_Contains is (container, item).
binary_op('in',
          arg_types=[object_rprimitive, object_rprimitive],
          result_type=bool_rprimitive,
          error_kind=ERR_MAGIC,
          emit=negative_int_emit('{dest} = PySequence_Contains({args[1]}, {args[0]});'),
          priority=0)

binary_op('is',
          arg_types=[object_rprimitive, object_rprimitive],
          result_type=bool_rprimitive,
          error_kind=ERR_NEVER,
          emit=simple_emit('{dest} = {args[0]} == {args[1]};'),
          priority=0)

binary_op('is not',
          arg_types=[object_rprimitive, object_rprimitive],
          result_type=bool_rprimitive,
          error_kind=ERR_NEVER,
          emit=simple_emit('{dest} = {args[0]} != {args[1]};'),
          priority=0)

for op, funcname in [('-', 'PyNumber_Negative'),
                     ('+', 'PyNumber_Positive'),
                     ('~', 'PyNumber_Invert')]:
    unary_op(op=op,
             arg_type=object_rprimitive,
             result_type=object_rprimitive,
             error_kind=ERR_MAGIC,
             emit=call_emit(funcname),
             priority=0)

unary_op(op='not',
         arg_type=object_rprimitive,
         result_type=bool_rprimitive,
         error_kind=ERR_MAGIC,
         format_str='{dest} = not {args[0]}',
         emit=call_negative_magic_emit('PyObject_Not'),
         priority=0)

# Fast path: negating an unboxed bool is just C negation.
unary_op(op='not',
         arg_type=bool_rprimitive,
         result_type=bool_rprimitive,
         error_kind=ERR_NEVER,
         format_str='{dest} = !{args[0]}',
         emit=simple_emit('{dest} = !{args[0]};'),
         priority=1)

method_op('__getitem__',
          arg_types=[object_rprimitive, object_rprimitive],
          result_type=object_rprimitive,
          error_kind=ERR_MAGIC,
          emit=call_emit('PyObject_GetItem'),
          priority=0)

method_op('__setitem__',
          arg_types=[object_rprimitive, object_rprimitive, object_rprimitive],
          result_type=bool_rprimitive,
          error_kind=ERR_FALSE,
          emit=call_negative_bool_emit('PyObject_SetItem'),
          priority=0)

method_op('__delitem__',
          arg_types=[object_rprimitive, object_rprimitive],
          result_type=bool_rprimitive,
          error_kind=ERR_FALSE,
          emit=call_negative_bool_emit('PyObject_DelItem'),
          priority=0)

func_op(
    name='builtins.hash',
    arg_types=[object_rprimitive],
    result_type=int_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyObject_Hash'))

py_getattr_op = func_op(
    name='builtins.getattr',
    arg_types=[object_rprimitive, object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PyObject_GetAttr')
)

# Three-argument getattr() with a default value.
func_op(
    name='builtins.getattr',
    arg_types=[object_rprimitive, object_rprimitive, object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPyObject_GetAttr3')
)

py_setattr_op = func_op(
    name='builtins.setattr',
    arg_types=[object_rprimitive, object_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('PyObject_SetAttr')
)

py_hasattr_op = func_op(
    name='builtins.hasattr',
    arg_types=[object_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_NEVER,
    emit=call_emit('PyObject_HasAttr')
)

py_calc_meta_op = custom_op(
    arg_types=[object_rprimitive, object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    format_str='{dest} = py_calc_metaclass({comma_args})',
    emit=simple_emit(
        '{dest} = (PyObject*) _PyType_CalculateMetaclass((PyTypeObject *){args[0]}, {args[1]});'),
    is_borrowed=True
)

py_delattr_op = func_op(
    name='builtins.delattr',
    arg_types=[object_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('PyObject_DelAttr')
)

# Positional-args-only call of an arbitrary Python callable.
py_call_op = custom_op(
    arg_types=[object_rprimitive],
    result_type=object_rprimitive,
    is_var_arg=True,
    error_kind=ERR_MAGIC,
    format_str='{dest} = py_call({comma_args})',
    emit=simple_emit('{dest} = PyObject_CallFunctionObjArgs({comma_args}, NULL);'))

# Call with a positional-args tuple and a kwargs dict.
py_call_with_kwargs_op = custom_op(
    arg_types=[object_rprimitive],
    result_type=object_rprimitive,
    is_var_arg=True,
    error_kind=ERR_MAGIC,
    format_str='{dest} = py_call_with_kwargs({args[0]}, {args[1]}, {args[2]})',
    emit=call_emit('PyObject_Call'))

py_method_call_op = custom_op(
    arg_types=[object_rprimitive],
    result_type=object_rprimitive,
    is_var_arg=True,
    error_kind=ERR_MAGIC,
    format_str='{dest} = py_method_call({comma_args})',
    emit=simple_emit('{dest} = PyObject_CallMethodObjArgs({comma_args}, NULL);'))

import_op = custom_op(
    name='import',
    arg_types=[str_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PyImport_Import'))

get_module_dict_op = custom_op(
    name='get_module_dict',
    arg_types=[],
    result_type=dict_rprimitive,
    error_kind=ERR_NEVER,
    emit=call_emit('PyImport_GetModuleDict'),
    is_borrowed=True)

func_op('builtins.isinstance',
        arg_types=[object_rprimitive, object_rprimitive],
        result_type=bool_rprimitive,
        error_kind=ERR_MAGIC,
        emit=call_negative_magic_emit('PyObject_IsInstance'))

# Faster isinstance() that only works with native classes and doesn't perform type checking
# of the type argument.
fast_isinstance_op = func_op(
    'builtins.isinstance',
    arg_types=[object_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_NEVER,
    emit=simple_emit('{dest} = PyObject_TypeCheck({args[0]}, (PyTypeObject *){args[1]});'),
    priority=0)

# Exact type check (no subclasses allowed).
type_is_op = custom_op(
    name='type_is',
    arg_types=[object_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_NEVER,
    emit=simple_emit('{dest} = Py_TYPE({args[0]}) == (PyTypeObject *){args[1]};'))

bool_op = func_op(
    'builtins.bool',
    arg_types=[object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_negative_magic_emit('PyObject_IsTrue'))

new_slice_op = func_op(
    'builtins.slice',
    arg_types=[object_rprimitive, object_rprimitive, object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PySlice_New'))

type_op = func_op(
    'builtins.type',
    arg_types=[object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_NEVER,
    emit=call_emit('PyObject_Type'))

type_object_op = name_ref_op(
    'builtins.type',
    result_type=object_rprimitive,
    error_kind=ERR_NEVER,
    emit=name_emit('(PyObject*) &PyType_Type'),
    is_borrowed=True)

# Generic len() fallback for arbitrary objects (lower priority than the
# specialized list/dict variants).
func_op(name='builtins.len',
        arg_types=[object_rprimitive],
        result_type=int_rprimitive,
        error_kind=ERR_NEVER,
        emit=call_emit('CPyObject_Size'),
        priority=0)
# Create a type object from a compiled type template (CPyType_FromTemplate).
pytype_from_template_op = custom_op(
    arg_types=[object_rprimitive, object_rprimitive, str_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    format_str='{dest} = pytype_from_template({comma_args})',
    emit=simple_emit(
        '{dest} = CPyType_FromTemplate((PyTypeObject *){args[0]}, {args[1]}, {args[2]});'))

# Create a dataclass from an extension class. See
# CPyDataclass_SleightOfHand for more docs.
dataclass_sleight_of_hand = custom_op(
    arg_types=[object_rprimitive, object_rprimitive, dict_rprimitive, dict_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    format_str='{dest} = dataclass_sleight_of_hand({comma_args})',
    emit=call_emit('CPyDataclass_SleightOfHand'))
mypy-0.761/mypyc/ops_primitive.py0000644€tŠÔÚ€2›s®0000002042113576752246023323 0ustar jukkaDROPBOX\Domain Users00000000000000"""Primitive types and utilities for defining primitive ops.

Most of the ops can be automatically generated by matching against AST
nodes and types. For example, a func_op is automatically generated when
a specific function is called with the specific positional argument
count and argument types.
"""

from typing import Dict, List, Optional

from mypyc.ops import (
    OpDescription, RType, EmitterInterface, EmitCallback, StealsDescription,
    short_name, bool_rprimitive
)


# Primitive binary ops (key is operator such as '+')
binary_ops = {}  # type: Dict[str, List[OpDescription]]

# Primitive unary ops (key is operator such as '-')
unary_ops = {}  # type: Dict[str, List[OpDescription]]

# Primitive ops for built-in functions (key is function name such as 'builtins.len')
func_ops = {}  # type: Dict[str, List[OpDescription]]

# Primitive ops for built-in methods (key is method name such as 'builtins.list.append')
method_ops = {}  # type: Dict[str, List[OpDescription]]

# Primitive ops for reading module attributes (key is name such as 'builtins.None')
name_ref_ops = {}  # type: Dict[str, OpDescription]


def simple_emit(template: str) -> EmitCallback:
    """Construct a simple PrimitiveOp emit callback function.

    It just applies a str.format template to
    'args', 'dest', 'comma_args', 'num_args', 'pre_comma_args'.

    For more complex cases you need to define a custom function.
    """

    def emit(emitter: EmitterInterface, args: List[str], dest: str) -> None:
        comma_args = ', '.join(args)
        pre_comma_args = ', ' + comma_args if comma_args else ''

        emitter.emit_line(template.format(
            args=args,
            dest=dest,
            comma_args=comma_args,
            pre_comma_args=pre_comma_args,
            num_args=len(args)))

    return emit


def name_emit(name: str) -> EmitCallback:
    # Assign a fixed C name/expression to the destination.
    return simple_emit('{dest} = %s;' % name)


def call_emit(func: str) -> EmitCallback:
    # Call a C function with the op's arguments and assign the result.
    return simple_emit('{dest} = %s({comma_args});' % func)


def call_negative_bool_emit(func: str) -> EmitCallback:
    # Call a C function that signals failure with a negative return value;
    # the destination receives the success flag.
    return simple_emit('{dest} = %s({comma_args}) >= 0;' % func)


def negative_int_emit(template: str) -> EmitCallback:
    """Construct a simple PrimitiveOp emit callback function that checks for -1 return."""

    def emit(emitter: EmitterInterface, args: List[str], dest: str) -> None:
        temp = emitter.temp_name()
        emitter.emit_line(template.format(args=args, dest='int %s' % temp,
                                          comma_args=', '.join(args)))
        emitter.emit_lines('if (%s < 0)' % temp,
                           ' %s = %s;' % (dest, emitter.c_error_value(bool_rprimitive)),
                           'else',
                           ' %s = %s;' % (dest, temp))

    return emit


def call_negative_magic_emit(func: str) -> EmitCallback:
    # Call a C function that returns -1 on error; map that to the bool
    # error value of the destination.
    return negative_int_emit('{dest} = %s({comma_args});' % func)


def binary_op(op: str,
              arg_types: List[RType],
              result_type: RType,
              error_kind: int,
              emit: EmitCallback,
              format_str: Optional[str] = None,
              steals: StealsDescription = False,
              is_borrowed: bool = False,
              priority: int = 1) -> None:
    """Define a primitive op for a binary operator (registered in binary_ops)."""
    assert len(arg_types) == 2
    ops = binary_ops.setdefault(op, [])
    if format_str is None:
        format_str = '{dest} = {args[0]} %s {args[1]}' % op
    desc = OpDescription(op, arg_types, result_type, False, error_kind, format_str,
                         emit, steals, is_borrowed, priority)
    ops.append(desc)


def unary_op(op: str,
             arg_type: RType,
             result_type: RType,
             error_kind: int,
             emit: EmitCallback,
             format_str: Optional[str] = None,
             steals: StealsDescription = False,
             is_borrowed: bool = False,
             priority: int = 1) -> OpDescription:
    """Define a primitive op for a unary operator (registered in unary_ops)."""
    ops = unary_ops.setdefault(op, [])
    if format_str is None:
        format_str = '{dest} = %s{args[0]}' % op
    desc = OpDescription(op, [arg_type], result_type, False, error_kind, format_str,
                         emit, steals, is_borrowed, priority)
    ops.append(desc)
    return desc


def func_op(name: str,
            arg_types: List[RType],
            result_type: RType,
            error_kind: int,
            emit: EmitCallback,
            format_str: Optional[str] = None,
            steals: StealsDescription = False,
            is_borrowed: bool = False,
            priority: int = 1) -> OpDescription:
    """Define a primitive op that replaces a call to a built-in function.

    Registered in func_ops under the fully-qualified function name.
    """
    ops = func_ops.setdefault(name, [])
    typename = ''
    if len(arg_types) == 1:
        typename = ' :: %s' % short_name(arg_types[0].name)
    if format_str is None:
        format_str = '{dest} = %s %s%s' % (short_name(name),
                                           ', '.join('{args[%d]}' % i
                                                     for i in range(len(arg_types))),
                                           typename)
    desc = OpDescription(name, arg_types, result_type, False, error_kind, format_str,
                         emit, steals, is_borrowed, priority)
    ops.append(desc)
    return desc


def method_op(name: str,
              arg_types: List[RType],
              result_type: Optional[RType],
              error_kind: int,
              emit: EmitCallback,
              steals: StealsDescription = False,
              is_borrowed: bool = False,
              priority: int = 1) -> OpDescription:
    """Define a primitive op that replaces a method call.

    Args:
        name: short name of the method (for example, 'append')
        arg_types: argument types; the receiver is always the first argument
        result_type: type of the result, None if void
    """
    ops = method_ops.setdefault(name, [])
    assert len(arg_types) > 0
    args = ', '.join('{args[%d]}' % i
                     for i in range(1, len(arg_types)))
    type_name = short_name(arg_types[0].name)
    if name == '__getitem__':
        format_str = '{dest} = {args[0]}[{args[1]}] :: %s' % type_name
    else:
        format_str = '{dest} = {args[0]}.%s(%s) :: %s' % (name, args, type_name)
    desc = OpDescription(name, arg_types, result_type, False, error_kind, format_str,
                         emit, steals, is_borrowed, priority)
    ops.append(desc)
    return desc


def name_ref_op(name: str,
                result_type: RType,
                error_kind: int,
                emit: EmitCallback,
                is_borrowed: bool = False) -> OpDescription:
    """Define an op that is used to implement reading a module attribute.

    Args:
        name: fully-qualified name (e.g. 'builtins.None')
    """
    assert name not in name_ref_ops, 'already defined: %s' % name
    format_str = '{dest} = %s' % short_name(name)
    desc = OpDescription(name, [], result_type, False, error_kind, format_str,
                         emit, False, is_borrowed, 0)
    name_ref_ops[name] = desc
    return desc


def custom_op(arg_types: List[RType],
              result_type: RType,
              error_kind: int,
              emit: EmitCallback,
              name: Optional[str] = None,
              format_str: Optional[str] = None,
              steals: StealsDescription = False,
              is_borrowed: bool = False,
              is_var_arg: bool = False) -> OpDescription:
    """Create a one-off op that can't be automatically generated from the AST.

    Note that if the format_str argument is not provided, then a format_str
    is generated using the name argument. The name argument only needs to be
    provided if the format_str argument is not provided.
    """
    if name is not None and format_str is None:
        typename = ''
        if len(arg_types) == 1:
            typename = ' :: %s' % short_name(arg_types[0].name)
        format_str = '{dest} = %s %s%s' % (short_name(name),
                                           ', '.join('{args[%d]}' % i
                                                     for i in range(len(arg_types))),
                                           typename)
    assert format_str is not None
    return OpDescription('', arg_types, result_type, is_var_arg, error_kind, format_str,
                         emit, steals, is_borrowed, 0)


# Import various modules that set up global state.
import mypyc.ops_int  # noqa
import mypyc.ops_str  # noqa
import mypyc.ops_list  # noqa
import mypyc.ops_dict  # noqa
import mypyc.ops_tuple  # noqa
import mypyc.ops_misc  # noqa
mypy-0.761/mypyc/ops_set.py0000644€tŠÔÚ€2›s®0000000531213576752246022110 0ustar jukkaDROPBOX\Domain Users00000000000000"""Primitive set ops."""

from mypyc.ops_primitive import (
    func_op, method_op, binary_op, simple_emit, negative_int_emit, call_emit,
    call_negative_bool_emit,
)
from mypyc.ops import (
    object_rprimitive, bool_rprimitive, set_rprimitive, int_rprimitive,
    ERR_MAGIC, ERR_FALSE, ERR_NEVER, EmitterInterface
)

from typing import List


# set() with no arguments.
new_set_op = func_op(
    name='builtins.set',
    arg_types=[],
    result_type=set_rprimitive,
    error_kind=ERR_MAGIC,
    emit=simple_emit('{dest} = PySet_New(NULL);')
)

# set(iterable)
func_op(
    name='builtins.set',
    arg_types=[object_rprimitive],
    result_type=set_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PySet_New')
)

# frozenset(iterable)
func_op(
    name='builtins.frozenset',
    arg_types=[object_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PyFrozenSet_New')
)


def emit_len(emitter: EmitterInterface, args: List[str], dest: str) -> None:
    # PySet_GET_SIZE yields a C Py_ssize_t; convert it to a tagged int.
    temp = emitter.temp_name()
    emitter.emit_declaration('Py_ssize_t %s;' % temp)
    emitter.emit_line('%s = PySet_GET_SIZE(%s);' % (temp, args[0]))
    emitter.emit_line('%s = CPyTagged_ShortFromSsize_t(%s);' % (dest, temp))


# len(set)
func_op(
    name='builtins.len',
    arg_types=[set_rprimitive],
    result_type=int_rprimitive,
    error_kind=ERR_NEVER,
    emit=emit_len,
)

# item in set (note the argument order of PySet_Contains: set first).
binary_op(
    op='in',
    arg_types=[object_rprimitive, set_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_MAGIC,
    format_str='{dest} = {args[0]} in {args[1]} :: set',
    emit=negative_int_emit('{dest} = PySet_Contains({args[1]}, {args[0]});')
)

method_op(
    name='remove',
    arg_types=[set_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_emit('CPySet_Remove')
)

method_op(
    name='discard',
    arg_types=[set_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('PySet_Discard')
)

set_add_op = method_op(
    name='add',
    arg_types=[set_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('PySet_Add')
)

# This is not a public API but looks like it should be fine.
set_update_op = method_op(
    name='update',
    arg_types=[set_rprimitive, object_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('_PySet_Update')
)

method_op(
    name='clear',
    arg_types=[set_rprimitive],
    result_type=bool_rprimitive,
    error_kind=ERR_FALSE,
    emit=call_negative_bool_emit('PySet_Clear')
)

method_op(
    name='pop',
    arg_types=[set_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PySet_Pop')
)
mypy-0.761/mypyc/ops_str.py0000644€tŠÔÚ€2›s®0000000454313576752246022132 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Callable

from mypyc.ops import (
    object_rprimitive, str_rprimitive, bool_rprimitive, ERR_MAGIC, ERR_NEVER,
    EmitterInterface
)
from mypyc.ops_primitive import func_op, binary_op, simple_emit, name_ref_op, method_op


# Reference to the 'str' type object itself; a borrowed reference.
name_ref_op('builtins.str',
            result_type=object_rprimitive,
            error_kind=ERR_NEVER,
            emit=simple_emit('{dest} = (PyObject *)&PyUnicode_Type;'),
            is_borrowed=True)

# str(obj)
func_op(name='builtins.str',
        arg_types=[object_rprimitive],
        result_type=str_rprimitive,
        error_kind=ERR_MAGIC,
        emit=simple_emit('{dest} = PyObject_Str({args[0]});'))

# str + str
binary_op(op='+',
          arg_types=[str_rprimitive, str_rprimitive],
          result_type=str_rprimitive,
          error_kind=ERR_MAGIC,
          emit=simple_emit('{dest} = PyUnicode_Concat({args[0]}, {args[1]});'))

method_op(
    name='join',
    arg_types=[str_rprimitive, object_rprimitive],
    result_type=str_rprimitive,
    error_kind=ERR_MAGIC,
    emit=simple_emit('{dest} = PyUnicode_Join({args[0]}, {args[1]});'))

# PyUnicodeAppend makes an effort to reuse the LHS when the refcount
# is 1. This is super dodgy but oh well, the interpreter does it.
# str += str. The LHS is stolen (steals=[True, False]) because
# PyUnicode_Append consumes its first argument in place.
binary_op(op='+=',
          arg_types=[str_rprimitive, str_rprimitive],
          steals=[True, False],
          result_type=str_rprimitive,
          error_kind=ERR_MAGIC,
          emit=simple_emit('{dest} = {args[0]}; PyUnicode_Append(&{dest}, {args[1]});'))


def emit_str_compare(comparison: str) -> Callable[[EmitterInterface, List[str], str], None]:
    """Emit a string comparison using PyUnicode_Compare.

    'comparison' is a C suffix such as '== 0' applied to the three-way
    comparison result. A -1 return with an exception set is mapped to the
    bool error value 2 (ERR_MAGIC convention).
    """
    def emit(emitter: EmitterInterface, args: List[str], dest: str) -> None:
        temp = emitter.temp_name()
        emitter.emit_declaration('int %s;' % temp)
        emitter.emit_lines(
            '%s = PyUnicode_Compare(%s, %s);' % (temp, args[0], args[1]),
            'if (%s == -1 && PyErr_Occurred())' % temp,
            ' %s = 2;' % dest,
            'else',
            ' %s = (%s %s);' % (dest, temp, comparison))
    return emit


binary_op(op='==',
          arg_types=[str_rprimitive, str_rprimitive],
          result_type=bool_rprimitive,
          error_kind=ERR_MAGIC,
          emit=emit_str_compare('== 0'))

binary_op(op='!=',
          arg_types=[str_rprimitive, str_rprimitive],
          result_type=bool_rprimitive,
          error_kind=ERR_MAGIC,
          emit=emit_str_compare('!= 0'))
mypy-0.761/mypyc/ops_tuple.py0000644€tŠÔÚ€2›s®0000000331613576752246022450 0ustar jukkaDROPBOX\Domain Users00000000000000"""Primitive tuple ops.

These are for varying-length tuples represented as Python tuple objects
(RPrimitive, not RTuple).
"""

from typing import List

from mypyc.ops import (
    EmitterInterface, tuple_rprimitive, int_rprimitive, list_rprimitive, object_rprimitive,
    ERR_NEVER, ERR_MAGIC
)
from mypyc.ops_primitive import (
    func_op, method_op, custom_op, call_emit, simple_emit,
)


tuple_get_item_op = method_op(
    name='__getitem__',
    arg_types=[tuple_rprimitive, int_rprimitive],
    result_type=object_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('CPySequenceTuple_GetItem'))

# Construct a tuple from a variable number of items (PyTuple_Pack).
new_tuple_op = custom_op(
    arg_types=[object_rprimitive],
    result_type=tuple_rprimitive,
    is_var_arg=True,
    error_kind=ERR_MAGIC,
    steals=False,
    format_str='{dest} = ({comma_args}) :: tuple',
    emit=simple_emit('{dest} = PyTuple_Pack({num_args}{pre_comma_args});'))


def emit_len(emitter: EmitterInterface, args: List[str], dest: str) -> None:
    # PyTuple_GET_SIZE yields a C Py_ssize_t; convert it to a tagged int.
    temp = emitter.temp_name()
    emitter.emit_declaration('Py_ssize_t %s;' % temp)
    emitter.emit_line('%s = PyTuple_GET_SIZE(%s);' % (temp, args[0]))
    emitter.emit_line('%s = CPyTagged_ShortFromSsize_t(%s);' % (dest, temp))


tuple_len_op = func_op(
    name='builtins.len',
    arg_types=[tuple_rprimitive],
    result_type=int_rprimitive,
    error_kind=ERR_NEVER,
    emit=emit_len)

# tuple(list) -- specialized; priority=2 so it beats the generic version below.
list_tuple_op = func_op(
    name='builtins.tuple',
    arg_types=[list_rprimitive],
    result_type=tuple_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PyList_AsTuple'),
    priority=2)

# tuple(iterable)
func_op(
    name='builtins.tuple',
    arg_types=[object_rprimitive],
    result_type=tuple_rprimitive,
    error_kind=ERR_MAGIC,
    emit=call_emit('PySequence_Tuple'))
mypy-0.761/mypyc/options.py0000644€tŠÔÚ€2›s®0000000126213576752246022127 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional


class CompilerOptions:
    """Container for mypyc compiler configuration flags."""

    def __init__(self, strip_asserts: bool = False, multi_file: bool = False,
                 verbose: bool = False, separate: bool = False,
                 target_dir: Optional[str] = None,
                 include_runtime_files: Optional[bool] = None) -> None:
        self.strip_asserts = strip_asserts
        self.multi_file = multi_file
        self.verbose = verbose
        self.separate = separate
        # Inverse of 'separate' -- presumably enables cross-module
        # (global) assumptions when compiling everything together.
        self.global_opts = not separate
        self.target_dir = target_dir or 'build'
        # Default: bundle runtime files into the extension unless compiling
        # in multi-file mode.
        self.include_runtime_files = (
            include_runtime_files if include_runtime_files is not None else not multi_file
        )
mypy-0.761/mypyc/prebuildvisitor.py0000644€tŠÔÚ€2›s®0000001332013576752246023660 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, List, Set

from mypy.nodes import (
    Decorator, Expression, FuncDef, FuncItem, LambdaExpr, NameExpr, SymbolNode, Var, MemberExpr
)
from mypy.traverser import TraverserVisitor


class PreBuildVisitor(TraverserVisitor):
    """Class used to visit a mypy file before building the IR for that program.

    This is done as a first pass so that nested functions, encapsulating
    functions, lambda functions, decorated functions, and free variables
    can be determined before instantiating the IRBuilder.
    """

    def __init__(self) -> None:
        super().__init__()
        # Mapping from FuncItem instances to sets of variables. The FuncItem instances are
        # where these variables were first declared, and these variables are free in any
        # functions that are nested within the FuncItem from which they are mapped.
        self.free_variables = {}  # type: Dict[FuncItem, Set[SymbolNode]]

        # Intermediate data structure used to map SymbolNode instances to the FuncDef
        # in which they were first visited.
        self.symbols_to_funcs = {}  # type: Dict[SymbolNode, FuncItem]

        # Stack representing the function call stack.
        self.funcs = []  # type: List[FuncItem]

        # The set of property setters
        self.prop_setters = set()  # type: Set[FuncDef]

        # A map from any function that contains nested functions to
        # a set of all the functions that are nested within it.
        self.encapsulating_funcs = {}  # type: Dict[FuncItem, List[FuncItem]]

        # A map from a nested func to its parent/encapsulating func.
        self.nested_funcs = {}  # type: Dict[FuncItem, FuncItem]

        # A map from a decorated FuncDef to the decorator expressions applied to it.
        self.funcs_to_decorators = {}  # type: Dict[FuncDef, List[Expression]]

    def add_free_variable(self, symbol: SymbolNode) -> None:
        # Get the FuncItem instance where the free symbol was first declared, and map
        # that FuncItem to the SymbolNode representing the free symbol.
        func = self.symbols_to_funcs[symbol]
        self.free_variables.setdefault(func, set()).add(symbol)

    def visit_decorator(self, dec: Decorator) -> None:
        if dec.decorators:
            # Only add the function being decorated if there exist decorators in the
            # decorator list. Note that meaningful decorators (@property, @abstractmethod)
            # are removed from this list by mypy, but functions decorated by those
            # decorators (in addition to property setters) do not need to be added to the
            # set of decorated functions for the IRBuilder, because they are handled in a
            # special way.
            if isinstance(dec.decorators[0], MemberExpr) and dec.decorators[0].name == 'setter':
                self.prop_setters.add(dec.func)
            else:
                self.funcs_to_decorators[dec.func] = dec.decorators
        super().visit_decorator(dec)

    def visit_func(self, func: FuncItem) -> None:
        # If there were already functions or lambda expressions defined in the function
        # stack, then note the previous FuncItem as containing a nested function and the
        # current FuncItem as being a nested function.
        if self.funcs:
            # Add the new func to the set of nested funcs within the func at top of the
            # func stack.
            self.encapsulating_funcs.setdefault(self.funcs[-1], []).append(func)
            # Add the func at top of the func stack as the parent of new func.
            self.nested_funcs[func] = self.funcs[-1]

        self.funcs.append(func)
        super().visit_func(func)
        self.funcs.pop()

    def visit_func_def(self, fdef: FuncItem) -> None:
        self.visit_func(fdef)

    def visit_lambda_expr(self, expr: LambdaExpr) -> None:
        self.visit_func(expr)

    def visit_name_expr(self, expr: NameExpr) -> None:
        if isinstance(expr.node, (Var, FuncDef)):
            self.visit_symbol_node(expr.node)

    # Check if child is contained within fdef (possibly indirectly within
    # multiple nested functions).
    def is_parent(self, fitem: FuncItem, child: FuncItem) -> bool:
        if child in self.nested_funcs:
            parent = self.nested_funcs[child]
            if parent == fitem:
                return True
            return self.is_parent(fitem, parent)
        return False

    def visit_symbol_node(self, symbol: SymbolNode) -> None:
        if not self.funcs:
            # If the list of FuncDefs is empty, then we are not inside of a function and
            # hence do not need to do anything regarding free variables.
            return

        if symbol in self.symbols_to_funcs:
            orig_func = self.symbols_to_funcs[symbol]
            if self.is_parent(self.funcs[-1], orig_func):
                # If the function in which the symbol was originally seen is nested
                # within the function currently being visited, fix the free_variable
                # and symbol_to_funcs dictionaries.
                self.symbols_to_funcs[symbol] = self.funcs[-1]
                self.free_variables.setdefault(self.funcs[-1], set()).add(symbol)

            elif self.is_parent(orig_func, self.funcs[-1]):
                # If the SymbolNode instance has already been visited before,
                # and it was declared in a FuncDef not nested within the current
                # FuncDef being visited, then it is a free symbol because it is
                # being visited again.
                self.add_free_variable(symbol)

        else:
            # Otherwise, this is the first time the SymbolNode is being visited. We map
            # the SymbolNode to the current FuncDef being visited to note where it was
            # first visited.
            self.symbols_to_funcs[symbol] = self.funcs[-1]

    def visit_var(self, var: Var) -> None:
        self.visit_symbol_node(var)
mypy-0.761/mypyc/refcount.py0000644€tŠÔÚ€2›s®0000002362613576752246022267 0ustar jukkaDROPBOX\Domain Users00000000000000"""Transformation for inserting reference count inc/dec opcodes.

This transformation happens towards the end of compilation. Before this
transformation, reference count management is not explicitly handled at all.
By postponing this pass, the previous passes are simpler as they don't have
to update reference count opcodes.

The approach is to decrement reference counts soon after a value is no
longer live, to quickly free memory (and call __del__ methods), though
there are no strict guarantees -- other than that local variables are
freed before return from a function.

Function arguments are a little special. They are initially considered
'borrowed' from the caller and their reference counts don't need to be
decremented before returning. An assignment to a borrowed value turns it
into a regular, owned reference that needs to be freed before return.
"""

from typing import Dict, Iterable, List, Set, Tuple

from mypyc.analysis import (
    get_cfg,
    analyze_must_defined_regs,
    analyze_live_regs,
    analyze_borrowed_arguments,
    cleanup_cfg,
    AnalysisDict
)
from mypyc.ops import (
    FuncIR, BasicBlock, Assign, RegisterOp, DecRef, IncRef, Branch,
    Goto, Environment, Op, ControlOp, Value, Register
)


# The decrefs (value, is_xdec) and increfs to be applied on an edge.
DecIncs = Tuple[Tuple[Tuple[Value, bool], ...], Tuple[Value, ...]]

# A cache of basic blocks that decrement and increment specific values
# and then jump to some target block. This lets us cut down on how much
# code we generate in some circumstances.
BlockCache = Dict[Tuple[BasicBlock, DecIncs], BasicBlock]


def insert_ref_count_opcodes(ir: FuncIR) -> None:
    """Insert reference count inc/dec opcodes to a function.

    This is the entry point to this module.
    """
    cfg = get_cfg(ir.blocks)
    borrowed = set(reg for reg in ir.env.regs() if reg.is_borrowed)
    args = set(reg for reg in ir.env.regs() if ir.env.indexes[reg] < len(ir.args))
    regs = [reg for reg in ir.env.regs() if isinstance(reg, Register)]
    live = analyze_live_regs(ir.blocks, cfg)
    borrow = analyze_borrowed_arguments(ir.blocks, cfg, borrowed)
    defined = analyze_must_defined_regs(ir.blocks, cfg, args, regs)
    cache = {}  # type: BlockCache
    for block in ir.blocks[:]:
        if isinstance(block.ops[-1], (Branch, Goto)):
            insert_branch_inc_and_decrefs(block,
                                          cache,
                                          ir.blocks,
                                          live.before,
                                          borrow.before,
                                          borrow.after,
                                          defined.after,
                                          ir.env)
        transform_block(block, live.before, live.after, borrow.before, defined.after, ir.env)

    # Find all the xdecs we inserted and note the registers down as
    # needing to be initialized.
    for block in ir.blocks:
        for op in block.ops:
            if isinstance(op, DecRef) and op.is_xdec:
                ir.env.vars_needing_init.add(op.src)

    cleanup_cfg(ir.blocks)


def is_maybe_undefined(post_must_defined: Set[Value], src: Value) -> bool:
    # Only registers can be conditionally undefined at a program point.
    return isinstance(src, Register) and src not in post_must_defined


def maybe_append_dec_ref(ops: List[Op], dest: Value,
                         defined: 'AnalysisDict[Value]',
                         key: Tuple[BasicBlock, int]) -> None:
    # Emit a DecRef only for refcounted values; use an xdec (None-tolerant
    # decref) when the value might be undefined at this point.
    if dest.type.is_refcounted:
        ops.append(DecRef(dest, is_xdec=is_maybe_undefined(defined[key], dest)))


def maybe_append_inc_ref(ops: List[Op], dest: Value) -> None:
    # Emit an IncRef only for refcounted values.
    if dest.type.is_refcounted:
        ops.append(IncRef(dest))


def transform_block(block: BasicBlock,
                    pre_live: 'AnalysisDict[Value]',
                    post_live: 'AnalysisDict[Value]',
                    pre_borrow: 'AnalysisDict[Value]',
                    post_must_defined: 'AnalysisDict[Value]',
                    env: Environment) -> None:
    # Rewrite the block's op list, interleaving IncRef/DecRef ops based on
    # the liveness/borrow/must-defined analysis results.
    old_ops = block.ops
    ops = []  # type: List[Op]
    for i, op in enumerate(old_ops):
        key = (block, i)

        assert op not in pre_live[key]
        dest = op.dest if isinstance(op, Assign) else op
        stolen = op.stolen()

        # Incref any references that are being stolen that stay live, were borrowed,
        # or are stolen more than once by this operation.
        # NOTE(review): this inner loop rebinds the outer loop variable 'i';
        # harmless since 'key' was already computed above, but a different
        # name would be clearer.
        for i, src in enumerate(stolen):
            if src in post_live[key] or src in pre_borrow[key] or src in stolen[:i]:
                maybe_append_inc_ref(ops, src)

        # For assignments to registers that were already live,
        # decref the old value.
        if (dest not in pre_borrow[key] and dest in pre_live[key]):
            assert isinstance(op, Assign)
            maybe_append_dec_ref(ops, dest, post_must_defined, key)

        ops.append(op)

        # Control ops don't have any space to insert ops after them, so
        # their inc/decrefs get inserted by insert_branch_inc_and_decrefs.
        if isinstance(op, ControlOp):
            continue

        for src in op.unique_sources():
            # Decrement source that won't be live afterwards.
            if src not in post_live[key] and src not in pre_borrow[key] and src not in stolen:
                maybe_append_dec_ref(ops, src, post_must_defined, key)
        # Decrement the destination if it is dead after the op and
        # wasn't a borrowed RegisterOp
        if (not dest.is_void and dest not in post_live[key]
                and not (isinstance(op, RegisterOp) and dest.is_borrowed)):
            maybe_append_dec_ref(ops, dest, post_must_defined, key)
    block.ops = ops


def insert_branch_inc_and_decrefs(
        block: BasicBlock,
        cache: BlockCache,
        blocks: List[BasicBlock],
        pre_live: 'AnalysisDict[Value]',
        pre_borrow: 'AnalysisDict[Value]',
        post_borrow: 'AnalysisDict[Value]',
        post_must_defined: 'AnalysisDict[Value]',
        env: Environment) -> None:
    """Insert inc_refs and/or dec_refs after a branch/goto.

    Add dec_refs for registers that become dead after a branch.
    Add inc_refs for registers that become unborrowed after a branch or goto.

    Branches are special as the true and false targets may have a different
    live and borrowed register sets. Add new blocks before the true/false
    target blocks that tweak reference counts.

    Example where we need to add an inc_ref:

      def f(a: int) -> None
          if a:
              a = 1
          return a  # a is borrowed if condition is false and unborrowed if true
    """
    prev_key = (block, len(block.ops) - 1)
    source_live_regs = pre_live[prev_key]
    source_borrowed = post_borrow[prev_key]
    source_defined = post_must_defined[prev_key]
    if isinstance(block.ops[-1], Branch):
        branch = block.ops[-1]
        # HAX: After we've checked a value against an error value, we must not
        #      touch its refcount since it will be a null pointer. The correct
        #      way to do this would be to perform data flow analysis on whether
        #      a value can be null (or is always null).
        if branch.op == Branch.IS_ERROR:
            omitted = {branch.left}
        else:
            omitted = set()
        true_decincs = (
            after_branch_decrefs(
                branch.true, pre_live, source_defined,
                source_borrowed, source_live_regs, env, omitted),
            after_branch_increfs(
                branch.true, pre_live, pre_borrow, source_borrowed, env))
        branch.true = add_block(true_decincs, cache, blocks, branch.true)

        false_decincs = (
            after_branch_decrefs(
                branch.false, pre_live, source_defined, source_borrowed,
                source_live_regs, env),
            after_branch_increfs(
                branch.false, pre_live, pre_borrow, source_borrowed, env))
        branch.false = add_block(false_decincs, cache, blocks, branch.false)
    elif isinstance(block.ops[-1], Goto):
        goto = block.ops[-1]
        new_decincs = ((), after_branch_increfs(
            goto.label, pre_live, pre_borrow, source_borrowed, env))
        goto.label = add_block(new_decincs, cache, blocks, goto.label)


def after_branch_decrefs(label: BasicBlock,
                         pre_live: 'AnalysisDict[Value]',
                         source_defined: Set[Value],
                         source_borrowed: Set[Value],
                         source_live_regs: Set[Value],
                         env: Environment,
                         omitted: Iterable[Value] = ()) -> Tuple[Tuple[Value, bool], ...]:
    # Registers live before the branch but not live (and not borrowed) at the
    # target must be decrefed on this edge; sorted by register index so the
    # generated order is deterministic.
    target_pre_live = pre_live[label, 0]
    decref = source_live_regs - target_pre_live - source_borrowed
    if decref:
        return tuple((reg, is_maybe_undefined(source_defined, reg))
                     for reg in sorted(decref, key=lambda r: env.indexes[r])
                     if reg.type.is_refcounted and reg not in omitted)
    return ()
def after_branch_increfs(label: BasicBlock, pre_live: 'AnalysisDict[Value]', pre_borrow: 'AnalysisDict[Value]', source_borrowed: Set[Value], env: Environment) -> Tuple[Value, ...]: target_pre_live = pre_live[label, 0] target_borrowed = pre_borrow[label, 0] incref = (source_borrowed - target_borrowed) & target_pre_live if incref: return tuple(reg for reg in sorted(incref, key=lambda r: env.indexes[r]) if reg.type.is_refcounted) return () def add_block(decincs: DecIncs, cache: BlockCache, blocks: List[BasicBlock], label: BasicBlock) -> BasicBlock: decs, incs = decincs if not decs and not incs: return label # TODO: be able to share *partial* results if (label, decincs) in cache: return cache[label, decincs] block = BasicBlock() blocks.append(block) block.ops.extend(DecRef(reg, is_xdec=xdec) for reg, xdec in decs) block.ops.extend(IncRef(reg) for reg in incs) block.ops.append(Goto(label)) cache[label, decincs] = block return block mypy-0.761/mypyc/rt_subtype.py0000644€tŠÔÚ€2›s®0000000363613576752246022643 0ustar jukkaDROPBOX\Domain Users00000000000000"""'Runtime subtype' check for RTypes. A type S is a runtime subtype of T if a value of type S can be used at runtime when a value of type T is expected without requiring any runtime conversions. For boxed types, runtime subtyping is the same as regular subtyping. Unboxed subtypes, on the other hand, are not runtime subtypes of object (since they require boxing to be used as an object), but short ints are runtime subtypes of int. Subtyping is used to determine whether an object can be in a particular place and runtime subtyping is used to determine whether a coercion is necessary first. 
""" from mypyc.ops import ( RType, RUnion, RInstance, RPrimitive, RTuple, RVoid, RTypeVisitor, is_int_rprimitive, is_short_int_rprimitive, ) from mypyc.subtype import is_subtype def is_runtime_subtype(left: RType, right: RType) -> bool: return left.accept(RTSubtypeVisitor(right)) class RTSubtypeVisitor(RTypeVisitor[bool]): """Is left a runtime subtype of right? A few special cases such as right being 'object' are handled in is_runtime_subtype and don't need to be covered here. """ def __init__(self, right: RType) -> None: self.right = right def visit_rinstance(self, left: RInstance) -> bool: return is_subtype(left, self.right) def visit_runion(self, left: RUnion) -> bool: return is_subtype(left, self.right) def visit_rprimitive(self, left: RPrimitive) -> bool: if is_short_int_rprimitive(left) and is_int_rprimitive(self.right): return True return left is self.right def visit_rtuple(self, left: RTuple) -> bool: if isinstance(self.right, RTuple): return len(self.right.types) == len(left.types) and all( is_runtime_subtype(t1, t2) for t1, t2 in zip(left.types, self.right.types)) return False def visit_rvoid(self, left: RVoid) -> bool: return isinstance(self.right, RVoid) mypy-0.761/mypyc/sametype.py0000644€tŠÔÚ€2›s®0000000370613576752246022270 0ustar jukkaDROPBOX\Domain Users00000000000000"""Same type check for RTypes.""" from mypyc.ops import ( RType, RTypeVisitor, RInstance, RPrimitive, RTuple, RVoid, FuncSignature, RUnion ) def is_same_type(a: RType, b: RType) -> bool: return a.accept(SameTypeVisitor(b)) def is_same_signature(a: FuncSignature, b: FuncSignature) -> bool: return (len(a.args) == len(b.args) and is_same_type(a.ret_type, b.ret_type) and all(is_same_type(t1.type, t2.type) and t1.name == t2.name for t1, t2 in zip(a.args, b.args))) def is_same_method_signature(a: FuncSignature, b: FuncSignature) -> bool: return (len(a.args) == len(b.args) and is_same_type(a.ret_type, b.ret_type) and all(is_same_type(t1.type, t2.type) and t1.name == t2.name for t1, t2 in 
zip(a.args[1:], b.args[1:]))) class SameTypeVisitor(RTypeVisitor[bool]): def __init__(self, right: RType) -> None: self.right = right def visit_rinstance(self, left: RInstance) -> bool: return isinstance(self.right, RInstance) and left.name == self.right.name def visit_runion(self, left: RUnion) -> bool: if isinstance(self.right, RUnion): items = list(self.right.items) for left_item in left.items: for j, right_item in enumerate(items): if is_same_type(left_item, right_item): del items[j] break else: return False return not items return False def visit_rprimitive(self, left: RPrimitive) -> bool: return left is self.right def visit_rtuple(self, left: RTuple) -> bool: return (isinstance(self.right, RTuple) and len(self.right.types) == len(left.types) and all(is_same_type(t1, t2) for t1, t2 in zip(left.types, self.right.types))) def visit_rvoid(self, left: RVoid) -> bool: return isinstance(self.right, RVoid) mypy-0.761/mypyc/subtype.py0000644€tŠÔÚ€2›s®0000000405513576752246022132 0ustar jukkaDROPBOX\Domain Users00000000000000"""Subtype check for RTypes.""" from mypyc.ops import ( RType, RInstance, RPrimitive, RTuple, RVoid, RTypeVisitor, RUnion, is_bool_rprimitive, is_int_rprimitive, is_tuple_rprimitive, is_short_int_rprimitive, is_object_rprimitive ) def is_subtype(left: RType, right: RType) -> bool: if is_object_rprimitive(right): return True elif isinstance(right, RUnion): if isinstance(left, RUnion): for left_item in left.items: if not any(is_subtype(left_item, right_item) for right_item in right.items): return False return True else: return any(is_subtype(left, item) for item in right.items) return left.accept(SubtypeVisitor(right)) class SubtypeVisitor(RTypeVisitor[bool]): """Is left a subtype of right? A few special cases such as right being 'object' are handled in is_subtype and don't need to be covered here. 
""" def __init__(self, right: RType) -> None: self.right = right def visit_rinstance(self, left: RInstance) -> bool: return isinstance(self.right, RInstance) and self.right.class_ir in left.class_ir.mro def visit_runion(self, left: RUnion) -> bool: return all(is_subtype(item, self.right) for item in left.items) def visit_rprimitive(self, left: RPrimitive) -> bool: if is_bool_rprimitive(left) and is_int_rprimitive(self.right): return True if is_short_int_rprimitive(left) and is_int_rprimitive(self.right): return True return left is self.right def visit_rtuple(self, left: RTuple) -> bool: if is_tuple_rprimitive(self.right): return True if isinstance(self.right, RTuple): return len(self.right.types) == len(left.types) and all( is_subtype(t1, t2) for t1, t2 in zip(left.types, self.right.types)) return False def visit_rvoid(self, left: RVoid) -> bool: return isinstance(self.right, RVoid) mypy-0.761/mypyc/test/0000755€tŠÔÚ€2›s®0000000000013576752267021043 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypyc/test/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246023137 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/mypyc/test/config.py0000644€tŠÔÚ€2›s®0000000040613576752246022657 0ustar jukkaDROPBOX\Domain Users00000000000000import os this_file_dir = os.path.dirname(os.path.realpath(__file__)) prefix = os.path.dirname(os.path.dirname(this_file_dir)) # Locations of test data files such as test case descriptions (.test). 
test_data_prefix = os.path.join(prefix, 'mypyc', 'test-data') mypy-0.761/mypyc/test/test_analysis.py0000644€tŠÔÚ€2›s®0000000613113576752246024275 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test runner for data-flow analysis test cases.""" import os.path from mypy.test.data import DataDrivenTestCase from mypy.test.config import test_temp_dir from mypy.errors import CompileError from mypyc.common import TOP_LEVEL_NAME from mypyc import analysis from mypyc import exceptions from mypyc.ops import format_func from mypyc.test.testutil import ( ICODE_GEN_BUILTINS, use_custom_builtins, MypycDataSuite, build_ir_for_single_file, assert_test_output, ) files = [ 'analysis.test' ] class TestAnalysis(MypycDataSuite): files = files base_path = test_temp_dir optional_out = True def run_case(self, testcase: DataDrivenTestCase) -> None: """Perform a data-flow analysis test case.""" with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): try: ir = build_ir_for_single_file(testcase.input) except CompileError as e: actual = e.messages else: actual = [] for fn in ir: if (fn.name == TOP_LEVEL_NAME and not testcase.name.endswith('_toplevel')): continue exceptions.insert_exception_handling(fn) actual.extend(format_func(fn)) cfg = analysis.get_cfg(fn.blocks) args = set(reg for reg, i in fn.env.indexes.items() if i < len(fn.args)) name = testcase.name if name.endswith('_MaybeDefined'): # Forward, maybe analysis_result = analysis.analyze_maybe_defined_regs(fn.blocks, cfg, args) elif name.endswith('_Liveness'): # Backward, maybe analysis_result = analysis.analyze_live_regs(fn.blocks, cfg) elif name.endswith('_MustDefined'): # Forward, must analysis_result = analysis.analyze_must_defined_regs( fn.blocks, cfg, args, regs=fn.env.regs()) elif name.endswith('_BorrowedArgument'): # Forward, must analysis_result = analysis.analyze_borrowed_arguments(fn.blocks, cfg, args) else: assert False, 'No recognized _AnalysisName suffix in test case' for key in 
sorted(analysis_result.before.keys(), key=lambda x: (x[0].label, x[1])): pre = ', '.join(sorted(reg.name for reg in analysis_result.before[key])) post = ', '.join(sorted(reg.name for reg in analysis_result.after[key])) actual.append('%-8s %-23s %s' % ((key[0].label, key[1]), '{%s}' % pre, '{%s}' % post)) assert_test_output(testcase, actual, 'Invalid source code output') mypy-0.761/mypyc/test/test_commandline.py0000644€tŠÔÚ€2›s®0000000463113576752246024743 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for invoking mypyc on the command line. These are slow -- do not add test cases unless you have a very good reason to do so. """ import glob import os import os.path import re import subprocess import sys from mypy.test.data import DataDrivenTestCase from mypy.test.config import test_temp_dir from mypy.test.helpers import normalize_error_messages from mypyc.test.testutil import MypycDataSuite, assert_test_output files = [ 'commandline.test', ] base_path = os.path.join(os.path.dirname(__file__), '..', '..') python3_path = sys.executable class TestCommandLine(MypycDataSuite): files = files base_path = test_temp_dir optional_out = True def run_case(self, testcase: DataDrivenTestCase) -> None: # Parse options from test case description (arguments must not have spaces) text = '\n'.join(testcase.input) m = re.search(r'# *cmd: *(.*)', text) assert m is not None, 'Test case missing "# cmd: " section' args = m.group(1).split() # Write main program to run (not compiled) program = '_%s.py' % testcase.name program_path = os.path.join(test_temp_dir, program) with open(program_path, 'w') as f: f.write(text) out = b'' try: # Compile program cmd = subprocess.run([sys.executable, os.path.join(base_path, 'scripts', 'mypyc')] + args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd='tmp') if 'ErrorOutput' in testcase.name or cmd.returncode != 0: out += cmd.stdout if cmd.returncode == 0: # Run main program out += subprocess.check_output( [python3_path, program], 
cwd='tmp') finally: suffix = 'pyd' if sys.platform == 'win32' else 'so' so_paths = glob.glob('tmp/**/*.{}'.format(suffix), recursive=True) for path in so_paths: os.remove(path) # Strip out 'tmp/' from error message paths in the testcase output, # due to a mismatch between this test and mypy's test suite. expected = [x.replace('tmp/', '') for x in testcase.output] # Verify output actual = normalize_error_messages(out.decode().splitlines()) assert_test_output(testcase, actual, 'Invalid output', expected=expected) mypy-0.761/mypyc/test/test_emit.py0000644€tŠÔÚ€2›s®0000000204313576752246023406 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from mypy.nodes import Var from mypyc.emit import Emitter, EmitterContext from mypyc.ops import BasicBlock, Environment, int_rprimitive from mypyc.namegen import NameGenerator class TestEmitter(unittest.TestCase): def setUp(self) -> None: self.env = Environment() self.n = self.env.add_local(Var('n'), int_rprimitive) self.context = EmitterContext(NameGenerator([['mod']])) self.emitter = Emitter(self.context, self.env) def test_label(self) -> None: assert self.emitter.label(BasicBlock(4)) == 'CPyL4' def test_reg(self) -> None: assert self.emitter.reg(self.n) == 'cpy_r_n' def test_emit_line(self) -> None: self.emitter.emit_line('line;') self.emitter.emit_line('a {') self.emitter.emit_line('f();') self.emitter.emit_line('}') assert self.emitter.fragments == ['line;\n', 'a {\n', ' f();\n', '}\n'] mypy-0.761/mypyc/test/test_emitfunc.py0000644€tŠÔÚ€2›s®0000003100513576752246024262 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from collections import OrderedDict from mypy.nodes import Var from mypy.test.helpers import assert_string_arrays_equal from mypyc.ops import ( Environment, BasicBlock, FuncIR, RuntimeArg, Goto, Return, LoadInt, Assign, IncRef, DecRef, Branch, Call, Unbox, Box, RTuple, TupleGet, GetAttr, PrimitiveOp, RegisterOp, FuncDecl, ClassIR, RInstance, SetAttr, Op, Value, int_rprimitive, 
bool_rprimitive, list_rprimitive, dict_rprimitive, object_rprimitive, FuncSignature, ) from mypyc.genops import compute_vtable from mypyc.emit import Emitter, EmitterContext from mypyc.emitfunc import generate_native_function, FunctionEmitterVisitor from mypyc.ops_primitive import binary_ops from mypyc.ops_misc import none_object_op, true_op, false_op from mypyc.ops_list import ( list_len_op, list_get_item_op, list_set_item_op, new_list_op, list_append_op ) from mypyc.ops_dict import new_dict_op, dict_update_op, dict_get_item_op, dict_set_item_op from mypyc.ops_int import int_neg_op from mypyc.subtype import is_subtype from mypyc.namegen import NameGenerator class TestFunctionEmitterVisitor(unittest.TestCase): def setUp(self) -> None: self.env = Environment() self.n = self.env.add_local(Var('n'), int_rprimitive) self.m = self.env.add_local(Var('m'), int_rprimitive) self.k = self.env.add_local(Var('k'), int_rprimitive) self.l = self.env.add_local(Var('l'), list_rprimitive) # noqa self.ll = self.env.add_local(Var('ll'), list_rprimitive) self.o = self.env.add_local(Var('o'), object_rprimitive) self.o2 = self.env.add_local(Var('o2'), object_rprimitive) self.d = self.env.add_local(Var('d'), dict_rprimitive) self.b = self.env.add_local(Var('b'), bool_rprimitive) self.t = self.env.add_local(Var('t'), RTuple([int_rprimitive, bool_rprimitive])) self.tt = self.env.add_local( Var('tt'), RTuple([RTuple([int_rprimitive, bool_rprimitive]), bool_rprimitive])) ir = ClassIR('A', 'mod') ir.attributes = OrderedDict([('x', bool_rprimitive), ('y', int_rprimitive)]) compute_vtable(ir) ir.mro = [ir] self.r = self.env.add_local(Var('r'), RInstance(ir)) self.context = EmitterContext(NameGenerator([['mod']])) self.emitter = Emitter(self.context, self.env) self.declarations = Emitter(self.context, self.env) self.visitor = FunctionEmitterVisitor(self.emitter, self.declarations, 'prog.py', 'prog') def test_goto(self) -> None: self.assert_emit(Goto(BasicBlock(2)), "goto CPyL2;") def 
test_return(self) -> None: self.assert_emit(Return(self.m), "return cpy_r_m;") def test_load_int(self) -> None: self.assert_emit(LoadInt(5), "cpy_r_r0 = 10;") def test_tuple_get(self) -> None: self.assert_emit(TupleGet(self.t, 1, 0), 'cpy_r_r0 = cpy_r_t.f1;') def test_load_None(self) -> None: self.assert_emit(PrimitiveOp([], none_object_op, 0), "cpy_r_r0 = Py_None;") def test_load_True(self) -> None: self.assert_emit(PrimitiveOp([], true_op, 0), "cpy_r_r0 = 1;") def test_load_False(self) -> None: self.assert_emit(PrimitiveOp([], false_op, 0), "cpy_r_r0 = 0;") def test_assign_int(self) -> None: self.assert_emit(Assign(self.m, self.n), "cpy_r_m = cpy_r_n;") def test_int_add(self) -> None: self.assert_emit_binary_op( '+', self.n, self.m, self.k, "cpy_r_r0 = CPyTagged_Add(cpy_r_m, cpy_r_k);") def test_int_sub(self) -> None: self.assert_emit_binary_op( '-', self.n, self.m, self.k, "cpy_r_r0 = CPyTagged_Subtract(cpy_r_m, cpy_r_k);") def test_list_repeat(self) -> None: self.assert_emit_binary_op( '*', self.ll, self.l, self.n, """Py_ssize_t __tmp1; __tmp1 = CPyTagged_AsSsize_t(cpy_r_n); if (__tmp1 == -1 && PyErr_Occurred()) CPyError_OutOfMemory(); cpy_r_r0 = PySequence_Repeat(cpy_r_l, __tmp1); """) def test_int_neg(self) -> None: self.assert_emit(PrimitiveOp([self.m], int_neg_op, 55), "cpy_r_r0 = CPyTagged_Negate(cpy_r_m);") def test_list_len(self) -> None: self.assert_emit(PrimitiveOp([self.l], list_len_op, 55), """Py_ssize_t __tmp1; __tmp1 = PyList_GET_SIZE(cpy_r_l); cpy_r_r0 = CPyTagged_ShortFromSsize_t(__tmp1); """) def test_branch(self) -> None: self.assert_emit(Branch(self.b, BasicBlock(8), BasicBlock(9), Branch.BOOL_EXPR), """if (cpy_r_b) { goto CPyL8; } else goto CPyL9; """) b = Branch(self.b, BasicBlock(8), BasicBlock(9), Branch.BOOL_EXPR) b.negated = True self.assert_emit(b, """if (!cpy_r_b) { goto CPyL8; } else goto CPyL9; """) def test_call(self) -> None: decl = FuncDecl('myfn', None, 'mod', FuncSignature([RuntimeArg('m', int_rprimitive)], int_rprimitive)) 
self.assert_emit(Call(decl, [self.m], 55), "cpy_r_r0 = CPyDef_myfn(cpy_r_m);") def test_call_two_args(self) -> None: decl = FuncDecl('myfn', None, 'mod', FuncSignature([RuntimeArg('m', int_rprimitive), RuntimeArg('n', int_rprimitive)], int_rprimitive)) self.assert_emit(Call(decl, [self.m, self.k], 55), "cpy_r_r0 = CPyDef_myfn(cpy_r_m, cpy_r_k);") def test_inc_ref(self) -> None: self.assert_emit(IncRef(self.m), "CPyTagged_IncRef(cpy_r_m);") def test_dec_ref(self) -> None: self.assert_emit(DecRef(self.m), "CPyTagged_DecRef(cpy_r_m);") def test_dec_ref_tuple(self) -> None: self.assert_emit(DecRef(self.t), 'CPyTagged_DecRef(cpy_r_t.f0);') def test_dec_ref_tuple_nested(self) -> None: self.assert_emit(DecRef(self.tt), 'CPyTagged_DecRef(cpy_r_tt.f0.f0);') def test_list_get_item(self) -> None: self.assert_emit(PrimitiveOp([self.m, self.k], list_get_item_op, 55), """cpy_r_r0 = CPyList_GetItem(cpy_r_m, cpy_r_k);""") def test_list_set_item(self) -> None: self.assert_emit(PrimitiveOp([self.l, self.n, self.o], list_set_item_op, 55), """cpy_r_r0 = CPyList_SetItem(cpy_r_l, cpy_r_n, cpy_r_o);""") def test_box(self) -> None: self.assert_emit(Box(self.n), """cpy_r_r0 = CPyTagged_StealAsObject(cpy_r_n);""") def test_unbox(self) -> None: self.assert_emit(Unbox(self.m, int_rprimitive, 55), """if (likely(PyLong_Check(cpy_r_m))) cpy_r_r0 = CPyTagged_FromObject(cpy_r_m); else { CPy_TypeError("int", cpy_r_m); cpy_r_r0 = CPY_INT_TAG; } """) def test_new_list(self) -> None: self.assert_emit(PrimitiveOp([self.n, self.m], new_list_op, 55), """cpy_r_r0 = PyList_New(2); if (likely(cpy_r_r0 != NULL)) { PyList_SET_ITEM(cpy_r_r0, 0, cpy_r_n); PyList_SET_ITEM(cpy_r_r0, 1, cpy_r_m); } """) def test_list_append(self) -> None: self.assert_emit(PrimitiveOp([self.l, self.o], list_append_op, 1), """cpy_r_r0 = PyList_Append(cpy_r_l, cpy_r_o) >= 0;""") def test_get_attr(self) -> None: self.assert_emit( GetAttr(self.r, 'y', 1), """cpy_r_r0 = native_A_gety((mod___AObject *)cpy_r_r); /* y */""") def 
test_set_attr(self) -> None: self.assert_emit( SetAttr(self.r, 'y', self.m, 1), "cpy_r_r0 = native_A_sety((mod___AObject *)cpy_r_r, cpy_r_m); /* y */") def test_dict_get_item(self) -> None: self.assert_emit(PrimitiveOp([self.d, self.o2], dict_get_item_op, 1), """cpy_r_r0 = CPyDict_GetItem(cpy_r_d, cpy_r_o2);""") def test_dict_set_item(self) -> None: self.assert_emit(PrimitiveOp([self.d, self.o, self.o2], dict_set_item_op, 1), """cpy_r_r0 = CPyDict_SetItem(cpy_r_d, cpy_r_o, cpy_r_o2) >= 0;""") def test_dict_update(self) -> None: self.assert_emit(PrimitiveOp([self.d, self.o], dict_update_op, 1), """cpy_r_r0 = CPyDict_Update(cpy_r_d, cpy_r_o) >= 0;""") def test_new_dict(self) -> None: self.assert_emit(PrimitiveOp([], new_dict_op, 1), """cpy_r_r0 = PyDict_New();""") def test_dict_contains(self) -> None: self.assert_emit_binary_op( 'in', self.b, self.o, self.d, """int __tmp1 = PyDict_Contains(cpy_r_d, cpy_r_o); if (__tmp1 < 0) cpy_r_r0 = 2; else cpy_r_r0 = __tmp1; """) def assert_emit(self, op: Op, expected: str) -> None: self.emitter.fragments = [] self.declarations.fragments = [] self.env.temp_index = 0 if isinstance(op, RegisterOp): self.env.add_op(op) op.accept(self.visitor) frags = self.declarations.fragments + self.emitter.fragments actual_lines = [line.strip(' ') for line in frags] assert all(line.endswith('\n') for line in actual_lines) actual_lines = [line.rstrip('\n') for line in actual_lines] expected_lines = expected.rstrip().split('\n') expected_lines = [line.strip(' ') for line in expected_lines] assert_string_arrays_equal(expected_lines, actual_lines, msg='Generated code unexpected') def assert_emit_binary_op(self, op: str, dest: Value, left: Value, right: Value, expected: str) -> None: ops = binary_ops[op] for desc in ops: if (is_subtype(left.type, desc.arg_types[0]) and is_subtype(right.type, desc.arg_types[1])): self.assert_emit(PrimitiveOp([left, right], desc, 55), expected) break else: assert False, 'Could not find matching op' class 
TestGenerateFunction(unittest.TestCase): def setUp(self) -> None: self.var = Var('arg') self.arg = RuntimeArg('arg', int_rprimitive) self.env = Environment() self.reg = self.env.add_local(self.var, int_rprimitive) self.block = BasicBlock(0) def test_simple(self) -> None: self.block.ops.append(Return(self.reg)) fn = FuncIR(FuncDecl('myfunc', None, 'mod', FuncSignature([self.arg], int_rprimitive)), [self.block], self.env) emitter = Emitter(EmitterContext(NameGenerator([['mod']]))) generate_native_function(fn, emitter, 'prog.py', 'prog') result = emitter.fragments assert_string_arrays_equal( [ 'CPyTagged CPyDef_myfunc(CPyTagged cpy_r_arg) {\n', 'CPyL0: ;\n', ' return cpy_r_arg;\n', '}\n', ], result, msg='Generated code invalid') def test_register(self) -> None: self.env.temp_index = 0 op = LoadInt(5) self.block.ops.append(op) self.env.add_op(op) fn = FuncIR(FuncDecl('myfunc', None, 'mod', FuncSignature([self.arg], list_rprimitive)), [self.block], self.env) emitter = Emitter(EmitterContext(NameGenerator([['mod']]))) generate_native_function(fn, emitter, 'prog.py', 'prog') result = emitter.fragments assert_string_arrays_equal( [ 'PyObject *CPyDef_myfunc(CPyTagged cpy_r_arg) {\n', ' CPyTagged cpy_r_r0;\n', 'CPyL0: ;\n', ' cpy_r_r0 = 10;\n', '}\n', ], result, msg='Generated code invalid') mypy-0.761/mypyc/test/test_emitwrapper.py0000644€tŠÔÚ€2›s®0000000401313576752246025006 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from typing import List from mypy.test.helpers import assert_string_arrays_equal from mypyc.emit import Emitter, EmitterContext from mypyc.emitwrapper import generate_arg_check from mypyc.ops import list_rprimitive, int_rprimitive from mypyc.namegen import NameGenerator class TestArgCheck(unittest.TestCase): def setUp(self) -> None: self.context = EmitterContext(NameGenerator([['mod']])) def test_check_list(self) -> None: emitter = Emitter(self.context) generate_arg_check('x', list_rprimitive, emitter, 'return NULL;') lines = 
emitter.fragments self.assert_lines([ 'PyObject *arg_x;', 'if (likely(PyList_Check(obj_x)))', ' arg_x = obj_x;', 'else {', ' CPy_TypeError("list", obj_x);', ' arg_x = NULL;', '}', 'if (arg_x == NULL) return NULL;', ], lines) def test_check_int(self) -> None: emitter = Emitter(self.context) generate_arg_check('x', int_rprimitive, emitter, 'return NULL;') generate_arg_check('y', int_rprimitive, emitter, 'return NULL;', True) lines = emitter.fragments self.assert_lines([ 'CPyTagged arg_x;', 'if (likely(PyLong_Check(obj_x)))', ' arg_x = CPyTagged_BorrowFromObject(obj_x);', 'else {', ' CPy_TypeError("int", obj_x);', ' return NULL;', '}', 'CPyTagged arg_y;', 'if (obj_y == NULL) {', ' arg_y = CPY_INT_TAG;', '} else if (likely(PyLong_Check(obj_y)))', ' arg_y = CPyTagged_BorrowFromObject(obj_y);', 'else {', ' CPy_TypeError("int", obj_y);', ' return NULL;', '}', ], lines) def assert_lines(self, expected: List[str], actual: List[str]) -> None: actual = [line.rstrip('\n') for line in actual] assert_string_arrays_equal(expected, actual, 'Invalid output') mypy-0.761/mypyc/test/test_exceptions.py0000644€tŠÔÚ€2›s®0000000341513576752246024635 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test runner for exception handling transform test cases. The transform inserts exception handling branch operations to IR. 
""" import os.path from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase from mypy.errors import CompileError from mypyc.common import TOP_LEVEL_NAME from mypyc.ops import format_func from mypyc.uninit import insert_uninit_checks from mypyc.exceptions import insert_exception_handling from mypyc.refcount import insert_ref_count_opcodes from mypyc.test.testutil import ( ICODE_GEN_BUILTINS, use_custom_builtins, MypycDataSuite, build_ir_for_single_file, assert_test_output, remove_comment_lines ) files = [ 'exceptions.test' ] class TestExceptionTransform(MypycDataSuite): files = files base_path = test_temp_dir def run_case(self, testcase: DataDrivenTestCase) -> None: """Perform a runtime checking transformation test case.""" with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): expected_output = remove_comment_lines(testcase.output) try: ir = build_ir_for_single_file(testcase.input) except CompileError as e: actual = e.messages else: actual = [] for fn in ir: if (fn.name == TOP_LEVEL_NAME and not testcase.name.endswith('_toplevel')): continue insert_uninit_checks(fn) insert_exception_handling(fn) insert_ref_count_opcodes(fn) actual.extend(format_func(fn)) assert_test_output(testcase, actual, 'Invalid source code output', expected_output) mypy-0.761/mypyc/test/test_external.py0000644€tŠÔÚ€2›s®0000000354513576752246024302 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases that run tests as subprocesses.""" from typing import List import os import subprocess import sys import unittest base_dir = os.path.join(os.path.dirname(__file__), '..', '..') class TestExternal(unittest.TestCase): # TODO: Get this to work on Windows. # (Or don't. It is probably not a good use of time.) @unittest.skipIf(sys.platform.startswith("win"), "rt tests don't work on windows") def test_c_unit_test(self) -> None: """Run C unit tests in a subprocess.""" # Build Google Test, the C++ framework we use for testing C code. 
# The source code for Google Test is copied to this repository. cppflags = [] # type: List[str] env = os.environ.copy() if sys.platform == 'darwin': cppflags += ['-mmacosx-version-min=10.10', '-stdlib=libc++'] env['CPPFLAGS'] = ' '.join(cppflags) subprocess.check_call( ['make', 'libgtest.a'], env=env, cwd=os.path.join(base_dir, 'mypyc', 'external', 'googletest', 'make')) # Build Python wrapper for C unit tests. env = os.environ.copy() env['CPPFLAGS'] = ' '.join(cppflags) status = subprocess.check_call( [sys.executable, 'setup.py', 'build_ext', '--inplace'], env=env, cwd=os.path.join(base_dir, 'mypyc', 'lib-rt')) # Run C unit tests. env = os.environ.copy() if 'GTEST_COLOR' not in os.environ: env['GTEST_COLOR'] = 'yes' # Use fancy colors status = subprocess.call([sys.executable, '-c', 'import sys, test_capi; sys.exit(test_capi.run_tests())'], env=env, cwd=os.path.join(base_dir, 'mypyc', 'lib-rt')) if status != 0: raise AssertionError("make test: C unit test failure") mypy-0.761/mypyc/test/test_genops.py0000644€tŠÔÚ€2›s®0000000362413576752246023751 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for IR generation.""" import os.path from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase from mypy.errors import CompileError from mypyc.common import TOP_LEVEL_NAME from mypyc.ops import format_func from mypyc.test.testutil import ( ICODE_GEN_BUILTINS, use_custom_builtins, MypycDataSuite, build_ir_for_single_file, assert_test_output, remove_comment_lines ) from mypyc.options import CompilerOptions files = [ 'genops-basic.test', 'genops-lists.test', 'genops-dict.test', 'genops-statements.test', 'genops-nested.test', 'genops-classes.test', 'genops-optional.test', 'genops-tuple.test', 'genops-any.test', 'genops-generics.test', 'genops-try.test', 'genops-set.test', 'genops-strip-asserts.test', ] class TestGenOps(MypycDataSuite): files = files base_path = test_temp_dir optional_out = True def run_case(self, testcase: 
DataDrivenTestCase) -> None: # Kind of hacky. Not sure if we need more structure here. options = CompilerOptions(strip_asserts='StripAssert' in testcase.name) """Perform a runtime checking transformation test case.""" with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): expected_output = remove_comment_lines(testcase.output) try: ir = build_ir_for_single_file(testcase.input, options) except CompileError as e: actual = e.messages else: actual = [] for fn in ir: if (fn.name == TOP_LEVEL_NAME and not testcase.name.endswith('_toplevel')): continue actual.extend(format_func(fn)) assert_test_output(testcase, actual, 'Invalid source code output', expected_output) mypy-0.761/mypyc/test/test_namegen.py0000644€tŠÔÚ€2›s®0000000352113576752246024064 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from mypyc.namegen import ( NameGenerator, exported_name, candidate_suffixes, make_module_translation_map ) class TestNameGen(unittest.TestCase): def test_candidate_suffixes(self) -> None: assert candidate_suffixes('foo') == ['', 'foo.'] assert candidate_suffixes('foo.bar') == ['', 'bar.', 'foo.bar.'] def test_exported_name(self) -> None: assert exported_name('foo') == 'foo' assert exported_name('foo.bar') == 'foo___bar' def test_make_module_translation_map(self) -> None: assert make_module_translation_map( ['foo', 'bar']) == {'foo': 'foo.', 'bar': 'bar.'} assert make_module_translation_map( ['foo.bar', 'foo.baz']) == {'foo.bar': 'bar.', 'foo.baz': 'baz.'} assert make_module_translation_map( ['zar', 'foo.bar', 'foo.baz']) == {'foo.bar': 'bar.', 'foo.baz': 'baz.', 'zar': 'zar.'} assert make_module_translation_map( ['foo.bar', 'fu.bar', 'foo.baz']) == {'foo.bar': 'foo.bar.', 'fu.bar': 'fu.bar.', 'foo.baz': 'baz.'} def test_name_generator(self) -> None: g = NameGenerator([['foo', 'foo.zar']]) assert g.private_name('foo', 'f') == 'foo___f' assert g.private_name('foo', 'C.x.y') == 'foo___C___x___y' assert g.private_name('foo', 'C.x.y') == 
'foo___C___x___y' assert g.private_name('foo.zar', 'C.x.y') == 'zar___C___x___y' assert g.private_name('foo', 'C.x_y') == 'foo___C___x_y' assert g.private_name('foo', 'C_x_y') == 'foo___C_x_y' assert g.private_name('foo', 'C_x_y') == 'foo___C_x_y' assert g.private_name('foo', '___') == 'foo______3_' mypy-0.761/mypyc/test/test_refcount.py0000644€tŠÔÚ€2›s®0000000317513576752246024304 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test runner for reference count opcode insertion transform test cases. The transform inserts needed reference count increment/decrement operations to IR. """ import os.path from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase from mypy.errors import CompileError from mypyc.common import TOP_LEVEL_NAME from mypyc.ops import format_func from mypyc.refcount import insert_ref_count_opcodes from mypyc.test.testutil import ( ICODE_GEN_BUILTINS, use_custom_builtins, MypycDataSuite, build_ir_for_single_file, assert_test_output, remove_comment_lines ) files = [ 'refcount.test' ] class TestRefCountTransform(MypycDataSuite): files = files base_path = test_temp_dir optional_out = True def run_case(self, testcase: DataDrivenTestCase) -> None: """Perform a runtime checking transformation test case.""" with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): expected_output = remove_comment_lines(testcase.output) try: ir = build_ir_for_single_file(testcase.input) except CompileError as e: actual = e.messages else: actual = [] for fn in ir: if (fn.name == TOP_LEVEL_NAME and not testcase.name.endswith('_toplevel')): continue insert_ref_count_opcodes(fn) actual.extend(format_func(fn)) assert_test_output(testcase, actual, 'Invalid source code output', expected_output) mypy-0.761/mypyc/test/test_run.py0000644€tŠÔÚ€2›s®0000002730613576752246023265 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for building an C extension and running it.""" import ast import glob import os.path 
import platform import re import subprocess import contextlib import shutil import sys from typing import Any, Iterator, List, cast from mypy import build from mypy.test.data import DataDrivenTestCase, UpdateFile from mypy.test.config import test_temp_dir from mypy.errors import CompileError from mypy.options import Options from mypy.test.helpers import copy_and_fudge_mtime, assert_module_equivalence from mypyc import emitmodule from mypyc.options import CompilerOptions from mypyc.errors import Errors from mypyc.build import construct_groups from mypyc.test.testutil import ( ICODE_GEN_BUILTINS, TESTUTIL_PATH, use_custom_builtins, MypycDataSuite, assert_test_output, show_c, fudge_dir_mtimes, ) from mypyc.test.test_serialization import check_serialization_roundtrip files = [ 'run-functions.test', 'run.test', 'run-classes.test', 'run-traits.test', 'run-multimodule.test', 'run-bench.test', 'run-mypy-sim.test', ] setup_format = """\ from setuptools import setup from mypyc.build import mypycify setup(name='test_run_output', ext_modules=mypycify({}, separate={}, skip_cgen_input={!r}, strip_asserts=False, multi_file={}), ) """ WORKDIR = 'build' def run_setup(script_name: str, script_args: List[str]) -> bool: """Run a setup script in a somewhat controlled environment. This is adapted from code in distutils and our goal here is that is faster to not need to spin up a python interpreter to run it. We had to fork it because the real run_setup swallows errors and KeyboardInterrupt with no way to recover them (!). The real version has some extra features that we removed since we weren't using them. Returns whether the setup succeeded. 
""" save_argv = sys.argv.copy() g = {'__file__': script_name} try: try: sys.argv[0] = script_name sys.argv[1:] = script_args with open(script_name, 'rb') as f: exec(f.read(), g) finally: sys.argv = save_argv except SystemExit as e: # typeshed reports code as being an int but that is wrong code = cast(Any, e).code # distutils converts KeyboardInterrupt into a SystemExit with # "interrupted" as the argument. Convert it back so that # pytest will exit instead of just failing the test. if code == "interrupted": raise KeyboardInterrupt return code == 0 or code is None return True @contextlib.contextmanager def chdir_manager(target: str) -> Iterator[None]: dir = os.getcwd() os.chdir(target) try: yield finally: os.chdir(dir) class TestRun(MypycDataSuite): """Test cases that build a C extension and run code.""" files = files base_path = test_temp_dir optional_out = True multi_file = False separate = False def run_case(self, testcase: DataDrivenTestCase) -> None: # setup.py wants to be run from the root directory of the package, which we accommodate # by chdiring into tmp/ with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase), ( chdir_manager('tmp')): self.run_case_inner(testcase) def run_case_inner(self, testcase: DataDrivenTestCase) -> None: os.mkdir(WORKDIR) text = '\n'.join(testcase.input) with open('native.py', 'w', encoding='utf-8') as f: f.write(text) with open('interpreted.py', 'w', encoding='utf-8') as f: f.write(text) shutil.copyfile(TESTUTIL_PATH, 'testutil.py') step = 1 self.run_case_step(testcase, step) steps = testcase.find_steps() if steps == [[]]: steps = [] for operations in steps: # To make sure that any new changes get picked up as being # new by distutils, shift the mtime of all of the # generated artifacts back by a second. 
fudge_dir_mtimes(WORKDIR, -1) step += 1 with chdir_manager('..'): for op in operations: if isinstance(op, UpdateFile): # Modify/create file copy_and_fudge_mtime(op.source_path, op.target_path) else: # Delete file try: os.remove(op.path) except FileNotFoundError: pass self.run_case_step(testcase, step) def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> None: bench = testcase.config.getoption('--bench', False) and 'Benchmark' in testcase.name options = Options() options.use_builtins_fixtures = True options.show_traceback = True options.strict_optional = True # N.B: We try to (and ought to!) run with the current # version of python, since we are going to link and run # against the current version of python. # But a lot of the tests use type annotations so we can't say it is 3.5. options.python_version = max(sys.version_info[:2], (3, 6)) options.export_types = True options.preserve_asts = True options.incremental = self.separate # Avoid checking modules/packages named 'unchecked', to provide a way # to test interacting with code we don't have types for. options.per_module_options['unchecked.*'] = {'follow_imports': 'error'} source = build.BuildSource('native.py', 'native', None) sources = [source] module_names = ['native'] module_paths = ['native.py'] # Hard code another module name to compile in the same compilation unit. 
to_delete = [] for fn, text in testcase.files: fn = os.path.relpath(fn, test_temp_dir) if os.path.basename(fn).startswith('other') and fn.endswith('.py'): name = fn.split('.')[0].replace(os.sep, '.') module_names.append(name) sources.append(build.BuildSource(fn, name, None)) to_delete.append(fn) module_paths.append(fn) shutil.copyfile(fn, os.path.join(os.path.dirname(fn), name + '_interpreted.py')) for source in sources: options.per_module_options.setdefault(source.module, {})['mypyc'] = True separate = (self.get_separate('\n'.join(testcase.input), incremental_step) if self.separate else False) groups = construct_groups(sources, separate, len(module_names) > 1) try: compiler_options = CompilerOptions(multi_file=self.multi_file, separate=self.separate) result = emitmodule.parse_and_typecheck( sources=sources, options=options, compiler_options=compiler_options, groups=groups, alt_lib_path='.') errors = Errors() ir, cfiles = emitmodule.compile_modules_to_c( result, compiler_options=compiler_options, errors=errors, groups=groups, ) if errors.num_errors: errors.flush_errors() assert False, "Compile error" except CompileError as e: for line in e.messages: print(line) assert False, 'Compile error' # Check that serialization works on this IR. (Only on the first # step because the the returned ir only includes updated code.) 
if incremental_step == 1: check_serialization_roundtrip(ir) setup_file = os.path.abspath(os.path.join(WORKDIR, 'setup.py')) # We pass the C file information to the build script via setup.py unfortunately with open(setup_file, 'w', encoding='utf-8') as f: f.write(setup_format.format(module_paths, separate, cfiles, self.multi_file)) if not run_setup(setup_file, ['build_ext', '--inplace']): if testcase.config.getoption('--mypyc-showc'): show_c(cfiles) assert False, "Compilation failed" # Assert that an output file got created suffix = 'pyd' if sys.platform == 'win32' else 'so' assert glob.glob('native.*.{}'.format(suffix)) driver_path = 'driver.py' env = os.environ.copy() env['MYPYC_RUN_BENCH'] = '1' if bench else '0' # XXX: This is an ugly hack. if 'MYPYC_RUN_GDB' in os.environ: if platform.system() == 'Darwin': subprocess.check_call(['lldb', '--', sys.executable, driver_path], env=env) assert False, ("Test can't pass in lldb mode. (And remember to pass -s to " "pytest)") elif platform.system() == 'Linux': subprocess.check_call(['gdb', '--args', sys.executable, driver_path], env=env) assert False, ("Test can't pass in gdb mode. (And remember to pass -s to " "pytest)") else: assert False, 'Unsupported OS' proc = subprocess.Popen([sys.executable, driver_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) output = proc.communicate()[0].decode('utf8') outlines = output.splitlines() if testcase.config.getoption('--mypyc-showc'): show_c(cfiles) if proc.returncode != 0: print() print('*** Exit status: %d' % proc.returncode) # Verify output. 
if bench: print('Test output:') print(output) else: if incremental_step == 1: msg = 'Invalid output' expected = testcase.output else: msg = 'Invalid output (step {})'.format(incremental_step) expected = testcase.output2.get(incremental_step, []) assert_test_output(testcase, outlines, msg, expected) if incremental_step > 1 and options.incremental: suffix = '' if incremental_step == 2 else str(incremental_step - 1) expected_rechecked = testcase.expected_rechecked_modules.get(incremental_step - 1) if expected_rechecked is not None: assert_module_equivalence( 'rechecked' + suffix, expected_rechecked, result.manager.rechecked_modules) expected_stale = testcase.expected_stale_modules.get(incremental_step - 1) if expected_stale is not None: assert_module_equivalence( 'stale' + suffix, expected_stale, result.manager.stale_modules) assert proc.returncode == 0 def get_separate(self, program_text: str, incremental_step: int) -> Any: template = r'# separate{}: (\[.*\])$' m = re.search(template.format(incremental_step), program_text, flags=re.MULTILINE) if not m: m = re.search(template.format(''), program_text, flags=re.MULTILINE) if m: return ast.literal_eval(m.group(1)) else: return True # Run the main multi-module tests in multi-file compliation mode class TestRunMultiFile(TestRun): multi_file = True test_name_suffix = '_multi' files = [ 'run-multimodule.test', 'run-mypy-sim.test', ] # Run the main multi-module tests in separate compliation mode class TestRunSeparate(TestRun): separate = True test_name_suffix = '_separate' files = [ 'run-multimodule.test', 'run-mypy-sim.test', ] mypy-0.761/mypyc/test/test_serialization.py0000644€tŠÔÚ€2›s®0000000740313576752246025332 0ustar jukkaDROPBOX\Domain Users00000000000000"""Functions to check that serialization round-tripped properly.""" # This file is named test_serialization.py even though it doesn't # contain its own tests so that pytest will rewrite the asserts... 
from typing import Any, Dict, Tuple from collections import OrderedDict from collections.abc import Iterable from mypyc.ops import ( deserialize_modules, DeserMaps, ModuleIR, FuncDecl, FuncIR, ClassIR, FuncSignature, RType ) from mypyc.sametype import is_same_type, is_same_signature def get_dict(x: Any) -> Dict[str, Any]: if hasattr(x, '__mypyc_attrs__'): return {k: getattr(x, k) for k in x.__mypyc_attrs__ if hasattr(x, k)} else: return dict(x.__dict__) def get_function_dict(x: FuncIR) -> Dict[str, Any]: """Get a dict of function attributes safe to compare across serialization""" d = get_dict(x) d.pop('blocks', None) d.pop('env', None) return d def assert_blobs_same(x: Any, y: Any, trail: Tuple[Any, ...]) -> None: """Compare two blobs of IR as best we can. FuncDecls, FuncIRs, and ClassIRs are compared by fullname to avoid infinite recursion. (More detailed comparisions should be done manually.) Types and signatures are compared using mypyc.sametype. Containers are compared recursively. Anything else is compared with ==. The `trail` argument is used in error messages. 
""" assert type(x) is type(y), ("Type mismatch at {}".format(trail), type(x), type(y)) if isinstance(x, (FuncDecl, FuncIR, ClassIR)): assert x.fullname == y.fullname, "Name mismatch at {}".format(trail) elif isinstance(x, OrderedDict): assert len(x.keys()) == len(y.keys()), "Keys mismatch at {}".format(trail) for (xk, xv), (yk, yv) in zip(x.items(), y.items()): assert_blobs_same(xk, yk, trail + ("keys",)) assert_blobs_same(xv, yv, trail + (xk,)) elif isinstance(x, dict): assert x.keys() == y.keys(), "Keys mismatch at {}".format(trail) for k in x.keys(): assert_blobs_same(x[k], y[k], trail + (k,)) elif isinstance(x, Iterable) and not isinstance(x, str): for i, (xv, yv) in enumerate(zip(x, y)): assert_blobs_same(xv, yv, trail + (i,)) elif isinstance(x, RType): assert is_same_type(x, y), "RType mismatch at {}".format(trail) elif isinstance(x, FuncSignature): assert is_same_signature(x, y), "Signature mismatch at {}".format(trail) else: assert x == y, "Value mismatch at {}".format(trail) def assert_modules_same(ir1: ModuleIR, ir2: ModuleIR) -> None: """Assert that two module IRs are the same (*). * Or rather, as much as we care about preserving across serialization. We drop the actual IR bodies of functions but try to preserve everything else. 
""" assert ir1.fullname == ir2.fullname assert ir1.imports == ir2.imports for cls1, cls2 in zip(ir1.classes, ir2.classes): assert_blobs_same(get_dict(cls1), get_dict(cls2), (ir1.fullname, cls1.fullname)) for fn1, fn2 in zip(ir1.functions, ir2.functions): assert_blobs_same(get_function_dict(fn1), get_function_dict(fn2), (ir1.fullname, fn1.fullname)) assert_blobs_same(get_dict(fn1.decl), get_dict(fn2.decl), (ir1.fullname, fn1.fullname)) assert_blobs_same(ir1.final_names, ir2.final_names, (ir1.fullname, 'final_names')) def check_serialization_roundtrip(irs: Dict[str, ModuleIR]) -> None: """Check that we can serialize modules out and deserialize them to the same thing.""" serialized = {k: ir.serialize() for k, ir in irs.items()} ctx = DeserMaps({}, {}) irs2 = deserialize_modules(serialized, ctx) assert irs.keys() == irs2.keys() for k in irs: assert_modules_same(irs[k], irs2[k]) mypy-0.761/mypyc/test/test_tuplename.py0000644€tŠÔÚ€2›s®0000000165713576752246024454 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from mypyc.ops import ( RTuple, object_rprimitive, int_rprimitive, bool_rprimitive, list_rprimitive, RInstance, RUnion, ClassIR, ) class TestTupleNames(unittest.TestCase): def setUp(self) -> None: self.inst_a = RInstance(ClassIR('A', '__main__')) self.inst_b = RInstance(ClassIR('B', '__main__')) def test_names(self) -> None: assert RTuple([int_rprimitive, int_rprimitive]).unique_id == "T2II" assert RTuple([list_rprimitive, object_rprimitive, self.inst_a]).unique_id == "T3OOO" assert RTuple([list_rprimitive, object_rprimitive, self.inst_b]).unique_id == "T3OOO" assert RTuple([]).unique_id == "T0" assert RTuple([RTuple([]), RTuple([int_rprimitive, int_rprimitive])]).unique_id == "T2T0T2II" assert RTuple([bool_rprimitive, RUnion([bool_rprimitive, int_rprimitive])]).unique_id == "T2CO" mypy-0.761/mypyc/test/testutil.py0000644€tŠÔÚ€2›s®0000001521613576752246023274 0ustar jukkaDROPBOX\Domain Users00000000000000"""Helpers for writing tests""" import 
contextlib import os import os.path import re import shutil from typing import List, Callable, Iterator, Optional, Tuple from mypy import build from mypy.errors import CompileError from mypy.options import Options from mypy.test.data import DataSuite, DataDrivenTestCase from mypy.test.config import test_temp_dir from mypy.test.helpers import assert_string_arrays_equal from mypyc import genops from mypyc.options import CompilerOptions from mypyc.ops import FuncIR from mypyc.errors import Errors from mypyc.test.config import test_data_prefix # The builtins stub used during icode generation test cases. ICODE_GEN_BUILTINS = os.path.join(test_data_prefix, 'fixtures/ir.py') # The testutil support library TESTUTIL_PATH = os.path.join(test_data_prefix, 'fixtures/testutil.py') class MypycDataSuite(DataSuite): # Need to list no files, since this will be picked up as a suite of tests files = [] # type: List[str] data_prefix = test_data_prefix def builtins_wrapper(func: Callable[[DataDrivenTestCase], None], path: str) -> Callable[[DataDrivenTestCase], None]: """Decorate a function that implements a data-driven test case to copy an alternative builtins module implementation in place before performing the test case. Clean up after executing the test case. """ return lambda testcase: perform_test(func, path, testcase) @contextlib.contextmanager def use_custom_builtins(builtins_path: str, testcase: DataDrivenTestCase) -> Iterator[None]: for path, _ in testcase.files: if os.path.basename(path) == 'builtins.pyi': default_builtins = False break else: # Use default builtins. builtins = os.path.abspath(os.path.join(test_temp_dir, 'builtins.pyi')) shutil.copyfile(builtins_path, builtins) default_builtins = True # Actually peform the test case. yield None if default_builtins: # Clean up. 
os.remove(builtins) def perform_test(func: Callable[[DataDrivenTestCase], None], builtins_path: str, testcase: DataDrivenTestCase) -> None: for path, _ in testcase.files: if os.path.basename(path) == 'builtins.py': default_builtins = False break else: # Use default builtins. builtins = os.path.join(test_temp_dir, 'builtins.py') shutil.copyfile(builtins_path, builtins) default_builtins = True # Actually peform the test case. func(testcase) if default_builtins: # Clean up. os.remove(builtins) def build_ir_for_single_file(input_lines: List[str], compiler_options: Optional[CompilerOptions] = None) -> List[FuncIR]: program_text = '\n'.join(input_lines) compiler_options = compiler_options or CompilerOptions() options = Options() options.show_traceback = True options.use_builtins_fixtures = True options.strict_optional = True options.python_version = (3, 6) options.export_types = True options.preserve_asts = True options.per_module_options['__main__'] = {'mypyc': True} source = build.BuildSource('main', '__main__', program_text) # Construct input as a single single. # Parse and type check the input program. result = build.build(sources=[source], options=options, alt_lib_path=test_temp_dir) if result.errors: raise CompileError(result.errors) errors = Errors() modules = genops.build_ir( [result.files['__main__']], result.graph, result.types, genops.Mapper({'__main__': None}), compiler_options, errors) assert errors.num_errors == 0 module = list(modules.values())[0] return module.functions def update_testcase_output(testcase: DataDrivenTestCase, output: List[str]) -> None: # TODO: backport this to mypy assert testcase.old_cwd is not None, "test was not properly set up" testcase_path = os.path.join(testcase.old_cwd, testcase.file) with open(testcase_path) as f: data_lines = f.read().splitlines() # We can't rely on the test line numbers to *find* the test, since # we might fix multiple tests in a run. So find it by the case # header. 
Give up if there are multiple tests with the same name. test_slug = '[case {}]'.format(testcase.name) if data_lines.count(test_slug) != 1: return start_idx = data_lines.index(test_slug) stop_idx = start_idx + 11 while stop_idx < len(data_lines) and not data_lines[stop_idx].startswith('[case '): stop_idx += 1 test = data_lines[start_idx:stop_idx] out_start = test.index('[out]') test[out_start + 1:] = output data_lines[start_idx:stop_idx] = test + [''] data = '\n'.join(data_lines) with open(testcase_path, 'w') as f: print(data, file=f) def assert_test_output(testcase: DataDrivenTestCase, actual: List[str], message: str, expected: Optional[List[str]] = None) -> None: expected_output = expected if expected is not None else testcase.output if expected_output != actual and testcase.config.getoption('--update-data', False): update_testcase_output(testcase, actual) assert_string_arrays_equal( expected_output, actual, '{} ({}, line {})'.format(message, testcase.file, testcase.line)) def get_func_names(expected: List[str]) -> List[str]: res = [] for s in expected: m = re.match(r'def ([_a-zA-Z0-9.*$]+)\(', s) if m: res.append(m.group(1)) return res def remove_comment_lines(a: List[str]) -> List[str]: """Return a copy of array with comments removed. Lines starting with '--' (but not with '---') are removed. 
""" r = [] for s in a: if s.strip().startswith('--') and not s.strip().startswith('---'): pass else: r.append(s) return r def print_with_line_numbers(s: str) -> None: lines = s.splitlines() for i, line in enumerate(lines): print('%-4d %s' % (i + 1, line)) def heading(text: str) -> None: print('=' * 20 + ' ' + text + ' ' + '=' * 20) def show_c(cfiles: List[List[Tuple[str, str]]]) -> None: heading('Generated C') for group in cfiles: for cfile, ctext in group: print('== {} =='.format(cfile)) print_with_line_numbers(ctext) heading('End C') def fudge_dir_mtimes(dir: str, delta: int) -> None: for dirpath, _, filenames in os.walk(dir): for name in filenames: path = os.path.join(dirpath, name) new_mtime = os.stat(path).st_mtime + delta os.utime(path, times=(new_mtime, new_mtime)) mypy-0.761/mypyc/uninit.py0000644€tŠÔÚ€2›s®0000000534013576752246021743 0ustar jukkaDROPBOX\Domain Users00000000000000"""Insert checks for uninitialized values.""" from typing import List from mypyc.analysis import ( get_cfg, cleanup_cfg, analyze_must_defined_regs, AnalysisDict ) from mypyc.ops import ( FuncIR, BasicBlock, Branch, Value, RaiseStandardError, Unreachable, Environment, Register, ) def insert_uninit_checks(ir: FuncIR) -> None: # Remove dead blocks from the CFG, which helps avoid spurious # checks due to unused error handling blocks. cleanup_cfg(ir.blocks) cfg = get_cfg(ir.blocks) args = set(reg for reg in ir.env.regs() if ir.env.indexes[reg] < len(ir.args)) must_defined = analyze_must_defined_regs(ir.blocks, cfg, args, ir.env.regs()) ir.blocks = split_blocks_at_uninits(ir.env, ir.blocks, must_defined.before) def split_blocks_at_uninits(env: Environment, blocks: List[BasicBlock], pre_must_defined: 'AnalysisDict[Value]') -> List[BasicBlock]: new_blocks = [] # type: List[BasicBlock] # First split blocks on ops that may raise. 
for block in blocks: ops = block.ops block.ops = [] cur_block = block new_blocks.append(cur_block) for i, op in enumerate(ops): defined = pre_must_defined[block, i] for src in op.unique_sources(): # If a register operand is not guarenteed to be # initialized is an operand to something other than a # check that it is defined, insert a check. if (isinstance(src, Register) and src not in defined and not (isinstance(op, Branch) and op.op == Branch.IS_ERROR)): new_block, error_block = BasicBlock(), BasicBlock() new_block.error_handler = error_block.error_handler = cur_block.error_handler new_blocks += [error_block, new_block] env.vars_needing_init.add(src) cur_block.ops.append(Branch(src, true_label=error_block, false_label=new_block, op=Branch.IS_ERROR, line=op.line)) raise_std = RaiseStandardError( RaiseStandardError.UNBOUND_LOCAL_ERROR, "local variable '{}' referenced before assignment".format(src.name), op.line) env.add_op(raise_std) error_block.ops.append(raise_std) error_block.ops.append(Unreachable()) cur_block = new_block cur_block.ops.append(op) return new_blocks mypy-0.761/scripts/0000755€tŠÔÚ€2›s®0000000000013576752267020412 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/scripts/dumpmodule.py0000644€tŠÔÚ€2›s®0000001021613576752246023134 0ustar jukkaDROPBOX\Domain Users00000000000000"""Dump the runtime structure of a module as JSON. This is used for testing stubs. """ import importlib import inspect import json import sys import types from types import FunctionType from typing import Optional, Dict, Any, Set, Callable from typing_extensions import Final DumpNode = Dict[str, Any] def dump_module(id: str) -> None: m = importlib.import_module(id) data = module_to_json(m) print(json.dumps(data, ensure_ascii=True, indent=4, sort_keys=True)) def module_to_json(m: object) -> Dict[str, DumpNode]: result = {} # type: Dict[str, DumpNode] for name, value in m.__dict__.items(): # Filter out some useless attributes. 
if name in ('__file__', '__doc__', '__name__', '__builtins__', '__package__'): continue if name == '__all__': result[name] = {'type': 'list', 'values': sorted(value)} else: result[name] = dump_value(value) try: line = inspect.getsourcelines(getattr(m, name))[1] # type: Optional[int] except (TypeError, OSError): line = None result[name]['line'] = line return result def dump_value(value: object, depth: int = 0) -> DumpNode: if depth > 10: # TODO: Callers don't handle this case. return 'max_recursion_depth_exceeded' # type: ignore if isinstance(value, type): return dump_class(value, depth + 1) if isinstance(value, FunctionType): return dump_function(value) if callable(value): return {'type': 'callable'} # TODO more information if isinstance(value, types.ModuleType): return {'type': 'module'} # TODO module name if inspect.isdatadescriptor(value): return {'type': 'datadescriptor'} if inspect.ismemberdescriptor(value): return {'type': 'memberdescriptor'} return dump_simple(value) def dump_simple(value: object) -> DumpNode: if type(value) in (int, bool, float, str, bytes, list, set, dict, tuple): return {'type': type(value).__name__} if value is None: return {'type': 'None'} if value is inspect.Parameter.empty: return {'type': None} # 'None' and None: Ruh-Roh return {'type': 'unknown'} def dump_class(value: type, depth: int) -> DumpNode: return { 'type': 'class', 'attributes': dump_attrs(value, depth), } special_methods = [ '__init__', '__str__', '__int__', '__float__', '__bool__', '__contains__', '__iter__', ] # type: Final # Change to return a dict def dump_attrs(d: type, depth: int) -> DumpNode: result = {} seen = set() # type: Set[str] try: mro = d.mro() except TypeError: mro = [d] for base in mro: v = vars(base) for name, value in v.items(): if name not in seen: result[name] = dump_value(value, depth + 1) seen.add(name) for m in special_methods: if hasattr(d, m) and m not in seen: result[m] = dump_value(getattr(d, m), depth + 1) return result kind_map = { 
inspect.Parameter.POSITIONAL_ONLY: 'POS_ONLY', inspect.Parameter.POSITIONAL_OR_KEYWORD: 'POS_OR_KW', inspect.Parameter.VAR_POSITIONAL: 'VAR_POS', inspect.Parameter.KEYWORD_ONLY: 'KW_ONLY', inspect.Parameter.VAR_KEYWORD: 'VAR_KW', } # type: Final def param_kind(p: inspect.Parameter) -> str: s = kind_map[p.kind] if p.default != inspect.Parameter.empty: assert s in ('POS_ONLY', 'POS_OR_KW', 'KW_ONLY') s += '_OPT' return s def dump_function(value: FunctionType) -> DumpNode: try: sig = inspect.signature(value) except ValueError: # The signature call sometimes fails for some reason. return {'type': 'invalid_signature'} params = list(sig.parameters.items()) return { 'type': 'function', 'args': [(name, param_kind(p), dump_simple(p.default)) for name, p in params], } if __name__ == '__main__': import sys if len(sys.argv) != 2: sys.exit('usage: dumpmodule.py module-name') dump_module(sys.argv[1]) mypy-0.761/scripts/find_type.py0000755€tŠÔÚ€2›s®0000000663713576752246022761 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 # Usage: find_type.py FILENAME START_LINE START_COL END_LINE END_COL MYPY_AND_ARGS # Prints out the type of the expression in the given location if the mypy run # succeeds cleanly. Otherwise, prints out the errors encountered. # Note: this only works on expressions, and not assignment targets. # Note: MYPY_AND_ARGS is should be the remainder of argv, not a single # spaces-included argument. # NOTE: Line numbers are 1-based; column numbers are 0-based. # # # Example vim usage: # function RevealType() # " Set this to the command you use to run mypy on your project. Include the mypy invocation. # let mypycmd = 'python3 -m mypy mypy --incremental' # let [startline, startcol] = getpos("'<")[1:2] # let [endline, endcol] = getpos("'>")[1:2] # " Convert to 0-based column offsets # let startcol = startcol - 1 # " Change this line to point to the find_type.py script. # execute '!python3 /path/to/mypy/scripts/find_type.py % ' . startline . ' ' . 
startcol . ' ' . endline . ' ' . endcol . ' ' . mypycmd # endfunction # vnoremap t :call RevealType() # # For an Emacs example, see misc/macs.el. from typing import List, Tuple, Optional import subprocess import sys import tempfile import os.path import re REVEAL_TYPE_START = 'reveal_type(' REVEAL_TYPE_END = ')' def update_line(line: str, s: str, pos: int) -> str: return line[:pos] + s + line[pos:] def run_mypy(mypy_and_args: List[str], filename: str, tmp_name: str) -> str: proc = subprocess.run(mypy_and_args + ['--shadow-file', filename, tmp_name], stdout=subprocess.PIPE) assert(isinstance(proc.stdout, bytes)) # Guaranteed to be true because we called run with universal_newlines=False return proc.stdout.decode(encoding="utf-8") def get_revealed_type(line: str, relevant_file: str, relevant_line: int) -> Optional[str]: m = re.match(r"(.+?):(\d+): note: Revealed type is '(.*)'$", line) if (m and int(m.group(2)) == relevant_line and os.path.samefile(relevant_file, m.group(1))): return m.group(3) else: return None def process_output(output: str, filename: str, start_line: int) -> Tuple[Optional[str], bool]: error_found = False for line in output.splitlines(): t = get_revealed_type(line, filename, start_line) if t: return t, error_found elif 'error:' in line: error_found = True return None, True # finding no reveal_type is an error def main(): filename, start_line_str, start_col_str, end_line_str, end_col_str, *mypy_and_args = sys.argv[1:] start_line = int(start_line_str) start_col = int(start_col_str) end_line = int(end_line_str) end_col = int(end_col_str) with open(filename, 'r') as f: lines = f.readlines() lines[end_line - 1] = update_line(lines[end_line - 1], REVEAL_TYPE_END, end_col) # insert after end_col lines[start_line - 1] = update_line(lines[start_line - 1], REVEAL_TYPE_START, start_col) with tempfile.NamedTemporaryFile(mode='w', prefix='mypy') as tmp_f: tmp_f.writelines(lines) tmp_f.flush() output = run_mypy(mypy_and_args, filename, tmp_f.name) 
revealed_type, error = process_output(output, filename, start_line) if revealed_type: print(revealed_type) if error: print(output) exit(int(error)) if __name__ == "__main__": main() mypy-0.761/scripts/mypyc0000755€tŠÔÚ€2›s®0000000261613576752246021503 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Mypyc command-line tool. Usage: $ mypyc foo.py [...] $ python3 -c 'import foo' # Uses compiled 'foo' This is just a thin wrapper that generates a setup.py file that uses mypycify, suitable for prototyping and testing. """ import os import os.path import subprocess import sys import tempfile import time base_path = os.path.join(os.path.dirname(__file__), '..') setup_format = """\ from distutils.core import setup from mypyc.build import mypycify setup(name='mypyc_output', ext_modules=mypycify({}, {}), ) """ def main() -> None: build_dir = 'build' # can this be overridden?? try: os.mkdir(build_dir) except FileExistsError: pass opt_level = os.getenv("MYPYC_OPT_LEVEL", '') setup_file = os.path.join(build_dir, 'setup.py') with open(setup_file, 'w') as f: f.write(setup_format.format(sys.argv[1:], opt_level)) # We don't use run_setup (like we do in the test suite) because it throws # away the error code from distutils, and we don't care about the slight # performance loss here. env = os.environ.copy() base_path = os.path.join(os.path.dirname(__file__), '..') env['PYTHONPATH'] = base_path + os.pathsep + env.get('PYTHONPATH', '') cmd = subprocess.run([sys.executable, setup_file, 'build_ext', '--inplace'], env=env) sys.exit(cmd.returncode) if __name__ == '__main__': main() mypy-0.761/scripts/stubtest.py0000644€tŠÔÚ€2›s®0000001723113576752246022642 0ustar jukkaDROPBOX\Domain Users00000000000000"""Tests for stubs. Verify that various things in stubs are consistent with how things behave at runtime. 
""" import importlib import sys from typing import Dict, Any, List, Iterator, NamedTuple, Optional, Mapping, Tuple from typing_extensions import Type, Final from collections import defaultdict from functools import singledispatch from mypy import build from mypy.build import default_data_dir from mypy.modulefinder import compute_search_paths, FindModuleCache from mypy.errors import CompileError from mypy import nodes from mypy.options import Options from dumpmodule import module_to_json, DumpNode # TODO: email.contentmanager has a symbol table with a None node. # This seems like it should not be. skip = { '_importlib_modulespec', '_subprocess', 'distutils.command.bdist_msi', 'distutils.command.bdist_packager', 'msvcrt', 'wsgiref.types', 'mypy_extensions', 'unittest.mock', # mock.call infinite loops on inspect.getsourcelines # https://bugs.python.org/issue25532 # TODO: can we filter only call? } # type: Final messages = { 'not_in_runtime': ('{error.stub_type} "{error.name}" defined at line ' ' {error.line} in stub but is not defined at runtime'), 'not_in_stub': ('{error.module_type} "{error.name}" defined at line' ' {error.line} at runtime but is not defined in stub'), 'no_stubs': 'could not find typeshed {error.name}', 'inconsistent': ('"{error.name}" is {error.stub_type} in stub but' ' {error.module_type} at runtime'), } # type: Final Error = NamedTuple('Error', ( ('module', str), ('name', str), ('error_type', str), ('line', Optional[int]), ('stub_type', Optional[Type[nodes.Node]]), ('module_type', Optional[str]), )) ErrorParts = Tuple[ List[str], str, Optional[int], Optional[Type[nodes.Node]], Optional[str], ] def test_stub(options: Options, find_module_cache: FindModuleCache, name: str) -> Iterator[Error]: stubs = { mod: stub for mod, stub in build_stubs(options, find_module_cache, name).items() if (mod == name or mod.startswith(name + '.')) and mod not in skip } for mod, stub in stubs.items(): instance = dump_module(mod) for identifiers, error_type, line, 
stub_type, module_type in verify(stub, instance): yield Error(mod, '.'.join(identifiers), error_type, line, stub_type, module_type) @singledispatch def verify(node: nodes.Node, module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: raise TypeError('unknown mypy node ' + str(node)) @verify.register(nodes.MypyFile) def verify_mypyfile(stub: nodes.MypyFile, instance: Optional[DumpNode]) -> Iterator[ErrorParts]: if instance is None: yield [], 'not_in_runtime', stub.line, type(stub), None elif instance['type'] != 'file': yield [], 'inconsistent', stub.line, type(stub), instance['type'] else: stub_children = defaultdict(lambda: None, stub.names) # type: Mapping[str, Optional[nodes.SymbolTableNode]] instance_children = defaultdict(lambda: None, instance['names']) # TODO: I would rather not filter public children here. # For example, what if the checkersurfaces an inconsistency # in the typing of a private child public_nodes = { name: (stub_children[name], instance_children[name]) for name in set(stub_children) | set(instance_children) if not name.startswith('_') and (stub_children[name] is None or stub_children[name].module_public) # type: ignore } for node, (stub_child, instance_child) in public_nodes.items(): stub_child = getattr(stub_child, 'node', None) for identifiers, error_type, line, stub_type, module_type in verify(stub_child, instance_child): yield ([node] + identifiers, error_type, line, stub_type, module_type) @verify.register(nodes.TypeInfo) def verify_typeinfo(stub: nodes.TypeInfo, instance: Optional[DumpNode]) -> Iterator[ErrorParts]: if not instance: yield [], 'not_in_runtime', stub.line, type(stub), None elif instance['type'] != 'class': yield [], 'inconsistent', stub.line, type(stub), instance['type'] else: for attr, attr_node in stub.names.items(): subdump = instance['attributes'].get(attr, None) for identifiers, error_type, line, stub_type, module_type in verify(attr_node.node, subdump): yield ([attr] + identifiers, error_type, line, stub_type, 
module_type) @verify.register(nodes.FuncItem) def verify_funcitem(stub: nodes.FuncItem, instance: Optional[DumpNode]) -> Iterator[ErrorParts]: if not instance: yield [], 'not_in_runtime', stub.line, type(stub), None elif 'type' not in instance or instance['type'] not in ('function', 'callable'): yield [], 'inconsistent', stub.line, type(stub), instance['type'] # TODO check arguments and return value @verify.register(type(None)) def verify_none(stub: None, instance: Optional[DumpNode]) -> Iterator[ErrorParts]: if instance is None: yield [], 'not_in_stub', None, None, None else: yield [], 'not_in_stub', instance['line'], None, instance['type'] @verify.register(nodes.Var) def verify_var(node: nodes.Var, module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: if False: yield None # Need to check if types are inconsistent. #if 'type' not in dump or dump['type'] != node.node.type: # import ipdb; ipdb.set_trace() # yield name, 'inconsistent', node.node.line, shed_type, module_type @verify.register(nodes.OverloadedFuncDef) def verify_overloadedfuncdef(node: nodes.OverloadedFuncDef, module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: # Should check types of the union of the overloaded types. 
if False: yield None @verify.register(nodes.TypeVarExpr) def verify_typevarexpr(node: nodes.TypeVarExpr, module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: if False: yield None @verify.register(nodes.Decorator) def verify_decorator(node: nodes.Decorator, module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: if False: yield None def dump_module(name: str) -> DumpNode: mod = importlib.import_module(name) return {'type': 'file', 'names': module_to_json(mod)} def build_stubs(options: Options, find_module_cache: FindModuleCache, mod: str) -> Dict[str, nodes.MypyFile]: sources = find_module_cache.find_modules_recursive(mod) try: res = build.build(sources=sources, options=options) messages = res.errors except CompileError as error: messages = error.messages if messages: for msg in messages: print(msg) sys.exit(1) return res.files def main(args: List[str]) -> Iterator[Error]: if len(args) == 1: print('must provide at least one module to test') sys.exit(1) else: modules = args[1:] options = Options() options.python_version = (3, 6) data_dir = default_data_dir() search_path = compute_search_paths([], options, data_dir) find_module_cache = FindModuleCache(search_path) for module in modules: for error in test_stub(options, find_module_cache, module): yield error if __name__ == '__main__': for err in main(sys.argv): print(messages[err.error_type].format(error=err)) mypy-0.761/setup.cfg0000644€tŠÔÚ€2›s®0000000125013576752267020542 0ustar jukkaDROPBOX\Domain Users00000000000000[flake8] max-line-length = 99 exclude = build, bin, lib, include, @*, env, docs/build, out, .venv, .mypy_cache, .git, .cache, docs/source/conf.py, misc/*, scripts/*, test-data/*, mypyc/test-data/*, mypy/typeshed/*, .tox .eggs .Python extend-ignore = E128,W601,E701,E704,E402,B3,B006,B007,B011,F821,F811 [coverage:run] branch = true source = mypy parallel = true [coverage:report] show_missing = true skip_covered = True omit = mypy/test/* exclude_lines = \#\s*pragma: no cover ^\s*raise 
AssertionError\b ^\s*raise NotImplementedError\b ^\s*return NotImplemented\b ^\s*raise$ ^if __name__ == ['"]__main__['"]:$ [egg_info] tag_build = tag_date = 0 mypy-0.761/setup.py0000644€tŠÔÚ€2›s®0000001572513576752246020444 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python import glob import os import os.path import sys if sys.version_info < (3, 5, 0): sys.stderr.write("ERROR: You need Python 3.5 or later to use mypy.\n") exit(1) # we'll import stuff from the source tree, let's ensure is on the sys path sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # This requires setuptools when building; setuptools is not needed # when installing from a wheel file (though it is still neeeded for # alternative forms of installing, as suggested by README.md). from setuptools import setup from setuptools.command.build_py import build_py from mypy.version import __version__ as version from mypy import git git.verify_git_integrity_or_abort(".") description = 'Optional static typing for Python' long_description = ''' Mypy -- Optional Static Typing for Python ========================================= Add type annotations to your Python programs, and use mypy to type check them. Mypy is essentially a Python linter on steroids, and it can catch many programming errors by analyzing your program, without actually having to run it. Mypy has a powerful type system with features such as type inference, gradual typing, generics and union types. '''.lstrip() def find_package_data(base, globs, root='mypy'): """Find all interesting data files, for setup(package_data=) Arguments: root: The directory to search in. globs: A list of glob patterns to accept files. 
""" rv_dirs = [root for root, dirs, files in os.walk(base)] rv = [] for rv_dir in rv_dirs: files = [] for pat in globs: files += glob.glob(os.path.join(rv_dir, pat)) if not files: continue rv.extend([os.path.relpath(f, root) for f in files]) return rv class CustomPythonBuild(build_py): def pin_version(self): path = os.path.join(self.build_lib, 'mypy') self.mkpath(path) with open(os.path.join(path, 'version.py'), 'w') as stream: stream.write('__version__ = "{}"\n'.format(version)) def run(self): self.execute(self.pin_version, ()) build_py.run(self) cmdclass = {'build_py': CustomPythonBuild} package_data = ['py.typed'] package_data += find_package_data(os.path.join('mypy', 'typeshed'), ['*.py', '*.pyi']) package_data += find_package_data(os.path.join('mypy', 'xml'), ['*.xsd', '*.xslt', '*.css']) USE_MYPYC = False # To compile with mypyc, a mypyc checkout must be present on the PYTHONPATH if len(sys.argv) > 1 and sys.argv[1] == '--use-mypyc': sys.argv.pop(1) USE_MYPYC = True if os.getenv('MYPY_USE_MYPYC', None) == '1': USE_MYPYC = True if USE_MYPYC: MYPYC_BLACKLIST = tuple(os.path.join('mypy', x) for x in ( # Need to be runnable as scripts '__main__.py', 'sitepkgs.py', os.path.join('dmypy', '__main__.py'), # Uses __getattr__/__setattr__ 'split_namespace.py', # Lies to mypy about code reachability 'bogus_type.py', # We don't populate __file__ properly at the top level or something? # Also I think there would be problems with how we generate version.py. 
'version.py', )) + ( # Don't want to grab this accidentally os.path.join('mypyc', 'lib-rt', 'setup.py'), ) everything = ( [os.path.join('mypy', x) for x in find_package_data('mypy', ['*.py'])] + [os.path.join('mypyc', x) for x in find_package_data('mypyc', ['*.py'], root='mypyc')]) # Start with all the .py files all_real_pys = [x for x in everything if not x.startswith(os.path.join('mypy', 'typeshed') + os.sep)] # Strip out anything in our blacklist mypyc_targets = [x for x in all_real_pys if x not in MYPYC_BLACKLIST] # Strip out any test code mypyc_targets = [x for x in mypyc_targets if not x.startswith((os.path.join('mypy', 'test') + os.sep, os.path.join('mypyc', 'test') + os.sep, os.path.join('mypyc', 'test-data') + os.sep, ))] # ... and add back in the one test module we need mypyc_targets.append(os.path.join('mypy', 'test', 'visitors.py')) # The targets come out of file system apis in an unspecified # order. Sort them so that the mypyc output is deterministic. mypyc_targets.sort() use_other_mypyc = os.getenv('ALTERNATE_MYPYC_PATH', None) if use_other_mypyc: # This bit is super unfortunate: we want to use a different # mypy/mypyc version, but we've already imported parts, so we # remove the modules that we've imported already, which will # let the right versions be imported by mypyc. del sys.modules['mypy'] del sys.modules['mypy.version'] del sys.modules['mypy.git'] sys.path.insert(0, use_other_mypyc) from mypyc.build import mypycify opt_level = os.getenv('MYPYC_OPT_LEVEL', '3') force_multifile = os.getenv('MYPYC_MULTI_FILE', '') == '1' ext_modules = mypycify( mypyc_targets + ['--config-file=mypy_bootstrap.ini'], opt_level=opt_level, # Use multi-file compliation mode on windows because without it # our Appveyor builds run out of memory sometimes. 
multi_file=sys.platform == 'win32' or force_multifile, ) else: ext_modules = [] classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Topic :: Software Development', ] setup(name='mypy', version=version, description=description, long_description=long_description, author='Jukka Lehtosalo', author_email='jukka.lehtosalo@iki.fi', url='http://www.mypy-lang.org/', license='MIT License', py_modules=[], ext_modules=ext_modules, packages=[ 'mypy', 'mypy.test', 'mypy.server', 'mypy.plugins', 'mypy.dmypy', 'mypyc', 'mypyc.test', ], package_data={'mypy': package_data}, scripts=['scripts/mypyc'], entry_points={'console_scripts': ['mypy=mypy.__main__:console_entry', 'stubgen=mypy.stubgen:main', 'dmypy=mypy.dmypy.client:console_entry', ]}, classifiers=classifiers, cmdclass=cmdclass, # When changing this, also update mypy-requirements.txt. install_requires=['typed_ast >= 1.4.0, < 1.5.0', 'typing_extensions>=3.7.4', 'mypy_extensions >= 0.4.3, < 0.5.0', ], # Same here. 
extras_require={'dmypy': 'psutil >= 4.0'}, python_requires=">=3.5", include_package_data=True, ) mypy-0.761/test-data/0000755€tŠÔÚ€2›s®0000000000013576752267020611 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/.flake80000644€tŠÔÚ€2›s®0000000172413576752246021765 0ustar jukkaDROPBOX\Domain Users00000000000000# Some PEP8 deviations are considered irrelevant to stub files: # (error counts as of 2016-12-19) # 17381 E704 multiple statements on one line (def) # 11840 E301 expected 1 blank line # 7467 E302 expected 2 blank lines # 1772 E501 line too long # 1487 F401 imported but unused # 1248 E701 multiple statements on one line (colon) # 427 F811 redefinition # 356 E305 expected 2 blank lines # Nice-to-haves ignored for now # 152 E128 continuation line under-indented for visual indent # 43 E127 continuation line over-indented for visual indent [flake8] ignore = F401, F811, E127, E128, E301, E302, E305, E501, E701, E704, B303 # We are checking with Python 3 but many of the stubs are Python 2 stubs. # A nice future improvement would be to provide separate .flake8 # configurations for Python 2 and Python 3 files. 
builtins = StandardError,apply,basestring,buffer,cmp,coerce,execfile,file,intern,long,raw_input,reduce,reload,unichr,unicode,xrange exclude = .venv*,@* mypy-0.761/test-data/packages/0000755€tŠÔÚ€2›s®0000000000013576752266022366 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/0000755€tŠÔÚ€2›s®0000000000013576752267025044 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg1/0000755€tŠÔÚ€2›s®0000000000013576752266026513 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg1/nsx/0000755€tŠÔÚ€2›s®0000000000013576752266027323 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg1/nsx/a/0000755€tŠÔÚ€2›s®0000000000013576752267027544 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg1/nsx/a/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246031640 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg2/0000755€tŠÔÚ€2›s®0000000000013576752266026514 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg2/nsx/0000755€tŠÔÚ€2›s®0000000000013576752266027324 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg2/nsx/b/0000755€tŠÔÚ€2›s®0000000000013576752267027546 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg2/nsx/b/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246031642 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg3/0000755€tŠÔÚ€2›s®0000000000013576752266026515 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg3/nsx/0000755€tŠÔÚ€2›s®0000000000013576752266027325 5ustar jukkaDROPBOX\Domain 
Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg3/nsx/c/0000755€tŠÔÚ€2›s®0000000000013576752267027550 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsx-pkg3/nsx/c/c.py0000644€tŠÔÚ€2›s®0000000000013576752246030327 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg1/0000755€tŠÔÚ€2›s®0000000000013576752266026514 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg1/nsy/0000755€tŠÔÚ€2›s®0000000000013576752266027325 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg1/nsy/a/0000755€tŠÔÚ€2›s®0000000000013576752267027546 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg1/nsy/a/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246031642 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg1/nsy/a/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752246032013 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg2/0000755€tŠÔÚ€2›s®0000000000013576752266026515 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg2/nsy/0000755€tŠÔÚ€2›s®0000000000013576752267027327 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg2/nsy/b/0000755€tŠÔÚ€2›s®0000000000013576752267027550 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg2/nsy/b/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246031644 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg2/nsy/b.pyi0000644€tŠÔÚ€2›s®0000000000013576752246030256 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg2/nsy/c.py0000644€tŠÔÚ€2›s®0000000000013576752246030106 0ustar jukkaDROPBOX\Domain 
Users00000000000000mypy-0.761/test-data/packages/modulefinder/nsy-pkg2/nsy/c.pyi0000644€tŠÔÚ€2›s®0000000000013576752246030257 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/pkg1/0000755€tŠÔÚ€2›s®0000000000013576752267025706 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/pkg1/a.py0000644€tŠÔÚ€2›s®0000000000013576752246026463 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/pkg2/0000755€tŠÔÚ€2›s®0000000000013576752266025706 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/pkg2/b/0000755€tŠÔÚ€2›s®0000000000013576752267026130 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/pkg2/b/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246030224 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/modulefinder/readme.txt0000644€tŠÔÚ€2›s®0000000021413576752246027034 0ustar jukkaDROPBOX\Domain Users00000000000000Samples for testing modulefinder.FindModuleCache. Contains three packages for the `nsx` namespace, and two packages providing `a` and `b`. 
mypy-0.761/test-data/packages/typedpkg/0000755€tŠÔÚ€2›s®0000000000013576752267024216 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg/setup.py0000644€tŠÔÚ€2›s®0000000050713576752246025727 0ustar jukkaDROPBOX\Domain Users00000000000000""" This setup file installs packages to test mypy's PEP 561 implementation """ from setuptools import setup setup( name='typedpkg', author="The mypy team", version='0.1', package_data={'typedpkg': ['py.typed']}, packages=['typedpkg', 'typedpkg.pkg'], include_package_data=True, zip_safe=False, ) mypy-0.761/test-data/packages/typedpkg/typedpkg/0000755€tŠÔÚ€2›s®0000000000013576752267026045 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg/typedpkg/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246030141 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg/typedpkg/dne.py0000644€tŠÔÚ€2›s®0000000000013576752246027150 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg/typedpkg/pkg/0000755€tŠÔÚ€2›s®0000000000013576752267026626 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg/typedpkg/pkg/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246030722 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg/typedpkg/pkg/aaa.py0000644€tŠÔÚ€2›s®0000000006013576752246027713 0ustar jukkaDROPBOX\Domain Users00000000000000def af(a: str) -> str: return a + " nested" mypy-0.761/test-data/packages/typedpkg/typedpkg/pkg/py.typed0000644€tŠÔÚ€2›s®0000000000013576752246030310 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg/typedpkg/py.typed0000644€tŠÔÚ€2›s®0000000000013576752246027527 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg/typedpkg/sample.py0000644€tŠÔÚ€2›s®0000000022313576752246027672 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, Tuple def ex(a): # type: 
(Iterable[str]) -> Tuple[str, ...] """Example typed package.""" return list(a) mypy-0.761/test-data/packages/typedpkg-stubs/0000755€tŠÔÚ€2›s®0000000000013576752267025354 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg-stubs/setup.py0000644€tŠÔÚ€2›s®0000000047013576752246027064 0ustar jukkaDROPBOX\Domain Users00000000000000""" This setup file installs packages to test mypy's PEP 561 implementation """ from distutils.core import setup setup( name='typedpkg-stubs', author="The mypy team", version='0.1', package_data={'typedpkg-stubs': ['sample.pyi', '__init__.pyi', 'py.typed']}, packages=['typedpkg-stubs'], ) mypy-0.761/test-data/packages/typedpkg-stubs/typedpkg-stubs/0000755€tŠÔÚ€2›s®0000000000013576752267030341 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg-stubs/typedpkg-stubs/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013576752246032606 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg-stubs/typedpkg-stubs/py.typed0000644€tŠÔÚ€2›s®0000000001013576752246032024 0ustar jukkaDROPBOX\Domain Users00000000000000partial mypy-0.761/test-data/packages/typedpkg-stubs/typedpkg-stubs/sample.pyi0000644€tŠÔÚ€2›s®0000000011513576752246032337 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, List def ex(a: Iterable[str]) -> List[str]: ... 
mypy-0.761/test-data/packages/typedpkg_ns/0000755€tŠÔÚ€2›s®0000000000013576752267024716 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg_ns/setup.py0000644€tŠÔÚ€2›s®0000000037413576752246026431 0ustar jukkaDROPBOX\Domain Users00000000000000from setuptools import setup, find_packages setup( name='typedpkg_namespace.alpha', version='1.0.0', packages=find_packages(), namespace_packages=['typedpkg_ns'], zip_safe=False, package_data={'typedpkg_ns.ns': ['py.typed']} ) mypy-0.761/test-data/packages/typedpkg_ns/typedpkg_ns/0000755€tŠÔÚ€2›s®0000000000013576752267027245 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg_ns/typedpkg_ns/__init__.py0000644€tŠÔÚ€2›s®0000000011013576752246031343 0ustar jukkaDROPBOX\Domain Users00000000000000# namespace pkg __import__("pkg_resources").declare_namespace(__name__) mypy-0.761/test-data/packages/typedpkg_ns/typedpkg_ns/ns/0000755€tŠÔÚ€2›s®0000000000013576752267027665 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg_ns/typedpkg_ns/ns/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246031761 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/packages/typedpkg_ns/typedpkg_ns/ns/bbb.py0000644€tŠÔÚ€2›s®0000000005213576752246030756 0ustar jukkaDROPBOX\Domain Users00000000000000def bf(a: bool) -> bool: return not a mypy-0.761/test-data/packages/typedpkg_ns/typedpkg_ns/ns/py.typed0000644€tŠÔÚ€2›s®0000000000013576752246031347 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/samples/0000755€tŠÔÚ€2›s®0000000000013576752267022255 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/samples/bottles.py0000644€tŠÔÚ€2›s®0000000046413576752246024304 0ustar jukkaDROPBOX\Domain Users00000000000000import typing REFRAIN = ''' %d bottles of beer on the wall, %d bottles of beer, take one down, pass it around, %d bottles of beer on the wall! 
''' bottles_of_beer = 99 while bottles_of_beer > 1: print(REFRAIN % (bottles_of_beer, bottles_of_beer, bottles_of_beer - 1)) bottles_of_beer -= 1 mypy-0.761/test-data/samples/class.py0000644€tŠÔÚ€2›s®0000000066513576752246023740 0ustar jukkaDROPBOX\Domain Users00000000000000import typing class BankAccount(object): def __init__(self, initial_balance: int = 0) -> None: self.balance = initial_balance def deposit(self, amount: int) -> None: self.balance += amount def withdraw(self, amount: int) -> None: self.balance -= amount def overdrawn(self) -> bool: return self.balance < 0 my_account = BankAccount(15) my_account.withdraw(5) print(my_account.balance) mypy-0.761/test-data/samples/cmdline.py0000644€tŠÔÚ€2›s®0000000033613576752246024241 0ustar jukkaDROPBOX\Domain Users00000000000000# This program adds up integers in the command line import sys import typing try: total = sum(int(arg) for arg in sys.argv[1:]) print('sum =', total) except ValueError: print('Please supply integer arguments') mypy-0.761/test-data/samples/crawl.py0000644€tŠÔÚ€2›s®0000007657513576752246023760 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3.4 """A simple web crawler.""" # This is cloned from /examples/crawl.py, # with type annotations added (PEP 484). # # TODO: convert to `async def` + `await` (PEP 492). 
import argparse import asyncio import cgi from http.client import BadStatusLine import logging import re import sys import time import urllib.parse from typing import Any, Generator, IO, Optional, Sequence, Set, Tuple, List, Dict ARGS = argparse.ArgumentParser(description="Web crawler") ARGS.add_argument( '--iocp', action='store_true', dest='iocp', default=False, help='Use IOCP event loop (Windows only)') ARGS.add_argument( '--select', action='store_true', dest='select', default=False, help='Use Select event loop instead of default') ARGS.add_argument( 'roots', nargs='*', default=[], help='Root URL (may be repeated)') ARGS.add_argument( '--max_redirect', action='store', type=int, metavar='N', default=10, help='Limit redirection chains (for 301, 302 etc.)') ARGS.add_argument( '--max_tries', action='store', type=int, metavar='N', default=4, help='Limit retries on network errors') ARGS.add_argument( '--max_tasks', action='store', type=int, metavar='N', default=100, help='Limit concurrent connections') ARGS.add_argument( '--max_pool', action='store', type=int, metavar='N', default=100, help='Limit connection pool size') ARGS.add_argument( '--exclude', action='store', metavar='REGEX', help='Exclude matching URLs') ARGS.add_argument( '--strict', action='store_true', default=True, help='Strict host matching (default)') ARGS.add_argument( '--lenient', action='store_false', dest='strict', default=False, help='Lenient host matching') ARGS.add_argument( '-v', '--verbose', action='count', dest='level', default=1, help='Verbose logging (repeat for more verbose)') ARGS.add_argument( '-q', '--quiet', action='store_const', const=0, dest='level', default=1, help='Quiet logging (opposite of --verbose)') ESCAPES = [('quot', '"'), ('gt', '>'), ('lt', '<'), ('amp', '&') # Must be last. ] def unescape(url: str) -> str: """Turn & into &, and so on. This is the inverse of cgi.escape(). 
""" for name, char in ESCAPES: url = url.replace('&' + name + ';', char) return url def fix_url(url: str) -> str: """Prefix a schema-less URL with http://.""" if '://' not in url: url = 'http://' + url return url class Logger: def __init__(self, level: int) -> None: self.level = level def _log(self, n: int, args: Sequence[Any]) -> None: if self.level >= n: print(*args, file=sys.stderr, flush=True) def log(self, n: int, *args: Any) -> None: self._log(n, args) def __call__(self, n: int, *args: Any) -> None: self._log(n, args) KeyTuple = Tuple[str, int, bool] class ConnectionPool: """A connection pool. To open a connection, use reserve(). To recycle it, use unreserve(). The pool is mostly just a mapping from (host, port, ssl) tuples to lists of Connections. The currently active connections are *not* in the data structure; get_connection() takes the connection out, and recycle_connection() puts it back in. To recycle a connection, call conn.close(recycle=True). There are limits to both the overall pool and the per-key pool. """ def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None: self.log = log self.max_pool = max_pool # Overall limit. self.max_tasks = max_tasks # Per-key limit. 
self.loop = asyncio.get_event_loop() self.connections = {} # type: Dict[KeyTuple, List[Connection]] self.queue = [] # type: List[Connection] def close(self) -> None: """Close all connections available for reuse.""" for conns in self.connections.values(): for conn in conns: conn.close() self.connections.clear() self.queue.clear() @asyncio.coroutine def get_connection(self, host: str, port: int, ssl: bool) -> Generator[Any, None, 'Connection']: """Create or reuse a connection.""" port = port or (443 if ssl else 80) try: ipaddrs = yield from self.loop.getaddrinfo(host, port) except Exception as exc: self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port)) raise self.log(1, '* %s resolves to %s' % (host, ', '.join(ip[4][0] for ip in ipaddrs))) # Look for a reusable connection. for _1, _2, _3, _4, (h, p, *_5) in ipaddrs: key = h, p, ssl conn = None conns = self.connections.get(key) while conns: conn = conns.pop(0) self.queue.remove(conn) if not conns: del self.connections[key] if conn.stale(): self.log(1, 'closing stale connection for', key) conn.close() # Just in case. else: self.log(1, '* Reusing pooled connection', key, 'FD =', conn.fileno()) return conn # Create a new connection. conn = Connection(self.log, self, host, port, ssl) yield from conn.connect() self.log(1, '* New connection', conn.key, 'FD =', conn.fileno()) return conn def recycle_connection(self, conn: 'Connection') -> None: """Make a connection available for reuse. This also prunes the pool if it exceeds the size limits. """ if conn.stale(): conn.close() return key = conn.key conns = self.connections.setdefault(key, []) conns.append(conn) self.queue.append(conn) if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool: return # Prune the queue. # Close stale connections for this key first. 
stale = [conn for conn in conns if conn.stale()] if stale: for conn in stale: conns.remove(conn) self.queue.remove(conn) self.log(1, 'closing stale connection for', key) conn.close() if not conns: del self.connections[key] # Close oldest connection(s) for this key if limit reached. while len(conns) > self.max_tasks: conn = conns.pop(0) self.queue.remove(conn) self.log(1, 'closing oldest connection for', key) conn.close() if len(self.queue) <= self.max_pool: return # Close overall stale connections. stale = [conn for conn in self.queue if conn.stale()] if stale: for conn in stale: conns = self.connections.get(conn.key) conns.remove(conn) self.queue.remove(conn) self.log(1, 'closing stale connection for', key) conn.close() # Close oldest overall connection(s) if limit reached. while len(self.queue) > self.max_pool: conn = self.queue.pop(0) conns = self.connections.get(conn.key) c = conns.pop(0) assert conn == c, (conn.key, conn, c, conns) self.log(1, 'closing overall oldest connection for', conn.key) conn.close() class Connection: def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None: self.log = log self.pool = pool self.host = host self.port = port self.ssl = ssl self.reader = None # type: asyncio.StreamReader self.writer = None # type: asyncio.StreamWriter self.key = None # type: KeyTuple def stale(self) -> bool: return self.reader is None or self.reader.at_eof() def fileno(self) -> Optional[int]: writer = self.writer if writer is not None: transport = writer.transport if transport is not None: sock = transport.get_extra_info('socket') if sock is not None: return sock.fileno() return None @asyncio.coroutine def connect(self) -> Generator[Any, None, None]: self.reader, self.writer = yield from asyncio.open_connection( self.host, self.port, ssl=self.ssl) peername = self.writer.get_extra_info('peername') if peername: self.host, self.port = peername[:2] else: self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl) self.key 
= self.host, self.port, self.ssl def close(self, recycle: bool = False) -> None: if recycle and not self.stale(): self.pool.recycle_connection(self) else: self.writer.close() self.pool = self.reader = self.writer = None class Request: """HTTP request. Use connect() to open a connection; send_request() to send the request; get_response() to receive the response headers. """ def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None: self.log = log self.url = url self.pool = pool self.parts = urllib.parse.urlparse(self.url) self.scheme = self.parts.scheme assert self.scheme in ('http', 'https'), repr(url) self.ssl = self.parts.scheme == 'https' self.netloc = self.parts.netloc self.hostname = self.parts.hostname self.port = self.parts.port or (443 if self.ssl else 80) self.path = (self.parts.path or '/') self.query = self.parts.query if self.query: self.full_path = '%s?%s' % (self.path, self.query) else: self.full_path = self.path self.http_version = 'HTTP/1.1' self.method = 'GET' self.headers = [] # type: List[Tuple[str, str]] self.conn = None # type: Connection @asyncio.coroutine def connect(self) -> Generator[Any, None, None]: """Open a connection to the server.""" self.log(1, '* Connecting to %s:%s using %s for %s' % (self.hostname, self.port, 'ssl' if self.ssl else 'tcp', self.url)) self.conn = yield from self.pool.get_connection(self.hostname, self.port, self.ssl) def close(self, recycle: bool = False) -> None: """Close the connection, recycle if requested.""" if self.conn is not None: if not recycle: self.log(1, 'closing connection for', self.conn.key) self.conn.close(recycle) self.conn = None @asyncio.coroutine def putline(self, line: str) -> None: """Write a line to the connection. Used for the request line and headers. 
""" self.log(2, '>', line) self.conn.writer.write(line.encode('latin-1') + b'\r\n') @asyncio.coroutine def send_request(self) -> Generator[Any, None, None]: """Send the request.""" request_line = '%s %s %s' % (self.method, self.full_path, self.http_version) yield from self.putline(request_line) # TODO: What if a header is already set? self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0')) self.headers.append(('Host', self.netloc)) self.headers.append(('Accept', '*/*')) # self.headers.append(('Accept-Encoding', 'gzip')) for key, value in self.headers: line = '%s: %s' % (key, value) yield from self.putline(line) yield from self.putline('') @asyncio.coroutine def get_response(self) -> Generator[Any, None, 'Response']: """Receive the response.""" response = Response(self.log, self.conn.reader) yield from response.read_headers() return response class Response: """HTTP response. Call read_headers() to receive the request headers. Then check the status attribute and call get_header() to inspect the headers. Finally call read() to receive the body. 
""" def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None: self.log = log self.reader = reader self.http_version = None # type: str # 'HTTP/1.1' self.status = None # type: int # 200 self.reason = None # type: str # 'Ok' self.headers = [] # type: List[Tuple[str, str]] # [('Content-Type', 'text/html')] @asyncio.coroutine def getline(self) -> Generator[Any, None, str]: """Read one line from the connection.""" line = (yield from self.reader.readline()).decode('latin-1').rstrip() self.log(2, '<', line) return line @asyncio.coroutine def read_headers(self) -> Generator[Any, None, None]: """Read the response status and the request headers.""" status_line = yield from self.getline() status_parts = status_line.split(None, 2) if len(status_parts) != 3: self.log(0, 'bad status_line', repr(status_line)) raise BadStatusLine(status_line) self.http_version, status, self.reason = status_parts self.status = int(status) while True: header_line = yield from self.getline() if not header_line: break # TODO: Continuation lines. key, value = header_line.split(':', 1) self.headers.append((key, value.strip())) def get_redirect_url(self, default: str = '') -> str: """Inspect the status and return the redirect url if appropriate.""" if self.status not in (300, 301, 302, 303, 307): return default return self.get_header('Location', default) def get_header(self, key: str, default: str = '') -> str: """Get one header value, using a case insensitive header name.""" key = key.lower() for k, v in self.headers: if k.lower() == key: return v return default @asyncio.coroutine def read(self) -> Generator[Any, None, bytes]: """Read the response body. This honors Content-Length and Transfer-Encoding: chunked. 
""" nbytes = None for key, value in self.headers: if key.lower() == 'content-length': nbytes = int(value) break if nbytes is None: if self.get_header('transfer-encoding').lower() == 'chunked': self.log(2, 'parsing chunked response') blocks = [] while True: size_header = yield from self.reader.readline() if not size_header: self.log(0, 'premature end of chunked response') break self.log(3, 'size_header =', repr(size_header)) parts = size_header.split(b';') size = int(parts[0], 16) if size: self.log(3, 'reading chunk of', size, 'bytes') block = yield from self.reader.readexactly(size) assert len(block) == size, (len(block), size) blocks.append(block) crlf = yield from self.reader.readline() assert crlf == b'\r\n', repr(crlf) if not size: break body = b''.join(blocks) self.log(1, 'chunked response had', len(body), 'bytes in', len(blocks), 'blocks') else: self.log(3, 'reading until EOF') body = yield from self.reader.read() # TODO: Should make sure not to recycle the connection # in this case. else: body = yield from self.reader.readexactly(nbytes) return body class Fetcher: """Logic and state for one URL. When found in crawler.busy, this represents a URL to be fetched or in the process of being fetched; when found in crawler.done, this holds the results from fetching it. This is usually associated with a task. This references the crawler for the connection pool and to add more URLs to its todo list. Call fetch() to do the fetching, then report() to print the results. """ def __init__(self, log: Logger, url: str, crawler: 'Crawler', max_redirect: int = 10, max_tries: int = 4) -> None: self.log = log self.url = url self.crawler = crawler # We don't loop resolving redirects here -- we just use this # to decide whether to add the redirect URL to crawler.todo. self.max_redirect = max_redirect # But we do loop to retry on errors a few times. self.max_tries = max_tries # Everything we collect from the response goes here. 
self.task = None # type: asyncio.Task self.exceptions = [] # type: List[Exception] self.tries = 0 self.request = None # type: Request self.response = None # type: Response self.body = None # type: bytes self.next_url = None # type: str self.ctype = None # type: str self.pdict = None # type: Dict[str, str] self.encoding = None # type: str self.urls = None # type: Set[str] self.new_urls = None # type: Set[str] @asyncio.coroutine def fetch(self) -> Generator[Any, None, None]: """Attempt to fetch the contents of the URL. If successful, and the data is HTML, extract further links and add them to the crawler. Redirects are also added back there. """ while self.tries < self.max_tries: self.tries += 1 self.request = None try: self.request = Request(self.log, self.url, self.crawler.pool) yield from self.request.connect() yield from self.request.send_request() self.response = yield from self.request.get_response() self.body = yield from self.response.read() h_conn = self.response.get_header('connection').lower() if h_conn != 'close': self.request.close(recycle=True) self.request = None if self.tries > 1: self.log(1, 'try', self.tries, 'for', self.url, 'success') break except (BadStatusLine, OSError) as exc: self.exceptions.append(exc) self.log(1, 'try', self.tries, 'for', self.url, 'raised', repr(exc)) # import pdb; pdb.set_trace() # Don't reuse the connection in this case. finally: if self.request is not None: self.request.close() else: # We never broke out of the while loop, i.e. all tries failed. 
self.log(0, 'no success for', self.url, 'in', self.max_tries, 'tries') return next_url = self.response.get_redirect_url() if next_url: self.next_url = urllib.parse.urljoin(self.url, next_url) if self.max_redirect > 0: self.log(1, 'redirect to', self.next_url, 'from', self.url) self.crawler.add_url(self.next_url, self.max_redirect - 1) else: self.log(0, 'redirect limit reached for', self.next_url, 'from', self.url) else: if self.response.status == 200: self.ctype = self.response.get_header('content-type') self.pdict = {} if self.ctype: self.ctype, self.pdict = cgi.parse_header(self.ctype) self.encoding = self.pdict.get('charset', 'utf-8') if self.ctype == 'text/html': body = self.body.decode(self.encoding, 'replace') # Replace href with (?:href|src) to follow image links. self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', body)) if self.urls: self.log(1, 'got', len(self.urls), 'distinct urls from', self.url) self.new_urls = set() for url in self.urls: url = unescape(url) url = urllib.parse.urljoin(self.url, url) url, frag = urllib.parse.urldefrag(url) if self.crawler.add_url(url): self.new_urls.add(url) def report(self, stats: 'Stats', file: IO[str] = None) -> None: """Print a report on the state for this URL. Also update the Stats instance. 
""" if self.task is not None: if not self.task.done(): stats.add('pending') print(self.url, 'pending', file=file) return elif self.task.cancelled(): stats.add('cancelled') print(self.url, 'cancelled', file=file) return elif self.task.exception(): stats.add('exception') exc = self.task.exception() stats.add('exception_' + exc.__class__.__name__) print(self.url, exc, file=file) return if len(self.exceptions) == self.tries: stats.add('fail') exc = self.exceptions[-1] stats.add('fail_' + str(exc.__class__.__name__)) print(self.url, 'error', exc, file=file) elif self.next_url: stats.add('redirect') print(self.url, self.response.status, 'redirect', self.next_url, file=file) elif self.ctype == 'text/html': stats.add('html') size = len(self.body or b'') stats.add('html_bytes', size) if self.log.level: print(self.url, self.response.status, self.ctype, self.encoding, size, '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())), file=file) elif self.response is None: print(self.url, 'no response object') else: size = len(self.body or b'') if self.response.status == 200: stats.add('other') stats.add('other_bytes', size) else: stats.add('error') stats.add('error_bytes', size) stats.add('status_%s' % self.response.status) print(self.url, self.response.status, self.ctype, self.encoding, size, file=file) class Stats: """Record stats of various sorts.""" def __init__(self) -> None: self.stats = {} # type: Dict[str, int] def add(self, key: str, count: int = 1) -> None: self.stats[key] = self.stats.get(key, 0) + count def report(self, file: IO[str] = None) -> None: for key, count in sorted(self.stats.items()): print('%10d' % count, key, file=file) class Crawler: """Crawl a set of URLs. This manages three disjoint sets of URLs (todo, busy, done). The data structures actually store dicts -- the values in todo give the redirect limit, while the values in busy and done are Fetcher instances. 
""" def __init__(self, log: Logger, roots: Set[str], exclude: str = None, strict: bool = True, # What to crawl. max_redirect: int = 10, max_tries: int = 4, # Per-url limits. max_tasks: int = 10, max_pool: int = 10, # Global limits. ) -> None: self.log = log self.roots = roots self.exclude = exclude self.strict = strict self.max_redirect = max_redirect self.max_tries = max_tries self.max_tasks = max_tasks self.max_pool = max_pool self.todo = {} # type: Dict[str, int] self.busy = {} # type: Dict[str, Fetcher] self.done = {} # type: Dict[str, Fetcher] self.pool = ConnectionPool(self.log, max_pool, max_tasks) self.root_domains = set() # type: Set[str] for root in roots: host = urllib.parse.urlparse(root).hostname if not host: continue if re.match(r'\A[\d\.]*\Z', host): self.root_domains.add(host) else: host = host.lower() if self.strict: self.root_domains.add(host) if host.startswith('www.'): self.root_domains.add(host[4:]) else: self.root_domains.add('www.' + host) else: parts = host.split('.') if len(parts) > 2: host = '.'.join(parts[-2:]) self.root_domains.add(host) for root in roots: self.add_url(root) self.governor = asyncio.Semaphore(max_tasks) self.termination = asyncio.Condition() self.t0 = time.time() self.t1 = None # type: Optional[float] def close(self) -> None: """Close resources (currently only the pool).""" self.pool.close() def host_okay(self, host: str) -> bool: """Check if a host should be crawled. A literal match (after lowercasing) is always good. For hosts that don't look like IP addresses, some approximate matches are okay depending on the strict flag. """ host = host.lower() if host in self.root_domains: return True if re.match(r'\A[\d\.]*\Z', host): return False if self.strict: return self._host_okay_strictish(host) else: return self._host_okay_lenient(host) def _host_okay_strictish(self, host: str) -> bool: """Check if a host should be crawled, strict-ish version. This checks for equality modulo an initial 'www.' component. 
""" if host.startswith('www.'): if host[4:] in self.root_domains: return True else: if 'www.' + host in self.root_domains: return True return False def _host_okay_lenient(self, host: str) -> bool: """Check if a host should be crawled, lenient version. This compares the last two components of the host. """ parts = host.split('.') if len(parts) > 2: host = '.'.join(parts[-2:]) return host in self.root_domains def add_url(self, url: str, max_redirect: int = None) -> bool: """Add a URL to the todo list if not seen before.""" if self.exclude and re.search(self.exclude, url): return False parsed = urllib.parse.urlparse(url) if parsed.scheme not in ('http', 'https'): self.log(2, 'skipping non-http scheme in', url) return False host = parsed.hostname if not self.host_okay(host): self.log(2, 'skipping non-root host in', url) return False if max_redirect is None: max_redirect = self.max_redirect if url in self.todo or url in self.busy or url in self.done: return False self.log(1, 'adding', url, max_redirect) self.todo[url] = max_redirect return True @asyncio.coroutine def crawl(self) -> Generator[Any, None, None]: """Run the crawler until all finished.""" with (yield from self.termination): while self.todo or self.busy: if self.todo: url, max_redirect = self.todo.popitem() fetcher = Fetcher(self.log, url, crawler=self, max_redirect=max_redirect, max_tries=self.max_tries, ) self.busy[url] = fetcher fetcher.task = asyncio.Task(self.fetch(fetcher)) else: yield from self.termination.wait() self.t1 = time.time() @asyncio.coroutine def fetch(self, fetcher: Fetcher) -> Generator[Any, None, None]: """Call the Fetcher's fetch(), with a limit on concurrency. Once this returns, move the fetcher from busy to done. """ url = fetcher.url with (yield from self.governor): try: yield from fetcher.fetch() # Fetcher gonna fetch. finally: # Force GC of the task, so the error is logged. 
fetcher.task = None with (yield from self.termination): self.done[url] = fetcher del self.busy[url] self.termination.notify() def report(self, file: IO[str] = None) -> None: """Print a report on all completed URLs.""" if self.t1 is None: self.t1 = time.time() dt = self.t1 - self.t0 if dt and self.max_tasks: speed = len(self.done) / dt / self.max_tasks else: speed = 0 stats = Stats() print('*** Report ***', file=file) try: show = [] # type: List[Tuple[str, Fetcher]] show.extend(self.done.items()) show.extend(self.busy.items()) show.sort() for url, fetcher in show: fetcher.report(stats, file=file) except KeyboardInterrupt: print('\nInterrupted', file=file) print('Finished', len(self.done), 'urls in %.3f secs' % dt, '(max_tasks=%d)' % self.max_tasks, '(%.3f urls/sec/task)' % speed, file=file) stats.report(file=file) print('Todo:', len(self.todo), file=file) print('Busy:', len(self.busy), file=file) print('Done:', len(self.done), file=file) print('Date:', time.ctime(), 'local time', file=file) def main() -> None: """Main program. Parse arguments, set up event loop, run crawler, print report. """ args = ARGS.parse_args() if not args.roots: print('Use --help for command line help') return log = Logger(args.level) if args.iocp: if sys.platform == 'win32': from asyncio import ProactorEventLoop loop = ProactorEventLoop() # type: ignore asyncio.set_event_loop(loop) else: assert False elif args.select: loop = asyncio.SelectorEventLoop() # type: ignore asyncio.set_event_loop(loop) else: loop = asyncio.get_event_loop() # type: ignore roots = {fix_url(root) for root in args.roots} crawler = Crawler(log, roots, exclude=args.exclude, strict=args.strict, max_redirect=args.max_redirect, max_tries=args.max_tries, max_tasks=args.max_tasks, max_pool=args.max_pool, ) try: loop.run_until_complete(crawler.crawl()) # Crawler gonna crawl. 
except KeyboardInterrupt: sys.stderr.flush() print('\nInterrupted\n') finally: crawler.report() crawler.close() loop.close() if __name__ == '__main__': logging.basicConfig(level=logging.INFO) # type: ignore main() mypy-0.761/test-data/samples/crawl2.py0000644€tŠÔÚ€2›s®0000007555113576752246024033 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3.4 """A simple web crawler.""" # This is cloned from /examples/crawl.py, # with type annotations added (PEP 484). # # This version (crawl2.) has also been converted to use `async def` + # `await` (PEP 492). import argparse import asyncio import cgi from http.client import BadStatusLine import logging import re import sys import time import urllib.parse from typing import Any, Awaitable, IO, Optional, Sequence, Set, Tuple, List, Dict ARGS = argparse.ArgumentParser(description="Web crawler") ARGS.add_argument( '--iocp', action='store_true', dest='iocp', default=False, help='Use IOCP event loop (Windows only)') ARGS.add_argument( '--select', action='store_true', dest='select', default=False, help='Use Select event loop instead of default') ARGS.add_argument( 'roots', nargs='*', default=[], help='Root URL (may be repeated)') ARGS.add_argument( '--max_redirect', action='store', type=int, metavar='N', default=10, help='Limit redirection chains (for 301, 302 etc.)') ARGS.add_argument( '--max_tries', action='store', type=int, metavar='N', default=4, help='Limit retries on network errors') ARGS.add_argument( '--max_tasks', action='store', type=int, metavar='N', default=100, help='Limit concurrent connections') ARGS.add_argument( '--max_pool', action='store', type=int, metavar='N', default=100, help='Limit connection pool size') ARGS.add_argument( '--exclude', action='store', metavar='REGEX', help='Exclude matching URLs') ARGS.add_argument( '--strict', action='store_true', default=True, help='Strict host matching (default)') ARGS.add_argument( '--lenient', action='store_false', dest='strict', default=False, 
help='Lenient host matching') ARGS.add_argument( '-v', '--verbose', action='count', dest='level', default=1, help='Verbose logging (repeat for more verbose)') ARGS.add_argument( '-q', '--quiet', action='store_const', const=0, dest='level', default=1, help='Quiet logging (opposite of --verbose)') ESCAPES = [('quot', '"'), ('gt', '>'), ('lt', '<'), ('amp', '&') # Must be last. ] def unescape(url: str) -> str: """Turn & into &, and so on. This is the inverse of cgi.escape(). """ for name, char in ESCAPES: url = url.replace('&' + name + ';', char) return url def fix_url(url: str) -> str: """Prefix a schema-less URL with http://.""" if '://' not in url: url = 'http://' + url return url class Logger: def __init__(self, level: int) -> None: self.level = level def _log(self, n: int, args: Sequence[Any]) -> None: if self.level >= n: print(*args, file=sys.stderr, flush=True) def log(self, n: int, *args: Any) -> None: self._log(n, args) def __call__(self, n: int, *args: Any) -> None: self._log(n, args) KeyTuple = Tuple[str, int, bool] class ConnectionPool: """A connection pool. To open a connection, use reserve(). To recycle it, use unreserve(). The pool is mostly just a mapping from (host, port, ssl) tuples to lists of Connections. The currently active connections are *not* in the data structure; get_connection() takes the connection out, and recycle_connection() puts it back in. To recycle a connection, call conn.close(recycle=True). There are limits to both the overall pool and the per-key pool. """ def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None: self.log = log self.max_pool = max_pool # Overall limit. self.max_tasks = max_tasks # Per-key limit. 
self.loop = asyncio.get_event_loop() self.connections = {} # type: Dict[KeyTuple, List[Connection]] self.queue = [] # type: List[Connection] def close(self) -> None: """Close all connections available for reuse.""" for conns in self.connections.values(): for conn in conns: conn.close() self.connections.clear() self.queue.clear() async def get_connection(self, host: str, port: int, ssl: bool) -> 'Connection': """Create or reuse a connection.""" port = port or (443 if ssl else 80) try: ipaddrs = await self.loop.getaddrinfo(host, port) except Exception as exc: self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port)) raise self.log(1, '* %s resolves to %s' % (host, ', '.join(ip[4][0] for ip in ipaddrs))) # Look for a reusable connection. for _1, _2, _3, _4, (h, p, *_5) in ipaddrs: key = h, p, ssl conn = None conns = self.connections.get(key) while conns: conn = conns.pop(0) self.queue.remove(conn) if not conns: del self.connections[key] if conn.stale(): self.log(1, 'closing stale connection for', key) conn.close() # Just in case. else: self.log(1, '* Reusing pooled connection', key, 'FD =', conn.fileno()) return conn # Create a new connection. conn = Connection(self.log, self, host, port, ssl) await conn.connect() self.log(1, '* New connection', conn.key, 'FD =', conn.fileno()) return conn def recycle_connection(self, conn: 'Connection') -> None: """Make a connection available for reuse. This also prunes the pool if it exceeds the size limits. """ if conn.stale(): conn.close() return key = conn.key conns = self.connections.setdefault(key, []) conns.append(conn) self.queue.append(conn) if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool: return # Prune the queue. # Close stale connections for this key first. 
stale = [conn for conn in conns if conn.stale()] if stale: for conn in stale: conns.remove(conn) self.queue.remove(conn) self.log(1, 'closing stale connection for', key) conn.close() if not conns: del self.connections[key] # Close oldest connection(s) for this key if limit reached. while len(conns) > self.max_tasks: conn = conns.pop(0) self.queue.remove(conn) self.log(1, 'closing oldest connection for', key) conn.close() if len(self.queue) <= self.max_pool: return # Close overall stale connections. stale = [conn for conn in self.queue if conn.stale()] if stale: for conn in stale: conns = self.connections.get(conn.key) conns.remove(conn) self.queue.remove(conn) self.log(1, 'closing stale connection for', key) conn.close() # Close oldest overall connection(s) if limit reached. while len(self.queue) > self.max_pool: conn = self.queue.pop(0) conns = self.connections.get(conn.key) c = conns.pop(0) assert conn == c, (conn.key, conn, c, conns) self.log(1, 'closing overall oldest connection for', conn.key) conn.close() class Connection: def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None: self.log = log self.pool = pool self.host = host self.port = port self.ssl = ssl self.reader = None # type: asyncio.StreamReader self.writer = None # type: asyncio.StreamWriter self.key = None # type: KeyTuple def stale(self) -> bool: return self.reader is None or self.reader.at_eof() def fileno(self) -> Optional[int]: writer = self.writer if writer is not None: transport = writer.transport if transport is not None: sock = transport.get_extra_info('socket') if sock is not None: return sock.fileno() return None async def connect(self) -> None: self.reader, self.writer = await asyncio.open_connection( self.host, self.port, ssl=self.ssl) peername = self.writer.get_extra_info('peername') if peername: self.host, self.port = peername[:2] else: self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl) self.key = self.host, self.port, self.ssl def 
close(self, recycle: bool = False) -> None: if recycle and not self.stale(): self.pool.recycle_connection(self) else: self.writer.close() self.pool = self.reader = self.writer = None class Request: """HTTP request. Use connect() to open a connection; send_request() to send the request; get_response() to receive the response headers. """ def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None: self.log = log self.url = url self.pool = pool self.parts = urllib.parse.urlparse(self.url) self.scheme = self.parts.scheme assert self.scheme in ('http', 'https'), repr(url) self.ssl = self.parts.scheme == 'https' self.netloc = self.parts.netloc self.hostname = self.parts.hostname self.port = self.parts.port or (443 if self.ssl else 80) self.path = (self.parts.path or '/') self.query = self.parts.query if self.query: self.full_path = '%s?%s' % (self.path, self.query) else: self.full_path = self.path self.http_version = 'HTTP/1.1' self.method = 'GET' self.headers = [] # type: List[Tuple[str, str]] self.conn = None # type: Connection async def connect(self) -> None: """Open a connection to the server.""" self.log(1, '* Connecting to %s:%s using %s for %s' % (self.hostname, self.port, 'ssl' if self.ssl else 'tcp', self.url)) self.conn = await self.pool.get_connection(self.hostname, self.port, self.ssl) def close(self, recycle: bool = False) -> None: """Close the connection, recycle if requested.""" if self.conn is not None: if not recycle: self.log(1, 'closing connection for', self.conn.key) self.conn.close(recycle) self.conn = None async def putline(self, line: str) -> None: """Write a line to the connection. Used for the request line and headers. """ self.log(2, '>', line) self.conn.writer.write(line.encode('latin-1') + b'\r\n') async def send_request(self) -> None: """Send the request.""" request_line = '%s %s %s' % (self.method, self.full_path, self.http_version) await self.putline(request_line) # TODO: What if a header is already set? 
self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0')) self.headers.append(('Host', self.netloc)) self.headers.append(('Accept', '*/*')) # self.headers.append(('Accept-Encoding', 'gzip')) for key, value in self.headers: line = '%s: %s' % (key, value) await self.putline(line) await self.putline('') async def get_response(self) -> 'Response': """Receive the response.""" response = Response(self.log, self.conn.reader) await response.read_headers() return response class Response: """HTTP response. Call read_headers() to receive the request headers. Then check the status attribute and call get_header() to inspect the headers. Finally call read() to receive the body. """ def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None: self.log = log self.reader = reader self.http_version = None # type: str # 'HTTP/1.1' self.status = None # type: int # 200 self.reason = None # type: str # 'Ok' self.headers = [] # type: List[Tuple[str, str]] # [('Content-Type', 'text/html')] async def getline(self) -> str: """Read one line from the connection.""" line = (await self.reader.readline()).decode('latin-1').rstrip() self.log(2, '<', line) return line async def read_headers(self) -> None: """Read the response status and the request headers.""" status_line = await self.getline() status_parts = status_line.split(None, 2) if len(status_parts) != 3: self.log(0, 'bad status_line', repr(status_line)) raise BadStatusLine(status_line) self.http_version, status, self.reason = status_parts self.status = int(status) while True: header_line = await self.getline() if not header_line: break # TODO: Continuation lines. 
key, value = header_line.split(':', 1) self.headers.append((key, value.strip())) def get_redirect_url(self, default: str = '') -> str: """Inspect the status and return the redirect url if appropriate.""" if self.status not in (300, 301, 302, 303, 307): return default return self.get_header('Location', default) def get_header(self, key: str, default: str = '') -> str: """Get one header value, using a case insensitive header name.""" key = key.lower() for k, v in self.headers: if k.lower() == key: return v return default async def read(self) -> bytes: """Read the response body. This honors Content-Length and Transfer-Encoding: chunked. """ nbytes = None for key, value in self.headers: if key.lower() == 'content-length': nbytes = int(value) break if nbytes is None: if self.get_header('transfer-encoding').lower() == 'chunked': self.log(2, 'parsing chunked response') blocks = [] while True: size_header = await self.reader.readline() if not size_header: self.log(0, 'premature end of chunked response') break self.log(3, 'size_header =', repr(size_header)) parts = size_header.split(b';') size = int(parts[0], 16) if size: self.log(3, 'reading chunk of', size, 'bytes') block = await self.reader.readexactly(size) assert len(block) == size, (len(block), size) blocks.append(block) crlf = await self.reader.readline() assert crlf == b'\r\n', repr(crlf) if not size: break body = b''.join(blocks) self.log(1, 'chunked response had', len(body), 'bytes in', len(blocks), 'blocks') else: self.log(3, 'reading until EOF') body = await self.reader.read() # TODO: Should make sure not to recycle the connection # in this case. else: body = await self.reader.readexactly(nbytes) return body class Fetcher: """Logic and state for one URL. When found in crawler.busy, this represents a URL to be fetched or in the process of being fetched; when found in crawler.done, this holds the results from fetching it. This is usually associated with a task. 
This references the crawler for the connection pool and to add more URLs to its todo list. Call fetch() to do the fetching, then report() to print the results. """ def __init__(self, log: Logger, url: str, crawler: 'Crawler', max_redirect: int = 10, max_tries: int = 4) -> None: self.log = log self.url = url self.crawler = crawler # We don't loop resolving redirects here -- we just use this # to decide whether to add the redirect URL to crawler.todo. self.max_redirect = max_redirect # But we do loop to retry on errors a few times. self.max_tries = max_tries # Everything we collect from the response goes here. self.task = None # type: asyncio.Task self.exceptions = [] # type: List[Exception] self.tries = 0 self.request = None # type: Request self.response = None # type: Response self.body = None # type: bytes self.next_url = None # type: str self.ctype = None # type: str self.pdict = None # type: Dict[str, str] self.encoding = None # type: str self.urls = None # type: Set[str] self.new_urls = None # type: Set[str] async def fetch(self) -> None: """Attempt to fetch the contents of the URL. If successful, and the data is HTML, extract further links and add them to the crawler. Redirects are also added back there. """ while self.tries < self.max_tries: self.tries += 1 self.request = None try: self.request = Request(self.log, self.url, self.crawler.pool) await self.request.connect() await self.request.send_request() self.response = await self.request.get_response() self.body = await self.response.read() h_conn = self.response.get_header('connection').lower() if h_conn != 'close': self.request.close(recycle=True) self.request = None if self.tries > 1: self.log(1, 'try', self.tries, 'for', self.url, 'success') break except (BadStatusLine, OSError) as exc: self.exceptions.append(exc) self.log(1, 'try', self.tries, 'for', self.url, 'raised', repr(exc)) # import pdb; pdb.set_trace() # Don't reuse the connection in this case. 
finally: if self.request is not None: self.request.close() else: # We never broke out of the while loop, i.e. all tries failed. self.log(0, 'no success for', self.url, 'in', self.max_tries, 'tries') return next_url = self.response.get_redirect_url() if next_url: self.next_url = urllib.parse.urljoin(self.url, next_url) if self.max_redirect > 0: self.log(1, 'redirect to', self.next_url, 'from', self.url) self.crawler.add_url(self.next_url, self.max_redirect - 1) else: self.log(0, 'redirect limit reached for', self.next_url, 'from', self.url) else: if self.response.status == 200: self.ctype = self.response.get_header('content-type') self.pdict = {} if self.ctype: self.ctype, self.pdict = cgi.parse_header(self.ctype) self.encoding = self.pdict.get('charset', 'utf-8') if self.ctype == 'text/html': body = self.body.decode(self.encoding, 'replace') # Replace href with (?:href|src) to follow image links. self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', body)) if self.urls: self.log(1, 'got', len(self.urls), 'distinct urls from', self.url) self.new_urls = set() for url in self.urls: url = unescape(url) url = urllib.parse.urljoin(self.url, url) url, frag = urllib.parse.urldefrag(url) if self.crawler.add_url(url): self.new_urls.add(url) def report(self, stats: 'Stats', file: IO[str] = None) -> None: """Print a report on the state for this URL. Also update the Stats instance. 
""" if self.task is not None: if not self.task.done(): stats.add('pending') print(self.url, 'pending', file=file) return elif self.task.cancelled(): stats.add('cancelled') print(self.url, 'cancelled', file=file) return elif self.task.exception(): stats.add('exception') exc = self.task.exception() stats.add('exception_' + exc.__class__.__name__) print(self.url, exc, file=file) return if len(self.exceptions) == self.tries: stats.add('fail') exc = self.exceptions[-1] stats.add('fail_' + str(exc.__class__.__name__)) print(self.url, 'error', exc, file=file) elif self.next_url: stats.add('redirect') print(self.url, self.response.status, 'redirect', self.next_url, file=file) elif self.ctype == 'text/html': stats.add('html') size = len(self.body or b'') stats.add('html_bytes', size) if self.log.level: print(self.url, self.response.status, self.ctype, self.encoding, size, '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())), file=file) elif self.response is None: print(self.url, 'no response object') else: size = len(self.body or b'') if self.response.status == 200: stats.add('other') stats.add('other_bytes', size) else: stats.add('error') stats.add('error_bytes', size) stats.add('status_%s' % self.response.status) print(self.url, self.response.status, self.ctype, self.encoding, size, file=file) class Stats: """Record stats of various sorts.""" def __init__(self) -> None: self.stats = {} # type: Dict[str, int] def add(self, key: str, count: int = 1) -> None: self.stats[key] = self.stats.get(key, 0) + count def report(self, file: IO[str] = None) -> None: for key, count in sorted(self.stats.items()): print('%10d' % count, key, file=file) class Crawler: """Crawl a set of URLs. This manages three disjoint sets of URLs (todo, busy, done). The data structures actually store dicts -- the values in todo give the redirect limit, while the values in busy and done are Fetcher instances. 
""" def __init__(self, log: Logger, roots: Set[str], exclude: str = None, strict: bool = True, # What to crawl. max_redirect: int = 10, max_tries: int = 4, # Per-url limits. max_tasks: int = 10, max_pool: int = 10, # Global limits. ) -> None: self.log = log self.roots = roots self.exclude = exclude self.strict = strict self.max_redirect = max_redirect self.max_tries = max_tries self.max_tasks = max_tasks self.max_pool = max_pool self.todo = {} # type: Dict[str, int] self.busy = {} # type: Dict[str, Fetcher] self.done = {} # type: Dict[str, Fetcher] self.pool = ConnectionPool(self.log, max_pool, max_tasks) self.root_domains = set() # type: Set[str] for root in roots: host = urllib.parse.urlparse(root).hostname if not host: continue if re.match(r'\A[\d\.]*\Z', host): self.root_domains.add(host) else: host = host.lower() if self.strict: self.root_domains.add(host) if host.startswith('www.'): self.root_domains.add(host[4:]) else: self.root_domains.add('www.' + host) else: parts = host.split('.') if len(parts) > 2: host = '.'.join(parts[-2:]) self.root_domains.add(host) for root in roots: self.add_url(root) self.governor = asyncio.Semaphore(max_tasks) self.termination = asyncio.Condition() self.t0 = time.time() self.t1 = None # type: Optional[float] def close(self) -> None: """Close resources (currently only the pool).""" self.pool.close() def host_okay(self, host: str) -> bool: """Check if a host should be crawled. A literal match (after lowercasing) is always good. For hosts that don't look like IP addresses, some approximate matches are okay depending on the strict flag. """ host = host.lower() if host in self.root_domains: return True if re.match(r'\A[\d\.]*\Z', host): return False if self.strict: return self._host_okay_strictish(host) else: return self._host_okay_lenient(host) def _host_okay_strictish(self, host: str) -> bool: """Check if a host should be crawled, strict-ish version. This checks for equality modulo an initial 'www.' component. 
""" if host.startswith('www.'): if host[4:] in self.root_domains: return True else: if 'www.' + host in self.root_domains: return True return False def _host_okay_lenient(self, host: str) -> bool: """Check if a host should be crawled, lenient version. This compares the last two components of the host. """ parts = host.split('.') if len(parts) > 2: host = '.'.join(parts[-2:]) return host in self.root_domains def add_url(self, url: str, max_redirect: int = None) -> bool: """Add a URL to the todo list if not seen before.""" if self.exclude and re.search(self.exclude, url): return False parsed = urllib.parse.urlparse(url) if parsed.scheme not in ('http', 'https'): self.log(2, 'skipping non-http scheme in', url) return False host = parsed.hostname if not self.host_okay(host): self.log(2, 'skipping non-root host in', url) return False if max_redirect is None: max_redirect = self.max_redirect if url in self.todo or url in self.busy or url in self.done: return False self.log(1, 'adding', url, max_redirect) self.todo[url] = max_redirect return True async def crawl(self) -> None: """Run the crawler until all finished.""" with (await self.termination): while self.todo or self.busy: if self.todo: url, max_redirect = self.todo.popitem() fetcher = Fetcher(self.log, url, crawler=self, max_redirect=max_redirect, max_tries=self.max_tries, ) self.busy[url] = fetcher fetcher.task = asyncio.Task(self.fetch(fetcher)) else: await self.termination.wait() self.t1 = time.time() async def fetch(self, fetcher: Fetcher) -> None: """Call the Fetcher's fetch(), with a limit on concurrency. Once this returns, move the fetcher from busy to done. """ url = fetcher.url with (await self.governor): try: await fetcher.fetch() # Fetcher gonna fetch. finally: # Force GC of the task, so the error is logged. 
fetcher.task = None with (await self.termination): self.done[url] = fetcher del self.busy[url] self.termination.notify() def report(self, file: IO[str] = None) -> None: """Print a report on all completed URLs.""" if self.t1 is None: self.t1 = time.time() dt = self.t1 - self.t0 if dt and self.max_tasks: speed = len(self.done) / dt / self.max_tasks else: speed = 0 stats = Stats() print('*** Report ***', file=file) try: show = [] # type: List[Tuple[str, Fetcher]] show.extend(self.done.items()) show.extend(self.busy.items()) show.sort() for url, fetcher in show: fetcher.report(stats, file=file) except KeyboardInterrupt: print('\nInterrupted', file=file) print('Finished', len(self.done), 'urls in %.3f secs' % dt, '(max_tasks=%d)' % self.max_tasks, '(%.3f urls/sec/task)' % speed, file=file) stats.report(file=file) print('Todo:', len(self.todo), file=file) print('Busy:', len(self.busy), file=file) print('Done:', len(self.done), file=file) print('Date:', time.ctime(), 'local time', file=file) def main() -> None: """Main program. Parse arguments, set up event loop, run crawler, print report. """ args = ARGS.parse_args() if not args.roots: print('Use --help for command line help') return log = Logger(args.level) if args.iocp: if sys.platform == 'win32': from asyncio import ProactorEventLoop loop = ProactorEventLoop() # type: ignore asyncio.set_event_loop(loop) else: assert False elif args.select: loop = asyncio.SelectorEventLoop() # type: ignore asyncio.set_event_loop(loop) else: loop = asyncio.get_event_loop() # type: ignore roots = {fix_url(root) for root in args.roots} crawler = Crawler(log, roots, exclude=args.exclude, strict=args.strict, max_redirect=args.max_redirect, max_tries=args.max_tries, max_tasks=args.max_tasks, max_pool=args.max_pool, ) try: loop.run_until_complete(crawler.crawl()) # Crawler gonna crawl. 
except KeyboardInterrupt: sys.stderr.flush() print('\nInterrupted\n') finally: crawler.report() crawler.close() loop.close() if __name__ == '__main__': logging.basicConfig(level=logging.INFO) # type: ignore main() mypy-0.761/test-data/samples/dict.py0000644€tŠÔÚ€2›s®0000000037213576752246023551 0ustar jukkaDROPBOX\Domain Users00000000000000import typing prices = {'apple': 0.40, 'banana': 0.50} my_purchase = { 'apple': 1, 'banana': 6} grocery_bill = sum(prices[fruit] * my_purchase[fruit] for fruit in my_purchase) print('I owe the grocer $%.2f' % grocery_bill) mypy-0.761/test-data/samples/fib.py0000644€tŠÔÚ€2›s®0000000024713576752246023367 0ustar jukkaDROPBOX\Domain Users00000000000000import typing parents, babies = (1, 1) while babies < 100: print('This generation has {0} babies'.format(babies)) parents, babies = (babies, parents + babies) mypy-0.761/test-data/samples/files.py0000644€tŠÔÚ€2›s®0000000052113576752246023724 0ustar jukkaDROPBOX\Domain Users00000000000000# indent your Python code to put into an email import glob import typing # glob supports Unix style pathname extensions python_files = glob.glob('*.py') for file_name in sorted(python_files): print(' ------' + file_name) f = open(file_name) for line in f: print(' ' + line.rstrip()) f.close() print() mypy-0.761/test-data/samples/for.py0000644€tŠÔÚ€2›s®0000000025213576752246023411 0ustar jukkaDROPBOX\Domain Users00000000000000import typing friends = ['john', 'pat', 'gary', 'michael'] for i, name in enumerate(friends): print("iteration {iteration} is {name}".format(iteration=i, name=name)) mypy-0.761/test-data/samples/generators.py0000644€tŠÔÚ€2›s®0000000113413576752246024774 0ustar jukkaDROPBOX\Domain Users00000000000000# Prime number sieve with generators import itertools from typing import Iterator def iter_primes() -> Iterator[int]: # an iterator of all numbers between 2 and +infinity numbers = itertools.count(2) # generate primes forever while True: # get the first number from the iterator (always a 
prime) prime = next(numbers) yield prime # this code iteratively builds up a chain of # filters...slightly tricky, but ponder it a bit numbers = filter(prime.__rmod__, numbers) for p in iter_primes(): if p > 1000: break print(p) mypy-0.761/test-data/samples/greet.py0000644€tŠÔÚ€2›s®0000000016013576752246023727 0ustar jukkaDROPBOX\Domain Users00000000000000import typing def greet(name: str) -> None: print('Hello', name) greet('Jack') greet('Jill') greet('Bob') mypy-0.761/test-data/samples/guess.py0000644€tŠÔÚ€2›s®0000000133113576752246023750 0ustar jukkaDROPBOX\Domain Users00000000000000# "Guess the Number" Game (edited) from http://inventwithpython.com import random import typing guesses_made = 0 name = input('Hello! What is your name?\n') number = random.randint(1, 20) print('Well, {0}, I am thinking of a number between 1 and 20.'.format(name)) while guesses_made < 6: guess = int(input('Take a guess: ')) guesses_made += 1 if guess < number: print('Your guess is too low.') if guess > number: print('Your guess is too high.') if guess == number: break if guess == number: print('Good job, {0}! You guessed my number in {1} guesses!'.format( name, guesses_made)) else: print('Nope. The number I was thinking of was {0}'.format(number)) mypy-0.761/test-data/samples/hello.py0000644€tŠÔÚ€2›s®0000000004413576752246023725 0ustar jukkaDROPBOX\Domain Users00000000000000import typing print('Hello, world') mypy-0.761/test-data/samples/input.py0000644€tŠÔÚ€2›s®0000000011313576752246023756 0ustar jukkaDROPBOX\Domain Users00000000000000import typing name = input('What is your name?\n') print('Hi, %s.' % name) mypy-0.761/test-data/samples/itertool.py0000644€tŠÔÚ€2›s®0000000060513576752246024466 0ustar jukkaDROPBOX\Domain Users00000000000000from itertools import groupby import typing lines = ''' This is the first paragraph. This is the second. '''.splitlines() # Use itertools.groupby and bool to return groups of # consecutive lines that either have content or don't. 
for has_chars, frags in groupby(lines, bool): if has_chars: print(' '.join(frags)) # PRINTS: # This is the first paragraph. # This is the second. mypy-0.761/test-data/samples/readme.txt0000644€tŠÔÚ€2›s®0000000125013576752246024246 0ustar jukkaDROPBOX\Domain Users00000000000000Mypy Sample Programs -------------------- The sample programs use static typing unless otherwise noted in comments. Original credits for sample programs: fib.py - Python Wiki [1] for.py - Python Wiki [1] greet.py - Python Wiki [1] hello.py - Python Wiki [1] input.py - Python Wiki [1] regexp.py - Python Wiki [1] dict.py - Python Wiki [1] cmdline.py - Python Wiki [1] files.py - Python Wiki [1] bottles.py - Python Wiki [1] class.py - Python Wiki [1] guess.py - Python Wiki [1] generators.py - Python Wiki [1] itertool.py - Python Wiki [1] The sample programs were ported to mypy by Jukka Lehtosalo. [1] http://wiki.python.org/moin/SimplePrograms mypy-0.761/test-data/samples/regexp.py0000644€tŠÔÚ€2›s®0000000034513576752246024120 0ustar jukkaDROPBOX\Domain Users00000000000000import typing import re for test_string in ['555-1212', 'ILL-EGAL']: if re.match(r'^\d{3}-\d{4}$', test_string): print(test_string, 'is a valid US local phone number') else: print(test_string, 'rejected') mypy-0.761/test-data/stdlib-samples/0000755€tŠÔÚ€2›s®0000000000013576752266023533 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/stdlib-samples/3.2/0000755€tŠÔÚ€2›s®0000000000013576752267024036 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/stdlib-samples/3.2/base64.py0000644€tŠÔÚ€2›s®0000003441213576752246025475 0ustar jukkaDROPBOX\Domain Users00000000000000#! 
/usr/bin/env python3 """RFC 3548: Base16, Base32, Base64 Data Encodings""" # Modified 04-Oct-1995 by Jack Jansen to use binascii module # Modified 30-Dec-2003 by Barry Warsaw to add full RFC 3548 support # Modified 22-May-2007 by Guido van Rossum to use bytes everywhere import re import struct import binascii from typing import Dict, List, AnyStr, IO __all__ = [ # Legacy interface exports traditional RFC 1521 Base64 encodings 'encode', 'decode', 'encodebytes', 'decodebytes', # Generalized interface for other encodings 'b64encode', 'b64decode', 'b32encode', 'b32decode', 'b16encode', 'b16decode', # Standard Base64 encoding 'standard_b64encode', 'standard_b64decode', # Some common Base64 alternatives. As referenced by RFC 3458, see thread # starting at: # # http://zgp.org/pipermail/p2p-hackers/2001-September/000316.html 'urlsafe_b64encode', 'urlsafe_b64decode', ] bytes_types = (bytes, bytearray) # Types acceptable as binary data def _translate(s: bytes, altchars: Dict[AnyStr, bytes]) -> bytes: if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) translation = bytearray(range(256)) for k, v in altchars.items(): translation[ord(k)] = v[0] return s.translate(translation) # Base64 encoding/decoding uses binascii def b64encode(s: bytes, altchars: bytes = None) -> bytes: """Encode a byte string using Base64. s is the byte string to encode. Optional altchars must be a byte string of length 2 which specifies an alternative alphabet for the '+' and '/' characters. This allows an application to e.g. generate url or filesystem safe Base64 strings. The encoded byte string is returned. 
""" if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) # Strip off the trailing newline encoded = binascii.b2a_base64(s)[:-1] if altchars is not None: if not isinstance(altchars, bytes_types): raise TypeError("expected bytes, not %s" % altchars.__class__.__name__) assert len(altchars) == 2, repr(altchars) return _translate(encoded, {'+': altchars[0:1], '/': altchars[1:2]}) return encoded def b64decode(s: bytes, altchars: bytes = None, validate: bool = False) -> bytes: """Decode a Base64 encoded byte string. s is the byte string to decode. Optional altchars must be a string of length 2 which specifies the alternative alphabet used instead of the '+' and '/' characters. The decoded string is returned. A binascii.Error is raised if s is incorrectly padded. If validate is False (the default), non-base64-alphabet characters are discarded prior to the padding check. If validate is True, non-base64-alphabet characters in the input result in a binascii.Error. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) if altchars is not None: if not isinstance(altchars, bytes_types): raise TypeError("expected bytes, not %s" % altchars.__class__.__name__) assert len(altchars) == 2, repr(altchars) s = _translate(s, {chr(altchars[0]): b'+', chr(altchars[1]): b'/'}) if validate and not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s): raise binascii.Error('Non-base64 digit found') return binascii.a2b_base64(s) def standard_b64encode(s: bytes) -> bytes: """Encode a byte string using the standard Base64 alphabet. s is the byte string to encode. The encoded byte string is returned. """ return b64encode(s) def standard_b64decode(s: bytes) -> bytes: """Decode a byte string encoded with the standard Base64 alphabet. s is the byte string to decode. The decoded byte string is returned. 
binascii.Error is raised if the input is incorrectly padded or if there are non-alphabet characters present in the input. """ return b64decode(s) def urlsafe_b64encode(s: bytes) -> bytes: """Encode a byte string using a url-safe Base64 alphabet. s is the byte string to encode. The encoded byte string is returned. The alphabet uses '-' instead of '+' and '_' instead of '/'. """ return b64encode(s, b'-_') def urlsafe_b64decode(s: bytes) -> bytes: """Decode a byte string encoded with the standard Base64 alphabet. s is the byte string to decode. The decoded byte string is returned. binascii.Error is raised if the input is incorrectly padded or if there are non-alphabet characters present in the input. The alphabet uses '-' instead of '+' and '_' instead of '/'. """ return b64decode(s, b'-_') # Base32 encoding/decoding must be done in Python _b32alphabet = { 0: b'A', 9: b'J', 18: b'S', 27: b'3', 1: b'B', 10: b'K', 19: b'T', 28: b'4', 2: b'C', 11: b'L', 20: b'U', 29: b'5', 3: b'D', 12: b'M', 21: b'V', 30: b'6', 4: b'E', 13: b'N', 22: b'W', 31: b'7', 5: b'F', 14: b'O', 23: b'X', 6: b'G', 15: b'P', 24: b'Y', 7: b'H', 16: b'Q', 25: b'Z', 8: b'I', 17: b'R', 26: b'2', } _b32tab = [v[0] for k, v in sorted(_b32alphabet.items())] _b32rev = dict([(v[0], k) for k, v in _b32alphabet.items()]) def b32encode(s: bytes) -> bytes: """Encode a byte string using Base32. s is the byte string to encode. The encoded byte string is returned. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) quanta, leftover = divmod(len(s), 5) # Pad the last quantum with zero bits if necessary if leftover: s = s + bytes(5 - leftover) # Don't use += ! quanta += 1 encoded = bytes() for i in range(quanta): # c1 and c2 are 16 bits wide, c3 is 8 bits wide. The intent of this # code is to process the 40 bits in units of 5 bits. So we take the 1 # leftover bit of c1 and tack it onto c2. Then we take the 2 leftover # bits of c2 and tack them onto c3. 
The shifts and masks are intended # to give us values of exactly 5 bits in width. c1, c2, c3 = struct.unpack('!HHB', s[i*5:(i+1)*5]) # type: (int, int, int) c2 += (c1 & 1) << 16 # 17 bits wide c3 += (c2 & 3) << 8 # 10 bits wide encoded += bytes([_b32tab[c1 >> 11], # bits 1 - 5 _b32tab[(c1 >> 6) & 0x1f], # bits 6 - 10 _b32tab[(c1 >> 1) & 0x1f], # bits 11 - 15 _b32tab[c2 >> 12], # bits 16 - 20 (1 - 5) _b32tab[(c2 >> 7) & 0x1f], # bits 21 - 25 (6 - 10) _b32tab[(c2 >> 2) & 0x1f], # bits 26 - 30 (11 - 15) _b32tab[c3 >> 5], # bits 31 - 35 (1 - 5) _b32tab[c3 & 0x1f], # bits 36 - 40 (1 - 5) ]) # Adjust for any leftover partial quanta if leftover == 1: return encoded[:-6] + b'======' elif leftover == 2: return encoded[:-4] + b'====' elif leftover == 3: return encoded[:-3] + b'===' elif leftover == 4: return encoded[:-1] + b'=' return encoded def b32decode(s: bytes, casefold: bool = False, map01: bytes = None) -> bytes: """Decode a Base32 encoded byte string. s is the byte string to decode. Optional casefold is a flag specifying whether a lowercase alphabet is acceptable as input. For security purposes, the default is False. RFC 3548 allows for optional mapping of the digit 0 (zero) to the letter O (oh), and for optional mapping of the digit 1 (one) to either the letter I (eye) or letter L (el). The optional argument map01 when not None, specifies which letter the digit 1 should be mapped to (when map01 is not None, the digit 0 is always mapped to the letter O). For security purposes the default is None, so that 0 and 1 are not allowed in the input. The decoded byte string is returned. binascii.Error is raised if the input is incorrectly padded or if there are non-alphabet characters present in the input. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) quanta, leftover = divmod(len(s), 8) if leftover: raise binascii.Error('Incorrect padding') # Handle section 2.4 zero and one mapping. 
The flag map01 will be either # False, or the character to map the digit 1 (one) to. It should be # either L (el) or I (eye). if map01 is not None: if not isinstance(map01, bytes_types): raise TypeError("expected bytes, not %s" % map01.__class__.__name__) assert len(map01) == 1, repr(map01) s = _translate(s, {b'0': b'O', b'1': map01}) if casefold: s = s.upper() # Strip off pad characters from the right. We need to count the pad # characters because this will tell us how many null bytes to remove from # the end of the decoded string. padchars = 0 mo = re.search(b'(?P[=]*)$', s) if mo: padchars = len(mo.group('pad')) if padchars > 0: s = s[:-padchars] # Now decode the full quanta parts = [] # type: List[bytes] acc = 0 shift = 35 for c in s: val = _b32rev.get(c) if val is None: raise TypeError('Non-base32 digit found') acc += _b32rev[c] << shift shift -= 5 if shift < 0: parts.append(binascii.unhexlify(bytes('%010x' % acc, "ascii"))) acc = 0 shift = 35 # Process the last, partial quanta last = binascii.unhexlify(bytes('%010x' % acc, "ascii")) if padchars == 0: last = b'' # No characters elif padchars == 1: last = last[:-1] elif padchars == 3: last = last[:-2] elif padchars == 4: last = last[:-3] elif padchars == 6: last = last[:-4] else: raise binascii.Error('Incorrect padding') parts.append(last) return b''.join(parts) # RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns # lowercase. The RFC also recommends against accepting input case # insensitively. def b16encode(s: bytes) -> bytes: """Encode a byte string using Base16. s is the byte string to encode. The encoded byte string is returned. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) return binascii.hexlify(s).upper() def b16decode(s: bytes, casefold: bool = False) -> bytes: """Decode a Base16 encoded byte string. s is the byte string to decode. Optional casefold is a flag specifying whether a lowercase alphabet is acceptable as input. 
For security purposes, the default is False. The decoded byte string is returned. binascii.Error is raised if s were incorrectly padded or if there are non-alphabet characters present in the string. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) if casefold: s = s.upper() if re.search(b'[^0-9A-F]', s): raise binascii.Error('Non-base16 digit found') return binascii.unhexlify(s) # Legacy interface. This code could be cleaned up since I don't believe # binascii has any line length limitations. It just doesn't seem worth it # though. The files should be opened in binary mode. MAXLINESIZE = 76 # Excluding the CRLF MAXBINSIZE = (MAXLINESIZE//4)*3 def encode(input: IO[bytes], output: IO[bytes]) -> None: """Encode a file; input and output are binary files.""" while True: s = input.read(MAXBINSIZE) if not s: break while len(s) < MAXBINSIZE: ns = input.read(MAXBINSIZE-len(s)) if not ns: break s += ns line = binascii.b2a_base64(s) output.write(line) def decode(input: IO[bytes], output: IO[bytes]) -> None: """Decode a file; input and output are binary files.""" while True: line = input.readline() if not line: break s = binascii.a2b_base64(line) output.write(s) def encodebytes(s: bytes) -> bytes: """Encode a bytestring into a bytestring containing multiple lines of base-64 data.""" if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) pieces = [] # type: List[bytes] for i in range(0, len(s), MAXBINSIZE): chunk = s[i : i + MAXBINSIZE] pieces.append(binascii.b2a_base64(chunk)) return b"".join(pieces) def encodestring(s: bytes) -> bytes: """Legacy alias of encodebytes().""" import warnings warnings.warn("encodestring() is a deprecated alias, use encodebytes()", DeprecationWarning, 2) return encodebytes(s) def decodebytes(s: bytes) -> bytes: """Decode a bytestring of base-64 data into a bytestring.""" if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % 
s.__class__.__name__) return binascii.a2b_base64(s) def decodestring(s: bytes) -> bytes: """Legacy alias of decodebytes().""" import warnings warnings.warn("decodestring() is a deprecated alias, use decodebytes()", DeprecationWarning, 2) return decodebytes(s) # Usable as a script... def main() -> None: """Small main program""" import sys, getopt try: opts, args = getopt.getopt(sys.argv[1:], 'deut') except getopt.error as msg: sys.stdout = sys.stderr print(msg) print("""usage: %s [-d|-e|-u|-t] [file|-] -d, -u: decode -e: encode (default) -t: encode and decode string 'Aladdin:open sesame'"""%sys.argv[0]) sys.exit(2) func = encode for o, a in opts: if o == '-e': func = encode if o == '-d': func = decode if o == '-u': func = decode if o == '-t': test(); return if args and args[0] != '-': with open(args[0], 'rb') as f: func(f, sys.stdout.buffer) else: func(sys.stdin.buffer, sys.stdout.buffer) def test() -> None: s0 = b"Aladdin:open sesame" print(repr(s0)) s1 = encodebytes(s0) print(repr(s1)) s2 = decodebytes(s1) print(repr(s2)) assert s0 == s2 if __name__ == '__main__': main() mypy-0.761/test-data/stdlib-samples/3.2/fnmatch.py0000644€tŠÔÚ€2›s®0000000671713576752246026040 0ustar jukkaDROPBOX\Domain Users00000000000000"""Filename matching with shell patterns. fnmatch(FILENAME, PATTERN) matches according to the local convention. fnmatchcase(FILENAME, PATTERN) always takes case in account. The functions operate by translating the pattern into a regular expression. They cache the compiled regular expressions for speed. The function translate(PATTERN) returns a regular expression corresponding to PATTERN. (It does not compile it.) """ import os import posixpath import re import functools from typing import Iterable, List, AnyStr, Any, Callable, Match __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] def fnmatch(name: AnyStr, pat: AnyStr) -> bool: """Test whether FILENAME matches PATTERN. Patterns are Unix shell style: * matches everything ? 
matches any single character [seq] matches any character in seq [!seq] matches any char not in seq An initial period in FILENAME is not special. Both FILENAME and PATTERN are first case-normalized if the operating system requires it. If you don't want this, use fnmatchcase(FILENAME, PATTERN). """ name = os.path.normcase(name) pat = os.path.normcase(pat) return fnmatchcase(name, pat) @functools.lru_cache(maxsize=250) def _compile_pattern(pat: AnyStr, is_bytes: bool = False) -> Callable[[AnyStr], Match[AnyStr]]: if isinstance(pat, bytes): pat_str = str(pat, 'ISO-8859-1') res_str = translate(pat_str) res = bytes(res_str, 'ISO-8859-1') else: res = translate(pat) return re.compile(res).match def filter(names: Iterable[AnyStr], pat: AnyStr) -> List[AnyStr]: """Return the subset of the list NAMES that match PAT.""" result = [] # type: List[AnyStr] pat = os.path.normcase(pat) match = _compile_pattern(pat, isinstance(pat, bytes)) if os.path is posixpath: # normcase on posix is NOP. Optimize it away from the loop. for name in names: if match(name): result.append(name) else: for name in names: if match(os.path.normcase(name)): result.append(name) return result def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: """Test whether FILENAME matches PATTERN, including case. This is a version of fnmatch() which doesn't case-normalize its arguments. """ match = _compile_pattern(pat, isinstance(pat, bytes)) return match(name) is not None def translate(pat: str) -> str: """Translate a shell PATTERN to a regular expression. There is no way to quote meta-characters. """ i, n = 0, len(pat) res = '' while i < n: c = pat[i] i = i+1 if c == '*': res = res + '.*' elif c == '?': res = res + '.' 
elif c == '[': j = i if j < n and pat[j] == '!': j = j+1 if j < n and pat[j] == ']': j = j+1 while j < n and pat[j] != ']': j = j+1 if j >= n: res = res + '\\[' else: stuff = pat[i:j].replace('\\','\\\\') i = j+1 if stuff[0] == '!': stuff = '^' + stuff[1:] elif stuff[0] == '^': stuff = '\\' + stuff res = '%s[%s]' % (res, stuff) else: res = res + re.escape(c) return res + r'\Z(?ms)' mypy-0.761/test-data/stdlib-samples/3.2/genericpath.py0000644€tŠÔÚ€2›s®0000000651613576752246026706 0ustar jukkaDROPBOX\Domain Users00000000000000""" Path operations common to more than one OS Do not use directly. The OS specific modules import the appropriate functions from this module themselves. """ import os import stat from typing import ( Any as Any_, List as List_, AnyStr as AnyStr_, Tuple as Tuple_ ) __all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime', 'getsize', 'isdir', 'isfile'] # Does a path exist? # This is false for dangling symbolic links on systems that support them. def exists(path: AnyStr_) -> bool: """Test whether a path exists. Returns False for broken symbolic links""" try: os.stat(path) except os.error: return False return True # This follows symbolic links, so both islink() and isdir() can be true # for the same path ono systems that support symlinks def isfile(path: AnyStr_) -> bool: """Test whether a path is a regular file""" try: st = os.stat(path) except os.error: return False return stat.S_ISREG(st.st_mode) # Is a path a directory? 
# This follows symbolic links, so both islink() and isdir() # can be true for the same path on systems that support symlinks def isdir(s: AnyStr_) -> bool: """Return true if the pathname refers to an existing directory.""" try: st = os.stat(s) except os.error: return False return stat.S_ISDIR(st.st_mode) def getsize(filename: AnyStr_) -> int: """Return the size of a file, reported by os.stat().""" return os.stat(filename).st_size def getmtime(filename: AnyStr_) -> float: """Return the last modification time of a file, reported by os.stat().""" return os.stat(filename).st_mtime def getatime(filename: AnyStr_) -> float: """Return the last access time of a file, reported by os.stat().""" return os.stat(filename).st_atime def getctime(filename: AnyStr_) -> float: """Return the metadata change time of a file, reported by os.stat().""" return os.stat(filename).st_ctime # Return the longest prefix of all list elements. def commonprefix(m: List_[Any_]) -> Any_: "Given a list of pathnames, returns the longest common leading component" if not m: return '' s1 = min(m) s2 = max(m) for i, c in enumerate(s1): if c != s2[i]: return s1[:i] return s1 # Split a path in root and extension. # The extension is everything starting at the last dot in the last # pathname component; the root is everything before that. # It is always true that root + ext == p. # Generic implementation of splitext, to be parametrized with # the separators def _splitext(p: AnyStr_, sep: AnyStr_, altsep: AnyStr_, extsep: AnyStr_) -> Tuple_[AnyStr_, AnyStr_]: """Split the extension from a pathname. Extension is everything from the last dot to the end, ignoring leading dots. Returns "(root, ext)"; ext may be empty.""" # NOTE: This code must work for text and bytes strings. 
sepIndex = p.rfind(sep) if altsep: altsepIndex = p.rfind(altsep) sepIndex = max(sepIndex, altsepIndex) dotIndex = p.rfind(extsep) if dotIndex > sepIndex: # skip all leading dots filenameIndex = sepIndex + 1 while filenameIndex < dotIndex: if p[filenameIndex:filenameIndex+1] != extsep: return p[:dotIndex], p[dotIndex:] filenameIndex += 1 return p, p[:0] mypy-0.761/test-data/stdlib-samples/3.2/getopt.py0000644€tŠÔÚ€2›s®0000001764313576752246025722 0ustar jukkaDROPBOX\Domain Users00000000000000"""Parser for command line options. This module helps scripts to parse the command line arguments in sys.argv. It supports the same conventions as the Unix getopt() function (including the special meanings of arguments of the form `-' and `--'). Long options similar to those supported by GNU software may be used as well via an optional third argument. This module provides two functions and an exception: getopt() -- Parse command line options gnu_getopt() -- Like getopt(), but allow option and non-option arguments to be intermixed. GetoptError -- exception (class) raised with 'opt' attribute, which is the option involved with the exception. """ # Long option support added by Lars Wirzenius . # # Gerrit Holl moved the string-based exceptions # to class-based exceptions. # # Peter Åstrand added gnu_getopt(). 
# # TODO for gnu_getopt(): # # - GNU getopt_long_only mechanism # - allow the caller to specify ordering # - RETURN_IN_ORDER option # - GNU extension with '-' as first character of option string # - optional arguments, specified by double colons # - a option string with a W followed by semicolon should # treat "-W foo" as "--foo" __all__ = ["GetoptError","error","getopt","gnu_getopt"] import os from typing import List, Tuple, Iterable class GetoptError(Exception): opt = '' msg = '' def __init__(self, msg: str, opt: str = '') -> None: self.msg = msg self.opt = opt Exception.__init__(self, msg, opt) def __str__(self) -> str: return self.msg error = GetoptError # backward compatibility def getopt(args: List[str], shortopts: str, longopts: Iterable[str] = []) -> Tuple[List[Tuple[str, str]], List[str]]: """getopt(args, options[, long_options]) -> opts, args Parses command line options and parameter list. args is the argument list to be parsed, without the leading reference to the running program. Typically, this means "sys.argv[1:]". shortopts is the string of option letters that the script wants to recognize, with options that require an argument followed by a colon (i.e., the same format that Unix getopt() uses). If specified, longopts is a list of strings with the names of the long options which should be supported. The leading '--' characters should not be included in the option name. Options which require an argument should be followed by an equal sign ('='). The return value consists of two elements: the first is a list of (option, value) pairs; the second is the list of program arguments left after the option list was stripped (this is a trailing slice of the first argument). Each option-and-value pair returned has the option as its first element, prefixed with a hyphen (e.g., '-x'), and the option argument as its second element, or an empty string if the option has no argument. 
The options occur in the list in the same order in which they were found, thus allowing multiple occurrences. Long and short options may be mixed. """ opts = [] # type: List[Tuple[str, str]] if isinstance(longopts, str): longopts = [longopts] else: longopts = list(longopts) while args and args[0].startswith('-') and args[0] != '-': if args[0] == '--': args = args[1:] break if args[0].startswith('--'): opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) else: opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) return opts, args def gnu_getopt(args: List[str], shortopts: str, longopts: Iterable[str] = []) -> Tuple[List[Tuple[str, str]], List[str]]: """getopt(args, options[, long_options]) -> opts, args This function works like getopt(), except that GNU style scanning mode is used by default. This means that option and non-option arguments may be intermixed. The getopt() function stops processing options as soon as a non-option argument is encountered. If the first character of the option string is `+', or if the environment variable POSIXLY_CORRECT is set, then option processing stops as soon as a non-option argument is encountered. """ opts = [] # type: List[Tuple[str, str]] prog_args = [] # type: List[str] if isinstance(longopts, str): longopts = [longopts] else: longopts = list(longopts) # Allow options after non-option arguments? 
if shortopts.startswith('+'): shortopts = shortopts[1:] all_options_first = True elif os.environ.get("POSIXLY_CORRECT"): all_options_first = True else: all_options_first = False while args: if args[0] == '--': prog_args += args[1:] break if args[0][:2] == '--': opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) elif args[0][:1] == '-' and args[0] != '-': opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) else: if all_options_first: prog_args += args break else: prog_args.append(args[0]) args = args[1:] return opts, prog_args def do_longs(opts: List[Tuple[str, str]], opt: str, longopts: List[str], args: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]: try: i = opt.index('=') except ValueError: optarg = None # type: str else: opt, optarg = opt[:i], opt[i+1:] has_arg, opt = long_has_args(opt, longopts) if has_arg: if optarg is None: if not args: raise GetoptError('option --%s requires argument' % opt, opt) optarg, args = args[0], args[1:] elif optarg is not None: raise GetoptError('option --%s must not have an argument' % opt, opt) opts.append(('--' + opt, optarg or '')) return opts, args # Return: # has_arg? # full option name def long_has_args(opt: str, longopts: List[str]) -> Tuple[bool, str]: possibilities = [o for o in longopts if o.startswith(opt)] if not possibilities: raise GetoptError('option --%s not recognized' % opt, opt) # Is there an exact match? if opt in possibilities: return False, opt elif opt + '=' in possibilities: return True, opt # No exact match, so better be unique. 
if len(possibilities) > 1: # XXX since possibilities contains all valid continuations, might be # nice to work them into the error msg raise GetoptError('option --%s not a unique prefix' % opt, opt) assert len(possibilities) == 1 unique_match = possibilities[0] has_arg = unique_match.endswith('=') if has_arg: unique_match = unique_match[:-1] return has_arg, unique_match def do_shorts(opts: List[Tuple[str, str]], optstring: str, shortopts: str, args: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]: while optstring != '': opt, optstring = optstring[0], optstring[1:] if short_has_arg(opt, shortopts): if optstring == '': if not args: raise GetoptError('option -%s requires argument' % opt, opt) optstring, args = args[0], args[1:] optarg, optstring = optstring, '' else: optarg = '' opts.append(('-' + opt, optarg)) return opts, args def short_has_arg(opt: str, shortopts: str) -> bool: for i in range(len(shortopts)): if opt == shortopts[i] != ':': return shortopts.startswith(':', i+1) raise GetoptError('option -%s not recognized' % opt, opt) if __name__ == '__main__': import sys print(getopt(sys.argv[1:], "a:b", ["alpha=", "beta"])) mypy-0.761/test-data/stdlib-samples/3.2/glob.py0000644€tŠÔÚ€2›s®0000000471313576752246025335 0ustar jukkaDROPBOX\Domain Users00000000000000"""Filename globbing utility.""" import os import re import fnmatch from typing import List, Iterator, Iterable, Any, AnyStr __all__ = ["glob", "iglob"] def glob(pathname: AnyStr) -> List[AnyStr]: """Return a list of paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. """ return list(iglob(pathname)) def iglob(pathname: AnyStr) -> Iterator[AnyStr]: """Return an iterator which yields the paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. 
""" if not has_magic(pathname): if os.path.lexists(pathname): yield pathname return dirname, basename = os.path.split(pathname) if not dirname: for name in glob1(None, basename): yield name return if has_magic(dirname): dirs = iglob(dirname) # type: Iterable[AnyStr] else: dirs = [dirname] if has_magic(basename): glob_in_dir = glob1 # type: Any else: glob_in_dir = glob0 for dirname in dirs: for name in glob_in_dir(dirname, basename): yield os.path.join(dirname, name) # These 2 helper functions non-recursively glob inside a literal directory. # They return a list of basenames. `glob1` accepts a pattern while `glob0` # takes a literal basename (so it only has to check for its existence). def glob1(dirname: AnyStr, pattern: AnyStr) -> List[AnyStr]: if not dirname: if isinstance(pattern, bytes): dirname = bytes(os.curdir, 'ASCII') else: dirname = os.curdir try: names = os.listdir(dirname) except os.error: return [] if pattern[0] != '.': names = [x for x in names if x[0] != '.'] return fnmatch.filter(names, pattern) def glob0(dirname: AnyStr, basename: AnyStr) -> List[AnyStr]: if basename == '': # `os.path.split()` returns an empty basename for paths ending with a # directory separator. 'q*x/' should match only directories. if os.path.isdir(dirname): return [basename] else: if os.path.lexists(os.path.join(dirname, basename)): return [basename] return [] magic_check = re.compile('[*?[]') magic_check_bytes = re.compile(b'[*?[]') def has_magic(s: AnyStr) -> bool: if isinstance(s, bytes): match = magic_check_bytes.search(s) else: match = magic_check.search(s) return match is not None mypy-0.761/test-data/stdlib-samples/3.2/posixpath.py0000644€tŠÔÚ€2›s®0000003413613576752246026433 0ustar jukkaDROPBOX\Domain Users00000000000000"""Common operations on Posix pathnames. Instead of importing this module directly, import os and refer to this module as os.path. The "os.path" name is an alias for this module on Posix systems; on other systems (e.g. 
Mac, Windows), os.path provides the same operations in a manner specific to that platform, and is an alias to another module (e.g. macpath, ntpath). Some of this can actually be useful on non-Posix systems too, e.g. for manipulation of the pathname component of URLs. """ import os import sys import stat import genericpath from genericpath import * from typing import ( Tuple, BinaryIO, TextIO, Pattern, AnyStr, List, Set, Any, Union, cast ) __all__ = ["normcase","isabs","join","splitdrive","split","splitext", "basename","dirname","commonprefix","getsize","getmtime", "getatime","getctime","islink","exists","lexists","isdir","isfile", "ismount", "expanduser","expandvars","normpath","abspath", "samefile","sameopenfile","samestat", "curdir","pardir","sep","pathsep","defpath","altsep","extsep", "devnull","realpath","supports_unicode_filenames","relpath"] # Strings representing various path-related bits and pieces. # These are primarily for export; internally, they are hardcoded. curdir = '.' pardir = '..' extsep = '.' sep = '/' pathsep = ':' defpath = ':/bin:/usr/bin' altsep = None # type: str devnull = '/dev/null' def _get_sep(path: AnyStr) -> AnyStr: if isinstance(path, bytes): return b'/' else: return '/' # Normalize the case of a pathname. Trivial in Posix, string.lower on Mac. # On MS-DOS this may also turn slashes into backslashes; however, other # normalizations (such as optimizing '../' away) are not allowed # (another function should be defined to do that). def normcase(s: AnyStr) -> AnyStr: """Normalize case of pathname. Has no effect under Posix""" # TODO: on Mac OS X, this should really return s.lower(). if not isinstance(s, (bytes, str)): raise TypeError("normcase() argument must be str or bytes, " "not '{}'".format(s.__class__.__name__)) return cast(AnyStr, s) # Return whether a path is absolute. # Trivial in Posix, harder on the Mac or MS-DOS. 
def isabs(s: AnyStr) -> bool:
    """Test whether a path is absolute."""
    return s.startswith(_get_sep(s))


# Join pathnames.
# Ignore the previous parts if a part is absolute.
# Insert a '/' unless the first part is empty or already ends in '/'.

def join(a: AnyStr, *p: AnyStr) -> AnyStr:
    """Join two or more pathname components, inserting '/' as needed.
    If any component is an absolute path, all previous path components
    will be discarded."""
    sep = _get_sep(a)
    result = a
    for component in p:
        if component.startswith(sep):
            # Absolute component discards everything accumulated so far.
            result = component
        elif not result or result.endswith(sep):
            result += component
        else:
            result = result + sep + component
    return result


# Split a path in head (everything up to the last '/') and tail (the
# rest).  If the path ends in '/', tail will be empty.  If there is no
# '/' in the path, head will be empty.
# Trailing '/'es are stripped from head unless it is the root.

def split(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
    """Split a pathname.  Returns tuple "(head, tail)" where "tail" is
    everything after the final slash.  Either part may be empty."""
    sep = _get_sep(p)
    cut = p.rfind(sep) + 1
    head = p[:cut]
    tail = p[cut:]
    # Strip trailing separators from head, but keep a pure-root head intact.
    if head and head != sep * len(head):
        head = head.rstrip(sep)
    return head, tail


# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.

def splitext(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
    # POSIX has no alternative separator, hence the None argument.
    if isinstance(p, bytes):
        sep, extsep = b'/', b'.'
    else:
        sep, extsep = '/', '.'
    return genericpath._splitext(p, sep, None, extsep)
splitext.__doc__ = genericpath._splitext.__doc__

# Split a pathname into a drive specification and the rest of the
# path.  Useful on DOS/Windows/NT; on Unix, the drive is always empty.

def splitdrive(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
    """Split a pathname into drive and path. On Posix, drive is always
    empty."""
    # p[:0] yields an empty str or bytes matching the input type.
    return p[:0], p

# Return the tail (basename) part of a path, same as split(path)[1].
def basename(p: AnyStr) -> AnyStr:
    """Returns the final component of a pathname"""
    sep = _get_sep(p)
    return p[p.rfind(sep) + 1:]


# Return the head (dirname) part of a path, same as split(path)[0].

def dirname(p: AnyStr) -> AnyStr:
    """Returns the directory component of a pathname"""
    sep = _get_sep(p)
    head = p[:p.rfind(sep) + 1]
    # Strip trailing separators unless head is nothing but separators (root).
    if head and head != sep * len(head):
        head = head.rstrip(sep)
    return head


# Is a path a symbolic link?
# This will always return false on systems where os.lstat doesn't exist.

def islink(path: AnyStr) -> bool:
    """Test whether a path is a symbolic link"""
    try:
        st = os.lstat(path)
    except (os.error, AttributeError):
        # Missing path, or platform without lstat at all.
        return False
    return stat.S_ISLNK(st.st_mode)


# Being true for dangling symbolic links is also useful.

def lexists(path: AnyStr) -> bool:
    """Test whether a path exists.  Returns True for broken symbolic links"""
    try:
        os.lstat(path)
    except os.error:
        return False
    return True


# Are two filenames really pointing to the same file?

def samefile(f1: AnyStr, f2: AnyStr) -> bool:
    """Test whether two pathnames reference the same actual file"""
    return samestat(os.stat(f1), os.stat(f2))


# Are two open files really referencing the same file?
# (Not necessarily the same file descriptor!)

def sameopenfile(fp1: int, fp2: int) -> bool:
    """Test whether two open file objects reference the same file"""
    return samestat(os.fstat(fp1), os.fstat(fp2))


# Are two stat buffers (obtained from stat, fstat or lstat)
# describing the same file?

def samestat(s1: os.stat_result, s2: os.stat_result) -> bool:
    """Test whether two stat buffers reference the same file"""
    # Identity of a file is its (inode, device) pair.
    return (s1.st_ino == s2.st_ino) and (s1.st_dev == s2.st_dev)


# Is a path a mount point?
# (Does this work for all UNIXes?  Is it even guaranteed to work by Posix?)
def ismount(path: AnyStr) -> bool:
    """Test whether a path is a mount point"""
    if islink(path):
        # A symlink can never be a mount point
        return False
    try:
        s1 = os.lstat(path)
        if isinstance(path, bytes):
            parent = join(path, b'..')
        else:
            parent = join(path, '..')
        s2 = os.lstat(parent)
    except os.error:
        # It doesn't exist -- so not a mount point :-)
        return False
    if s1.st_dev != s2.st_dev:
        # path/.. on a different device as path
        return True
    if s1.st_ino == s2.st_ino:
        # path/.. is the same i-node as path
        return True
    return False


# Expand paths beginning with '~' or '~user'.
# '~' means $HOME; '~user' means that user's home directory.
# If the path doesn't begin with '~', or if the user or $HOME is unknown,
# the path is returned unchanged (leaving error reporting to whatever
# function is called with the expanded path as argument).
# See also module 'glob' for expansion of *, ? and [...] in pathnames.
# (A function should also be defined to do full *sh-style environment
# variable expansion.)

def expanduser(path: AnyStr) -> AnyStr:
    """Expand ~ and ~user constructions.  If user or $HOME is unknown,
    do nothing."""
    tilde = b'~' if isinstance(path, bytes) else '~'
    if not path.startswith(tilde):
        # Fast path: nothing to expand.
        return path
    sep = _get_sep(path)
    i = path.find(sep, 1)
    if i < 0:
        i = len(path)
    if i == 1:
        # Bare '~': resolve the current user's home directory.
        userhome = None  # type: Union[str, bytes]
        if 'HOME' not in os.environ:
            import pwd
            userhome = pwd.getpwuid(os.getuid()).pw_dir
        else:
            userhome = os.environ['HOME']
    else:
        # '~user': look the named user up in the password database.
        import pwd
        name = path[1:i]  # type: Union[str, bytes]
        if isinstance(name, bytes):
            name = str(name, 'ASCII')
        try:
            pwent = pwd.getpwnam(name)
        except KeyError:
            # Unknown user: return the path unchanged.
            return path
        userhome = pwent.pw_dir
    if isinstance(path, bytes):
        userhome = os.fsencode(userhome)
        root = b'/'
    else:
        root = '/'
    userhome = userhome.rstrip(root)
    # If home stripped to nothing and the rest is empty, fall back to root.
    return (userhome + path[i:]) or root


# Expand paths containing shell variable substitutions.
# This expands the forms $variable and ${variable} only.
# Non-existent variables are left unchanged. _varprog = None # type: Pattern[str] _varprogb = None # type: Pattern[bytes] def expandvars(path: AnyStr) -> AnyStr: """Expand shell variables of form $var and ${var}. Unknown variables are left unchanged.""" global _varprog, _varprogb if isinstance(path, bytes): if b'$' not in path: return path if not _varprogb: import re _varprogb = re.compile(br'\$(\w+|\{[^}]*\})', re.ASCII) search = _varprogb.search start = b'{' end = b'}' else: if '$' not in path: return path if not _varprog: import re _varprog = re.compile(r'\$(\w+|\{[^}]*\})', re.ASCII) search = _varprog.search start = '{' end = '}' i = 0 while True: m = search(path, i) if not m: break i, j = m.span(0) name = None # type: Union[str, bytes] name = m.group(1) if name.startswith(start) and name.endswith(end): name = name[1:-1] if isinstance(name, bytes): name = str(name, 'ASCII') if name in os.environ: tail = path[j:] value = None # type: Union[str, bytes] value = os.environ[name] if isinstance(path, bytes): value = value.encode('ASCII') path = path[:i] + value i = len(path) path += tail else: i = j return path # Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B. # It should be understood that this may change the meaning of the path # if it contains symbolic links! def normpath(path: AnyStr) -> AnyStr: """Normalize path, eliminating double slashes, etc.""" if isinstance(path, bytes): sep = b'/' empty = b'' dot = b'.' dotdot = b'..' else: sep = '/' empty = '' dot = '.' dotdot = '..' if path == empty: return dot initial_slashes = path.startswith(sep) # type: int # POSIX allows one or two initial slashes, but treats three or more # as single slash. 
if (initial_slashes and path.startswith(sep*2) and not path.startswith(sep*3)): initial_slashes = 2 comps = path.split(sep) new_comps = [] # type: List[AnyStr] for comp in comps: if comp in (empty, dot): continue if (comp != dotdot or (not initial_slashes and not new_comps) or (new_comps and new_comps[-1] == dotdot)): new_comps.append(comp) elif new_comps: new_comps.pop() comps = new_comps path = sep.join(comps) if initial_slashes: path = sep*initial_slashes + path return path or dot def abspath(path: AnyStr) -> AnyStr: """Return an absolute path.""" if not isabs(path): if isinstance(path, bytes): cwd = os.getcwdb() else: cwd = os.getcwd() path = join(cwd, path) return normpath(path) # Return a canonical path (i.e. the absolute location of a file on the # filesystem). def realpath(filename: AnyStr) -> AnyStr: """Return the canonical path of the specified filename, eliminating any symbolic links encountered in the path.""" if isinstance(filename, bytes): sep = b'/' empty = b'' else: sep = '/' empty = '' if isabs(filename): bits = [sep] + filename.split(sep)[1:] else: bits = [empty] + filename.split(sep) for i in range(2, len(bits)+1): component = join(*bits[0:i]) # Resolve symbolic links. if islink(component): resolved = _resolve_link(component) if resolved is None: # Infinite loop -- return original component + rest of the path return abspath(join(*([component] + bits[i:]))) else: newpath = join(*([resolved] + bits[i:])) return realpath(newpath) return abspath(filename) def _resolve_link(path: AnyStr) -> AnyStr: """Internal helper function. Takes a path and follows symlinks until we either arrive at something that isn't a symlink, or encounter a path we've seen before (meaning that there's a loop). 
""" paths_seen = set() # type: Set[AnyStr] while islink(path): if path in paths_seen: # Already seen this path, so we must have a symlink loop return None paths_seen.add(path) # Resolve where the link points to resolved = os.readlink(path) if not isabs(resolved): dir = dirname(path) path = normpath(join(dir, resolved)) else: path = normpath(resolved) return path supports_unicode_filenames = (sys.platform == 'darwin') def relpath(path: AnyStr, start: AnyStr = None) -> AnyStr: """Return a relative version of a path""" if not path: raise ValueError("no path specified") if isinstance(path, bytes): curdir = b'.' sep = b'/' pardir = b'..' else: curdir = '.' sep = '/' pardir = '..' if start is None: start = curdir start_list = [x for x in abspath(start).split(sep) if x] path_list = [x for x in abspath(path).split(sep) if x] # Work out how much of the filepath is shared by start and path. i = len(commonprefix([start_list, path_list])) rel_list = [pardir] * (len(start_list)-i) + path_list[i:] if not rel_list: return curdir return join(*rel_list) mypy-0.761/test-data/stdlib-samples/3.2/pprint.py0000644€tŠÔÚ€2›s®0000003252513576752246025730 0ustar jukkaDROPBOX\Domain Users00000000000000# Author: Fred L. Drake, Jr. # fdrake@acm.org # # This is a simple little module I wrote to make life easier. I didn't # see anything quite like it in the library, though I may have overlooked # something. I wrote this when I was trying to read some heavily nested # tuples with fairly non-descriptive content. This is modeled very much # after Lisp/Scheme - style pretty-printing of lists. If you find it # useful, thank small children who sleep at night. """Support to pretty-print lists, tuples, & dictionaries recursively. Very simple, but useful, especially in debugging data structures. Classes ------- PrettyPrinter() Handle pretty-printing operations onto a stream using a configured set of formatting parameters. 
Functions --------- pformat() Format a Python object into a pretty-printed representation. pprint() Pretty-print a Python object to a stream [default is sys.stdout]. saferepr() Generate a 'standard' repr()-like value, but protect against recursive data structures. """ import sys as _sys from collections import OrderedDict as _OrderedDict from io import StringIO as _StringIO from typing import Any, Tuple, Dict, TextIO, cast, List __all__ = ["pprint","pformat","isreadable","isrecursive","saferepr", "PrettyPrinter"] # cache these for faster access: _commajoin = ", ".join _id = id _len = len _type = type def pprint(object: object, stream: TextIO = None, indent: int = 1, width: int = 80, depth: int = None) -> None: """Pretty-print a Python object to a stream [default is sys.stdout].""" printer = PrettyPrinter( stream=stream, indent=indent, width=width, depth=depth) printer.pprint(object) def pformat(object: object, indent: int = 1, width: int = 80, depth: int = None) -> str: """Format a Python object into a pretty-printed representation.""" return PrettyPrinter(indent=indent, width=width, depth=depth).pformat(object) def saferepr(object: object) -> str: """Version of repr() which can handle recursive data structures.""" return _safe_repr(object, {}, None, 0)[0] def isreadable(object: object) -> bool: """Determine if saferepr(object) is readable by eval().""" return _safe_repr(object, {}, None, 0)[1] def isrecursive(object: object) -> bool: """Determine if object requires a recursive representation.""" return _safe_repr(object, {}, None, 0)[2] class _safe_key: """Helper function for key functions when sorting unorderable objects. The wrapped-object will fallback to an Py2.x style comparison for unorderable types (sorting first comparing the type name and then by the obj ids). Does not work recursively, so dict.items() must have _safe_key applied to both the key and the value. 
""" __slots__ = ['obj'] def __init__(self, obj: Any) -> None: self.obj = obj def __lt__(self, other: Any) -> Any: rv = self.obj.__lt__(other.obj) # type: Any if rv is NotImplemented: rv = (str(type(self.obj)), id(self.obj)) < \ (str(type(other.obj)), id(other.obj)) return rv def _safe_tuple(t: Tuple[Any, Any]) -> Tuple[_safe_key, _safe_key]: "Helper function for comparing 2-tuples" return _safe_key(t[0]), _safe_key(t[1]) class PrettyPrinter: def __init__(self, indent: int = 1, width: int = 80, depth: int = None, stream: TextIO = None) -> None: """Handle pretty printing operations onto a stream using a set of configured parameters. indent Number of spaces to indent for each level of nesting. width Attempted maximum number of columns in the output. depth The maximum depth to print out nested structures. stream The desired output stream. If omitted (or false), the standard output stream available at construction will be used. """ indent = int(indent) width = int(width) assert indent >= 0, "indent must be >= 0" assert depth is None or depth > 0, "depth must be > 0" assert width, "width must be != 0" self._depth = depth self._indent_per_level = indent self._width = width if stream is not None: self._stream = stream else: self._stream = _sys.stdout def pprint(self, object: object) -> None: self._format(object, self._stream, 0, 0, {}, 0) self._stream.write("\n") def pformat(self, object: object) -> str: sio = _StringIO() self._format(object, sio, 0, 0, {}, 0) return sio.getvalue() def isrecursive(self, object: object) -> int: return self.format(object, {}, 0, 0)[2] def isreadable(self, object: object) -> int: s, readable, recursive = self.format(object, {}, 0, 0) return readable and not recursive def _format(self, object: object, stream: TextIO, indent: int, allowance: int, context: Dict[int, int], level: int) -> None: level = level + 1 objid = _id(object) if objid in context: stream.write(_recursion(object)) self._recursive = True self._readable = False return rep = 
self._repr(object, context, level - 1) typ = _type(object) sepLines = _len(rep) > (self._width - 1 - indent - allowance) write = stream.write if self._depth and level > self._depth: write(rep) return if sepLines: r = getattr(typ, "__repr__", None) if isinstance(object, dict): write('{') if self._indent_per_level > 1: write((self._indent_per_level - 1) * ' ') length = _len(object) if length: context[objid] = 1 indent = indent + self._indent_per_level if issubclass(typ, _OrderedDict): items = list(object.items()) else: items = sorted(object.items(), key=_safe_tuple) key, ent = items[0] rep = self._repr(key, context, level) write(rep) write(': ') self._format(ent, stream, indent + _len(rep) + 2, allowance + 1, context, level) if length > 1: for key, ent in items[1:]: rep = self._repr(key, context, level) write(',\n%s%s: ' % (' '*indent, rep)) self._format(ent, stream, indent + _len(rep) + 2, allowance + 1, context, level) indent = indent - self._indent_per_level del context[objid] write('}') return if ((issubclass(typ, list) and r is list.__repr__) or (issubclass(typ, tuple) and r is tuple.__repr__) or (issubclass(typ, set) and r is set.__repr__) or (issubclass(typ, frozenset) and r is frozenset.__repr__) ): anyobj = cast(Any, object) # TODO Collection? 
length = _len(anyobj) if issubclass(typ, list): write('[') endchar = ']' lst = anyobj elif issubclass(typ, set): if not length: write('set()') return write('{') endchar = '}' lst = sorted(anyobj, key=_safe_key) elif issubclass(typ, frozenset): if not length: write('frozenset()') return write('frozenset({') endchar = '})' lst = sorted(anyobj, key=_safe_key) indent += 10 else: write('(') endchar = ')' lst = list(anyobj) if self._indent_per_level > 1: write((self._indent_per_level - 1) * ' ') if length: context[objid] = 1 indent = indent + self._indent_per_level self._format(lst[0], stream, indent, allowance + 1, context, level) if length > 1: for ent in lst[1:]: write(',\n' + ' '*indent) self._format(ent, stream, indent, allowance + 1, context, level) indent = indent - self._indent_per_level del context[objid] if issubclass(typ, tuple) and length == 1: write(',') write(endchar) return write(rep) def _repr(self, object: object, context: Dict[int, int], level: int) -> str: repr, readable, recursive = self.format(object, context.copy(), self._depth, level) if not readable: self._readable = False if recursive: self._recursive = True return repr def format(self, object: object, context: Dict[int, int], maxlevels: int, level: int) -> Tuple[str, int, int]: """Format object for a specific context, returning a string and flags indicating whether the representation is 'readable' and whether the object represents a recursive construct. """ return _safe_repr(object, context, maxlevels, level) # Return triple (repr_string, isreadable, isrecursive). 
def _safe_repr(object: object, context: Dict[int, int], maxlevels: int, level: int) -> Tuple[str, bool, bool]: typ = _type(object) if typ is str: s = cast(str, object) if 'locale' not in _sys.modules: return repr(object), True, False if "'" in s and '"' not in s: closure = '"' quotes = {'"': '\\"'} else: closure = "'" quotes = {"'": "\\'"} qget = quotes.get sio = _StringIO() write = sio.write for char in s: if char.isalpha(): write(char) else: write(qget(char, repr(char)[1:-1])) return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False r = getattr(typ, "__repr__", None) if issubclass(typ, dict) and r is dict.__repr__: if not object: return "{}", True, False objid = _id(object) if maxlevels and level >= maxlevels: return "{...}", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] # type: List[str] append = components.append level += 1 saferepr = _safe_repr items = sorted((cast(dict, object)).items(), key=_safe_tuple) for k, v in items: krepr, kreadable, krecur = saferepr(k, context, maxlevels, level) vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level) append("%s: %s" % (krepr, vrepr)) readable = readable and kreadable and vreadable if krecur or vrecur: recursive = True del context[objid] return "{%s}" % _commajoin(components), readable, recursive if (issubclass(typ, list) and r is list.__repr__) or \ (issubclass(typ, tuple) and r is tuple.__repr__): anyobj = cast(Any, object) # TODO Sequence? 
if issubclass(typ, list): if not object: return "[]", True, False format = "[%s]" elif _len(anyobj) == 1: format = "(%s,)" else: if not object: return "()", True, False format = "(%s)" objid = _id(object) if maxlevels and level >= maxlevels: return format % "...", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 for o in anyobj: orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level) append(orepr) if not oreadable: readable = False if orecur: recursive = True del context[objid] return format % _commajoin(components), readable, recursive rep = repr(object) return rep, bool(rep and not rep.startswith('<')), False def _recursion(object: object) -> str: return ("" % (_type(object).__name__, _id(object))) def _perfcheck(object: object = None) -> None: import time if object is None: object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000 p = PrettyPrinter() t1 = time.time() _safe_repr(object, {}, None, 0) t2 = time.time() p.pformat(object) t3 = time.time() print("_safe_repr:", t2 - t1) print("pformat:", t3 - t2) if __name__ == "__main__": _perfcheck() mypy-0.761/test-data/stdlib-samples/3.2/random.py0000644€tŠÔÚ€2›s®0000006406413576752246025677 0ustar jukkaDROPBOX\Domain Users00000000000000"""Random variable generators. integers -------- uniform within range sequences --------- pick random element pick random sample generate random permutation distributions on the real line: ------------------------------ uniform triangular normal (Gaussian) lognormal negative exponential gamma beta pareto Weibull distributions on the circle (angles 0 to 2pi) --------------------------------------------- circular uniform von Mises General notes on the underlying Mersenne Twister core generator: * The period is 2**19937-1. * It is one of the most extensively tested generators in existence. 
* The random() method is implemented in C, executes in a single Python step, and is, therefore, threadsafe. """ from warnings import warn as _warn from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethodType from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin from os import urandom as _urandom from collections import Set as _Set, Sequence as _Sequence from hashlib import sha512 as _sha512 from typing import ( Any, TypeVar, Iterable, Sequence, List, Callable, Set, cast, SupportsInt, Union ) __all__ = ["Random","seed","random","uniform","randint","choice","sample", "randrange","shuffle","normalvariate","lognormvariate", "expovariate","vonmisesvariate","gammavariate","triangular", "gauss","betavariate","paretovariate","weibullvariate", "getstate","setstate", "getrandbits", "SystemRandom"] NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0) TWOPI = 2.0*_pi LOG4 = _log(4.0) SG_MAGICCONST = 1.0 + _log(4.5) BPF = 53 # Number of bits in a float RECIP_BPF = 2**-BPF # type: float # Translated by Guido van Rossum from C source provided by # Adrian Baddeley. Adapted by Raymond Hettinger for use with # the Mersenne Twister and os.urandom() core generators. import _random T = TypeVar('T') class Random(_random.Random): """Random number generator base class used by bound module functions. Used to instantiate instances of Random to get generators that don't share state. Class Random can also be subclassed if you want to use a different basic generator of your own devising: in that case, override the following methods: random(), seed(), getstate(), and setstate(). Optionally, implement a getrandbits() method so that randrange() can cover arbitrarily large ranges. """ VERSION = 3 # used by getstate/setstate gauss_next = 0.0 def __init__(self, x: object = None) -> None: """Initialize an instance. Optional argument x controls seeding, as for Random.seed(). 
""" self.seed(x) self.gauss_next = None def seed(self, a: Any = None, version: int = 2) -> None: """Initialize internal state from hashable object. None or no argument seeds from current time or from an operating system specific randomness source if available. For version 2 (the default), all of the bits are used if *a *is a str, bytes, or bytearray. For version 1, the hash() of *a* is used instead. If *a* is an int, all bits are used. """ if a is None: try: a = int.from_bytes(_urandom(32), 'big') except NotImplementedError: import time a = int(time.time() * 256) # use fractional seconds if version == 2: if isinstance(a, (str, bytes, bytearray)): if isinstance(a, str): a = a.encode() a += _sha512(a).digest() a = int.from_bytes(a, 'big') super().seed(a) self.gauss_next = None def getstate(self) -> tuple: """Return internal state; can be passed to setstate() later.""" return self.VERSION, super().getstate(), self.gauss_next def setstate(self, state: tuple) -> None: """Restore internal state from object returned by getstate().""" version = state[0] if version == 3: version, internalstate, self.gauss_next = state super().setstate(internalstate) elif version == 2: version, internalstate, self.gauss_next = state # In version 2, the state was saved as signed ints, which causes # inconsistencies between 32/64-bit systems. The state is # really unsigned 32-bit ints, so we convert negative ints from # version 2 to positive longs for version 3. try: internalstate = tuple(x % (2**32) for x in internalstate) except ValueError as e: raise TypeError() super().setstate(internalstate) else: raise ValueError("state with version %s passed to " "Random.setstate() of version %s" % (version, self.VERSION)) ## ---- Methods below this point do not need to be overridden when ## ---- subclassing for the purpose of using a different core generator. 
## -------------------- pickle support ------------------- def __getstate__(self) -> object: # for pickle return self.getstate() def __setstate__(self, state: Any) -> None: # for pickle self.setstate(state) def __reduce__(self) -> tuple: return self.__class__, (), self.getstate() ## -------------------- integer methods ------------------- def randrange(self, start: SupportsInt, stop: SupportsInt = None, step: int = 1, int: Callable[[SupportsInt], int] = int) -> int: """Choose a random item from range(start, stop[, step]). This fixes the problem with randint() which includes the endpoint; in Python this is usually not what you want. Do not supply the 'int' argument. """ # This code is a bit messy to make it fast for the # common case while still doing adequate error checking. istart = int(start) if istart != start: raise ValueError("non-integer arg 1 for randrange()") if stop is None: if istart > 0: return self._randbelow(istart) raise ValueError("empty range for randrange()") # stop argument supplied. istop = int(stop) if istop != stop: raise ValueError("non-integer stop for randrange()") width = istop - istart if step == 1 and width > 0: return istart + self._randbelow(width) if step == 1: raise ValueError("empty range for randrange() (%d,%d, %d)" % (istart, istop, width)) # Non-unit step argument supplied. istep = int(step) if istep != step: raise ValueError("non-integer step for randrange()") if istep > 0: n = (width + istep - 1) // istep elif istep < 0: n = (width + istep + 1) // istep else: raise ValueError("zero step for randrange()") if n <= 0: raise ValueError("empty range for randrange()") return istart + istep*self._randbelow(n) def randint(self, a: int, b: int) -> int: """Return random integer in range [a, b], including both end points. """ return self.randrange(a, b+1) def _randbelow(self, n: int, int: Callable[[float], int] = int, maxsize: int = 1< int: "Return a random int in the range [0,n). Raises ValueError if n==0." 
getrandbits = self.getrandbits # Only call self.getrandbits if the original random() builtin method # has not been overridden or if a new getrandbits() was supplied. if type(self.random) is BuiltinMethod or type(getrandbits) is Method: k = n.bit_length() # don't use (n-1) here because n can be 1 r = getrandbits(k) # 0 <= r < 2**k while r >= n: r = getrandbits(k) return r # There's an overriden random() method but no new getrandbits() method, # so we can only use random() from here. random = self.random if n >= maxsize: _warn("Underlying random() generator does not supply \n" "enough bits to choose from a population range this large.\n" "To remove the range limitation, add a getrandbits() method.") return int(random() * n) rem = maxsize % n limit = (maxsize - rem) / maxsize # int(limit * maxsize) % n == 0 s = random() while s >= limit: s = random() return int(s*maxsize) % n ## -------------------- sequence methods ------------------- def choice(self, seq: Sequence[T]) -> T: """Choose a random element from a non-empty sequence.""" try: i = self._randbelow(len(seq)) except ValueError: raise IndexError('Cannot choose from an empty sequence') return seq[i] def shuffle(self, x: List[T], random: Callable[[], float] = None, int: Callable[[float], int] = int) -> None: """x, random=random.random -> shuffle list x in place; return None. Optional arg random is a 0-argument function returning a random float in [0.0, 1.0); by default, the standard random.random. """ randbelow = self._randbelow for i in reversed(range(1, len(x))): # pick an element in x[:i+1] with which to exchange x[i] j = randbelow(i+1) if random is None else int(random() * (i+1)) x[i], x[j] = x[j], x[i] def sample(self, population: Union[_Set[T], _Sequence[T]], k: int) -> List[T]: """Chooses k unique random elements from a population sequence or set. Returns a new list containing elements from the population while leaving the original population unchanged. 
The resulting list is in selection order so that all sub-slices will also be valid random samples. This allows raffle winners (the sample) to be partitioned into grand prize and second place winners (the subslices). Members of the population need not be hashable or unique. If the population contains repeats, then each occurrence is a possible selection in the sample. To choose a sample in a range of integers, use range as an argument. This is especially fast and space efficient for sampling from a large population: sample(range(10000000), 60) """ # Sampling without replacement entails tracking either potential # selections (the pool) in a list or previous selections in a set. # When the number of selections is small compared to the # population, then tracking selections is efficient, requiring # only a small set and an occasional reselection. For # a larger number of selections, the pool tracking method is # preferred since the list takes less space than the # set and it doesn't suffer from frequent reselections. if isinstance(population, _Set): population = list(population) if not isinstance(population, _Sequence): raise TypeError("Population must be a sequence or set. 
For dicts, use list(d).") randbelow = self._randbelow n = len(population) if not (0 <= k and k <= n): raise ValueError("Sample larger than population") result = [cast(T, None)] * k setsize = 21 # size of a small set minus size of an empty list if k > 5: setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets if n <= setsize: # An n-length list is smaller than a k-length set pool = list(population) for i in range(k): # invariant: non-selected at [0,n-i) j = randbelow(n-i) result[i] = pool[j] pool[j] = pool[n-i-1] # move non-selected item into vacancy else: selected = set() # type: Set[int] selected_add = selected.add for i in range(k): j = randbelow(n) while j in selected: j = randbelow(n) selected_add(j) result[i] = population[j] return result ## -------------------- real-valued distributions ------------------- ## -------------------- uniform distribution ------------------- def uniform(self, a: float, b: float) -> float: "Get a random number in the range [a, b) or [a, b] depending on rounding." return a + (b-a) * self.random() ## -------------------- triangular -------------------- def triangular(self, low: float = 0.0, high: float = 1.0, mode: float = None) -> float: """Triangular distribution. Continuous distribution bounded by given lower and upper limits, and having a given mode value in-between. http://en.wikipedia.org/wiki/Triangular_distribution """ u = self.random() c = 0.5 if mode is None else (mode - low) / (high - low) if u > c: u = 1.0 - u c = 1.0 - c low, high = high, low return low + (high - low) * (u * c) ** 0.5 ## -------------------- normal distribution -------------------- def normalvariate(self, mu: float, sigma: float) -> float: """Normal distribution. mu is the mean, and sigma is the standard deviation. """ # mu = mean, sigma = standard deviation # Uses Kinderman and Monahan method. Reference: Kinderman, # A.J. 
and Monahan, J.F., "Computer generation of random # variables using the ratio of uniform deviates", ACM Trans # Math Software, 3, (1977), pp257-260. random = self.random while 1: u1 = random() u2 = 1.0 - random() z = NV_MAGICCONST*(u1-0.5)/u2 zz = z*z/4.0 if zz <= -_log(u2): break return mu + z*sigma ## -------------------- lognormal distribution -------------------- def lognormvariate(self, mu: float, sigma: float) -> float: """Log normal distribution. If you take the natural logarithm of this distribution, you'll get a normal distribution with mean mu and standard deviation sigma. mu can have any value, and sigma must be greater than zero. """ return _exp(self.normalvariate(mu, sigma)) ## -------------------- exponential distribution -------------------- def expovariate(self, lambd: float) -> float: """Exponential distribution. lambd is 1.0 divided by the desired mean. It should be nonzero. (The parameter would be called "lambda", but that is a reserved word in Python.) Returned values range from 0 to positive infinity if lambd is positive, and from negative infinity to 0 if lambd is negative. """ # lambd: rate lambd = 1/mean # ('lambda' is a Python reserved word) # we use 1-random() instead of random() to preclude the # possibility of taking the log of zero. return -_log(1.0 - self.random())/lambd ## -------------------- von Mises distribution -------------------- def vonmisesvariate(self, mu: float, kappa: float) -> float: """Circular data distribution. mu is the mean angle, expressed in radians between 0 and 2*pi, and kappa is the concentration parameter, which must be greater than or equal to zero. If kappa is equal to zero, this distribution reduces to a uniform random angle over the range 0 to 2*pi. 
""" # mu: mean angle (in radians between 0 and 2*pi) # kappa: concentration parameter kappa (>= 0) # if kappa = 0 generate uniform random angle # Based upon an algorithm published in: Fisher, N.I., # "Statistical Analysis of Circular Data", Cambridge # University Press, 1993. # Thanks to Magnus Kessler for a correction to the # implementation of step 4. random = self.random if kappa <= 1e-6: return TWOPI * random() a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa) b = (a - _sqrt(2.0 * a))/(2.0 * kappa) r = (1.0 + b * b)/(2.0 * b) while 1: u1 = random() z = _cos(_pi * u1) f = (1.0 + r * z)/(r + z) c = kappa * (r - f) u2 = random() if u2 < c * (2.0 - c) or u2 <= c * _exp(1.0 - c): break u3 = random() if u3 > 0.5: theta = (mu % TWOPI) + _acos(f) else: theta = (mu % TWOPI) - _acos(f) return theta ## -------------------- gamma distribution -------------------- def gammavariate(self, alpha: float, beta: float) -> float: """Gamma distribution. Not the gamma function! Conditions on the parameters are alpha > 0 and beta > 0. The probability distribution function is: x ** (alpha - 1) * math.exp(-x / beta) pdf(x) = -------------------------------------- math.gamma(alpha) * beta ** alpha """ # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2 # Warning: a few older sources define the gamma distribution in terms # of alpha > -1.0 if alpha <= 0.0 or beta <= 0.0: raise ValueError('gammavariate: alpha and beta must be > 0.0') random = self.random if alpha > 1.0: # Uses R.C.H. Cheng, "The generation of Gamma # variables with non-integral shape parameters", # Applied Statistics, (1977), 26, No. 
1, p71-74 ainv = _sqrt(2.0 * alpha - 1.0) bbb = alpha - LOG4 ccc = alpha + ainv while 1: u1 = random() if not (1e-7 < u1 and u1 < .9999999): continue u2 = 1.0 - random() v = _log(u1/(1.0-u1))/ainv x = alpha*_exp(v) z = u1*u1*u2 r = bbb+ccc*v-x if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z): return x * beta elif alpha == 1.0: # expovariate(1) u = random() while u <= 1e-7: u = random() return -_log(u) * beta else: # alpha is between 0 and 1 (exclusive) # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle while 1: u = random() b = (_e + alpha)/_e p = b*u if p <= 1.0: x = p ** (1.0/alpha) else: x = -_log((b-p)/alpha) u1 = random() if p > 1.0: if u1 <= x ** (alpha - 1.0): break elif u1 <= _exp(-x): break return x * beta ## -------------------- Gauss (faster alternative) -------------------- def gauss(self, mu: float, sigma: float) -> float: """Gaussian distribution. mu is the mean, and sigma is the standard deviation. This is slightly faster than the normalvariate() function. Not thread-safe without a lock around calls. """ # When x and y are two variables from [0, 1), uniformly # distributed, then # # cos(2*pi*x)*sqrt(-2*log(1-y)) # sin(2*pi*x)*sqrt(-2*log(1-y)) # # are two *independent* variables with normal distribution # (mu = 0, sigma = 1). # (Lambert Meertens) # (corrected version; bug discovered by Mike Miller, fixed by LM) # Multithreading note: When two threads call this function # simultaneously, it is possible that they will receive the # same return value. The window is very small though. To # avoid this, you have to use a lock around all calls. (I # didn't want to slow this down in the serial case by using a # lock here.) 
random = self.random z = self.gauss_next self.gauss_next = None if z is None: x2pi = random() * TWOPI g2rad = _sqrt(-2.0 * _log(1.0 - random())) z = _cos(x2pi) * g2rad self.gauss_next = _sin(x2pi) * g2rad return mu + z*sigma ## -------------------- beta -------------------- ## See ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html ## for Ivan Frohne's insightful analysis of why the original implementation: ## ## def betavariate(self, alpha, beta): ## # Discrete Event Simulation in C, pp 87-88. ## ## y = self.expovariate(alpha) ## z = self.expovariate(1.0/beta) ## return z/(y+z) ## ## was dead wrong, and how it probably got that way. def betavariate(self, alpha: float, beta: float) -> 'float': """Beta distribution. Conditions on the parameters are alpha > 0 and beta > 0. Returned values range between 0 and 1. """ # This version due to Janne Sinkkonen, and matches all the std # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution"). y = self.gammavariate(alpha, 1.) if y == 0: return 0.0 else: return y / (y + self.gammavariate(beta, 1.)) ## -------------------- Pareto -------------------- def paretovariate(self, alpha: float) -> float: """Pareto distribution. alpha is the shape parameter.""" # Jain, pg. 495 u = 1.0 - self.random() return 1.0 / u ** (1.0/alpha) ## -------------------- Weibull -------------------- def weibullvariate(self, alpha: float, beta: float) -> float: """Weibull distribution. alpha is the scale parameter and beta is the shape parameter. """ # Jain, pg. 499; bug fix courtesy Bill Arms u = 1.0 - self.random() return alpha * (-_log(u)) ** (1.0/beta) ## --------------- Operating System Random Source ------------------ class SystemRandom(Random): """Alternate random number generator using sources provided by the operating system (such as /dev/urandom on Unix or CryptGenRandom on Windows). Not available on all systems (see os.urandom() for details). 
""" def random(self) -> float: """Get the next random number in the range [0.0, 1.0).""" return (int.from_bytes(_urandom(7), 'big') >> 3) * RECIP_BPF def getrandbits(self, k: int) -> int: """getrandbits(k) -> x. Generates a long int with k random bits.""" if k <= 0: raise ValueError('number of bits must be greater than zero') if k != int(k): raise TypeError('number of bits should be an integer') numbytes = (k + 7) // 8 # bits / 8 and rounded up x = int.from_bytes(_urandom(numbytes), 'big') return x >> (numbytes * 8 - k) # trim excess bits def seed(self, a: object = None, version: int = None) -> None: "Stub method. Not used for a system random number generator." return def _notimplemented(self, *args: Any, **kwds: Any) -> Any: "Method should not be called for a system random number generator." raise NotImplementedError('System entropy source does not have state.') getstate = setstate = _notimplemented # Create one instance, seeded from current time, and export its methods # as module-level functions. The functions share state across all uses #(both in the user's code and in the Python libraries), but that's fine # for most programs and is easier for the casual user than making them # instantiate their own Random() instance. 
_inst = Random() seed = _inst.seed random = _inst.random uniform = _inst.uniform triangular = _inst.triangular randint = _inst.randint choice = _inst.choice randrange = _inst.randrange sample = _inst.sample shuffle = _inst.shuffle normalvariate = _inst.normalvariate lognormvariate = _inst.lognormvariate expovariate = _inst.expovariate vonmisesvariate = _inst.vonmisesvariate gammavariate = _inst.gammavariate gauss = _inst.gauss betavariate = _inst.betavariate paretovariate = _inst.paretovariate weibullvariate = _inst.weibullvariate getstate = _inst.getstate setstate = _inst.setstate getrandbits = _inst.getrandbits ## -------------------- test program -------------------- def _test_generator(n: int, func: Any, args: tuple) -> None: import time print(n, 'times', func.__name__) total = 0.0 sqsum = 0.0 smallest = 1e10 largest = -1e10 t0 = time.time() for i in range(n): x = func(*args) # type: float total += x sqsum = sqsum + x*x smallest = min(x, smallest) largest = max(x, largest) t1 = time.time() print(round(t1-t0, 3), 'sec,', end=' ') avg = total/n stddev = _sqrt(sqsum/n - avg*avg) print('avg %g, stddev %g, min %g, max %g' % \ (avg, stddev, smallest, largest)) def _test(N: int = 2000) -> None: _test_generator(N, random, ()) _test_generator(N, normalvariate, (0.0, 1.0)) _test_generator(N, lognormvariate, (0.0, 1.0)) _test_generator(N, vonmisesvariate, (0.0, 1.0)) _test_generator(N, gammavariate, (0.01, 1.0)) _test_generator(N, gammavariate, (0.1, 1.0)) _test_generator(N, gammavariate, (0.1, 2.0)) _test_generator(N, gammavariate, (0.5, 1.0)) _test_generator(N, gammavariate, (0.9, 1.0)) _test_generator(N, gammavariate, (1.0, 1.0)) _test_generator(N, gammavariate, (2.0, 1.0)) _test_generator(N, gammavariate, (20.0, 1.0)) _test_generator(N, gammavariate, (200.0, 1.0)) _test_generator(N, gauss, (0.0, 1.0)) _test_generator(N, betavariate, (3.0, 3.0)) _test_generator(N, triangular, (0.0, 1.0, 1.0/3.0)) if __name__ == '__main__': _test() 
mypy-0.761/test-data/stdlib-samples/3.2/shutil.py0000644€tŠÔÚ€2›s®0000006606313576752246025730 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utility functions for copying and archiving files and directory trees. XXX The functions here don't copy the resource fork or other metadata on Mac. """ import os import sys import stat from os.path import abspath import fnmatch import collections import errno import tarfile import builtins from typing import ( Any, AnyStr, IO, List, Iterable, Callable, Tuple, Dict, Sequence, cast ) from types import TracebackType try: import bz2 _BZ2_SUPPORTED = True except ImportError: _BZ2_SUPPORTED = False try: from pwd import getpwnam as _getpwnam getpwnam = _getpwnam except ImportError: getpwnam = None try: from grp import getgrnam as _getgrnam getgrnam = _getgrnam except ImportError: getgrnam = None __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", "copytree", "move", "rmtree", "Error", "SpecialFileError", "ExecError", "make_archive", "get_archive_formats", "register_archive_format", "unregister_archive_format", "get_unpack_formats", "register_unpack_format", "unregister_unpack_format", "unpack_archive", "ignore_patterns"] class Error(EnvironmentError): pass class SpecialFileError(EnvironmentError): """Raised when trying to do a kind of operation (e.g. copying) which is not supported on a special file (e.g. 
a named pipe)""" class ExecError(EnvironmentError): """Raised when a command could not be executed""" class ReadError(EnvironmentError): """Raised when an archive cannot be read""" class RegistryError(Exception): """Raised when a registery operation with the archiving and unpacking registeries fails""" try: _WindowsError = WindowsError # type: type except NameError: _WindowsError = None # Function aliases to be patched in test cases rename = os.rename open = builtins.open def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr], length: int = 16*1024) -> None: """copy data from file-like object fsrc to file-like object fdst""" while 1: buf = fsrc.read(length) if not buf: break fdst.write(buf) def _samefile(src: str, dst: str) -> bool: # Macintosh, Unix. if hasattr(os.path, 'samefile'): try: return os.path.samefile(src, dst) except OSError: return False # All other platforms: check for same pathname. return (os.path.normcase(os.path.abspath(src)) == os.path.normcase(os.path.abspath(dst))) def copyfile(src: str, dst: str) -> None: """Copy data from src to dst""" if _samefile(src, dst): raise Error("`%s` and `%s` are the same file" % (src, dst)) for fn in [src, dst]: try: st = os.stat(fn) except OSError: # File most likely does not exist pass else: # XXX What about other special files? (sockets, devices...) 
if stat.S_ISFIFO(st.st_mode): raise SpecialFileError("`%s` is a named pipe" % fn) with open(src, 'rb') as fsrc: with open(dst, 'wb') as fdst: copyfileobj(fsrc, fdst) def copymode(src: str, dst: str) -> None: """Copy mode bits from src to dst""" if hasattr(os, 'chmod'): st = os.stat(src) mode = stat.S_IMODE(st.st_mode) os.chmod(dst, mode) def copystat(src: str, dst: str) -> None: """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" st = os.stat(src) mode = stat.S_IMODE(st.st_mode) if hasattr(os, 'utime'): os.utime(dst, (st.st_atime, st.st_mtime)) if hasattr(os, 'chmod'): os.chmod(dst, mode) if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): try: os.chflags(dst, st.st_flags) except OSError as why: if (not hasattr(errno, 'EOPNOTSUPP') or why.errno != errno.EOPNOTSUPP): raise def copy(src: str, dst: str) -> None: """Copy data and mode bits ("cp src dst"). The destination may be a directory. """ if os.path.isdir(dst): dst = os.path.join(dst, os.path.basename(src)) copyfile(src, dst) copymode(src, dst) def copy2(src: str, dst: str) -> None: """Copy data and all stat info ("cp -p src dst"). The destination may be a directory. """ if os.path.isdir(dst): dst = os.path.join(dst, os.path.basename(src)) copyfile(src, dst) copystat(src, dst) def ignore_patterns(*patterns: str) -> Callable[[str, List[str]], Iterable[str]]: """Function that can be used as copytree() ignore parameter. Patterns is a sequence of glob-style patterns that are used to exclude files""" def _ignore_patterns(path: str, names: List[str]) -> Iterable[str]: ignored_names = [] # type: List[str] for pattern in patterns: ignored_names.extend(fnmatch.filter(names, pattern)) return set(ignored_names) return _ignore_patterns def copytree(src: str, dst: str, symlinks: bool = False, ignore: Callable[[str, List[str]], Iterable[str]] = None, copy_function: Callable[[str, str], None] = copy2, ignore_dangling_symlinks: bool = False) -> None: """Recursively copy a directory tree. 
The destination directory must not already exist. If exception(s) occur, an Error is raised with a list of reasons. If the optional symlinks flag is true, symbolic links in the source tree result in symbolic links in the destination tree; if it is false, the contents of the files pointed to by symbolic links are copied. If the file pointed by the symlink doesn't exist, an exception will be added in the list of errors raised in an Error exception at the end of the copy process. You can set the optional ignore_dangling_symlinks flag to true if you want to silence this exception. Notice that this has no effect on platforms that don't support os.symlink. The optional ignore argument is a callable. If given, it is called with the `src` parameter, which is the directory being visited by copytree(), and `names` which is the list of `src` contents, as returned by os.listdir(): callable(src, names) -> ignored_names Since copytree() is called recursively, the callable will be called once for each directory that is copied. It returns a list of names relative to the `src` directory that should not be copied. The optional copy_function argument is a callable that will be used to copy each file. It will be called with the source path and the destination path as arguments. By default, copy2() is used, but any function that supports the same signature (like copy()) can be used. """ names = os.listdir(src) if ignore is not None: ignored_names = ignore(src, names) else: ignored_names = set() os.makedirs(dst) errors = [] # type: List[Tuple[str, str, str]] for name in names: if name in ignored_names: continue srcname = os.path.join(src, name) dstname = os.path.join(dst, name) try: if os.path.islink(srcname): linkto = os.readlink(srcname) if symlinks: os.symlink(linkto, dstname) else: # ignore dangling symlink if the flag is on if not os.path.exists(linkto) and ignore_dangling_symlinks: continue # otherwise let the copy occurs. 
copy2 will raise an error copy_function(srcname, dstname) elif os.path.isdir(srcname): copytree(srcname, dstname, symlinks, ignore, copy_function) else: # Will raise a SpecialFileError for unsupported file types copy_function(srcname, dstname) # catch the Error from the recursive copytree so that we can # continue with other files except Error as err: errors.extend(err.args[0]) except EnvironmentError as why: errors.append((srcname, dstname, str(why))) try: copystat(src, dst) except OSError as why: if _WindowsError is not None and isinstance(why, _WindowsError): # Copying file access times may fail on Windows pass else: errors.append((src, dst, str(why))) if errors: raise Error(errors) def rmtree(path: str, ignore_errors: bool = False, onerror: Callable[[Any, str, Tuple[type, BaseException, TracebackType]], None] = None) -> None: """Recursively delete a directory tree. If ignore_errors is set, errors are ignored; otherwise, if onerror is set, it is called to handle the error with arguments (func, path, exc_info) where func is os.listdir, os.remove, or os.rmdir; path is the argument to that function that caused it to fail; and exc_info is a tuple returned by sys.exc_info(). If ignore_errors is false and onerror is None, an exception is raised. 
""" if ignore_errors: def _onerror(x: Any, y: str, z: Tuple[type, BaseException, TracebackType]) -> None: pass onerror = _onerror elif onerror is None: def __onerror(x: Any, y: str, z: Tuple[type, BaseException, TracebackType]) -> None: raise onerror = __onerror try: if os.path.islink(path): # symlinks to directories are forbidden, see bug #1669 raise OSError("Cannot call rmtree on a symbolic link") except OSError: onerror(os.path.islink, path, sys.exc_info()) # can't continue even if onerror hook returns return names = [] # type: List[str] try: names = os.listdir(path) except os.error as err: onerror(os.listdir, path, sys.exc_info()) for name in names: fullname = os.path.join(path, name) try: mode = os.lstat(fullname).st_mode except os.error: mode = 0 if stat.S_ISDIR(mode): rmtree(fullname, ignore_errors, onerror) else: try: os.remove(fullname) except os.error as err: onerror(os.remove, fullname, sys.exc_info()) try: os.rmdir(path) except os.error: onerror(os.rmdir, path, sys.exc_info()) def _basename(path: str) -> str: # A basename() variant which first strips the trailing slash, if present. # Thus we always get the last component of the path, even for directories. return os.path.basename(path.rstrip(os.path.sep)) def move(src: str, dst: str) -> None: """Recursively move a file or directory to another location. This is similar to the Unix "mv" command. If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already exist. If the destination already exists but is not a directory, it may be overwritten depending on os.rename() semantics. If the destination is on our current filesystem, then rename() is used. Otherwise, src is copied to the destination and then removed. A lot more could be done here... A look at a mv.c shows a lot of the issues this implementation glosses over. 
""" real_dst = dst if os.path.isdir(dst): if _samefile(src, dst): # We might be on a case insensitive filesystem, # perform the rename anyway. os.rename(src, dst) return real_dst = os.path.join(dst, _basename(src)) if os.path.exists(real_dst): raise Error("Destination path '%s' already exists" % real_dst) try: os.rename(src, real_dst) except OSError as exc: if os.path.isdir(src): if _destinsrc(src, dst): raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) copytree(src, real_dst, symlinks=True) rmtree(src) else: copy2(src, real_dst) os.unlink(src) def _destinsrc(src: str, dst: str) -> bool: src = abspath(src) dst = abspath(dst) if not src.endswith(os.path.sep): src += os.path.sep if not dst.endswith(os.path.sep): dst += os.path.sep return dst.startswith(src) def _get_gid(name: str) -> int: """Returns a gid, given a group name.""" if getgrnam is None or name is None: return None try: result = getgrnam(name) except KeyError: result = None if result is not None: return result.gr_gid return None def _get_uid(name: str) -> int: """Returns an uid, given a user name.""" if getpwnam is None or name is None: return None try: result = getpwnam(name) except KeyError: result = None if result is not None: return result.pw_uid return None def _make_tarball(base_name: str, base_dir: str, compress: str = "gzip", verbose: bool = False, dry_run: bool = False, owner: str = None, group: str = None, logger: Any = None) -> str: """Create a (possibly compressed) tar file from all the files under 'base_dir'. 'compress' must be "gzip" (the default), "bzip2", or None. 'owner' and 'group' can be used to define an owner and a group for the archive that is being built. If not provided, the current owner and group will be used. The output tar file will be named 'base_name' + ".tar", possibly plus the appropriate compression extension (".gz", or ".bz2"). Returns the output filename. 
""" tar_compression = {'gzip': 'gz', None: ''} compress_ext = {'gzip': '.gz'} if _BZ2_SUPPORTED: tar_compression['bzip2'] = 'bz2' compress_ext['bzip2'] = '.bz2' # flags for compression program, each element of list will be an argument if compress is not None and compress not in compress_ext.keys(): raise ValueError("bad value for 'compress', or compression format not " "supported : {0}".format(compress)) archive_name = base_name + '.tar' + compress_ext.get(compress, '') archive_dir = os.path.dirname(archive_name) if not os.path.exists(archive_dir): if logger is not None: logger.info("creating %s", archive_dir) if not dry_run: os.makedirs(archive_dir) # creating the tarball if logger is not None: logger.info('Creating tar archive') uid = _get_uid(owner) gid = _get_gid(group) def _set_uid_gid(tarinfo): if gid is not None: tarinfo.gid = gid tarinfo.gname = group if uid is not None: tarinfo.uid = uid tarinfo.uname = owner return tarinfo if not dry_run: tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) try: tar.add(base_dir, filter=_set_uid_gid) finally: tar.close() return archive_name def _call_external_zip(base_dir: str, zip_filename: str, verbose: bool = False, dry_run: bool = False) -> None: # XXX see if we want to keep an external call here if verbose: zipoptions = "-r" else: zipoptions = "-rq" from distutils.errors import DistutilsExecError from distutils.spawn import spawn try: spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) except DistutilsExecError: # XXX really should distinguish between "couldn't find # external 'zip' command" and "zip failed". raise ExecError(("unable to create zip file '%s': " "could neither import the 'zipfile' module nor " "find a standalone zip utility") % zip_filename) def _make_zipfile(base_name: str, base_dir: str, verbose: bool = False, dry_run: bool = False, logger: Any = None) -> str: """Create a zip file from all the files under 'base_dir'. 
The output zip file will be named 'base_name' + ".zip". Uses either the "zipfile" Python module (if available) or the InfoZIP "zip" utility (if installed and found on the default search path). If neither tool is available, raises ExecError. Returns the name of the output zip file. """ zip_filename = base_name + ".zip" archive_dir = os.path.dirname(base_name) if not os.path.exists(archive_dir): if logger is not None: logger.info("creating %s", archive_dir) if not dry_run: os.makedirs(archive_dir) # If zipfile module is not available, try spawning an external 'zip' # command. try: import zipfile except ImportError: zipfile = None if zipfile is None: _call_external_zip(base_dir, zip_filename, verbose, dry_run) else: if logger is not None: logger.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) if not dry_run: zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED) for dirpath, dirnames, filenames in os.walk(base_dir): for name in filenames: path = os.path.normpath(os.path.join(dirpath, name)) if os.path.isfile(path): zip.write(path, path) if logger is not None: logger.info("adding '%s'", path) zip.close() return zip_filename _ARCHIVE_FORMATS = { 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), 'zip': (_make_zipfile, [],"ZIP file") } # type: Dict[str, Tuple[Any, Sequence[Tuple[str, str]], str]] if _BZ2_SUPPORTED: _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file") def get_archive_formats() -> List[Tuple[str, str]]: """Returns a list of supported formats for archiving and unarchiving. 
Each element of the returned sequence is a tuple (name, description) """ formats = [(name, registry[2]) for name, registry in _ARCHIVE_FORMATS.items()] formats.sort() return formats def register_archive_format(name: str, function: Any, extra_args: Sequence[Tuple[str, Any]] = None, description: str = '') -> None: """Registers an archive format. name is the name of the format. function is the callable that will be used to create archives. If provided, extra_args is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_archive_formats() function. """ if extra_args is None: extra_args = [] if not callable(function): raise TypeError('The %s object is not callable' % function) if not isinstance(extra_args, (tuple, list)): raise TypeError('extra_args needs to be a sequence') for element in extra_args: if not isinstance(element, (tuple, list)) or len(cast(tuple, element)) !=2 : raise TypeError('extra_args elements are : (arg_name, value)') _ARCHIVE_FORMATS[name] = (function, extra_args, description) def unregister_archive_format(name: str) -> None: del _ARCHIVE_FORMATS[name] def make_archive(base_name: str, format: str, root_dir: str = None, base_dir: str = None, verbose: bool = False, dry_run: bool = False, owner: str = None, group: str = None, logger: Any = None) -> str: """Create an archive file (eg. zip or tar). 'base_name' is the name of the file to create, minus any format-specific extension; 'format' is the archive format: one of "zip", "tar", "bztar" or "gztar". 'root_dir' is a directory that will be the root directory of the archive; ie. we typically chdir into 'root_dir' before creating the archive. 'base_dir' is the directory where we start archiving from; ie. 'base_dir' will be the common prefix of all files and directories in the archive. 'root_dir' and 'base_dir' both default to the current directory. Returns the name of the archive file. 
'owner' and 'group' are used when creating a tar archive. By default, uses the current owner and group. """ save_cwd = os.getcwd() if root_dir is not None: if logger is not None: logger.debug("changing into '%s'", root_dir) base_name = os.path.abspath(base_name) if not dry_run: os.chdir(root_dir) if base_dir is None: base_dir = os.curdir kwargs = {'dry_run': dry_run, 'logger': logger} try: format_info = _ARCHIVE_FORMATS[format] except KeyError: raise ValueError("unknown archive format '%s'" % format) func = format_info[0] for arg, val in format_info[1]: kwargs[arg] = val if format != 'zip': kwargs['owner'] = owner kwargs['group'] = group try: filename = func(base_name, base_dir, **kwargs) finally: if root_dir is not None: if logger is not None: logger.debug("changing back to '%s'", save_cwd) os.chdir(save_cwd) return filename def get_unpack_formats() -> List[Tuple[str, List[str], str]]: """Returns a list of supported formats for unpacking. Each element of the returned sequence is a tuple (name, extensions, description) """ formats = [(name, info[0], info[3]) for name, info in _UNPACK_FORMATS.items()] formats.sort() return formats def _check_unpack_options(extensions: List[str], function: Any, extra_args: Sequence[Tuple[str, Any]]) -> None: """Checks what gets registered as an unpacker.""" # first make sure no other unpacker is registered for this extension existing_extensions = {} # type: Dict[str, str] for name, info in _UNPACK_FORMATS.items(): for ext in info[0]: existing_extensions[ext] = name for extension in extensions: if extension in existing_extensions: msg = '%s is already registered for "%s"' raise RegistryError(msg % (extension, existing_extensions[extension])) if not callable(function): raise TypeError('The registered function must be a callable') def register_unpack_format(name: str, extensions: List[str], function: Any, extra_args: Sequence[Tuple[str, Any]] = None, description: str = '') -> None: """Registers an unpack format. 
`name` is the name of the format. `extensions` is a list of extensions corresponding to the format. `function` is the callable that will be used to unpack archives. The callable will receive archives to unpack. If it's unable to handle an archive, it needs to raise a ReadError exception. If provided, `extra_args` is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_unpack_formats() function. """ if extra_args is None: extra_args = [] _check_unpack_options(extensions, function, extra_args) _UNPACK_FORMATS[name] = extensions, function, extra_args, description def unregister_unpack_format(name: str) -> None: """Removes the pack format from the registery.""" del _UNPACK_FORMATS[name] def _ensure_directory(path: str) -> None: """Ensure that the parent directory of `path` exists""" dirname = os.path.dirname(path) if not os.path.isdir(dirname): os.makedirs(dirname) def _unpack_zipfile(filename: str, extract_dir: str) -> None: """Unpack zip `filename` to `extract_dir` """ try: import zipfile except ImportError: raise ReadError('zlib not supported, cannot unpack this archive.') if not zipfile.is_zipfile(filename): raise ReadError("%s is not a zip file" % filename) zip = zipfile.ZipFile(filename) try: for info in zip.infolist(): name = info.filename # don't extract absolute paths or ones with .. in them if name.startswith('/') or '..' 
in name: continue target = os.path.join(extract_dir, *name.split('/')) if not target: continue _ensure_directory(target) if not name.endswith('/'): # file data = zip.read(info.filename) f = open(target,'wb') try: f.write(data) finally: f.close() del data finally: zip.close() def _unpack_tarfile(filename: str, extract_dir: str) -> None: """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` """ try: tarobj = tarfile.open(filename) except tarfile.TarError: raise ReadError( "%s is not a compressed or uncompressed tar file" % filename) try: tarobj.extractall(extract_dir) finally: tarobj.close() _UNPACK_FORMATS = { 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") } # type: Dict[str, Tuple[List[str], Any, Sequence[Tuple[str, Any]], str]] if _BZ2_SUPPORTED: _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], "bzip2'ed tar-file") def _find_unpack_format(filename: str) -> str: for name, info in _UNPACK_FORMATS.items(): for extension in info[0]: if filename.endswith(extension): return name return None def unpack_archive(filename: str, extract_dir: str = None, format: str = None) -> None: """Unpack an archive. `filename` is the name of the archive. `extract_dir` is the name of the target directory, where the archive is unpacked. If not provided, the current working directory is used. `format` is the archive format: one of "zip", "tar", or "gztar". Or any other registered format. If not provided, unpack_archive will use the filename extension and see if an unpacker was registered for that extension. In case none is found, a ValueError is raised. 
""" if extract_dir is None: extract_dir = os.getcwd() if format is not None: try: format_info = _UNPACK_FORMATS[format] except KeyError: raise ValueError("Unknown unpack format '{0}'".format(format)) func = format_info[1] func(filename, extract_dir, **dict(format_info[2])) else: # we need to look at the registered unpackers supported extensions format = _find_unpack_format(filename) if format is None: raise ReadError("Unknown archive format '{0}'".format(filename)) func = _UNPACK_FORMATS[format][1] kwargs = dict(_UNPACK_FORMATS[format][2]) func(filename, extract_dir, **kwargs) mypy-0.761/test-data/stdlib-samples/3.2/subprocess.py0000644€tŠÔÚ€2›s®0000020110413576752246026573 0ustar jukkaDROPBOX\Domain Users00000000000000# subprocess - Subprocesses with accessible I/O streams # # For more information about this module, see PEP 324. # # Copyright (c) 2003-2005 by Peter Astrand # # Licensed to PSF under a Contributor Agreement. # See http://www.python.org/2.4/license for licensing details. r"""subprocess - Subprocesses with accessible I/O streams This module allows you to spawn processes, connect to their input/output/error pipes, and obtain their return codes. This module intends to replace several other, older modules and functions, like: os.system os.spawn* Information about how the subprocess module can be used to replace these modules and functions can be found below. Using the subprocess module =========================== This module defines one class called Popen: class Popen(args, bufsize=0, executable=None, stdin=None, stdout=None, stderr=None, preexec_fn=None, close_fds=True, shell=False, cwd=None, env=None, universal_newlines=False, startupinfo=None, creationflags=0, restore_signals=True, start_new_session=False, pass_fds=()): Arguments are: args should be a string, or a sequence of program arguments. The program to execute is normally the first item in the args sequence or string, but can be explicitly set by using the executable argument. 
On POSIX, with shell=False (default): In this case, the Popen class uses os.execvp() to execute the child program. args should normally be a sequence. A string will be treated as a sequence with the string as the only item (the program to execute). On POSIX, with shell=True: If args is a string, it specifies the command string to execute through the shell. If args is a sequence, the first item specifies the command string, and any additional items will be treated as additional shell arguments. On Windows: the Popen class uses CreateProcess() to execute the child program, which operates on strings. If args is a sequence, it will be converted to a string using the list2cmdline method. Please note that not all MS Windows applications interpret the command line the same way: The list2cmdline is designed for applications using the same rules as the MS C runtime. bufsize, if given, has the same meaning as the corresponding argument to the built-in open() function: 0 means unbuffered, 1 means line buffered, any other positive value means use a buffer of (approximately) that size. A negative bufsize means to use the system default, which usually means fully buffered. The default value for bufsize is 0 (unbuffered). stdin, stdout and stderr specify the executed programs' standard input, standard output and standard error file handles, respectively. Valid values are PIPE, an existing file descriptor (a positive integer), an existing file object, and None. PIPE indicates that a new pipe to the child should be created. With None, no redirection will occur; the child's file handles will be inherited from the parent. Additionally, stderr can be STDOUT, which indicates that the stderr data from the applications should be captured into the same file handle as for stdout. On POSIX, if preexec_fn is set to a callable object, this object will be called in the child process just before the child is executed. 
The use of preexec_fn is not thread safe, using it in the presence of threads could lead to a deadlock in the child process before the new executable is executed. If close_fds is true, all file descriptors except 0, 1 and 2 will be closed before the child process is executed. The default for close_fds varies by platform: Always true on POSIX. True when stdin/stdout/stderr are None on Windows, false otherwise. pass_fds is an optional sequence of file descriptors to keep open between the parent and child. Providing any pass_fds implicitly sets close_fds to true. if shell is true, the specified command will be executed through the shell. If cwd is not None, the current directory will be changed to cwd before the child is executed. On POSIX, if restore_signals is True all signals that Python sets to SIG_IGN are restored to SIG_DFL in the child process before the exec. Currently this includes the SIGPIPE, SIGXFZ and SIGXFSZ signals. This parameter does nothing on Windows. On POSIX, if start_new_session is True, the setsid() system call will be made in the child process prior to executing the command. If env is not None, it defines the environment variables for the new process. If universal_newlines is true, the file objects stdout and stderr are opened as a text files, but lines may be terminated by any of '\n', the Unix end-of-line convention, '\r', the old Macintosh convention or '\r\n', the Windows convention. All of these external representations are seen as '\n' by the Python program. Note: This feature is only available if Python is built with universal newline support (the default). Also, the newlines attribute of the file objects stdout, stdin and stderr are not updated by the communicate() method. The startupinfo and creationflags, if given, will be passed to the underlying CreateProcess() function. They can specify things such as appearance of the main window and priority for the new process. 
(Windows only) This module also defines some shortcut functions: call(*popenargs, **kwargs): Run command with arguments. Wait for command to complete, then return the returncode attribute. The arguments are the same as for the Popen constructor. Example: >>> retcode = subprocess.call(["ls", "-l"]) check_call(*popenargs, **kwargs): Run command with arguments. Wait for command to complete. If the exit code was zero then return, otherwise raise CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute. The arguments are the same as for the Popen constructor. Example: >>> subprocess.check_call(["ls", "-l"]) 0 getstatusoutput(cmd): Return (status, output) of executing cmd in a shell. Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple (status, output). cmd is actually run as '{ cmd ; } 2>&1', so that the returned output will contain output or error messages. A trailing newline is stripped from the output. The exit status for the command can be interpreted according to the rules for the C function wait(). Example: >>> subprocess.getstatusoutput('ls /bin/ls') (0, '/bin/ls') >>> subprocess.getstatusoutput('cat /bin/junk') (256, 'cat: /bin/junk: No such file or directory') >>> subprocess.getstatusoutput('/bin/junk') (256, 'sh: /bin/junk: not found') getoutput(cmd): Return output (stdout or stderr) of executing cmd in a shell. Like getstatusoutput(), except the exit status is ignored and the return value is a string containing the command's output. Example: >>> subprocess.getoutput('ls /bin/ls') '/bin/ls' check_output(*popenargs, **kwargs): Run command with arguments and return its output as a byte string. If the exit code was non-zero it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute and output in the output attribute. The arguments are the same as for the Popen constructor. 
Example: >>> output = subprocess.check_output(["ls", "-l", "/dev/null"]) Exceptions ---------- Exceptions raised in the child process, before the new program has started to execute, will be re-raised in the parent. Additionally, the exception object will have one extra attribute called 'child_traceback', which is a string containing traceback information from the childs point of view. The most common exception raised is OSError. This occurs, for example, when trying to execute a non-existent file. Applications should prepare for OSErrors. A ValueError will be raised if Popen is called with invalid arguments. check_call() and check_output() will raise CalledProcessError, if the called process returns a non-zero return code. Security -------- Unlike some other popen functions, this implementation will never call /bin/sh implicitly. This means that all characters, including shell metacharacters, can safely be passed to child processes. Popen objects ============= Instances of the Popen class have the following methods: poll() Check if child process has terminated. Returns returncode attribute. wait() Wait for child process to terminate. Returns returncode attribute. communicate(input=None) Interact with process: Send data to stdin. Read data from stdout and stderr, until end-of-file is reached. Wait for process to terminate. The optional input argument should be a string to be sent to the child process, or None, if no data should be sent to the child. communicate() returns a tuple (stdout, stderr). Note: The data read is buffered in memory, so do not use this method if the data size is large or unlimited. The following attributes are also available: stdin If the stdin argument is PIPE, this attribute is a file object that provides input to the child process. Otherwise, it is None. stdout If the stdout argument is PIPE, this attribute is a file object that provides output from the child process. Otherwise, it is None. 
stderr If the stderr argument is PIPE, this attribute is file object that provides error output from the child process. Otherwise, it is None. pid The process ID of the child process. returncode The child return code. A None value indicates that the process hasn't terminated yet. A negative value -N indicates that the child was terminated by signal N (POSIX only). Replacing older functions with the subprocess module ==================================================== In this section, "a ==> b" means that b can be used as a replacement for a. Note: All functions in this section fail (more or less) silently if the executed program cannot be found; this module raises an OSError exception. In the following examples, we assume that the subprocess module is imported with "from subprocess import *". Replacing /bin/sh shell backquote --------------------------------- output=`mycmd myarg` ==> output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0] Replacing shell pipe line ------------------------- output=`dmesg | grep hda` ==> p1 = Popen(["dmesg"], stdout=PIPE) p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) output = p2.communicate()[0] Replacing os.system() --------------------- sts = os.system("mycmd" + " myarg") ==> p = Popen("mycmd" + " myarg", shell=True) pid, sts = os.waitpid(p.pid, 0) Note: * Calling the program through the shell is usually not required. * It's easier to look at the returncode attribute than the exitstatus. 
A more real-world example would look like this: try: retcode = call("mycmd" + " myarg", shell=True) if retcode < 0: print("Child was terminated by signal", -retcode, file=sys.stderr) else: print("Child returned", retcode, file=sys.stderr) except OSError as e: print("Execution failed:", e, file=sys.stderr) Replacing os.spawn* ------------------- P_NOWAIT example: pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg") ==> pid = Popen(["/bin/mycmd", "myarg"]).pid P_WAIT example: retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg") ==> retcode = call(["/bin/mycmd", "myarg"]) Vector example: os.spawnvp(os.P_NOWAIT, path, args) ==> Popen([path] + args[1:]) Environment example: os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env) ==> Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"}) """ import sys mswindows = (sys.platform == "win32") import io import os import traceback import gc import signal import builtins import warnings import errno from typing import ( Any, Tuple, List, Sequence, Callable, Mapping, cast, Set, Dict, IO, TextIO, AnyStr ) from typing_extensions import Literal from types import TracebackType # Exception classes used by this module. class CalledProcessError(Exception): """This exception is raised when a process run by check_call() or check_output() returns a non-zero exit status. The exit status will be stored in the returncode attribute; check_output() will also store the output in the output attribute. 
""" def __init__(self, returncode: int, cmd: str, output: Any = None) -> None: self.returncode = returncode self.cmd = cmd self.output = output def __str__(self) -> str: return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) if mswindows: import threading import msvcrt import _subprocess class STARTUPINFO: dwFlags = 0 hStdInput = cast(Any, None) hStdOutput = cast(Any, None) hStdError = cast(Any, None) wShowWindow = 0 class pywintypes: error = IOError else: import select _has_poll = hasattr(select, 'poll') import fcntl import pickle try: import _posixsubprocess have_posixsubprocess = True except ImportError: have_posixsubprocess = False warnings.warn("The _posixsubprocess module is not being used. " "Child process reliability may suffer if your " "program uses threads.", RuntimeWarning) # When select or poll has indicated that the file is writable, # we can write up to _PIPE_BUF bytes without risk of blocking. # POSIX defines PIPE_BUF as >= 512. _PIPE_BUF = getattr(select, 'PIPE_BUF', 512) # type: int _FD_CLOEXEC = getattr(fcntl, 'FD_CLOEXEC', 1) # type: int def _set_cloexec(fd: int, cloexec: bool) -> None: old = fcntl.fcntl(fd, fcntl.F_GETFD) if cloexec: fcntl.fcntl(fd, fcntl.F_SETFD, old | _FD_CLOEXEC) else: fcntl.fcntl(fd, fcntl.F_SETFD, old & ~_FD_CLOEXEC) if have_posixsubprocess: _create_pipe = _posixsubprocess.cloexec_pipe else: def __create_pipe() -> Tuple[int, int]: fds = os.pipe() _set_cloexec(fds[0], True) _set_cloexec(fds[1], True) return fds _create_pipe = __create_pipe __all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput", "getoutput", "check_output", "CalledProcessError"] if mswindows: from _subprocess import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP, STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, STD_ERROR_HANDLE, SW_HIDE, STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW) __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP", "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE", "STD_ERROR_HANDLE", "SW_HIDE", 
"STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW"]) try: MAXFD = os.sysconf("SC_OPEN_MAX") except: MAXFD = 256 # This lists holds Popen instances for which the underlying process had not # exited at the time its __del__ method got called: those processes are wait()ed # for synchronously from _cleanup() when a new Popen object is created, to avoid # zombie processes. _active = [] # type: List[Popen] def _cleanup() -> None: for inst in _active[:]: res = inst._internal_poll(_deadstate=sys.maxsize) if res is not None: try: _active.remove(inst) except ValueError: # This can happen if two threads create a new Popen instance. # It's harmless that it was already removed, so ignore. pass PIPE = -1 STDOUT = -2 def _eintr_retry_call(func: Any, *args: Any) -> Any: while True: try: return func(*args) except (OSError, IOError) as e: if e.errno == errno.EINTR: continue raise def call(*popenargs: Any, **kwargs: Any) -> int: """Run command with arguments. Wait for command to complete, then return the returncode attribute. The arguments are the same as for the Popen constructor. Example: retcode = call(["ls", "-l"]) """ return Popen(*popenargs, **kwargs).wait() def check_call(*popenargs: Any, **kwargs: Any) -> int: """Run command with arguments. Wait for command to complete. If the exit code was zero then return, otherwise raise CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute. The arguments are the same as for the Popen constructor. Example: check_call(["ls", "-l"]) """ retcode = call(*popenargs, **kwargs) if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] raise CalledProcessError(retcode, cmd) return 0 def check_output(*popenargs: Any, **kwargs: Any) -> bytes: r"""Run command with arguments and return its output as a byte string. If the exit code was non-zero it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute and output in the output attribute. 
The arguments are the same as for the Popen constructor. Example: >>> check_output(["ls", "-l", "/dev/null"]) b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' The stdout argument is not allowed as it is used internally. To capture standard error in the result, use stderr=STDOUT. >>> check_output(["/bin/sh", "-c", ... "ls -l non_existent_file ; exit 0"], ... stderr=STDOUT) b'ls: non_existent_file: No such file or directory\n' """ if 'stdout' in kwargs: raise ValueError('stdout argument not allowed, it will be overridden.') kwargs['stdout'] = PIPE process = Popen(*popenargs, **kwargs) output, unused_err = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] raise CalledProcessError(retcode, cmd, output=output) return output def list2cmdline(seq: Sequence[str]) -> str: """ Translate a sequence of arguments into a command line string, using the same rules as the MS C runtime: 1) Arguments are delimited by white space, which is either a space or a tab. 2) A string surrounded by double quotation marks is interpreted as a single argument, regardless of white space contained within. A quoted string can be embedded in an argument. 3) A double quotation mark preceded by a backslash is interpreted as a literal double quotation mark. 4) Backslashes are interpreted literally, unless they immediately precede a double quotation mark. 5) If backslashes immediately precede a double quotation mark, every pair of backslashes is interpreted as a literal backslash. If the number of backslashes is odd, the last backslash escapes the next double quotation mark as described in rule 3. 
""" # See # http://msdn.microsoft.com/en-us/library/17w5ykft.aspx # or search http://msdn.microsoft.com for # "Parsing C++ Command-Line Arguments" result = [] # type: List[str] needquote = False for arg in seq: bs_buf = [] # type: List[str] # Add a space to separate this argument from the others if result: result.append(' ') needquote = (" " in arg) or ("\t" in arg) or not arg if needquote: result.append('"') for c in arg: if c == '\\': # Don't know if we need to double yet. bs_buf.append(c) elif c == '"': # Double backslashes. result.append('\\' * len(bs_buf)*2) bs_buf = [] result.append('\\"') else: # Normal char if bs_buf: result.extend(bs_buf) bs_buf = [] result.append(c) # Add remaining backslashes, if any. if bs_buf: result.extend(bs_buf) if needquote: result.extend(bs_buf) result.append('"') return ''.join(result) # Various tools for executing commands and looking at their output and status. # # NB This only works (and is only relevant) for POSIX. def getstatusoutput(cmd: str) -> Tuple[int, str]: """Return (status, output) of executing cmd in a shell. Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple (status, output). cmd is actually run as '{ cmd ; } 2>&1', so that the returned output will contain output or error messages. A trailing newline is stripped from the output. The exit status for the command can be interpreted according to the rules for the C function wait(). Example: >>> import subprocess >>> subprocess.getstatusoutput('ls /bin/ls') (0, '/bin/ls') >>> subprocess.getstatusoutput('cat /bin/junk') (256, 'cat: /bin/junk: No such file or directory') >>> subprocess.getstatusoutput('/bin/junk') (256, 'sh: /bin/junk: not found') """ pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r') text = pipe.read() sts = pipe.close() if sts is None: sts = 0 if text[-1:] == '\n': text = text[:-1] return sts, text def getoutput(cmd: str) -> str: """Return output (stdout or stderr) of executing cmd in a shell. 
Like getstatusoutput(), except the exit status is ignored and the return value is a string containing the command's output. Example: >>> import subprocess >>> subprocess.getoutput('ls /bin/ls') '/bin/ls' """ return getstatusoutput(cmd)[1] _PLATFORM_DEFAULT_CLOSE_FDS = object() class Popen(object): def __init__(self, args: Sequence[Any], bufsize: int = 0, executable: str = None, stdin: Any = None, stdout: Any = None, stderr: Any = None, preexec_fn: Callable[[], Any] = None, close_fds: Any = _PLATFORM_DEFAULT_CLOSE_FDS, shell: int = False, cwd: str = None, env: Mapping[str, str] = None, universal_newlines: int = False, startupinfo: 'STARTUPINFO' = None, creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, pass_fds: Any = ()) -> None: """Create new Popen instance.""" _cleanup() self._child_created = False if bufsize is None: bufsize = 0 # Restore default if not isinstance(bufsize, int): raise TypeError("bufsize must be an integer") if mswindows: if preexec_fn is not None: raise ValueError("preexec_fn is not supported on Windows " "platforms") any_stdio_set = (stdin is not None or stdout is not None or stderr is not None) if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS: if any_stdio_set: close_fds = False else: close_fds = True elif close_fds and any_stdio_set: raise ValueError( "close_fds is not supported on Windows platforms" " if you redirect stdin/stdout/stderr") else: # POSIX if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS: close_fds = True if pass_fds and not close_fds: warnings.warn("pass_fds overriding close_fds.", RuntimeWarning) close_fds = True if startupinfo is not None: raise ValueError("startupinfo is only supported on Windows " "platforms") if creationflags != 0: raise ValueError("creationflags is only supported on Windows " "platforms") self.stdin = None # type: IO[Any] self.stdout = None # type: IO[Any] self.stderr = None # type: IO[Any] self.pid = None # type: int self.returncode = None # type: int self.universal_newlines = 
universal_newlines # Input and output objects. The general principle is like # this: # # Parent Child # ------ ----- # p2cwrite ---stdin---> p2cread # c2pread <--stdout--- c2pwrite # errread <--stderr--- errwrite # # On POSIX, the child objects are file descriptors. On # Windows, these are Windows file handles. The parent objects # are file descriptors on both platforms. The parent objects # are -1 when not using PIPEs. The child objects are -1 # when not redirecting. (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) = self._get_handles(stdin, stdout, stderr) # We wrap OS handles *before* launching the child, otherwise a # quickly terminating child could make our fds unwrappable # (see #8458). if mswindows: if p2cwrite != -1: p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0) if c2pread != -1: c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0) if errread != -1: errread = msvcrt.open_osfhandle(errread.Detach(), 0) if p2cwrite != -1: self.stdin = io.open(p2cwrite, 'wb', bufsize) if self.universal_newlines: self.stdin = io.TextIOWrapper(self.stdin, write_through=True) if c2pread != -1: self.stdout = io.open(c2pread, 'rb', bufsize) if universal_newlines: self.stdout = io.TextIOWrapper(self.stdout) if errread != -1: self.stderr = io.open(errread, 'rb', bufsize) if universal_newlines: self.stderr = io.TextIOWrapper(self.stderr) try: self._execute_child(args, executable, preexec_fn, close_fds, pass_fds, cwd, env, universal_newlines, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, start_new_session) except: # Cleanup if the child failed starting for f in filter(None, [self.stdin, self.stdout, self.stderr]): try: f.close() except EnvironmentError: # Ignore EBADF or other errors pass raise def _translate_newlines(self, data: bytes, encoding: str) -> str: data = data.replace(b"\r\n", b"\n").replace(b"\r", b"\n") return data.decode(encoding) def __enter__(self) -> 'Popen': return self def __exit__(self, 
type: type, value: BaseException, traceback: TracebackType) -> Literal[False]: if self.stdout: self.stdout.close() if self.stderr: self.stderr.close() if self.stdin: self.stdin.close() # Wait for the process to terminate, to avoid zombies. self.wait() return False def __del__(self, _maxsize: int = sys.maxsize, _active: List['Popen'] = _active) -> None: # If __init__ hasn't had a chance to execute (e.g. if it # was passed an undeclared keyword argument), we don't # have a _child_created attribute at all. if not getattr(self, '_child_created', False): # We didn't get to successfully create a child process. return # In case the child hasn't been waited on, check if it's done. self._internal_poll(_deadstate=_maxsize) if self.returncode is None and _active is not None: # Child is still running, keep us alive until we can wait on it. _active.append(self) def communicate(self, input: Any = None) -> Tuple[Any, Any]: """Interact with process: Send data to stdin. Read data from stdout and stderr, until end-of-file is reached. Wait for process to terminate. The optional input argument should be a string to be sent to the child process, or None, if no data should be sent to the child. communicate() returns a tuple (stdout, stderr).""" # Optimization: If we are only using one pipe, or no pipe at # all, using select() or threads is unnecessary. 
if [self.stdin, self.stdout, self.stderr].count(None) >= 2: stdout = None # type: IO[Any] stderr = None # type: IO[Any] if self.stdin: if input: try: self.stdin.write(input) except IOError as e: if e.errno != errno.EPIPE and e.errno != errno.EINVAL: raise self.stdin.close() elif self.stdout: stdout = _eintr_retry_call(self.stdout.read) self.stdout.close() elif self.stderr: stderr = _eintr_retry_call(self.stderr.read) self.stderr.close() self.wait() return (stdout, stderr) return self._communicate(input) def poll(self) -> int: return self._internal_poll() if mswindows: # # Windows methods # def _get_handles(self, stdin: Any, stdout: Any, stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]: """Construct and return tuple with IO objects: p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite """ if stdin is None and stdout is None and stderr is None: return (-1, -1, -1, -1, -1, -1) p2cread, p2cwrite = -1, -1 # type: (Any, Any) c2pread, c2pwrite = -1, -1 # type: (Any, Any) errread, errwrite = -1, -1 # type: (Any, Any) if stdin is None: p2cread = _subprocess.GetStdHandle(_subprocess.STD_INPUT_HANDLE) if p2cread is None: p2cread, _ = _subprocess.CreatePipe(None, 0) elif stdin == PIPE: p2cread, p2cwrite = _subprocess.CreatePipe(None, 0) elif isinstance(stdin, int): p2cread = msvcrt.get_osfhandle(stdin) else: # Assuming file-like object p2cread = msvcrt.get_osfhandle(stdin.fileno()) p2cread = self._make_inheritable(p2cread) if stdout is None: c2pwrite = _subprocess.GetStdHandle(_subprocess.STD_OUTPUT_HANDLE) if c2pwrite is None: _, c2pwrite = _subprocess.CreatePipe(None, 0) elif stdout == PIPE: c2pread, c2pwrite = _subprocess.CreatePipe(None, 0) elif isinstance(stdout, int): c2pwrite = msvcrt.get_osfhandle(stdout) else: # Assuming file-like object c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) c2pwrite = self._make_inheritable(c2pwrite) if stderr is None: errwrite = _subprocess.GetStdHandle(_subprocess.STD_ERROR_HANDLE) if errwrite is None: _, errwrite = 
_subprocess.CreatePipe(None, 0) elif stderr == PIPE: errread, errwrite = _subprocess.CreatePipe(None, 0) elif stderr == STDOUT: errwrite = c2pwrite elif isinstance(stderr, int): errwrite = msvcrt.get_osfhandle(stderr) else: # Assuming file-like object errwrite = msvcrt.get_osfhandle(stderr.fileno()) errwrite = self._make_inheritable(errwrite) return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) def _make_inheritable(self, handle: _subprocess.Handle) -> int: """Return a duplicate of handle, which is inheritable""" return _subprocess.DuplicateHandle(_subprocess.GetCurrentProcess(), handle, _subprocess.GetCurrentProcess(), 0, 1, _subprocess.DUPLICATE_SAME_ACCESS) def _find_w9xpopen(self) -> str: """Find and return absolut path to w9xpopen.exe""" w9xpopen = os.path.join( os.path.dirname(_subprocess.GetModuleFileName(0)), "w9xpopen.exe") if not os.path.exists(w9xpopen): # Eeek - file-not-found - possibly an embedding # situation - see if we can locate it in sys.exec_prefix w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe") if not os.path.exists(w9xpopen): raise RuntimeError("Cannot locate w9xpopen.exe, which is " "needed for Popen to work with your " "shell or platform.") return w9xpopen def _execute_child(self, args: Sequence[str], executable: str, preexec_fn: Callable[[], Any], close_fds: Any, pass_fds: Any, cwd: str, env: Mapping[str, str], universal_newlines: int, startupinfo: STARTUPINFO, creationflags: int, shell: int, p2cread: Any, p2cwrite: Any, c2pread: Any, c2pwrite: Any, errread: Any, errwrite: Any, restore_signals: bool, start_new_session: bool) -> None: """Execute program (MS Windows version)""" assert not pass_fds, "pass_fds not supported on Windows." 
if not isinstance(args, str): args = list2cmdline(args) # Process startup details if startupinfo is None: startupinfo = STARTUPINFO() if -1 not in (p2cread, c2pwrite, errwrite): startupinfo.dwFlags |= _subprocess.STARTF_USESTDHANDLES startupinfo.hStdInput = p2cread startupinfo.hStdOutput = c2pwrite startupinfo.hStdError = errwrite if shell: startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW startupinfo.wShowWindow = _subprocess.SW_HIDE comspec = os.environ.get("COMSPEC", "cmd.exe") args = '{} /c "{}"'.format (comspec, args) if (_subprocess.GetVersion() >= 0x80000000 or os.path.basename(comspec).lower() == "command.com"): # Win9x, or using command.com on NT. We need to # use the w9xpopen intermediate program. For more # information, see KB Q150956 # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) w9xpopen = self._find_w9xpopen() args = '"%s" %s' % (w9xpopen, args) # Not passing CREATE_NEW_CONSOLE has been known to # cause random failures on win9x. Specifically a # dialog: "Your program accessed mem currently in # use at xxx" and a hopeful warning about the # stability of your system. Cost is Ctrl+C won't # kill children. creationflags |= _subprocess.CREATE_NEW_CONSOLE # Start the process try: hp, ht, pid, tid = _subprocess.CreateProcess(executable, cast(str, args), # no special security None, None, int(not close_fds), creationflags, env, cwd, startupinfo) except pywintypes.error as e: # Translate pywintypes.error to WindowsError, which is # a subclass of OSError. FIXME: We should really # translate errno using _sys_errlist (or similar), but # how can this be done from Python? raise WindowsError(*e.args) finally: # Child is launched. Close the parent's copy of those pipe # handles that only the child should have open. 
You need # to make sure that no handles to the write end of the # output pipe are maintained in this process or else the # pipe will not close when the child process exits and the # ReadFile will hang. if p2cread != -1: p2cread.Close() if c2pwrite != -1: c2pwrite.Close() if errwrite != -1: errwrite.Close() # Retain the process handle, but close the thread handle self._child_created = True self._handle = hp self.pid = pid ht.Close() def _internal_poll(self, _deadstate: int = None) -> int: """Check if child process has terminated. Returns returncode attribute. This method is called by __del__, so it can only refer to objects in its local scope. """ return self._internal_poll_win(_deadstate) from _subprocess import Handle def _internal_poll_win(self, _deadstate: int = None, _WaitForSingleObject: Callable[[Handle, int], int] = _subprocess.WaitForSingleObject, _WAIT_OBJECT_0: int = _subprocess.WAIT_OBJECT_0, _GetExitCodeProcess: Callable[[Handle], int] = _subprocess.GetExitCodeProcess) -> int: if self.returncode is None: if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0: self.returncode = _GetExitCodeProcess(self._handle) return self.returncode def wait(self) -> int: """Wait for child process to terminate. 
Returns returncode attribute.""" if self.returncode is None: _subprocess.WaitForSingleObject(self._handle, _subprocess.INFINITE) self.returncode = _subprocess.GetExitCodeProcess(self._handle) return self.returncode def _readerthread(self, fh: IO[AnyStr], buffer: List[AnyStr]) -> None: buffer.append(fh.read()) fh.close() def _communicate(self, input: Any) -> Tuple[Any, Any]: stdout = cast(Any, None) # Return stderr = cast(Any, None) # Return if self.stdout: stdout = [] stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout)) stdout_thread.daemon = True stdout_thread.start() if self.stderr: stderr = [] stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr)) stderr_thread.daemon = True stderr_thread.start() if self.stdin: if input is not None: try: self.stdin.write(input) except IOError as e: if e.errno != errno.EPIPE: raise self.stdin.close() if self.stdout: stdout_thread.join() if self.stderr: stderr_thread.join() # All data exchanged. Translate lists into strings. 
if stdout is not None: stdout = stdout[0] if stderr is not None: stderr = stderr[0] self.wait() return (stdout, stderr) def send_signal(self, sig: int) -> None: """Send a signal to the process """ if sig == signal.SIGTERM: self.terminate() elif sig == signal.CTRL_C_EVENT: os.kill(self.pid, signal.CTRL_C_EVENT) elif sig == signal.CTRL_BREAK_EVENT: os.kill(self.pid, signal.CTRL_BREAK_EVENT) else: raise ValueError("Unsupported signal: {}".format(sig)) def terminate(self) -> None: """Terminates the process """ _subprocess.TerminateProcess(self._handle, 1) def kill(self) -> None: """Terminates the process """ self.terminate() else: # # POSIX methods # def _get_handles(self, stdin: Any, stdout: Any, stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]: """Construct and return tuple with IO objects: p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite """ p2cread, p2cwrite = -1, -1 c2pread, c2pwrite = -1, -1 errread, errwrite = -1, -1 if stdin is None: pass elif stdin == PIPE: p2cread, p2cwrite = _create_pipe() elif isinstance(stdin, int): p2cread = stdin else: # Assuming file-like object p2cread = stdin.fileno() if stdout is None: pass elif stdout == PIPE: c2pread, c2pwrite = _create_pipe() elif isinstance(stdout, int): c2pwrite = stdout else: # Assuming file-like object c2pwrite = stdout.fileno() if stderr is None: pass elif stderr == PIPE: errread, errwrite = _create_pipe() elif stderr == STDOUT: errwrite = c2pwrite elif isinstance(stderr, int): errwrite = stderr else: # Assuming file-like object errwrite = stderr.fileno() return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) def _close_fds(self, fds_to_keep: Set[int]) -> None: start_fd = 3 for fd in sorted(fds_to_keep): if fd >= start_fd: os.closerange(start_fd, fd) start_fd = fd + 1 if start_fd <= MAXFD: os.closerange(start_fd, MAXFD) def _execute_child(self, args: Sequence[str], executable: str, preexec_fn: Callable[[], Any], close_fds: Any, pass_fds: Any, cwd: str, env: Mapping[str, str], 
universal_newlines: int, startupinfo: 'STARTUPINFO', creationflags: int, shell: int, p2cread: Any, p2cwrite: Any, c2pread: Any, c2pwrite: Any, errread: Any, errwrite: Any, restore_signals: bool, start_new_session: bool) -> None: """Execute program (POSIX version)""" if isinstance(args, str): args = [args] else: args = list(args) if shell: args = ["/bin/sh", "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] # For transferring possible exec failure from child to parent. # Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = _create_pipe() try: try: if have_posixsubprocess: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [os.fsencode(k) + b'=' + os.fsencode(v) for k, v in env.items()] else: env_list = None # Use execv instead of execve. executable_enc = os.fsencode(executable) if os.path.dirname(executable_enc): executable_list = (executable_enc,) # type: tuple else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable_enc) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, sorted(fds_to_keep), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, preexec_fn) self._child_created = True else: # Pure Python implementation: It is not thread safe. # This implementation may deadlock in the child if your # parent process has any other threads running. gc_was_enabled = gc.isenabled() # Disable gc to avoid bug where gc -> file_dealloc -> # write to stderr -> hang. 
See issue1336 gc.disable() try: self.pid = os.fork() except: if gc_was_enabled: gc.enable() raise self._child_created = True if self.pid == 0: # Child try: # Close parent's pipe ends if p2cwrite != -1: os.close(p2cwrite) if c2pread != -1: os.close(c2pread) if errread != -1: os.close(errread) os.close(errpipe_read) # When duping fds, if there arises a situation # where one of the fds is either 0, 1 or 2, it # is possible that it is overwritten (#12607). if c2pwrite == 0: c2pwrite = os.dup(c2pwrite) if errwrite == 0 or errwrite == 1: errwrite = os.dup(errwrite) # Dup fds for child def _dup2(a: int, b: int) -> None: # dup2() removes the CLOEXEC flag but # we must do it ourselves if dup2() # would be a no-op (issue #10806). if a == b: _set_cloexec(a, False) elif a != -1: os.dup2(a, b) _dup2(p2cread, 0) _dup2(c2pwrite, 1) _dup2(errwrite, 2) # Close pipe fds. Make sure we don't close the # same fd more than once, or standard fds. closed = set() # type: Set[int] for fd in [p2cread, c2pwrite, errwrite]: if fd > 2 and fd not in closed: os.close(fd) closed.add(fd) # Close all other fds, if asked for if close_fds: fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self._close_fds(fds_to_keep) if cwd is not None: os.chdir(cwd) # This is a copy of Python/pythonrun.c # _Py_RestoreSignals(). If that were exposed # as a sys._py_restoresignals func it would be # better.. but this pure python implementation # isn't likely to be used much anymore. 
if restore_signals: signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ') for sig in signals: if hasattr(signal, sig): signal.signal(getattr(signal, sig), signal.SIG_DFL) if start_new_session and hasattr(os, 'setsid'): os.setsid() if preexec_fn: preexec_fn() if env is None: os.execvp(executable, args) else: os.execvpe(executable, args, env) except: try: exc_type, exc_value = sys.exc_info()[:2] if isinstance(exc_value, OSError): errno_num = exc_value.errno else: errno_num = 0 message = '%s:%x:%s' % (exc_type.__name__, errno_num, exc_value) messageb = message.encode(errors="surrogatepass") os.write(errpipe_write, messageb) except Exception: # We MUST not allow anything odd happening # above to prevent us from exiting below. pass # This exitcode won't be reported to applications # so it really doesn't matter what we return. os._exit(255) # Parent if gc_was_enabled: gc.enable() finally: # be sure the FD is closed no matter what os.close(errpipe_write) if p2cread != -1 and p2cwrite != -1: os.close(p2cread) if c2pwrite != -1 and c2pread != -1: os.close(c2pwrite) if errwrite != -1 and errread != -1: os.close(errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) data = bytearray() while True: part = _eintr_retry_call(os.read, errpipe_read, 50000) data += part if not part or len(data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if data: try: _eintr_retry_call(os.waitpid, self.pid, 0) except OSError as e: if e.errno != errno.ECHILD: raise try: (exception_name, hex_errno, err_msg_b) = bytes(data).split(b':', 2) except ValueError: print('Bad exception data:', repr(data)) exception_name = b'RuntimeError' hex_errno = b'0' err_msg_b = b'Unknown' child_exception_type = getattr( builtins, exception_name.decode('ascii'), RuntimeError) for fd in (p2cwrite, c2pread, errread): if fd != -1: os.close(fd) err_msg = err_msg_b.decode(errors="surrogatepass") if issubclass(child_exception_type, OSError) and hex_errno: 
errno_num = int(hex_errno, 16) if errno_num != 0: err_msg = os.strerror(errno_num) if errno_num == errno.ENOENT: err_msg += ': ' + repr(args[0]) raise child_exception_type(errno_num, err_msg) raise child_exception_type(err_msg) def _handle_exitstatus( self, sts: int, _WIFSIGNALED: Callable[[int], bool] = os.WIFSIGNALED, _WTERMSIG: Callable[[int], int] = os.WTERMSIG, _WIFEXITED: Callable[[int], bool] = os.WIFEXITED, _WEXITSTATUS: Callable[[int], int] = os.WEXITSTATUS) -> None: # This method is called (indirectly) by __del__, so it cannot # refer to anything outside of its local scope.""" if _WIFSIGNALED(sts): self.returncode = -_WTERMSIG(sts) elif _WIFEXITED(sts): self.returncode = _WEXITSTATUS(sts) else: # Should never happen raise RuntimeError("Unknown child exit status!") def _internal_poll(self, _deadstate: int = None) -> int: """Check if child process has terminated. Returns returncode attribute. This method is called by __del__, so it cannot reference anything outside of the local scope (nor can any methods it calls). """ return self._internal_poll_posix(_deadstate) def _internal_poll_posix(self, _deadstate: int = None, _waitpid: Callable[[int, int], Tuple[int, int]] = os.waitpid, _WNOHANG: int = os.WNOHANG, _os_error: Any = os.error) -> int: if self.returncode is None: try: pid, sts = _waitpid(self.pid, _WNOHANG) if pid == self.pid: self._handle_exitstatus(sts) except _os_error: if _deadstate is not None: self.returncode = _deadstate return self.returncode def wait(self) -> int: """Wait for child process to terminate. Returns returncode attribute.""" if self.returncode is None: try: pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0) except OSError as e: if e.errno != errno.ECHILD: raise # This happens if SIGCLD is set to be ignored or waiting # for child processes has otherwise been disabled for our # process. This child is dead, we can't get the status. 
sts = 0 self._handle_exitstatus(sts) return self.returncode def _communicate(self, input: Any) -> Tuple[Any, Any]: if self.stdin: # Flush stdio buffer. This might block, if the user has # been writing to .stdin in an uncontrolled fashion. self.stdin.flush() if not input: self.stdin.close() if _has_poll: stdout, stderr = self._communicate_with_poll(input) else: stdout, stderr = self._communicate_with_select(input) # All data exchanged. Translate lists into strings. if stdout is not None: stdout2 = b''.join(stdout) else: stdout2 = None if stderr is not None: stderr2 = b''.join(stderr) else: stderr2 = None # Translate newlines, if requested. # This also turns bytes into strings. stdout3 = cast(Any, stdout2) stderr3 = cast(Any, stderr2) if self.universal_newlines: if stdout is not None: stdout3 = self._translate_newlines( stdout2, cast(TextIO, self.stdout).encoding) if stderr is not None: stderr3 = self._translate_newlines( stderr2, cast(TextIO, self.stderr).encoding) self.wait() return (stdout3, stderr3) def _communicate_with_poll(self, input: Any) -> Tuple[List[bytes], List[bytes]]: stdout = None # type: List[bytes] # Return stderr = None # type: List[bytes] # Return fd2file = {} # type: Dict[int, Any] fd2output = {} # type: Dict[int, List[bytes]] poller = select.poll() def register_and_append(file_obj: IO[Any], eventmask: int) -> None: poller.register(file_obj.fileno(), eventmask) fd2file[file_obj.fileno()] = file_obj def close_unregister_and_remove(fd: int) -> None: poller.unregister(fd) fd2file[fd].close() fd2file.pop(fd) if self.stdin and input: register_and_append(self.stdin, select.POLLOUT) select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI if self.stdout: register_and_append(self.stdout, select_POLLIN_POLLPRI) fd2output[self.stdout.fileno()] = stdout = [] if self.stderr: register_and_append(self.stderr, select_POLLIN_POLLPRI) fd2output[self.stderr.fileno()] = stderr = [] input_offset = 0 while fd2file: try: ready = poller.poll() except select.error as e: 
if e.args[0] == errno.EINTR: continue raise # XXX Rewrite these to use non-blocking I/O on the # file objects; they are no longer using C stdio! for fd, mode in ready: if mode & select.POLLOUT: chunk = input[input_offset : input_offset + _PIPE_BUF] try: input_offset += os.write(fd, chunk) except OSError as e2: if e2.errno == errno.EPIPE: close_unregister_and_remove(fd) else: raise else: if input_offset >= len(input): close_unregister_and_remove(fd) elif mode & select_POLLIN_POLLPRI: data = os.read(fd, 4096) if not data: close_unregister_and_remove(fd) fd2output[fd].append(data) else: # Ignore hang up or errors. close_unregister_and_remove(fd) return (stdout, stderr) def _communicate_with_select(self, input: Any) -> Tuple[List[bytes], List[bytes]]: read_set = [] # type: List[IO[Any]] write_set = [] # type: List[IO[Any]] stdout = None # type: List[bytes] # Return stderr = None # type: List[bytes] # Return if self.stdin and input: write_set.append(self.stdin) if self.stdout: read_set.append(self.stdout) stdout = [] if self.stderr: read_set.append(self.stderr) stderr = [] input_offset = 0 while read_set or write_set: try: rlist, wlist, xlist = select.select(read_set, write_set, []) except select.error as e: if e.args[0] == errno.EINTR: continue raise # XXX Rewrite these to use non-blocking I/O on the # file objects; they are no longer using C stdio! 
if self.stdin in wlist: chunk = input[input_offset : input_offset + _PIPE_BUF] try: bytes_written = os.write(self.stdin.fileno(), chunk) except OSError as oe: if oe.errno == errno.EPIPE: self.stdin.close() write_set.remove(self.stdin) else: raise else: input_offset += bytes_written if input_offset >= len(input): self.stdin.close() write_set.remove(self.stdin) if self.stdout in rlist: data = os.read(self.stdout.fileno(), 1024) if not data: self.stdout.close() read_set.remove(self.stdout) stdout.append(data) if self.stderr in rlist: data = os.read(self.stderr.fileno(), 1024) if not data: self.stderr.close() read_set.remove(self.stderr) stderr.append(data) return (stdout, stderr) def send_signal(self, sig: int) -> None: """Send a signal to the process """ os.kill(self.pid, sig) def terminate(self) -> None: """Terminate the process with SIGTERM """ self.send_signal(signal.SIGTERM) def kill(self) -> None: """Kill the process with SIGKILL """ self.send_signal(signal.SIGKILL) def _demo_posix() -> None: # # Example 1: Simple redirection: Get process list # plist = Popen(["ps"], stdout=PIPE).communicate()[0] print("Process list:") print(plist) # # Example 2: Change uid before executing child # if os.getuid() == 0: p = Popen(["id"], preexec_fn=lambda: os.setuid(100)) p.wait() # # Example 3: Connecting several subprocesses # print("Looking for 'hda'...") p1 = Popen(["dmesg"], stdout=PIPE) p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) print(repr(p2.communicate()[0])) # # Example 4: Catch execution error # print() print("Trying a weird file...") try: print(Popen(["/this/path/does/not/exist"]).communicate()) except OSError as e: if e.errno == errno.ENOENT: print("The file didn't exist. I thought so...") else: print("Error", e.errno) else: print("Gosh. 
No error.", file=sys.stderr) def _demo_windows() -> None: # # Example 1: Connecting several subprocesses # print("Looking for 'PROMPT' in set output...") p1 = Popen("set", stdout=PIPE, shell=True) p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE) print(repr(p2.communicate()[0])) # # Example 2: Simple execution of program # print("Executing calc...") p = Popen("calc") p.wait() if __name__ == "__main__": if mswindows: _demo_windows() else: _demo_posix() mypy-0.761/test-data/stdlib-samples/3.2/tempfile.py0000644€tŠÔÚ€2›s®0000005652113576752246026223 0ustar jukkaDROPBOX\Domain Users00000000000000"""Temporary files. This module provides generic, low- and high-level interfaces for creating temporary files and directories. The interfaces listed as "safe" just below can be used without fear of race conditions. Those listed as "unsafe" cannot, and are provided for backward compatibility only. This module also provides some data items to the user: TMP_MAX - maximum number of names that will be tried before giving up. template - the default prefix for all temporary names. You may change this to control the default prefix. tempdir - If this is set to a string before the first use of any routine from this module, it will be considered as another candidate location to store temporary files. """ __all__ = [ "NamedTemporaryFile", "TemporaryFile", # high level safe interfaces "SpooledTemporaryFile", "TemporaryDirectory", "mkstemp", "mkdtemp", # low level safe interfaces "mktemp", # deprecated unsafe interface "TMP_MAX", "gettempprefix", # constants "tempdir", "gettempdir" ] # Imports. 
import warnings as _warnings import sys as _sys import io as _io import os as _os import errno as _errno from random import Random as _Random from typing import ( Any as _Any, Callable as _Callable, Iterator as _Iterator, List as _List, Tuple as _Tuple, Dict as _Dict, Iterable as _Iterable, IO as _IO, cast as _cast, Optional as _Optional, Type as _Type, ) from typing_extensions import Literal from types import TracebackType as _TracebackType try: import fcntl as _fcntl except ImportError: def _set_cloexec(fd: int) -> None: pass else: def _set_cloexec(fd: int) -> None: try: flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0) except IOError: pass else: # flags read successfully, modify flags |= _fcntl.FD_CLOEXEC _fcntl.fcntl(fd, _fcntl.F_SETFD, flags) try: import _thread _allocate_lock = _thread.allocate_lock # type: _Callable[[], _Any] except ImportError: import _dummy_thread _allocate_lock = _dummy_thread.allocate_lock _text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL if hasattr(_os, 'O_NOINHERIT'): _text_openflags |= _os.O_NOINHERIT if hasattr(_os, 'O_NOFOLLOW'): _text_openflags |= _os.O_NOFOLLOW _bin_openflags = _text_openflags if hasattr(_os, 'O_BINARY'): _bin_openflags |= _os.O_BINARY if hasattr(_os, 'TMP_MAX'): TMP_MAX = _os.TMP_MAX else: TMP_MAX = 10000 template = "tmp" # Internal routines. _once_lock = _allocate_lock() if hasattr(_os, "lstat"): _stat = _os.lstat # type: _Callable[[str], object] elif hasattr(_os, "stat"): _stat = _os.stat else: # Fallback. All we need is something that raises os.error if the # file doesn't exist. def __stat(fn: str) -> object: try: f = open(fn) except IOError: raise _os.error() f.close() return None _stat = __stat def _exists(fn: str) -> bool: try: _stat(fn) except _os.error: return False else: return True class _RandomNameSequence(_Iterator[str]): """An instance of _RandomNameSequence generates an endless sequence of unpredictable strings which can safely be incorporated into file names. Each string is six characters long. 
Multiple threads can safely use the same instance at the same time. _RandomNameSequence is an iterator.""" characters = "abcdefghijklmnopqrstuvwxyz0123456789_" @property def rng(self) -> _Random: cur_pid = _os.getpid() if cur_pid != getattr(self, '_rng_pid', None): self._rng = _Random() self._rng_pid = cur_pid return self._rng def __iter__(self) -> _Iterator[str]: return self def __next__(self) -> str: c = self.characters choose = self.rng.choice letters = [choose(c) for dummy in "123456"] return ''.join(letters) def _candidate_tempdir_list() -> _List[str]: """Generate a list of candidate temporary directories which _get_default_tempdir will try.""" dirlist = [] # type: _List[str] # First, try the environment. for envname in 'TMPDIR', 'TEMP', 'TMP': dirname = _os.getenv(envname) if dirname: dirlist.append(dirname) # Failing that, try OS-specific locations. if _os.name == 'nt': dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ]) else: dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ]) # As a last resort, the current directory. try: dirlist.append(_os.getcwd()) except (AttributeError, _os.error): dirlist.append(_os.curdir) return dirlist def _get_default_tempdir() -> str: """Calculate the default directory to use for temporary files. This routine should be called exactly once. We determine whether or not a candidate temp dir is usable by trying to create and write to a file in that directory. If this is successful, the test file is deleted. To prevent denial of service, the name of the test file must be randomized.""" namer = _RandomNameSequence() dirlist = _candidate_tempdir_list() for dir in dirlist: if dir != _os.curdir: dir = _os.path.normcase(_os.path.abspath(dir)) # Try only a few names per directory. 
for seq in range(100): name = next(namer) filename = _os.path.join(dir, name) try: fd = _os.open(filename, _bin_openflags, 0o600) fp = _io.open(fd, 'wb') fp.write(b'blat') fp.close() _os.unlink(filename) fp = fd = None return dir except (OSError, IOError) as e: if e.args[0] != _errno.EEXIST: break # no point trying more names in this directory pass raise IOError(_errno.ENOENT, "No usable temporary directory found in %s" % dirlist) _name_sequence = None # type: _RandomNameSequence def _get_candidate_names() -> _RandomNameSequence: """Common setup sequence for all user-callable interfaces.""" global _name_sequence if _name_sequence is None: _once_lock.acquire() try: if _name_sequence is None: _name_sequence = _RandomNameSequence() finally: _once_lock.release() return _name_sequence def _mkstemp_inner(dir: str, pre: str, suf: str, flags: int) -> _Tuple[int, str]: """Code common to mkstemp, TemporaryFile, and NamedTemporaryFile.""" names = _get_candidate_names() for seq in range(TMP_MAX): name = next(names) file = _os.path.join(dir, pre + name + suf) try: fd = _os.open(file, flags, 0o600) _set_cloexec(fd) return (fd, _os.path.abspath(file)) except OSError as e: if e.errno == _errno.EEXIST: continue # try again raise raise IOError(_errno.EEXIST, "No usable temporary file name found") # User visible interfaces. def gettempprefix() -> str: """Accessor for tempdir.template.""" return template tempdir = None # type: str def gettempdir() -> str: """Accessor for tempfile.tempdir.""" global tempdir if tempdir is None: _once_lock.acquire() try: if tempdir is None: tempdir = _get_default_tempdir() finally: _once_lock.release() return tempdir def mkstemp(suffix: str = "", prefix: str = template, dir: str = None, text: bool = False) -> _Tuple[int, str]: """User-callable function to create and return a unique temporary file. The return value is a pair (fd, name) where fd is the file descriptor returned by os.open, and name is the filename. 
If 'suffix' is specified, the file name will end with that suffix, otherwise there will be no suffix. If 'prefix' is specified, the file name will begin with that prefix, otherwise a default prefix is used. If 'dir' is specified, the file will be created in that directory, otherwise a default directory is used. If 'text' is specified and true, the file is opened in text mode. Else (the default) the file is opened in binary mode. On some operating systems, this makes no difference. The file is readable and writable only by the creating user ID. If the operating system uses permission bits to indicate whether a file is executable, the file is executable by no one. The file descriptor is not inherited by children of this process. Caller is responsible for deleting the file when done with it. """ if dir is None: dir = gettempdir() if text: flags = _text_openflags else: flags = _bin_openflags return _mkstemp_inner(dir, prefix, suffix, flags) def mkdtemp(suffix: str = "", prefix: str = template, dir: str = None) -> str: """User-callable function to create and return a unique temporary directory. The return value is the pathname of the directory. Arguments are as for mkstemp, except that the 'text' argument is not accepted. The directory is readable, writable, and searchable only by the creating user. Caller is responsible for deleting the directory when done with it. """ if dir is None: dir = gettempdir() names = _get_candidate_names() for seq in range(TMP_MAX): name = next(names) file = _os.path.join(dir, prefix + name + suffix) try: _os.mkdir(file, 0o700) return file except OSError as e: if e.errno == _errno.EEXIST: continue # try again raise raise IOError(_errno.EEXIST, "No usable temporary directory name found") def mktemp(suffix: str = "", prefix: str = template, dir: str = None) -> str: """User-callable function to return a unique temporary file name. The file is not created. Arguments are as for mkstemp, except that the 'text' argument is not accepted. 
This function is unsafe and should not be used. The file name refers to a file that did not exist at some point, but by the time you get around to creating it, someone else may have beaten you to the punch. """ ## from warnings import warn as _warn ## _warn("mktemp is a potential security risk to your program", ## RuntimeWarning, stacklevel=2) if dir is None: dir = gettempdir() names = _get_candidate_names() for seq in range(TMP_MAX): name = next(names) file = _os.path.join(dir, prefix + name + suffix) if not _exists(file): return file raise IOError(_errno.EEXIST, "No usable temporary filename found") class _TemporaryFileWrapper: """Temporary file wrapper This class provides a wrapper around files opened for temporary use. In particular, it seeks to automatically remove the file when it is no longer needed. """ def __init__(self, file: _IO[_Any], name: str, delete: bool = True) -> None: self.file = file self.name = name self.close_called = False self.delete = delete if _os.name != 'nt': # Cache the unlinker so we don't get spurious errors at # shutdown when the module-level "os" is None'd out. Note # that this must be referenced as self.unlink, because the # name TemporaryFileWrapper may also get None'd out before # __del__ is called. self.unlink = _os.unlink def __getattr__(self, name: str) -> _Any: # Attribute lookups are delegated to the underlying file # and cached for non-numeric results # (i.e. 
methods are cached, closed and friends are not) file = _cast(_Any, self).__dict__['file'] # type: _IO[_Any] a = getattr(file, name) if not isinstance(a, int): setattr(self, name, a) return a # The underlying __enter__ method returns the wrong object # (self.file) so override it to return the wrapper def __enter__(self) -> '_TemporaryFileWrapper': self.file.__enter__() return self # iter() doesn't use __getattr__ to find the __iter__ method def __iter__(self) -> _Iterator[_Any]: return iter(self.file) # NT provides delete-on-close as a primitive, so we don't need # the wrapper to do anything special. We still use it so that # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile. if _os.name != 'nt': def close(self) -> None: if not self.close_called: self.close_called = True self.file.close() if self.delete: self.unlink(self.name) def __del__(self) -> None: self.close() # Need to trap __exit__ as well to ensure the file gets # deleted when used in a with statement def __exit__(self, exc: _Type[BaseException], value: BaseException, tb: _Optional[_TracebackType]) -> bool: result = self.file.__exit__(exc, value, tb) self.close() return result else: def __exit__(self, # type: ignore[misc] exc: _Type[BaseException], value: BaseException, tb: _Optional[_TracebackType]) -> Literal[False]: self.file.__exit__(exc, value, tb) return False def NamedTemporaryFile(mode: str = 'w+b', buffering: int = -1, encoding: str = None, newline: str = None, suffix: str = "", prefix: str = template, dir: str = None, delete: bool = True) -> _IO[_Any]: """Create and return a temporary file. Arguments: 'prefix', 'suffix', 'dir' -- as for mkstemp. 'mode' -- the mode argument to io.open (default "w+b"). 'buffering' -- the buffer size argument to io.open (default -1). 'encoding' -- the encoding argument to io.open (default None) 'newline' -- the newline argument to io.open (default None) 'delete' -- whether the file is deleted on close (default True). 
The file is created as mkstemp() would do it. Returns an object with a file-like interface; the name of the file is accessible as file.name. The file will be automatically deleted when it is closed unless the 'delete' argument is set to False. """ if dir is None: dir = gettempdir() flags = _bin_openflags # Setting O_TEMPORARY in the flags causes the OS to delete # the file when it is closed. This is only supported by Windows. if _os.name == 'nt' and delete: flags |= _os.O_TEMPORARY (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags) file = _io.open(fd, mode, buffering=buffering, newline=newline, encoding=encoding) return _cast(_IO[_Any], _TemporaryFileWrapper(file, name, delete)) if _os.name != 'posix' or _sys.platform == 'cygwin': # On non-POSIX and Cygwin systems, assume that we cannot unlink a file # while it is open. TemporaryFile = NamedTemporaryFile else: def _TemporaryFile(mode: str = 'w+b', buffering: int = -1, encoding: str = None, newline: str = None, suffix: str = "", prefix: str = template, dir: str = None, delete: bool = True) -> _IO[_Any]: """Create and return a temporary file. Arguments: 'prefix', 'suffix', 'dir' -- as for mkstemp. 'mode' -- the mode argument to io.open (default "w+b"). 'buffering' -- the buffer size argument to io.open (default -1). 'encoding' -- the encoding argument to io.open (default None) 'newline' -- the newline argument to io.open (default None) The file is created as mkstemp() would do it. Returns an object with a file-like interface. The file has no name, and will cease to exist when it is closed. 
""" if dir is None: dir = gettempdir() flags = _bin_openflags (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags) try: _os.unlink(name) return _io.open(fd, mode, buffering=buffering, newline=newline, encoding=encoding) except: _os.close(fd) raise TemporaryFile = _TemporaryFile class SpooledTemporaryFile: """Temporary file wrapper, specialized to switch from StringIO to a real file when it exceeds a certain size or when a fileno is needed. """ _rolled = False _file = None # type: _Any # BytesIO, StringIO or TemporaryFile def __init__(self, max_size: int = 0, mode: str = 'w+b', buffering: int = -1, encoding: str = None, newline: str = None, suffix: str = "", prefix: str = template, dir: str = None) -> None: if 'b' in mode: self._file = _io.BytesIO() else: # Setting newline="\n" avoids newline translation; # this is important because otherwise on Windows we'd # hget double newline translation upon rollover(). self._file = _io.StringIO(newline="\n") self._max_size = max_size self._rolled = False self._TemporaryFileArgs = { 'mode': mode, 'buffering': buffering, 'suffix': suffix, 'prefix': prefix, 'encoding': encoding, 'newline': newline, 'dir': dir} # type: _Dict[str, _Any] def _check(self, file: _IO[_Any]) -> None: if self._rolled: return max_size = self._max_size if max_size and file.tell() > max_size: self.rollover() def rollover(self) -> None: if self._rolled: return file = self._file newfile = self._file = TemporaryFile(**self._TemporaryFileArgs) self._TemporaryFileArgs = None newfile.write(file.getvalue()) newfile.seek(file.tell(), 0) self._rolled = True # The method caching trick from NamedTemporaryFile # won't work here, because _file may change from a # _StringIO instance to a real file. So we list # all the methods directly. 
# Context management protocol def __enter__(self) -> 'SpooledTemporaryFile': if self._file.closed: raise ValueError("Cannot enter context with closed file") return self def __exit__(self, exc: type, value: BaseException, tb: _TracebackType) -> Literal[False]: self._file.close() return False # file protocol def __iter__(self) -> _Iterable[_Any]: return self._file.__iter__() def close(self) -> None: self._file.close() @property def closed(self) -> bool: return self._file.closed @property def encoding(self) -> str: return self._file.encoding def fileno(self) -> int: self.rollover() return self._file.fileno() def flush(self) -> None: self._file.flush() def isatty(self) -> bool: return self._file.isatty() @property def mode(self) -> str: return self._file.mode @property def name(self) -> str: return self._file.name @property def newlines(self) -> _Any: return self._file.newlines #def next(self): # return self._file.next def read(self, n: int = -1) -> _Any: return self._file.read(n) def readline(self, limit: int = -1) -> _Any: return self._file.readline(limit) def readlines(self, *args) -> _List[_Any]: return self._file.readlines(*args) def seek(self, offset: int, whence: int = 0) -> None: self._file.seek(offset, whence) @property def softspace(self) -> bool: return self._file.softspace def tell(self) -> int: return self._file.tell() def truncate(self) -> None: self._file.truncate() def write(self, s: _Any) -> int: file = self._file # type: _IO[_Any] rv = file.write(s) self._check(file) return rv def writelines(self, iterable: _Iterable[_Any]) -> None: file = self._file # type: _IO[_Any] file.writelines(iterable) self._check(file) #def xreadlines(self, *args) -> _Any: # return self._file.xreadlines(*args) class TemporaryDirectory(object): """Create and return a temporary directory. This has the same behavior as mkdtemp but can be used as a context manager. For example: with TemporaryDirectory() as tmpdir: ... 
Upon exiting the context, the directory and everthing contained in it are removed. """ def __init__(self, suffix: str = "", prefix: str = template, dir: str = None) -> None: self._closed = False self.name = None # type: str # Handle mkdtemp throwing an exception self.name = mkdtemp(suffix, prefix, dir) # XXX (ncoghlan): The following code attempts to make # this class tolerant of the module nulling out process # that happens during CPython interpreter shutdown # Alas, it doesn't actually manage it. See issue #10188 self._listdir = _os.listdir self._path_join = _os.path.join self._isdir = _os.path.isdir self._islink = _os.path.islink self._remove = _os.remove self._rmdir = _os.rmdir self._os_error = _os.error self._warn = _warnings.warn def __repr__(self) -> str: return "<{} {!r}>".format(self.__class__.__name__, self.name) def __enter__(self) -> str: return self.name def cleanup(self, _warn: bool = False) -> None: if self.name and not self._closed: try: self._rmtree(self.name) except (TypeError, AttributeError) as ex: # Issue #10188: Emit a warning on stderr # if the directory could not be cleaned # up due to missing globals if "None" not in str(ex): raise print("ERROR: {!r} while cleaning up {!r}".format(ex, self,), file=_sys.stderr) return self._closed = True if _warn: self._warn("Implicitly cleaning up {!r}".format(self), ResourceWarning) def __exit__(self, exc: type, value: BaseException, tb: _TracebackType) -> Literal[False]: self.cleanup() return False def __del__(self) -> None: # Issue a ResourceWarning if implicit cleanup needed self.cleanup(_warn=True) def _rmtree(self, path: str) -> None: # Essentially a stripped down version of shutil.rmtree. We can't # use globals because they may be None'ed out at shutdown. 
for name in self._listdir(path): fullname = self._path_join(path, name) try: isdir = self._isdir(fullname) and not self._islink(fullname) except self._os_error: isdir = False if isdir: self._rmtree(fullname) else: try: self._remove(fullname) except self._os_error: pass try: self._rmdir(path) except self._os_error: pass mypy-0.761/test-data/stdlib-samples/3.2/test/0000755€tŠÔÚ€2›s®0000000000013576752267025015 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/stdlib-samples/3.2/test/__init__.py0000644€tŠÔÚ€2›s®0000000000013576752246027111 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/stdlib-samples/3.2/test/mypy.ini0000644€tŠÔÚ€2›s®0000000002613576752246026507 0ustar jukkaDROPBOX\Domain Users00000000000000[mypy] mypy_path = .. mypy-0.761/test-data/stdlib-samples/3.2/test/randv2_32.pck0000644€tŠÔÚ€2›s®0000001653513576752246027223 0ustar jukkaDROPBOX\Domain Users00000000000000crandom Random p0 (tRp1 (I2 (I-2147483648 I-845974985 I-1294090086 I1193659239 I-1849481736 I-946579732 I-34406770 I1749049471 I1997774682 I1432026457 I1288127073 I-943175655 I-1718073964 I339993548 I-1045260575 I582505037 I-1555108250 I-1114765620 I1578648750 I-350384412 I-20845848 I-288255314 I738790953 I1901249641 I1999324672 I-277361068 I-1515885839 I2061761596 I-809068089 I1287981136 I258129492 I-6303745 I-765148337 I1090344911 I1653434703 I-1242923628 I1639171313 I-1870042660 I-1655014050 I345609048 I2093410138 I1963263374 I-2122098342 I1336859961 I-810942729 I945857753 I2103049942 I623922684 I1418349549 I690877342 I754973107 I-1605111847 I1607137813 I-1704917131 I1317536428 I1714882872 I-1665385120 I1823694397 I-1790836866 I-1696724812 I-603979847 I-498599394 I-341265291 I927388804 I1778562135 I1716895781 I1023198122 I1726145967 I941955525 I1240148950 I-1929634545 I-1288147083 I-519318335 I754559777 I-707571958 I374604022 I420424061 I-1095443486 I1621934944 I-1220502522 I-140049608 I-918917122 I304341024 I-1637446057 I-353934485 I1973436235 I433380241 
I-686759465 I-2111563154 I-573422032 I804304541 I1513063483 I1417381689 I-804778729 I211756408 I544537322 I890881641 I150378374 I1765739392 I1011604116 I584889095 I1400520554 I413747808 I-1741992587 I-1882421574 I-1373001903 I-1885348538 I903819480 I1083220038 I-1318105424 I1740421404 I1693089625 I775965557 I1319608037 I-2127475785 I-367562895 I-1416273451 I1693000327 I-1217438421 I834405522 I-128287275 I864057548 I-973917356 I7304111 I1712253182 I1353897741 I672982288 I1778575559 I-403058377 I-38540378 I-1393713496 I13193171 I1127196200 I205176472 I-2104790506 I299985416 I1403541685 I-1018270667 I-1980677490 I-1182625797 I1637015181 I-1795357414 I1514413405 I-924516237 I-1841873650 I-1014591269 I1576616065 I-1319103135 I-120847840 I2062259778 I-9285070 I1160890300 I-575137313 I-1509108275 I46701926 I-287560914 I-256824960 I577558250 I900598310 I944607867 I2121154920 I-1170505192 I-1347170575 I77247778 I-1899015765 I1234103327 I1027053658 I1934632322 I-792031234 I1147322536 I1290655117 I1002059715 I1325898538 I896029793 I-790940694 I-980470721 I-1922648255 I-951672814 I291543943 I1158740218 I-1959023736 I-1977185236 I1527900076 I514104195 I-814154113 I-593157883 I-1023704660 I1285688377 I-2117525386 I768954360 I-38676846 I-799848659 I-1305517259 I-1938213641 I-462146758 I-1663302892 I1899591069 I-22935388 I-275856976 I-443736893 I-739441156 I93862068 I-838105669 I1735629845 I-817484206 I280814555 I1753547179 I1811123479 I1974543632 I-48447465 I-642694345 I-531149613 I518698953 I-221642627 I-686519187 I776644303 I257774400 I-1499134857 I-1055273455 I-237023943 I1981752330 I-917671662 I-372905983 I1588058420 I1171936660 I-1730977121 I1360028989 I1769469287 I1910709542 I-852692959 I1396944667 I-1723999155 I-310975435 I-1965453954 I-1636858570 I2005650794 I680293715 I1355629386 I844514684 I-1909152807 I-808646074 I1936510018 I1134413810 I-143411047 I-1478436304 I1394969244 I-1170110660 I1963112086 I-1518351049 I-1506287443 I-455023090 I-855366028 I-1746785568 
I933990882 I-703625141 I-285036872 I188277905 I1471578620 I-981382835 I-586974220 I945619758 I1608778444 I-1708548066 I-1897629320 I-42617810 I-836840790 I539154487 I-235706962 I332074418 I-575700589 I1534608003 I632116560 I-1819760653 I642052958 I-722391771 I-1104719475 I-1196847084 I582413973 I1563394876 I642007944 I108989456 I361625014 I677308625 I-1806529496 I-959050708 I-1858251070 I-216069832 I701624579 I501238033 I12287030 I1895107107 I2089098638 I-874806230 I1236279203 I563718890 I-544352489 I-1879707498 I1767583393 I-1776604656 I-693294301 I-88882831 I169303357 I1299196152 I-1122791089 I-379157172 I1934671851 I1575736961 I-19573174 I-1401511009 I9305167 I-1115174467 I1670735537 I1226436501 I-2004524535 I1767463878 I-1722855079 I-559413926 I1529810851 I1201272087 I-1297130971 I-1188149982 I1396557188 I-370358342 I-1006619702 I1600942463 I906087130 I-76991909 I2069580179 I-1674195181 I-2098404729 I-940972459 I-573399187 I-1930386277 I-721311199 I-647834744 I1452181671 I688681916 I1812793731 I1704380620 I-1389615179 I866287837 I-1435265007 I388400782 I-147986600 I-1613598851 I-1040347408 I782063323 I-239282031 I-575966722 I-1865208174 I-481365146 I579572803 I-1239481494 I335361280 I-429722947 I1881772789 I1908103808 I1653690013 I-1668588344 I1933787953 I-2033480609 I22162797 I-1516527040 I-461232482 I-16201372 I-2043092030 I114990337 I-1524090084 I1456374020 I458606440 I-1928083218 I227773125 I-1129028159 I1678689 I1575896907 I-1792935220 I-151387575 I64084088 I-95737215 I1337335688 I-1963466345 I1243315130 I-1798518411 I-546013212 I-607065396 I1219824160 I1715218469 I-1368163783 I1701552913 I-381114888 I1068821717 I266062971 I-2066513172 I1767407229 I-780936414 I-705413443 I-1256268847 I1646874149 I1107690353 I839133072 I67001749 I860763503 I884880613 I91977084 I755371933 I420745153 I-578480690 I-1520193551 I1011369331 I-99754575 I-733141064 I-500598588 I1081124271 I-1341266575 I921002612 I-848852487 I-1904467341 I-1294256973 I-94074714 I-1778758498 
I-1401188547 I2101830578 I2058864877 I-272875991 I-1375854779 I-1332937870 I619425525 I-1034529639 I-36454393 I-2030499985 I-1637127500 I-1408110287 I-2108625749 I-961007436 I1475654951 I-791946251 I1667792115 I1818978830 I1897980514 I1959546477 I-74478911 I-508643347 I461594399 I538802715 I-2094970071 I-2076660253 I1091358944 I1944029246 I-343957436 I-1915845022 I1237620188 I1144125174 I1522190520 I-670252952 I-19469226 I675626510 I758750096 I909724354 I-1846259652 I544669343 I445182495 I-821519930 I-1124279685 I-1668995122 I1653284793 I-678555151 I-687513207 I1558259445 I-1978866839 I1558835601 I1732138472 I-1904793363 I620020296 I1562597874 I1942617227 I-549632552 I721603795 I417978456 I-1355281522 I-538065208 I-1079523196 I187375699 I449064972 I1018083947 I1632388882 I-493269866 I92769041 I1477146750 I1782708404 I444873376 I1085851104 I-6823272 I-1302251853 I1602050688 I-1042187824 I287161745 I-1972094479 I103271491 I2131619773 I-2064115870 I766815498 I990861458 I-1664407378 I1083746756 I-1018331904 I-677315687 I-951670647 I-952356874 I451460609 I-818615564 I851439508 I656362634 I-1351240485 I823378078 I1985597385 I597757740 I-1512303057 I1590872798 I1108424213 I818850898 I-1368594306 I-201107761 I1793370378 I1247597611 I-1594326264 I-601653890 I427642759 I248322113 I-292545338 I1708985870 I1917042771 I429354503 I-478470329 I793960014 I369939133 I1728189157 I-518963626 I-278523974 I-1877289696 I-2088617658 I-1367940049 I-62295925 I197975119 I-252900777 I803430539 I485759441 I-528283480 I-1287443963 I-478617444 I-861906946 I-649095555 I-893184337 I2050571322 I803433133 I1629574571 I1649720417 I-2050225209 I1208598977 I720314344 I-615166251 I-835077127 I-1405372429 I995698064 I148123240 I-943016676 I-594609622 I-1381596711 I1017195301 I-1268893013 I-1815985179 I-1393570351 I-870027364 I-476064472 I185582645 I569863326 I1098584267 I-1599147006 I-485054391 I-852098365 I1477320135 I222316762 I-1515583064 I-935051367 I393383063 I819617226 I722921837 I-1241806499 
I-1358566385 I1666813591 I1333875114 I-1663688317 I-47254623 I-885800726 I307388991 I-1219459496 I1374870300 I2132047877 I-1385624198 I-245139206 I1015139214 I-926198559 I1969798868 I-1950480619 I-559193432 I-1256446518 I-1983476981 I790179655 I1004289659 I1541827617 I1555805575 I501127333 I-1123446797 I-453230915 I2035104883 I1296122398 I-1843698604 I-715464588 I337143971 I-1972119192 I606777909 I726977302 I-1149501872 I-1963733522 I-1797504644 I624 tp2 Ntp3 b.mypy-0.761/test-data/stdlib-samples/3.2/test/randv2_64.pck0000644€tŠÔÚ€2›s®0000001630513576752246027223 0ustar jukkaDROPBOX\Domain Users00000000000000crandom Random p0 (tRp1 (I2 (I2147483648 I1812115682 I2741755497 I1028055730 I809166036 I2773628650 I62321950 I535290043 I349877800 I976167039 I2490696940 I3631326955 I2107991114 I2941205793 I3199611605 I1871971556 I1456108540 I2984591044 I140836801 I4203227310 I3652722980 I4031971234 I555769760 I697301296 I2347638880 I3302335858 I320255162 I2553586608 I1570224361 I2838780912 I2315834918 I2351348158 I3545433015 I2292018579 I1177569331 I758497559 I2913311175 I1014948880 I1793619243 I3982451053 I3850988342 I2393984324 I1583100093 I3144742543 I3655047493 I3507532385 I3094515442 I350042434 I2455294844 I1038739312 I313809152 I189433072 I1653165452 I4186650593 I19281455 I2589680619 I4145931590 I4283266118 I636283172 I943618337 I3170184633 I2308766231 I634615159 I538152647 I2079576891 I1029442616 I3410689412 I1370292761 I1071718978 I2139496322 I1876699543 I3485866187 I3157490130 I1633105386 I1453253160 I3841322080 I3789608924 I4110770792 I95083673 I931354627 I2065389591 I3448339827 I3348204577 I3263528560 I2411324590 I4003055026 I1869670093 I2737231843 I4150701155 I2689667621 I2993263224 I3239890140 I1191430483 I1214399779 I3623428533 I1817058866 I3052274451 I326030082 I1505129312 I2306812262 I1349150363 I1099127895 I2543465574 I2396380193 I503926466 I1607109730 I3451716817 I58037114 I4290081119 I947517597 I3083440186 I520522630 I2948962496 I4184319574 I2957636335 
I668374201 I2325446473 I472785314 I3791932366 I573017189 I2185725379 I1262251492 I3525089379 I2951262653 I1305347305 I940958122 I3343754566 I359371744 I3874044973 I396897232 I147188248 I716683703 I4013880315 I1133359586 I1794612249 I3480815192 I3988787804 I1729355809 I573408542 I1419310934 I1770030447 I3552845567 I1693976502 I1271189893 I2298236738 I2049219027 I3464198070 I1233574082 I1007451781 I1838253750 I687096593 I1131375603 I1223013895 I1490478435 I339265439 I4232792659 I491538536 I2816256769 I1044097522 I2566227049 I748762793 I1511830494 I3593259822 I4121279213 I3735541309 I3609794797 I1939942331 I377570434 I1437957554 I1831285696 I55062811 I2046783110 I1303902283 I1838349877 I420993556 I1256392560 I2795216506 I2783687924 I3322303169 I512794749 I308405826 I517164429 I3320436022 I1328403632 I2269184746 I3729522810 I3304314450 I2238756124 I1690581361 I3813277532 I4119706879 I2659447875 I388818978 I2064580814 I1586227676 I2627522685 I2017792269 I547928109 I859107450 I1062238929 I858886237 I3795783146 I4173914756 I3835915965 I3329504821 I3494579904 I838863205 I3399734724 I4247387481 I3618414834 I2984433798 I2165205561 I4260685684 I3045904244 I3450093836 I3597307595 I3215851166 I3162801328 I2558283799 I950068105 I1829664117 I3108542987 I2378860527 I790023460 I280087750 I1171478018 I2333653728 I3976932140 I896746152 I1802494195 I1232873794 I2749440836 I2032037296 I2012091682 I1296131034 I3892133385 I908161334 I2296791795 I548169794 I696265 I893156828 I426904709 I3565374535 I2655906825 I2792178515 I2406814632 I4038847579 I3123934642 I2197503004 I3535032597 I2266216689 I2117613462 I1787448518 I1875089416 I2037165384 I1140676321 I3606296464 I3229138231 I2458267132 I1874651171 I3331900867 I1000557654 I1432861701 I473636323 I2691783927 I1871437447 I1328016401 I4118690062 I449467602 I681789035 I864889442 I1200888928 I75769445 I4008690037 I2464577667 I4167795823 I3070097648 I2579174882 I1216886568 I3810116343 I2249507485 I3266903480 I3671233480 I100191658 I3087121334 
I365063087 I3821275176 I2165052848 I1282465245 I3601570637 I3132413236 I2780570459 I3222142917 I3129794692 I2611590811 I947031677 I2991908938 I750997949 I3632575131 I1632014461 I2846484755 I2347261779 I2903959448 I1397316686 I1904578392 I774649578 I3164598558 I2429587609 I738244516 I1563304975 I1399317414 I1021316297 I3187933234 I2126780757 I4011907847 I4095169219 I3358010054 I2729978247 I3736811646 I3009656410 I2893043637 I4027447385 I1239610110 I1488806900 I2674866844 I442876374 I2853687260 I2785921005 I3151378528 I1180567 I2803146964 I982221759 I2192919417 I3087026181 I2480838002 I738452921 I687986185 I3049371676 I3636492954 I3468311299 I2379621102 I788988633 I1643210601 I2983998168 I2492730801 I2586048705 I604073029 I4121082815 I1496476928 I2972357110 I2663116968 I2642628592 I2116052039 I487186279 I2577680328 I3974766614 I730776636 I3842528855 I1929093695 I44626622 I3989908833 I1695426222 I3675479382 I3051784964 I1514876613 I1254036595 I2420450649 I3034377361 I2332990590 I1535175126 I185834384 I1107372900 I1707278185 I1286285295 I3332574225 I2785672437 I883170645 I2005666473 I3403131327 I4122021352 I1464032858 I3702576112 I260554598 I1837731650 I2594435345 I75771049 I2012484289 I3058649775 I29979703 I3861335335 I2506495152 I3786448704 I442947790 I2582724774 I4291336243 I2568189843 I1923072690 I1121589611 I837696302 I3284631720 I3865021324 I3576453165 I2559531629 I1459231762 I3506550036 I3754420159 I2622000757 I124228596 I1084328605 I1692830753 I547273558 I674282621 I655259103 I3188629610 I490502174 I2081001293 I3191330704 I4109943593 I1859948504 I3163806460 I508833168 I1256371033 I2709253790 I2068956572 I3092842814 I3913926529 I2039638759 I981982529 I536094190 I368855295 I51993975 I1597480732 I4058175522 I2155896702 I3196251991 I1081913893 I3952353788 I3545548108 I2370669647 I2206572308 I2576392991 I1732303374 I1153136290 I537641955 I1738691747 I3232854186 I2539632206 I2829760278 I3058187853 I1202425792 I3762361970 I2863949342 I2640635867 I376638744 I1857679757 
I330798087 I1457400505 I1135610046 I606400715 I1859536026 I509811335 I529772308 I2579273244 I1890382004 I3959908876 I2612335971 I2834052227 I1434475986 I3684202717 I4015011345 I582567852 I3689969571 I3934753460 I3034960691 I208573292 I4004113742 I3992904842 I2587153719 I3529179079 I1565424987 I779130678 I1048582935 I3213591622 I3607793434 I3951254937 I2047811901 I7508850 I248544605 I4210090324 I2331490884 I70057213 I776474945 I1345528889 I3290403612 I1664955269 I1533143116 I545003424 I4141564478 I1257326139 I868843601 I2337603029 I1918131449 I1843439523 I1125519035 I673340118 I421408852 I1520454906 I1804722630 I3621254196 I2329968000 I39464672 I430583134 I294026512 I53978525 I2892276105 I1418863764 I3419054451 I1391595797 I3544981798 I4191780858 I825672357 I2972000844 I1571305069 I4231982845 I3611916419 I3045163168 I2982349733 I278572141 I4215338078 I839860504 I1819151779 I1412347479 I1386770353 I3914589491 I3783104977 I4124296733 I830546258 I89825624 I4110601328 I2545483429 I300600527 I516641158 I3693021034 I2852912854 I3240039868 I4167407959 I1479557946 I3621188804 I1391590944 I3578441128 I1227055556 I406898396 I3064054983 I25835338 I402664165 I4097682779 I2106728012 I203613622 I3045467686 I1381726438 I3798670110 I1342314961 I3552497361 I535913619 I2625787583 I1606574307 I1101269630 I1950513752 I1121355862 I3586816903 I438529984 I2473182121 I1229997203 I405445940 I1695535315 I427014336 I3916768430 I392298359 I1884642868 I1244730821 I741058080 I567479957 I3527621168 I3191971011 I3267069104 I4108668146 I1520795587 I166581006 I473794477 I1562126550 I929843010 I889533294 I1266556608 I874518650 I3520162092 I3013765049 I4220231414 I547246449 I3998093769 I3737193746 I3872944207 I793651876 I2606384318 I875991012 I1394836334 I4102011644 I854380426 I2618666767 I2568302000 I1995512132 I229491093 I2673500286 I3364550739 I3836923416 I243656987 I3944388983 I4064949677 I1416956378 I1703244487 I3990798829 I2023425781 I3926702214 I1229015501 I3174247824 I624 tp2 Ntp3 
b.mypy-0.761/test-data/stdlib-samples/3.2/test/randv3.pck0000644€tŠÔÚ€2›s®0000001750413576752246026715 0ustar jukkaDROPBOX\Domain Users00000000000000crandom Random p0 (tRp1 (I3 (L2147483648L L994081831L L2806287265L L2228999830L L3396498069L L2956805457L L3273927761L L920726507L L1862624492L L2921292485L L1779526843L L2469105503L L251696293L L1254390717L L779197080L L3165356830L L2007365218L L1870028812L L2896519363L L1855578438L L979518416L L3481710246L L3191861507L L3993006593L L2967971479L L3353342753L L3576782572L L339685558L L2367675732L L116208555L L1220054437L L486597056L L1912115141L L1037044792L L4096904723L L3409146175L L3701651227L L315824610L L4138604583L L1385764892L L191878900L L2320582219L L3420677494L L2776503169L L1148247403L L829555069L L902064012L L2934642741L L2477108577L L2583928217L L1658612579L L2865447913L L129147346L L3691171887L L1569328110L L1372860143L L1054139183L L1617707080L L69020592L L3810271603L L1853953416L L3499803073L L1027545027L L3229043605L L250848720L L3324932626L L3537002962L L2494323345L L3238103962L L4147541579L L3636348186L L3025455083L L2678771977L L584700256L L3461826909L L854511420L L943463552L L3609239025L L3977577989L L253070090L L777394544L L2144086567L L1092947992L L854327284L L2222750082L L360183510L L1312466483L L3227531091L L2235022500L L3013060530L L2541091298L L3480126342L L1839762775L L2632608190L L1108889403L L3045050923L L731513126L L3505436788L L3062762017L L1667392680L L1354126500L L1143573930L L2816645702L L2100356873L L2817679106L L1210746010L L2409915248L L2910119964L L2309001420L L220351824L L3667352871L L3993148590L L2886160232L L4239393701L L1189270581L L3067985541L L147374573L L2355164869L L3696013550L L4227037846L L1905112743L L3312843689L L2930678266L L1828795355L L76933594L L3987100796L L1288361435L L3464529151L L965498079L L1444623093L L1372893415L L1536235597L L1341994850L L963594758L L2115295754L L982098685L L1053433904L L2078469844L L3059765792L L1753606181L L2130171254L L567588194L 
L529629426L L3621523534L L3027576564L L1176438083L L4096287858L L1168574683L L1425058962L L1429631655L L2902106759L L761900641L L1329183956L L1947050932L L447490289L L3282516276L L200037389L L921868197L L3331403999L L4088760249L L2188326318L L288401961L L1360802675L L314302808L L3314639210L L3749821203L L2286081570L L2768939062L L3200541016L L2133495482L L385029880L L4217232202L L3171617231L L1660846653L L2459987621L L2691776124L L4225030408L L3595396773L L1103680661L L539064057L L1492841101L L166195394L L757973658L L533893054L L2784879594L L1021821883L L2350548162L L176852116L L3503166025L L148079914L L1633466236L L2773090165L L1162846701L L3575737795L L1624178239L L2454894710L L3014691938L L526355679L L1870824081L L3362425857L L3907566665L L3462563184L L2229112004L L4203735748L L1557442481L L924133999L L1906634214L L880459727L L4065895870L L141426254L L1258450159L L3243115027L L1574958840L L313939294L L3055664260L L3459714255L L531778790L L509505506L L1620227491L L2675554942L L2516509560L L3797299887L L237135890L L3203142213L L1087745310L L1897151854L L3936590041L L132765167L L2385908063L L1360600289L L3574567769L L2752788114L L2644228966L L2377705183L L601277909L L4046480498L L324401408L L3279931760L L2227059377L L1538827493L L4220532064L L478044564L L2917117761L L635492832L L2319763261L L795944206L L1820473234L L1673151409L L1404095402L L1661067505L L3217106938L L2406310683L L1931309248L L2458622868L L3323670524L L3266852755L L240083943L L3168387397L L607722198L L1256837690L L3608124913L L4244969357L L1289959293L L519750328L L3229482463L L1105196988L L1832684479L L3761037224L L2363631822L L3297957711L L572766355L L1195822137L L2239207981L L2034241203L L163540514L L288160255L L716403680L L4019439143L L1536281935L L2345100458L L2786059178L L2822232109L L987025395L L3061166559L L490422513L L2551030115L L2638707620L L1344728502L L714108911L L2831719700L L2188615369L L373509061L L1351077504L L3136217056L L783521095L L2554949468L L2662499550L L1203826951L 
L1379632388L L1918858985L L607465976L L1980450237L L3540079211L L3397813410L L2913309266L L2289572621L L4133935327L L4166227663L L3371801704L L3065474909L L3580562343L L3832172378L L2556130719L L310473705L L3734014346L L2490413810L L347233056L L526668037L L1158393656L L544329703L L2150085419L L3914038146L L1060237586L L4159394837L L113205121L L309966775L L4098784465L L3635222960L L2417516569L L2089579233L L1725807541L L2728122526L L2365836523L L2504078522L L1443946869L L2384171411L L997046534L L3249131657L L1699875986L L3618097146L L1716038224L L2629818607L L2929217876L L1367250314L L1726434951L L1388496325L L2107602181L L2822366842L L3052979190L L3796798633L L1543813381L L959000121L L1363845999L L2952528150L L874184932L L1888387194L L2328695295L L3442959855L L841805947L L1087739275L L3230005434L L3045399265L L1161817318L L2898673139L L860011094L L940539782L L1297818080L L4243941623L L1577613033L L4204131887L L3819057225L L1969439558L L3297963932L L241874069L L3517033453L L2295345664L L1098911422L L886955008L L1477397621L L4279347332L L3616558791L L2384411957L L742537731L L764221540L L2871698900L L3530636393L L691256644L L758730966L L1717773090L L2751856377L L3188484000L L3767469670L L1623863053L L3533236793L L4099284176L L723921107L L310594036L L223978745L L2266565776L L201843303L L2969968546L L3351170888L L3465113624L L2712246712L L1521383057L L2384461798L L216357551L L2167301975L L3144653194L L2781220155L L3620747666L L95971265L L4255400243L L59999757L L4174273472L L3974511524L L1007123950L L3112477628L L806461512L L3148074008L L528352882L L2545979588L L2562281969L L3010249477L L1886331611L L3210656433L L1034099976L L2906893579L L1197048779L L1870004401L L3898300490L L2686856402L L3975723478L L613043532L L2565674353L L3760045310L L3468984376L L4126258L L303855424L L3988963552L L276256796L L544071807L L1023872062L L1747461519L L1975571260L L4033766958L L2946555557L L1492957796L L958271685L L46480515L L907760635L L1306626357L L819652378L L1172300279L L1116851319L 
L495601075L L1157715330L L534220108L L377320028L L1672286106L L2066219284L L1842386355L L2546059464L L1839457336L L3476194446L L3050550028L L594705582L L1905813535L L1813033412L L2700858157L L169067972L L4252889045L L1921944555L L497671474L L210143935L L2688398489L L325158375L L3450846447L L891760597L L712802536L L1132557436L L1417044075L L1639889660L L1746379970L L1478741647L L2817563486L L2573612532L L4266444457L L2911601615L L804745411L L2207254652L L1189140646L L3829725111L L3637367348L L1944731747L L2193440343L L1430195413L L1173515229L L1582618217L L2070767037L L247908936L L1460675439L L556001596L L327629335L L1036133876L L4228129605L L999174048L L3635804039L L1416550481L L1270540269L L4280743815L L39607659L L1552540623L L2762294062L L504137289L L4117044239L L1417130225L L1342970056L L1755716449L L1169447322L L2731401356L L2319976745L L2869221479L L23972655L L2251495389L L1429860878L L3728135992L L4241432973L L3698275076L L216416432L L4040046960L L246077176L L894675685L L3932282259L L3097205100L L2128818650L L1319010656L L1601974009L L2552960957L L3554016055L L4209395641L L2013340102L L3370447801L L2307272002L L1795091354L L202109401L L988345070L L2514870758L L1132726850L L582746224L L3112305421L L1843020683L L3600189223L L1101349165L L4211905855L L2866677581L L2881621130L L4165324109L L4238773191L L3635649550L L2670481044L L2996248219L L1676992480L L3473067050L L4205793699L L4019490897L L1579990481L L1899617990L L1136347713L L1802842268L L3591752960L L1197308739L L433629786L L4032142790L L3148041979L L3312138845L L3896860449L L3298182567L L907605170L L1658664067L L2682980313L L2523523173L L1208722103L L3808530363L L1079003946L L4282402864L L2041010073L L2667555071L L688018180L L1405121012L L4167994076L L3504695336L L1923944749L L1143598790L L3936268898L L3606243846L L1017420080L L4026211169L L596529763L L1844259624L L2840216282L L2673807759L L3407202575L L2737971083L L4075423068L L3684057432L L3146627241L L599650513L L69773114L L1257035919L L807485291L 
L2376230687L L3036593147L L2642411658L L106080044L L2199622729L L291834511L L2697611361L L11689733L L625123952L L3226023062L L3229663265L L753059444L L2843610189L L624L tp2 Ntp3 b.mypy-0.761/test-data/stdlib-samples/3.2/test/subprocessdata/0000755€tŠÔÚ€2›s®0000000000013576752267030037 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py0000644€tŠÔÚ€2›s®0000000106113576752246032400 0ustar jukkaDROPBOX\Domain Users00000000000000"""When called as a script, print a comma-separated list of the open file descriptors on stdout.""" import errno import os try: _MAXFD = os.sysconf("SC_OPEN_MAX") except: _MAXFD = 256 if __name__ == "__main__": fds = [] for fd in range(0, _MAXFD): try: st = os.fstat(fd) except OSError as e: if e.errno == errno.EBADF: continue raise # Ignore Solaris door files if st.st_mode & 0xF000 != 0xd000: fds.append(fd) print(','.join(map(str, fds))) mypy-0.761/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py0000644€tŠÔÚ€2›s®0000000020213576752246033061 0ustar jukkaDROPBOX\Domain Users00000000000000"""When called as a script, consumes the input""" import sys if __name__ == "__main__": for line in sys.stdin: pass mypy-0.761/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py0000644€tŠÔÚ€2›s®0000000023713576752246031340 0ustar jukkaDROPBOX\Domain Users00000000000000"""When ran as a script, simulates cat with no arguments.""" import sys if __name__ == "__main__": for line in sys.stdin: sys.stdout.write(line) mypy-0.761/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py0000644€tŠÔÚ€2›s®0000000037513576752246031531 0ustar jukkaDROPBOX\Domain Users00000000000000"""When called with a single argument, simulated fgrep with a single argument and no options.""" import sys if __name__ == "__main__": pattern = sys.argv[1] for line in sys.stdin: if pattern in line: sys.stdout.write(line) 
mypy-0.761/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py0000644€tŠÔÚ€2›s®0000000056413576752246033544 0ustar jukkaDROPBOX\Domain Users00000000000000import signal, subprocess, sys # On Linux this causes os.waitpid to fail with OSError as the OS has already # reaped our child process. The wait() passing the OSError on to the caller # and causing us to exit with an error is what we are testing against. signal.signal(signal.SIGCHLD, signal.SIG_IGN) subprocess.Popen([sys.executable, '-c', 'print("albatross")']).wait() mypy-0.761/test-data/stdlib-samples/3.2/test/support.py0000644€tŠÔÚ€2›s®0000015476613576752246027123 0ustar jukkaDROPBOX\Domain Users00000000000000"""Supporting definitions for the Python regression tests.""" if __name__ != 'test.support': raise ImportError('support must be imported from the test package') import contextlib import errno import functools import gc import socket import sys import os import platform import shutil import warnings import unittest import importlib import collections import re import subprocess import imp import time import sysconfig import fnmatch import logging.handlers import _thread, threading from typing import Any, Dict, cast #try: # import multiprocessing.process #except ImportError: # multiprocessing = None __all__ = [ "Error", "TestFailed", "ResourceDenied", "import_module", "verbose", "use_resources", "max_memuse", "record_original_stdout", "get_original_stdout", "unload", "unlink", "rmtree", "forget", "is_resource_enabled", "requires", "requires_mac_ver", "find_unused_port", "bind_port", "fcmp", "is_jython", "TESTFN", "HOST", "FUZZ", "SAVEDCWD", "temp_cwd", "findfile", "sortdict", "check_syntax_error", "open_urlresource", "check_warnings", "CleanImport", "EnvironmentVarGuard", "TransientResource", "captured_output", "captured_stdout", "captured_stdin", "captured_stderr", "time_out", "socket_peer_reset", "ioerror_peer_reset", "run_with_locale", 'temp_umask', "transient_internet", "set_memlimit", 
"bigmemtest", "bigaddrspacetest", "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup", "threading_cleanup", "reap_children", "cpython_only", "check_impl_detail", "get_attribute", "swap_item", "swap_attr", "requires_IEEE_754", "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink", "import_fresh_module", "failfast", ] class Error(Exception): """Base class for regression test exceptions.""" class TestFailed(Error): """Test failed.""" class ResourceDenied(unittest.SkipTest): """Test skipped because it requested a disallowed resource. This is raised when a test calls requires() for a resource that has not be enabled. It is used to distinguish between expected and unexpected skips. """ @contextlib.contextmanager def _ignore_deprecated_imports(ignore=True): """Context manager to suppress package and module deprecation warnings when importing them. If ignore is False, this context manager has no effect.""" if ignore: with warnings.catch_warnings(): warnings.filterwarnings("ignore", ".+ (module|package)", DeprecationWarning) yield None else: yield None def import_module(name, deprecated=False): """Import and return the module to be tested, raising SkipTest if it is not available. 
If deprecated is True, any module or package deprecation messages will be suppressed.""" with _ignore_deprecated_imports(deprecated): try: return importlib.import_module(name) except ImportError as msg: raise unittest.SkipTest(str(msg)) def _save_and_remove_module(name, orig_modules): """Helper function to save and remove a module from sys.modules Raise ImportError if the module can't be imported.""" # try to import the module and raise an error if it can't be imported if name not in sys.modules: __import__(name) del sys.modules[name] for modname in list(sys.modules): if modname == name or modname.startswith(name + '.'): orig_modules[modname] = sys.modules[modname] del sys.modules[modname] def _save_and_block_module(name, orig_modules): """Helper function to save and block a module in sys.modules Return True if the module was in sys.modules, False otherwise.""" saved = True try: orig_modules[name] = sys.modules[name] except KeyError: saved = False sys.modules[name] = None return saved def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): """Imports and returns a module, deliberately bypassing the sys.modules cache and importing a fresh copy of the module. Once the import is complete, the sys.modules cache is restored to its original state. Modules named in fresh are also imported anew if needed by the import. If one of these modules can't be imported, None is returned. Importing of modules named in blocked is prevented while the fresh import takes place. 
If deprecated is True, any module or package deprecation messages will be suppressed.""" # NOTE: test_heapq, test_json and test_warnings include extra sanity checks # to make sure that this utility function is working as expected with _ignore_deprecated_imports(deprecated): # Keep track of modules saved for later restoration as well # as those which just need a blocking entry removed orig_modules = {} names_to_remove = [] _save_and_remove_module(name, orig_modules) try: for fresh_name in fresh: _save_and_remove_module(fresh_name, orig_modules) for blocked_name in blocked: if not _save_and_block_module(blocked_name, orig_modules): names_to_remove.append(blocked_name) fresh_module = importlib.import_module(name) except ImportError: fresh_module = None finally: for orig_name, module in orig_modules.items(): sys.modules[orig_name] = module for name_to_remove in names_to_remove: del sys.modules[name_to_remove] return fresh_module def get_attribute(obj, name): """Get an attribute, raising SkipTest if AttributeError is raised.""" try: attribute = getattr(obj, name) except AttributeError: raise unittest.SkipTest("module %s has no attribute %s" % ( obj.__name__, name)) else: return attribute verbose = 1 # Flag set to 0 by regrtest.py use_resources = None # type: Any # Flag set to [] by regrtest.py max_memuse = 0 # Disable bigmem tests (they will still be run with # small sizes, to make sure they work.) real_max_memuse = 0 failfast = False match_tests = None # type: Any # _original_stdout is meant to hold stdout at the time regrtest began. # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever. # The point is to have some flavor of stdout the user can actually see. 
_original_stdout = None # type: 'Any' def record_original_stdout(stdout): global _original_stdout _original_stdout = stdout def get_original_stdout(): return _original_stdout or sys.stdout def unload(name): try: del sys.modules[name] except KeyError: pass def unlink(filename): try: os.unlink(filename) except OSError as error: # The filename need not exist. if error.errno not in (errno.ENOENT, errno.ENOTDIR): raise def rmtree(path): try: shutil.rmtree(path) except OSError as error: # Unix returns ENOENT, Windows returns ESRCH. if error.errno not in (errno.ENOENT, errno.ESRCH): raise def make_legacy_pyc(source): """Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location. The choice of .pyc or .pyo extension is done based on the __debug__ flag value. :param source: The file system path to the source file. The source file does not need to exist, however the PEP 3147 pyc file must exist. :return: The file system path to the legacy pyc file. """ pyc_file = imp.cache_from_source(source) up_one = os.path.dirname(os.path.abspath(source)) if __debug__: ch = 'c' else: ch = 'o' legacy_pyc = os.path.join(up_one, source + ch) os.rename(pyc_file, legacy_pyc) return legacy_pyc def forget(modname): """'Forget' a module was ever imported. This removes the module from sys.modules and deletes any PEP 3147 or legacy .pyc and .pyo files. """ unload(modname) for dirname in sys.path: source = os.path.join(dirname, modname + '.py') # It doesn't matter if they exist or not, unlink all possible # combinations of PEP 3147 and legacy pyc and pyo files. unlink(source + 'c') unlink(source + 'o') unlink(imp.cache_from_source(source, debug_override=True)) unlink(imp.cache_from_source(source, debug_override=False)) # On some platforms, should not run gui test even if it is allowed # in `use_resources'. 
#if sys.platform.startswith('win'): #import ctypes #import ctypes.wintypes #def _is_gui_available(): # UOI_FLAGS = 1 # WSF_VISIBLE = 0x0001 # class USEROBJECTFLAGS(ctypes.Structure): # _fields_ = [("fInherit", ctypes.wintypes.BOOL), # ("fReserved", ctypes.wintypes.BOOL), # ("dwFlags", ctypes.wintypes.DWORD)] # dll = ctypes.windll.user32 # h = dll.GetProcessWindowStation() # if not h: # raise ctypes.WinError() # uof = USEROBJECTFLAGS() # needed = ctypes.wintypes.DWORD() # res = dll.GetUserObjectInformationW(h, # UOI_FLAGS, # ctypes.byref(uof), # ctypes.sizeof(uof), # ctypes.byref(needed)) # if not res: # raise ctypes.WinError() # return bool(uof.dwFlags & WSF_VISIBLE) #else: def _is_gui_available(): return True def is_resource_enabled(resource): """Test whether a resource is enabled. Known resources are set by regrtest.py.""" return use_resources is not None and resource in use_resources def requires(resource, msg=None): """Raise ResourceDenied if the specified resource is not available. If the caller's module is __main__ then automatically return True. The possibility of False being returned occurs when regrtest.py is executing. """ if resource == 'gui' and not _is_gui_available(): raise unittest.SkipTest("Cannot use the 'gui' resource") # see if the caller's module is __main__ - if so, treat as if # the resource was set if sys._getframe(1).f_globals.get("__name__") == "__main__": return if not is_resource_enabled(resource): if msg is None: msg = "Use of the `%s' resource not enabled" % resource raise ResourceDenied(msg) def requires_mac_ver(*min_version): """Decorator raising SkipTest if the OS is Mac OS X and the OS X version if less than min_version. For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version is lesser than 10.5. 
""" def decorator(func): @functools.wraps(func) def wrapper(*args, **kw): if sys.platform == 'darwin': version_txt = platform.mac_ver()[0] try: version = tuple(map(int, version_txt.split('.'))) except ValueError: pass else: if version < min_version: min_version_txt = '.'.join(map(str, min_version)) raise unittest.SkipTest( "Mac OS X %s or higher required, not %s" % (min_version_txt, version_txt)) return func(*args, **kw) wrapper.min_version = min_version return wrapper return decorator HOST = 'localhost' def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): """Returns an unused port that should be suitable for binding. This is achieved by creating a temporary socket with the same family and type as the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to the specified host address (defaults to 0.0.0.0) with the port set to 0, eliciting an unused ephemeral port from the OS. The temporary socket is then closed and deleted, and the ephemeral port is returned. Either this method or bind_port() should be used for any tests where a server socket needs to be bound to a particular port for the duration of the test. Which one to use depends on whether the calling code is creating a python socket, or if an unused port needs to be provided in a constructor or passed to an external program (i.e. the -accept argument to openssl's s_server mode). Always prefer bind_port() over find_unused_port() where possible. Hard coded ports should *NEVER* be used. As soon as a server socket is bound to a hard coded port, the ability to run multiple instances of the test simultaneously on the same host is compromised, which makes the test a ticking time bomb in a buildbot environment. 
On Unix buildbots, this may simply manifest as a failed test, which can be recovered from without intervention in most cases, but on Windows, the entire python process can completely and utterly wedge, requiring someone to log in to the buildbot and manually kill the affected process. (This is easy to reproduce on Windows, unfortunately, and can be traced to the SO_REUSEADDR socket option having different semantics on Windows versus Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind, listen and then accept connections on identical host/ports. An EADDRINUSE socket.error will be raised at some point (depending on the platform and the order bind and listen were called on each socket). However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE will ever be raised when attempting to bind two identical host/ports. When accept() is called on each socket, the second caller's process will steal the port from the first caller, leaving them both in an awkwardly wedged state where they'll no longer respond to any signals or graceful kills, and must be forcibly killed via OpenProcess()/TerminateProcess(). The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option instead of SO_REUSEADDR, which effectively affords the same semantics as SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open Source world compared to Windows ones, this is a common mistake. A quick look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when openssl.exe is called with the 's_server' option, for example. See http://bugs.python.org/issue2550 for more info. 
The following site also has a very thorough description about the implications of both REUSEADDR and EXCLUSIVEADDRUSE on Windows: http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx) XXX: although this approach is a vast improvement on previous attempts to elicit unused ports, it rests heavily on the assumption that the ephemeral port returned to us by the OS won't immediately be dished back out to some other process when we close and delete our temporary socket but before our calling code has a chance to bind the returned port. We can deal with this issue if/when we come across it. """ tempsock = socket.socket(family, socktype) port = bind_port(tempsock) tempsock.close() #del tempsock return port def bind_port(sock, host=HOST): """Bind the socket to a free port and return the port number. Relies on ephemeral ports in order to ensure we are using an unbound port. This is important as many tests may be running simultaneously, especially in a buildbot environment. This method raises an exception if the sock.family is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR or SO_REUSEPORT set on it. Tests should *never* set these socket options for TCP/IP sockets. The only case for setting these options is testing multicasting via multiple UDP sockets. Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e. on Windows), it will be set on the socket. This will prevent anyone else from bind()'ing to our host/port for the duration of the test. 
""" if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM: if hasattr(socket, 'SO_REUSEADDR'): if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: raise TestFailed("tests should never set the SO_REUSEADDR " \ "socket option on TCP/IP sockets!") if hasattr(socket, 'SO_REUSEPORT'): if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: raise TestFailed("tests should never set the SO_REUSEPORT " \ "socket option on TCP/IP sockets!") if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'): sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) sock.bind((host, 0)) port = sock.getsockname()[1] return port FUZZ = 1e-6 def fcmp(x, y): # fuzzy comparison function if isinstance(x, float) or isinstance(y, float): try: fuzz = (abs(x) + abs(y)) * FUZZ if abs(x-y) <= fuzz: return 0 except: pass elif type(x) == type(y) and isinstance(x, (tuple, list)): for i in range(min(len(x), len(y))): outcome = fcmp(x[i], y[i]) if outcome != 0: return outcome return (len(x) > len(y)) - (len(x) < len(y)) return (x > y) - (x < y) # decorator for skipping tests on non-IEEE 754 platforms requires_IEEE_754 = unittest.skipUnless( cast(Any, float).__getformat__("double").startswith("IEEE"), "test requires IEEE 754 doubles") is_jython = sys.platform.startswith('java') TESTFN = '' # Filename used for testing if os.name == 'java': # Jython disallows @ in module names TESTFN = '$test' else: TESTFN = '@test' # Disambiguate TESTFN for parallel testing, while letting it remain a valid # module name. TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid()) # TESTFN_UNICODE is a non-ascii filename TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" if sys.platform == 'darwin': # In Mac OS X's VFS API file names are, by definition, canonically # decomposed Unicode, encoded using UTF-8. 
See QA1173: # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html import unicodedata TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) TESTFN_ENCODING = sys.getfilesystemencoding() # TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be # encoded by the filesystem encoding (in strict mode). It can be None if we # cannot generate such filename. TESTFN_UNENCODABLE = None # type: Any if os.name in ('nt', 'ce'): # skip win32s (0) or Windows 9x/ME (1) if sys.getwindowsversion().platform >= 2: # Different kinds of characters from various languages to minimize the # probability that the whole name is encodable to MBCS (issue #9819) TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" try: TESTFN_UNENCODABLE.encode(TESTFN_ENCODING) except UnicodeEncodeError: pass else: print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). ' 'Unicode filename tests may not be effective' % (TESTFN_UNENCODABLE, TESTFN_ENCODING)) TESTFN_UNENCODABLE = None # Mac OS X denies unencodable filenames (invalid utf-8) elif sys.platform != 'darwin': try: # ascii and utf-8 cannot encode the byte 0xff b'\xff'.decode(TESTFN_ENCODING) except UnicodeDecodeError: # 0xff will be encoded using the surrogate character u+DCFF TESTFN_UNENCODABLE = TESTFN \ + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape') else: # File system encoding (eg. ISO-8859-* encodings) can encode # the byte 0xff. Skip some unicode filename tests. pass # Save the initial cwd SAVEDCWD = os.getcwd() @contextlib.contextmanager def temp_cwd(name='tempcwd', quiet=False, path=None): """ Context manager that temporarily changes the CWD. An existing path may be provided as *path*, in which case this function makes no changes to the file system. Otherwise, the new CWD is created in the current directory and it's named *name*. If *quiet* is False (default) and it's not possible to create or change the CWD, an error is raised. 
If it's True, only a warning is raised and the original CWD is used. """ saved_dir = os.getcwd() is_temporary = False if path is None: path = name try: os.mkdir(name) is_temporary = True except OSError: if not quiet: raise warnings.warn('tests may fail, unable to create temp CWD ' + name, RuntimeWarning, stacklevel=3) try: os.chdir(path) except OSError: if not quiet: raise warnings.warn('tests may fail, unable to change the CWD to ' + name, RuntimeWarning, stacklevel=3) try: yield os.getcwd() finally: os.chdir(saved_dir) if is_temporary: rmtree(name) @contextlib.contextmanager def temp_umask(umask): """Context manager that temporarily sets the process umask.""" oldmask = os.umask(umask) try: yield None finally: os.umask(oldmask) def findfile(file, here=__file__, subdir=None): """Try to find a file on sys.path and the working directory. If it is not found the argument passed to the function is returned (this does not necessarily signal failure; could still be the legitimate path).""" if os.path.isabs(file): return file if subdir is not None: file = os.path.join(subdir, file) path = sys.path path = [os.path.dirname(here)] + path for dn in path: fn = os.path.join(dn, file) if os.path.exists(fn): return fn return file def sortdict(dict): "Like repr(dict), but in sorted order." items = sorted(dict.items()) reprpairs = ["%r: %r" % pair for pair in items] withcommas = ", ".join(reprpairs) return "{%s}" % withcommas def make_bad_fd(): """ Create an invalid file descriptor by opening and closing a file and return its fd. """ file = open(TESTFN, "wb") try: return file.fileno() finally: file.close() unlink(TESTFN) def check_syntax_error(testcase, statement): raise NotImplementedError('no compile built-in') #testcase.assertRaises(SyntaxError, compile, statement, # '', 'exec') def open_urlresource(url, *args, **kw): from urllib import request, parse check = kw.pop('check', None) filename = parse.urlparse(url)[2].split('/')[-1] # '/': it's URL! 
fn = os.path.join(os.path.dirname(__file__), "data", filename) def check_valid_file(fn): f = open(fn, *args, **kw) if check is None: return f elif check(f): f.seek(0) return f f.close() if os.path.exists(fn): f = check_valid_file(fn) if f is not None: return f unlink(fn) # Verify the requirement before downloading the file requires('urlfetch') print('\tfetching %s ...' % url, file=get_original_stdout()) f = request.urlopen(url, timeout=15) try: with open(fn, "wb") as out: s = f.read() while s: out.write(s) s = f.read() finally: f.close() f = check_valid_file(fn) if f is not None: return f raise TestFailed('invalid resource "%s"' % fn) class WarningsRecorder(object): """Convenience wrapper for the warnings list returned on entry to the warnings.catch_warnings() context manager. """ def __init__(self, warnings_list): self._warnings = warnings_list self._last = 0 def __getattr__(self, attr): if len(self._warnings) > self._last: return getattr(self._warnings[-1], attr) elif attr in warnings.WarningMessage._WARNING_DETAILS: return None raise AttributeError("%r has no attribute %r" % (self, attr)) #@property #def warnings(self): # return self._warnings[self._last:] def reset(self): self._last = len(self._warnings) def _filterwarnings(filters, quiet=False): """Catch the warnings, then check if all the expected warnings have been raised and re-raise unexpected warnings. If 'quiet' is True, only re-raise the unexpected warnings. """ # Clear the warning registry of the calling module # in order to re-raise the warnings. frame = sys._getframe(2) registry = frame.f_globals.get('__warningregistry__') if registry: registry.clear() with warnings.catch_warnings(record=True) as w: # Set filter "always" to record all warnings. Because # test_warnings swap the module, we need to look up in # the sys.modules dictionary. 
sys.modules['warnings'].simplefilter("always") yield WarningsRecorder(w) # Filter the recorded warnings reraise = list(w) missing = [] for msg, cat in filters: seen = False for w in reraise[:]: warning = w.message # Filter out the matching messages if (re.match(msg, str(warning), re.I) and issubclass(warning.__class__, cat)): seen = True reraise.remove(w) if not seen and not quiet: # This filter caught nothing missing.append((msg, cat.__name__)) if reraise: raise AssertionError("unhandled warning %s" % reraise[0]) if missing: raise AssertionError("filter (%r, %s) did not catch any warning" % missing[0]) @contextlib.contextmanager def check_warnings(*filters, **kwargs): """Context manager to silence warnings. Accept 2-tuples as positional arguments: ("message regexp", WarningCategory) Optional argument: - if 'quiet' is True, it does not fail if a filter catches nothing (default True without argument, default False if some filters are defined) Without argument, it defaults to: check_warnings(("", Warning), quiet=True) """ quiet = kwargs.get('quiet') if not filters: filters = (("", Warning),) # Preserve backward compatibility if quiet is None: quiet = True return _filterwarnings(filters, quiet) class CleanImport(object): """Context manager to force import to return a new module reference. This is useful for testing module-level behaviours, such as the emission of a DeprecationWarning on import. Use like this: with CleanImport("foo"): importlib.import_module("foo") # new reference """ def __init__(self, *module_names): self.original_modules = sys.modules.copy() for module_name in module_names: if module_name in sys.modules: module = sys.modules[module_name] # It is possible that module_name is just an alias for # another module (e.g. stub for modules renamed in 3.x). # In that case, we also need delete the real module to clear # the import cache. 
if module.__name__ != module_name: del sys.modules[module.__name__] del sys.modules[module_name] def __enter__(self): return self def __exit__(self, *ignore_exc): sys.modules.update(self.original_modules) class EnvironmentVarGuard(dict): """Class to help protect the environment variable properly. Can be used as a context manager.""" def __init__(self): self._environ = os.environ self._changed = {} def __getitem__(self, envvar): return self._environ[envvar] def __setitem__(self, envvar, value): # Remember the initial value on the first access if envvar not in self._changed: self._changed[envvar] = self._environ.get(envvar) self._environ[envvar] = value def __delitem__(self, envvar): # Remember the initial value on the first access if envvar not in self._changed: self._changed[envvar] = self._environ.get(envvar) if envvar in self._environ: del self._environ[envvar] def keys(self): return self._environ.keys() def __iter__(self): return iter(self._environ) def __len__(self): return len(self._environ) def set(self, envvar, value): self[envvar] = value def unset(self, envvar): del self[envvar] def __enter__(self): return self def __exit__(self, *ignore_exc): for k, v in self._changed.items(): if v is None: if k in self._environ: del self._environ[k] else: self._environ[k] = v os.environ = self._environ class DirsOnSysPath(object): """Context manager to temporarily add directories to sys.path. This makes a copy of sys.path, appends any directories given as positional arguments, then reverts sys.path to the copied settings when the context ends. Note that *all* sys.path modifications in the body of the context manager, including replacement of the object, will be reverted at the end of the block. 
""" def __init__(self, *paths): self.original_value = sys.path[:] self.original_object = sys.path sys.path.extend(paths) def __enter__(self): return self def __exit__(self, *ignore_exc): sys.path = self.original_object sys.path[:] = self.original_value class TransientResource(object): """Raise ResourceDenied if an exception is raised while the context manager is in effect that matches the specified exception and attributes.""" def __init__(self, exc, **kwargs): self.exc = exc self.attrs = kwargs def __enter__(self): return self def __exit__(self, type_=None, value=None, traceback=None): """If type_ is a subclass of self.exc and value has attributes matching self.attrs, raise ResourceDenied. Otherwise let the exception propagate (if any).""" if type_ is not None and issubclass(self.exc, type_): for attr, attr_value in self.attrs.items(): if not hasattr(value, attr): break if getattr(value, attr) != attr_value: break else: raise ResourceDenied("an optional resource is not available") # Context managers that raise ResourceDenied when various issues # with the Internet connection manifest themselves as exceptions. 
# XXX deprecate these and use transient_internet() instead time_out = TransientResource(IOError, errno=errno.ETIMEDOUT) socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET) ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET) @contextlib.contextmanager def transient_internet(resource_name, *, timeout=30.0, errnos=()): """Return a context manager that raises ResourceDenied when various issues with the Internet connection manifest themselves as exceptions.""" default_errnos = [ ('ECONNREFUSED', 111), ('ECONNRESET', 104), ('EHOSTUNREACH', 113), ('ENETUNREACH', 101), ('ETIMEDOUT', 110), ] default_gai_errnos = [ ('EAI_AGAIN', -3), ('EAI_FAIL', -4), ('EAI_NONAME', -2), ('EAI_NODATA', -5), # Encountered when trying to resolve IPv6-only hostnames ('WSANO_DATA', 11004), ] denied = ResourceDenied("Resource '%s' is not available" % resource_name) captured_errnos = errnos gai_errnos = [] if not captured_errnos: captured_errnos = [getattr(errno, name, num) for name, num in default_errnos] gai_errnos = [getattr(socket, name, num) for name, num in default_gai_errnos] def filter_error(err): n = getattr(err, 'errno', None) if (isinstance(err, socket.timeout) or (isinstance(err, socket.gaierror) and n in gai_errnos) or n in captured_errnos): if not verbose: sys.stderr.write(denied.args[0] + "\n") raise denied from err old_timeout = socket.getdefaulttimeout() try: if timeout is not None: socket.setdefaulttimeout(timeout) yield None except IOError as err: # urllib can wrap original socket errors multiple times (!), we must # unwrap to get at the original error. 
while True: a = err.args if len(a) >= 1 and isinstance(a[0], IOError): err = a[0] # The error can also be wrapped as args[1]: # except socket.error as msg: # raise IOError('socket error', msg).with_traceback(sys.exc_info()[2]) elif len(a) >= 2 and isinstance(a[1], IOError): err = a[1] else: break filter_error(err) raise # XXX should we catch generic exceptions and look for their # __cause__ or __context__? finally: socket.setdefaulttimeout(old_timeout) @contextlib.contextmanager def captured_output(stream_name): """Return a context manager used by captured_stdout/stdin/stderr that temporarily replaces the sys stream *stream_name* with a StringIO.""" import io orig_stdout = getattr(sys, stream_name) setattr(sys, stream_name, io.StringIO()) try: yield getattr(sys, stream_name) finally: setattr(sys, stream_name, orig_stdout) def captured_stdout(): """Capture the output of sys.stdout: with captured_stdout() as s: print("hello") self.assertEqual(s.getvalue(), "hello") """ return captured_output("stdout") def captured_stderr(): return captured_output("stderr") def captured_stdin(): return captured_output("stdin") def gc_collect(): """Force as many objects as possible to be collected. In non-CPython implementations of Python, this is needed because timely deallocation is not guaranteed by the garbage collector. (Even in CPython this can be the case in case of reference cycles.) This means that __del__ methods may be called later than expected and weakrefs may remain alive for longer than expected. This function tries its best to force all garbage objects to disappear. 
""" gc.collect() if is_jython: time.sleep(0.1) gc.collect() gc.collect() def python_is_optimized(): """Find if Python was built with optimizations.""" cflags = sysconfig.get_config_var('PY_CFLAGS') or '' final_opt = "" for opt in cflags.split(): if opt.startswith('-O'): final_opt = opt return final_opt and final_opt != '-O0' #======================================================================= # Decorator for running a function in a different locale, correctly resetting # it afterwards. def run_with_locale(catstr, *locales): def decorator(func): def inner(*args, **kwds): try: import locale category = getattr(locale, catstr) orig_locale = locale.setlocale(category) except AttributeError: # if the test author gives us an invalid category string raise except: # cannot retrieve original locale, so do nothing locale = orig_locale = None else: for loc in locales: try: locale.setlocale(category, loc) break except: pass # now run the function, resetting the locale on exceptions try: return func(*args, **kwds) finally: if locale and orig_locale: locale.setlocale(category, orig_locale) inner.__name__ = func.__name__ inner.__doc__ = func.__doc__ return inner return decorator #======================================================================= # Big-memory-test support. Separate from 'resources' because memory use # should be configurable. # Some handy shorthands. Note that these are used for byte-limits as well # as size-limits, in the various bigmem tests _1M = 1024*1024 _1G = 1024 * _1M _2G = 2 * _1G _4G = 4 * _1G MAX_Py_ssize_t = sys.maxsize def set_memlimit(limit): global max_memuse global real_max_memuse sizes = { 'k': 1024, 'm': _1M, 'g': _1G, 't': 1024*_1G, } m = re.match(r'(\d+(\.\d+)?) 
(K|M|G|T)b?$', limit, re.IGNORECASE | re.VERBOSE) if m is None: raise ValueError('Invalid memory limit %r' % (limit,)) memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()]) real_max_memuse = memlimit if memlimit > MAX_Py_ssize_t: memlimit = MAX_Py_ssize_t if memlimit < _2G - 1: raise ValueError('Memory limit %r too low to be useful' % (limit,)) max_memuse = memlimit def _memory_watchdog(start_evt, finish_evt, period=10.0): """A function which periodically watches the process' memory consumption and prints it out. """ # XXX: because of the GIL, and because the very long operations tested # in most bigmem tests are uninterruptible, the loop below gets woken up # much less often than expected. # The polling code should be rewritten in raw C, without holding the GIL, # and push results onto an anonymous pipe. try: page_size = os.sysconf('SC_PAGESIZE') except (ValueError, AttributeError): try: page_size = os.sysconf('SC_PAGE_SIZE') except (ValueError, AttributeError): page_size = 4096 procfile = '/proc/{pid}/statm'.format(pid=os.getpid()) try: f = open(procfile, 'rb') except IOError as e: warnings.warn('/proc not available for stats: {}'.format(e), RuntimeWarning) sys.stderr.flush() return with f: start_evt.set() old_data = -1 while not finish_evt.wait(period): f.seek(0) statm = f.read().decode('ascii') data = int(statm.split()[5]) if data != old_data: old_data = data print(" ... process data size: {data:.1f}G" .format(data=data * page_size / (1024 ** 3))) def bigmemtest(size, memuse, dry_run=True): """Decorator for bigmem tests. 'minsize' is the minimum useful size for the test (in arbitrary, test-interpreted units.) 'memuse' is the number of 'bytes per size' for the test, or a good estimate of it. if 'dry_run' is False, it means the test doesn't support dummy runs when -M is not specified. 
""" def decorator(f): def wrapper(self): size = wrapper.size memuse = wrapper.memuse if not real_max_memuse: maxsize = 5147 else: maxsize = size if ((real_max_memuse or not dry_run) and real_max_memuse < maxsize * memuse): raise unittest.SkipTest( "not enough memory: %.1fG minimum needed" % (size * memuse / (1024 ** 3))) if real_max_memuse and verbose and threading: print() print(" ... expected peak memory use: {peak:.1f}G" .format(peak=size * memuse / (1024 ** 3))) sys.stdout.flush() start_evt = threading.Event() finish_evt = threading.Event() t = threading.Thread(target=_memory_watchdog, args=(start_evt, finish_evt, 0.5)) t.daemon = True t.start() start_evt.set() else: t = None try: return f(self, maxsize) finally: if t: finish_evt.set() t.join() wrapper.size = size wrapper.memuse = memuse return wrapper return decorator def bigaddrspacetest(f): """Decorator for tests that fill the address space.""" def wrapper(self): if max_memuse < MAX_Py_ssize_t: if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31: raise unittest.SkipTest( "not enough memory: try a 32-bit build instead") else: raise unittest.SkipTest( "not enough memory: %.1fG minimum needed" % (MAX_Py_ssize_t / (1024 ** 3))) else: return f(self) return wrapper #======================================================================= # unittest integration. class BasicTestRunner: def run(self, test): result = unittest.TestResult() test(result) return result def _id(obj): return obj def requires_resource(resource): if resource == 'gui' and not _is_gui_available(): return unittest.skip("resource 'gui' is not available") if is_resource_enabled(resource): return _id else: return unittest.skip("resource {0!r} is not enabled".format(resource)) def cpython_only(test): """ Decorator for tests only applicable on CPython. 
""" return impl_detail(cpython=True)(test) def impl_detail(msg=None, **guards): if check_impl_detail(**guards): return _id if msg is None: guardnames, default = _parse_guards(guards) if default: msg = "implementation detail not available on {0}" else: msg = "implementation detail specific to {0}" guardnames = sorted(guardnames.keys()) msg = msg.format(' or '.join(guardnames)) return unittest.skip(msg) def _parse_guards(guards): # Returns a tuple ({platform_name: run_me}, default_value) if not guards: return ({'cpython': True}, False) is_true = list(guards.values())[0] assert list(guards.values()) == [is_true] * len(guards) # all True or all False return (guards, not is_true) # Use the following check to guard CPython's implementation-specific tests -- # or to run them only on the implementation(s) guarded by the arguments. def check_impl_detail(**guards): """This function returns True or False depending on the host platform. Examples: if check_impl_detail(): # only on CPython (default) if check_impl_detail(jython=True): # only on Jython if check_impl_detail(cpython=False): # everywhere except on CPython """ guards, default = _parse_guards(guards) return guards.get(platform.python_implementation().lower(), default) def _filter_suite(suite, pred): """Recursively filter test cases in a suite based on a predicate.""" newtests = [] for test in suite._tests: if isinstance(test, unittest.TestSuite): _filter_suite(test, pred) newtests.append(test) else: if pred(test): newtests.append(test) suite._tests = newtests def _run_suite(suite): """Run tests from a unittest.TestSuite-derived class.""" if verbose: runner = unittest.TextTestRunner(sys.stdout, verbosity=2, failfast=failfast) else: runner = BasicTestRunner() result = runner.run(suite) if not result.wasSuccessful(): if len(result.errors) == 1 and not result.failures: err = result.errors[0][1] elif len(result.failures) == 1 and not result.errors: err = result.failures[0][1] else: err = "multiple errors occurred" if not 
verbose: err += "; run in verbose mode for details" raise TestFailed(err) def run_unittest(*classes): """Run tests from unittest.TestCase-derived classes.""" valid_types = (unittest.TestSuite, unittest.TestCase) suite = unittest.TestSuite() for cls in classes: if isinstance(cls, str): if cls in sys.modules: suite.addTest(unittest.findTestCases(sys.modules[cls])) else: raise ValueError("str arguments must be keys in sys.modules") elif isinstance(cls, valid_types): suite.addTest(cls) else: suite.addTest(unittest.makeSuite(cls)) def case_pred(test): if match_tests is None: return True for name in test.id().split("."): if fnmatch.fnmatchcase(name, match_tests): return True return False _filter_suite(suite, case_pred) _run_suite(suite) #======================================================================= # doctest driver. def run_doctest(module, verbosity=None): """Run doctest on the given module. Return (#failures, #tests). If optional argument verbosity is not specified (or is None), pass support's belief about verbosity on to doctest. Else doctest's usual behavior is used (it searches sys.argv for -v). """ import doctest if verbosity is None: verbosity = verbose else: verbosity = None f, t = doctest.testmod(module, verbose=verbosity) if f: raise TestFailed("%d of %d doctests failed" % (f, t)) if verbose: print('doctest (%s) ... %d tests with zero failures' % (module.__name__, t)) return f, t #======================================================================= # Support for saving and restoring the imported modules. def modules_setup(): return sys.modules.copy(), def modules_cleanup(oldmodules): # Encoders/decoders are registered permanently within the internal # codec cache. If we destroy the corresponding modules their # globals will be set to None which will trip up the cached functions. 
    encodings = [(k, v) for k, v in sys.modules.items()
                 if k.startswith('encodings.')]
    sys.modules.clear()
    sys.modules.update(encodings)
    # XXX: This kind of problem can affect more than just encodings. In
    # particular extension modules (such as _ssl) don't cope with reloading
    # properly.  Really, test modules should be cleaning out the test specific
    # modules they know they added (ala test_runpy) rather than relying on
    # this function (as test_importhooks and test_pkg do currently).
    # Implicitly imported *real* modules should be left alone (see issue 10556).
    sys.modules.update(oldmodules)


#=======================================================================
# Threading support to prevent reporting refleaks when running regrtest.py -R

# NOTE: we use thread._count() rather than threading.enumerate() (or the
# moral equivalent thereof) because a threading.Thread object is still alive
# until its __bootstrap() method has returned, even after it has been
# unregistered from the threading module.
# thread._count(), on the other hand, only gets decremented *after* the
# __bootstrap() method has returned, which gives us reliable reference counts
# at the end of a test run.

def threading_setup():
    """Snapshot the live-thread count and dangling-thread set, for later
    comparison by threading_cleanup()."""
    if _thread:
        return _thread._count(), threading._dangling.copy()
    else:
        # Threading unavailable: return a constant placeholder.
        return 1, ()

def threading_cleanup(*original_values):
    """Poll until the thread bookkeeping returns to *original_values*
    (captured by threading_setup), for at most _MAX_COUNT iterations."""
    if not _thread:
        return
    _MAX_COUNT = 10
    for count in range(_MAX_COUNT):
        values = _thread._count(), threading._dangling
        if values == original_values:
            break
        time.sleep(0.1)
        gc_collect()
    # XXX print a warning in case of failure?

def reap_threads(func):
    """Use this function when threads are being used.  This will
    ensure that the threads are cleaned up even when the test fails.
    If threading is unavailable this function does nothing.
""" if not _thread: return func @functools.wraps(func) def decorator(*args): key = threading_setup() try: return func(*args) finally: threading_cleanup(*key) return decorator def reap_children(): """Use this function at the end of test_main() whenever sub-processes are started. This will help ensure that no extra children (zombies) stick around to hog resources and create problems when looking for refleaks. """ # Reap all our dead child processes so we don't leave zombies around. # These hog resources and might be causing some of the buildbots to die. if hasattr(os, 'waitpid'): any_process = -1 while True: try: # This will raise an exception on Windows. That's ok. pid, status = os.waitpid(any_process, os.WNOHANG) if pid == 0: break except: break @contextlib.contextmanager def swap_attr(obj, attr, new_val): """Temporary swap out an attribute with a new object. Usage: with swap_attr(obj, "attr", 5): ... This will set obj.attr to 5 for the duration of the with: block, restoring the old value at the end of the block. If `attr` doesn't exist on `obj`, it will be created and then deleted at the end of the block. """ if hasattr(obj, attr): real_val = getattr(obj, attr) setattr(obj, attr, new_val) try: yield None finally: setattr(obj, attr, real_val) else: setattr(obj, attr, new_val) try: yield None finally: delattr(obj, attr) @contextlib.contextmanager def swap_item(obj, item, new_val): """Temporary swap out an item with a new object. Usage: with swap_item(obj, "item", 5): ... This will set obj["item"] to 5 for the duration of the with: block, restoring the old value at the end of the block. If `item` doesn't exist on `obj`, it will be created and then deleted at the end of the block. 
""" if item in obj: real_val = obj[item] obj[item] = new_val try: yield None finally: obj[item] = real_val else: obj[item] = new_val try: yield None finally: del obj[item] def strip_python_stderr(stderr): """Strip the stderr of a Python process from potential debug output emitted by the interpreter. This will typically be run on the result of the communicate() method of a subprocess.Popen object. """ stderr = re.sub(br"\[\d+ refs\]\r?\n?$", b"", stderr).strip() return stderr def args_from_interpreter_flags(): """Return a list of command-line arguments reproducing the current settings in sys.flags.""" flag_opt_map = { 'bytes_warning': 'b', 'dont_write_bytecode': 'B', 'hash_randomization': 'R', 'ignore_environment': 'E', 'no_user_site': 's', 'no_site': 'S', 'optimize': 'O', 'verbose': 'v', } args = [] for flag, opt in flag_opt_map.items(): v = getattr(sys.flags, flag) if v > 0: args.append('-' + opt * v) return args #============================================================ # Support for assertions about logging. #============================================================ class TestHandler(logging.handlers.BufferingHandler): def __init__(self, matcher): # BufferingHandler takes a "capacity" argument # so as to know when to flush. As we're overriding # shouldFlush anyway, we can set a capacity of zero. # You can call flush() manually to clear out the # buffer. logging.handlers.BufferingHandler.__init__(self, 0) self.matcher = matcher def shouldFlush(self, record): return False def emit(self, record): self.format(record) self.buffer.append(record.__dict__) def matches(self, **kwargs): """ Look for a saved dict whose keys/values match the supplied arguments. """ result = False for d in self.buffer: if self.matcher.matches(d, **kwargs): result = True break return result class Matcher(object): _partial_matches = ('msg', 'message') def matches(self, d, **kwargs): """ Try to match a single dict with the supplied arguments. 
        Keys whose values are strings and which are in self._partial_matches
        will be checked for partial (i.e. substring) matches. You can extend
        this scheme to (for example) do regular expression matching, etc.
        """
        result = True
        for k in kwargs:
            v = kwargs[k]
            dv = d.get(k)
            if not self.match_value(k, dv, v):
                result = False
                break
        return result

    def match_value(self, k, dv, v):
        """
        Try to match a single stored value (dv) with a supplied value (v).
        """
        if type(v) != type(dv):
            result = False
        elif type(dv) is not str or k not in self._partial_matches:
            result = (v == dv)
        else:
            # Partial-match keys: v only needs to be a substring of dv.
            result = dv.find(v) >= 0
        return result


# Cached result of can_symlink(); None until first call.
_can_symlink = None  # type: Any
def can_symlink():
    """Return True if the platform can create symlinks, probing once and
    caching the result in _can_symlink."""
    global _can_symlink
    if _can_symlink is not None:
        return _can_symlink
    symlink_path = TESTFN + "can_symlink"
    try:
        os.symlink(TESTFN, symlink_path)
        can = True
    except (OSError, NotImplementedError, AttributeError):
        can = False
    else:
        os.remove(symlink_path)
    _can_symlink = can
    return can

def skip_unless_symlink(test):
    """Skip decorator for tests that require functional symlink"""
    ok = can_symlink()
    msg = "Requires functional symlink implementation"
    if ok:
        return test
    else:
        return unittest.skip(msg)(test)

def patch(test_instance, object_to_patch, attr_name, new_value):
    """Override 'object_to_patch'.'attr_name' with 'new_value'.

    Also, add a cleanup procedure to 'test_instance' to restore
    'object_to_patch' value for 'attr_name'.
    The 'attr_name' should be a valid attribute for 'object_to_patch'.
""" # check that 'attr_name' is a real attribute for 'object_to_patch' # will raise AttributeError if it does not exist getattr(object_to_patch, attr_name) # keep a copy of the old value attr_is_local = False try: old_value = object_to_patch.__dict__[attr_name] except (AttributeError, KeyError): old_value = getattr(object_to_patch, attr_name, None) else: attr_is_local = True # restore the value when the test is done def cleanup(): if attr_is_local: setattr(object_to_patch, attr_name, old_value) else: delattr(object_to_patch, attr_name) test_instance.addCleanup(cleanup) # actually override the attribute setattr(object_to_patch, attr_name, new_value) mypy-0.761/test-data/stdlib-samples/3.2/test/test_base64.py0000644€tŠÔÚ€2›s®0000002703713576752246027520 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from test import support import base64 import binascii import sys import subprocess from typing import Any class LegacyBase64TestCase(unittest.TestCase): def test_encodebytes(self) -> None: eq = self.assertEqual eq(base64.encodebytes(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=\n") eq(base64.encodebytes(b"a"), b"YQ==\n") eq(base64.encodebytes(b"ab"), b"YWI=\n") eq(base64.encodebytes(b"abc"), b"YWJj\n") eq(base64.encodebytes(b""), b"") eq(base64.encodebytes(b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. 
[]{}"), b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n") self.assertRaises(TypeError, base64.encodebytes, "") def test_decodebytes(self) -> None: eq = self.assertEqual eq(base64.decodebytes(b"d3d3LnB5dGhvbi5vcmc=\n"), b"www.python.org") eq(base64.decodebytes(b"YQ==\n"), b"a") eq(base64.decodebytes(b"YWI=\n"), b"ab") eq(base64.decodebytes(b"YWJj\n"), b"abc") eq(base64.decodebytes(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"), b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. []{}") eq(base64.decodebytes(b''), b'') self.assertRaises(TypeError, base64.decodebytes, "") def test_encode(self) -> None: eq = self.assertEqual from io import BytesIO infp = BytesIO(b'abcdefghijklmnopqrstuvwxyz' b'ABCDEFGHIJKLMNOPQRSTUVWXYZ' b'0123456789!@#0^&*();:<>,. []{}') outfp = BytesIO() base64.encode(infp, outfp) eq(outfp.getvalue(), b'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE' b'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT' b'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n') def test_decode(self) -> None: from io import BytesIO infp = BytesIO(b'd3d3LnB5dGhvbi5vcmc=') outfp = BytesIO() base64.decode(infp, outfp) self.assertEqual(outfp.getvalue(), b'www.python.org') class BaseXYTestCase(unittest.TestCase): def test_b64encode(self) -> None: eq = self.assertEqual # Test default alphabet eq(base64.b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=") eq(base64.b64encode(b'\x00'), b'AA==') eq(base64.b64encode(b"a"), b"YQ==") eq(base64.b64encode(b"ab"), b"YWI=") eq(base64.b64encode(b"abc"), b"YWJj") eq(base64.b64encode(b""), b"") eq(base64.b64encode(b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. 
[]{}"), b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Test with arbitrary alternative characters eq(base64.b64encode(b'\xd3V\xbeo\xf7\x1d', altchars=b'*$'), b'01a*b$cd') # Check if passing a str object raises an error self.assertRaises(TypeError, base64.b64encode, "") self.assertRaises(TypeError, base64.b64encode, b"", altchars="") # Test standard alphabet eq(base64.standard_b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=") eq(base64.standard_b64encode(b"a"), b"YQ==") eq(base64.standard_b64encode(b"ab"), b"YWI=") eq(base64.standard_b64encode(b"abc"), b"YWJj") eq(base64.standard_b64encode(b""), b"") eq(base64.standard_b64encode(b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. []{}"), b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Check if passing a str object raises an error self.assertRaises(TypeError, base64.standard_b64encode, "") self.assertRaises(TypeError, base64.standard_b64encode, b"", altchars="") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64encode(b'\xd3V\xbeo\xf7\x1d'), b'01a-b_cd') # Check if passing a str object raises an error self.assertRaises(TypeError, base64.urlsafe_b64encode, "") def test_b64decode(self) -> None: eq = self.assertEqual eq(base64.b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org") eq(base64.b64decode(b'AA=='), b'\x00') eq(base64.b64decode(b"YQ=="), b"a") eq(base64.b64decode(b"YWI="), b"ab") eq(base64.b64decode(b"YWJj"), b"abc") eq(base64.b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. 
[]{}") eq(base64.b64decode(b''), b'') # Test with arbitrary alternative characters eq(base64.b64decode(b'01a*b$cd', altchars=b'*$'), b'\xd3V\xbeo\xf7\x1d') # Check if passing a str object raises an error self.assertRaises(TypeError, base64.b64decode, "") self.assertRaises(TypeError, base64.b64decode, b"", altchars="") # Test standard alphabet eq(base64.standard_b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org") eq(base64.standard_b64decode(b"YQ=="), b"a") eq(base64.standard_b64decode(b"YWI="), b"ab") eq(base64.standard_b64decode(b"YWJj"), b"abc") eq(base64.standard_b64decode(b""), b"") eq(base64.standard_b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. []{}") # Check if passing a str object raises an error self.assertRaises(TypeError, base64.standard_b64decode, "") self.assertRaises(TypeError, base64.standard_b64decode, b"", altchars="") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64decode(b'01a-b_cd'), b'\xd3V\xbeo\xf7\x1d') self.assertRaises(TypeError, base64.urlsafe_b64decode, "") def test_b64decode_padding_error(self) -> None: self.assertRaises(binascii.Error, base64.b64decode, b'abc') def test_b64decode_invalid_chars(self) -> None: # issue 1466065: Test some invalid characters. 
        # Without validate=True, invalid characters are silently discarded;
        # with validate=True they raise binascii.Error.
        tests = ((b'%3d==', b'\xdd'),
                 (b'$3d==', b'\xdd'),
                 (b'[==', b''),
                 (b'YW]3=', b'am'),
                 (b'3{d==', b'\xdd'),
                 (b'3d}==', b'\xdd'),
                 (b'@@', b''),
                 (b'!', b''),
                 (b'YWJj\nYWI=', b'abcab'))
        for bstr, res in tests:
            self.assertEqual(base64.b64decode(bstr), res)
            with self.assertRaises(binascii.Error):
                base64.b64decode(bstr, validate=True)

    def test_b32encode(self) -> None:
        eq = self.assertEqual
        eq(base64.b32encode(b''), b'')
        eq(base64.b32encode(b'\x00'), b'AA======')
        eq(base64.b32encode(b'a'), b'ME======')
        eq(base64.b32encode(b'ab'), b'MFRA====')
        eq(base64.b32encode(b'abc'), b'MFRGG===')
        eq(base64.b32encode(b'abcd'), b'MFRGGZA=')
        eq(base64.b32encode(b'abcde'), b'MFRGGZDF')
        self.assertRaises(TypeError, base64.b32encode, "")

    def test_b32decode(self) -> None:
        eq = self.assertEqual
        eq(base64.b32decode(b''), b'')
        eq(base64.b32decode(b'AA======'), b'\x00')
        eq(base64.b32decode(b'ME======'), b'a')
        eq(base64.b32decode(b'MFRA===='), b'ab')
        eq(base64.b32decode(b'MFRGG==='), b'abc')
        eq(base64.b32decode(b'MFRGGZA='), b'abcd')
        eq(base64.b32decode(b'MFRGGZDF'), b'abcde')
        self.assertRaises(TypeError, base64.b32decode, "")

    def test_b32decode_casefold(self) -> None:
        eq = self.assertEqual
        eq(base64.b32decode(b'', True), b'')
        eq(base64.b32decode(b'ME======', True), b'a')
        eq(base64.b32decode(b'MFRA====', True), b'ab')
        eq(base64.b32decode(b'MFRGG===', True), b'abc')
        eq(base64.b32decode(b'MFRGGZA=', True), b'abcd')
        eq(base64.b32decode(b'MFRGGZDF', True), b'abcde')

        # Lower cases
        eq(base64.b32decode(b'me======', True), b'a')
        eq(base64.b32decode(b'mfra====', True), b'ab')
        eq(base64.b32decode(b'mfrgg===', True), b'abc')
        eq(base64.b32decode(b'mfrggza=', True), b'abcd')
        eq(base64.b32decode(b'mfrggzdf', True), b'abcde')

        # Expected exceptions
        self.assertRaises(TypeError, base64.b32decode, b'me======')

        # Mapping zero and one
        eq(base64.b32decode(b'MLO23456'), b'b\xdd\xad\xf3\xbe')
        eq(base64.b32decode(b'M1023456', map01=b'L'), b'b\xdd\xad\xf3\xbe')
        eq(base64.b32decode(b'M1023456', map01=b'I'),
           b'b\x1d\xad\xf3\xbe')
        self.assertRaises(TypeError, base64.b32decode, b"", map01="")

    def test_b32decode_error(self) -> None:
        self.assertRaises(binascii.Error, base64.b32decode, b'abc')
        self.assertRaises(binascii.Error, base64.b32decode, b'ABCDEF==')

    def test_b16encode(self) -> None:
        eq = self.assertEqual
        eq(base64.b16encode(b'\x01\x02\xab\xcd\xef'), b'0102ABCDEF')
        eq(base64.b16encode(b'\x00'), b'00')
        self.assertRaises(TypeError, base64.b16encode, "")

    def test_b16decode(self) -> None:
        eq = self.assertEqual
        eq(base64.b16decode(b'0102ABCDEF'), b'\x01\x02\xab\xcd\xef')
        eq(base64.b16decode(b'00'), b'\x00')
        # Lower case is not allowed without a flag
        self.assertRaises(binascii.Error, base64.b16decode, b'0102abcdef')
        # Case fold
        eq(base64.b16decode(b'0102abcdef', True), b'\x01\x02\xab\xcd\xef')
        self.assertRaises(TypeError, base64.b16decode, "")

    def test_ErrorHeritage(self) -> None:
        self.assertTrue(issubclass(binascii.Error, ValueError))


class TestMain(unittest.TestCase):
    # End-to-end tests of "python -m base64" as a subprocess.

    def get_output(self, *args_tuple: str, **options: Any) -> Any:
        args = [sys.executable, '-m', 'base64'] + list(args_tuple)
        return subprocess.check_output(args, **options)

    def test_encode_decode(self) -> None:
        # -t runs the module's built-in self-test round trip.
        output = self.get_output('-t')
        self.assertSequenceEqual(output.splitlines(), [
            b"b'Aladdin:open sesame'",
            br"b'QWxhZGRpbjpvcGVuIHNlc2FtZQ==\n'",
            b"b'Aladdin:open sesame'",
        ])

    def test_encode_file(self) -> None:
        with open(support.TESTFN, 'wb') as fp:
            fp.write(b'a\xffb\n')

        output = self.get_output('-e', support.TESTFN)
        self.assertEqual(output.rstrip(), b'Yf9iCg==')

        # Same encoding, but fed via stdin instead of a filename argument.
        with open(support.TESTFN, 'rb') as fp:
            output = self.get_output('-e', stdin=fp)
        self.assertEqual(output.rstrip(), b'Yf9iCg==')

    def test_decode(self) -> None:
        with open(support.TESTFN, 'wb') as fp:
            fp.write(b'Yf9iCg==')
        output = self.get_output('-d', support.TESTFN)
        self.assertEqual(output.rstrip(), b'a\xffb')


def test_main() -> None:
    support.run_unittest(__name__)

if __name__ == '__main__':
    test_main()
mypy-0.761/test-data/stdlib-samples/3.2/test/test_fnmatch.py0000644€tŠÔÚ€2›s®0000000603713576752246030041 0ustar jukkaDROPBOX\Domain Users00000000000000
"""Test cases for the fnmatch module."""

from test import support
import unittest

from fnmatch import fnmatch, fnmatchcase, translate, filter

from typing import Any, AnyStr, Callable


class FnmatchTestCase(unittest.TestCase):

    def check_match(self, filename: AnyStr, pattern: AnyStr,
                    should_match: int = 1, fn: Any = fnmatch) -> None:  # see #270
        # Assert that fn(filename, pattern) matches iff should_match is truthy.
        if should_match:
            self.assertTrue(fn(filename, pattern),
                         "expected %r to match pattern %r"
                         % (filename, pattern))
        else:
            self.assertTrue(not fn(filename, pattern),
                         "expected %r not to match pattern %r"
                         % (filename, pattern))

    def test_fnmatch(self) -> None:
        check = self.check_match
        check('abc', 'abc')
        check('abc', '?*?')
        check('abc', '???*')
        check('abc', '*???')
        check('abc', '???')
        check('abc', '*')
        check('abc', 'ab[cd]')
        check('abc', 'ab[!de]')
        check('abc', 'ab[de]', 0)
        check('a', '??', 0)
        check('a', 'b', 0)

        # these test that '\' is handled correctly in character sets;
        # see SF bug #409651
        check('\\', r'[\]')
        check('a', r'[!\]')
        check('\\', r'[!\]', 0)

        # test that filenames with newlines in them are handled correctly.
        # http://bugs.python.org/issue6665
        check('foo\nbar', 'foo*')
        check('foo\nbar\n', 'foo*')
        check('\nfoo', 'foo*', False)
        check('\n', '*')

    def test_mix_bytes_str(self) -> None:
        # fnmatch refuses to mix str and bytes arguments.
        self.assertRaises(TypeError, fnmatch, 'test', b'*')
        self.assertRaises(TypeError, fnmatch, b'test', '*')
        self.assertRaises(TypeError, fnmatchcase, 'test', b'*')
        self.assertRaises(TypeError, fnmatchcase, b'test', '*')

    def test_fnmatchcase(self) -> None:
        check = self.check_match
        check('AbC', 'abc', 0, fnmatchcase)
        check('abc', 'AbC', 0, fnmatchcase)

    def test_bytes(self) -> None:
        self.check_match(b'test', b'te*')
        self.check_match(b'test\xff', b'te*\xff')
        self.check_match(b'foo\nbar', b'foo*')


class TranslateTestCase(unittest.TestCase):

    def test_translate(self) -> None:
        self.assertEqual(translate('*'), r'.*\Z(?ms)')
        self.assertEqual(translate('?'), r'.\Z(?ms)')
        self.assertEqual(translate('a?b*'), r'a.b.*\Z(?ms)')
        self.assertEqual(translate('[abc]'), r'[abc]\Z(?ms)')
        self.assertEqual(translate('[]]'), r'[]]\Z(?ms)')
        self.assertEqual(translate('[!x]'), r'[^x]\Z(?ms)')
        self.assertEqual(translate('[^x]'), r'[\\^x]\Z(?ms)')
        self.assertEqual(translate('[x'), r'\\[x\Z(?ms)')


class FilterTestCase(unittest.TestCase):

    def test_filter(self) -> None:
        self.assertEqual(filter(['a', 'b'], 'a'), ['a'])


def test_main() -> None:
    support.run_unittest(FnmatchTestCase,
                         TranslateTestCase,
                         FilterTestCase)


if __name__ == "__main__":
    test_main()
mypy-0.761/test-data/stdlib-samples/3.2/test/test_genericpath.py0000644€tŠÔÚ€2›s®0000002632313576752246030712 0ustar jukkaDROPBOX\Domain Users00000000000000
"""
Tests common to genericpath, macpath, ntpath and posixpath
"""

import unittest
from test import support
import os

import genericpath
import imp
imp.reload(genericpath) # Make sure we are using the local copy

import sys

from typing import Any, List


def safe_rmdir(dirname: str) -> None:
    # Best-effort rmdir: ignore the error if the directory is absent/non-empty.
    try:
        os.rmdir(dirname)
    except OSError:
        pass


class GenericTest(unittest.TestCase):
    # The path module to be tested
    pathmodule = genericpath # type: Any
    common_attributes = ['commonprefix', 'getsize', 'getatime', 'getctime',
                         'getmtime', 'exists', 'isdir', 'isfile']
    # Extra attribute names supplied by subclasses.
    attributes = [] # type: List[str]

    def test_no_argument(self) -> None:
        for attr in self.common_attributes + self.attributes:
            with self.assertRaises(TypeError):
                getattr(self.pathmodule, attr)()
                self.fail("{}.{}() did not raise a TypeError"
                          .format(self.pathmodule.__name__, attr))

    def test_commonprefix(self) -> None:
        commonprefix = self.pathmodule.commonprefix
        self.assertEqual(
            commonprefix([]),
            ""
        )
        self.assertEqual(
            commonprefix(["/home/swenson/spam", "/home/swen/spam"]),
            "/home/swen"
        )
        self.assertEqual(
            commonprefix(["/home/swen/spam", "/home/swen/eggs"]),
            "/home/swen/"
        )
        self.assertEqual(
            commonprefix(["/home/swen/spam", "/home/swen/spam"]),
            "/home/swen/spam"
        )
        self.assertEqual(
            commonprefix(["home:swenson:spam", "home:swen:spam"]),
            "home:swen"
        )
        self.assertEqual(
            commonprefix([":home:swen:spam", ":home:swen:eggs"]),
            ":home:swen:"
        )
        self.assertEqual(
            commonprefix([":home:swen:spam", ":home:swen:spam"]),
            ":home:swen:spam"
        )
        self.assertEqual(
            commonprefix([b"/home/swenson/spam", b"/home/swen/spam"]),
            b"/home/swen"
        )
        self.assertEqual(
            commonprefix([b"/home/swen/spam", b"/home/swen/eggs"]),
            b"/home/swen/"
        )
        self.assertEqual(
            commonprefix([b"/home/swen/spam", b"/home/swen/spam"]),
            b"/home/swen/spam"
        )
        self.assertEqual(
            commonprefix([b"home:swenson:spam", b"home:swen:spam"]),
            b"home:swen"
        )
        self.assertEqual(
            commonprefix([b":home:swen:spam", b":home:swen:eggs"]),
            b":home:swen:"
        )
        self.assertEqual(
            commonprefix([b":home:swen:spam", b":home:swen:spam"]),
            b":home:swen:spam"
        )

        # Exhaustive pairwise check: the result must be a prefix of both
        # inputs, and maximal (next characters differ for unequal strings).
        testlist = ['', 'abc', 'Xbcd', 'Xb', 'XY', 'abcd',
                    'aXc', 'abd', 'ab', 'aX', 'abcX']
        for s1 in testlist:
            for s2 in testlist:
                p = commonprefix([s1, s2])
                self.assertTrue(s1.startswith(p))
                self.assertTrue(s2.startswith(p))
                if s1 != s2:
                    n = len(p)
                    self.assertNotEqual(s1[n:n+1], s2[n:n+1])

    def test_getsize(self) -> None:
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertEqual(self.pathmodule.getsize(support.TESTFN), 3)
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)

    def test_time(self) -> None:
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            # Append so that mtime advances past ctime.
            f = open(support.TESTFN, "ab")
            f.write(b"bar")
            f.close()
            f = open(support.TESTFN, "rb")
            d = f.read()
            f.close()
            self.assertEqual(d, b"foobar")

            self.assertLessEqual(
                self.pathmodule.getctime(support.TESTFN),
                self.pathmodule.getmtime(support.TESTFN)
            )
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)

    def test_exists(self) -> None:
        self.assertIs(self.pathmodule.exists(support.TESTFN), False)
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertIs(self.pathmodule.exists(support.TESTFN), True)
            # genericpath itself has no lexists(); only the concrete modules do.
            if not self.pathmodule == genericpath:
                self.assertIs(self.pathmodule.lexists(support.TESTFN),
                              True)
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)

    def test_isdir(self) -> None:
        self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
            # Replace the file with a directory of the same name.
            os.remove(support.TESTFN)
            os.mkdir(support.TESTFN)
            self.assertIs(self.pathmodule.isdir(support.TESTFN), True)
            os.rmdir(support.TESTFN)
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)
            safe_rmdir(support.TESTFN)

    def test_isfile(self) -> None:
        self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertIs(self.pathmodule.isfile(support.TESTFN), True)
            # Replace the file with a directory of the same name.
            os.remove(support.TESTFN)
            os.mkdir(support.TESTFN)
            self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
            os.rmdir(support.TESTFN)
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)
            safe_rmdir(support.TESTFN)


# Following TestCase is not supposed to be run from test_genericpath.
# It is inherited by other test modules (macpath, ntpath, posixpath).
class CommonTest(GenericTest): # The path module to be tested pathmodule = None # type: Any common_attributes = GenericTest.common_attributes + [ # Properties 'curdir', 'pardir', 'extsep', 'sep', 'pathsep', 'defpath', 'altsep', 'devnull', # Methods 'normcase', 'splitdrive', 'expandvars', 'normpath', 'abspath', 'join', 'split', 'splitext', 'isabs', 'basename', 'dirname', 'lexists', 'islink', 'ismount', 'expanduser', 'normpath', 'realpath', ] def test_normcase(self) -> None: normcase = self.pathmodule.normcase # check that normcase() is idempotent for p in ["FoO/./BaR", b"FoO/./BaR"]: p = normcase(p) self.assertEqual(p, normcase(p)) self.assertEqual(normcase(''), '') self.assertEqual(normcase(b''), b'') # check that normcase raises a TypeError for invalid types for path in (None, True, 0, 2.5, [], bytearray(b''), {'o','o'}): self.assertRaises(TypeError, normcase, path) def test_splitdrive(self) -> None: # splitdrive for non-NT paths splitdrive = self.pathmodule.splitdrive self.assertEqual(splitdrive("/foo/bar"), ("", "/foo/bar")) self.assertEqual(splitdrive("foo:bar"), ("", "foo:bar")) self.assertEqual(splitdrive(":foo:bar"), ("", ":foo:bar")) self.assertEqual(splitdrive(b"/foo/bar"), (b"", b"/foo/bar")) self.assertEqual(splitdrive(b"foo:bar"), (b"", b"foo:bar")) self.assertEqual(splitdrive(b":foo:bar"), (b"", b":foo:bar")) def test_expandvars(self) -> None: if self.pathmodule.__name__ == 'macpath': self.skipTest('macpath.expandvars is a stub') expandvars = self.pathmodule.expandvars with support.EnvironmentVarGuard() as env: env.clear() env["foo"] = "bar" env["{foo"] = "baz1" env["{foo}"] = "baz2" self.assertEqual(expandvars("foo"), "foo") self.assertEqual(expandvars("$foo bar"), "bar bar") self.assertEqual(expandvars("${foo}bar"), "barbar") self.assertEqual(expandvars("$[foo]bar"), "$[foo]bar") self.assertEqual(expandvars("$bar bar"), "$bar bar") self.assertEqual(expandvars("$?bar"), "$?bar") self.assertEqual(expandvars("${foo}bar"), "barbar") 
self.assertEqual(expandvars("$foo}bar"), "bar}bar") self.assertEqual(expandvars("${foo"), "${foo") self.assertEqual(expandvars("${{foo}}"), "baz1}") self.assertEqual(expandvars("$foo$foo"), "barbar") self.assertEqual(expandvars("$bar$bar"), "$bar$bar") self.assertEqual(expandvars(b"foo"), b"foo") self.assertEqual(expandvars(b"$foo bar"), b"bar bar") self.assertEqual(expandvars(b"${foo}bar"), b"barbar") self.assertEqual(expandvars(b"$[foo]bar"), b"$[foo]bar") self.assertEqual(expandvars(b"$bar bar"), b"$bar bar") self.assertEqual(expandvars(b"$?bar"), b"$?bar") self.assertEqual(expandvars(b"${foo}bar"), b"barbar") self.assertEqual(expandvars(b"$foo}bar"), b"bar}bar") self.assertEqual(expandvars(b"${foo"), b"${foo") self.assertEqual(expandvars(b"${{foo}}"), b"baz1}") self.assertEqual(expandvars(b"$foo$foo"), b"barbar") self.assertEqual(expandvars(b"$bar$bar"), b"$bar$bar") def test_abspath(self) -> None: self.assertIn("foo", self.pathmodule.abspath("foo")) self.assertIn(b"foo", self.pathmodule.abspath(b"foo")) # Abspath returns bytes when the arg is bytes for path in (b'', b'foo', b'f\xf2\xf2', b'/foo', b'C:\\'): self.assertIsInstance(self.pathmodule.abspath(path), bytes) def test_realpath(self) -> None: self.assertIn("foo", self.pathmodule.realpath("foo")) self.assertIn(b"foo", self.pathmodule.realpath(b"foo")) def test_normpath_issue5827(self) -> None: # Make sure normpath preserves unicode for path in ('', '.', '/', '\\', '///foo/.//bar//'): self.assertIsInstance(self.pathmodule.normpath(path), str) def test_abspath_issue3426(self) -> None: # Check that abspath returns unicode when the arg is unicode # with both ASCII and non-ASCII cwds. 
abspath = self.pathmodule.abspath for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'): self.assertIsInstance(abspath(path), str) unicwd = '\xe7w\xf0' try: fsencoding = support.TESTFN_ENCODING or "ascii" unicwd.encode(fsencoding) except (AttributeError, UnicodeEncodeError): # FS encoding is probably ASCII pass else: with support.temp_cwd(unicwd): for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'): self.assertIsInstance(abspath(path), str) @unittest.skipIf(sys.platform == 'darwin', "Mac OS X denies the creation of a directory with an invalid utf8 name") def test_nonascii_abspath(self) -> None: # Test non-ASCII, non-UTF8 bytes in the path. with support.temp_cwd(b'\xe7w\xf0'): self.test_abspath() def test_main() -> None: support.run_unittest(GenericTest) if __name__=="__main__": test_main() mypy-0.761/test-data/stdlib-samples/3.2/test/test_getopt.py0000644€tŠÔÚ€2›s®0000001576413576752246027742 0ustar jukkaDROPBOX\Domain Users00000000000000# test_getopt.py # David Goodger 2000-08-19 from test.support import verbose, run_doctest, run_unittest, EnvironmentVarGuard import unittest import getopt from typing import cast, Any sentinel = object() class GetoptTests(unittest.TestCase): def setUp(self) -> None: self.env = EnvironmentVarGuard() if "POSIXLY_CORRECT" in self.env: del self.env["POSIXLY_CORRECT"] def tearDown(self) -> None: self.env.__exit__() del self.env def assertError(self, *args: Any, **kwargs: Any) -> None: # JLe: work around mypy bug #229 cast(Any, self.assertRaises)(getopt.GetoptError, *args, **kwargs) def test_short_has_arg(self) -> None: self.assertTrue(getopt.short_has_arg('a', 'a:')) self.assertFalse(getopt.short_has_arg('a', 'a')) self.assertError(getopt.short_has_arg, 'a', 'b') def test_long_has_args(self) -> None: has_arg, option = getopt.long_has_args('abc', ['abc=']) self.assertTrue(has_arg) self.assertEqual(option, 'abc') has_arg, option = getopt.long_has_args('abc', ['abc']) self.assertFalse(has_arg) self.assertEqual(option, 'abc') has_arg, 
option = getopt.long_has_args('abc', ['abcd']) self.assertFalse(has_arg) self.assertEqual(option, 'abcd') self.assertError(getopt.long_has_args, 'abc', ['def']) self.assertError(getopt.long_has_args, 'abc', []) self.assertError(getopt.long_has_args, 'abc', ['abcd','abcde']) def test_do_shorts(self) -> None: opts, args = getopt.do_shorts([], 'a', 'a', []) self.assertEqual(opts, [('-a', '')]) self.assertEqual(args, []) opts, args = getopt.do_shorts([], 'a1', 'a:', []) self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, []) #opts, args = getopt.do_shorts([], 'a=1', 'a:', []) #self.assertEqual(opts, [('-a', '1')]) #self.assertEqual(args, []) opts, args = getopt.do_shorts([], 'a', 'a:', ['1']) self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, []) opts, args = getopt.do_shorts([], 'a', 'a:', ['1', '2']) self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, ['2']) self.assertError(getopt.do_shorts, [], 'a1', 'a', []) self.assertError(getopt.do_shorts, [], 'a', 'a:', []) def test_do_longs(self) -> None: opts, args = getopt.do_longs([], 'abc', ['abc'], []) self.assertEqual(opts, [('--abc', '')]) self.assertEqual(args, []) opts, args = getopt.do_longs([], 'abc=1', ['abc='], []) self.assertEqual(opts, [('--abc', '1')]) self.assertEqual(args, []) opts, args = getopt.do_longs([], 'abc=1', ['abcd='], []) self.assertEqual(opts, [('--abcd', '1')]) self.assertEqual(args, []) opts, args = getopt.do_longs([], 'abc', ['ab', 'abc', 'abcd'], []) self.assertEqual(opts, [('--abc', '')]) self.assertEqual(args, []) # Much like the preceding, except with a non-alpha character ("-") in # option name that precedes "="; failed in # http://python.org/sf/126863 opts, args = getopt.do_longs([], 'foo=42', ['foo-bar', 'foo=',], []) self.assertEqual(opts, [('--foo', '42')]) self.assertEqual(args, []) self.assertError(getopt.do_longs, [], 'abc=1', ['abc'], []) self.assertError(getopt.do_longs, [], 'abc', ['abc='], []) def test_getopt(self) -> None: # note: the empty string 
between '-a' and '--beta' is significant: # it simulates an empty string option argument ('-a ""') on the # command line. cmdline = ['-a', '1', '-b', '--alpha=2', '--beta', '-a', '3', '-a', '', '--beta', 'arg1', 'arg2'] opts, args = getopt.getopt(cmdline, 'a:b', ['alpha=', 'beta']) self.assertEqual(opts, [('-a', '1'), ('-b', ''), ('--alpha', '2'), ('--beta', ''), ('-a', '3'), ('-a', ''), ('--beta', '')]) # Note ambiguity of ('-b', '') and ('-a', '') above. This must be # accounted for in the code that calls getopt(). self.assertEqual(args, ['arg1', 'arg2']) self.assertError(getopt.getopt, cmdline, 'a:b', ['alpha', 'beta']) def test_gnu_getopt(self) -> None: # Test handling of GNU style scanning mode. cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2'] # GNU style opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta=']) self.assertEqual(args, ['arg1']) self.assertEqual(opts, [('-a', ''), ('-b', '1'), ('--alpha', ''), ('--beta', '2')]) # recognize "-" as an argument opts, args = getopt.gnu_getopt(['-a', '-', '-b', '-'], 'ab:', []) self.assertEqual(args, ['-']) self.assertEqual(opts, [('-a', ''), ('-b', '-')]) # Posix style via + opts, args = getopt.gnu_getopt(cmdline, '+ab:', ['alpha', 'beta=']) self.assertEqual(opts, [('-a', '')]) self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2']) # Posix style via POSIXLY_CORRECT self.env["POSIXLY_CORRECT"] = "1" opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta=']) self.assertEqual(opts, [('-a', '')]) self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2']) def test_libref_examples(self) -> None: s = """ Examples from the Library Reference: Doc/lib/libgetopt.tex An example using only Unix style options: >>> import getopt >>> args = '-a -b -cfoo -d bar a1 a2'.split() >>> args ['-a', '-b', '-cfoo', '-d', 'bar', 'a1', 'a2'] >>> optlist, args = getopt.getopt(args, 'abc:d:') >>> optlist [('-a', ''), ('-b', ''), ('-c', 'foo'), ('-d', 'bar')] >>> args ['a1', 'a2'] Using long option 
names is equally easy: >>> s = '--condition=foo --testing --output-file abc.def -x a1 a2' >>> args = s.split() >>> args ['--condition=foo', '--testing', '--output-file', 'abc.def', '-x', 'a1', 'a2'] >>> optlist, args = getopt.getopt(args, 'x', [ ... 'condition=', 'output-file=', 'testing']) >>> optlist [('--condition', 'foo'), ('--testing', ''), ('--output-file', 'abc.def'), ('-x', '')] >>> args ['a1', 'a2'] """ import types m = types.ModuleType("libreftest", s) run_doctest(m, verbose) def test_issue4629(self) -> None: longopts, shortopts = getopt.getopt(['--help='], '', ['help=']) self.assertEqual(longopts, [('--help', '')]) longopts, shortopts = getopt.getopt(['--help=x'], '', ['help=']) self.assertEqual(longopts, [('--help', 'x')]) self.assertRaises(getopt.GetoptError, getopt.getopt, ['--help='], '', ['help']) def test_main() -> None: run_unittest(GetoptTests) if __name__ == "__main__": test_main() mypy-0.761/test-data/stdlib-samples/3.2/test/test_glob.py0000644€tŠÔÚ€2›s®0000001073013576752246027347 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from test.support import run_unittest, TESTFN, skip_unless_symlink, can_symlink import glob import os import shutil from typing import TypeVar, Iterable, List, cast T = TypeVar('T') class GlobTests(unittest.TestCase): tempdir = '' # JLe: work around mypy issue #231 def norm(self, first: str, *parts: str) -> str: return os.path.normpath(os.path.join(self.tempdir, first, *parts)) def mktemp(self, *parts: str) -> None: filename = self.norm(*parts) base, file = os.path.split(filename) if not os.path.exists(base): os.makedirs(base) f = open(filename, 'w') f.close() def setUp(self) -> None: self.tempdir = TESTFN+"_dir" self.mktemp('a', 'D') self.mktemp('aab', 'F') self.mktemp('aaa', 'zzzF') self.mktemp('ZZZ') self.mktemp('a', 'bcd', 'EF') self.mktemp('a', 'bcd', 'efg', 'ha') if can_symlink(): os.symlink(self.norm('broken'), self.norm('sym1')) os.symlink(self.norm('broken'), self.norm('sym2')) def tearDown(self) 
-> None: shutil.rmtree(self.tempdir) def glob(self, *parts: str) -> List[str]: if len(parts) == 1: pattern = parts[0] else: pattern = os.path.join(*parts) p = os.path.join(self.tempdir, pattern) res = glob.glob(p) self.assertEqual(list(glob.iglob(p)), res) return res def assertSequencesEqual_noorder(self, l1: Iterable[T], l2: Iterable[T]) -> None: self.assertEqual(set(l1), set(l2)) def test_glob_literal(self) -> None: eq = self.assertSequencesEqual_noorder eq(self.glob('a'), [self.norm('a')]) eq(self.glob('a', 'D'), [self.norm('a', 'D')]) eq(self.glob('aab'), [self.norm('aab')]) eq(self.glob('zymurgy'), cast(List[str], [])) # JLe: work around #230 # test return types are unicode, but only if os.listdir # returns unicode filenames uniset = set([str]) tmp = os.listdir('.') if set(type(x) for x in tmp) == uniset: u1 = glob.glob('*') u2 = glob.glob('./*') self.assertEqual(set(type(r) for r in u1), uniset) self.assertEqual(set(type(r) for r in u2), uniset) def test_glob_one_directory(self) -> None: eq = self.assertSequencesEqual_noorder eq(self.glob('a*'), map(self.norm, ['a', 'aab', 'aaa'])) eq(self.glob('*a'), map(self.norm, ['a', 'aaa'])) eq(self.glob('aa?'), map(self.norm, ['aaa', 'aab'])) eq(self.glob('aa[ab]'), map(self.norm, ['aaa', 'aab'])) eq(self.glob('*q'), cast(List[str], [])) # JLe: work around #230 def test_glob_nested_directory(self) -> None: eq = self.assertSequencesEqual_noorder if os.path.normcase("abCD") == "abCD": # case-sensitive filesystem eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF')]) else: # case insensitive filesystem eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF'), self.norm('a', 'bcd', 'efg')]) eq(self.glob('a', 'bcd', '*g'), [self.norm('a', 'bcd', 'efg')]) def test_glob_directory_names(self) -> None: eq = self.assertSequencesEqual_noorder eq(self.glob('*', 'D'), [self.norm('a', 'D')]) eq(self.glob('*', '*a'), cast(List[str], [])) # JLe: work around #230 eq(self.glob('a', '*', '*', '*a'), [self.norm('a', 'bcd', 
'efg', 'ha')]) eq(self.glob('?a?', '*F'), map(self.norm, [os.path.join('aaa', 'zzzF'), os.path.join('aab', 'F')])) def test_glob_directory_with_trailing_slash(self) -> None: # We are verifying that when there is wildcard pattern which # ends with os.sep doesn't blow up. res = glob.glob(self.tempdir + '*' + os.sep) self.assertEqual(len(res), 1) # either of these results are reasonable self.assertIn(res[0], [self.tempdir, self.tempdir + os.sep]) @skip_unless_symlink def test_glob_broken_symlinks(self) -> None: eq = self.assertSequencesEqual_noorder eq(self.glob('sym*'), [self.norm('sym1'), self.norm('sym2')]) eq(self.glob('sym1'), [self.norm('sym1')]) eq(self.glob('sym2'), [self.norm('sym2')]) def test_main() -> None: run_unittest(GlobTests) if __name__ == "__main__": test_main() mypy-0.761/test-data/stdlib-samples/3.2/test/test_posixpath.py0000644€tŠÔÚ€2›s®0000005420013576752246030443 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from test import support, test_genericpath import posixpath import genericpath import imp imp.reload(posixpath) # Make sure we are using the local copy imp.reload(genericpath) import os import sys from posixpath import realpath, abspath, dirname, basename import posix from typing import cast, Any, TypeVar, Callable T = TypeVar('T') # An absolute path to a temporary filename for testing. We can't rely on TESTFN # being an absolute path, so we need this. ABSTFN = abspath(support.TESTFN) def skip_if_ABSTFN_contains_backslash( test: Callable[[T], None]) -> Callable[[T], None]: """ On Windows, posixpath.abspath still returns paths with backslashes instead of posix forward slashes. If this is the case, several tests fail, so skip them. 
""" found_backslash = '\\' in ABSTFN msg = "ABSTFN is not a posix path - tests fail" return [test, unittest.skip(msg)(test)][found_backslash] def safe_rmdir(dirname: str) -> None: try: os.rmdir(dirname) except OSError: pass class PosixPathTest(unittest.TestCase): def setUp(self) -> None: self.tearDown() def tearDown(self) -> None: for suffix in ["", "1", "2"]: support.unlink(support.TESTFN + suffix) safe_rmdir(support.TESTFN + suffix) def test_join(self) -> None: self.assertEqual(posixpath.join("/foo", "bar", "/bar", "baz"), "/bar/baz") self.assertEqual(posixpath.join("/foo", "bar", "baz"), "/foo/bar/baz") self.assertEqual(posixpath.join("/foo/", "bar/", "baz/"), "/foo/bar/baz/") self.assertEqual(posixpath.join(b"/foo", b"bar", b"/bar", b"baz"), b"/bar/baz") self.assertEqual(posixpath.join(b"/foo", b"bar", b"baz"), b"/foo/bar/baz") self.assertEqual(posixpath.join(b"/foo/", b"bar/", b"baz/"), b"/foo/bar/baz/") self.assertRaises(TypeError, posixpath.join, b"bytes", "str") self.assertRaises(TypeError, posixpath.join, "str", b"bytes") def test_split(self) -> None: self.assertEqual(posixpath.split("/foo/bar"), ("/foo", "bar")) self.assertEqual(posixpath.split("/"), ("/", "")) self.assertEqual(posixpath.split("foo"), ("", "foo")) self.assertEqual(posixpath.split("////foo"), ("////", "foo")) self.assertEqual(posixpath.split("//foo//bar"), ("//foo", "bar")) self.assertEqual(posixpath.split(b"/foo/bar"), (b"/foo", b"bar")) self.assertEqual(posixpath.split(b"/"), (b"/", b"")) self.assertEqual(posixpath.split(b"foo"), (b"", b"foo")) self.assertEqual(posixpath.split(b"////foo"), (b"////", b"foo")) self.assertEqual(posixpath.split(b"//foo//bar"), (b"//foo", b"bar")) def splitextTest(self, path: str, filename: str, ext: str) -> None: self.assertEqual(posixpath.splitext(path), (filename, ext)) self.assertEqual(posixpath.splitext("/" + path), ("/" + filename, ext)) self.assertEqual(posixpath.splitext("abc/" + path), ("abc/" + filename, ext)) 
self.assertEqual(posixpath.splitext("abc.def/" + path), ("abc.def/" + filename, ext)) self.assertEqual(posixpath.splitext("/abc.def/" + path), ("/abc.def/" + filename, ext)) self.assertEqual(posixpath.splitext(path + "/"), (filename + ext + "/", "")) pathb = bytes(path, "ASCII") filenameb = bytes(filename, "ASCII") extb = bytes(ext, "ASCII") self.assertEqual(posixpath.splitext(pathb), (filenameb, extb)) self.assertEqual(posixpath.splitext(b"/" + pathb), (b"/" + filenameb, extb)) self.assertEqual(posixpath.splitext(b"abc/" + pathb), (b"abc/" + filenameb, extb)) self.assertEqual(posixpath.splitext(b"abc.def/" + pathb), (b"abc.def/" + filenameb, extb)) self.assertEqual(posixpath.splitext(b"/abc.def/" + pathb), (b"/abc.def/" + filenameb, extb)) self.assertEqual(posixpath.splitext(pathb + b"/"), (filenameb + extb + b"/", b"")) def test_splitext(self) -> None: self.splitextTest("foo.bar", "foo", ".bar") self.splitextTest("foo.boo.bar", "foo.boo", ".bar") self.splitextTest("foo.boo.biff.bar", "foo.boo.biff", ".bar") self.splitextTest(".csh.rc", ".csh", ".rc") self.splitextTest("nodots", "nodots", "") self.splitextTest(".cshrc", ".cshrc", "") self.splitextTest("...manydots", "...manydots", "") self.splitextTest("...manydots.ext", "...manydots", ".ext") self.splitextTest(".", ".", "") self.splitextTest("..", "..", "") self.splitextTest("........", "........", "") self.splitextTest("", "", "") def test_isabs(self) -> None: self.assertIs(posixpath.isabs(""), False) self.assertIs(posixpath.isabs("/"), True) self.assertIs(posixpath.isabs("/foo"), True) self.assertIs(posixpath.isabs("/foo/bar"), True) self.assertIs(posixpath.isabs("foo/bar"), False) self.assertIs(posixpath.isabs(b""), False) self.assertIs(posixpath.isabs(b"/"), True) self.assertIs(posixpath.isabs(b"/foo"), True) self.assertIs(posixpath.isabs(b"/foo/bar"), True) self.assertIs(posixpath.isabs(b"foo/bar"), False) def test_basename(self) -> None: self.assertEqual(posixpath.basename("/foo/bar"), "bar") 
self.assertEqual(posixpath.basename("/"), "") self.assertEqual(posixpath.basename("foo"), "foo") self.assertEqual(posixpath.basename("////foo"), "foo") self.assertEqual(posixpath.basename("//foo//bar"), "bar") self.assertEqual(posixpath.basename(b"/foo/bar"), b"bar") self.assertEqual(posixpath.basename(b"/"), b"") self.assertEqual(posixpath.basename(b"foo"), b"foo") self.assertEqual(posixpath.basename(b"////foo"), b"foo") self.assertEqual(posixpath.basename(b"//foo//bar"), b"bar") def test_dirname(self) -> None: self.assertEqual(posixpath.dirname("/foo/bar"), "/foo") self.assertEqual(posixpath.dirname("/"), "/") self.assertEqual(posixpath.dirname("foo"), "") self.assertEqual(posixpath.dirname("////foo"), "////") self.assertEqual(posixpath.dirname("//foo//bar"), "//foo") self.assertEqual(posixpath.dirname(b"/foo/bar"), b"/foo") self.assertEqual(posixpath.dirname(b"/"), b"/") self.assertEqual(posixpath.dirname(b"foo"), b"") self.assertEqual(posixpath.dirname(b"////foo"), b"////") self.assertEqual(posixpath.dirname(b"//foo//bar"), b"//foo") def test_islink(self) -> None: self.assertIs(posixpath.islink(support.TESTFN + "1"), False) self.assertIs(posixpath.lexists(support.TESTFN + "2"), False) f = open(support.TESTFN + "1", "wb") try: f.write(b"foo") f.close() self.assertIs(posixpath.islink(support.TESTFN + "1"), False) if support.can_symlink(): os.symlink(support.TESTFN + "1", support.TESTFN + "2") self.assertIs(posixpath.islink(support.TESTFN + "2"), True) os.remove(support.TESTFN + "1") self.assertIs(posixpath.islink(support.TESTFN + "2"), True) self.assertIs(posixpath.exists(support.TESTFN + "2"), False) self.assertIs(posixpath.lexists(support.TESTFN + "2"), True) finally: if not f.closed: f.close() @staticmethod def _create_file(filename: str) -> None: with open(filename, 'wb') as f: f.write(b'foo') def test_samefile(self) -> None: test_fn = support.TESTFN + "1" self._create_file(test_fn) self.assertTrue(posixpath.samefile(test_fn, test_fn)) 
self.assertRaises(TypeError, posixpath.samefile) @unittest.skipIf( sys.platform.startswith('win'), "posixpath.samefile does not work on links in Windows") @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") def test_samefile_on_links(self) -> None: test_fn1 = support.TESTFN + "1" test_fn2 = support.TESTFN + "2" self._create_file(test_fn1) os.symlink(test_fn1, test_fn2) self.assertTrue(posixpath.samefile(test_fn1, test_fn2)) os.remove(test_fn2) self._create_file(test_fn2) self.assertFalse(posixpath.samefile(test_fn1, test_fn2)) def test_samestat(self) -> None: test_fn = support.TESTFN + "1" self._create_file(test_fn) test_fns = [test_fn]*2 stats = map(os.stat, test_fns) self.assertTrue(posixpath.samestat(*stats)) @unittest.skipIf( sys.platform.startswith('win'), "posixpath.samestat does not work on links in Windows") @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") def test_samestat_on_links(self) -> None: test_fn1 = support.TESTFN + "1" test_fn2 = support.TESTFN + "2" self._create_file(test_fn1) test_fns = [test_fn1, test_fn2] cast(Any, os.symlink)(*test_fns) stats = map(os.stat, test_fns) self.assertTrue(posixpath.samestat(*stats)) os.remove(test_fn2) self._create_file(test_fn2) stats = map(os.stat, test_fns) self.assertFalse(posixpath.samestat(*stats)) self.assertRaises(TypeError, posixpath.samestat) def test_ismount(self) -> None: self.assertIs(posixpath.ismount("/"), True) self.assertIs(posixpath.ismount(b"/"), True) def test_ismount_non_existent(self) -> None: # Non-existent mountpoint. self.assertIs(posixpath.ismount(ABSTFN), False) try: os.mkdir(ABSTFN) self.assertIs(posixpath.ismount(ABSTFN), False) finally: safe_rmdir(ABSTFN) @unittest.skipUnless(support.can_symlink(), "Test requires symlink support") def test_ismount_symlinks(self) -> None: # Symlinks are never mountpoints. 
try: os.symlink("/", ABSTFN) self.assertIs(posixpath.ismount(ABSTFN), False) finally: os.unlink(ABSTFN) @unittest.skipIf(posix is None, "Test requires posix module") def test_ismount_different_device(self) -> None: # Simulate the path being on a different device from its parent by # mocking out st_dev. save_lstat = os.lstat def fake_lstat(path): st_ino = 0 st_dev = 0 if path == ABSTFN: st_dev = 1 st_ino = 1 return posix.stat_result((0, st_ino, st_dev, 0, 0, 0, 0, 0, 0, 0)) try: setattr(os, 'lstat', fake_lstat) # mypy: can't modify os directly self.assertIs(posixpath.ismount(ABSTFN), True) finally: setattr(os, 'lstat', save_lstat) def test_expanduser(self) -> None: self.assertEqual(posixpath.expanduser("foo"), "foo") self.assertEqual(posixpath.expanduser(b"foo"), b"foo") try: import pwd except ImportError: pass else: self.assertIsInstance(posixpath.expanduser("~/"), str) self.assertIsInstance(posixpath.expanduser(b"~/"), bytes) # if home directory == root directory, this test makes no sense if posixpath.expanduser("~") != '/': self.assertEqual( posixpath.expanduser("~") + "/", posixpath.expanduser("~/") ) self.assertEqual( posixpath.expanduser(b"~") + b"/", posixpath.expanduser(b"~/") ) self.assertIsInstance(posixpath.expanduser("~root/"), str) self.assertIsInstance(posixpath.expanduser("~foo/"), str) self.assertIsInstance(posixpath.expanduser(b"~root/"), bytes) self.assertIsInstance(posixpath.expanduser(b"~foo/"), bytes) with support.EnvironmentVarGuard() as env: env['HOME'] = '/' self.assertEqual(posixpath.expanduser("~"), "/") # expanduser should fall back to using the password database del env['HOME'] home = pwd.getpwuid(os.getuid()).pw_dir self.assertEqual(posixpath.expanduser("~"), home) def test_normpath(self) -> None: self.assertEqual(posixpath.normpath(""), ".") self.assertEqual(posixpath.normpath("/"), "/") self.assertEqual(posixpath.normpath("//"), "//") self.assertEqual(posixpath.normpath("///"), "/") 
self.assertEqual(posixpath.normpath("///foo/.//bar//"), "/foo/bar") self.assertEqual(posixpath.normpath("///foo/.//bar//.//..//.//baz"), "/foo/baz") self.assertEqual(posixpath.normpath("///..//./foo/.//bar"), "/foo/bar") self.assertEqual(posixpath.normpath(b""), b".") self.assertEqual(posixpath.normpath(b"/"), b"/") self.assertEqual(posixpath.normpath(b"//"), b"//") self.assertEqual(posixpath.normpath(b"///"), b"/") self.assertEqual(posixpath.normpath(b"///foo/.//bar//"), b"/foo/bar") self.assertEqual(posixpath.normpath(b"///foo/.//bar//.//..//.//baz"), b"/foo/baz") self.assertEqual(posixpath.normpath(b"///..//./foo/.//bar"), b"/foo/bar") @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_basic(self) -> None: # Basic operation. try: os.symlink(ABSTFN+"1", ABSTFN) self.assertEqual(realpath(ABSTFN), ABSTFN+"1") finally: support.unlink(ABSTFN) @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_relative(self) -> None: try: os.symlink(posixpath.relpath(ABSTFN+"1"), ABSTFN) self.assertEqual(realpath(ABSTFN), ABSTFN+"1") finally: support.unlink(ABSTFN) @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_symlink_loops(self) -> None: # Bug #930024, return the path unchanged if we get into an infinite # symlink loop. try: old_path = abspath('.') os.symlink(ABSTFN, ABSTFN) self.assertEqual(realpath(ABSTFN), ABSTFN) os.symlink(ABSTFN+"1", ABSTFN+"2") os.symlink(ABSTFN+"2", ABSTFN+"1") self.assertEqual(realpath(ABSTFN+"1"), ABSTFN+"1") self.assertEqual(realpath(ABSTFN+"2"), ABSTFN+"2") # Test using relative path as well. 
os.chdir(dirname(ABSTFN)) self.assertEqual(realpath(basename(ABSTFN)), ABSTFN) finally: os.chdir(old_path) support.unlink(ABSTFN) support.unlink(ABSTFN+"1") support.unlink(ABSTFN+"2") @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_resolve_parents(self) -> None: # We also need to resolve any symlinks in the parents of a relative # path passed to realpath. E.g.: current working directory is # /usr/doc with 'doc' being a symlink to /usr/share/doc. We call # realpath("a"). This should return /usr/share/doc/a/. try: old_path = abspath('.') os.mkdir(ABSTFN) os.mkdir(ABSTFN + "/y") os.symlink(ABSTFN + "/y", ABSTFN + "/k") os.chdir(ABSTFN + "/k") self.assertEqual(realpath("a"), ABSTFN + "/y/a") finally: os.chdir(old_path) support.unlink(ABSTFN + "/k") safe_rmdir(ABSTFN + "/y") safe_rmdir(ABSTFN) @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_resolve_before_normalizing(self) -> None: # Bug #990669: Symbolic links should be resolved before we # normalize the path. E.g.: if we have directories 'a', 'k' and 'y' # in the following hierarchy: # a/k/y # # and a symbolic link 'link-y' pointing to 'y' in directory 'a', # then realpath("link-y/..") should return 'k', not 'a'. try: old_path = abspath('.') os.mkdir(ABSTFN) os.mkdir(ABSTFN + "/k") os.mkdir(ABSTFN + "/k/y") os.symlink(ABSTFN + "/k/y", ABSTFN + "/link-y") # Absolute path. self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k") # Relative path. 
os.chdir(dirname(ABSTFN)) self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."), ABSTFN + "/k") finally: os.chdir(old_path) support.unlink(ABSTFN + "/link-y") safe_rmdir(ABSTFN + "/k/y") safe_rmdir(ABSTFN + "/k") safe_rmdir(ABSTFN) @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_resolve_first(self) -> None: # Bug #1213894: The first component of the path, if not absolute, # must be resolved too. try: old_path = abspath('.') os.mkdir(ABSTFN) os.mkdir(ABSTFN + "/k") os.symlink(ABSTFN, ABSTFN + "link") os.chdir(dirname(ABSTFN)) base = basename(ABSTFN) self.assertEqual(realpath(base + "link"), ABSTFN) self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k") finally: os.chdir(old_path) support.unlink(ABSTFN + "link") safe_rmdir(ABSTFN + "/k") safe_rmdir(ABSTFN) def test_relpath(self) -> None: real_getcwd = os.getcwd # mypy: can't modify os directly setattr(os, 'getcwd', lambda: r"/home/user/bar") try: curdir = os.path.split(os.getcwd())[-1] self.assertRaises(ValueError, posixpath.relpath, "") self.assertEqual(posixpath.relpath("a"), "a") self.assertEqual(posixpath.relpath(posixpath.abspath("a")), "a") self.assertEqual(posixpath.relpath("a/b"), "a/b") self.assertEqual(posixpath.relpath("../a/b"), "../a/b") self.assertEqual(posixpath.relpath("a", "../b"), "../"+curdir+"/a") self.assertEqual(posixpath.relpath("a/b", "../c"), "../"+curdir+"/a/b") self.assertEqual(posixpath.relpath("a", "b/c"), "../../a") self.assertEqual(posixpath.relpath("a", "a"), ".") self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x/y/z"), '../../../foo/bar/bat') self.assertEqual(posixpath.relpath("/foo/bar/bat", "/foo/bar"), 'bat') self.assertEqual(posixpath.relpath("/foo/bar/bat", "/"), 'foo/bar/bat') self.assertEqual(posixpath.relpath("/", "/foo/bar/bat"), '../../..') self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x"), '../foo/bar/bat') self.assertEqual(posixpath.relpath("/x", "/foo/bar/bat"), 
'../../../x') self.assertEqual(posixpath.relpath("/", "/"), '.') self.assertEqual(posixpath.relpath("/a", "/a"), '.') self.assertEqual(posixpath.relpath("/a/b", "/a/b"), '.') finally: setattr(os, 'getcwd', real_getcwd) def test_relpath_bytes(self) -> None: real_getcwdb = os.getcwdb # mypy: can't modify os directly setattr(os, 'getcwdb', lambda: br"/home/user/bar") try: curdir = os.path.split(os.getcwdb())[-1] self.assertRaises(ValueError, posixpath.relpath, b"") self.assertEqual(posixpath.relpath(b"a"), b"a") self.assertEqual(posixpath.relpath(posixpath.abspath(b"a")), b"a") self.assertEqual(posixpath.relpath(b"a/b"), b"a/b") self.assertEqual(posixpath.relpath(b"../a/b"), b"../a/b") self.assertEqual(posixpath.relpath(b"a", b"../b"), b"../"+curdir+b"/a") self.assertEqual(posixpath.relpath(b"a/b", b"../c"), b"../"+curdir+b"/a/b") self.assertEqual(posixpath.relpath(b"a", b"b/c"), b"../../a") self.assertEqual(posixpath.relpath(b"a", b"a"), b".") self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x/y/z"), b'../../../foo/bar/bat') self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/foo/bar"), b'bat') self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/"), b'foo/bar/bat') self.assertEqual(posixpath.relpath(b"/", b"/foo/bar/bat"), b'../../..') self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x"), b'../foo/bar/bat') self.assertEqual(posixpath.relpath(b"/x", b"/foo/bar/bat"), b'../../../x') self.assertEqual(posixpath.relpath(b"/", b"/"), b'.') self.assertEqual(posixpath.relpath(b"/a", b"/a"), b'.') self.assertEqual(posixpath.relpath(b"/a/b", b"/a/b"), b'.') self.assertRaises(TypeError, posixpath.relpath, b"bytes", "str") self.assertRaises(TypeError, posixpath.relpath, "str", b"bytes") finally: setattr(os, 'getcwdb', real_getcwdb) def test_sameopenfile(self) -> None: fname = support.TESTFN + "1" with open(fname, "wb") as a, open(fname, "wb") as b: self.assertTrue(posixpath.sameopenfile(a.fileno(), b.fileno())) class 
PosixCommonTest(test_genericpath.CommonTest): pathmodule = posixpath attributes = ['relpath', 'samefile', 'sameopenfile', 'samestat'] def test_main() -> None: support.run_unittest(PosixPathTest, PosixCommonTest) if __name__=="__main__": test_main() mypy-0.761/test-data/stdlib-samples/3.2/test/test_pprint.py0000644€tŠÔÚ€2›s®0000006514313576752246027750 0ustar jukkaDROPBOX\Domain Users00000000000000import pprint import test.support import unittest import test.test_set import random import collections import itertools from typing import List, Any, Dict, Tuple, cast, Callable # list, tuple and dict subclasses that do or don't overwrite __repr__ class list2(list): pass class list3(list): def __repr__(self) -> str: return list.__repr__(self) class tuple2(tuple): pass class tuple3(tuple): def __repr__(self) -> str: return tuple.__repr__(self) class dict2(dict): pass class dict3(dict): def __repr__(self) -> str: return dict.__repr__(self) class Unorderable: def __repr__(self) -> str: return str(id(self)) class QueryTestCase(unittest.TestCase): def setUp(self) -> None: self.a = list(range(100)) # type: List[Any] self.b = list(range(200)) # type: List[Any] self.a[-12] = self.b def test_basic(self) -> None: # Verify .isrecursive() and .isreadable() w/o recursion pp = pprint.PrettyPrinter() for safe in (2, 2.0, complex(0.0, 2.0), "abc", [3], (2,2), {3: 3}, "yaddayadda", self.a, self.b): # module-level convenience functions self.assertFalse(pprint.isrecursive(safe), "expected not isrecursive for %r" % (safe,)) self.assertTrue(pprint.isreadable(safe), "expected isreadable for %r" % (safe,)) # PrettyPrinter methods self.assertFalse(pp.isrecursive(safe), "expected not isrecursive for %r" % (safe,)) self.assertTrue(pp.isreadable(safe), "expected isreadable for %r" % (safe,)) def test_knotted(self) -> None: # Verify .isrecursive() and .isreadable() w/ recursion # Tie a knot. self.b[67] = self.a # Messy dict. 
self.d = {} # type: Dict[int, dict] self.d[0] = self.d[1] = self.d[2] = self.d pp = pprint.PrettyPrinter() for icky in self.a, self.b, self.d, (self.d, self.d): self.assertTrue(pprint.isrecursive(icky), "expected isrecursive") self.assertFalse(pprint.isreadable(icky), "expected not isreadable") self.assertTrue(pp.isrecursive(icky), "expected isrecursive") self.assertFalse(pp.isreadable(icky), "expected not isreadable") # Break the cycles. self.d.clear() del self.a[:] del self.b[:] for safe in self.a, self.b, self.d, (self.d, self.d): # module-level convenience functions self.assertFalse(pprint.isrecursive(safe), "expected not isrecursive for %r" % (safe,)) self.assertTrue(pprint.isreadable(safe), "expected isreadable for %r" % (safe,)) # PrettyPrinter methods self.assertFalse(pp.isrecursive(safe), "expected not isrecursive for %r" % (safe,)) self.assertTrue(pp.isreadable(safe), "expected isreadable for %r" % (safe,)) def test_unreadable(self) -> None: # Not recursive but not readable anyway pp = pprint.PrettyPrinter() for unreadable in type(3), pprint, pprint.isrecursive: # module-level convenience functions self.assertFalse(pprint.isrecursive(unreadable), "expected not isrecursive for %r" % (unreadable,)) self.assertFalse(pprint.isreadable(unreadable), "expected not isreadable for %r" % (unreadable,)) # PrettyPrinter methods self.assertFalse(pp.isrecursive(unreadable), "expected not isrecursive for %r" % (unreadable,)) self.assertFalse(pp.isreadable(unreadable), "expected not isreadable for %r" % (unreadable,)) def test_same_as_repr(self) -> None: # Simple objects, small containers and classes that overwrite __repr__ # For those the result should be the same as repr(). # Ahem. The docs don't say anything about that -- this appears to # be testing an implementation quirk. Starting in Python 2.5, it's # not true for dicts: pprint always sorts dicts by key now; before, # it sorted a dict display if and only if the display required # multiple lines. 
For that reason, dicts with more than one element # aren't tested here. for simple in (0, 0, complex(0.0), 0.0, "", b"", (), tuple2(), tuple3(), [], list2(), list3(), {}, dict2(), dict3(), self.assertTrue, pprint, -6, -6, complex(-6.,-6.), -1.5, "x", b"x", (3,), [3], {3: 6}, (1,2), [3,4], {5: 6}, tuple2((1,2)), tuple3((1,2)), tuple3(range(100)), # type: ignore [3,4], list2(cast(Any, [3,4])), list3(cast(Any, [3,4])), list3(cast(Any, range(100))), dict2(cast(Any, {5: 6})), dict3(cast(Any, {5: 6})), # JLe: work around mypy issue #233 range(10, -11, -1) ): native = repr(simple) for function in "pformat", "saferepr": f = getattr(pprint, function) got = f(simple) self.assertEqual(native, got, "expected %s got %s from pprint.%s" % (native, got, function)) def test_basic_line_wrap(self) -> None: # verify basic line-wrapping operation o = {'RPM_cal': 0, 'RPM_cal2': 48059, 'Speed_cal': 0, 'controldesk_runtime_us': 0, 'main_code_runtime_us': 0, 'read_io_runtime_us': 0, 'write_io_runtime_us': 43690} exp = """\ {'RPM_cal': 0, 'RPM_cal2': 48059, 'Speed_cal': 0, 'controldesk_runtime_us': 0, 'main_code_runtime_us': 0, 'read_io_runtime_us': 0, 'write_io_runtime_us': 43690}""" # JLe: work around mypy issue #232 for type in cast(List[Any], [dict, dict2]): self.assertEqual(pprint.pformat(type(o)), exp) o2 = range(100) exp = '[%s]' % ',\n '.join(map(str, o2)) for type in cast(List[Any], [list, list2]): self.assertEqual(pprint.pformat(type(o2)), exp) o3 = tuple(range(100)) exp = '(%s)' % ',\n '.join(map(str, o3)) for type in cast(List[Any], [tuple, tuple2]): self.assertEqual(pprint.pformat(type(o3)), exp) # indent parameter o4 = range(100) exp = '[ %s]' % ',\n '.join(map(str, o4)) for type in cast(List[Any], [list, list2]): self.assertEqual(pprint.pformat(type(o4), indent=4), exp) def test_nested_indentations(self) -> None: o1 = list(range(10)) o2 = {'first':1, 'second':2, 'third':3} o = [o1, o2] expected = """\ [ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], { 'first': 1, 'second': 2, 'third': 
3}]""" self.assertEqual(pprint.pformat(o, indent=4, width=42), expected) def test_sorted_dict(self) -> None: # Starting in Python 2.5, pprint sorts dict displays by key regardless # of how small the dictionary may be. # Before the change, on 32-bit Windows pformat() gave order # 'a', 'c', 'b' here, so this test failed. d = {'a': 1, 'b': 1, 'c': 1} self.assertEqual(pprint.pformat(d), "{'a': 1, 'b': 1, 'c': 1}") self.assertEqual(pprint.pformat([d, d]), "[{'a': 1, 'b': 1, 'c': 1}, {'a': 1, 'b': 1, 'c': 1}]") # The next one is kind of goofy. The sorted order depends on the # alphabetic order of type names: "int" < "str" < "tuple". Before # Python 2.5, this was in the test_same_as_repr() test. It's worth # keeping around for now because it's one of few tests of pprint # against a crazy mix of types. self.assertEqual(pprint.pformat({"xy\tab\n": (3,), 5: [[]], (): {}}), r"{5: [[]], 'xy\tab\n': (3,), (): {}}") def test_ordered_dict(self) -> None: words = 'the quick brown fox jumped over a lazy dog'.split() d = collections.OrderedDict(zip(words, itertools.count())) self.assertEqual(pprint.pformat(d), """\ {'the': 0, 'quick': 1, 'brown': 2, 'fox': 3, 'jumped': 4, 'over': 5, 'a': 6, 'lazy': 7, 'dog': 8}""") def test_subclassing(self) -> None: o = {'names with spaces': 'should be presented using repr()', 'others.should.not.be': 'like.this'} exp = """\ {'names with spaces': 'should be presented using repr()', others.should.not.be: like.this}""" self.assertEqual(DottedPrettyPrinter().pformat(o), exp) @test.support.cpython_only def test_set_reprs(self) -> None: # This test creates a complex arrangement of frozensets and # compares the pretty-printed repr against a string hard-coded in # the test. The hard-coded repr depends on the sort order of # frozensets. # # However, as the docs point out: "Since sets only define # partial ordering (subset relationships), the output of the # list.sort() method is undefined for lists of sets." 
# # In a nutshell, the test assumes frozenset({0}) will always # sort before frozenset({1}), but: # # >>> frozenset({0}) < frozenset({1}) # False # >>> frozenset({1}) < frozenset({0}) # False # # Consequently, this test is fragile and # implementation-dependent. Small changes to Python's sort # algorithm cause the test to fail when it should pass. self.assertEqual(pprint.pformat(set()), 'set()') self.assertEqual(pprint.pformat(set(range(3))), '{0, 1, 2}') self.assertEqual(pprint.pformat(frozenset()), 'frozenset()') self.assertEqual(pprint.pformat(frozenset(range(3))), 'frozenset({0, 1, 2})') cube_repr_tgt = """\ {frozenset(): frozenset({frozenset({2}), frozenset({0}), frozenset({1})}), frozenset({0}): frozenset({frozenset(), frozenset({0, 2}), frozenset({0, 1})}), frozenset({1}): frozenset({frozenset(), frozenset({1, 2}), frozenset({0, 1})}), frozenset({2}): frozenset({frozenset(), frozenset({1, 2}), frozenset({0, 2})}), frozenset({1, 2}): frozenset({frozenset({2}), frozenset({1}), frozenset({0, 1, 2})}), frozenset({0, 2}): frozenset({frozenset({2}), frozenset({0}), frozenset({0, 1, 2})}), frozenset({0, 1}): frozenset({frozenset({0}), frozenset({1}), frozenset({0, 1, 2})}), frozenset({0, 1, 2}): frozenset({frozenset({1, 2}), frozenset({0, 2}), frozenset({0, 1})})}""" cube = test.test_set.cube(3) self.assertEqual(pprint.pformat(cube), cube_repr_tgt) cubo_repr_tgt = """\ {frozenset({frozenset({0, 2}), frozenset({0})}): frozenset({frozenset({frozenset({0, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 1})}), frozenset({frozenset(), frozenset({0})}), frozenset({frozenset({2}), frozenset({0, 2})})}), frozenset({frozenset({0, 1}), frozenset({1})}): frozenset({frozenset({frozenset({0, 1}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 1})}), frozenset({frozenset({1}), frozenset({1, 2})}), frozenset({frozenset(), frozenset({1})})}), frozenset({frozenset({1, 2}), frozenset({1})}): frozenset({frozenset({frozenset({1, 2}), 
frozenset({0, 1, 2})}), frozenset({frozenset({2}), frozenset({1, 2})}), frozenset({frozenset(), frozenset({1})}), frozenset({frozenset({1}), frozenset({0, 1})})}), frozenset({frozenset({1, 2}), frozenset({2})}): frozenset({frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({1}), frozenset({1, 2})}), frozenset({frozenset({2}), frozenset({0, 2})}), frozenset({frozenset(), frozenset({2})})}), frozenset({frozenset(), frozenset({0})}): frozenset({frozenset({frozenset({0}), frozenset({0, 1})}), frozenset({frozenset({0}), frozenset({0, 2})}), frozenset({frozenset(), frozenset({1})}), frozenset({frozenset(), frozenset({2})})}), frozenset({frozenset(), frozenset({1})}): frozenset({frozenset({frozenset(), frozenset({0})}), frozenset({frozenset({1}), frozenset({1, 2})}), frozenset({frozenset(), frozenset({2})}), frozenset({frozenset({1}), frozenset({0, 1})})}), frozenset({frozenset({2}), frozenset()}): frozenset({frozenset({frozenset({2}), frozenset({1, 2})}), frozenset({frozenset(), frozenset({0})}), frozenset({frozenset(), frozenset({1})}), frozenset({frozenset({2}), frozenset({0, 2})})}), frozenset({frozenset({0, 1, 2}), frozenset({0, 1})}): frozenset({frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({0, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 1})}), frozenset({frozenset({1}), frozenset({0, 1})})}), frozenset({frozenset({0}), frozenset({0, 1})}): frozenset({frozenset({frozenset(), frozenset({0})}), frozenset({frozenset({0, 1}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 2})}), frozenset({frozenset({1}), frozenset({0, 1})})}), frozenset({frozenset({2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({0, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({2}), frozenset({1, 2})}), frozenset({frozenset({0}), frozenset({0, 2})}), frozenset({frozenset(), frozenset({2})})}), frozenset({frozenset({0, 1, 2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({1, 2}), 
frozenset({0, 1, 2})}), frozenset({frozenset({0, 1}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 2})}), frozenset({frozenset({2}), frozenset({0, 2})})}), frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}): frozenset({frozenset({frozenset({0, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({0, 1}), frozenset({0, 1, 2})}), frozenset({frozenset({2}), frozenset({1, 2})}), frozenset({frozenset({1}), frozenset({1, 2})})})}""" cubo = test.test_set.linegraph(cube) self.assertEqual(pprint.pformat(cubo), cubo_repr_tgt) def test_depth(self) -> None: nested_tuple = (1, (2, (3, (4, (5, 6))))) nested_dict = {1: {2: {3: {4: {5: {6: 6}}}}}} nested_list = [1, [2, [3, [4, [5, [6, []]]]]]] self.assertEqual(pprint.pformat(nested_tuple), repr(nested_tuple)) self.assertEqual(pprint.pformat(nested_dict), repr(nested_dict)) self.assertEqual(pprint.pformat(nested_list), repr(nested_list)) lv1_tuple = '(1, (...))' lv1_dict = '{1: {...}}' lv1_list = '[1, [...]]' self.assertEqual(pprint.pformat(nested_tuple, depth=1), lv1_tuple) self.assertEqual(pprint.pformat(nested_dict, depth=1), lv1_dict) self.assertEqual(pprint.pformat(nested_list, depth=1), lv1_list) def test_sort_unorderable_values(self) -> None: # Issue 3976: sorted pprints fail for unorderable values. 
n = 20 keys = [Unorderable() for i in range(n)] random.shuffle(keys) skeys = sorted(keys, key=id) clean = lambda s: s.replace(' ', '').replace('\n','') # type: Callable[[str], str] self.assertEqual(clean(pprint.pformat(set(keys))), '{' + ','.join(map(repr, skeys)) + '}') self.assertEqual(clean(pprint.pformat(frozenset(keys))), 'frozenset({' + ','.join(map(repr, skeys)) + '})') self.assertEqual(clean(pprint.pformat(dict.fromkeys(keys))), '{' + ','.join('%r:None' % k for k in skeys) + '}') class DottedPrettyPrinter(pprint.PrettyPrinter): def format(self, object: object, context: Dict[int, Any], maxlevels: int, level: int) -> Tuple[str, int, int]: if isinstance(object, str): if ' ' in object: return repr(object), 1, 0 else: return object, 0, 0 else: return pprint.PrettyPrinter.format( self, object, context, maxlevels, level) def test_main() -> None: test.support.run_unittest(QueryTestCase) if __name__ == "__main__": test_main() mypy-0.761/test-data/stdlib-samples/3.2/test/test_random.py0000644€tŠÔÚ€2›s®0000005203213576752246027705 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 import unittest import random import time import pickle import warnings from math import log, exp, pi, fsum, sin from test import support from typing import Any, Dict, List, Callable, Generic, TypeVar, cast RT = TypeVar('RT', random.Random, random.SystemRandom) class TestBasicOps(unittest.TestCase, Generic[RT]): # Superclass with tests common to all generators. # Subclasses must arrange for self.gen to retrieve the Random instance # to be tested. 
gen = None # type: RT # Either Random or SystemRandom def randomlist(self, n: int) -> List[float]: """Helper function to make a list of random numbers""" return [self.gen.random() for i in range(n)] def test_autoseed(self) -> None: self.gen.seed() state1 = self.gen.getstate() time.sleep(0.1) self.gen.seed() # diffent seeds at different times state2 = self.gen.getstate() self.assertNotEqual(state1, state2) def test_saverestore(self) -> None: N = 1000 self.gen.seed() state = self.gen.getstate() randseq = self.randomlist(N) self.gen.setstate(state) # should regenerate the same sequence self.assertEqual(randseq, self.randomlist(N)) def test_seedargs(self) -> None: for arg in [None, 0, 0, 1, 1, -1, -1, 10**20, -(10**20), 3.14, complex(1., 2.), 'a', tuple('abc')]: self.gen.seed(arg) for arg in [list(range(3)), {'one': 1}]: self.assertRaises(TypeError, self.gen.seed, arg) self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4) self.assertRaises(TypeError, type(self.gen), []) # type: ignore # mypy issue 1846 def test_choice(self) -> None: choice = self.gen.choice with self.assertRaises(IndexError): choice([]) self.assertEqual(choice([50]), 50) self.assertIn(choice([25, 75]), [25, 75]) def test_sample(self) -> None: # For the entire allowable range of 0 <= k <= N, validate that # the sample is of the correct length and contains only unique items N = 100 population = range(N) for k in range(N+1): s = self.gen.sample(population, k) self.assertEqual(len(s), k) uniq = set(s) self.assertEqual(len(uniq), k) self.assertTrue(uniq <= set(population)) self.assertEqual(self.gen.sample([], 0), []) # test edge case N==k==0 def test_sample_distribution(self) -> None: # For the entire allowable range of 0 <= k <= N, validate that # sample generates all possible permutations n = 5 pop = range(n) trials = 10000 # large num prevents false negatives without slowing normal case def factorial(n: int) -> int: if n == 0: return 1 return n * factorial(n - 1) for k in range(n): expected = 
factorial(n) // factorial(n-k) perms = {} # type: Dict[tuple, object] for i in range(trials): perms[tuple(self.gen.sample(pop, k))] = None if len(perms) == expected: break else: self.fail() def test_sample_inputs(self) -> None: # SF bug #801342 -- population can be any iterable defining __len__() self.gen.sample(set(range(20)), 2) self.gen.sample(range(20), 2) self.gen.sample(range(20), 2) self.gen.sample(str('abcdefghijklmnopqrst'), 2) self.gen.sample(tuple('abcdefghijklmnopqrst'), 2) def test_sample_on_dicts(self) -> None: self.assertRaises(TypeError, self.gen.sample, dict.fromkeys('abcdef'), 2) def test_gauss(self) -> None: # Ensure that the seed() method initializes all the hidden state. In # particular, through 2.2.1 it failed to reset a piece of state used # by (and only by) the .gauss() method. for seed in 1, 12, 123, 1234, 12345, 123456, 654321: self.gen.seed(seed) x1 = self.gen.random() y1 = self.gen.gauss(0, 1) self.gen.seed(seed) x2 = self.gen.random() y2 = self.gen.gauss(0, 1) self.assertEqual(x1, x2) self.assertEqual(y1, y2) def test_pickling(self) -> None: state = pickle.dumps(self.gen) origseq = [self.gen.random() for i in range(10)] newgen = pickle.loads(state) restoredseq = [newgen.random() for i in range(10)] self.assertEqual(origseq, restoredseq) def test_bug_1727780(self) -> None: # verify that version-2-pickles can be loaded # fine, whether they are created on 32-bit or 64-bit # platforms, and that version-3-pickles load fine. files = [("randv2_32.pck", 780), ("randv2_64.pck", 866), ("randv3.pck", 343)] for file, value in files: f = open(support.findfile(file),"rb") r = pickle.load(f) f.close() self.assertEqual(int(r.random()*1000), value) def test_bug_9025(self) -> None: # Had problem with an uneven distribution in int(n*random()) # Verify the fix by checking that distributions fall within expectations. 
n = 100000 randrange = self.gen.randrange k = sum(randrange(6755399441055744) % 3 == 2 for i in range(n)) self.assertTrue(0.30 < k/n and k/n < .37, (k/n)) class SystemRandom_TestBasicOps(TestBasicOps[random.SystemRandom]): gen = random.SystemRandom() def test_autoseed(self) -> None: # Doesn't need to do anything except not fail self.gen.seed() def test_saverestore(self) -> None: self.assertRaises(NotImplementedError, self.gen.getstate) self.assertRaises(NotImplementedError, self.gen.setstate, None) def test_seedargs(self) -> None: # Doesn't need to do anything except not fail self.gen.seed(100) def test_gauss(self) -> None: self.gen.gauss_next = None self.gen.seed(100) self.assertEqual(self.gen.gauss_next, None) def test_pickling(self) -> None: self.assertRaises(NotImplementedError, pickle.dumps, self.gen) def test_53_bits_per_float(self) -> None: # This should pass whenever a C double has 53 bit precision. span = 2 ** 53 # type: int cum = 0 for i in range(100): cum |= int(self.gen.random() * span) self.assertEqual(cum, span-1) def test_bigrand(self) -> None: # The randrange routine should build-up the required number of bits # in stages so that all bit positions are active. 
span = 2 ** 500 # type: int cum = 0 for i in range(100): r = self.gen.randrange(span) self.assertTrue(0 <= r < span) cum |= r self.assertEqual(cum, span-1) def test_bigrand_ranges(self) -> None: for i in [40,80, 160, 200, 211, 250, 375, 512, 550]: start = self.gen.randrange(2 ** i) stop = self.gen.randrange(2 ** (i-2)) if stop <= start: return self.assertTrue(start <= self.gen.randrange(start, stop) < stop) def test_rangelimits(self) -> None: for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]: self.assertEqual(set(range(start,stop)), set([self.gen.randrange(start,stop) for i in range(100)])) def test_genrandbits(self) -> None: # Verify ranges for k in range(1, 1000): self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k) # Verify all bits active getbits = self.gen.getrandbits for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]: cum = 0 for i in range(100): cum |= getbits(span) self.assertEqual(cum, 2**span-1) # Verify argument checking self.assertRaises(TypeError, self.gen.getrandbits) self.assertRaises(TypeError, self.gen.getrandbits, 1, 2) self.assertRaises(ValueError, self.gen.getrandbits, 0) self.assertRaises(ValueError, self.gen.getrandbits, -1) self.assertRaises(TypeError, self.gen.getrandbits, 10.1) def test_randbelow_logic(self, _log: Callable[[float, float], float] = log, int: Callable[[float], int] = int) -> None: # check bitcount transition points: 2**i and 2**(i+1)-1 # show that: k = int(1.001 + _log(n, 2)) # is equal to or one greater than the number of bits in n for i in range(1, 1000): n = 1 << i # check an exact power of two numbits = i+1 k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) self.assertEqual(n, 2**(k-1)) n += n - 1 # check 1 below the next power of two k = int(1.00001 + _log(n, 2)) self.assertIn(k, [numbits, numbits+1]) self.assertTrue(2**k > n > 2**(k-2)) n -= n >> 15 # check a little farther below the next power of two k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) # note the 
stronger assertion self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion class MersenneTwister_TestBasicOps(TestBasicOps[random.Random]): gen = random.Random() def test_guaranteed_stable(self) -> None: # These sequences are guaranteed to stay the same across versions of python self.gen.seed(3456147, version=1) self.assertEqual([self.gen.random().hex() for i in range(4)], ['0x1.ac362300d90d2p-1', '0x1.9d16f74365005p-1', '0x1.1ebb4352e4c4dp-1', '0x1.1a7422abf9c11p-1']) self.gen.seed("the quick brown fox", version=2) self.assertEqual([self.gen.random().hex() for i in range(4)], ['0x1.1239ddfb11b7cp-3', '0x1.b3cbb5c51b120p-4', '0x1.8c4f55116b60fp-1', '0x1.63eb525174a27p-1']) def test_setstate_first_arg(self) -> None: self.assertRaises(ValueError, self.gen.setstate, (1, None, None)) def test_setstate_middle_arg(self) -> None: # Wrong type, s/b tuple self.assertRaises(TypeError, self.gen.setstate, (2, None, None)) # Wrong length, s/b 625 self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None)) # Wrong type, s/b tuple of 625 ints self.assertRaises(TypeError, self.gen.setstate, (2, tuple(['a',]*625), None)) # Last element s/b an int also self.assertRaises(TypeError, self.gen.setstate, (2, cast(Any, (0,))*624+('a',), None)) def test_referenceImplementation(self) -> None: # Compare the python implementation with results from the original # code. Create 2000 53-bit precision random floats. Compare only # the last ten entries to show that the independent implementations # are tracking. 
Here is the main() function needed to create the # list of expected random numbers: # void main(void){ # int i; # unsigned long init[4]={61731, 24903, 614, 42143}, length=4; # init_by_array(init, length); # for (i=0; i<2000; i++) { # printf("%.15f ", genrand_res53()); # if (i%5==4) printf("\n"); # } # } expected = [0.45839803073713259, 0.86057815201978782, 0.92848331726782152, 0.35932681119782461, 0.081823493762449573, 0.14332226470169329, 0.084297823823520024, 0.53814864671831453, 0.089215024911993401, 0.78486196105372907] self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96)) actual = self.randomlist(2000)[-10:] for a, e in zip(actual, expected): self.assertAlmostEqual(a,e,places=14) def test_strong_reference_implementation(self) -> None: # Like test_referenceImplementation, but checks for exact bit-level # equality. This should pass on any box where C double contains # at least 53 bits of precision (the underlying algorithm suffers # no rounding errors -- all results are exact). from math import ldexp expected = [0x0eab3258d2231f, 0x1b89db315277a5, 0x1db622a5518016, 0x0b7f9af0d575bf, 0x029e4c4db82240, 0x04961892f5d673, 0x02b291598e4589, 0x11388382c15694, 0x02dad977c9e1fe, 0x191d96d4d334c6] self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96)) actual = self.randomlist(2000)[-10:] for a, e in zip(actual, expected): self.assertEqual(int(ldexp(a, 53)), e) def test_long_seed(self) -> None: # This is most interesting to run in debug mode, just to make sure # nothing blows up. Under the covers, a dynamically resized array # is allocated, consuming space proportional to the number of bits # in the seed. Unfortunately, that's a quadratic-time algorithm, # so don't make this horribly big. seed = (1 << (10000 * 8)) - 1 # about 10K bytes self.gen.seed(seed) def test_53_bits_per_float(self) -> None: # This should pass whenever a C double has 53 bit precision. 
span = 2 ** 53 # type: int cum = 0 for i in range(100): cum |= int(self.gen.random() * span) self.assertEqual(cum, span-1) def test_bigrand(self) -> None: # The randrange routine should build-up the required number of bits # in stages so that all bit positions are active. span = 2 ** 500 # type: int cum = 0 for i in range(100): r = self.gen.randrange(span) self.assertTrue(0 <= r < span) cum |= r self.assertEqual(cum, span-1) def test_bigrand_ranges(self) -> None: for i in [40,80, 160, 200, 211, 250, 375, 512, 550]: start = self.gen.randrange(2 ** i) stop = self.gen.randrange(2 ** (i-2)) if stop <= start: return self.assertTrue(start <= self.gen.randrange(start, stop) < stop) def test_rangelimits(self) -> None: for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]: self.assertEqual(set(range(start,stop)), set([self.gen.randrange(start,stop) for i in range(100)])) def test_genrandbits(self) -> None: # Verify cross-platform repeatability self.gen.seed(1234567) self.assertEqual(self.gen.getrandbits(100), 97904845777343510404718956115) # Verify ranges for k in range(1, 1000): self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k) # Verify all bits active getbits = self.gen.getrandbits for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]: cum = 0 for i in range(100): cum |= getbits(span) self.assertEqual(cum, 2**span-1) # Verify argument checking self.assertRaises(TypeError, self.gen.getrandbits) self.assertRaises(TypeError, self.gen.getrandbits, 'a') self.assertRaises(TypeError, self.gen.getrandbits, 1, 2) self.assertRaises(ValueError, self.gen.getrandbits, 0) self.assertRaises(ValueError, self.gen.getrandbits, -1) def test_randbelow_logic(self, _log: Callable[[int, float], float] = log, int: Callable[[float], int] = int) -> None: # check bitcount transition points: 2**i and 2**(i+1)-1 # show that: k = int(1.001 + _log(n, 2)) # is equal to or one greater than the number of bits in n for i in range(1, 1000): n = 1 << i # check an exact power 
of two numbits = i+1 k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) self.assertEqual(n, 2**(k-1)) n += n - 1 # check 1 below the next power of two k = int(1.00001 + _log(n, 2)) self.assertIn(k, [numbits, numbits+1]) self.assertTrue(2**k > n > 2**(k-2)) n -= n >> 15 # check a little farther below the next power of two k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) # note the stronger assertion self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion def test_randrange_bug_1590891(self) -> None: start = 1000000000000 stop = -100000000000000000000 step = -200 x = self.gen.randrange(start, stop, step) self.assertTrue(stop < x <= start) self.assertEqual((x+stop)%step, 0) def gamma(z: float, sqrt2pi: float = (2.0*pi)**0.5) -> float: # Reflection to right half of complex plane if z < 0.5: return pi / sin(pi*z) / gamma(1.0-z) # Lanczos approximation with g=7 az = z + (7.0 - 0.5) return az ** (z-0.5) / exp(az) * sqrt2pi * fsum([ 0.9999999999995183, 676.5203681218835 / z, -1259.139216722289 / (z+1.0), 771.3234287757674 / (z+2.0), -176.6150291498386 / (z+3.0), 12.50734324009056 / (z+4.0), -0.1385710331296526 / (z+5.0), 0.9934937113930748e-05 / (z+6.0), 0.1659470187408462e-06 / (z+7.0), ]) class TestDistributions(unittest.TestCase): def test_zeroinputs(self) -> None: # Verify that distributions can handle a series of zero inputs' g = random.Random() x = [g.random() for i in range(50)] + [0.0]*5 def patch() -> None: setattr(g, 'random', x[:].pop) patch(); g.uniform(1.0,10.0) patch(); g.paretovariate(1.0) patch(); g.expovariate(1.0) patch(); g.weibullvariate(1.0, 1.0) patch(); g.normalvariate(0.0, 1.0) patch(); g.gauss(0.0, 1.0) patch(); g.lognormvariate(0.0, 1.0) patch(); g.vonmisesvariate(0.0, 1.0) patch(); g.gammavariate(0.01, 1.0) patch(); g.gammavariate(1.0, 1.0) patch(); g.gammavariate(200.0, 1.0) patch(); g.betavariate(3.0, 3.0) patch(); g.triangular(0.0, 1.0, 1.0/3.0) def test_avg_std(self) -> None: # Use integration to test distribution 
average and standard deviation. # Only works for distributions which do not consume variates in pairs g = random.Random() N = 5000 x = [i/float(N) for i in range(1,N)] variate = None # type: Any for variate, args, mu, sigmasqrd in [ (g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12), (g.triangular, (0.0, 1.0, 1.0/3.0), 4.0/9.0, 7.0/9.0/18.0), (g.expovariate, (1.5,), 1/1.5, 1/1.5**2), (g.paretovariate, (5.0,), 5.0/(5.0-1), 5.0/((5.0-1)**2*(5.0-2))), (g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0), gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]: setattr(g, 'random', x[:].pop) y = [] # type: List[float] for i in range(len(x)): try: y.append(variate(*args)) except IndexError: pass s1 = s2 = 0.0 for e in y: s1 += e s2 += (e - mu) ** 2 N = len(y) self.assertAlmostEqual(s1/N, mu, places=2) self.assertAlmostEqual(s2/(N-1), sigmasqrd, places=2) class TestModule(unittest.TestCase): def testMagicConstants(self) -> None: self.assertAlmostEqual(random.NV_MAGICCONST, 1.71552776992141) self.assertAlmostEqual(random.TWOPI, 6.28318530718) self.assertAlmostEqual(random.LOG4, 1.38629436111989) self.assertAlmostEqual(random.SG_MAGICCONST, 2.50407739677627) def test__all__(self) -> None: # tests validity but not completeness of the __all__ list self.assertTrue(set(random.__all__) <= set(dir(random))) def test_random_subclass_with_kwargs(self) -> None: # SF bug #1486663 -- this used to erroneously raise a TypeError class Subclass(random.Random): def __init__(self, newarg: object = None) -> None: random.Random.__init__(self) Subclass(newarg=1) def test_main(verbose: bool = None) -> None: testclasses = [MersenneTwister_TestBasicOps, TestDistributions, TestModule] try: random.SystemRandom().random() except NotImplementedError: pass else: testclasses.append(SystemRandom_TestBasicOps) support.run_unittest(*testclasses) # verify reference counting import sys if verbose and hasattr(sys, "gettotalrefcount"): counts = [None] * 5 # type: List[int] for i in range(len(counts)): 
support.run_unittest(*testclasses) counts[i] = sys.gettotalrefcount() print(counts) if __name__ == "__main__": test_main(verbose=True) mypy-0.761/test-data/stdlib-samples/3.2/test/test_set.py0000644€tŠÔÚ€2›s®0000017611513576752246027231 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from test import support import gc import weakref import operator import copy import pickle from random import randrange, shuffle import sys import warnings import collections from typing import Set, Any class PassThru(Exception): pass def check_pass_thru(): raise PassThru yield 1 class BadCmp: def __hash__(self): return 1 def __eq__(self, other): raise RuntimeError class ReprWrapper: 'Used to test self-referential repr() calls' def __repr__(self): return repr(self.value) #class HashCountingInt(int): # 'int-like object that counts the number of times __hash__ is called' # def __init__(self, *args): # self.hash_count = 0 # def __hash__(self): # self.hash_count += 1 # return int.__hash__(self) class TestJointOps(unittest.TestCase): # Tests common to both set and frozenset def setUp(self): self.word = word = 'simsalabim' self.otherword = 'madagascar' self.letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' self.s = self.thetype(word) self.d = dict.fromkeys(word) def test_new_or_init(self): self.assertRaises(TypeError, self.thetype, [], 2) self.assertRaises(TypeError, set().__init__, a=1) def test_uniquification(self): actual = sorted(self.s) expected = sorted(self.d) self.assertEqual(actual, expected) self.assertRaises(PassThru, self.thetype, check_pass_thru()) self.assertRaises(TypeError, self.thetype, [[]]) def test_len(self): self.assertEqual(len(self.s), len(self.d)) def test_contains(self): for c in self.letters: self.assertEqual(c in self.s, c in self.d) self.assertRaises(TypeError, self.s.__contains__, [[]]) s = self.thetype([frozenset(self.letters)]) self.assertIn(self.thetype(self.letters), s) def test_union(self): u = self.s.union(self.otherword) for c 
in self.letters: self.assertEqual(c in u, c in self.d or c in self.otherword) self.assertEqual(self.s, self.thetype(self.word)) self.assertEqual(type(u), self.basetype) self.assertRaises(PassThru, self.s.union, check_pass_thru()) self.assertRaises(TypeError, self.s.union, [[]]) for C in set, frozenset, dict.fromkeys, str, list, tuple: self.assertEqual(self.thetype('abcba').union(C('cdc')), set('abcd')) self.assertEqual(self.thetype('abcba').union(C('efgfe')), set('abcefg')) self.assertEqual(self.thetype('abcba').union(C('ccb')), set('abc')) self.assertEqual(self.thetype('abcba').union(C('ef')), set('abcef')) self.assertEqual(self.thetype('abcba').union(C('ef'), C('fg')), set('abcefg')) # Issue #6573 x = self.thetype() self.assertEqual(x.union(set([1]), x, set([2])), self.thetype([1, 2])) def test_or(self): i = self.s.union(self.otherword) self.assertEqual(self.s | set(self.otherword), i) self.assertEqual(self.s | frozenset(self.otherword), i) try: self.s | self.otherword except TypeError: pass else: self.fail("s|t did not screen-out general iterables") def test_intersection(self): i = self.s.intersection(self.otherword) for c in self.letters: self.assertEqual(c in i, c in self.d and c in self.otherword) self.assertEqual(self.s, self.thetype(self.word)) self.assertEqual(type(i), self.basetype) self.assertRaises(PassThru, self.s.intersection, check_pass_thru()) for C in set, frozenset, dict.fromkeys, str, list, tuple: self.assertEqual(self.thetype('abcba').intersection(C('cdc')), set('cc')) self.assertEqual(self.thetype('abcba').intersection(C('efgfe')), set('')) self.assertEqual(self.thetype('abcba').intersection(C('ccb')), set('bc')) self.assertEqual(self.thetype('abcba').intersection(C('ef')), set('')) self.assertEqual(self.thetype('abcba').intersection(C('cbcf'), C('bag')), set('b')) s = self.thetype('abcba') z = s.intersection() if self.thetype == frozenset(): self.assertEqual(id(s), id(z)) else: self.assertNotEqual(id(s), id(z)) def test_isdisjoint(self): def 
f(s1, s2): 'Pure python equivalent of isdisjoint()' return not set(s1).intersection(s2) for larg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef': s1 = self.thetype(larg) for rarg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef': for C in set, frozenset, dict.fromkeys, str, list, tuple: s2 = C(rarg) actual = s1.isdisjoint(s2) expected = f(s1, s2) self.assertEqual(actual, expected) self.assertTrue(actual is True or actual is False) def test_and(self): i = self.s.intersection(self.otherword) self.assertEqual(self.s & set(self.otherword), i) self.assertEqual(self.s & frozenset(self.otherword), i) try: self.s & self.otherword except TypeError: pass else: self.fail("s&t did not screen-out general iterables") def test_difference(self): i = self.s.difference(self.otherword) for c in self.letters: self.assertEqual(c in i, c in self.d and c not in self.otherword) self.assertEqual(self.s, self.thetype(self.word)) self.assertEqual(type(i), self.basetype) self.assertRaises(PassThru, self.s.difference, check_pass_thru()) self.assertRaises(TypeError, self.s.difference, [[]]) for C in set, frozenset, dict.fromkeys, str, list, tuple: self.assertEqual(self.thetype('abcba').difference(C('cdc')), set('ab')) self.assertEqual(self.thetype('abcba').difference(C('efgfe')), set('abc')) self.assertEqual(self.thetype('abcba').difference(C('ccb')), set('a')) self.assertEqual(self.thetype('abcba').difference(C('ef')), set('abc')) self.assertEqual(self.thetype('abcba').difference(), set('abc')) self.assertEqual(self.thetype('abcba').difference(C('a'), C('b')), set('c')) def test_sub(self): i = self.s.difference(self.otherword) self.assertEqual(self.s - set(self.otherword), i) self.assertEqual(self.s - frozenset(self.otherword), i) try: self.s - self.otherword except TypeError: pass else: self.fail("s-t did not screen-out general iterables") def test_symmetric_difference(self): i = self.s.symmetric_difference(self.otherword) for c in self.letters: 
self.assertEqual(c in i, (c in self.d) ^ (c in self.otherword)) self.assertEqual(self.s, self.thetype(self.word)) self.assertEqual(type(i), self.basetype) self.assertRaises(PassThru, self.s.symmetric_difference, check_pass_thru()) self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) for C in set, frozenset, dict.fromkeys, str, list, tuple: self.assertEqual(self.thetype('abcba').symmetric_difference(C('cdc')), set('abd')) self.assertEqual(self.thetype('abcba').symmetric_difference(C('efgfe')), set('abcefg')) self.assertEqual(self.thetype('abcba').symmetric_difference(C('ccb')), set('a')) self.assertEqual(self.thetype('abcba').symmetric_difference(C('ef')), set('abcef')) def test_xor(self): i = self.s.symmetric_difference(self.otherword) self.assertEqual(self.s ^ set(self.otherword), i) self.assertEqual(self.s ^ frozenset(self.otherword), i) try: self.s ^ self.otherword except TypeError: pass else: self.fail("s^t did not screen-out general iterables") def test_equality(self): self.assertEqual(self.s, set(self.word)) self.assertEqual(self.s, frozenset(self.word)) self.assertEqual(self.s == self.word, False) self.assertNotEqual(self.s, set(self.otherword)) self.assertNotEqual(self.s, frozenset(self.otherword)) self.assertEqual(self.s != self.word, True) def test_setOfFrozensets(self): t = map(frozenset, ['abcdef', 'bcd', 'bdcb', 'fed', 'fedccba']) s = self.thetype(t) self.assertEqual(len(s), 3) def test_sub_and_super(self): p, q, r = map(self.thetype, ['ab', 'abcde', 'def']) self.assertTrue(p < q) self.assertTrue(p <= q) self.assertTrue(q <= q) self.assertTrue(q > p) self.assertTrue(q >= p) self.assertFalse(q < r) self.assertFalse(q <= r) self.assertFalse(q > r) self.assertFalse(q >= r) self.assertTrue(set('a').issubset('abc')) self.assertTrue(set('abc').issuperset('a')) self.assertFalse(set('a').issubset('cbs')) self.assertFalse(set('cbs').issuperset('a')) def test_pickling(self): for i in range(pickle.HIGHEST_PROTOCOL + 1): p = pickle.dumps(self.s, i) dup 
= pickle.loads(p) self.assertEqual(self.s, dup, "%s != %s" % (self.s, dup)) if type(self.s) not in (set, frozenset): self.s.x = 10 p = pickle.dumps(self.s) dup = pickle.loads(p) self.assertEqual(self.s.x, dup.x) def test_deepcopy(self): class Tracer: def __init__(self, value): self.value = value def __hash__(self): return self.value def __deepcopy__(self, memo=None): return Tracer(self.value + 1) t = Tracer(10) s = self.thetype([t]) dup = copy.deepcopy(s) self.assertNotEqual(id(s), id(dup)) for elem in dup: newt = elem self.assertNotEqual(id(t), id(newt)) self.assertEqual(t.value + 1, newt.value) def test_gc(self): # Create a nest of cycles to exercise overall ref count check class A: pass s = set(A() for i in range(1000)) for elem in s: elem.cycle = s elem.sub = elem elem.set = set([elem]) def test_subclass_with_custom_hash(self): raise NotImplementedError() # runtime computed base class below # Bug #1257731 class H: # (self.thetype): def __hash__(self): return int(id(self) & 0x7fffffff) s=H() f=set() f.add(s) self.assertIn(s, f) f.remove(s) f.add(s) f.discard(s) def test_badcmp(self): s = self.thetype([BadCmp()]) # Detect comparison errors during insertion and lookup self.assertRaises(RuntimeError, self.thetype, [BadCmp(), BadCmp()]) self.assertRaises(RuntimeError, s.__contains__, BadCmp()) # Detect errors during mutating operations if hasattr(s, 'add'): self.assertRaises(RuntimeError, s.add, BadCmp()) self.assertRaises(RuntimeError, s.discard, BadCmp()) self.assertRaises(RuntimeError, s.remove, BadCmp()) def test_cyclical_repr(self): w = ReprWrapper() s = self.thetype([w]) w.value = s if self.thetype == set: self.assertEqual(repr(s), '{set(...)}') else: name = repr(s).partition('(')[0] # strip class name self.assertEqual(repr(s), '%s({%s(...)})' % (name, name)) def test_cyclical_print(self): w = ReprWrapper() s = self.thetype([w]) w.value = s fo = open(support.TESTFN, "w") try: fo.write(str(s)) fo.close() fo = open(support.TESTFN, "r") 
self.assertEqual(fo.read(), repr(s)) finally: fo.close() support.unlink(support.TESTFN) def test_do_not_rehash_dict_keys(self): raise NotImplementedError() # cannot subclass int n = 10 d = None # dict.fromkeys(map(HashCountingInt, range(n))) self.assertEqual(sum(elem.hash_count for elem in d), n) s = self.thetype(d) self.assertEqual(sum(elem.hash_count for elem in d), n) s.difference(d) self.assertEqual(sum(elem.hash_count for elem in d), n) if hasattr(s, 'symmetric_difference_update'): s.symmetric_difference_update(d) self.assertEqual(sum(elem.hash_count for elem in d), n) d2 = dict.fromkeys(set(d)) self.assertEqual(sum(elem.hash_count for elem in d), n) d3 = dict.fromkeys(frozenset(d)) self.assertEqual(sum(elem.hash_count for elem in d), n) d3 = dict.fromkeys(frozenset(d), 123) self.assertEqual(sum(elem.hash_count for elem in d), n) self.assertEqual(d3, dict.fromkeys(d, 123)) def test_container_iterator(self): # Bug #3680: tp_traverse was not implemented for set iterator object class C(object): pass obj = C() ref = weakref.ref(obj) container = set([obj, 1]) obj.x = iter(container) obj = None container = None gc.collect() self.assertTrue(ref() is None, "Cycle was not collected") class TestSet(TestJointOps): thetype = set basetype = set def test_init(self): s = self.thetype() s.__init__(self.word) self.assertEqual(s, set(self.word)) s.__init__(self.otherword) self.assertEqual(s, set(self.otherword)) self.assertRaises(TypeError, s.__init__, s, 2); self.assertRaises(TypeError, s.__init__, 1) def test_constructor_identity(self): s = self.thetype(range(3)) t = self.thetype(s) self.assertNotEqual(id(s), id(t)) def test_set_literal(self): raise NotImplementedError() #s = set([1,2,3]) #t = {1,2,3} #self.assertEqual(s, t) def test_hash(self): self.assertRaises(TypeError, hash, self.s) def test_clear(self): self.s.clear() self.assertEqual(self.s, set()) self.assertEqual(len(self.s), 0) def test_copy(self): dup = self.s.copy() self.assertEqual(self.s, dup) 
self.assertNotEqual(id(self.s), id(dup)) self.assertEqual(type(dup), self.basetype) def test_add(self): self.s.add('Q') self.assertIn('Q', self.s) dup = self.s.copy() self.s.add('Q') self.assertEqual(self.s, dup) self.assertRaises(TypeError, self.s.add, []) def test_remove(self): self.s.remove('a') self.assertNotIn('a', self.s) self.assertRaises(KeyError, self.s.remove, 'Q') self.assertRaises(TypeError, self.s.remove, []) s = self.thetype([frozenset(self.word)]) self.assertIn(self.thetype(self.word), s) s.remove(self.thetype(self.word)) self.assertNotIn(self.thetype(self.word), s) self.assertRaises(KeyError, self.s.remove, self.thetype(self.word)) def test_remove_keyerror_unpacking(self): # bug: www.python.org/sf/1576657 for v1 in ['Q', (1,)]: try: self.s.remove(v1) except KeyError as e: v2 = e.args[0] self.assertEqual(v1, v2) else: self.fail() def test_remove_keyerror_set(self): key = self.thetype([3, 4]) try: self.s.remove(key) except KeyError as e: self.assertTrue(e.args[0] is key, "KeyError should be {0}, not {1}".format(key, e.args[0])) else: self.fail() def test_discard(self): self.s.discard('a') self.assertNotIn('a', self.s) self.s.discard('Q') self.assertRaises(TypeError, self.s.discard, []) s = self.thetype([frozenset(self.word)]) self.assertIn(self.thetype(self.word), s) s.discard(self.thetype(self.word)) self.assertNotIn(self.thetype(self.word), s) s.discard(self.thetype(self.word)) def test_pop(self): for i in range(len(self.s)): elem = self.s.pop() self.assertNotIn(elem, self.s) self.assertRaises(KeyError, self.s.pop) def test_update(self): retval = self.s.update(self.otherword) self.assertEqual(retval, None) for c in (self.word + self.otherword): self.assertIn(c, self.s) self.assertRaises(PassThru, self.s.update, check_pass_thru()) self.assertRaises(TypeError, self.s.update, [[]]) for p, q in (('cdc', 'abcd'), ('efgfe', 'abcefg'), ('ccb', 'abc'), ('ef', 'abcef')): for C in set, frozenset, dict.fromkeys, str, list, tuple: s = self.thetype('abcba') 
self.assertEqual(s.update(C(p)), None) self.assertEqual(s, set(q)) for p in ('cdc', 'efgfe', 'ccb', 'ef', 'abcda'): q = 'ahi' for C in set, frozenset, dict.fromkeys, str, list, tuple: s = self.thetype('abcba') self.assertEqual(s.update(C(p), C(q)), None) self.assertEqual(s, set(s) | set(p) | set(q)) def test_ior(self): self.s |= set(self.otherword) for c in (self.word + self.otherword): self.assertIn(c, self.s) def test_intersection_update(self): retval = self.s.intersection_update(self.otherword) self.assertEqual(retval, None) for c in (self.word + self.otherword): if c in self.otherword and c in self.word: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(PassThru, self.s.intersection_update, check_pass_thru()) self.assertRaises(TypeError, self.s.intersection_update, [[]]) for p, q in (('cdc', 'c'), ('efgfe', ''), ('ccb', 'bc'), ('ef', '')): for C in set, frozenset, dict.fromkeys, str, list, tuple: s = self.thetype('abcba') self.assertEqual(s.intersection_update(C(p)), None) self.assertEqual(s, set(q)) ss = 'abcba' s = self.thetype(ss) t = 'cbc' self.assertEqual(s.intersection_update(C(p), C(t)), None) self.assertEqual(s, set('abcba')&set(p)&set(t)) def test_iand(self): self.s &= set(self.otherword) for c in (self.word + self.otherword): if c in self.otherword and c in self.word: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_difference_update(self): retval = self.s.difference_update(self.otherword) self.assertEqual(retval, None) for c in (self.word + self.otherword): if c in self.word and c not in self.otherword: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(PassThru, self.s.difference_update, check_pass_thru()) self.assertRaises(TypeError, self.s.difference_update, [[]]) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) for p, q in (('cdc', 'ab'), ('efgfe', 'abc'), ('ccb', 'a'), ('ef', 'abc')): for C in set, frozenset, dict.fromkeys, str, list, tuple: s = 
self.thetype('abcba') self.assertEqual(s.difference_update(C(p)), None) self.assertEqual(s, set(q)) s = self.thetype('abcdefghih') s.difference_update() self.assertEqual(s, self.thetype('abcdefghih')) s = self.thetype('abcdefghih') s.difference_update(C('aba')) self.assertEqual(s, self.thetype('cdefghih')) s = self.thetype('abcdefghih') s.difference_update(C('cdc'), C('aba')) self.assertEqual(s, self.thetype('efghih')) def test_isub(self): self.s -= set(self.otherword) for c in (self.word + self.otherword): if c in self.word and c not in self.otherword: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_symmetric_difference_update(self): retval = self.s.symmetric_difference_update(self.otherword) self.assertEqual(retval, None) for c in (self.word + self.otherword): if (c in self.word) ^ (c in self.otherword): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(PassThru, self.s.symmetric_difference_update, check_pass_thru()) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) for p, q in (('cdc', 'abd'), ('efgfe', 'abcefg'), ('ccb', 'a'), ('ef', 'abcef')): for C in set, frozenset, dict.fromkeys, str, list, tuple: s = self.thetype('abcba') self.assertEqual(s.symmetric_difference_update(C(p)), None) self.assertEqual(s, set(q)) def test_ixor(self): self.s ^= set(self.otherword) for c in (self.word + self.otherword): if (c in self.word) ^ (c in self.otherword): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_inplace_on_self(self): t = self.s.copy() t |= t self.assertEqual(t, self.s) t &= t self.assertEqual(t, self.s) t -= t self.assertEqual(t, self.thetype()) t = self.s.copy() t ^= t self.assertEqual(t, self.thetype()) def test_weakref(self): s = self.thetype('gallahad') p = weakref.proxy(s) self.assertEqual(str(p), str(s)) s = None self.assertRaises(ReferenceError, str, p) def test_rich_compare(self): class TestRichSetCompare: def __gt__(self, some_set): self.gt_called = True return False 
def __lt__(self, some_set): self.lt_called = True return False def __ge__(self, some_set): self.ge_called = True return False def __le__(self, some_set): self.le_called = True return False # This first tries the builtin rich set comparison, which doesn't know # how to handle the custom object. Upon returning NotImplemented, the # corresponding comparison on the right object is invoked. myset = {1, 2, 3} myobj = TestRichSetCompare() myset < myobj self.assertTrue(myobj.gt_called) myobj = TestRichSetCompare() myset > myobj self.assertTrue(myobj.lt_called) myobj = TestRichSetCompare() myset <= myobj self.assertTrue(myobj.ge_called) myobj = TestRichSetCompare() myset >= myobj self.assertTrue(myobj.le_called) # C API test only available in a debug build if hasattr(set, "test_c_api"): def test_c_api(self): self.assertEqual(set().test_c_api(), True) class SetSubclass(set): pass class TestSetSubclass(TestSet): thetype = SetSubclass basetype = set class SetSubclassWithKeywordArgs(set): def __init__(self, iterable=[], newarg=None): set.__init__(self, iterable) class TestSetSubclassWithKeywordArgs(TestSet): def test_keywords_in_subclass(self): 'SF bug #1486663 -- this used to erroneously raise a TypeError' SetSubclassWithKeywordArgs(newarg=1) class TestFrozenSet(TestJointOps): thetype = frozenset basetype = frozenset def test_init(self): s = self.thetype(self.word) s.__init__(self.otherword) self.assertEqual(s, set(self.word)) def test_singleton_empty_frozenset(self): f = frozenset() efs = [frozenset(), frozenset([]), frozenset(()), frozenset(''), frozenset(), frozenset([]), frozenset(()), frozenset(''), frozenset(range(0)), frozenset(frozenset()), frozenset(f), f] # All of the empty frozensets should have just one id() self.assertEqual(len(set(map(id, efs))), 1) def test_constructor_identity(self): s = self.thetype(range(3)) t = self.thetype(s) self.assertEqual(id(s), id(t)) def test_hash(self): self.assertEqual(hash(self.thetype('abcdeb')), hash(self.thetype('ebecda'))) # 
make sure that all permutations give the same hash value n = 100 seq = [randrange(n) for i in range(n)] results = set() for i in range(200): shuffle(seq) results.add(hash(self.thetype(seq))) self.assertEqual(len(results), 1) def test_copy(self): dup = self.s.copy() self.assertEqual(id(self.s), id(dup)) def test_frozen_as_dictkey(self): seq = list(range(10)) + list('abcdefg') + ['apple'] key1 = self.thetype(seq) key2 = self.thetype(reversed(seq)) self.assertEqual(key1, key2) self.assertNotEqual(id(key1), id(key2)) d = {} d[key1] = 42 self.assertEqual(d[key2], 42) def test_hash_caching(self): f = self.thetype('abcdcda') self.assertEqual(hash(f), hash(f)) def test_hash_effectiveness(self): n = 13 hashvalues = set() addhashvalue = hashvalues.add elemmasks = [(i+1, 1<=": "issuperset", } reverse = {"==": "==", "!=": "!=", "<": ">", ">": "<", "<=": ">=", ">=": "<=", } def test_issubset(self): raise NotImplementedError() # eval not supported below x = self.left y = self.right for case in "!=", "==", "<", "<=", ">", ">=": expected = case in self.cases # Test the binary infix spelling. result = None ## eval("x" + case + "y", locals()) self.assertEqual(result, expected) # Test the "friendly" method-name spelling, if one exists. if case in TestSubsets.case2method: method = getattr(x, TestSubsets.case2method[case]) result = method(y) self.assertEqual(result, expected) # Now do the same for the operands reversed. 
rcase = TestSubsets.reverse[case] result = None ## eval("y" + rcase + "x", locals()) self.assertEqual(result, expected) if rcase in TestSubsets.case2method: method = getattr(y, TestSubsets.case2method[rcase]) result = method(x) self.assertEqual(result, expected) #------------------------------------------------------------------------------ class TestSubsetEqualEmpty(TestSubsets): left = set() # type: Any right = set() # type: Any name = "both empty" cases = "==", "<=", ">=" #------------------------------------------------------------------------------ class TestSubsetEqualNonEmpty(TestSubsets): left = set([1, 2]) right = set([1, 2]) name = "equal pair" cases = "==", "<=", ">=" #------------------------------------------------------------------------------ class TestSubsetEmptyNonEmpty(TestSubsets): left = set() # type: Any right = set([1, 2]) name = "one empty, one non-empty" cases = "!=", "<", "<=" #------------------------------------------------------------------------------ class TestSubsetPartial(TestSubsets): left = set([1]) right = set([1, 2]) name = "one a non-empty proper subset of other" cases = "!=", "<", "<=" #------------------------------------------------------------------------------ class TestSubsetNonOverlap(TestSubsets): left = set([1]) right = set([2]) name = "neither empty, neither contains" cases = "!=" #============================================================================== class TestOnlySetsInBinaryOps(unittest.TestCase): def test_eq_ne(self): # Unlike the others, this is testing that == and != *are* allowed. 
self.assertEqual(self.other == self.set, False) self.assertEqual(self.set == self.other, False) self.assertEqual(self.other != self.set, True) self.assertEqual(self.set != self.other, True) def test_ge_gt_le_lt(self): self.assertRaises(TypeError, lambda: self.set < self.other) self.assertRaises(TypeError, lambda: self.set <= self.other) self.assertRaises(TypeError, lambda: self.set > self.other) self.assertRaises(TypeError, lambda: self.set >= self.other) self.assertRaises(TypeError, lambda: self.other < self.set) self.assertRaises(TypeError, lambda: self.other <= self.set) self.assertRaises(TypeError, lambda: self.other > self.set) self.assertRaises(TypeError, lambda: self.other >= self.set) def test_update_operator(self): try: self.set |= self.other except TypeError: pass else: self.fail("expected TypeError") def test_update(self): if self.otherIsIterable: self.set.update(self.other) else: self.assertRaises(TypeError, self.set.update, self.other) def test_union(self): self.assertRaises(TypeError, lambda: self.set | self.other) self.assertRaises(TypeError, lambda: self.other | self.set) if self.otherIsIterable: self.set.union(self.other) else: self.assertRaises(TypeError, self.set.union, self.other) def test_intersection_update_operator(self): try: self.set &= self.other except TypeError: pass else: self.fail("expected TypeError") def test_intersection_update(self): if self.otherIsIterable: self.set.intersection_update(self.other) else: self.assertRaises(TypeError, self.set.intersection_update, self.other) def test_intersection(self): self.assertRaises(TypeError, lambda: self.set & self.other) self.assertRaises(TypeError, lambda: self.other & self.set) if self.otherIsIterable: self.set.intersection(self.other) else: self.assertRaises(TypeError, self.set.intersection, self.other) def test_sym_difference_update_operator(self): try: self.set ^= self.other except TypeError: pass else: self.fail("expected TypeError") def test_sym_difference_update(self): if 
self.otherIsIterable: self.set.symmetric_difference_update(self.other) else: self.assertRaises(TypeError, self.set.symmetric_difference_update, self.other) def test_sym_difference(self): self.assertRaises(TypeError, lambda: self.set ^ self.other) self.assertRaises(TypeError, lambda: self.other ^ self.set) if self.otherIsIterable: self.set.symmetric_difference(self.other) else: self.assertRaises(TypeError, self.set.symmetric_difference, self.other) def test_difference_update_operator(self): try: self.set -= self.other except TypeError: pass else: self.fail("expected TypeError") def test_difference_update(self): if self.otherIsIterable: self.set.difference_update(self.other) else: self.assertRaises(TypeError, self.set.difference_update, self.other) def test_difference(self): self.assertRaises(TypeError, lambda: self.set - self.other) self.assertRaises(TypeError, lambda: self.other - self.set) if self.otherIsIterable: self.set.difference(self.other) else: self.assertRaises(TypeError, self.set.difference, self.other) #------------------------------------------------------------------------------ class TestOnlySetsNumeric(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = 19 self.otherIsIterable = False #------------------------------------------------------------------------------ class TestOnlySetsDict(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = {1:2, 3:4} self.otherIsIterable = True #------------------------------------------------------------------------------ class TestOnlySetsOperator(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = operator.add self.otherIsIterable = False #------------------------------------------------------------------------------ class TestOnlySetsTuple(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = (2, 4, 6) self.otherIsIterable = True 
#------------------------------------------------------------------------------ class TestOnlySetsString(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = 'abc' self.otherIsIterable = True #------------------------------------------------------------------------------ class TestOnlySetsGenerator(TestOnlySetsInBinaryOps): def setUp(self): def gen(): for i in range(0, 10, 2): yield i self.set = set((1, 2, 3)) self.other = gen() self.otherIsIterable = True #============================================================================== class TestCopying(unittest.TestCase): def test_copy(self): dup = self.set.copy() dup_list = sorted(dup, key=repr) set_list = sorted(self.set, key=repr) self.assertEqual(len(dup_list), len(set_list)) for i in range(len(dup_list)): self.assertTrue(dup_list[i] is set_list[i]) def test_deep_copy(self): dup = copy.deepcopy(self.set) ##print type(dup), repr(dup) dup_list = sorted(dup, key=repr) set_list = sorted(self.set, key=repr) self.assertEqual(len(dup_list), len(set_list)) for i in range(len(dup_list)): self.assertEqual(dup_list[i], set_list[i]) #------------------------------------------------------------------------------ class TestCopyingEmpty(TestCopying): def setUp(self): self.set = set() #------------------------------------------------------------------------------ class TestCopyingSingleton(TestCopying): def setUp(self): self.set = set(["hello"]) #------------------------------------------------------------------------------ class TestCopyingTriple(TestCopying): def setUp(self): self.set = set(["zero", 0, None]) #------------------------------------------------------------------------------ class TestCopyingTuple(TestCopying): def setUp(self): self.set = set([(1, 2)]) #------------------------------------------------------------------------------ class TestCopyingNested(TestCopying): def setUp(self): self.set = set([((1, 2), (3, 4))]) 
#============================================================================== class TestIdentities(unittest.TestCase): def setUp(self): self.a = set('abracadabra') self.b = set('alacazam') def test_binopsVsSubsets(self): a, b = self.a, self.b self.assertTrue(a - b < a) self.assertTrue(b - a < b) self.assertTrue(a & b < a) self.assertTrue(a & b < b) self.assertTrue(a | b > a) self.assertTrue(a | b > b) self.assertTrue(a ^ b < a | b) def test_commutativity(self): a, b = self.a, self.b self.assertEqual(a&b, b&a) self.assertEqual(a|b, b|a) self.assertEqual(a^b, b^a) if a != b: self.assertNotEqual(a-b, b-a) def test_summations(self): # check that sums of parts equal the whole a, b = self.a, self.b self.assertEqual((a-b)|(a&b)|(b-a), a|b) self.assertEqual((a&b)|(a^b), a|b) self.assertEqual(a|(b-a), a|b) self.assertEqual((a-b)|b, a|b) self.assertEqual((a-b)|(a&b), a) self.assertEqual((b-a)|(a&b), b) self.assertEqual((a-b)|(b-a), a^b) def test_exclusion(self): # check that inverse operations show non-overlap a, b, zero = self.a, self.b, set() self.assertEqual((a-b)&b, zero) self.assertEqual((b-a)&a, zero) self.assertEqual((a&b)&(a^b), zero) # Tests derived from test_itertools.py ======================================= def R(seqn): 'Regular generator' for i in seqn: yield i class G: 'Sequence using __getitem__' def __init__(self, seqn): self.seqn = seqn def __getitem__(self, i): return self.seqn[i] class I: 'Sequence using iterator protocol' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self def __next__(self): if self.i >= len(self.seqn): raise StopIteration v = self.seqn[self.i] self.i += 1 return v class Ig: 'Sequence using iterator protocol defined with a generator' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): for val in self.seqn: yield val class X: 'Missing __getitem__ and __iter__' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __next__(self): if self.i >= len(self.seqn): raise StopIteration 
v = self.seqn[self.i] self.i += 1 return v class N: 'Iterator missing __next__()' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self class E: 'Test propagation of exceptions' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self def __next__(self): 3 // 0 class S: 'Test immediate stop' def __init__(self, seqn): pass def __iter__(self): return self def __next__(self): raise StopIteration from itertools import chain def L(seqn): 'Test multiple tiers of iterators' return chain(map(lambda x:x, R(Ig(G(seqn))))) class TestVariousIteratorArgs(unittest.TestCase): def test_constructor(self): for cons in (set, frozenset): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(sorted(cons(g(s)), key=repr), sorted(g(s), key=repr)) self.assertRaises(TypeError, cons , X(s)) self.assertRaises(TypeError, cons , N(s)) self.assertRaises(ZeroDivisionError, cons , E(s)) def test_inline_methods(self): s = set('november') for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'): for meth in (s.union, s.intersection, s.difference, s.symmetric_difference, s.isdisjoint): for g in (G, I, Ig, L, R): expected = meth(data) actual = meth(G(data)) if isinstance(expected, bool): self.assertEqual(actual, expected) else: self.assertEqual(sorted(actual, key=repr), sorted(expected, key=repr)) self.assertRaises(TypeError, meth, X(s)) self.assertRaises(TypeError, meth, N(s)) self.assertRaises(ZeroDivisionError, meth, E(s)) def test_inplace_methods(self): for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'): for methname in ('update', 'intersection_update', 'difference_update', 'symmetric_difference_update'): for g in (G, I, Ig, S, L, R): s = set('january') t = s.copy() getattr(s, methname)(list(g(data))) getattr(t, methname)(g(data)) self.assertEqual(sorted(s, key=repr), sorted(t, key=repr)) self.assertRaises(TypeError, 
getattr(set('january'), methname), X(data)) self.assertRaises(TypeError, getattr(set('january'), methname), N(data)) self.assertRaises(ZeroDivisionError, getattr(set('january'), methname), E(data)) be_bad = set2 = dict2 = None # type: Any class bad_eq: def __eq__(self, other): if be_bad: set2.clear() raise ZeroDivisionError return self is other def __hash__(self): return 0 class bad_dict_clear: def __eq__(self, other): if be_bad: dict2.clear() return self is other def __hash__(self): return 0 class TestWeirdBugs(unittest.TestCase): def test_8420_set_merge(self): # This used to segfault global be_bad, set2, dict2 be_bad = False set1 = {bad_eq()} set2 = {bad_eq() for i in range(75)} be_bad = True self.assertRaises(ZeroDivisionError, set1.update, set2) be_bad = False set1 = {bad_dict_clear()} dict2 = {bad_dict_clear(): None} be_bad = True set1.symmetric_difference_update(dict2) # Application tests (based on David Eppstein's graph recipes ==================================== def powerset(U): """Generates all subsets of a set or sequence U.""" U = iter(U) try: x = frozenset([next(U)]) for S in powerset(U): yield S yield S | x except StopIteration: yield frozenset() def cube(n): """Graph of n-dimensional hypercube.""" singletons = [frozenset([x]) for x in range(n)] return dict([(x, frozenset([x^s for s in singletons])) for x in powerset(range(n))]) def linegraph(G): """Graph, the vertices of which are edges of G, with two vertices being adjacent iff the corresponding edges share a vertex.""" L = {} for x in G: for y in G[x]: nx = [frozenset([x,z]) for z in G[x] if z != y] ny = [frozenset([y,z]) for z in G[y] if z != x] L[frozenset([x,y])] = frozenset(nx+ny) return L def faces(G): 'Return a set of faces in G. 
Where a face is a set of vertices on that face' # currently limited to triangles,squares, and pentagons f = set() for v1, edges in G.items(): for v2 in edges: for v3 in G[v2]: if v1 == v3: continue if v1 in G[v3]: f.add(frozenset([v1, v2, v3])) else: for v4 in G[v3]: if v4 == v2: continue if v1 in G[v4]: f.add(frozenset([v1, v2, v3, v4])) else: for v5 in G[v4]: if v5 == v3 or v5 == v2: continue if v1 in G[v5]: f.add(frozenset([v1, v2, v3, v4, v5])) return f class TestGraphs(unittest.TestCase): def test_cube(self): g = cube(3) # vert --> {v1, v2, v3} vertices1 = set(g) self.assertEqual(len(vertices1), 8) # eight vertices for edge in g.values(): self.assertEqual(len(edge), 3) # each vertex connects to three edges vertices2 = set() for edges in g.values(): for v in edges: vertices2.add(v) self.assertEqual(vertices1, vertices2) # edge vertices in original set cubefaces = faces(g) self.assertEqual(len(cubefaces), 6) # six faces for face in cubefaces: self.assertEqual(len(face), 4) # each face is a square def test_cuboctahedron(self): # http://en.wikipedia.org/wiki/Cuboctahedron # 8 triangular faces and 6 square faces # 12 indentical vertices each connecting a triangle and square g = cube(3) cuboctahedron = linegraph(g) # V( --> {V1, V2, V3, V4} self.assertEqual(len(cuboctahedron), 12)# twelve vertices vertices = set(cuboctahedron) for edges in cuboctahedron.values(): self.assertEqual(len(edges), 4) # each vertex connects to four other vertices othervertices = set(edge for edges in cuboctahedron.values() for edge in edges) self.assertEqual(vertices, othervertices) # edge vertices in original set cubofaces = faces(cuboctahedron) facesizes = collections.defaultdict(int) for face in cubofaces: facesizes[len(face)] += 1 self.assertEqual(facesizes[3], 8) # eight triangular faces self.assertEqual(facesizes[4], 6) # six square faces for vertex in cuboctahedron: edge = vertex # Cuboctahedron vertices are edges in Cube self.assertEqual(len(edge), 2) # Two cube vertices define an 
edge for cubevert in edge: self.assertIn(cubevert, g) #============================================================================== def test_main(verbose=None): test_classes = ( TestSet, TestSetSubclass, TestSetSubclassWithKeywordArgs, TestFrozenSet, TestFrozenSetSubclass, TestSetOfSets, TestExceptionPropagation, TestBasicOpsEmpty, TestBasicOpsSingleton, TestBasicOpsTuple, TestBasicOpsTriple, TestBasicOpsString, TestBasicOpsBytes, TestBasicOpsMixedStringBytes, TestBinaryOps, TestUpdateOps, TestMutate, TestSubsetEqualEmpty, TestSubsetEqualNonEmpty, TestSubsetEmptyNonEmpty, TestSubsetPartial, TestSubsetNonOverlap, TestOnlySetsNumeric, TestOnlySetsDict, TestOnlySetsOperator, TestOnlySetsTuple, TestOnlySetsString, TestOnlySetsGenerator, TestCopyingEmpty, TestCopyingSingleton, TestCopyingTriple, TestCopyingTuple, TestCopyingNested, TestIdentities, TestVariousIteratorArgs, TestGraphs, TestWeirdBugs, ) support.run_unittest(*test_classes) # verify reference counting if verbose and hasattr(sys, "gettotalrefcount"): import gc counts = [None] * 5 for i in range(len(counts)): support.run_unittest(*test_classes) gc.collect() counts[i] = sys.gettotalrefcount() print(counts) if __name__ == "__main__": test_main(verbose=True) mypy-0.761/test-data/stdlib-samples/3.2/test/test_shutil.py0000644€tŠÔÚ€2›s®0000010741513576752246027743 0ustar jukkaDROPBOX\Domain Users00000000000000# Copyright (C) 2003 Python Software Foundation import unittest import shutil import tempfile import sys import stat import os import os.path import functools from test import support from test.support import TESTFN from os.path import splitdrive from distutils.spawn import find_executable, spawn from shutil import (_make_tarball, _make_zipfile, make_archive, register_archive_format, unregister_archive_format, get_archive_formats, Error, unpack_archive, register_unpack_format, RegistryError, unregister_unpack_format, get_unpack_formats) import tarfile import warnings from test import support from test.support 
import check_warnings, captured_stdout from typing import ( Any, Callable, Tuple, List, Sequence, BinaryIO, IO, Union, cast ) from types import TracebackType import bz2 BZ2_SUPPORTED = True TESTFN2 = TESTFN + "2" import grp import pwd UID_GID_SUPPORT = True import zlib import zipfile ZIP_SUPPORT = True def _fake_rename(*args: Any, **kwargs: Any) -> None: # Pretend the destination path is on a different filesystem. raise OSError() def mock_rename(func: Any) -> Any: @functools.wraps(func) def wrap(*args: Any, **kwargs: Any) -> Any: try: builtin_rename = shutil.rename shutil.rename = cast(Any, _fake_rename) return func(*args, **kwargs) finally: shutil.rename = cast(Any, builtin_rename) return wrap class TestShutil(unittest.TestCase): def setUp(self) -> None: super().setUp() self.tempdirs = [] # type: List[str] def tearDown(self) -> None: super().tearDown() while self.tempdirs: d = self.tempdirs.pop() shutil.rmtree(d, os.name in ('nt', 'cygwin')) def write_file(self, path: Union[str, List[str], tuple], content: str = 'xxx') -> None: """Writes a file in the given path. path can be a string or a sequence. """ if isinstance(path, list): path = os.path.join(*path) elif isinstance(path, tuple): path = cast(str, os.path.join(*path)) f = open(path, 'w') try: f.write(content) finally: f.close() def mkdtemp(self) -> str: """Create a temporary directory that will be cleaned up. Returns the path of the directory. """ d = tempfile.mkdtemp() self.tempdirs.append(d) return d def test_rmtree_errors(self) -> None: # filename is guaranteed not to exist filename = tempfile.mktemp() self.assertRaises(OSError, shutil.rmtree, filename) # See bug #1071513 for why we don't run this on cygwin # and bug #1076467 for why we don't run this as root. 
if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin' and not (hasattr(os, 'geteuid') and os.geteuid() == 0)): def test_on_error(self) -> None: self.errorState = 0 os.mkdir(TESTFN) self.childpath = os.path.join(TESTFN, 'a') f = open(self.childpath, 'w') f.close() old_dir_mode = os.stat(TESTFN).st_mode old_child_mode = os.stat(self.childpath).st_mode # Make unwritable. os.chmod(self.childpath, stat.S_IREAD) os.chmod(TESTFN, stat.S_IREAD) shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror) # Test whether onerror has actually been called. self.assertEqual(self.errorState, 2, "Expected call to onerror function did not happen.") # Make writable again. os.chmod(TESTFN, old_dir_mode) os.chmod(self.childpath, old_child_mode) # Clean up. shutil.rmtree(TESTFN) def check_args_to_onerror(self, func: Callable[[str], Any], arg: str, exc: Tuple[type, BaseException, TracebackType]) -> None: # test_rmtree_errors deliberately runs rmtree # on a directory that is chmod 400, which will fail. # This function is run when shutil.rmtree fails. # 99.9% of the time it initially fails to remove # a file in the directory, so the first time through # func is os.remove. # However, some Linux machines running ZFS on # FUSE experienced a failure earlier in the process # at os.listdir. The first failure may legally # be either. if self.errorState == 0: if func is os.remove: self.assertEqual(arg, self.childpath) else: self.assertIs(func, os.listdir, "func must be either os.remove or os.listdir") self.assertEqual(arg, TESTFN) self.assertTrue(issubclass(exc[0], OSError)) self.errorState = 1 else: self.assertEqual(func, os.rmdir) self.assertEqual(arg, TESTFN) self.assertTrue(issubclass(exc[0], OSError)) self.errorState = 2 def test_rmtree_dont_delete_file(self) -> None: # When called on a file instead of a directory, don't delete it. 
handle, path = tempfile.mkstemp() os.fdopen(handle).close() self.assertRaises(OSError, shutil.rmtree, path) os.remove(path) def _write_data(self, path: str, data: str) -> None: f = open(path, "w") f.write(data) f.close() def test_copytree_simple(self) -> None: def read_data(path: str) -> str: f = open(path) data = f.read() f.close() return data src_dir = tempfile.mkdtemp() dst_dir = os.path.join(tempfile.mkdtemp(), 'destination') self._write_data(os.path.join(src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') try: shutil.copytree(src_dir, dst_dir) self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt'))) self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir'))) self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir', 'test.txt'))) actual = read_data(os.path.join(dst_dir, 'test.txt')) self.assertEqual(actual, '123') actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt')) self.assertEqual(actual, '456') finally: for path in ( os.path.join(src_dir, 'test.txt'), os.path.join(dst_dir, 'test.txt'), os.path.join(src_dir, 'test_dir', 'test.txt'), os.path.join(dst_dir, 'test_dir', 'test.txt'), ): if os.path.exists(path): os.remove(path) for path in (src_dir, os.path.dirname(dst_dir) ): if os.path.exists(path): shutil.rmtree(path) def test_copytree_with_exclude(self) -> None: def read_data(path: str) -> str: f = open(path) data = f.read() f.close() return data # creating data join = os.path.join exists = os.path.exists src_dir = tempfile.mkdtemp() try: dst_dir = join(tempfile.mkdtemp(), 'destination') self._write_data(join(src_dir, 'test.txt'), '123') self._write_data(join(src_dir, 'test.tmp'), '123') os.mkdir(join(src_dir, 'test_dir')) self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2')) self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2', 'subdir')) 
os.mkdir(join(src_dir, 'test_dir2', 'subdir2')) self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'), '456') self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'), '456') # testing glob-like patterns try: patterns = shutil.ignore_patterns('*.tmp', 'test_dir2') shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied self.assertTrue(exists(join(dst_dir, 'test.txt'))) self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2'))) finally: if os.path.exists(dst_dir): shutil.rmtree(dst_dir) try: patterns = shutil.ignore_patterns('*.tmp', 'subdir*') shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: if os.path.exists(dst_dir): shutil.rmtree(dst_dir) # testing callable-style try: def _filter(src: str, names: Sequence[str]) -> List[str]: res = [] # type: List[str] for name in names: path = os.path.join(src, name) if (os.path.isdir(path) and path.split()[-1] == 'subdir'): res.append(name) elif os.path.splitext(path)[-1] in ('.py'): res.append(name) return res shutil.copytree(src_dir, dst_dir, ignore=_filter) # checking the result: some elements should not be copied self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2', 'test.py'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: if os.path.exists(dst_dir): shutil.rmtree(dst_dir) finally: shutil.rmtree(src_dir) shutil.rmtree(os.path.dirname(dst_dir)) @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') def test_dont_copy_file_onto_link_to_itself(self) -> None: # Temporarily disable test on Windows. if os.name == 'nt': return # bug 851123. 
os.mkdir(TESTFN) src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') try: with open(src, 'w') as f: f.write('cheddar') os.link(src, dst) self.assertRaises(shutil.Error, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) finally: shutil.rmtree(TESTFN, ignore_errors=True) @support.skip_unless_symlink def test_dont_copy_file_onto_symlink_to_itself(self) -> None: # bug 851123. os.mkdir(TESTFN) src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') try: with open(src, 'w') as f: f.write('cheddar') # Using `src` here would mean we end up with a symlink pointing # to TESTFN/TESTFN/cheese, while it should point at # TESTFN/cheese. os.symlink('cheese', dst) self.assertRaises(shutil.Error, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) finally: shutil.rmtree(TESTFN, ignore_errors=True) @support.skip_unless_symlink def test_rmtree_on_symlink(self) -> None: # bug 1669. 
os.mkdir(TESTFN) try: src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') os.mkdir(src) os.symlink(src, dst) self.assertRaises(OSError, shutil.rmtree, dst) finally: shutil.rmtree(TESTFN, ignore_errors=True) if hasattr(os, "mkfifo"): # Issue #3002: copyfile and copytree block indefinitely on named pipes def test_copyfile_named_pipe(self) -> None: os.mkfifo(TESTFN) try: self.assertRaises(shutil.SpecialFileError, shutil.copyfile, TESTFN, TESTFN2) self.assertRaises(shutil.SpecialFileError, shutil.copyfile, __file__, TESTFN) finally: os.remove(TESTFN) @support.skip_unless_symlink def test_copytree_named_pipe(self) -> None: os.mkdir(TESTFN) try: subdir = os.path.join(TESTFN, "subdir") os.mkdir(subdir) pipe = os.path.join(subdir, "mypipe") os.mkfifo(pipe) try: shutil.copytree(TESTFN, TESTFN2) except shutil.Error as e: errors = e.args[0] self.assertEqual(len(errors), 1) src, dst, error_msg = errors[0] self.assertEqual("`%s` is a named pipe" % pipe, error_msg) else: self.fail("shutil.Error should have been raised") finally: shutil.rmtree(TESTFN, ignore_errors=True) shutil.rmtree(TESTFN2, ignore_errors=True) def test_copytree_special_func(self) -> None: src_dir = self.mkdtemp() dst_dir = os.path.join(self.mkdtemp(), 'destination') self._write_data(os.path.join(src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') copied = [] # type: List[Tuple[str, str]] def _copy(src: str, dst: str) -> None: copied.append((src, dst)) shutil.copytree(src_dir, dst_dir, copy_function=_copy) self.assertEqual(len(copied), 2) @support.skip_unless_symlink def test_copytree_dangling_symlinks(self) -> None: # a dangling symlink raises an error at the end src_dir = self.mkdtemp() dst_dir = os.path.join(self.mkdtemp(), 'destination') os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt')) os.mkdir(os.path.join(src_dir, 'test_dir')) self._write_data(os.path.join(src_dir, 'test_dir', 
'test.txt'), '456') self.assertRaises(Error, shutil.copytree, src_dir, dst_dir) # a dangling symlink is ignored with the proper flag dst_dir = os.path.join(self.mkdtemp(), 'destination2') shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True) self.assertNotIn('test.txt', os.listdir(dst_dir)) # a dangling symlink is copied if symlinks=True dst_dir = os.path.join(self.mkdtemp(), 'destination3') shutil.copytree(src_dir, dst_dir, symlinks=True) self.assertIn('test.txt', os.listdir(dst_dir)) def _copy_file(self, method: Callable[[str, str], None]) -> Tuple[str, str]: fname = 'test.txt' tmpdir = self.mkdtemp() self.write_file([tmpdir, fname]) file1 = os.path.join(tmpdir, fname) tmpdir2 = self.mkdtemp() method(file1, tmpdir2) file2 = os.path.join(tmpdir2, fname) return (file1, file2) @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod') def test_copy(self) -> None: # Ensure that the copied file exists and has the same mode bits. file1, file2 = self._copy_file(shutil.copy) self.assertTrue(os.path.exists(file2)) self.assertEqual(os.stat(file1).st_mode, os.stat(file2).st_mode) @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod') @unittest.skipUnless(hasattr(os, 'utime'), 'requires os.utime') def test_copy2(self) -> None: # Ensure that the copied file exists and has the same mode and # modification time bits. file1, file2 = self._copy_file(shutil.copy2) self.assertTrue(os.path.exists(file2)) file1_stat = os.stat(file1) file2_stat = os.stat(file2) self.assertEqual(file1_stat.st_mode, file2_stat.st_mode) for attr in 'st_atime', 'st_mtime': # The modification times may be truncated in the new file. 
self.assertLessEqual(getattr(file1_stat, attr), getattr(file2_stat, attr) + 1) if hasattr(os, 'chflags') and hasattr(file1_stat, 'st_flags'): self.assertEqual(getattr(file1_stat, 'st_flags'), getattr(file2_stat, 'st_flags')) @unittest.skipUnless(zlib, "requires zlib") def test_make_tarball(self) -> None: # creating something to tar tmpdir = self.mkdtemp() self.write_file([tmpdir, 'file1'], 'xxx') self.write_file([tmpdir, 'file2'], 'xxx') os.mkdir(os.path.join(tmpdir, 'sub')) self.write_file([tmpdir, 'sub', 'file3'], 'xxx') tmpdir2 = self.mkdtemp() # force shutil to create the directory os.rmdir(tmpdir2) unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], "source and target should be on same drive") base_name = os.path.join(tmpdir2, 'archive') # working with relative paths to avoid tar warnings old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(splitdrive(base_name)[1], '.') finally: os.chdir(old_dir) # check if the compressed tarball was created tarball = base_name + '.tar.gz' self.assertTrue(os.path.exists(tarball)) # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(splitdrive(base_name)[1], '.', compress=None) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) def _tarinfo(self, path: str) -> tuple: tar = tarfile.open(path) try: names = tar.getnames() names.sort() return tuple(names) finally: tar.close() def _create_files(self) -> Tuple[str, str, str]: # creating something to tar tmpdir = self.mkdtemp() dist = os.path.join(tmpdir, 'dist') os.mkdir(dist) self.write_file([dist, 'file1'], 'xxx') self.write_file([dist, 'file2'], 'xxx') os.mkdir(os.path.join(dist, 'sub')) self.write_file([dist, 'sub', 'file3'], 'xxx') os.mkdir(os.path.join(dist, 'sub2')) tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') return tmpdir, tmpdir2, base_name @unittest.skipUnless(zlib, "Requires zlib") 
@unittest.skipUnless(find_executable('tar') and find_executable('gzip'), 'Need the tar command to run') def test_tarfile_vs_tar(self) -> None: tmpdir, tmpdir2, base_name = self._create_files() old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(base_name, 'dist') finally: os.chdir(old_dir) # check if the compressed tarball was created tarball = base_name + '.tar.gz' self.assertTrue(os.path.exists(tarball)) # now create another tarball using `tar` tarball2 = os.path.join(tmpdir, 'archive2.tar.gz') tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist'] gzip_cmd = ['gzip', '-f9', 'archive2.tar'] old_dir = os.getcwd() os.chdir(tmpdir) try: with captured_stdout() as s: spawn(tar_cmd) spawn(gzip_cmd) finally: os.chdir(old_dir) self.assertTrue(os.path.exists(tarball2)) # let's compare both tarballs self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2)) # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(base_name, 'dist', compress=None) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) # now for a dry_run base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(base_name, 'dist', compress=None, dry_run=True) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) @unittest.skipUnless(zlib, "Requires zlib") @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') def test_make_zipfile(self) -> None: # creating something to tar tmpdir = self.mkdtemp() self.write_file([tmpdir, 'file1'], 'xxx') self.write_file([tmpdir, 'file2'], 'xxx') tmpdir2 = self.mkdtemp() # force shutil to create the directory os.rmdir(tmpdir2) base_name = os.path.join(tmpdir2, 'archive') _make_zipfile(base_name, tmpdir) # check if the compressed tarball was created tarball = base_name + '.zip' self.assertTrue(os.path.exists(tarball)) def test_make_archive(self) -> None: tmpdir = 
self.mkdtemp() base_name = os.path.join(tmpdir, 'archive') self.assertRaises(ValueError, make_archive, base_name, 'xxx') @unittest.skipUnless(zlib, "Requires zlib") def test_make_archive_owner_group(self) -> None: # testing make_archive with owner and group, with various combinations # this works even if there's not gid/uid support if UID_GID_SUPPORT: group = grp.getgrgid(0).gr_name owner = pwd.getpwuid(0).pw_name else: group = owner = 'root' base_dir, root_dir, base_name = self._create_files() base_name = os.path.join(self.mkdtemp() , 'archive') res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, group=group) self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'zip', root_dir, base_dir) self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'tar', root_dir, base_dir, owner=owner, group=group) self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'tar', root_dir, base_dir, owner='kjhkjhkjg', group='oihohoh') self.assertTrue(os.path.exists(res)) @unittest.skipUnless(zlib, "Requires zlib") @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") def test_tarfile_root_owner(self) -> None: tmpdir, tmpdir2, base_name = self._create_files() old_dir = os.getcwd() os.chdir(tmpdir) group = grp.getgrgid(0).gr_name owner = pwd.getpwuid(0).pw_name try: archive_name = _make_tarball(base_name, 'dist', compress=None, owner=owner, group=group) finally: os.chdir(old_dir) # check if the compressed tarball was created self.assertTrue(os.path.exists(archive_name)) # now checks the rights archive = tarfile.open(archive_name) try: for member in archive.getmembers(): self.assertEqual(member.uid, 0) self.assertEqual(member.gid, 0) finally: archive.close() def test_make_archive_cwd(self) -> None: current_dir = os.getcwd() def _breaks(*args: Any, **kw: Any) -> None: raise RuntimeError() register_archive_format('xxx', _breaks, [], 'xxx file') try: try: make_archive('xxx', 'xxx', root_dir=self.mkdtemp()) except Exception: 
pass self.assertEqual(os.getcwd(), current_dir) finally: unregister_archive_format('xxx') def test_register_archive_format(self) -> None: self.assertRaises(TypeError, register_archive_format, 'xxx', 1) self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: 1/0, 1) self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: 1/0, [(1, 2), (1, 2, 3)]) register_archive_format('xxx', lambda: 1/0, [('x', 2)], 'xxx file') formats = [name for name, params in get_archive_formats()] self.assertIn('xxx', formats) unregister_archive_format('xxx') formats = [name for name, params in get_archive_formats()] self.assertNotIn('xxx', formats) def _compare_dirs(self, dir1: str, dir2: str) -> List[str]: # check that dir1 and dir2 are equivalent, # return the diff diff = [] # type: List[str] for root, dirs, files in os.walk(dir1): for file_ in files: path = os.path.join(root, file_) target_path = os.path.join(dir2, os.path.split(path)[-1]) if not os.path.exists(target_path): diff.append(file_) return diff @unittest.skipUnless(zlib, "Requires zlib") def test_unpack_archive(self) -> None: formats = ['tar', 'gztar', 'zip'] if BZ2_SUPPORTED: formats.append('bztar') for format in formats: tmpdir = self.mkdtemp() base_dir, root_dir, base_name = self._create_files() tmpdir2 = self.mkdtemp() filename = make_archive(base_name, format, root_dir, base_dir) # let's try to unpack it now unpack_archive(filename, tmpdir2) diff = self._compare_dirs(tmpdir, tmpdir2) self.assertEqual(diff, []) # and again, this time with the format specified tmpdir3 = self.mkdtemp() unpack_archive(filename, tmpdir3, format=format) diff = self._compare_dirs(tmpdir, tmpdir3) self.assertEqual(diff, []) self.assertRaises(shutil.ReadError, unpack_archive, TESTFN) self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx') def test_unpack_registery(self) -> None: formats = get_unpack_formats() def _boo(filename: str, extract_dir: str, extra: int) -> None: self.assertEqual(extra, 1) 
self.assertEqual(filename, 'stuff.boo') self.assertEqual(extract_dir, 'xx') register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)]) unpack_archive('stuff.boo', 'xx') # trying to register a .boo unpacker again self.assertRaises(RegistryError, register_unpack_format, 'Boo2', ['.boo'], _boo) # should work now unregister_unpack_format('Boo') register_unpack_format('Boo2', ['.boo'], _boo) self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats()) self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats()) # let's leave a clean state unregister_unpack_format('Boo2') self.assertEqual(get_unpack_formats(), formats) class TestMove(unittest.TestCase): def setUp(self) -> None: filename = "foo" self.src_dir = tempfile.mkdtemp() self.dst_dir = tempfile.mkdtemp() self.src_file = os.path.join(self.src_dir, filename) self.dst_file = os.path.join(self.dst_dir, filename) with open(self.src_file, "wb") as f: f.write(b"spam") def tearDown(self) -> None: for d in (self.src_dir, self.dst_dir): try: if d: shutil.rmtree(d) except: pass def _check_move_file(self, src: str, dst: str, real_dst: str) -> None: with open(src, "rb") as f: contents = f.read() shutil.move(src, dst) with open(real_dst, "rb") as f: self.assertEqual(contents, f.read()) self.assertFalse(os.path.exists(src)) def _check_move_dir(self, src: str, dst: str, real_dst: str) -> None: contents = sorted(os.listdir(src)) shutil.move(src, dst) self.assertEqual(contents, sorted(os.listdir(real_dst))) self.assertFalse(os.path.exists(src)) def test_move_file(self) -> None: # Move a file to another location on the same filesystem. self._check_move_file(self.src_file, self.dst_file, self.dst_file) def test_move_file_to_dir(self) -> None: # Move a file inside an existing dir on the same filesystem. self._check_move_file(self.src_file, self.dst_dir, self.dst_file) @mock_rename def test_move_file_other_fs(self) -> None: # Move a file to an existing dir on another filesystem. 
self.test_move_file() @mock_rename def test_move_file_to_dir_other_fs(self) -> None: # Move a file to another location on another filesystem. self.test_move_file_to_dir() def test_move_dir(self) -> None: # Move a dir to another location on the same filesystem. dst_dir = tempfile.mktemp() try: self._check_move_dir(self.src_dir, dst_dir, dst_dir) finally: try: shutil.rmtree(dst_dir) except: pass @mock_rename def test_move_dir_other_fs(self) -> None: # Move a dir to another location on another filesystem. self.test_move_dir() def test_move_dir_to_dir(self) -> None: # Move a dir inside an existing dir on the same filesystem. self._check_move_dir(self.src_dir, self.dst_dir, os.path.join(self.dst_dir, os.path.basename(self.src_dir))) @mock_rename def test_move_dir_to_dir_other_fs(self) -> None: # Move a dir inside an existing dir on another filesystem. self.test_move_dir_to_dir() def test_existing_file_inside_dest_dir(self) -> None: # A file with the same name inside the destination dir already exists. with open(self.dst_file, "wb"): pass self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir) def test_dont_move_dir_in_itself(self) -> None: # Moving a dir inside itself raises an Error. 
dst = os.path.join(self.src_dir, "bar") self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst) def test_destinsrc_false_negative(self) -> None: os.mkdir(TESTFN) try: for src, dst in [('srcdir', 'srcdir/dest')]: src = os.path.join(TESTFN, src) dst = os.path.join(TESTFN, dst) self.assertTrue(shutil._destinsrc(src, dst), msg='_destinsrc() wrongly concluded that ' 'dst (%s) is not in src (%s)' % (dst, src)) finally: shutil.rmtree(TESTFN, ignore_errors=True) def test_destinsrc_false_positive(self) -> None: os.mkdir(TESTFN) try: for src, dst in [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]: src = os.path.join(TESTFN, src) dst = os.path.join(TESTFN, dst) self.assertFalse(shutil._destinsrc(src, dst), msg='_destinsrc() wrongly concluded that ' 'dst (%s) is in src (%s)' % (dst, src)) finally: shutil.rmtree(TESTFN, ignore_errors=True) class TestCopyFile(unittest.TestCase): _delete = False class Faux(object): _entered = False _exited_with = None # type: tuple _raised = False def __init__(self, raise_in_exit: bool = False, suppress_at_exit: bool = True) -> None: self._raise_in_exit = raise_in_exit self._suppress_at_exit = suppress_at_exit def read(self, *args: Any) -> str: return '' def __enter__(self) -> None: self._entered = True def __exit__(self, exc_type: type, exc_val: BaseException, exc_tb: TracebackType) -> bool: self._exited_with = exc_type, exc_val, exc_tb if self._raise_in_exit: self._raised = True raise IOError("Cannot close") return self._suppress_at_exit def tearDown(self) -> None: shutil.open = open def _set_shutil_open(self, func: Any) -> None: shutil.open = func self._delete = True def test_w_source_open_fails(self) -> None: def _open(filename: str, mode: str= 'r') -> BinaryIO: if filename == 'srcfile': raise IOError('Cannot open "srcfile"') assert 0 # shouldn't reach here. 
self._set_shutil_open(_open) self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile') def test_w_dest_open_fails(self) -> None: srcfile = TestCopyFile.Faux() def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux: if filename == 'srcfile': return srcfile if filename == 'destfile': raise IOError('Cannot open "destfile"') assert 0 # shouldn't reach here. self._set_shutil_open(_open) shutil.copyfile('srcfile', 'destfile') self.assertTrue(srcfile._entered) self.assertTrue(srcfile._exited_with[0] is IOError) self.assertEqual(srcfile._exited_with[1].args, ('Cannot open "destfile"',)) def test_w_dest_close_fails(self) -> None: srcfile = TestCopyFile.Faux() destfile = TestCopyFile.Faux(True) def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux: if filename == 'srcfile': return srcfile if filename == 'destfile': return destfile assert 0 # shouldn't reach here. self._set_shutil_open(_open) shutil.copyfile('srcfile', 'destfile') self.assertTrue(srcfile._entered) self.assertTrue(destfile._entered) self.assertTrue(destfile._raised) self.assertTrue(srcfile._exited_with[0] is IOError) self.assertEqual(srcfile._exited_with[1].args, ('Cannot close',)) def test_w_source_close_fails(self) -> None: srcfile = TestCopyFile.Faux(True) destfile = TestCopyFile.Faux() def _open(filename: str, mode: str= 'r') -> TestCopyFile.Faux: if filename == 'srcfile': return srcfile if filename == 'destfile': return destfile assert 0 # shouldn't reach here. self._set_shutil_open(_open) self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile') self.assertTrue(srcfile._entered) self.assertTrue(destfile._entered) self.assertFalse(destfile._raised) self.assertTrue(srcfile._exited_with[0] is None) self.assertTrue(srcfile._raised) def test_move_dir_caseinsensitive(self) -> None: # Renames a folder to the same name # but a different case. 
self.src_dir = tempfile.mkdtemp() dst_dir = os.path.join( os.path.dirname(self.src_dir), os.path.basename(self.src_dir).upper()) self.assertNotEqual(self.src_dir, dst_dir) try: shutil.move(self.src_dir, dst_dir) self.assertTrue(os.path.isdir(dst_dir)) finally: if os.path.exists(dst_dir): os.rmdir(dst_dir) def test_main() -> None: support.run_unittest(TestShutil, TestMove, TestCopyFile) if __name__ == '__main__': test_main() mypy-0.761/test-data/stdlib-samples/3.2/test/test_subprocess.py0000644€tŠÔÚ€2›s®0000021742013576752246030621 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from test import support import subprocess import sys import signal import io import os import errno import tempfile import time import re import sysconfig import warnings import select import shutil import gc import resource from typing import Any, Dict, Callable, Iterable, List, Set, Tuple, cast mswindows = (sys.platform == "win32") # # Depends on the following external programs: Python # if mswindows: SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), ' 'os.O_BINARY);') else: SETBINARY = '' try: mkstemp = tempfile.mkstemp except AttributeError: # tempfile.mkstemp is not available def _mkstemp() -> Tuple[int, str]: """Replacement for mkstemp, calling mktemp.""" fname = tempfile.mktemp() return os.open(fname, os.O_RDWR|os.O_CREAT), fname mkstemp = cast(Any, _mkstemp) class BaseTestCase(unittest.TestCase): def setUp(self) -> None: # Try to minimize the number of children we have so this test # doesn't crash on some buildbots (Alphas in particular). support.reap_children() def tearDown(self) -> None: for inst in subprocess._active: inst.wait() subprocess._cleanup() self.assertFalse(subprocess._active, "subprocess._active not empty") def assertStderrEqual(self, stderr: bytes, expected: bytes, msg: object = None) -> None: # In a debug build, stuff like "[6580 refs]" is printed to stderr at # shutdown time. 
That frustrates tests trying to check stderr produced # from a spawned Python process. actual = support.strip_python_stderr(stderr) self.assertEqual(actual, expected, msg) class ProcessTestCase(BaseTestCase): def test_call_seq(self) -> None: # call() function with sequence argument rc = subprocess.call([sys.executable, "-c", "import sys; sys.exit(47)"]) self.assertEqual(rc, 47) def test_check_call_zero(self) -> None: # check_call() function with zero return code rc = subprocess.check_call([sys.executable, "-c", "import sys; sys.exit(0)"]) self.assertEqual(rc, 0) def test_check_call_nonzero(self) -> None: # check_call() function with non-zero return code with self.assertRaises(subprocess.CalledProcessError) as c: subprocess.check_call([sys.executable, "-c", "import sys; sys.exit(47)"]) self.assertEqual(c.exception.returncode, 47) def test_check_output(self) -> None: # check_output() function with zero return code output = subprocess.check_output( [sys.executable, "-c", "print('BDFL')"]) self.assertIn(b'BDFL', cast(Any, output)) # see #39 def test_check_output_nonzero(self) -> None: # check_call() function with non-zero return code with self.assertRaises(subprocess.CalledProcessError) as c: subprocess.check_output( [sys.executable, "-c", "import sys; sys.exit(5)"]) self.assertEqual(c.exception.returncode, 5) def test_check_output_stderr(self) -> None: # check_output() function stderr redirected to stdout output = subprocess.check_output( [sys.executable, "-c", "import sys; sys.stderr.write('BDFL')"], stderr=subprocess.STDOUT) self.assertIn(b'BDFL', cast(Any, output)) # see #39 def test_check_output_stdout_arg(self) -> None: # check_output() function stderr redirected to stdout with self.assertRaises(ValueError) as c: output = subprocess.check_output( [sys.executable, "-c", "print('will not be run')"], stdout=sys.stdout) self.fail("Expected ValueError when stdout arg supplied.") self.assertIn('stdout', c.exception.args[0]) def test_call_kwargs(self) -> None: # call() 
function with keyword args newenv = os.environ.copy() newenv["FRUIT"] = "banana" rc = subprocess.call([sys.executable, "-c", 'import sys, os;' 'sys.exit(os.getenv("FRUIT")=="banana")'], env=newenv) self.assertEqual(rc, 1) def test_invalid_args(self) -> None: # Popen() called with invalid arguments should raise TypeError # but Popen.__del__ should not complain (issue #12085) with support.captured_stderr() as s: self.assertRaises(TypeError, subprocess.Popen, invalid_arg_name=1) argcount = subprocess.Popen.__init__.__code__.co_argcount too_many_args = [0] * (argcount + 1) self.assertRaises(TypeError, subprocess.Popen, *too_many_args) self.assertEqual(s.getvalue(), '') def test_stdin_none(self) -> None: # .stdin is None when not redirected p = subprocess.Popen([sys.executable, "-c", 'print("banana")'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) p.wait() self.assertEqual(p.stdin, None) def test_stdout_none(self) -> None: # .stdout is None when not redirected p = subprocess.Popen([sys.executable, "-c", 'print(" this bit of output is from a ' 'test of stdout in a different ' 'process ...")'], stdin=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdin.close) self.addCleanup(p.stderr.close) p.wait() self.assertEqual(p.stdout, None) def test_stderr_none(self) -> None: # .stderr is None when not redirected p = subprocess.Popen([sys.executable, "-c", 'print("banana")'], stdin=subprocess.PIPE, stdout=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stdin.close) p.wait() self.assertEqual(p.stderr, None) def test_executable_with_cwd(self) -> None: python_dir = os.path.dirname(os.path.realpath(sys.executable)) p = subprocess.Popen(["somethingyoudonthave", "-c", "import sys; sys.exit(47)"], executable=sys.executable, cwd=python_dir) p.wait() self.assertEqual(p.returncode, 47) @unittest.skipIf(sysconfig.is_python_build(), "need an installed Python. 
See #7774") def test_executable_without_cwd(self) -> None: # For a normal installation, it should work without 'cwd' # argument. For test runs in the build directory, see #7774. p = subprocess.Popen(["somethingyoudonthave", "-c", "import sys; sys.exit(47)"], executable=sys.executable) p.wait() self.assertEqual(p.returncode, 47) def test_stdin_pipe(self) -> None: # stdin redirection p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.exit(sys.stdin.read() == "pear")'], stdin=subprocess.PIPE) p.stdin.write(b"pear") p.stdin.close() p.wait() self.assertEqual(p.returncode, 1) def test_stdin_filedes(self) -> None: # stdin is set to open file descriptor tf = tempfile.TemporaryFile() self.addCleanup(tf.close) d = tf.fileno() os.write(d, b"pear") os.lseek(d, 0, 0) p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.exit(sys.stdin.read() == "pear")'], stdin=d) p.wait() self.assertEqual(p.returncode, 1) def test_stdin_fileobj(self) -> None: # stdin is set to open file object tf = tempfile.TemporaryFile() self.addCleanup(tf.close) tf.write(b"pear") tf.seek(0) p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.exit(sys.stdin.read() == "pear")'], stdin=tf) p.wait() self.assertEqual(p.returncode, 1) def test_stdout_pipe(self) -> None: # stdout redirection p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stdout.write("orange")'], stdout=subprocess.PIPE) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read(), b"orange") def test_stdout_filedes(self) -> None: # stdout is set to open file descriptor tf = tempfile.TemporaryFile() self.addCleanup(tf.close) d = tf.fileno() p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stdout.write("orange")'], stdout=d) p.wait() os.lseek(d, 0, 0) self.assertEqual(os.read(d, 1024), b"orange") def test_stdout_fileobj(self) -> None: # stdout is set to open file object tf = tempfile.TemporaryFile() self.addCleanup(tf.close) p = subprocess.Popen([sys.executable, "-c", 'import sys; 
sys.stdout.write("orange")'], stdout=tf) p.wait() tf.seek(0) self.assertEqual(tf.read(), b"orange") def test_stderr_pipe(self) -> None: # stderr redirection p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stderr.write("strawberry")'], stderr=subprocess.PIPE) self.addCleanup(p.stderr.close) self.assertStderrEqual(p.stderr.read(), b"strawberry") def test_stderr_filedes(self) -> None: # stderr is set to open file descriptor tf = tempfile.TemporaryFile() self.addCleanup(tf.close) d = tf.fileno() p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stderr.write("strawberry")'], stderr=d) p.wait() os.lseek(d, 0, 0) self.assertStderrEqual(os.read(d, 1024), b"strawberry") def test_stderr_fileobj(self) -> None: # stderr is set to open file object tf = tempfile.TemporaryFile() self.addCleanup(tf.close) p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stderr.write("strawberry")'], stderr=tf) p.wait() tf.seek(0) self.assertStderrEqual(tf.read(), b"strawberry") def test_stdout_stderr_pipe(self) -> None: # capture stdout and stderr to the same pipe p = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.stdout.write("apple");' 'sys.stdout.flush();' 'sys.stderr.write("orange")'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) self.addCleanup(p.stdout.close) self.assertStderrEqual(p.stdout.read(), b"appleorange") def test_stdout_stderr_file(self) -> None: # capture stdout and stderr to the same open file tf = tempfile.TemporaryFile() self.addCleanup(tf.close) p = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.stdout.write("apple");' 'sys.stdout.flush();' 'sys.stderr.write("orange")'], stdout=tf, stderr=tf) p.wait() tf.seek(0) self.assertStderrEqual(tf.read(), b"appleorange") def test_stdout_filedes_of_stdout(self) -> None: # stdout is set to 1 (#1531862). 
cmd = r"import sys, os; sys.exit(os.write(sys.stdout.fileno(), b'.\n'))" rc = subprocess.call([sys.executable, "-c", cmd], stdout=1) self.assertEqual(rc, 2) def test_cwd(self) -> None: tmpdir = tempfile.gettempdir() # We cannot use os.path.realpath to canonicalize the path, # since it doesn't expand Tru64 {memb} strings. See bug 1063571. cwd = os.getcwd() os.chdir(tmpdir) tmpdir = os.getcwd() os.chdir(cwd) p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(os.getcwd())'], stdout=subprocess.PIPE, cwd=tmpdir) self.addCleanup(p.stdout.close) normcase = os.path.normcase self.assertEqual(normcase(p.stdout.read().decode("utf-8")), normcase(tmpdir)) def test_env(self) -> None: newenv = os.environ.copy() newenv["FRUIT"] = "orange" with subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(os.getenv("FRUIT"))'], stdout=subprocess.PIPE, env=newenv) as p: stdout, stderr = p.communicate() self.assertEqual(stdout, b"orange") # Windows requires at least the SYSTEMROOT environment variable to start # Python @unittest.skipIf(sys.platform == 'win32', 'cannot test an empty env on Windows') @unittest.skipIf(sysconfig.get_config_var('Py_ENABLE_SHARED') is not None, 'the python library cannot be loaded ' 'with an empty environment') def test_empty_env(self) -> None: with subprocess.Popen([sys.executable, "-c", 'import os; ' 'print(list(os.environ.keys()))'], stdout=subprocess.PIPE, env={}) as p: stdout, stderr = p.communicate() self.assertIn(stdout.strip(), [b"[]", # Mac OS X adds __CF_USER_TEXT_ENCODING variable to an empty # environment b"['__CF_USER_TEXT_ENCODING']"]) def test_communicate_stdin(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.exit(sys.stdin.read() == "pear")'], stdin=subprocess.PIPE) p.communicate(b"pear") self.assertEqual(p.returncode, 1) def test_communicate_stdout(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stdout.write("pineapple")'], 
stdout=subprocess.PIPE) (stdout, stderr) = p.communicate() self.assertEqual(stdout, b"pineapple") self.assertEqual(stderr, None) def test_communicate_stderr(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stderr.write("pineapple")'], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() self.assertEqual(stdout, None) self.assertStderrEqual(stderr, b"pineapple") def test_communicate(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stderr.write("pineapple");' 'sys.stdout.write(sys.stdin.read())'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) (stdout, stderr) = p.communicate(b"banana") self.assertEqual(stdout, b"banana") self.assertStderrEqual(stderr, b"pineapple") # Test for the fd leak reported in http://bugs.python.org/issue2791. def test_communicate_pipe_fd_leak(self) -> None: for stdin_pipe in (False, True): for stdout_pipe in (False, True): for stderr_pipe in (False, True): options = {} # type: Dict[str, Any] if stdin_pipe: options['stdin'] = subprocess.PIPE if stdout_pipe: options['stdout'] = subprocess.PIPE if stderr_pipe: options['stderr'] = subprocess.PIPE if not options: continue p = subprocess.Popen([sys.executable, "-c", "pass"], **options) p.communicate() if p.stdin is not None: self.assertTrue(p.stdin.closed) if p.stdout is not None: self.assertTrue(p.stdout.closed) if p.stderr is not None: self.assertTrue(p.stderr.closed) def test_communicate_returns(self) -> None: # communicate() should return None if no redirection is active p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(47)"]) (stdout, stderr) = p.communicate() self.assertEqual(stdout, None) self.assertEqual(stderr, None) def test_communicate_pipe_buf(self) -> None: # communicate() with writes larger than pipe_buf # This test will probably deadlock rather than fail, if # communicate() does not work 
properly. x, y = os.pipe() if mswindows: pipe_buf = 512 else: pipe_buf = os.fpathconf(x, "PC_PIPE_BUF") os.close(x) os.close(y) p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(sys.stdin.read(47));' 'sys.stderr.write("xyz"*%d);' 'sys.stdout.write(sys.stdin.read())' % pipe_buf], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) string_to_write = b"abc"*pipe_buf (stdout, stderr) = p.communicate(string_to_write) self.assertEqual(stdout, string_to_write) def test_writes_before_communicate(self) -> None: # stdin.write before communicate() p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(sys.stdin.read())'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) p.stdin.write(b"banana") (stdout, stderr) = p.communicate(b"split") self.assertEqual(stdout, b"bananasplit") self.assertStderrEqual(stderr, b"") def test_universal_newlines(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' + SETBINARY + 'sys.stdout.write(sys.stdin.readline());' 'sys.stdout.flush();' 'sys.stdout.write("line2\\n");' 'sys.stdout.flush();' 'sys.stdout.write(sys.stdin.read());' 'sys.stdout.flush();' 'sys.stdout.write("line4\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line5\\r\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line6\\r");' 'sys.stdout.flush();' 'sys.stdout.write("\\nline7");' 'sys.stdout.flush();' 'sys.stdout.write("\\nline8");'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=1) p.stdin.write("line1\n") self.assertEqual(p.stdout.readline(), "line1\n") p.stdin.write("line3\n") p.stdin.close() self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.readline(), "line2\n") self.assertEqual(p.stdout.read(6), "line3\n") self.assertEqual(p.stdout.read(), 
"line4\nline5\nline6\nline7\nline8") def test_universal_newlines_communicate(self) -> None: # universal newlines through communicate() p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' + SETBINARY + 'sys.stdout.write("line2\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line4\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line5\\r\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line6\\r");' 'sys.stdout.flush();' 'sys.stdout.write("\\nline7");' 'sys.stdout.flush();' 'sys.stdout.write("\\nline8");'], stderr=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=1) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) # BUG: can't give a non-empty stdin because it breaks both the # select- and poll-based communicate() implementations. (stdout, stderr) = p.communicate() self.assertEqual(stdout, "line2\nline4\nline5\nline6\nline7\nline8") def test_universal_newlines_communicate_stdin(self) -> None: # universal newlines through communicate(), with only stdin p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' + SETBINARY + '''\nif True: s = sys.stdin.readline() assert s == "line1\\n", repr(s) s = sys.stdin.read() assert s == "line3\\n", repr(s) '''], stdin=subprocess.PIPE, universal_newlines=1) (stdout, stderr) = p.communicate("line1\nline3\n") self.assertEqual(p.returncode, 0) def test_no_leaking(self) -> None: # Make sure we leak no resources if not mswindows: max_handles = 1026 # too much for most UNIX systems else: max_handles = 2050 # too much for (at least some) Windows setups handles = [] # type: List[int] tmpdir = tempfile.mkdtemp() try: for i in range(max_handles): try: tmpfile = os.path.join(tmpdir, support.TESTFN) handles.append(os.open(tmpfile, os.O_WRONLY|os.O_CREAT)) except OSError as e: if e.errno != errno.EMFILE: raise break else: self.skipTest("failed to reach the file descriptor limit " "(tried %d)" % max_handles) # Close a couple of them (should be enough for a subprocess) for i in range(10): os.close(handles.pop()) 
# Loop creating some subprocesses. If one of them leaks some fds, # the next loop iteration will fail by reaching the max fd limit. for i in range(15): p = subprocess.Popen([sys.executable, "-c", "import sys;" "sys.stdout.write(sys.stdin.read())"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) data = p.communicate(b"lime")[0] self.assertEqual(data, b"lime") finally: for h in handles: os.close(h) shutil.rmtree(tmpdir) def test_list2cmdline(self) -> None: self.assertEqual(subprocess.list2cmdline(['a b c', 'd', 'e']), '"a b c" d e') self.assertEqual(subprocess.list2cmdline(['ab"c', '\\', 'd']), 'ab\\"c \\ d') self.assertEqual(subprocess.list2cmdline(['ab"c', ' \\', 'd']), 'ab\\"c " \\\\" d') self.assertEqual(subprocess.list2cmdline(['a\\\\\\b', 'de fg', 'h']), 'a\\\\\\b "de fg" h') self.assertEqual(subprocess.list2cmdline(['a\\"b', 'c', 'd']), 'a\\\\\\"b c d') self.assertEqual(subprocess.list2cmdline(['a\\\\b c', 'd', 'e']), '"a\\\\b c" d e') self.assertEqual(subprocess.list2cmdline(['a\\\\b\\ c', 'd', 'e']), '"a\\\\b\\ c" d e') self.assertEqual(subprocess.list2cmdline(['ab', '']), 'ab ""') def test_poll(self) -> None: p = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(1)"]) count = 0 while p.poll() is None: time.sleep(0.1) count += 1 # We expect that the poll loop probably went around about 10 times, # but, based on system scheduling we can't control, it's possible # poll() never returned None. It "should be" very rare that it # didn't go around at least twice. self.assertGreaterEqual(count, 2) # Subsequent invocations should just return the returncode self.assertEqual(p.poll(), 0) def test_wait(self) -> None: p = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(2)"]) self.assertEqual(p.wait(), 0) # Subsequent invocations should just return the returncode self.assertEqual(p.wait(), 0) def test_invalid_bufsize(self) -> None: # an invalid type of the bufsize argument should raise # TypeError. 
with self.assertRaises(TypeError): subprocess.Popen([sys.executable, "-c", "pass"], cast(Any, "orange")) def test_bufsize_is_none(self) -> None: # bufsize=None should be the same as bufsize=0. p = subprocess.Popen([sys.executable, "-c", "pass"], None) self.assertEqual(p.wait(), 0) # Again with keyword arg p = subprocess.Popen([sys.executable, "-c", "pass"], bufsize=None) self.assertEqual(p.wait(), 0) def test_leaking_fds_on_error(self) -> None: # see bug #5179: Popen leaks file descriptors to PIPEs if # the child fails to execute; this will eventually exhaust # the maximum number of open fds. 1024 seems a very common # value for that limit, but Windows has 2048, so we loop # 1024 times (each call leaked two fds). for i in range(1024): # Windows raises IOError. Others raise OSError. with self.assertRaises(EnvironmentError) as c: subprocess.Popen(['nonexisting_i_hope'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) # ignore errors that indicate the command was not found if c.exception.errno not in (errno.ENOENT, errno.EACCES): raise c.exception def test_issue8780(self) -> None: # Ensure that stdout is inherited from the parent # if stdout=PIPE is not used code = ';'.join([ 'import subprocess, sys', 'retcode = subprocess.call(' "[sys.executable, '-c', 'print(\"Hello World!\")'])", 'assert retcode == 0']) output = subprocess.check_output([sys.executable, '-c', code]) self.assertTrue(output.startswith(b'Hello World!'), ascii(output)) def test_handles_closed_on_exception(self) -> None: # If CreateProcess exits with an error, ensure the # duplicate output handles are released ifhandle, ifname = mkstemp() ofhandle, ofname = mkstemp() efhandle, efname = mkstemp() try: subprocess.Popen (["*"], stdin=ifhandle, stdout=ofhandle, stderr=efhandle) except OSError: os.close(ifhandle) os.remove(ifname) os.close(ofhandle) os.remove(ofname) os.close(efhandle) os.remove(efname) self.assertFalse(os.path.exists(ifname)) self.assertFalse(os.path.exists(ofname)) 
self.assertFalse(os.path.exists(efname)) def test_communicate_epipe(self) -> None: # Issue 10963: communicate() should hide EPIPE p = subprocess.Popen([sys.executable, "-c", 'pass'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) p.communicate(b"x" * 2**20) def test_communicate_epipe_only_stdin(self) -> None: # Issue 10963: communicate() should hide EPIPE p = subprocess.Popen([sys.executable, "-c", 'pass'], stdin=subprocess.PIPE) self.addCleanup(p.stdin.close) time.sleep(2) p.communicate(b"x" * 2**20) @unittest.skipUnless(hasattr(signal, 'SIGALRM'), "Requires signal.SIGALRM") def test_communicate_eintr(self) -> None: # Issue #12493: communicate() should handle EINTR def handler(signum, frame): pass old_handler = signal.signal(signal.SIGALRM, handler) self.addCleanup(signal.signal, signal.SIGALRM, old_handler) # the process is running for 2 seconds args = [sys.executable, "-c", 'import time; time.sleep(2)'] for stream in ('stdout', 'stderr'): kw = {stream: subprocess.PIPE} # type: Dict[str, Any] with subprocess.Popen(args, **kw) as process: signal.alarm(1) # communicate() will be interrupted by SIGALRM process.communicate() # context manager class _SuppressCoreFiles(object): """Try to prevent core files from being created.""" old_limit = None # type: Tuple[int, int] def __enter__(self) -> None: """Try to save previous ulimit, then set it to (0, 0).""" if resource is not None: try: self.old_limit = resource.getrlimit(resource.RLIMIT_CORE) resource.setrlimit(resource.RLIMIT_CORE, (0, 0)) except (ValueError, resource.error): pass if sys.platform == 'darwin': # Check if the 'Crash Reporter' on OSX was configured # in 'Developer' mode and warn that it will get triggered # when it is. # # This assumes that this context manager is used in tests # that might trigger the next manager. 
value = subprocess.Popen(['/usr/bin/defaults', 'read', 'com.apple.CrashReporter', 'DialogType'], stdout=subprocess.PIPE).communicate()[0] if value.strip() == b'developer': print("this tests triggers the Crash Reporter, " "that is intentional", end='') sys.stdout.flush() def __exit__(self, *args: Any) -> None: """Return core file behavior to default.""" if self.old_limit is None: return if resource is not None: try: resource.setrlimit(resource.RLIMIT_CORE, self.old_limit) except (ValueError, resource.error): pass @unittest.skipIf(mswindows, "POSIX specific tests") class POSIXProcessTestCase(BaseTestCase): def test_exceptions(self) -> None: nonexistent_dir = "/_this/pa.th/does/not/exist" try: os.chdir(nonexistent_dir) except OSError as e: # This avoids hard coding the errno value or the OS perror() # string and instead capture the exception that we want to see # below for comparison. desired_exception = e desired_exception.strerror += ': ' + repr(sys.executable) else: self.fail("chdir to nonexistant directory %s succeeded." % nonexistent_dir) # Error in the child re-raised in the parent. try: p = subprocess.Popen([sys.executable, "-c", ""], cwd=nonexistent_dir) except OSError as e: # Test that the child process chdir failure actually makes # it up to the parent process as the correct exception. self.assertEqual(desired_exception.errno, e.errno) self.assertEqual(desired_exception.strerror, e.strerror) else: self.fail("Expected OSError: %s" % desired_exception) def test_restore_signals(self) -> None: # Code coverage for both values of restore_signals to make sure it # at least does not blow up. # A test for behavior would be complex. Contributions welcome. subprocess.call([sys.executable, "-c", ""], restore_signals=True) subprocess.call([sys.executable, "-c", ""], restore_signals=False) def test_start_new_session(self) -> None: # For code coverage of calling setsid(). 
We don't care if we get an # EPERM error from it depending on the test execution environment, that # still indicates that it was called. try: output = subprocess.check_output( [sys.executable, "-c", "import os; print(os.getpgid(os.getpid()))"], start_new_session=True) except OSError as e: if e.errno != errno.EPERM: raise else: parent_pgid = os.getpgid(os.getpid()) child_pgid = int(output) self.assertNotEqual(parent_pgid, child_pgid) def test_run_abort(self) -> None: # returncode handles signal termination with _SuppressCoreFiles(): p = subprocess.Popen([sys.executable, "-c", 'import os; os.abort()']) p.wait() self.assertEqual(-p.returncode, signal.SIGABRT) def test_preexec(self) -> None: # DISCLAIMER: Setting environment variables is *not* a good use # of a preexec_fn. This is merely a test. p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(os.getenv("FRUIT"))'], stdout=subprocess.PIPE, preexec_fn=lambda: os.putenv("FRUIT", "apple")) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read(), b"apple") def test_preexec_exception(self) -> None: def raise_it(): raise ValueError("What if two swallows carried a coconut?") try: p = subprocess.Popen([sys.executable, "-c", ""], preexec_fn=raise_it) except RuntimeError as e: self.assertTrue( subprocess._posixsubprocess, "Expected a ValueError from the preexec_fn") except ValueError as e2: self.assertIn("coconut", e2.args[0]) else: self.fail("Exception raised by preexec_fn did not make it " "to the parent process.") def test_preexec_gc_module_failure(self) -> None: # This tests the code that disables garbage collection if the child # process will execute any Python. 
def raise_runtime_error(): raise RuntimeError("this shouldn't escape") enabled = gc.isenabled() orig_gc_disable = gc.disable orig_gc_isenabled = gc.isenabled try: gc.disable() self.assertFalse(gc.isenabled()) subprocess.call([sys.executable, '-c', ''], preexec_fn=lambda: None) self.assertFalse(gc.isenabled(), "Popen enabled gc when it shouldn't.") gc.enable() self.assertTrue(gc.isenabled()) subprocess.call([sys.executable, '-c', ''], preexec_fn=lambda: None) self.assertTrue(gc.isenabled(), "Popen left gc disabled.") setattr(gc, 'disable', raise_runtime_error) self.assertRaises(RuntimeError, subprocess.Popen, [sys.executable, '-c', ''], preexec_fn=lambda: None) del gc.isenabled # force an AttributeError self.assertRaises(AttributeError, subprocess.Popen, [sys.executable, '-c', ''], preexec_fn=lambda: None) finally: setattr(gc, 'disable', orig_gc_disable) setattr(gc, 'isenabled', orig_gc_isenabled) if not enabled: gc.disable() def test_args_string(self) -> None: # args is a string fd, fname = mkstemp() # reopen in text mode with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260 fobj.write("#!/bin/sh\n") fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" % sys.executable) os.chmod(fname, 0o700) p = subprocess.Popen(fname) p.wait() os.remove(fname) self.assertEqual(p.returncode, 47) def test_invalid_args(self) -> None: # invalid arguments should raise ValueError self.assertRaises(ValueError, subprocess.call, [sys.executable, "-c", "import sys; sys.exit(47)"], startupinfo=47) self.assertRaises(ValueError, subprocess.call, [sys.executable, "-c", "import sys; sys.exit(47)"], creationflags=47) def test_shell_sequence(self) -> None: # Run command through the shell (sequence) newenv = os.environ.copy() newenv["FRUIT"] = "apple" p = subprocess.Popen(["echo $FRUIT"], shell=1, stdout=subprocess.PIPE, env=newenv) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple") def test_shell_string(self) -> None: # Run 
command through the shell (string) newenv = os.environ.copy() newenv["FRUIT"] = "apple" p = subprocess.Popen("echo $FRUIT", shell=1, stdout=subprocess.PIPE, env=newenv) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple") def test_call_string(self) -> None: # call() function with string argument on UNIX fd, fname = mkstemp() # reopen in text mode with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260 fobj.write("#!/bin/sh\n") fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" % sys.executable) os.chmod(fname, 0o700) rc = subprocess.call(fname) os.remove(fname) self.assertEqual(rc, 47) def test_specific_shell(self) -> None: # Issue #9265: Incorrect name passed as arg[0]. shells = [] # type: List[str] for prefix in ['/bin', '/usr/bin/', '/usr/local/bin']: for name in ['bash', 'ksh']: sh = os.path.join(prefix, name) if os.path.isfile(sh): shells.append(sh) if not shells: # Will probably work for any shell but csh. self.skipTest("bash or ksh required for this test") sh = '/bin/sh' if os.path.isfile(sh) and not os.path.islink(sh): # Test will fail if /bin/sh is a symlink to csh. shells.append(sh) for sh in shells: p = subprocess.Popen("echo $0", executable=sh, shell=True, stdout=subprocess.PIPE) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read().strip(), bytes(sh, 'ascii')) def _kill_process(self, method: str, *args: Any) -> subprocess.Popen: # Do not inherit file handles from the parent. # It should fix failures on some platforms. p = subprocess.Popen([sys.executable, "-c", """if 1: import sys, time sys.stdout.write('x\\n') sys.stdout.flush() time.sleep(30) """], close_fds=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Wait for the interpreter to be completely initialized before # sending any signal. 
p.stdout.read(1) getattr(p, method)(*args) return p def test_send_signal(self) -> None: p = self._kill_process('send_signal', signal.SIGINT) _, stderr = p.communicate() self.assertIn(b'KeyboardInterrupt', stderr) self.assertNotEqual(p.wait(), 0) def test_kill(self) -> None: p = self._kill_process('kill') _, stderr = p.communicate() self.assertStderrEqual(stderr, b'') self.assertEqual(p.wait(), -signal.SIGKILL) def test_terminate(self) -> None: p = self._kill_process('terminate') _, stderr = p.communicate() self.assertStderrEqual(stderr, b'') self.assertEqual(p.wait(), -signal.SIGTERM) def check_close_std_fds(self, fds: Iterable[int]) -> None: # Issue #9905: test that subprocess pipes still work properly with # some standard fds closed stdin = 0 newfds = [] # type: List[int] for a in fds: b = os.dup(a) newfds.append(b) if a == 0: stdin = b try: for fd in fds: os.close(fd) out, err = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.stdout.write("apple");' 'sys.stdout.flush();' 'sys.stderr.write("orange")'], stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() err = support.strip_python_stderr(err) self.assertEqual((out, err), (b'apple', b'orange')) finally: for b, a in zip(newfds, fds): os.dup2(b, a) for b in newfds: os.close(b) def test_close_fd_0(self) -> None: self.check_close_std_fds([0]) def test_close_fd_1(self) -> None: self.check_close_std_fds([1]) def test_close_fd_2(self) -> None: self.check_close_std_fds([2]) def test_close_fds_0_1(self) -> None: self.check_close_std_fds([0, 1]) def test_close_fds_0_2(self) -> None: self.check_close_std_fds([0, 2]) def test_close_fds_1_2(self) -> None: self.check_close_std_fds([1, 2]) def test_close_fds_0_1_2(self) -> None: # Issue #10806: test that subprocess pipes still work properly with # all standard fds closed. 
self.check_close_std_fds([0, 1, 2]) def test_remapping_std_fds(self) -> None: # open up some temporary files temps = [mkstemp() for i in range(3)] try: temp_fds = [fd for fd, fname in temps] # unlink the files -- we won't need to reopen them for fd, fname in temps: os.unlink(fname) # write some data to what will become stdin, and rewind os.write(temp_fds[1], b"STDIN") os.lseek(temp_fds[1], 0, 0) # move the standard file descriptors out of the way saved_fds = [os.dup(fd) for fd in range(3)] try: # duplicate the file objects over the standard fd's for fd, temp_fd in enumerate(temp_fds): os.dup2(temp_fd, fd) # now use those files in the "wrong" order, so that subprocess # has to rearrange them in the child p = subprocess.Popen([sys.executable, "-c", 'import sys; got = sys.stdin.read();' 'sys.stdout.write("got %s"%got); sys.stderr.write("err")'], stdin=temp_fds[1], stdout=temp_fds[2], stderr=temp_fds[0]) p.wait() finally: # restore the original fd's underneath sys.stdin, etc. for std, saved in enumerate(saved_fds): os.dup2(saved, std) os.close(saved) for fd in temp_fds: os.lseek(fd, 0, 0) out = os.read(temp_fds[2], 1024) err = support.strip_python_stderr(os.read(temp_fds[0], 1024)) self.assertEqual(out, b"got STDIN") self.assertEqual(err, b"err") finally: for fd in temp_fds: os.close(fd) def check_swap_fds(self, stdin_no: int, stdout_no: int, stderr_no: int) -> None: # open up some temporary files temps = [mkstemp() for i in range(3)] temp_fds = [fd for fd, fname in temps] try: # unlink the files -- we won't need to reopen them for fd, fname in temps: os.unlink(fname) # save a copy of the standard file descriptors saved_fds = [os.dup(fd) for fd in range(3)] try: # duplicate the temp files over the standard fd's 0, 1, 2 for fd, temp_fd in enumerate(temp_fds): os.dup2(temp_fd, fd) # write some data to what will become stdin, and rewind os.write(stdin_no, b"STDIN") os.lseek(stdin_no, 0, 0) # now use those files in the given order, so that subprocess # has to rearrange 
them in the child p = subprocess.Popen([sys.executable, "-c", 'import sys; got = sys.stdin.read();' 'sys.stdout.write("got %s"%got); sys.stderr.write("err")'], stdin=stdin_no, stdout=stdout_no, stderr=stderr_no) p.wait() for fd in temp_fds: os.lseek(fd, 0, 0) out = os.read(stdout_no, 1024) err = support.strip_python_stderr(os.read(stderr_no, 1024)) finally: for std, saved in enumerate(saved_fds): os.dup2(saved, std) os.close(saved) self.assertEqual(out, b"got STDIN") self.assertEqual(err, b"err") finally: for fd in temp_fds: os.close(fd) # When duping fds, if there arises a situation where one of the fds is # either 0, 1 or 2, it is possible that it is overwritten (#12607). # This tests all combinations of this. def test_swap_fds(self) -> None: self.check_swap_fds(0, 1, 2) self.check_swap_fds(0, 2, 1) self.check_swap_fds(1, 0, 2) self.check_swap_fds(1, 2, 0) self.check_swap_fds(2, 0, 1) self.check_swap_fds(2, 1, 0) def test_surrogates_error_message(self) -> None: def prepare() -> None: raise ValueError("surrogate:\uDCff") try: subprocess.call( [sys.executable, "-c", "pass"], preexec_fn=prepare) except ValueError as err: # Pure Python implementations keeps the message self.assertIsNone(subprocess._posixsubprocess) self.assertEqual(str(err), "surrogate:\uDCff") except RuntimeError as err2: # _posixsubprocess uses a default message self.assertIsNotNone(subprocess._posixsubprocess) self.assertEqual(str(err2), "Exception occurred in preexec_fn.") else: self.fail("Expected ValueError or RuntimeError") def test_undecodable_env(self) -> None: for key, value in (('test', 'abc\uDCFF'), ('test\uDCFF', '42')): # test str with surrogates script = "import os; print(ascii(os.getenv(%s)))" % repr(key) env = os.environ.copy() env[key] = value # Use C locale to get ascii for the locale encoding to force # surrogate-escaping of \xFF in the child process; otherwise it can # be decoded as-is if the default locale is latin-1. 
env['LC_ALL'] = 'C' stdout = subprocess.check_output( [sys.executable, "-c", script], env=env) stdout = stdout.rstrip(b'\n\r') self.assertEqual(stdout.decode('ascii'), ascii(value)) # test bytes keyb = key.encode("ascii", "surrogateescape") valueb = value.encode("ascii", "surrogateescape") script = "import os; print(ascii(os.getenvb(%s)))" % repr(keyb) envb = dict(os.environ.copy().items()) # type: Dict[Any, Any] envb[keyb] = valueb stdout = subprocess.check_output( [sys.executable, "-c", script], env=envb) stdout = stdout.rstrip(b'\n\r') self.assertEqual(stdout.decode('ascii'), ascii(valueb)) def test_bytes_program(self) -> None: abs_program = os.fsencode(sys.executable) path, programs = os.path.split(sys.executable) program = os.fsencode(programs) # absolute bytes path exitcode = subprocess.call([abs_program, "-c", "pass"]) self.assertEqual(exitcode, 0) # bytes program, unicode PATH env = os.environ.copy() env["PATH"] = path exitcode = subprocess.call([program, "-c", "pass"], env=env) self.assertEqual(exitcode, 0) # bytes program, bytes PATH envb = os.environb.copy() envb[b"PATH"] = os.fsencode(path) exitcode = subprocess.call([program, "-c", "pass"], env=envb) self.assertEqual(exitcode, 0) def test_pipe_cloexec(self) -> None: sleeper = support.findfile("input_reader.py", subdir="subprocessdata") fd_status = support.findfile("fd_status.py", subdir="subprocessdata") p1 = subprocess.Popen([sys.executable, sleeper], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=False) self.addCleanup(p1.communicate, b'') p2 = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=False) output, error = p2.communicate() result_fds = set(map(int, output.split(b','))) unwanted_fds = set([p1.stdin.fileno(), p1.stdout.fileno(), p1.stderr.fileno()]) self.assertFalse(result_fds & unwanted_fds, "Expected no fds from %r to be open in child, " "found %r" % (unwanted_fds, result_fds & unwanted_fds)) def 
test_pipe_cloexec_real_tools(self) -> None: qcat = support.findfile("qcat.py", subdir="subprocessdata") qgrep = support.findfile("qgrep.py", subdir="subprocessdata") subdata = b'zxcvbn' data = subdata * 4 + b'\n' p1 = subprocess.Popen([sys.executable, qcat], stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=False) p2 = subprocess.Popen([sys.executable, qgrep, subdata], stdin=p1.stdout, stdout=subprocess.PIPE, close_fds=False) self.addCleanup(p1.wait) self.addCleanup(p2.wait) def kill_p1() -> None: #try: p1.terminate() #except ProcessLookupError: # pass def kill_p2() -> None: #try: p2.terminate() #except ProcessLookupError: # pass self.addCleanup(kill_p1) self.addCleanup(kill_p2) p1.stdin.write(data) p1.stdin.close() readfiles, ignored1, ignored2 = select.select([p2.stdout], [], [], 10) self.assertTrue(readfiles, "The child hung") self.assertEqual(p2.stdout.read(), data) p1.stdout.close() p2.stdout.close() def test_close_fds(self) -> None: fd_status = support.findfile("fd_status.py", subdir="subprocessdata") fds = os.pipe() self.addCleanup(os.close, fds[0]) self.addCleanup(os.close, fds[1]) open_fds = set([fds[0], fds[1]]) # add a bunch more fds for _ in range(9): fd = os.open("/dev/null", os.O_RDONLY) self.addCleanup(os.close, fd) open_fds.add(fd) p = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=False) output, ignored = p.communicate() remaining_fds = set(map(int, output.split(b','))) self.assertEqual(remaining_fds & open_fds, open_fds, "Some fds were closed") p = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=True) output, ignored = p.communicate() remaining_fds = set(map(int, output.split(b','))) self.assertFalse(remaining_fds & open_fds, "Some fds were left open") self.assertIn(1, remaining_fds, "Subprocess failed") # Keep some of the fd's we opened open in the subprocess. # This tests _posixsubprocess.c's proper handling of fds_to_keep. 
fds_to_keep = set(open_fds.pop() for _ in range(8)) p = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=True, pass_fds=()) output, ignored = p.communicate() remaining_fds = set(map(int, output.split(b','))) self.assertFalse(remaining_fds & fds_to_keep & open_fds, "Some fds not in pass_fds were left open") self.assertIn(1, remaining_fds, "Subprocess failed") # Mac OS X Tiger (10.4) has a kernel bug: sometimes, the file # descriptor of a pipe closed in the parent process is valid in the # child process according to fstat(), but the mode of the file # descriptor is invalid, and read or write raise an error. @support.requires_mac_ver(10, 5) def test_pass_fds(self) -> None: fd_status = support.findfile("fd_status.py", subdir="subprocessdata") open_fds = set() # type: Set[int] for x in range(5): fds = os.pipe() self.addCleanup(os.close, fds[0]) self.addCleanup(os.close, fds[1]) open_fds.update([fds[0], fds[1]]) for fd in open_fds: p = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=True, pass_fds=(fd, )) output, ignored = p.communicate() remaining_fds = set(map(int, output.split(b','))) to_be_closed = open_fds - {fd} self.assertIn(fd, remaining_fds, "fd to be passed not passed") self.assertFalse(remaining_fds & to_be_closed, "fd to be closed passed") # pass_fds overrides close_fds with a warning. 
with self.assertWarns(RuntimeWarning) as context: self.assertFalse(subprocess.call( [sys.executable, "-c", "import sys; sys.exit(0)"], close_fds=False, pass_fds=(fd, ))) self.assertIn('overriding close_fds', str(context.warning)) def test_stdout_stdin_are_single_inout_fd(self) -> None: with io.open(os.devnull, "r+") as inout: p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"], stdout=inout, stdin=inout) p.wait() def test_stdout_stderr_are_single_inout_fd(self) -> None: with io.open(os.devnull, "r+") as inout: p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"], stdout=inout, stderr=inout) p.wait() def test_stderr_stdin_are_single_inout_fd(self) -> None: with io.open(os.devnull, "r+") as inout: p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"], stderr=inout, stdin=inout) p.wait() def test_wait_when_sigchild_ignored(self) -> None: # NOTE: sigchild_ignore.py may not be an effective test on all OSes. sigchild_ignore = support.findfile("sigchild_ignore.py", subdir="subprocessdata") p = subprocess.Popen([sys.executable, sigchild_ignore], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = p.communicate() self.assertEqual(0, p.returncode, "sigchild_ignore.py exited" " non-zero with this error:\n%s" % stderr.decode('utf8')) def test_select_unbuffered(self) -> None: # Issue #11459: bufsize=0 should really set the pipes as # unbuffered (and therefore let select() work properly). select = support.import_module("select") p = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.stdout.write("apple")'], stdout=subprocess.PIPE, bufsize=0) f = p.stdout self.addCleanup(f.close) try: self.assertEqual(f.read(4), b"appl") self.assertIn(f, select.select([f], [], [], 0.0)[0]) finally: p.wait() def test_zombie_fast_process_del(self) -> None: # Issue #12650: on Unix, if Popen.__del__() was called before the # process exited, it wouldn't be added to subprocess._active, and would # remain a zombie. 
# spawn a Popen, and delete its reference before it exits p = subprocess.Popen([sys.executable, "-c", 'import sys, time;' 'time.sleep(0.2)'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) ident = id(p) pid = p.pid del p # check that p is in the active processes list self.assertIn(ident, [id(o) for o in subprocess._active]) def test_leak_fast_process_del_killed(self) -> None: # Issue #12650: on Unix, if Popen.__del__() was called before the # process exited, and the process got killed by a signal, it would never # be removed from subprocess._active, which triggered a FD and memory # leak. # spawn a Popen, delete its reference and kill it p = subprocess.Popen([sys.executable, "-c", 'import time;' 'time.sleep(3)'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) ident = id(p) pid = p.pid del p os.kill(pid, signal.SIGKILL) # check that p is in the active processes list self.assertIn(ident, [id(o) for o in subprocess._active]) # let some time for the process to exit, and create a new Popen: this # should trigger the wait() of p time.sleep(0.2) with self.assertRaises(EnvironmentError) as c: with subprocess.Popen(['nonexisting_i_hope'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: pass # p should have been wait()ed on, and removed from the _active list self.assertRaises(OSError, os.waitpid, pid, 0) self.assertNotIn(ident, [id(o) for o in subprocess._active]) @unittest.skipUnless(mswindows, "Windows specific tests") class Win32ProcessTestCase(BaseTestCase): def test_startupinfo(self) -> None: # startupinfo argument # We uses hardcoded constants, because we do not want to # depend on win32all. 
STARTF_USESHOWWINDOW = 1 SW_MAXIMIZE = 3 startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags = STARTF_USESHOWWINDOW startupinfo.wShowWindow = SW_MAXIMIZE # Since Python is a console process, it won't be affected # by wShowWindow, but the argument should be silently # ignored subprocess.call([sys.executable, "-c", "import sys; sys.exit(0)"], startupinfo=startupinfo) def test_creationflags(self) -> None: # creationflags argument CREATE_NEW_CONSOLE = 16 sys.stderr.write(" a DOS box should flash briefly ...\n") subprocess.call(sys.executable + ' -c "import time; time.sleep(0.25)"', creationflags=CREATE_NEW_CONSOLE) def test_invalid_args(self) -> None: # invalid arguments should raise ValueError self.assertRaises(ValueError, subprocess.call, [sys.executable, "-c", "import sys; sys.exit(47)"], preexec_fn=lambda: 1) self.assertRaises(ValueError, subprocess.call, [sys.executable, "-c", "import sys; sys.exit(47)"], stdout=subprocess.PIPE, close_fds=True) def test_close_fds(self) -> None: # close file descriptors rc = subprocess.call([sys.executable, "-c", "import sys; sys.exit(47)"], close_fds=True) self.assertEqual(rc, 47) def test_shell_sequence(self) -> None: # Run command through the shell (sequence) newenv = os.environ.copy() newenv["FRUIT"] = "physalis" p = subprocess.Popen(["set"], shell=1, stdout=subprocess.PIPE, env=newenv) self.addCleanup(p.stdout.close) self.assertIn(b"physalis", p.stdout.read()) def test_shell_string(self) -> None: # Run command through the shell (string) newenv = os.environ.copy() newenv["FRUIT"] = "physalis" p = subprocess.Popen("set", shell=1, stdout=subprocess.PIPE, env=newenv) self.addCleanup(p.stdout.close) self.assertIn(b"physalis", p.stdout.read()) def test_call_string(self) -> None: # call() function with string argument on Windows rc = subprocess.call(sys.executable + ' -c "import sys; sys.exit(47)"') self.assertEqual(rc, 47) def _kill_process(self, method: str, *args: Any) -> None: # Some win32 buildbot raises EOFError if stdin 
is inherited p = subprocess.Popen([sys.executable, "-c", """if 1: import sys, time sys.stdout.write('x\\n') sys.stdout.flush() time.sleep(30) """], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) # Wait for the interpreter to be completely initialized before # sending any signal. p.stdout.read(1) getattr(p, method)(*args) _, stderr = p.communicate() self.assertStderrEqual(stderr, b'') returncode = p.wait() self.assertNotEqual(returncode, 0) def test_send_signal(self) -> None: self._kill_process('send_signal', signal.SIGTERM) def test_kill(self) -> None: self._kill_process('kill') def test_terminate(self) -> None: self._kill_process('terminate') # The module says: # "NB This only works (and is only relevant) for UNIX." # # Actually, getoutput should work on any platform with an os.popen, but # I'll take the comment as given, and skip this suite. @unittest.skipUnless(os.name == 'posix', "only relevant for UNIX") class CommandTests(unittest.TestCase): def test_getoutput(self) -> None: self.assertEqual(subprocess.getoutput('echo xyzzy'), 'xyzzy') self.assertEqual(subprocess.getstatusoutput('echo xyzzy'), (0, 'xyzzy')) # we use mkdtemp in the next line to create an empty directory # under our exclusive control; from that, we can invent a pathname # that we _know_ won't exist. This is guaranteed to fail. 
dir = None # type: str try: dir = tempfile.mkdtemp() name = os.path.join(dir, "foo") status, output = subprocess.getstatusoutput('cat ' + name) self.assertNotEqual(status, 0) finally: if dir is not None: os.rmdir(dir) @unittest.skipUnless(getattr(subprocess, '_has_poll', False), "poll system call not supported") class ProcessTestCaseNoPoll(ProcessTestCase): def setUp(self) -> None: subprocess._has_poll = False ProcessTestCase.setUp(self) def tearDown(self) -> None: subprocess._has_poll = True ProcessTestCase.tearDown(self) #@unittest.skipUnless(getattr(subprocess, '_posixsubprocess', False), # "_posixsubprocess extension module not found.") #class ProcessTestCasePOSIXPurePython(ProcessTestCase, POSIXProcessTestCase): # @classmethod # def setUpClass(cls): # global subprocess # assert subprocess._posixsubprocess # # Reimport subprocess while forcing _posixsubprocess to not exist. # with support.check_warnings(('.*_posixsubprocess .* not being used.*', # RuntimeWarning)): # subprocess = support.import_fresh_module( # 'subprocess', blocked=['_posixsubprocess']) # assert not subprocess._posixsubprocess # # @classmethod # def tearDownClass(cls): # global subprocess # # Reimport subprocess as it should be, restoring order to the universe#. # subprocess = support.import_fresh_module('subprocess') # assert subprocess._posixsubprocess class HelperFunctionTests(unittest.TestCase): @unittest.skipIf(mswindows, "errno and EINTR make no sense on windows") def test_eintr_retry_call(self) -> None: record_calls = [] # type: List[Any] def fake_os_func(*args: Any) -> tuple: record_calls.append(args) if len(record_calls) == 2: raise OSError(errno.EINTR, "fake interrupted system call") return tuple(reversed(args)) self.assertEqual((999, 256), subprocess._eintr_retry_call(fake_os_func, 256, 999)) self.assertEqual([(256, 999)], record_calls) # This time there will be an EINTR so it will loop once. 
self.assertEqual((666,), subprocess._eintr_retry_call(fake_os_func, 666)) self.assertEqual([(256, 999), (666,), (666,)], record_calls) @unittest.skipUnless(mswindows, "Windows-specific tests") class CommandsWithSpaces (BaseTestCase): def setUp(self) -> None: super().setUp() f, fname = mkstemp(".py", "te st") self.fname = fname.lower () os.write(f, b"import sys;" b"sys.stdout.write('%d %s' % (len(sys.argv), [a.lower () for a in sys.argv]))" ) os.close(f) def tearDown(self) -> None: os.remove(self.fname) super().tearDown() def with_spaces(self, *args: Any, **kwargs: Any) -> None: kwargs['stdout'] = subprocess.PIPE p = subprocess.Popen(*args, **kwargs) self.addCleanup(p.stdout.close) self.assertEqual( p.stdout.read ().decode("mbcs"), "2 [%r, 'ab cd']" % self.fname ) def test_shell_string_with_spaces(self) -> None: # call() function with string argument with spaces on Windows self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname, "ab cd"), shell=1) def test_shell_sequence_with_spaces(self) -> None: # call() function with sequence argument with spaces on Windows self.with_spaces([sys.executable, self.fname, "ab cd"], shell=1) def test_noshell_string_with_spaces(self) -> None: # call() function with string argument with spaces on Windows self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname, "ab cd")) def test_noshell_sequence_with_spaces(self) -> None: # call() function with sequence argument with spaces on Windows self.with_spaces([sys.executable, self.fname, "ab cd"]) class ContextManagerTests(BaseTestCase): def test_pipe(self) -> None: with subprocess.Popen([sys.executable, "-c", "import sys;" "sys.stdout.write('stdout');" "sys.stderr.write('stderr');"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: self.assertEqual(proc.stdout.read(), b"stdout") self.assertStderrEqual(proc.stderr.read(), b"stderr") self.assertTrue(proc.stdout.closed) self.assertTrue(proc.stderr.closed) def test_returncode(self) -> None: with 
subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(100)"]) as proc: pass # __exit__ calls wait(), so the returncode should be set self.assertEqual(proc.returncode, 100) def test_communicate_stdin(self) -> None: with subprocess.Popen([sys.executable, "-c", "import sys;" "sys.exit(sys.stdin.read() == 'context')"], stdin=subprocess.PIPE) as proc: proc.communicate(b"context") self.assertEqual(proc.returncode, 1) def test_invalid_args(self) -> None: with self.assertRaises(EnvironmentError) as c: with subprocess.Popen(['nonexisting_i_hope'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: pass if c.exception.errno != errno.ENOENT: # ignore "no such file" raise c.exception def test_main(): unit_tests = (ProcessTestCase, POSIXProcessTestCase, Win32ProcessTestCase, #ProcessTestCasePOSIXPurePython, CommandTests, ProcessTestCaseNoPoll, HelperFunctionTests, CommandsWithSpaces, ContextManagerTests, ) support.run_unittest(*unit_tests) support.reap_children() if __name__ == "__main__": unittest.main() mypy-0.761/test-data/stdlib-samples/3.2/test/test_tempfile.py0000644€tŠÔÚ€2›s®0000011153513576752246030236 0ustar jukkaDROPBOX\Domain Users00000000000000# tempfile.py unit tests. import tempfile import os import signal import sys import re import warnings import unittest from test import support from typing import Any, AnyStr, List, Dict, IO if hasattr(os, 'stat'): import stat has_stat = 1 else: has_stat = 0 has_textmode = (tempfile._text_openflags != tempfile._bin_openflags) has_spawnl = hasattr(os, 'spawnl') # TEST_FILES may need to be tweaked for systems depending on the maximum # number of files that can be opened at one time (see ulimit -n) if sys.platform in ('openbsd3', 'openbsd4'): TEST_FILES = 48 else: TEST_FILES = 100 # This is organized as one test for each chunk of code in tempfile.py, # in order of their appearance in the file. Testing which requires # threads is not done here. # Common functionality. 
class TC(unittest.TestCase): str_check = re.compile(r"[a-zA-Z0-9_-]{6}$") def setUp(self) -> None: self._warnings_manager = support.check_warnings() self._warnings_manager.__enter__() warnings.filterwarnings("ignore", category=RuntimeWarning, message="mktemp", module=__name__) def tearDown(self) -> None: self._warnings_manager.__exit__(None, None, None) def failOnException(self, what: str, ei: tuple = None) -> None: if ei is None: ei = sys.exc_info() self.fail("%s raised %s: %s" % (what, ei[0], ei[1])) def nameCheck(self, name: str, dir: str, pre: str, suf: str) -> None: (ndir, nbase) = os.path.split(name) npre = nbase[:len(pre)] nsuf = nbase[len(nbase)-len(suf):] # check for equality of the absolute paths! self.assertEqual(os.path.abspath(ndir), os.path.abspath(dir), "file '%s' not in directory '%s'" % (name, dir)) self.assertEqual(npre, pre, "file '%s' does not begin with '%s'" % (nbase, pre)) self.assertEqual(nsuf, suf, "file '%s' does not end with '%s'" % (nbase, suf)) nbase = nbase[len(pre):len(nbase)-len(suf)] self.assertTrue(self.str_check.match(nbase), "random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/" % nbase) test_classes = [] # type: List[type] class test_exports(TC): def test_exports(self) -> None: # There are no surprising symbols in the tempfile module dict = tempfile.__dict__ expected = { "NamedTemporaryFile" : 1, "TemporaryFile" : 1, "mkstemp" : 1, "mkdtemp" : 1, "mktemp" : 1, "TMP_MAX" : 1, "gettempprefix" : 1, "gettempdir" : 1, "tempdir" : 1, "template" : 1, "SpooledTemporaryFile" : 1, "TemporaryDirectory" : 1, } unexp = [] # type: List[str] for key in dict: if key[0] != '_' and key not in expected: unexp.append(key) self.assertTrue(len(unexp) == 0, "unexpected keys: %s" % unexp) test_classes.append(test_exports) class test__RandomNameSequence(TC): """Test the internal iterator object _RandomNameSequence.""" def setUp(self) -> None: self.r = tempfile._RandomNameSequence() super().setUp() def test_get_six_char_str(self) -> None: # 
_RandomNameSequence returns a six-character string s = next(self.r) self.nameCheck(s, '', '', '') def test_many(self) -> None: # _RandomNameSequence returns no duplicate strings (stochastic) dict = {} # type: Dict[str, int] r = self.r for i in range(TEST_FILES): s = next(r) self.nameCheck(s, '', '', '') self.assertNotIn(s, dict) dict[s] = 1 def supports_iter(self) -> None: # _RandomNameSequence supports the iterator protocol i = 0 r = self.r try: for s in r: i += 1 if i == 20: break except: self.failOnException("iteration") @unittest.skipUnless(hasattr(os, 'fork'), "os.fork is required for this test") def test_process_awareness(self) -> None: # ensure that the random source differs between # child and parent. read_fd, write_fd = os.pipe() pid = None # type: int try: pid = os.fork() if not pid: os.close(read_fd) os.write(write_fd, next(self.r).encode("ascii")) os.close(write_fd) # bypass the normal exit handlers- leave those to # the parent. os._exit(0) parent_value = next(self.r) child_value = os.read(read_fd, len(parent_value)).decode("ascii") finally: if pid: # best effort to ensure the process can't bleed out # via any bugs above try: os.kill(pid, signal.SIGKILL) except EnvironmentError: pass os.close(read_fd) os.close(write_fd) self.assertNotEqual(child_value, parent_value) test_classes.append(test__RandomNameSequence) class test__candidate_tempdir_list(TC): """Test the internal function _candidate_tempdir_list.""" def test_nonempty_list(self) -> None: # _candidate_tempdir_list returns a nonempty list of strings cand = tempfile._candidate_tempdir_list() self.assertFalse(len(cand) == 0) for c in cand: self.assertIsInstance(c, str) def test_wanted_dirs(self) -> None: # _candidate_tempdir_list contains the expected directories # Make sure the interesting environment variables are all set. 
with support.EnvironmentVarGuard() as env: for envname in 'TMPDIR', 'TEMP', 'TMP': dirname = os.getenv(envname) if not dirname: env[envname] = os.path.abspath(envname) cand = tempfile._candidate_tempdir_list() for envname in 'TMPDIR', 'TEMP', 'TMP': dirname = os.getenv(envname) if not dirname: raise ValueError self.assertIn(dirname, cand) try: dirname = os.getcwd() except (AttributeError, os.error): dirname = os.curdir self.assertIn(dirname, cand) # Not practical to try to verify the presence of OS-specific # paths in this list. test_classes.append(test__candidate_tempdir_list) # We test _get_default_tempdir by testing gettempdir. class test__get_candidate_names(TC): """Test the internal function _get_candidate_names.""" def test_retval(self) -> None: # _get_candidate_names returns a _RandomNameSequence object obj = tempfile._get_candidate_names() self.assertIsInstance(obj, tempfile._RandomNameSequence) def test_same_thing(self) -> None: # _get_candidate_names always returns the same object a = tempfile._get_candidate_names() b = tempfile._get_candidate_names() self.assertTrue(a is b) test_classes.append(test__get_candidate_names) class test__mkstemp_inner(TC): """Test the internal function _mkstemp_inner.""" class mkstemped: _bflags = tempfile._bin_openflags _tflags = tempfile._text_openflags def __init__(self, dir: str, pre: str, suf: str, bin: int) -> None: if bin: flags = self._bflags else: flags = self._tflags (self.fd, self.name) = tempfile._mkstemp_inner(dir, pre, suf, flags) self._close = os.close self._unlink = os.unlink def write(self, str: bytes) -> None: os.write(self.fd, str) def __del__(self) -> None: self._close(self.fd) self._unlink(self.name) def do_create(self, dir: str = None, pre: str = "", suf: str= "", bin: int = 1) -> mkstemped: if dir is None: dir = tempfile.gettempdir() try: file = test__mkstemp_inner.mkstemped(dir, pre, suf, bin) # see #259 except: self.failOnException("_mkstemp_inner") self.nameCheck(file.name, dir, pre, suf) return file 
def test_basic(self) -> None: # _mkstemp_inner can create files self.do_create().write(b"blat") self.do_create(pre="a").write(b"blat") self.do_create(suf="b").write(b"blat") self.do_create(pre="a", suf="b").write(b"blat") self.do_create(pre="aa", suf=".txt").write(b"blat") def test_basic_many(self) -> None: # _mkstemp_inner can create many files (stochastic) extant = list(range(TEST_FILES)) # type: List[Any] for i in extant: extant[i] = self.do_create(pre="aa") def test_choose_directory(self) -> None: # _mkstemp_inner can create files in a user-selected directory dir = tempfile.mkdtemp() try: self.do_create(dir=dir).write(b"blat") finally: os.rmdir(dir) def test_file_mode(self) -> None: # _mkstemp_inner creates files with the proper mode if not has_stat: return # ugh, can't use SkipTest. file = self.do_create() mode = stat.S_IMODE(os.stat(file.name).st_mode) expected = 0o600 if sys.platform in ('win32', 'os2emx'): # There's no distinction among 'user', 'group' and 'world'; # replicate the 'user' bits. user = expected >> 6 expected = user * (1 + 8 + 64) self.assertEqual(mode, expected) def test_noinherit(self) -> None: # _mkstemp_inner file handles are not inherited by child processes if not has_spawnl: return # ugh, can't use SkipTest. if support.verbose: v="v" else: v="q" file = self.do_create() fd = "%d" % file.fd try: me = __file__ # type: str except NameError: me = sys.argv[0] # We have to exec something, so that FD_CLOEXEC will take # effect. The core of this test is therefore in # tf_inherit_check.py, which see. 
tester = os.path.join(os.path.dirname(os.path.abspath(me)), "tf_inherit_check.py") # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted, # but an arg with embedded spaces should be decorated with double # quotes on each end if sys.platform in ('win32',): decorated = '"%s"' % sys.executable tester = '"%s"' % tester else: decorated = sys.executable retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd) self.assertFalse(retval < 0, "child process caught fatal signal %d" % -retval) self.assertFalse(retval > 0, "child process reports failure %d"%retval) def test_textmode(self) -> None: # _mkstemp_inner can create files in text mode if not has_textmode: return # ugh, can't use SkipTest. # A text file is truncated at the first Ctrl+Z byte f = self.do_create(bin=0) f.write(b"blat\x1a") f.write(b"extra\n") os.lseek(f.fd, 0, os.SEEK_SET) self.assertEqual(os.read(f.fd, 20), b"blat") test_classes.append(test__mkstemp_inner) class test_gettempprefix(TC): """Test gettempprefix().""" def test_sane_template(self) -> None: # gettempprefix returns a nonempty prefix string p = tempfile.gettempprefix() self.assertIsInstance(p, str) self.assertTrue(len(p) > 0) def test_usable_template(self) -> None: # gettempprefix returns a usable prefix string # Create a temp directory, avoiding use of the prefix. # Then attempt to create a file whose name is # prefix + 'xxxxxx.xxx' in that directory. 
p = tempfile.gettempprefix() + "xxxxxx.xxx" d = tempfile.mkdtemp(prefix="") try: p = os.path.join(d, p) try: fd = os.open(p, os.O_RDWR | os.O_CREAT) except: self.failOnException("os.open") os.close(fd) os.unlink(p) finally: os.rmdir(d) test_classes.append(test_gettempprefix) class test_gettempdir(TC): """Test gettempdir().""" def test_directory_exists(self) -> None: # gettempdir returns a directory which exists dir = tempfile.gettempdir() self.assertTrue(os.path.isabs(dir) or dir == os.curdir, "%s is not an absolute path" % dir) self.assertTrue(os.path.isdir(dir), "%s is not a directory" % dir) def test_directory_writable(self) -> None: # gettempdir returns a directory writable by the user # sneaky: just instantiate a NamedTemporaryFile, which # defaults to writing into the directory returned by # gettempdir. try: file = tempfile.NamedTemporaryFile() file.write(b"blat") file.close() except: self.failOnException("create file in %s" % tempfile.gettempdir()) def test_same_thing(self) -> None: # gettempdir always returns the same object a = tempfile.gettempdir() b = tempfile.gettempdir() self.assertTrue(a is b) test_classes.append(test_gettempdir) class test_mkstemp(TC): """Test mkstemp().""" def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> None: if dir is None: dir = tempfile.gettempdir() try: (fd, name) = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf) (ndir, nbase) = os.path.split(name) adir = os.path.abspath(dir) self.assertEqual(adir, ndir, "Directory '%s' incorrectly returned as '%s'" % (adir, ndir)) except: self.failOnException("mkstemp") try: self.nameCheck(name, dir, pre, suf) finally: os.close(fd) os.unlink(name) def test_basic(self) -> None: # mkstemp can create files self.do_create() self.do_create(pre="a") self.do_create(suf="b") self.do_create(pre="a", suf="b") self.do_create(pre="aa", suf=".txt") self.do_create(dir=".") def test_choose_directory(self) -> None: # mkstemp can create directories in a user-selected directory dir = 
tempfile.mkdtemp() try: self.do_create(dir=dir) finally: os.rmdir(dir) test_classes.append(test_mkstemp) class test_mkdtemp(TC): """Test mkdtemp().""" def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> str: if dir is None: dir = tempfile.gettempdir() try: name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf) except: self.failOnException("mkdtemp") try: self.nameCheck(name, dir, pre, suf) return name except: os.rmdir(name) raise def test_basic(self) -> None: # mkdtemp can create directories os.rmdir(self.do_create()) os.rmdir(self.do_create(pre="a")) os.rmdir(self.do_create(suf="b")) os.rmdir(self.do_create(pre="a", suf="b")) os.rmdir(self.do_create(pre="aa", suf=".txt")) def test_basic_many(self) -> None: # mkdtemp can create many directories (stochastic) extant = list(range(TEST_FILES)) # type: List[Any] try: for i in extant: extant[i] = self.do_create(pre="aa") finally: for i in extant: if(isinstance(i, str)): os.rmdir(i) def test_choose_directory(self) -> None: # mkdtemp can create directories in a user-selected directory dir = tempfile.mkdtemp() try: os.rmdir(self.do_create(dir=dir)) finally: os.rmdir(dir) def test_mode(self) -> None: # mkdtemp creates directories with the proper mode if not has_stat: return # ugh, can't use SkipTest. dir = self.do_create() try: mode = stat.S_IMODE(os.stat(dir).st_mode) mode &= 0o777 # Mask off sticky bits inherited from /tmp expected = 0o700 if sys.platform in ('win32', 'os2emx'): # There's no distinction among 'user', 'group' and 'world'; # replicate the 'user' bits. user = expected >> 6 expected = user * (1 + 8 + 64) self.assertEqual(mode, expected) finally: os.rmdir(dir) test_classes.append(test_mkdtemp) class test_mktemp(TC): """Test mktemp().""" # For safety, all use of mktemp must occur in a private directory. # We must also suppress the RuntimeWarning it generates. 
def setUp(self) -> None: self.dir = tempfile.mkdtemp() super().setUp() def tearDown(self) -> None: if self.dir: os.rmdir(self.dir) self.dir = None super().tearDown() class mktemped: def _unlink(self, path: str) -> None: os.unlink(path) _bflags = tempfile._bin_openflags def __init__(self, dir: str, pre: str, suf: str) -> None: self.name = tempfile.mktemp(dir=dir, prefix=pre, suffix=suf) # Create the file. This will raise an exception if it's # mysteriously appeared in the meanwhile. os.close(os.open(self.name, self._bflags, 0o600)) def __del__(self) -> None: self._unlink(self.name) def do_create(self, pre: str = "", suf: str = "") -> mktemped: try: file = test_mktemp.mktemped(self.dir, pre, suf) # see #259 except: self.failOnException("mktemp") self.nameCheck(file.name, self.dir, pre, suf) return file def test_basic(self) -> None: # mktemp can choose usable file names self.do_create() self.do_create(pre="a") self.do_create(suf="b") self.do_create(pre="a", suf="b") self.do_create(pre="aa", suf=".txt") def test_many(self) -> None: # mktemp can choose many usable file names (stochastic) extant = list(range(TEST_FILES)) # type: List[Any] for i in extant: extant[i] = self.do_create(pre="aa") ## def test_warning(self): ## # mktemp issues a warning when used ## warnings.filterwarnings("error", ## category=RuntimeWarning, ## message="mktemp") ## self.assertRaises(RuntimeWarning, ## tempfile.mktemp, dir=self.dir) test_classes.append(test_mktemp) # We test _TemporaryFileWrapper by testing NamedTemporaryFile. 
class test_NamedTemporaryFile(TC): """Test NamedTemporaryFile().""" def do_create(self, dir: str = None, pre: str = "", suf: str = "", delete: bool = True) -> IO[Any]: if dir is None: dir = tempfile.gettempdir() try: file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf, delete=delete) except: self.failOnException("NamedTemporaryFile") self.nameCheck(file.name, dir, pre, suf) return file def test_basic(self) -> None: # NamedTemporaryFile can create files self.do_create() self.do_create(pre="a") self.do_create(suf="b") self.do_create(pre="a", suf="b") self.do_create(pre="aa", suf=".txt") def test_creates_named(self) -> None: # NamedTemporaryFile creates files with names f = tempfile.NamedTemporaryFile() self.assertTrue(os.path.exists(f.name), "NamedTemporaryFile %s does not exist" % f.name) def test_del_on_close(self) -> None: # A NamedTemporaryFile is deleted when closed dir = tempfile.mkdtemp() try: f = tempfile.NamedTemporaryFile(dir=dir) f.write(b'blat') f.close() self.assertFalse(os.path.exists(f.name), "NamedTemporaryFile %s exists after close" % f.name) finally: os.rmdir(dir) def test_dis_del_on_close(self) -> None: # Tests that delete-on-close can be disabled dir = tempfile.mkdtemp() tmp = None # type: str try: f = tempfile.NamedTemporaryFile(dir=dir, delete=False) tmp = f.name f.write(b'blat') f.close() self.assertTrue(os.path.exists(f.name), "NamedTemporaryFile %s missing after close" % f.name) finally: if tmp is not None: os.unlink(tmp) os.rmdir(dir) def test_multiple_close(self) -> None: # A NamedTemporaryFile can be closed many times without error f = tempfile.NamedTemporaryFile() f.write(b'abc\n') f.close() try: f.close() f.close() except: self.failOnException("close") def test_context_manager(self) -> None: # A NamedTemporaryFile can be used as a context manager with tempfile.NamedTemporaryFile() as f: self.assertTrue(os.path.exists(f.name)) self.assertFalse(os.path.exists(f.name)) def use_closed(): with f: pass 
self.assertRaises(ValueError, use_closed) # How to test the mode and bufsize parameters? test_classes.append(test_NamedTemporaryFile) class test_SpooledTemporaryFile(TC): """Test SpooledTemporaryFile().""" def do_create(self, max_size: int = 0, dir: str = None, pre: str = "", suf: str = "") -> tempfile.SpooledTemporaryFile: if dir is None: dir = tempfile.gettempdir() try: file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf) except: self.failOnException("SpooledTemporaryFile") return file def test_basic(self) -> None: # SpooledTemporaryFile can create files f = self.do_create() self.assertFalse(f._rolled) f = self.do_create(max_size=100, pre="a", suf=".txt") self.assertFalse(f._rolled) def test_del_on_close(self) -> None: # A SpooledTemporaryFile is deleted when closed dir = tempfile.mkdtemp() try: f = tempfile.SpooledTemporaryFile(max_size=10, dir=dir) self.assertFalse(f._rolled) f.write(b'blat ' * 5) self.assertTrue(f._rolled) filename = f.name f.close() self.assertFalse(isinstance(filename, str) and os.path.exists(filename), "SpooledTemporaryFile %s exists after close" % filename) finally: os.rmdir(dir) def test_rewrite_small(self) -> None: # A SpooledTemporaryFile can be written to multiple within the max_size f = self.do_create(max_size=30) self.assertFalse(f._rolled) for i in range(5): f.seek(0, 0) f.write(b'x' * 20) self.assertFalse(f._rolled) def test_write_sequential(self) -> None: # A SpooledTemporaryFile should hold exactly max_size bytes, and roll # over afterward f = self.do_create(max_size=30) self.assertFalse(f._rolled) f.write(b'x' * 20) self.assertFalse(f._rolled) f.write(b'x' * 10) self.assertFalse(f._rolled) f.write(b'x') self.assertTrue(f._rolled) def test_writelines(self) -> None: # Verify writelines with a SpooledTemporaryFile f = self.do_create() f.writelines([b'x', b'y', b'z']) f.seek(0) buf = f.read() self.assertEqual(buf, b'xyz') def test_writelines_sequential(self) -> None: # A SpooledTemporaryFile 
should hold exactly max_size bytes, and roll # over afterward f = self.do_create(max_size=35) f.writelines([b'x' * 20, b'x' * 10, b'x' * 5]) self.assertFalse(f._rolled) f.write(b'x') self.assertTrue(f._rolled) def test_sparse(self) -> None: # A SpooledTemporaryFile that is written late in the file will extend # when that occurs f = self.do_create(max_size=30) self.assertFalse(f._rolled) f.seek(100, 0) self.assertFalse(f._rolled) f.write(b'x') self.assertTrue(f._rolled) def test_fileno(self) -> None: # A SpooledTemporaryFile should roll over to a real file on fileno() f = self.do_create(max_size=30) self.assertFalse(f._rolled) self.assertTrue(f.fileno() > 0) self.assertTrue(f._rolled) def test_multiple_close_before_rollover(self) -> None: # A SpooledTemporaryFile can be closed many times without error f = tempfile.SpooledTemporaryFile() f.write(b'abc\n') self.assertFalse(f._rolled) f.close() try: f.close() f.close() except: self.failOnException("close") def test_multiple_close_after_rollover(self) -> None: # A SpooledTemporaryFile can be closed many times without error f = tempfile.SpooledTemporaryFile(max_size=1) f.write(b'abc\n') self.assertTrue(f._rolled) f.close() try: f.close() f.close() except: self.failOnException("close") def test_bound_methods(self) -> None: # It should be OK to steal a bound method from a SpooledTemporaryFile # and use it independently; when the file rolls over, those bound # methods should continue to function f = self.do_create(max_size=30) read = f.read write = f.write seek = f.seek write(b"a" * 35) write(b"b" * 35) seek(0, 0) self.assertEqual(read(70), b'a'*35 + b'b'*35) def test_text_mode(self) -> None: # Creating a SpooledTemporaryFile with a text mode should produce # a file object reading and writing (Unicode) text strings. 
f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10) f.write("abc\n") f.seek(0) self.assertEqual(f.read(), "abc\n") f.write("def\n") f.seek(0) self.assertEqual(f.read(), "abc\ndef\n") f.write("xyzzy\n") f.seek(0) self.assertEqual(f.read(), "abc\ndef\nxyzzy\n") # Check that Ctrl+Z doesn't truncate the file f.write("foo\x1abar\n") f.seek(0) self.assertEqual(f.read(), "abc\ndef\nxyzzy\nfoo\x1abar\n") def test_text_newline_and_encoding(self) -> None: f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10, newline='', encoding='utf-8') f.write("\u039B\r\n") f.seek(0) self.assertEqual(f.read(), "\u039B\r\n") self.assertFalse(f._rolled) f.write("\u039B" * 20 + "\r\n") f.seek(0) self.assertEqual(f.read(), "\u039B\r\n" + ("\u039B" * 20) + "\r\n") self.assertTrue(f._rolled) def test_context_manager_before_rollover(self) -> None: # A SpooledTemporaryFile can be used as a context manager with tempfile.SpooledTemporaryFile(max_size=1) as f: self.assertFalse(f._rolled) self.assertFalse(f.closed) self.assertTrue(f.closed) def use_closed(): with f: pass self.assertRaises(ValueError, use_closed) def test_context_manager_during_rollover(self) -> None: # A SpooledTemporaryFile can be used as a context manager with tempfile.SpooledTemporaryFile(max_size=1) as f: self.assertFalse(f._rolled) f.write(b'abc\n') f.flush() self.assertTrue(f._rolled) self.assertFalse(f.closed) self.assertTrue(f.closed) def use_closed(): with f: pass self.assertRaises(ValueError, use_closed) def test_context_manager_after_rollover(self) -> None: # A SpooledTemporaryFile can be used as a context manager f = tempfile.SpooledTemporaryFile(max_size=1) f.write(b'abc\n') f.flush() self.assertTrue(f._rolled) with f: self.assertFalse(f.closed) self.assertTrue(f.closed) def use_closed(): with f: pass self.assertRaises(ValueError, use_closed) test_classes.append(test_SpooledTemporaryFile) class test_TemporaryFile(TC): """Test TemporaryFile().""" def test_basic(self) -> None: # TemporaryFile can create files # No 
point in testing the name params - the file has no name. try: tempfile.TemporaryFile() except: self.failOnException("TemporaryFile") def test_has_no_name(self) -> None: # TemporaryFile creates files with no names (on this system) dir = tempfile.mkdtemp() f = tempfile.TemporaryFile(dir=dir) f.write(b'blat') # Sneaky: because this file has no name, it should not prevent # us from removing the directory it was created in. try: os.rmdir(dir) except: ei = sys.exc_info() # cleanup f.close() os.rmdir(dir) self.failOnException("rmdir", ei) def test_multiple_close(self) -> None: # A TemporaryFile can be closed many times without error f = tempfile.TemporaryFile() f.write(b'abc\n') f.close() try: f.close() f.close() except: self.failOnException("close") # How to test the mode and bufsize parameters? def test_mode_and_encoding(self) -> None: def roundtrip(input: AnyStr, *args: Any, **kwargs: Any) -> None: with tempfile.TemporaryFile(*args, **kwargs) as fileobj: fileobj.write(input) fileobj.seek(0) self.assertEqual(input, fileobj.read()) roundtrip(b"1234", "w+b") roundtrip("abdc\n", "w+") roundtrip("\u039B", "w+", encoding="utf-16") roundtrip("foo\r\n", "w+", newline="") if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile: test_classes.append(test_TemporaryFile) # Helper for test_del_on_shutdown class NulledModules: def __init__(self, *modules: Any) -> None: self.refs = [mod.__dict__ for mod in modules] self.contents = [ref.copy() for ref in self.refs] def __enter__(self) -> None: for d in self.refs: for key in d: d[key] = None def __exit__(self, *exc_info: Any) -> None: for d, c in zip(self.refs, self.contents): d.clear() d.update(c) class test_TemporaryDirectory(TC): """Test TemporaryDirectory().""" def do_create(self, dir: str = None, pre: str = "", suf: str = "", recurse: int = 1) -> tempfile.TemporaryDirectory: if dir is None: dir = tempfile.gettempdir() try: tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf) except: 
self.failOnException("TemporaryDirectory") self.nameCheck(tmp.name, dir, pre, suf) # Create a subdirectory and some files if recurse: self.do_create(tmp.name, pre, suf, recurse-1) with open(os.path.join(tmp.name, "test.txt"), "wb") as f: f.write(b"Hello world!") return tmp def test_mkdtemp_failure(self) -> None: # Check no additional exception if mkdtemp fails # Previously would raise AttributeError instead # (noted as part of Issue #10188) with tempfile.TemporaryDirectory() as nonexistent: pass with self.assertRaises(os.error): tempfile.TemporaryDirectory(dir=nonexistent) def test_explicit_cleanup(self) -> None: # A TemporaryDirectory is deleted when cleaned up dir = tempfile.mkdtemp() try: d = self.do_create(dir=dir) self.assertTrue(os.path.exists(d.name), "TemporaryDirectory %s does not exist" % d.name) d.cleanup() self.assertFalse(os.path.exists(d.name), "TemporaryDirectory %s exists after cleanup" % d.name) finally: os.rmdir(dir) @support.skip_unless_symlink def test_cleanup_with_symlink_to_a_directory(self) -> None: # cleanup() should not follow symlinks to directories (issue #12464) d1 = self.do_create() d2 = self.do_create() # Symlink d1/foo -> d2 os.symlink(d2.name, os.path.join(d1.name, "foo")) # This call to cleanup() should not follow the "foo" symlink d1.cleanup() self.assertFalse(os.path.exists(d1.name), "TemporaryDirectory %s exists after cleanup" % d1.name) self.assertTrue(os.path.exists(d2.name), "Directory pointed to by a symlink was deleted") self.assertEqual(os.listdir(d2.name), ['test.txt'], "Contents of the directory pointed to by a symlink " "were deleted") d2.cleanup() @support.cpython_only def test_del_on_collection(self) -> None: # A TemporaryDirectory is deleted when garbage collected dir = tempfile.mkdtemp() try: d = self.do_create(dir=dir) name = d.name del d # Rely on refcounting to invoke __del__ self.assertFalse(os.path.exists(name), "TemporaryDirectory %s exists after __del__" % name) finally: os.rmdir(dir) @unittest.expectedFailure 
# See issue #10188 def test_del_on_shutdown(self) -> None: # A TemporaryDirectory may be cleaned up during shutdown # Make sure it works with the relevant modules nulled out with self.do_create() as dir: d = self.do_create(dir=dir) # Mimic the nulling out of modules that # occurs during system shutdown modules = [os, os.path] if has_stat: modules.append(stat) # Currently broken, so suppress the warning # that is otherwise emitted on stdout with support.captured_stderr() as err: with NulledModules(*modules): d.cleanup() # Currently broken, so stop spurious exception by # indicating the object has already been closed d._closed = True # And this assert will fail, as expected by the # unittest decorator... self.assertFalse(os.path.exists(d.name), "TemporaryDirectory %s exists after cleanup" % d.name) def test_warnings_on_cleanup(self) -> None: # Two kinds of warning on shutdown # Issue 10888: may write to stderr if modules are nulled out # ResourceWarning will be triggered by __del__ with self.do_create() as dir: if os.sep != '\\': # Embed a backslash in order to make sure string escaping # in the displayed error message is dealt with correctly suffix = '\\check_backslash_handling' else: suffix = '' d = self.do_create(dir=dir, suf=suffix) #Check for the Issue 10888 message modules = [os, os.path] if has_stat: modules.append(stat) with support.captured_stderr() as err: with NulledModules(*modules): d.cleanup() message = err.getvalue().replace('\\\\', '\\') self.assertIn("while cleaning up", message) self.assertIn(d.name, message) # Check for the resource warning with support.check_warnings(('Implicitly', ResourceWarning), quiet=False): warnings.filterwarnings("always", category=ResourceWarning) d.__del__() self.assertFalse(os.path.exists(d.name), "TemporaryDirectory %s exists after __del__" % d.name) def test_multiple_close(self) -> None: # Can be cleaned-up many times without error d = self.do_create() d.cleanup() try: d.cleanup() d.cleanup() except: 
self.failOnException("cleanup") def test_context_manager(self) -> None: # Can be used as a context manager d = self.do_create() with d as name: self.assertTrue(os.path.exists(name)) self.assertEqual(name, d.name) self.assertFalse(os.path.exists(name)) test_classes.append(test_TemporaryDirectory) def test_main() -> None: support.run_unittest(*test_classes) if __name__ == "__main__": test_main() mypy-0.761/test-data/stdlib-samples/3.2/test/test_textwrap.py0000644€tŠÔÚ€2›s®0000005463013576752246030311 0ustar jukkaDROPBOX\Domain Users00000000000000# # Test suite for the textwrap module. # # Original tests written by Greg Ward . # Converted to PyUnit by Peter Hansen . # Currently maintained by Greg Ward. # # $Id$ # import unittest from test import support from typing import Any, List, Sequence from textwrap import TextWrapper, wrap, fill, dedent class BaseTestCase(unittest.TestCase): '''Parent class with utility methods for textwrap tests.''' wrapper = None # type: TextWrapper def show(self, textin: Sequence[str]) -> str: if isinstance(textin, list): results = [] # type: List[str] for i in range(len(textin)): results.append(" %d: %r" % (i, textin[i])) result = '\n'.join(results) elif isinstance(textin, str): result = " %s\n" % repr(textin) return result def check(self, result: Sequence[str], expect: Sequence[str]) -> None: self.assertEqual(result, expect, 'expected:\n%s\nbut got:\n%s' % ( self.show(expect), self.show(result))) def check_wrap(self, text: str, width: int, expect: Sequence[str], **kwargs: Any) -> None: result = wrap(text, width, **kwargs) self.check(result, expect) def check_split(self, text: str, expect: Sequence[str]) -> None: result = self.wrapper._split(text) self.assertEqual(result, expect, "\nexpected %r\n" "but got %r" % (expect, result)) class WrapTestCase(BaseTestCase): def setUp(self) -> None: self.wrapper = TextWrapper(width=45) def test_simple(self) -> None: # Simple case: just words, spaces, and a bit of punctuation text = "Hello there, how 
are you this fine day? I'm glad to hear it!" self.check_wrap(text, 12, ["Hello there,", "how are you", "this fine", "day? I'm", "glad to hear", "it!"]) self.check_wrap(text, 42, ["Hello there, how are you this fine day?", "I'm glad to hear it!"]) self.check_wrap(text, 80, [text]) def test_whitespace(self) -> None: # Whitespace munging and end-of-sentence detection text = """\ This is a paragraph that already has line breaks. But some of its lines are much longer than the others, so it needs to be wrapped. Some lines are \ttabbed too. What a mess! """ expect = ["This is a paragraph that already has line", "breaks. But some of its lines are much", "longer than the others, so it needs to be", "wrapped. Some lines are tabbed too. What a", "mess!"] wrapper = TextWrapper(45, fix_sentence_endings=True) result = wrapper.wrap(text) self.check(result, expect) results = wrapper.fill(text) self.check(results, '\n'.join(expect)) def test_fix_sentence_endings(self) -> None: wrapper = TextWrapper(60, fix_sentence_endings=True) # SF #847346: ensure that fix_sentence_endings=True does the # right thing even on input short enough that it doesn't need to # be wrapped. text = "A short line. Note the single space." expect = ["A short line. Note the single space."] self.check(wrapper.wrap(text), expect) # Test some of the hairy end cases that _fix_sentence_endings() # is supposed to handle (the easy stuff is tested in # test_whitespace() above). text = "Well, Doctor? What do you think?" expect = ["Well, Doctor? What do you think?"] self.check(wrapper.wrap(text), expect) text = "Well, Doctor?\nWhat do you think?" self.check(wrapper.wrap(text), expect) text = 'I say, chaps! Anyone for "tennis?"\nHmmph!' expect = ['I say, chaps! Anyone for "tennis?" Hmmph!'] self.check(wrapper.wrap(text), expect) wrapper.width = 20 expect = ['I say, chaps!', 'Anyone for "tennis?"', 'Hmmph!'] self.check(wrapper.wrap(text), expect) text = 'And she said, "Go to hell!"\nCan you believe that?' 
expect = ['And she said, "Go to', 'hell!" Can you', 'believe that?'] self.check(wrapper.wrap(text), expect) wrapper.width = 60 expect = ['And she said, "Go to hell!" Can you believe that?'] self.check(wrapper.wrap(text), expect) text = 'File stdio.h is nice.' expect = ['File stdio.h is nice.'] self.check(wrapper.wrap(text), expect) def test_wrap_short(self) -> None: # Wrapping to make short lines longer text = "This is a\nshort paragraph." self.check_wrap(text, 20, ["This is a short", "paragraph."]) self.check_wrap(text, 40, ["This is a short paragraph."]) def test_wrap_short_1line(self) -> None: # Test endcases text = "This is a short line." self.check_wrap(text, 30, ["This is a short line."]) self.check_wrap(text, 30, ["(1) This is a short line."], initial_indent="(1) ") def test_hyphenated(self) -> None: # Test breaking hyphenated words text = ("this-is-a-useful-feature-for-" "reformatting-posts-from-tim-peters'ly") self.check_wrap(text, 40, ["this-is-a-useful-feature-for-", "reformatting-posts-from-tim-peters'ly"]) self.check_wrap(text, 41, ["this-is-a-useful-feature-for-", "reformatting-posts-from-tim-peters'ly"]) self.check_wrap(text, 42, ["this-is-a-useful-feature-for-reformatting-", "posts-from-tim-peters'ly"]) def test_hyphenated_numbers(self) -> None: # Test that hyphenated numbers (eg. dates) are not broken like words. text = ("Python 1.0.0 was released on 1994-01-26. Python 1.0.1 was\n" "released on 1994-02-15.") self.check_wrap(text, 30, ['Python 1.0.0 was released on', '1994-01-26. Python 1.0.1 was', 'released on 1994-02-15.']) self.check_wrap(text, 40, ['Python 1.0.0 was released on 1994-01-26.', 'Python 1.0.1 was released on 1994-02-15.']) text = "I do all my shopping at 7-11." 
self.check_wrap(text, 25, ["I do all my shopping at", "7-11."]) self.check_wrap(text, 27, ["I do all my shopping at", "7-11."]) self.check_wrap(text, 29, ["I do all my shopping at 7-11."]) def test_em_dash(self) -> None: # Test text with em-dashes text = "Em-dashes should be written -- thus." self.check_wrap(text, 25, ["Em-dashes should be", "written -- thus."]) # Probe the boundaries of the properly written em-dash, # ie. " -- ". self.check_wrap(text, 29, ["Em-dashes should be written", "-- thus."]) expect = ["Em-dashes should be written --", "thus."] self.check_wrap(text, 30, expect) self.check_wrap(text, 35, expect) self.check_wrap(text, 36, ["Em-dashes should be written -- thus."]) # The improperly written em-dash is handled too, because # it's adjacent to non-whitespace on both sides. text = "You can also do--this or even---this." expect = ["You can also do", "--this or even", "---this."] self.check_wrap(text, 15, expect) self.check_wrap(text, 16, expect) expect = ["You can also do--", "this or even---", "this."] self.check_wrap(text, 17, expect) self.check_wrap(text, 19, expect) expect = ["You can also do--this or even", "---this."] self.check_wrap(text, 29, expect) self.check_wrap(text, 31, expect) expect = ["You can also do--this or even---", "this."] self.check_wrap(text, 32, expect) self.check_wrap(text, 35, expect) # All of the above behaviour could be deduced by probing the # _split() method. text = "Here's an -- em-dash and--here's another---and another!" expect = ["Here's", " ", "an", " ", "--", " ", "em-", "dash", " ", "and", "--", "here's", " ", "another", "---", "and", " ", "another!"] self.check_split(text, expect) text = "and then--bam!--he was gone" expect = ["and", " ", "then", "--", "bam!", "--", "he", " ", "was", " ", "gone"] self.check_split(text, expect) def test_unix_options (self) -> None: # Test that Unix-style command-line options are wrapped correctly. # Both Optik (OptionParser) and Docutils rely on this behaviour! 
text = "You should use the -n option, or --dry-run in its long form." self.check_wrap(text, 20, ["You should use the", "-n option, or --dry-", "run in its long", "form."]) self.check_wrap(text, 21, ["You should use the -n", "option, or --dry-run", "in its long form."]) expect = ["You should use the -n option, or", "--dry-run in its long form."] self.check_wrap(text, 32, expect) self.check_wrap(text, 34, expect) self.check_wrap(text, 35, expect) self.check_wrap(text, 38, expect) expect = ["You should use the -n option, or --dry-", "run in its long form."] self.check_wrap(text, 39, expect) self.check_wrap(text, 41, expect) expect = ["You should use the -n option, or --dry-run", "in its long form."] self.check_wrap(text, 42, expect) # Again, all of the above can be deduced from _split(). text = "the -n option, or --dry-run or --dryrun" expect = ["the", " ", "-n", " ", "option,", " ", "or", " ", "--dry-", "run", " ", "or", " ", "--dryrun"] self.check_split(text, expect) def test_funky_hyphens (self) -> None: # Screwy edge cases cooked up by David Goodger. All reported # in SF bug #596434. self.check_split("what the--hey!", ["what", " ", "the", "--", "hey!"]) self.check_split("what the--", ["what", " ", "the--"]) self.check_split("what the--.", ["what", " ", "the--."]) self.check_split("--text--.", ["--text--."]) # When I first read bug #596434, this is what I thought David # was talking about. I was wrong; these have always worked # fine. The real problem is tested in test_funky_parens() # below... self.check_split("--option", ["--option"]) self.check_split("--option-opt", ["--option-", "opt"]) self.check_split("foo --option-opt bar", ["foo", " ", "--option-", "opt", " ", "bar"]) def test_punct_hyphens(self) -> None: # Oh bother, SF #965425 found another problem with hyphens -- # hyphenated words in single quotes weren't handled correctly. 
# In fact, the bug is that *any* punctuation around a hyphenated # word was handled incorrectly, except for a leading "--", which # was special-cased for Optik and Docutils. So test a variety # of styles of punctuation around a hyphenated word. # (Actually this is based on an Optik bug report, #813077). self.check_split("the 'wibble-wobble' widget", ['the', ' ', "'wibble-", "wobble'", ' ', 'widget']) self.check_split('the "wibble-wobble" widget', ['the', ' ', '"wibble-', 'wobble"', ' ', 'widget']) self.check_split("the (wibble-wobble) widget", ['the', ' ', "(wibble-", "wobble)", ' ', 'widget']) self.check_split("the ['wibble-wobble'] widget", ['the', ' ', "['wibble-", "wobble']", ' ', 'widget']) def test_funky_parens (self) -> None: # Second part of SF bug #596434: long option strings inside # parentheses. self.check_split("foo (--option) bar", ["foo", " ", "(--option)", " ", "bar"]) # Related stuff -- make sure parens work in simpler contexts. self.check_split("foo (bar) baz", ["foo", " ", "(bar)", " ", "baz"]) self.check_split("blah (ding dong), wubba", ["blah", " ", "(ding", " ", "dong),", " ", "wubba"]) def test_initial_whitespace(self) -> None: # SF bug #622849 reported inconsistent handling of leading # whitespace; let's test that a bit, shall we? text = " This is a sentence with leading whitespace." self.check_wrap(text, 50, [" This is a sentence with leading whitespace."]) self.check_wrap(text, 30, [" This is a sentence with", "leading whitespace."]) def test_no_drop_whitespace(self) -> None: # SF patch #1581073 text = " This is a sentence with much whitespace." self.check_wrap(text, 10, [" This is a", " ", "sentence ", "with ", "much white", "space."], drop_whitespace=False) def test_split(self) -> None: # Ensure that the standard _split() method works as advertised # in the comments text = "Hello there -- you goof-ball, use the -b option!" 
result = self.wrapper._split(text) self.check(result, ["Hello", " ", "there", " ", "--", " ", "you", " ", "goof-", "ball,", " ", "use", " ", "the", " ", "-b", " ", "option!"]) def test_break_on_hyphens(self) -> None: # Ensure that the break_on_hyphens attributes work text = "yaba daba-doo" self.check_wrap(text, 10, ["yaba daba-", "doo"], break_on_hyphens=True) self.check_wrap(text, 10, ["yaba", "daba-doo"], break_on_hyphens=False) def test_bad_width(self) -> None: # Ensure that width <= 0 is caught. text = "Whatever, it doesn't matter." self.assertRaises(ValueError, wrap, text, 0) self.assertRaises(ValueError, wrap, text, -1) def test_no_split_at_umlaut(self) -> None: text = "Die Empf\xe4nger-Auswahl" self.check_wrap(text, 13, ["Die", "Empf\xe4nger-", "Auswahl"]) def test_umlaut_followed_by_dash(self) -> None: text = "aa \xe4\xe4-\xe4\xe4" self.check_wrap(text, 7, ["aa \xe4\xe4-", "\xe4\xe4"]) class LongWordTestCase (BaseTestCase): def setUp(self) -> None: self.wrapper = TextWrapper() self.text = '''\ Did you say "supercalifragilisticexpialidocious?" How *do* you spell that odd word, anyways? ''' def test_break_long(self) -> None: # Wrap text with long words and lots of punctuation self.check_wrap(self.text, 30, ['Did you say "supercalifragilis', 'ticexpialidocious?" How *do*', 'you spell that odd word,', 'anyways?']) self.check_wrap(self.text, 50, ['Did you say "supercalifragilisticexpialidocious?"', 'How *do* you spell that odd word, anyways?']) # SF bug 797650. Prevent an infinite loop by making sure that at # least one character gets split off on every pass. self.check_wrap('-'*10+'hello', 10, ['----------', ' h', ' e', ' l', ' l', ' o'], subsequent_indent = ' '*15) # bug 1146. 
Prevent a long word to be wrongly wrapped when the # preceding word is exactly one character shorter than the width self.check_wrap(self.text, 12, ['Did you say ', '"supercalifr', 'agilisticexp', 'ialidocious?', '" How *do*', 'you spell', 'that odd', 'word,', 'anyways?']) def test_nobreak_long(self) -> None: # Test with break_long_words disabled self.wrapper.break_long_words = False self.wrapper.width = 30 expect = ['Did you say', '"supercalifragilisticexpialidocious?"', 'How *do* you spell that odd', 'word, anyways?' ] result = self.wrapper.wrap(self.text) self.check(result, expect) # Same thing with kwargs passed to standalone wrap() function. result = wrap(self.text, width=30, break_long_words=0) self.check(result, expect) class IndentTestCases(BaseTestCase): # called before each test method def setUp(self) -> None: self.text = '''\ This paragraph will be filled, first without any indentation, and then with some (including a hanging indent).''' def test_fill(self) -> None: # Test the fill() method expect = '''\ This paragraph will be filled, first without any indentation, and then with some (including a hanging indent).''' result = fill(self.text, 40) self.check(result, expect) def test_initial_indent(self) -> None: # Test initial_indent parameter expect = [" This paragraph will be filled,", "first without any indentation, and then", "with some (including a hanging indent)."] result = wrap(self.text, 40, initial_indent=" ") self.check(result, expect) expects = "\n".join(expect) results = fill(self.text, 40, initial_indent=" ") self.check(results, expects) def test_subsequent_indent(self) -> None: # Test subsequent_indent parameter expect = '''\ * This paragraph will be filled, first without any indentation, and then with some (including a hanging indent).''' result = fill(self.text, 40, initial_indent=" * ", subsequent_indent=" ") self.check(result, expect) # Despite the similar names, DedentTestCase is *not* the inverse # of IndentTestCase! 
class DedentTestCase(unittest.TestCase): def assertUnchanged(self, text: str) -> None: """assert that dedent() has no effect on 'text'""" self.assertEqual(text, dedent(text)) def test_dedent_nomargin(self) -> None: # No lines indented. text = "Hello there.\nHow are you?\nOh good, I'm glad." self.assertUnchanged(text) # Similar, with a blank line. text = "Hello there.\n\nBoo!" self.assertUnchanged(text) # Some lines indented, but overall margin is still zero. text = "Hello there.\n This is indented." self.assertUnchanged(text) # Again, add a blank line. text = "Hello there.\n\n Boo!\n" self.assertUnchanged(text) def test_dedent_even(self) -> None: # All lines indented by two spaces. text = " Hello there.\n How are ya?\n Oh good." expect = "Hello there.\nHow are ya?\nOh good." self.assertEqual(expect, dedent(text)) # Same, with blank lines. text = " Hello there.\n\n How are ya?\n Oh good.\n" expect = "Hello there.\n\nHow are ya?\nOh good.\n" self.assertEqual(expect, dedent(text)) # Now indent one of the blank lines. text = " Hello there.\n \n How are ya?\n Oh good.\n" expect = "Hello there.\n\nHow are ya?\nOh good.\n" self.assertEqual(expect, dedent(text)) def test_dedent_uneven(self) -> None: # Lines indented unevenly. text = '''\ def foo(): while 1: return foo ''' expect = '''\ def foo(): while 1: return foo ''' self.assertEqual(expect, dedent(text)) # Uneven indentation with a blank line. text = " Foo\n Bar\n\n Baz\n" expect = "Foo\n Bar\n\n Baz\n" self.assertEqual(expect, dedent(text)) # Uneven indentation with a whitespace-only line. text = " Foo\n Bar\n \n Baz\n" expect = "Foo\n Bar\n\n Baz\n" self.assertEqual(expect, dedent(text)) # dedent() should not mangle internal tabs def test_dedent_preserve_internal_tabs(self) -> None: text = " hello\tthere\n how are\tyou?" expect = "hello\tthere\nhow are\tyou?" 
self.assertEqual(expect, dedent(text)) # make sure that it preserves tabs when it's not making any # changes at all self.assertEqual(expect, dedent(expect)) # dedent() should not mangle tabs in the margin (i.e. # tabs and spaces both count as margin, but are *not* # considered equivalent) def test_dedent_preserve_margin_tabs(self) -> None: text = " hello there\n\thow are you?" self.assertUnchanged(text) # same effect even if we have 8 spaces text = " hello there\n\thow are you?" self.assertUnchanged(text) # dedent() only removes whitespace that can be uniformly removed! text = "\thello there\n\thow are you?" expect = "hello there\nhow are you?" self.assertEqual(expect, dedent(text)) text = " \thello there\n \thow are you?" self.assertEqual(expect, dedent(text)) text = " \t hello there\n \t how are you?" self.assertEqual(expect, dedent(text)) text = " \thello there\n \t how are you?" expect = "hello there\n how are you?" self.assertEqual(expect, dedent(text)) def test_main() -> None: support.run_unittest(WrapTestCase, LongWordTestCase, IndentTestCases, DedentTestCase) if __name__ == '__main__': test_main() mypy-0.761/test-data/stdlib-samples/3.2/test/tf_inherit_check.py0000644€tŠÔÚ€2›s®0000000110213576752246030646 0ustar jukkaDROPBOX\Domain Users00000000000000# Helper script for test_tempfile.py. argv[2] is the number of a file # descriptor which should _not_ be open. Check this by attempting to # write to it -- if we succeed, something is wrong. import sys import os verbose = (sys.argv[1] == 'v') try: fd = int(sys.argv[2]) try: os.write(fd, b"blat") except os.error: # Success -- could not write to fd. sys.exit(0) else: if verbose: sys.stderr.write("fd %d is open in child" % fd) sys.exit(1) except Exception: if verbose: raise sys.exit(1) mypy-0.761/test-data/stdlib-samples/3.2/textwrap.py0000644€tŠÔÚ€2›s®0000003733213576752246026273 0ustar jukkaDROPBOX\Domain Users00000000000000"""Text wrapping and filling. """ # Copyright (C) 1999-2001 Gregory P. Ward. 
# Copyright (C) 2002, 2003 Python Software Foundation. # Written by Greg Ward import string, re from typing import Dict, List, Any __all__ = ['TextWrapper', 'wrap', 'fill', 'dedent'] # Hardcode the recognized whitespace characters to the US-ASCII # whitespace characters. The main reason for doing this is that in # ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales # that character winds up in string.whitespace. Respecting # string.whitespace in those cases would 1) make textwrap treat 0xa0 the # same as any other whitespace char, which is clearly wrong (it's a # *non-breaking* space), 2) possibly cause problems with Unicode, # since 0xa0 is not in range(128). _whitespace = '\t\n\x0b\x0c\r ' class TextWrapper: """ Object for wrapping/filling text. The public interface consists of the wrap() and fill() methods; the other methods are just there for subclasses to override in order to tweak the default behaviour. If you want to completely replace the main wrapping algorithm, you'll probably have to override _wrap_chunks(). Several instance attributes control various aspects of wrapping: width (default: 70) the maximum width of wrapped lines (unless break_long_words is false) initial_indent (default: "") string that will be prepended to the first line of wrapped output. Counts towards the line's width. subsequent_indent (default: "") string that will be prepended to all lines save the first of wrapped output; also counts towards each line's width. expand_tabs (default: true) Expand tabs in input text to spaces before further processing. Each tab will become 1 .. 8 spaces, depending on its position in its line. If false, each tab is treated as a single character. replace_whitespace (default: true) Replace all whitespace characters in the input text by spaces after tab expansion. Note that if expand_tabs is false and replace_whitespace is true, every tab will be converted to a single space! 
fix_sentence_endings (default: false) Ensure that sentence-ending punctuation is always followed by two spaces. Off by default because the algorithm is (unavoidably) imperfect. break_long_words (default: true) Break words longer than 'width'. If false, those words will not be broken, and some lines might be longer than 'width'. break_on_hyphens (default: true) Allow breaking hyphenated words. If true, wrapping will occur preferably on whitespaces and right after hyphens part of compound words. drop_whitespace (default: true) Drop leading and trailing whitespace from lines. """ unicode_whitespace_trans = {} # type: Dict[int, int] uspace = ord(' ') for x in _whitespace: unicode_whitespace_trans[ord(x)] = uspace # This funky little regex is just the trick for splitting # text up into word-wrappable chunks. E.g. # "Hello there -- you goof-ball, use the -b option!" # splits into # Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option! # (after stripping out empty strings). wordsep_re = re.compile( r'(\s+|' # any whitespace r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|' # hyphenated words r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash # This less funky little regex just split on recognized spaces. E.g. # "Hello there -- you goof-ball, use the -b option!" # splits into # Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/ wordsep_simple_re = re.compile(r'(\s+)') # XXX this is not locale- or charset-aware -- string.lowercase # is US-ASCII only (and therefore English-only) sentence_end_re = re.compile(r'[a-z]' # lowercase letter r'[\.\!\?]' # sentence-ending punct. r'[\"\']?' 
# optional end-of-quote r'\Z') # end of chunk def __init__(self, width: int = 70, initial_indent: str = "", subsequent_indent: str = "", expand_tabs: bool = True, replace_whitespace: bool = True, fix_sentence_endings: bool = False, break_long_words: bool = True, drop_whitespace: bool = True, break_on_hyphens: bool = True) -> None: self.width = width self.initial_indent = initial_indent self.subsequent_indent = subsequent_indent self.expand_tabs = expand_tabs self.replace_whitespace = replace_whitespace self.fix_sentence_endings = fix_sentence_endings self.break_long_words = break_long_words self.drop_whitespace = drop_whitespace self.break_on_hyphens = break_on_hyphens # -- Private methods ----------------------------------------------- # (possibly useful for subclasses to override) def _munge_whitespace(self, text: str) -> str: """_munge_whitespace(text : string) -> string Munge whitespace in text: expand tabs and convert all other whitespace characters to spaces. Eg. " foo\tbar\n\nbaz" becomes " foo bar baz". """ if self.expand_tabs: text = text.expandtabs() if self.replace_whitespace: text = text.translate(self.unicode_whitespace_trans) return text def _split(self, text: str) -> List[str]: """_split(text : string) -> [string] Split the text to wrap into indivisible chunks. Chunks are not quite the same as words; see _wrap_chunks() for full details. As an example, the text Look, goof-ball -- use the -b option! breaks into the following chunks: 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', 'use', ' ', 'the', ' ', '-b', ' ', 'option!' if break_on_hyphens is True, or in: 'Look,', ' ', 'goof-ball', ' ', '--', ' ', 'use', ' ', 'the', ' ', '-b', ' ', option!' otherwise. 
""" if self.break_on_hyphens is True: chunks = self.wordsep_re.split(text) else: chunks = self.wordsep_simple_re.split(text) chunks = [c for c in chunks if c] return chunks def _fix_sentence_endings(self, chunks: List[str]) -> None: """_fix_sentence_endings(chunks : [string]) Correct for sentence endings buried in 'chunks'. Eg. when the original text contains "... foo.\nBar ...", munge_whitespace() and split() will convert that to [..., "foo.", " ", "Bar", ...] which has one too few spaces; this method simply changes the one space to two. """ i = 0 patsearch = self.sentence_end_re.search while i < len(chunks)-1: if chunks[i+1] == " " and patsearch(chunks[i]): chunks[i+1] = " " i += 2 else: i += 1 def _handle_long_word(self, reversed_chunks: List[str], cur_line: List[str], cur_len: int, width: int) -> None: """_handle_long_word(chunks : [string], cur_line : [string], cur_len : int, width : int) Handle a chunk of text (most likely a word, not whitespace) that is too long to fit in any line. """ # Figure out when indent is larger than the specified width, and make # sure at least one character is stripped off on every pass if width < 1: space_left = 1 else: space_left = width - cur_len # If we're allowed to break long words, then do so: put as much # of the next chunk onto the current line as will fit. if self.break_long_words: cur_line.append(reversed_chunks[-1][:space_left]) reversed_chunks[-1] = reversed_chunks[-1][space_left:] # Otherwise, we have to preserve the long word intact. Only add # it to the current line if there's nothing already there -- # that minimizes how much we violate the width constraint. elif not cur_line: cur_line.append(reversed_chunks.pop()) # If we're not allowed to break long words, and there's already # text on the current line, do nothing. Next time through the # main loop of _wrap_chunks(), we'll wind up here again, but # cur_len will be zero, so the next line will be entirely # devoted to the long word that we can't handle right now. 
def _wrap_chunks(self, chunks: List[str]) -> List[str]: """_wrap_chunks(chunks : [string]) -> [string] Wrap a sequence of text chunks and return a list of lines of length 'self.width' or less. (If 'break_long_words' is false, some lines may be longer than this.) Chunks correspond roughly to words and the whitespace between them: each chunk is indivisible (modulo 'break_long_words'), but a line break can come between any two chunks. Chunks should not have internal whitespace; ie. a chunk is either all whitespace or a "word". Whitespace chunks will be removed from the beginning and end of lines, but apart from that whitespace is preserved. """ lines = [] # type: List[str] if self.width <= 0: raise ValueError("invalid width %r (must be > 0)" % self.width) # Arrange in reverse order so items can be efficiently popped # from a stack of chucks. chunks.reverse() while chunks: # Start the list of chunks that will make up the current line. # cur_len is just the length of all the chunks in cur_line. cur_line = [] # type: List[str] cur_len = 0 # Figure out which static string will prefix this line. if lines: indent = self.subsequent_indent else: indent = self.initial_indent # Maximum width for this line. width = self.width - len(indent) # First chunk on line is whitespace -- drop it, unless this # is the very beginning of the text (ie. no lines started yet). if self.drop_whitespace and chunks[-1].strip() == '' and lines: del chunks[-1] while chunks: l = len(chunks[-1]) # Can at least squeeze this chunk onto the current line. if cur_len + l <= width: cur_line.append(chunks.pop()) cur_len += l # Nope, this line is full. else: break # The current line is full, and the next chunk is too big to # fit on *any* line (not just this one). if chunks and len(chunks[-1]) > width: self._handle_long_word(chunks, cur_line, cur_len, width) # If the last chunk on this line is all whitespace, drop it. 
if self.drop_whitespace and cur_line and cur_line[-1].strip() == '': del cur_line[-1] # Convert current line back to a string and store it in list # of all lines (return value). if cur_line: lines.append(indent + ''.join(cur_line)) return lines # -- Public interface ---------------------------------------------- def wrap(self, text: str) -> List[str]: """wrap(text : string) -> [string] Reformat the single paragraph in 'text' so it fits in lines of no more than 'self.width' columns, and return a list of wrapped lines. Tabs in 'text' are expanded with string.expandtabs(), and all other whitespace characters (including newline) are converted to space. """ text = self._munge_whitespace(text) chunks = self._split(text) if self.fix_sentence_endings: self._fix_sentence_endings(chunks) return self._wrap_chunks(chunks) def fill(self, text: str) -> str: """fill(text : string) -> string Reformat the single paragraph in 'text' to fit in lines of no more than 'self.width' columns, and return a new string containing the entire wrapped paragraph. """ return "\n".join(self.wrap(text)) # -- Convenience interface --------------------------------------------- def wrap(text: str, width: int = 70, **kwargs: Any) -> List[str]: """Wrap a single paragraph of text, returning a list of wrapped lines. Reformat the single paragraph in 'text' so it fits in lines of no more than 'width' columns, and return a list of wrapped lines. By default, tabs in 'text' are expanded with string.expandtabs(), and all other whitespace characters (including newline) are converted to space. See TextWrapper class for available keyword args to customize wrapping behaviour. """ w = TextWrapper(width=width, **kwargs) return w.wrap(text) def fill(text: str, width: int = 70, **kwargs: Any) -> str: """Fill a single paragraph of text, returning a new string. Reformat the single paragraph in 'text' to fit in lines of no more than 'width' columns, and return a new string containing the entire wrapped paragraph. 
As with wrap(), tabs are expanded and other whitespace characters converted to space. See TextWrapper class for available keyword args to customize wrapping behaviour. """ w = TextWrapper(width=width, **kwargs) return w.fill(text) # -- Loosely related functionality ------------------------------------- _whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE) _leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE) def dedent(text: str) -> str: """Remove any common leading whitespace from every line in `text`. This can be used to make triple-quoted strings line up with the left edge of the display, while still presenting them in the source code in indented form. Note that tabs and spaces are both treated as whitespace, but they are not equal: the lines " hello" and "\thello" are considered to have no common leading whitespace. (This behaviour is new in Python 2.5; older versions of this module incorrectly expanded tabs before searching for common leading whitespace.) """ # Look for the longest leading string of spaces and tabs common to # all lines. margin = None # type: str text = _whitespace_only_re.sub('', text) indents = _leading_whitespace_re.findall(text) for indent in indents: if margin is None: margin = indent # Current line more deeply indented than previous winner: # no change (previous winner is still on top). elif indent.startswith(margin): pass # Current line consistent with and no deeper than previous winner: # it's the new winner. elif margin.startswith(indent): margin = indent # Current line and previous winner have no common whitespace: # there is no margin. 
else: margin = "" break # sanity check (testing/debugging only) if 0 and margin: for line in text.split("\n"): assert not line or line.startswith(margin), \ "line = %r, margin = %r" % (line, margin) if margin: text = re.sub(r'(?m)^' + margin, '', text) return text if __name__ == "__main__": #print dedent("\tfoo\n\tbar") #print dedent(" \thello there\n \t how are you?") print(dedent("Hello there.\n This is indented.")) mypy-0.761/test-data/unit/0000755€tŠÔÚ€2›s®0000000000013576752267021570 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/unit/README.md0000644€tŠÔÚ€2›s®0000001545713576752246023060 0ustar jukkaDROPBOX\Domain Users00000000000000Tests ===== Quick Start ----------- To add a simple unit test for a new feature you developed, open or create a `test-data/unit/check-*.test` file with a name that roughly relates to the feature you added. Add the test in this format anywhere in the file: [case testNewSyntaxBasics] # flags: --python-version 3.6 x: int x = 5 y: int = 5 a: str a = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") b: str = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") zzz: int zzz: str # E: Name 'zzz' already defined - no code here is executed, just type checked - optional `# flags: ` indicates which flags to use for this unit test - `# E: abc...` indicates that this line should result in type check error with text "abc..." 
- note a space after `E:` and `flags:` - `# E:12` adds column number to the expected error - use `\` to escape the `#` character and indicate that the rest of the line is part of the error message - repeating `# E: ` several times in one line indicates multiple expected errors in one line - `W: ...` and `N: ...` works exactly like `E:`, but report a warning and a note respectively - lines that don't contain the above should cause no type check errors - optional `[builtins fixtures/...]` tells the type checker to use stubs from the indicated file (see Fixtures section below) - optional `[out]` is an alternative to the "# E:" notation: it indicates that any text after it contains the expected type checking error messages. Usually, "E: " is preferred because it makes it easier to associate the errors with the code generating them at a glance, and to change the code of the test without having to change line numbers in `[out]` - an empty `[out]` section has no effect - to run just this test, use `pytest -n0 -k testNewSyntaxBasics` Fixtures -------- The unit tests use minimal stubs for builtins, so a lot of operations are not possible. You should generally define any needed classes within the test case instead of relying on builtins, though clearly this is not always an option (see below for more about stubs in test cases). This way tests run much faster and don't break if the stubs change. If your test crashes mysteriously even though the code works when run manually, you should make sure you have all the stubs you need for your test case, including built-in classes such as `list` or `dict`, as these are not included by default. Where the stubs for builtins come from for a given test: - The builtins used by default in unit tests live in `test-data/unit/lib-stub`. - Individual test cases can override the builtins stubs by using `[builtins fixtures/foo.pyi]`; this targets files in `test-data/unit/fixtures`. 
Feel free to modify existing files there or create new ones as you deem fit. - Test cases can also use `[typing fixtures/typing-full.pyi]` to use a more complete stub for `typing` that contains the async types, among other things. - Feel free to add additional stubs to that `fixtures` directory, but generally don't expand files in `lib-stub` without first discussing the addition with other mypy developers, as additions could slow down the test suite. Running tests and linting ------------------------- First install any additional dependencies needed for testing: $ python3 -m pip install -U -r test-requirements.txt You must also have a Python 2.7 binary installed that can import the `typing` module: $ python2 -m pip install -U typing The unit test suites are driven by the `pytest` framework. To run all tests, run `pytest` in the mypy repository: $ pytest Note that some tests will be disabled for older python versions. This will run all tests, including integration and regression tests, and will type check mypy and verify that all stubs are valid. This may take several minutes to run, so you don't want to use this all the time while doing development. Test suites for individual components are in the files `mypy/test/test*.py`. You can run tests from a specific module directly, a specific suite within a module, or a test in a suite (even if it's data-driven): $ pytest mypy/test/testdiff.py $ pytest mypy/test/testsemanal.py::SemAnalTypeInfoSuite $ pytest -n0 mypy/test/testargs.py::ArgSuite::test_coherence $ pytest -n0 mypy/test/testcheck.py::TypeCheckSuite::testCallingVariableWithFunctionType To control which tests are run and how, you can use the `-k` switch: $ pytest -k "MethodCall" You can also run the type checker for manual testing without installing it by setting up the Python module search path suitably: $ export PYTHONPATH=$PWD $ python3 -m mypy PROGRAM.py You will have to manually install the `typing` module if you're running Python 3.4 or earlier. 
You can also execute mypy as a module $ python3 -m mypy PROGRAM.py You can check a module or string instead of a file: $ python3 -m mypy PROGRAM.py $ python3 -m mypy -m MODULE $ python3 -m mypy -c 'import MODULE' To run mypy on itself: $ python3 -m mypy --config-file mypy_self_check.ini -p mypy To run the linter: $ flake8 You can also run all of the above tests together with: $ python3 runtests.py Many test suites store test case descriptions in text files (`test-data/unit/*.test`). The module `mypy.test.data` parses these descriptions. Python evaluation test cases are a little different from unit tests (`mypy/test/testpythoneval.py`, `test-data/unit/pythoneval.test`). These type check programs and run them. Unlike the unit tests, these use the full builtins and library stubs instead of minimal ones. Run them using `pytest -k testpythoneval`. `pytest` determines the number of processes to use. The default (set in `./pytest.ini`) is the number of logical cores; this can be overridden using `-n` option. To run a single process, use `pytest -n0`. Note that running more processes than logical cores is likely to significantly decrease performance. Debugging --------- You can use interactive debuggers like `pdb` to debug failing tests. You need to pass the `-n0` option to disable parallelization: $ pytest -n0 --pdb -k MethodCall You can also write `import pdb; pdb.set_trace()` in code to enter the debugger. The `--mypy-verbose` flag can be used to enable additional debug output from most tests (as if `--verbose` had been passed to mypy): $ pytest -n0 --mypy-verbose -k MethodCall Coverage reports ---------------- There is an experimental feature to generate coverage reports. To use this feature, you need to `pip install -U lxml`. This is an extension module and requires various library headers to install; on a Debian-derived system the command `apt-get install python3-dev libxml2-dev libxslt1-dev` may provide the necessary dependencies. To use the feature, pass e.g. 
`--txt-report "$(mktemp -d)"`. mypy-0.761/test-data/unit/check-abstract.test0000644€tŠÔÚ€2›s®0000006065413576752246025357 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checker test cases for abstract classes. -- Subtyping with abstract classes -- ------------------------------- [case testAbstractClassSubclasses] from abc import abstractmethod, ABCMeta i = None # type: I j = None # type: J a = None # type: A b = None # type: B c = None # type: C def f(): i, j, a, b, c # Prevent redefinition j = c # E: Incompatible types in assignment (expression has type "C", variable has type "J") a = i # E: Incompatible types in assignment (expression has type "I", variable has type "A") a = j # E: Incompatible types in assignment (expression has type "J", variable has type "A") b = i # E: Incompatible types in assignment (expression has type "I", variable has type "B") i = a i = b i = c j = a j = b a = b class I(metaclass=ABCMeta): @abstractmethod def f(self): pass class J(metaclass=ABCMeta): @abstractmethod def g(self): pass class A(I, J): pass class B(A): pass class C(I): pass [case testAbstractClassSubtypingViaExtension] from abc import abstractmethod, ABCMeta i = None # type: I j = None # type: J a = None # type: A o = None # type: object def f(): i, j, a, o # Prevent redefinition j = i # E: Incompatible types in assignment (expression has type "I", variable has type "J") a = i # E: Incompatible types in assignment (expression has type "I", variable has type "A") a = j # E: Incompatible types in assignment (expression has type "J", variable has type "A") i = o # E: Incompatible types in assignment (expression has type "object", variable has type "I") j = o # E: Incompatible types in assignment (expression has type "object", variable has type "J") i = a j = a i = j o = i o = j class I(metaclass=ABCMeta): @abstractmethod def f(self): pass class J(I): pass class A(J): pass [case testInheritingAbstractClassInSubclass] from abc import abstractmethod, ABCMeta i = None # 
type: I a = None # type: A b = None # type: B if int(): i = a # E: Incompatible types in assignment (expression has type "A", variable has type "I") if int(): b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = b if int(): i = b class I(metaclass=ABCMeta): @abstractmethod def f(self): pass class A: pass class B(A, I): pass -- Abstract class objects -- ---------------------- [case testAbstractClassAsTypeObject] from abc import abstractmethod, ABCMeta o = None # type: object t = None # type: type o = I t = I class I(metaclass=ABCMeta): @abstractmethod def f(self): pass [case testAbstractClassInCasts] from typing import cast from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @abstractmethod def f(self): pass class A(I): pass class B: pass i, a, b = None, None, None # type: (I, A, B) o = None # type: object if int(): a = cast(I, o) # E: Incompatible types in assignment (expression has type "I", variable has type "A") if int(): b = cast(B, i) # Ok; a subclass of B might inherit I if int(): i = cast(I, b) # Ok; a subclass of B might inherit I if int(): i = cast(I, o) if int(): i = cast(I, a) [case testInstantiatingClassThatImplementsAbstractMethod] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def f(self): pass class B(A): def f(self): pass B() [out] [case testInstantiatingAbstractClass] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): pass class B(metaclass=ABCMeta): @abstractmethod def f(self): pass A() # OK B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'f' [out] [case testInstantiatingClassWithInheritedAbstractMethod] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def f(self): pass @abstractmethod def g(self): pass class B(A): pass B() # E: Cannot instantiate abstract class 'B' with abstract attributes 'f' and 'g' [out] [case 
testInstantiationAbstractsInTypeForFunctions] from typing import Type from abc import abstractmethod class A: @abstractmethod def m(self) -> None: pass class B(A): pass class C(B): def m(self) -> None: pass def f(cls: Type[A]) -> A: return cls() # OK def g() -> A: return A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'm' f(A) # E: Only concrete class can be given where "Type[A]" is expected f(B) # E: Only concrete class can be given where "Type[A]" is expected f(C) # OK x: Type[B] f(x) # OK [out] [case testInstantiationAbstractsInTypeForAliases] from typing import Type from abc import abstractmethod class A: @abstractmethod def m(self) -> None: pass class B(A): pass class C(B): def m(self) -> None: pass def f(cls: Type[A]) -> A: return cls() # OK Alias = A GoodAlias = C Alias() # E: Cannot instantiate abstract class 'A' with abstract attribute 'm' GoodAlias() f(Alias) # E: Only concrete class can be given where "Type[A]" is expected f(GoodAlias) [out] [case testInstantiationAbstractsInTypeForVariables] from typing import Type from abc import abstractmethod class A: @abstractmethod def m(self) -> None: pass class B(A): pass class C(B): def m(self) -> None: pass var: Type[A] var() if int(): var = A # E: Can only assign concrete classes to a variable of type "Type[A]" if int(): var = B # E: Can only assign concrete classes to a variable of type "Type[A]" if int(): var = C # OK var_old = None # type: Type[A] # Old syntax for variable annotations var_old() if int(): var_old = A # E: Can only assign concrete classes to a variable of type "Type[A]" if int(): var_old = B # E: Can only assign concrete classes to a variable of type "Type[A]" if int(): var_old = C # OK [out] [case testInstantiationAbstractsInTypeForClassMethods] from typing import Type from abc import abstractmethod class Logger: @staticmethod def log(a: Type[C]): pass class C: @classmethod def action(cls) -> None: cls() #OK for classmethods Logger.log(cls) #OK for classmethods 
@abstractmethod def m(self) -> None: pass [builtins fixtures/classmethod.pyi] [out] [case testInstantiatingClassWithInheritedAbstractMethodAndSuppression] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def a(self): pass @abstractmethod def b(self): pass @abstractmethod def c(self): pass @abstractmethod def d(self): pass @abstractmethod def e(self): pass @abstractmethod def f(self): pass @abstractmethod def g(self): pass @abstractmethod def h(self): pass @abstractmethod def i(self): pass @abstractmethod def j(self): pass a = A() # E: Cannot instantiate abstract class 'A' with abstract attributes 'a', 'b', ... and 'j' (7 methods suppressed) [out] -- Implementing abstract methods -- ----------------------------- [case testImplementingAbstractMethod] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> int: pass @abstractmethod def g(self, x: int) -> int: pass class B(A): def f(self, x: str) -> int: \ # E: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "int" pass def g(self, x: int) -> int: pass [out] [case testImplementingAbstractMethodWithMultipleBaseClasses] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> int: pass class J(metaclass=ABCMeta): @abstractmethod def g(self, x: str) -> str: pass class A(I, J): def f(self, x: str) -> int: pass \ # E: Argument 1 of "f" is incompatible with supertype "I"; supertype defines the argument type as "int" def g(self, x: str) -> int: pass \ # E: Return type "int" of "g" incompatible with return type "str" in supertype "J" def h(self) -> int: pass # Not related to any base class [out] [case testImplementingAbstractMethodWithExtension] from abc import abstractmethod, ABCMeta import typing class J(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> int: pass class I(J): pass class A(I): def 
f(self, x: str) -> int: pass \ # E: Argument 1 of "f" is incompatible with supertype "J"; supertype defines the argument type as "int" [out] [case testInvalidOverridingAbstractMethod] from abc import abstractmethod, ABCMeta import typing class J(metaclass=ABCMeta): @abstractmethod def f(self, x: 'J') -> None: pass class I(J): @abstractmethod def f(self, x: 'I') -> None: pass # E: Argument 1 of "f" is incompatible with supertype "J"; supertype defines the argument type as "J" [out] [case testAbstractClassCoAndContraVariance] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, a: A) -> 'I': pass @abstractmethod def g(self, a: A) -> 'I': pass @abstractmethod def h(self, a: 'I') -> A: pass class A(I): def h(self, a: 'A') -> 'I': # Fail pass def f(self, a: 'I') -> 'I': pass def g(self, a: 'A') -> 'A': pass [out] main:11: error: Argument 1 of "h" is incompatible with supertype "I"; supertype defines the argument type as "I" main:11: error: Return type "I" of "h" incompatible with return type "A" in supertype "I" -- Accessing abstract members -- -------------------------- [case testAccessingAbstractMethod] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @abstractmethod def f(self, a: int) -> str: pass i, a, b = None, None, None # type: (I, int, str) if int(): a = i.f(a) # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): b = i.f(b) # E: Argument 1 to "f" of "I" has incompatible type "str"; expected "int" i.g() # E: "I" has no attribute "g" if int(): b = i.f(a) [case testAccessingInheritedAbstractMethod] from abc import abstractmethod, ABCMeta class J(metaclass=ABCMeta): @abstractmethod def f(self, a: int) -> str: pass class I(J): pass i, a, b = None, None, None # type: (I, int, str) if int(): a = i.f(1) # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): b = i.f(1) -- Any (dynamic) types -- 
------------------- [case testAbstractClassWithAllDynamicTypes] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, x): pass @abstractmethod def g(self, x): pass class A(I): def f(self, x): pass def g(self, x, y) -> None: pass \ # E: Signature of "g" incompatible with supertype "I" [out] [case testAbstractClassWithAllDynamicTypes2] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, x): pass @abstractmethod def g(self, x): pass class A(I): def f(self, x): pass def g(self, x, y): pass [out] [case testAbstractClassWithImplementationUsingDynamicTypes] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> None: pass @abstractmethod def g(self, x: int) -> None: pass class A(I): def f(self, x): pass def g(self, x, y): pass [out] -- Special cases -- ------------- [case testMultipleAbstractBases] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def g(self) -> None: pass class C(A, B): @abstractmethod def h(self) -> None: pass [case testMemberAccessWithMultipleAbstractBaseClasses] from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def g(self) -> None: pass class C(A, B): pass x = None # type: C x.f() x.g() x.f(x) # E: Too many arguments for "f" of "A" x.g(x) # E: Too many arguments for "g" of "B" [case testInstantiatingAbstractClassWithMultipleBaseClasses] from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def g(self) -> None: pass class C(A, B): def f(self) -> None: pass class D(A, B): def g(self) -> None: pass class E(A, B): def f(self) -> None: pass def g(self) -> None: 
pass C() # E: Cannot instantiate abstract class 'C' with abstract attribute 'g' D() # E: Cannot instantiate abstract class 'D' with abstract attribute 'f' E() [case testInconsistentMro] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): pass class B(object, A): pass \ # E: Cannot determine consistent method resolution order (MRO) for "B" [case testOverloadedAbstractMethod] from foo import * [file foo.pyi] from abc import abstractmethod, ABCMeta from typing import overload class A(metaclass=ABCMeta): @abstractmethod @overload def f(self, x: int) -> int: pass @abstractmethod @overload def f(self, x: str) -> str: pass class B(A): @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f' B() B().f(1) a = B() # type: A a.f(1) a.f('') a.f(B()) # E: No overload variant of "f" of "A" matches argument type "B" \ # N: Possible overload variants: \ # N: def f(self, x: int) -> int \ # N: def f(self, x: str) -> str [case testOverloadedAbstractMethodWithAlternativeDecoratorOrder] from foo import * [file foo.pyi] from abc import abstractmethod, ABCMeta from typing import overload class A(metaclass=ABCMeta): @overload @abstractmethod def f(self, x: int) -> int: pass @overload @abstractmethod def f(self, x: str) -> str: pass class B(A): @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f' B() B().f(1) a = B() # type: A a.f(1) a.f('') a.f(B()) # E: No overload variant of "f" of "A" matches argument type "B" \ # N: Possible overload variants: \ # N: def f(self, x: int) -> int \ # N: def f(self, x: str) -> str [case testOverloadedAbstractMethodVariantMissingDecorator1] from foo import * [file foo.pyi] from abc import abstractmethod, ABCMeta from typing import overload class A(metaclass=ABCMeta): @abstractmethod \ # E: Overloaded method has both 
abstract and non-abstract variants @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass [out] [case testOverloadedAbstractMethodVariantMissingDecorator1] from foo import * [file foo.pyi] from abc import abstractmethod, ABCMeta from typing import overload class A(metaclass=ABCMeta): @overload \ # E: Overloaded method has both abstract and non-abstract variants def f(self, x: int) -> int: pass @abstractmethod @overload def f(self, x: str) -> str: pass [out] [case testMultipleInheritanceAndAbstractMethod] import typing from abc import abstractmethod, ABCMeta class A: def f(self, x: str) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def f(self, x: str) -> None: pass class C(A, B): pass [case testMultipleInheritanceAndAbstractMethod2] import typing from abc import abstractmethod, ABCMeta class A: def f(self, x: str) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> None: pass class C(A, B): pass [out] main:8: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" [case testCallAbstractMethodBeforeDefinition] import typing from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): def f(self) -> None: self.g(1) # E: Argument 1 to "g" of "A" has incompatible type "int"; expected "str" @abstractmethod def g(self, x: str) -> None: pass [out] [case testAbstractOperatorMethods1] import typing from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod def __lt__(self, other: 'A') -> int: pass @abstractmethod def __gt__(self, other: 'A') -> int: pass [case testAbstractOperatorMethods2] import typing from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod def __radd__(self, other: 'C') -> str: pass # Error class B: @abstractmethod def __add__(self, other: 'A') -> int: pass class C: def __add__(self, other: int) -> B: pass [out] [case testAbstractClassWithAnyBase] from typing import Any from abc import 
abstractmethod, ABCMeta A: Any class D(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class C(A, D): pass C() # A might implement 'f' -- Abstract properties -- ------------------- [case testReadOnlyAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass def f(a: A) -> None: a.x() # E: "int" not callable a.x = 1 # E: Property "x" defined in "A" is read-only [out] [case testReadOnlyAbstractPropertyForwardRef] from abc import abstractproperty, ABCMeta def f(a: A) -> None: a.x() # E: "int" not callable a.x = 1 # E: Property "x" defined in "A" is read-only class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass [out] [case testReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta def f(a: A) -> None: a.x.y # E: "int" has no attribute "y" a.x = 1 class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass @x.setter def x(self, x: int) -> None: pass [out] [case testInstantiateClassWithReadOnlyAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): pass b = B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'x' [case testInstantiateClassWithReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass @x.setter def x(self, x: int) -> None: pass class B(A): pass b = B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'x' [case testImplementAbstractPropertyViaProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): @property def x(self) -> int: pass b = B() b.x() # E: "int" not callable [builtins fixtures/property.pyi] [case testImplementReradWriteAbstractPropertyViaProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def 
x(self) -> int: pass @x.setter def x(self, v: int) -> None: pass class B(A): @property def x(self) -> int: pass @x.setter def x(self, v: int) -> None: pass b = B() b.x.y # E: "int" has no attribute "y" [builtins fixtures/property.pyi] [case testImplementAbstractPropertyViaPropertyInvalidType] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): @property def x(self) -> str: pass # E: Return type "str" of "x" incompatible with return type "int" in supertype "A" b = B() b.x() # E: "str" not callable [builtins fixtures/property.pyi] [case testCantImplementAbstractPropertyViaInstanceVariable] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): def __init__(self) -> None: self.x = 1 # E b = B() # E b.x.y # E [builtins fixtures/property.pyi] [out] main:7: error: Property "x" defined in "A" is read-only main:8: error: Cannot instantiate abstract class 'B' with abstract attribute 'x' main:9: error: "int" has no attribute "y" [case testSuperWithAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): @property def x(self) -> int: return super().x.y # E: "int" has no attribute "y" [builtins fixtures/property.pyi] [out] [case testSuperWithReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass @x.setter def x(self, v: int) -> None: pass class B(A): @property def x(self) -> int: return super().x.y # E @x.setter def x(self, v: int) -> None: super().x = '' # E [builtins fixtures/property.pyi] [out] main:10: error: "int" has no attribute "y" main:13: error: Invalid assignment target [case testOnlyImplementGetterOfReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass @x.setter def 
x(self, v: int) -> None: pass class B(A): @property # E def x(self) -> int: pass b = B() b.x.y # E [builtins fixtures/property.pyi] [out] main:8: error: Read-only property cannot override read-write property main:11: error: "int" has no attribute "y" [case testDynamicallyTypedReadOnlyAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self): pass def f(a: A) -> None: a.x.y a.x = 1 # E: Property "x" defined in "A" is read-only [out] [case testDynamicallyTypedReadOnlyAbstractPropertyForwardRef] from abc import abstractproperty, ABCMeta def f(a: A) -> None: a.x.y a.x = 1 # E: Property "x" defined in "A" is read-only class A(metaclass=ABCMeta): @abstractproperty def x(self): pass [out] [case testDynamicallyTypedReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta def f(a: A) -> None: a.x.y a.x = 1 class A(metaclass=ABCMeta): @abstractproperty def x(self): pass @x.setter def x(self, x): pass [out] [case testMixinTypedAbstractProperty] from abc import ABCMeta, abstractproperty class A(metaclass=ABCMeta): @abstractproperty def foo(cls) -> str: pass class Mixin: foo = "foo" class C(Mixin, A): pass [out] [case testMixinTypedProperty] class A: @property def foo(cls) -> str: pass class Mixin: foo = "foo" class C(Mixin, A): pass [builtins fixtures/property.pyi] [out] [case testMixinSubtypedProperty] class X: pass class Y(X): pass class A: @property def foo(cls) -> X: pass class Mixin: foo = Y() class C(Mixin, A): pass [builtins fixtures/property.pyi] [out] [case testMixinTypedPropertyReversed] class A: @property def foo(cls) -> str: pass class Mixin: foo = "foo" class C(A, Mixin): # E: Definition of "foo" in base class "A" is incompatible with definition in base class "Mixin" pass [builtins fixtures/property.pyi] [out] -- Special cases -- ------------- [case testNestedAbstractClass] from abc import abstractmethod, ABCMeta class A: class B(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass 
class C(B): pass A.B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'f' A.C() # E: Cannot instantiate abstract class 'C' with abstract attribute 'f' [case testAbstractNewTypeAllowed] from typing import NewType, Mapping Config = NewType('Config', Mapping[str, str]) bad = Mapping[str, str]() # E: Cannot instantiate abstract class 'Mapping' with abstract attribute '__iter__' default = Config({'cannot': 'modify'}) # OK default[1] = 2 # E: Unsupported target for indexed assignment [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testSubclassOfABCFromDictionary] from abc import abstractmethod, ABCMeta class MyAbstractType(metaclass=ABCMeta): @abstractmethod def do(self): pass class MyConcreteA(MyAbstractType): def do(self): print('A') class MyConcreteB(MyAbstractType): def do(self): print('B') class MyAbstractA(MyAbstractType): @abstractmethod def do(self): pass class MyAbstractB(MyAbstractType): @abstractmethod def do(self): pass my_concrete_types = { 'A': MyConcreteA, 'B': MyConcreteB, } my_abstract_types = { 'A': MyAbstractA, 'B': MyAbstractB, } reveal_type(my_concrete_types) # N: Revealed type is 'builtins.dict[builtins.str*, def () -> __main__.MyAbstractType]' reveal_type(my_abstract_types) # N: Revealed type is 'builtins.dict[builtins.str*, def () -> __main__.MyAbstractType]' a = my_concrete_types['A']() a.do() b = my_concrete_types['B']() b.do() c = my_abstract_types['A']() # E: Cannot instantiate abstract class 'MyAbstractType' with abstract attribute 'do' c.do() d = my_abstract_types['B']() # E: Cannot instantiate abstract class 'MyAbstractType' with abstract attribute 'do' d.do() [builtins fixtures/dict.pyi] mypy-0.761/test-data/unit/check-annotated.test0000644€tŠÔÚ€2›s®0000000677413576752246025534 0ustar jukkaDROPBOX\Domain Users00000000000000[case testAnnotated0] from typing_extensions import Annotated x: Annotated[int, ...] 
reveal_type(x) # N: Revealed type is 'builtins.int' [case testAnnotated1] from typing import Union from typing_extensions import Annotated x: Annotated[Union[int, str], ...] reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' [case testAnnotated2] from typing_extensions import Annotated x: Annotated[int, THESE, ARE, IGNORED, FOR, NOW] reveal_type(x) # N: Revealed type is 'builtins.int' [case testAnnotated3] from typing_extensions import Annotated x: Annotated[int, -+~12.3, "som"[e], more(anno+a+ions, that=[are]), (b"ignored",), 4, N.O.W, ...] reveal_type(x) # N: Revealed type is 'builtins.int' [case testAnnotatedBadType] from typing_extensions import Annotated x: Annotated[XXX, ...] # E: Name 'XXX' is not defined reveal_type(x) # N: Revealed type is 'Any' [case testAnnotatedBadNoArgs] from typing_extensions import Annotated x: Annotated # E: Annotated[...] must have exactly one type argument and at least one annotation reveal_type(x) # N: Revealed type is 'Any' [case testAnnotatedBadOneArg] from typing_extensions import Annotated x: Annotated[int] # E: Annotated[...] must have exactly one type argument and at least one annotation reveal_type(x) # N: Revealed type is 'Any' [case testAnnotatedNested0] from typing_extensions import Annotated x: Annotated[Annotated[int, ...], ...] reveal_type(x) # N: Revealed type is 'builtins.int' [case testAnnotatedNested1] from typing import Union from typing_extensions import Annotated x: Annotated[Annotated[Union[int, str], ...], ...] reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' [case testAnnotatedNestedBadType] from typing_extensions import Annotated x: Annotated[Annotated[XXX, ...], ...] # E: Name 'XXX' is not defined reveal_type(x) # N: Revealed type is 'Any' [case testAnnotatedNestedBadNoArgs] from typing_extensions import Annotated x: Annotated[Annotated, ...] # E: Annotated[...] 
must have exactly one type argument and at least one annotation reveal_type(x) # N: Revealed type is 'Any' [case testAnnotatedNestedBadOneArg] from typing_extensions import Annotated x: Annotated[Annotated[int], ...] # E: Annotated[...] must have exactly one type argument and at least one annotation reveal_type(x) # N: Revealed type is 'Any' [case testAnnotatedNoImport] x: Annotated[int, ...] # E: Name 'Annotated' is not defined reveal_type(x) # N: Revealed type is 'Any' [case testAnnotatedDifferentName] from typing_extensions import Annotated as An x: An[int, ...] reveal_type(x) # N: Revealed type is 'builtins.int' [case testAnnotatedAliasSimple] from typing import Tuple from typing_extensions import Annotated Alias = Annotated[Tuple[int, ...], ...] x: Alias reveal_type(x) # N: Revealed type is 'builtins.tuple[builtins.int]' [case testAnnotatedAliasTypeVar] from typing import TypeVar from typing_extensions import Annotated T = TypeVar('T') Alias = Annotated[T, ...] x: Alias[int] reveal_type(x) # N: Revealed type is 'builtins.int' [case testAnnotatedAliasGenericTuple] from typing import TypeVar, Tuple from typing_extensions import Annotated T = TypeVar('T') Alias = Annotated[Tuple[T, T], ...] x: Alias[int] reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' [case testAnnotatedAliasGenericUnion] from typing import TypeVar, Union from typing_extensions import Annotated T = TypeVar('T') Alias = Annotated[Union[T, str], ...] 
x: Alias[int] reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' mypy-0.761/test-data/unit/check-async-await.test0000644€tŠÔÚ€2›s®0000005070413576752246025767 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for async def and await (PEP 492) -- --------------------------------------- [case testAsyncDefPass] async def f() -> int: pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncDefReturn] async def f() -> int: return 0 reveal_type(f()) # N: Revealed type is 'typing.Coroutine[Any, Any, builtins.int]' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncDefMissingReturn] # flags: --warn-no-return async def f() -> int: make_this_not_trivial = 1 [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:2: error: Missing return statement [case testAsyncDefReturnWithoutValue] async def f() -> int: make_this_not_trivial = 1 return [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:4: error: Return value expected [case testAwaitCoroutine] async def f() -> int: x = await f() reveal_type(x) # N: Revealed type is 'builtins.int*' return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] [case testAwaitDefaultContext] from typing import TypeVar T = TypeVar('T') async def f(x: T) -> T: y = await f(x) reveal_type(y) return y [typing fixtures/typing-full.pyi] [out] main:6: note: Revealed type is 'T`-1' [case testAwaitAnyContext] from typing import Any, TypeVar T = TypeVar('T') async def f(x: T) -> T: y = await f(x) # type: Any reveal_type(y) return y [typing fixtures/typing-full.pyi] [out] main:6: note: Revealed type is 'Any' [case testAwaitExplicitContext] from typing import TypeVar T = TypeVar('T') async def f(x: T) -> T: y = await f(x) # type: int reveal_type(y) return x [typing fixtures/typing-full.pyi] [out] main:5: error: Argument 1 to "f" has incompatible type "T"; expected "int" main:6: note: 
Revealed type is 'builtins.int' [case testAwaitGeneratorError] from typing import Any, Generator def g() -> Generator[int, None, str]: yield 0 return '' async def f() -> int: x = await g() return x [typing fixtures/typing-full.pyi] [out] main:7: error: Incompatible types in "await" (actual type "Generator[int, None, str]", expected type "Awaitable[Any]") [case testAwaitIteratorError] from typing import Any, Iterator def g() -> Iterator[Any]: yield async def f() -> int: x = await g() return x [typing fixtures/typing-full.pyi] [out] main:6: error: Incompatible types in "await" (actual type "Iterator[Any]", expected type "Awaitable[Any]") [case testAwaitArgumentError] def g() -> int: return 0 async def f() -> int: x = await g() return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:5: error: Incompatible types in "await" (actual type "int", expected type "Awaitable[Any]") [case testAwaitResultError] async def g() -> int: return 0 async def f() -> str: x = await g() # type: str return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAwaitReturnError] async def g() -> int: return 0 async def f() -> str: x = await g() return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:6: error: Incompatible return value type (got "int", expected "str") [case testAsyncFor] from typing import AsyncIterator class C(AsyncIterator[int]): async def __anext__(self) -> int: return 0 async def f() -> None: async for x in C(): reveal_type(x) # N: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncForError] from typing import AsyncIterator async def f() -> None: async for x in [1]: pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:4: error: "List[int]" has no attribute "__aiter__" (not 
async iterable) [case testAsyncForTypeComments] from typing import AsyncIterator, Union class C(AsyncIterator[int]): async def __anext__(self) -> int: return 0 async def f() -> None: async for x in C(): # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass async for y in C(): # type: int pass async for z in C(): # type: Union[int, str] reveal_type(z) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncForComprehension] # flags: --python-version 3.6 from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple T = TypeVar('T') class asyncify(Generic[T], AsyncIterator[T]): def __init__(self, iterable: Iterable[T]) -> None: self.iterable = iter(iterable) def __aiter__(self) -> AsyncIterator[T]: return self async def __anext__(self) -> T: try: return next(self.iterable) except StopIteration: raise StopAsyncIteration async def listcomp(obj: Iterable[int]): lst = [i async for i in asyncify(obj)] reveal_type(lst) # N: Revealed type is 'builtins.list[builtins.int*]' lst2 = [i async for i in asyncify(obj) for j in obj] reveal_type(lst2) # N: Revealed type is 'builtins.list[builtins.int*]' async def setcomp(obj: Iterable[int]): lst = {i async for i in asyncify(obj)} reveal_type(lst) # N: Revealed type is 'builtins.set[builtins.int*]' async def dictcomp(obj: Iterable[Tuple[int, str]]): lst = {i: j async for i, j in asyncify(obj)} reveal_type(lst) # N: Revealed type is 'builtins.dict[builtins.int*, builtins.str*]' async def generatorexp(obj: Iterable[int]): lst = (i async for i in asyncify(obj)) reveal_type(lst) # N: Revealed type is 'typing.AsyncGenerator[builtins.int*, None]' lst2 = (i async for i in asyncify(obj) for i in obj) reveal_type(lst2) # N: Revealed type is 'typing.AsyncGenerator[builtins.int*, None]' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncForComprehensionErrors] # flags: 
--python-version 3.6 from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple T = TypeVar('T') class asyncify(Generic[T], AsyncIterator[T]): def __init__(self, iterable: Iterable[T]) -> None: self.iterable = iter(iterable) def __aiter__(self) -> AsyncIterator[T]: return self async def __anext__(self) -> T: try: return next(self.iterable) except StopIteration: raise StopAsyncIteration async def wrong_iterable(obj: Iterable[int]): [i async for i in obj] [i for i in asyncify(obj)] {i: i async for i in obj} {i: i for i in asyncify(obj)} [out] main:18: error: "Iterable[int]" has no attribute "__aiter__" (not async iterable) main:19: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) main:20: error: "Iterable[int]" has no attribute "__aiter__" (not async iterable) main:21: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWith] class C: async def __aenter__(self) -> int: pass async def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: reveal_type(x) # N: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithError] class C: def __enter__(self) -> int: pass def __exit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:6: error: "C" has no attribute "__aenter__"; maybe "__enter__"? main:6: error: "C" has no attribute "__aexit__"; maybe "__exit__"? 
[case testAsyncWithErrorBadAenter] class C: def __aenter__(self) -> int: pass async def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for "__aenter__" (actual type "int", expected type "Awaitable[Any]") pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithErrorBadAenter2] class C: def __aenter__(self) -> None: pass async def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: # E: "None" has no attribute "__await__" pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithErrorBadAexit] class C: async def __aenter__(self) -> int: pass def __aexit__(self, x, y, z) -> int: pass async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for "__aexit__" (actual type "int", expected type "Awaitable[Any]") pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithErrorBadAexit2] class C: async def __aenter__(self) -> int: pass def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: # E: "None" has no attribute "__await__" pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithTypeComments] class C: async def __aenter__(self) -> int: pass async def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: # type: int pass async with C() as y, C() as z: # type: str, int # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass async with C() as a: # type: int, int # E: Syntax error in type annotation # N: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testNoYieldInAsyncDef] # flags: --python-version 3.5 async def f(): yield None # E: 'yield' in async function async def g(): yield # E: 'yield' in async 
function async def h(): x = yield # E: 'yield' in async function [builtins fixtures/async_await.pyi] [case testNoYieldFromInAsyncDef] async def f(): yield from [] async def g(): x = yield from [] [builtins fixtures/async_await.pyi] [out] main:3: error: 'yield from' in async function main:5: error: 'yield from' in async function [case testNoAsyncDefInPY2_python2] async def f(): # E: invalid syntax pass [case testYieldFromNoAwaitable] from typing import Any, Generator async def f() -> str: return '' def g() -> Generator[Any, None, str]: x = yield from f() return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:6: error: "yield from" can't be applied to "Coroutine[Any, Any, str]" [case testAwaitableSubclass] from typing import Any, AsyncIterator, Awaitable, Generator class A(Awaitable[int]): def __await__(self) -> Generator[Any, None, int]: yield return 0 class C: def __aenter__(self) -> A: return A() def __aexit__(self, *a) -> A: return A() class I(AsyncIterator[int]): def __aiter__(self) -> 'I': return self def __anext__(self) -> A: return A() async def main() -> None: x = await A() reveal_type(x) # N: Revealed type is 'builtins.int' async with C() as y: reveal_type(y) # N: Revealed type is 'builtins.int' async for z in I(): reveal_type(z) # N: Revealed type is 'builtins.int' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testYieldTypeCheckInDecoratedCoroutine] from typing import Generator from types import coroutine @coroutine def f() -> Generator[int, str, int]: x = yield 0 x = yield '' # E: Incompatible types in "yield" (actual type "str", expected type "int") reveal_type(x) # N: Revealed type is 'builtins.str' if x: return 0 else: return '' # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] -- Async generators (PEP 525), some test cases adapted from the PEP text -- 
--------------------------------------------------------------------- [case testAsyncGenerator] # flags: --python-version 3.6 from typing import AsyncGenerator, Generator async def f() -> int: return 42 async def g() -> AsyncGenerator[int, None]: value = await f() reveal_type(value) # N: Revealed type is 'builtins.int*' yield value yield 'not an int' # E: Incompatible types in "yield" (actual type "str", expected type "int") # return without a value is fine return reveal_type(g) # N: Revealed type is 'def () -> typing.AsyncGenerator[builtins.int, None]' reveal_type(g()) # N: Revealed type is 'typing.AsyncGenerator[builtins.int, None]' async def h() -> None: async for item in g(): reveal_type(item) # N: Revealed type is 'builtins.int*' async def wrong_return() -> Generator[int, None, None]: # E: The return type of an async generator function should be "AsyncGenerator" or one of its supertypes yield 3 [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorReturnIterator] # flags: --python-version 3.6 from typing import AsyncIterator async def gen() -> AsyncIterator[int]: yield 3 yield 'not an int' # E: Incompatible types in "yield" (actual type "str", expected type "int") async def use_gen() -> None: async for item in gen(): reveal_type(item) # N: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorManualIter] # flags: --python-version 3.6 from typing import AsyncGenerator async def genfunc() -> AsyncGenerator[int, None]: yield 1 yield 2 async def user() -> None: gen = genfunc() reveal_type(gen.__aiter__()) # N: Revealed type is 'typing.AsyncGenerator[builtins.int*, None]' reveal_type(await gen.__anext__()) # N: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorAsend] # flags: --python-version 3.6 from typing import AsyncGenerator async def f() -> None: pass async def gen() -> AsyncGenerator[int, 
str]: await f() v = yield 42 reveal_type(v) # N: Revealed type is 'builtins.str' await f() async def h() -> None: g = gen() await g.asend(()) # E: Argument 1 to "asend" of "AsyncGenerator" has incompatible type "Tuple[]"; expected "str" reveal_type(await g.asend('hello')) # N: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorAthrow] # flags: --python-version 3.6 from typing import AsyncGenerator async def gen() -> AsyncGenerator[str, int]: try: yield 'hello' except BaseException: yield 'world' async def h() -> None: g = gen() v = await g.asend(1) reveal_type(v) # N: Revealed type is 'builtins.str*' reveal_type(await g.athrow(BaseException)) # N: Revealed type is 'builtins.str*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorNoSyncIteration] # flags: --python-version 3.6 from typing import AsyncGenerator async def gen() -> AsyncGenerator[int, None]: for i in [1, 2, 3]: yield i def h() -> None: for i in gen(): pass [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] main:9: error: "AsyncGenerator[int, None]" has no attribute "__iter__"; maybe "__aiter__"? 
(not iterable) [case testAsyncGeneratorNoYieldFrom] # flags: --python-version 3.6 from typing import AsyncGenerator async def f() -> AsyncGenerator[int, None]: pass async def gen() -> AsyncGenerator[int, None]: yield from f() # E: 'yield from' in async function [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorNoReturnWithValue] # flags: --python-version 3.6 from typing import AsyncGenerator async def return_int() -> AsyncGenerator[int, None]: yield 1 return 42 # E: 'return' with value in async generator is not allowed async def return_none() -> AsyncGenerator[int, None]: yield 1 return None # E: 'return' with value in async generator is not allowed def f() -> None: return async def return_f() -> AsyncGenerator[int, None]: yield 1 return f() # E: 'return' with value in async generator is not allowed [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- The full matrix of coroutine compatibility -- ------------------------------------------ [case testFullCoroutineMatrix] from typing import Any, AsyncIterator, Awaitable, Generator, Iterator from types import coroutine # The various things you might try to use in `await` or `yield from`. def plain_generator() -> Generator[str, None, int]: yield 'a' return 1 async def plain_coroutine() -> int: return 1 @coroutine def decorated_generator() -> Generator[str, None, int]: yield 'a' return 1 @coroutine async def decorated_coroutine() -> int: return 1 class It(Iterator[str]): def __iter__(self) -> 'It': return self def __next__(self) -> str: return 'a' def other_iterator() -> It: return It() class Aw(Awaitable[int]): def __await__(self) -> Generator[str, Any, int]: yield 'a' return 1 def other_coroutine() -> Aw: return Aw() # The various contexts in which `await` or `yield from` might occur. 
def plain_host_generator() -> Generator[str, None, None]: yield 'a' x = 0 x = yield from plain_generator() x = yield from plain_coroutine() # E: "yield from" can't be applied to "Coroutine[Any, Any, int]" x = yield from decorated_generator() x = yield from decorated_coroutine() # E: "yield from" can't be applied to "AwaitableGenerator[Any, Any, int, Coroutine[Any, Any, int]]" x = yield from other_iterator() x = yield from other_coroutine() # E: "yield from" can't be applied to "Aw" async def plain_host_coroutine() -> None: x = 0 x = await plain_generator() # E: Incompatible types in "await" (actual type "Generator[str, None, int]", expected type "Awaitable[Any]") x = await plain_coroutine() x = await decorated_generator() x = await decorated_coroutine() x = await other_iterator() # E: Incompatible types in "await" (actual type "It", expected type "Awaitable[Any]") x = await other_coroutine() @coroutine def decorated_host_generator() -> Generator[str, None, None]: yield 'a' x = 0 x = yield from plain_generator() x = yield from plain_coroutine() x = yield from decorated_generator() x = yield from decorated_coroutine() x = yield from other_iterator() x = yield from other_coroutine() # E: "yield from" can't be applied to "Aw" @coroutine async def decorated_host_coroutine() -> None: x = 0 x = await plain_generator() # E: Incompatible types in "await" (actual type "Generator[str, None, int]", expected type "Awaitable[Any]") x = await plain_coroutine() x = await decorated_generator() x = await decorated_coroutine() x = await other_iterator() # E: Incompatible types in "await" (actual type "It", expected type "Awaitable[Any]") x = await other_coroutine() [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] [case testAsyncGenDisallowUntyped] # flags: --disallow-untyped-defs # These should not crash from typing import AsyncGenerator, Any async def f() -> AsyncGenerator[int, None]: yield 0 async def g() -> AsyncGenerator[Any, None]: yield 0 [builtins 
fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] [case testAsyncGenDisallowUntypedTriggers] # flags: --disallow-untyped-defs from typing import AsyncGenerator, Any async def f() -> AsyncGenerator[Any, Any]: yield None async def h() -> Any: yield 0 async def g(): # E: Function is missing a return type annotation yield 0 [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] [case testAsyncOverloadedFunction] from typing import overload @overload async def f(x: int) -> int: ... @overload async def f(x: str) -> str: ... async def f(x): pass reveal_type(f) # N: Revealed type is 'Overload(def (x: builtins.int) -> typing.Coroutine[Any, Any, builtins.int], def (x: builtins.str) -> typing.Coroutine[Any, Any, builtins.str])' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncForwardRefInBody] async def f() -> None: forwardref: C class C: pass def dec(x): pass @dec async def g() -> None: forwardref: C class C: pass reveal_type(f) # N: Revealed type is 'def () -> typing.Coroutine[Any, Any, None]' reveal_type(g) # N: Revealed type is 'Any' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] mypy-0.761/test-data/unit/check-attr.test0000644€tŠÔÚ€2›s®0000010133513576752246024516 0ustar jukkaDROPBOX\Domain Users00000000000000[case testAttrsSimple] import attr @attr.s class A: a = attr.ib() _b = attr.ib() c = attr.ib(18) _d = attr.ib(validator=None, default=18) E = 18 def foo(self): return self.a reveal_type(A) # N: Revealed type is 'def (a: Any, b: Any, c: Any =, d: Any =) -> __main__.A' A(1, [2]) A(1, [2], '3', 4) A(1, 2, 3, 4) A(1, [2], '3', 4, 5) # E: Too many arguments for "A" [builtins fixtures/list.pyi] [case testAttrsAnnotated] import attr from typing import List, ClassVar @attr.s class A: a: int = attr.ib() _b: List[int] = attr.ib() c: str = attr.ib('18') _d: int = attr.ib(validator=None, default=18) E = 7 F: ClassVar[int] = 22 reveal_type(A) # N: Revealed type is 'def (a: 
builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> __main__.A' A(1, [2]) A(1, [2], '3', 4) A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str" A(1, [2], '3', 4, 5) # E: Too many arguments for "A" [builtins fixtures/list.pyi] [case testAttrsPython2Annotations] import attr from typing import List, ClassVar @attr.s class A: a = attr.ib() # type: int _b = attr.ib() # type: List[int] c = attr.ib('18') # type: str _d = attr.ib(validator=None, default=18) # type: int E = 7 F: ClassVar[int] = 22 reveal_type(A) # N: Revealed type is 'def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> __main__.A' A(1, [2]) A(1, [2], '3', 4) A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str" A(1, [2], '3', 4, 5) # E: Too many arguments for "A" [builtins fixtures/list.pyi] [case testAttrsAutoAttribs] import attr from typing import List, ClassVar @attr.s(auto_attribs=True) class A: a: int _b: List[int] c: str = '18' _d: int = attr.ib(validator=None, default=18) E = 7 F: ClassVar[int] = 22 reveal_type(A) # N: Revealed type is 'def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> __main__.A' A(1, [2]) A(1, [2], '3', 4) A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str" A(1, [2], '3', 4, 5) # E: Too many arguments for "A" [builtins fixtures/list.pyi] [case testAttrsUntypedNoUntypedDefs] # flags: --disallow-untyped-defs import attr @attr.s class A: a = attr.ib() # E: Need type annotation for 'a' _b = attr.ib() # E: Need type annotation for '_b' c = attr.ib(18) # E: Need type annotation for 'c' _d = attr.ib(validator=None, default=18) # E: Need type annotation for '_d' E = 18 
[builtins fixtures/bool.pyi] [case testAttrsWrongReturnValue] import attr @attr.s class A: x: int = attr.ib(8) def foo(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") @attr.s class B: x = attr.ib(8) # type: int def foo(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") @attr.dataclass class C: x: int = 8 def foo(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") @attr.s class D: x = attr.ib(8, type=int) def foo(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") [builtins fixtures/bool.pyi] [case testAttrsSeriousNames] from attr import attrib, attrs from typing import List @attrs(init=True) class A: a = attrib() _b: List[int] = attrib() c = attrib(18) _d = attrib(validator=None, default=18) CLASS_VAR = 18 reveal_type(A) # N: Revealed type is 'def (a: Any, b: builtins.list[builtins.int], c: Any =, d: Any =) -> __main__.A' A(1, [2]) A(1, [2], '3', 4) A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" A(1, [2], '3', 4, 5) # E: Too many arguments for "A" [builtins fixtures/list.pyi] [case testAttrsDefaultErrors] import attr @attr.s class A: x = attr.ib(default=17) y = attr.ib() # E: Non-default attributes not allowed after default attributes. @attr.s(auto_attribs=True) class B: x: int = 17 y: int # E: Non-default attributes not allowed after default attributes. @attr.s(auto_attribs=True) class C: x: int = attr.ib(default=17) y: int # E: Non-default attributes not allowed after default attributes. @attr.s class D: x = attr.ib() y = attr.ib() # E: Non-default attributes not allowed after default attributes. @x.default def foo(self): return 17 [builtins fixtures/bool.pyi] [case testAttrsNotBooleans] import attr x = True @attr.s(cmp=x) # E: "cmp" argument must be True or False. class A: a = attr.ib(init=x) # E: "init" argument must be True or False. 
[builtins fixtures/bool.pyi] [case testAttrsInitFalse] from attr import attrib, attrs @attrs(auto_attribs=True, init=False) class A: a: int _b: int c: int = 18 _d: int = attrib(validator=None, default=18) reveal_type(A) # N: Revealed type is 'def () -> __main__.A' A() A(1, [2]) # E: Too many arguments for "A" A(1, [2], '3', 4) # E: Too many arguments for "A" [builtins fixtures/list.pyi] [case testAttrsInitAttribFalse] from attr import attrib, attrs @attrs class A: a = attrib(init=False) b = attrib() reveal_type(A) # N: Revealed type is 'def (b: Any) -> __main__.A' [builtins fixtures/bool.pyi] [case testAttrsCmpTrue] from attr import attrib, attrs @attrs(auto_attribs=True) class A: a: int reveal_type(A) # N: Revealed type is 'def (a: builtins.int) -> __main__.A' reveal_type(A.__eq__) # N: Revealed type is 'def (self: __main__.A, other: builtins.object) -> builtins.bool' reveal_type(A.__ne__) # N: Revealed type is 'def (self: __main__.A, other: builtins.object) -> builtins.bool' reveal_type(A.__lt__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' reveal_type(A.__le__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' reveal_type(A.__gt__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' reveal_type(A.__ge__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' A(1) < A(2) A(1) <= A(2) A(1) > A(2) A(1) >= A(2) A(1) == A(2) A(1) != A(2) A(1) < 1 # E: Unsupported operand types for < ("A" and "int") A(1) <= 1 # E: Unsupported operand types for <= ("A" and "int") A(1) > 1 # E: Unsupported operand types for > ("A" and "int") A(1) >= 1 # E: Unsupported operand types for >= ("A" and "int") A(1) == 1 A(1) != 1 1 < A(1) # E: Unsupported operand types for > ("A" and "int") 1 <= A(1) # E: Unsupported operand types for >= ("A" and "int") 1 > A(1) # E: Unsupported operand types for < ("A" and "int") 1 >= A(1) # E: Unsupported operand types for <= ("A" and 
"int") 1 == A(1) 1 != A(1) [builtins fixtures/attr.pyi] [case testAttrsEqFalse] from attr import attrib, attrs @attrs(auto_attribs=True, eq=False) class A: a: int reveal_type(A) # N: Revealed type is 'def (a: builtins.int) -> __main__.A' reveal_type(A.__eq__) # N: Revealed type is 'def (builtins.object, builtins.object) -> builtins.bool' reveal_type(A.__ne__) # N: Revealed type is 'def (builtins.object, builtins.object) -> builtins.bool' A(1) < A(2) # E: Unsupported left operand type for < ("A") A(1) <= A(2) # E: Unsupported left operand type for <= ("A") A(1) > A(2) # E: Unsupported left operand type for > ("A") A(1) >= A(2) # E: Unsupported left operand type for >= ("A") A(1) == A(2) A(1) != A(2) A(1) < 1 # E: Unsupported left operand type for < ("A") A(1) <= 1 # E: Unsupported left operand type for <= ("A") A(1) > 1 # E: Unsupported left operand type for > ("A") A(1) >= 1 # E: Unsupported left operand type for >= ("A") A(1) == 1 A(1) != 1 1 < A(1) # E: Unsupported left operand type for < ("int") 1 <= A(1) # E: Unsupported left operand type for <= ("int") 1 > A(1) # E: Unsupported left operand type for > ("int") 1 >= A(1) # E: Unsupported left operand type for >= ("int") 1 == A(1) 1 != A(1) [builtins fixtures/attr.pyi] [case testAttrsOrderFalse] from attr import attrib, attrs @attrs(auto_attribs=True, order=False) class A: a: int reveal_type(A) # N: Revealed type is 'def (a: builtins.int) -> __main__.A' reveal_type(A.__eq__) # N: Revealed type is 'def (self: __main__.A, other: builtins.object) -> builtins.bool' reveal_type(A.__ne__) # N: Revealed type is 'def (self: __main__.A, other: builtins.object) -> builtins.bool' A(1) < A(2) # E: Unsupported left operand type for < ("A") A(1) <= A(2) # E: Unsupported left operand type for <= ("A") A(1) > A(2) # E: Unsupported left operand type for > ("A") A(1) >= A(2) # E: Unsupported left operand type for >= ("A") A(1) == A(2) A(1) != A(2) A(1) < 1 # E: Unsupported left operand type for < ("A") A(1) <= 1 # E: Unsupported 
left operand type for <= ("A") A(1) > 1 # E: Unsupported left operand type for > ("A") A(1) >= 1 # E: Unsupported left operand type for >= ("A") A(1) == 1 A(1) != 1 1 < A(1) # E: Unsupported left operand type for < ("int") 1 <= A(1) # E: Unsupported left operand type for <= ("int") 1 > A(1) # E: Unsupported left operand type for > ("int") 1 >= A(1) # E: Unsupported left operand type for >= ("int") 1 == A(1) 1 != A(1) [builtins fixtures/attr.pyi] [case testAttrsCmpEqOrderValues] from attr import attrib, attrs @attrs(cmp=True) class DeprecatedTrue: ... @attrs(cmp=False) class DeprecatedFalse: ... @attrs(cmp=False, eq=True) # E: Don't mix `cmp` with `eq' and `order` class Mixed: ... @attrs(order=True, eq=False) # E: eq must be True if order is True class Confused: ... [builtins fixtures/attr.pyi] [case testAttrsInheritance] import attr @attr.s class A: a: int = attr.ib() @attr.s class B: b: str = attr.ib() @attr.s class C(A, B): c: bool = attr.ib() reveal_type(C) # N: Revealed type is 'def (a: builtins.int, b: builtins.str, c: builtins.bool) -> __main__.C' [builtins fixtures/bool.pyi] [case testAttrsNestedInClasses] import attr @attr.s class C: y = attr.ib() @attr.s class D: x: int = attr.ib() reveal_type(C) # N: Revealed type is 'def (y: Any) -> __main__.C' reveal_type(C.D) # N: Revealed type is 'def (x: builtins.int) -> __main__.C.D' [builtins fixtures/bool.pyi] [case testAttrsInheritanceOverride] import attr @attr.s class A: a: int = attr.ib() x: int = attr.ib() @attr.s class B(A): b: str = attr.ib() x: int = attr.ib(default=22) @attr.s class C(B): c: bool = attr.ib() # No error here because the x below overwrites the x above. 
x: int = attr.ib() reveal_type(A) # N: Revealed type is 'def (a: builtins.int, x: builtins.int) -> __main__.A' reveal_type(B) # N: Revealed type is 'def (a: builtins.int, b: builtins.str, x: builtins.int =) -> __main__.B' reveal_type(C) # N: Revealed type is 'def (a: builtins.int, b: builtins.str, c: builtins.bool, x: builtins.int) -> __main__.C' [builtins fixtures/bool.pyi] [case testAttrsTypeEquals] import attr @attr.s class A: a = attr.ib(type=int) b = attr.ib(18, type=int) reveal_type(A) # N: Revealed type is 'def (a: builtins.int, b: builtins.int =) -> __main__.A' [builtins fixtures/bool.pyi] [case testAttrsFrozen] import attr @attr.s(frozen=True) class A: a = attr.ib() a = A(5) a.a = 16 # E: Property "a" defined in "A" is read-only [builtins fixtures/bool.pyi] [case testAttrsDataClass] import attr from typing import List, ClassVar @attr.dataclass class A: a: int _b: List[str] c: str = '18' _d: int = attr.ib(validator=None, default=18) E = 7 F: ClassVar[int] = 22 reveal_type(A) # N: Revealed type is 'def (a: builtins.int, b: builtins.list[builtins.str], c: builtins.str =, d: builtins.int =) -> __main__.A' A(1, ['2']) [builtins fixtures/list.pyi] [case testAttrsTypeAlias] from typing import List import attr Alias = List[int] @attr.s(auto_attribs=True) class A: Alias2 = List[str] x: Alias y: Alias2 = attr.ib() reveal_type(A) # N: Revealed type is 'def (x: builtins.list[builtins.int], y: builtins.list[builtins.str]) -> __main__.A' [builtins fixtures/list.pyi] [case testAttrsGeneric] from typing import TypeVar, Generic, List import attr T = TypeVar('T') @attr.s(auto_attribs=True) class A(Generic[T]): x: List[T] y: T = attr.ib() def foo(self) -> List[T]: return [self.y] def bar(self) -> T: return self.x[0] def problem(self) -> T: return self.x # E: Incompatible return value type (got "List[T]", expected "T") reveal_type(A) # N: Revealed type is 'def [T] (x: builtins.list[T`1], y: T`1) -> __main__.A[T`1]' a = A([1], 2) reveal_type(a) # N: Revealed type is 
'__main__.A[builtins.int*]' reveal_type(a.x) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(a.y) # N: Revealed type is 'builtins.int*' A(['str'], 7) # E: Cannot infer type argument 1 of "A" A([1], '2') # E: Cannot infer type argument 1 of "A" [builtins fixtures/list.pyi] [case testAttrsGenericClassmethod] from typing import TypeVar, Generic, Optional import attr T = TypeVar('T') @attr.s(auto_attribs=True) class A(Generic[T]): x: Optional[T] @classmethod def clsmeth(cls) -> None: reveal_type(cls) # N: Revealed type is 'Type[__main__.A[T`1]]' [builtins fixtures/classmethod.pyi] [case testAttrsForwardReference] import attr @attr.s(auto_attribs=True) class A: parent: 'B' @attr.s(auto_attribs=True) class B: parent: A reveal_type(A) # N: Revealed type is 'def (parent: __main__.B) -> __main__.A' reveal_type(B) # N: Revealed type is 'def (parent: __main__.A) -> __main__.B' A(B(None)) [builtins fixtures/list.pyi] [case testAttrsForwardReferenceInClass] import attr @attr.s(auto_attribs=True) class A: parent: A.B @attr.s(auto_attribs=True) class B: parent: A reveal_type(A) # N: Revealed type is 'def (parent: __main__.A.B) -> __main__.A' reveal_type(A.B) # N: Revealed type is 'def (parent: __main__.A) -> __main__.A.B' A(A.B(None)) [builtins fixtures/list.pyi] [case testAttrsImporting] from helper import A reveal_type(A) # N: Revealed type is 'def (a: builtins.int, b: builtins.str) -> helper.A' [file helper.py] import attr @attr.s(auto_attribs=True) class A: a: int b: str = attr.ib() [builtins fixtures/list.pyi] [case testAttrsOtherMethods] import attr @attr.s(auto_attribs=True) class A: a: int b: str = attr.ib() @classmethod def new(cls) -> A: reveal_type(cls) # N: Revealed type is 'Type[__main__.A]' return cls(6, 'hello') @classmethod def bad(cls) -> A: return cls(17) # E: Too few arguments for "A" def foo(self) -> int: return self.a reveal_type(A) # N: Revealed type is 'def (a: builtins.int, b: builtins.str) -> __main__.A' a = A.new() reveal_type(a.foo) # 
N: Revealed type is 'def () -> builtins.int' [builtins fixtures/classmethod.pyi] [case testAttrsOtherOverloads] import attr from typing import overload, Union @attr.s class A: a = attr.ib() b = attr.ib(default=3) @classmethod def other(cls) -> str: return "..." @overload @classmethod def foo(cls, x: int) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x: Union[int, str]) -> Union[int, str]: reveal_type(cls) # N: Revealed type is 'Type[__main__.A]' reveal_type(cls.other()) # N: Revealed type is 'builtins.str' return x reveal_type(A.foo(3)) # N: Revealed type is 'builtins.int' reveal_type(A.foo("foo")) # N: Revealed type is 'builtins.str' [builtins fixtures/classmethod.pyi] [case testAttrsDefaultDecorator] import attr @attr.s class C(object): x: int = attr.ib(default=1) y: int = attr.ib() @y.default def name_does_not_matter(self): return self.x + 1 C() [builtins fixtures/list.pyi] [case testAttrsValidatorDecorator] import attr @attr.s class C(object): x = attr.ib() @x.validator def check(self, attribute, value): if value > 42: raise ValueError("x must be smaller or equal to 42") C(42) C(43) [builtins fixtures/exception.pyi] [case testAttrsLocalVariablesInClassMethod] import attr @attr.s(auto_attribs=True) class A: a: int b: int = attr.ib() @classmethod def new(cls, foo: int) -> A: a = foo b = a return cls(a, b) [builtins fixtures/classmethod.pyi] [case testAttrsUnionForward] import attr from typing import Union, List @attr.s(auto_attribs=True) class A: frob: List['AOrB'] class B: pass AOrB = Union[A, B] reveal_type(A) # N: Revealed type is 'def (frob: builtins.list[Union[__main__.A, __main__.B]]) -> __main__.A' reveal_type(B) # N: Revealed type is 'def () -> __main__.B' A([B()]) [builtins fixtures/list.pyi] [case testAttrsUsingConvert] import attr def convert(s:int) -> str: return 'hello' @attr.s class C: x: str = attr.ib(convert=convert) # E: convert is deprecated, use converter # Because of the convert the __init__ 
takes an int, but the variable is a str. reveal_type(C) # N: Revealed type is 'def (x: builtins.int) -> __main__.C' reveal_type(C(15).x) # N: Revealed type is 'builtins.str' [builtins fixtures/list.pyi] [case testAttrsUsingConverter] import attr import helper def converter2(s:int) -> str: return 'hello' @attr.s class C: x: str = attr.ib(converter=helper.converter) y: str = attr.ib(converter=converter2) # Because of the converter the __init__ takes an int, but the variable is a str. reveal_type(C) # N: Revealed type is 'def (x: builtins.int, y: builtins.int) -> __main__.C' reveal_type(C(15, 16).x) # N: Revealed type is 'builtins.str' [file helper.py] def converter(s:int) -> str: return 'hello' [builtins fixtures/list.pyi] [case testAttrsUsingConvertAndConverter] import attr def converter(s:int) -> str: return 'hello' @attr.s class C: x: str = attr.ib(converter=converter, convert=converter) # E: Can't pass both `convert` and `converter`. [builtins fixtures/list.pyi] [case testAttrsUsingBadConverter] # flags: --no-strict-optional import attr from typing import overload @overload def bad_overloaded_converter(x: int, y: int) -> int: ... @overload def bad_overloaded_converter(x: str, y: str) -> str: ... 
def bad_overloaded_converter(x, y=7): return x def bad_converter() -> str: return '' @attr.dataclass class A: bad: str = attr.ib(converter=bad_converter) bad_overloaded: int = attr.ib(converter=bad_overloaded_converter) reveal_type(A) [out] main:16: error: Cannot determine __init__ type from converter main:16: error: Argument "converter" has incompatible type "Callable[[], str]"; expected "Callable[[Any], str]" main:17: error: Cannot determine __init__ type from converter main:17: error: Argument "converter" has incompatible type overloaded function; expected "Callable[[Any], int]" main:18: note: Revealed type is 'def (bad: Any, bad_overloaded: Any) -> __main__.A' [builtins fixtures/list.pyi] [case testAttrsUsingBadConverterReprocess] # flags: --no-strict-optional import attr from typing import overload forward: 'A' @overload def bad_overloaded_converter(x: int, y: int) -> int: ... @overload def bad_overloaded_converter(x: str, y: str) -> str: ... def bad_overloaded_converter(x, y=7): return x def bad_converter() -> str: return '' @attr.dataclass class A: bad: str = attr.ib(converter=bad_converter) bad_overloaded: int = attr.ib(converter=bad_overloaded_converter) reveal_type(A) [out] main:17: error: Cannot determine __init__ type from converter main:17: error: Argument "converter" has incompatible type "Callable[[], str]"; expected "Callable[[Any], str]" main:18: error: Cannot determine __init__ type from converter main:18: error: Argument "converter" has incompatible type overloaded function; expected "Callable[[Any], int]" main:19: note: Revealed type is 'def (bad: Any, bad_overloaded: Any) -> __main__.A' [builtins fixtures/list.pyi] [case testAttrsUsingUnsupportedConverter] import attr class Thing: def do_it(self, int) -> str: ... thing = Thing() def factory(default: int): ... 
@attr.s class C: x: str = attr.ib(converter=thing.do_it) # E: Unsupported converter, only named functions and types are currently supported y: str = attr.ib(converter=lambda x: x) # E: Unsupported converter, only named functions and types are currently supported z: str = attr.ib(converter=factory(8)) # E: Unsupported converter, only named functions and types are currently supported reveal_type(C) # N: Revealed type is 'def (x: Any, y: Any, z: Any) -> __main__.C' [builtins fixtures/list.pyi] [case testAttrsUsingConverterAndSubclass] import attr def converter(s:int) -> str: return 'hello' @attr.s class C: x: str = attr.ib(converter=converter) @attr.s class A(C): pass # Because of the convert the __init__ takes an int, but the variable is a str. reveal_type(A) # N: Revealed type is 'def (x: builtins.int) -> __main__.A' reveal_type(A(15).x) # N: Revealed type is 'builtins.str' [builtins fixtures/list.pyi] [case testAttrsUsingConverterWithTypes] from typing import overload import attr @attr.dataclass class A: x: str @attr.s class C: x: complex = attr.ib(converter=complex) y: int = attr.ib(converter=int) z: A = attr.ib(converter=A) o = C("1", "2", "3") o = C(1, 2, "3") [builtins fixtures/attr.pyi] [case testAttrsCmpWithSubclasses] import attr @attr.s class A: pass @attr.s class B: pass @attr.s class C(A, B): pass @attr.s class D(A): pass reveal_type(A.__lt__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' reveal_type(B.__lt__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' reveal_type(C.__lt__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' reveal_type(D.__lt__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' A() < A() B() < B() A() < B() # E: Unsupported operand types for < ("A" and "B") C() > A() C() > B() C() > C() C() > D() # E: Unsupported operand types for > ("C" and "D") D() >= A() D() >= B() # E: Unsupported operand types for >= 
("D" and "B") D() >= C() # E: Unsupported operand types for >= ("D" and "C") D() >= D() A() <= 1 # E: Unsupported operand types for <= ("A" and "int") B() <= 1 # E: Unsupported operand types for <= ("B" and "int") C() <= 1 # E: Unsupported operand types for <= ("C" and "int") D() <= 1 # E: Unsupported operand types for <= ("D" and "int") A() == A() B() == A() C() == A() D() == A() A() == int B() == int C() == int D() == int [builtins fixtures/list.pyi] [case testAttrsComplexSuperclass] import attr @attr.s class C: x: int = attr.ib(default=1) y: int = attr.ib() @y.default def name_does_not_matter(self): return self.x + 1 @attr.s class A(C): z: int = attr.ib(default=18) reveal_type(C) # N: Revealed type is 'def (x: builtins.int =, y: builtins.int =) -> __main__.C' reveal_type(A) # N: Revealed type is 'def (x: builtins.int =, y: builtins.int =, z: builtins.int =) -> __main__.A' [builtins fixtures/list.pyi] [case testAttrsMultiAssign] import attr @attr.s class A: x, y, z = attr.ib(), attr.ib(type=int), attr.ib(default=17) reveal_type(A) # N: Revealed type is 'def (x: Any, y: builtins.int, z: Any =) -> __main__.A' [builtins fixtures/list.pyi] [case testAttrsMultiAssign2] import attr @attr.s class A: x = y = z = attr.ib() # E: Too many names for one attribute [builtins fixtures/list.pyi] [case testAttrsPrivateInit] import attr @attr.s class C(object): _x = attr.ib(init=False, default=42) C() C(_x=42) # E: Unexpected keyword argument "_x" for "C" [builtins fixtures/list.pyi] [case testAttrsAutoMustBeAll] import attr @attr.s(auto_attribs=True) class A: a: int b = 17 # The following forms are not allowed with auto_attribs=True c = attr.ib() # E: Need type annotation for 'c' d, e = attr.ib(), attr.ib() # E: Need type annotation for 'd' # E: Need type annotation for 'e' f = g = attr.ib() # E: Need type annotation for 'f' # E: Need type annotation for 'g' [builtins fixtures/bool.pyi] [case testAttrsRepeatedName] import attr @attr.s class A: a = attr.ib(default=8) b = attr.ib() 
a = attr.ib() reveal_type(A) # N: Revealed type is 'def (b: Any, a: Any) -> __main__.A' @attr.s class B: a: int = attr.ib(default=8) b: int = attr.ib() a: int = attr.ib() # E: Name 'a' already defined on line 10 reveal_type(B) # N: Revealed type is 'def (b: builtins.int, a: builtins.int) -> __main__.B' @attr.s(auto_attribs=True) class C: a: int = 8 b: int a: int = attr.ib() # E: Name 'a' already defined on line 16 reveal_type(C) # N: Revealed type is 'def (a: builtins.int, b: builtins.int) -> __main__.C' [builtins fixtures/bool.pyi] [case testAttrsNewStyleClassPy2] # flags: --py2 import attr @attr.s class Good(object): pass @attr.s class Bad: # E: attrs only works with new-style classes pass [builtins_py2 fixtures/bool.pyi] [case testAttrsAutoAttribsPy2] # flags: --py2 import attr @attr.s(auto_attribs=True) # E: auto_attribs is not supported in Python 2 class A(object): x = attr.ib() [builtins_py2 fixtures/bool.pyi] [case testAttrsFrozenSubclass] import attr @attr.dataclass class NonFrozenBase: a: int @attr.dataclass(frozen=True) class FrozenBase: a: int @attr.dataclass(frozen=True) class FrozenNonFrozen(NonFrozenBase): b: int @attr.dataclass(frozen=True) class FrozenFrozen(FrozenBase): b: int @attr.dataclass class NonFrozenFrozen(FrozenBase): b: int # Make sure these are untouched non_frozen_base = NonFrozenBase(1) non_frozen_base.a = 17 frozen_base = FrozenBase(1) frozen_base.a = 17 # E: Property "a" defined in "FrozenBase" is read-only a = FrozenNonFrozen(1, 2) a.a = 17 # E: Property "a" defined in "FrozenNonFrozen" is read-only a.b = 17 # E: Property "b" defined in "FrozenNonFrozen" is read-only b = FrozenFrozen(1, 2) b.a = 17 # E: Property "a" defined in "FrozenFrozen" is read-only b.b = 17 # E: Property "b" defined in "FrozenFrozen" is read-only c = NonFrozenFrozen(1, 2) c.a = 17 # E: Property "a" defined in "NonFrozenFrozen" is read-only c.b = 17 # E: Property "b" defined in "NonFrozenFrozen" is read-only [builtins fixtures/bool.pyi] [case 
testAttrsCallableAttributes] from typing import Callable import attr def blah(a: int, b: int) -> bool: return True @attr.s(auto_attribs=True) class F: _cb: Callable[[int, int], bool] = blah def foo(self) -> bool: return self._cb(5, 6) @attr.s class G: _cb: Callable[[int, int], bool] = attr.ib(blah) def foo(self) -> bool: return self._cb(5, 6) @attr.s(auto_attribs=True, frozen=True) class FFrozen(F): def bar(self) -> bool: return self._cb(5, 6) [builtins fixtures/callable.pyi] [case testAttrsWithFactory] from typing import List import attr def my_factory() -> int: return 7 @attr.s class A: x: List[int] = attr.ib(factory=list) y: int = attr.ib(factory=my_factory) A() [builtins fixtures/list.pyi] [case testAttrsFactoryAndDefault] import attr @attr.s class A: x: int = attr.ib(factory=int, default=7) # E: Can't pass both `default` and `factory`. [builtins fixtures/bool.pyi] [case testAttrsFactoryBadReturn] import attr def my_factory() -> int: return 7 @attr.s class A: x: int = attr.ib(factory=list) # E: Incompatible types in assignment (expression has type "List[T]", variable has type "int") y: str = attr.ib(factory=my_factory) # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/list.pyi] [case testAttrsDefaultAndInit] import attr @attr.s class C: a = attr.ib(init=False, default=42) b = attr.ib() # Ok because previous attribute is init=False c = attr.ib(default=44) d = attr.ib(init=False) # Ok because this attribute is init=False e = attr.ib() # E: Non-default attributes not allowed after default attributes. 
[builtins fixtures/bool.pyi] [case testAttrsOptionalConverter] # flags: --strict-optional import attr from attr.converters import optional from typing import Optional def converter(s:int) -> str: return 'hello' @attr.s class A: y: Optional[int] = attr.ib(converter=optional(int)) z: Optional[str] = attr.ib(converter=optional(converter)) A(None, None) [builtins fixtures/attr.pyi] [case testAttrsTypeVarNoCollision] from typing import TypeVar, Generic import attr T = TypeVar("T", bytes, str) # Make sure the generated __le__ (and friends) don't use T for their arguments. @attr.s(auto_attribs=True) class A(Generic[T]): v: T [builtins fixtures/attr.pyi] [case testAttrsKwOnlyAttrib] import attr @attr.s class A: a = attr.ib(kw_only=True) A() # E: Missing named argument "a" for "A" A(15) # E: Too many positional arguments for "A" A(a=15) [builtins fixtures/attr.pyi] [case testAttrsKwOnlyClass] import attr @attr.s(kw_only=True, auto_attribs=True) class A: a: int b: bool A() # E: Missing named argument "a" for "A" # E: Missing named argument "b" for "A" A(b=True, a=15) [builtins fixtures/attr.pyi] [case testAttrsKwOnlyClassNoInit] import attr @attr.s(kw_only=True) class B: a = attr.ib(init=False) b = attr.ib() B(b=True) [builtins fixtures/attr.pyi] [case testAttrsKwOnlyWithDefault] import attr @attr.s class C: a = attr.ib(0) b = attr.ib(kw_only=True) c = attr.ib(16, kw_only=True) C(b=17) [builtins fixtures/attr.pyi] [case testAttrsKwOnlyClassWithMixedDefaults] import attr @attr.s(kw_only=True) class D: a = attr.ib(10) b = attr.ib() c = attr.ib(15) D(b=17) [builtins fixtures/attr.pyi] [case testAttrsKwOnlySubclass] import attr @attr.s class A2: a = attr.ib(default=0) @attr.s class B2(A2): b = attr.ib(kw_only=True) B2(b=1) [builtins fixtures/attr.pyi] [case testAttrsNonKwOnlyAfterKwOnly] import attr @attr.s(kw_only=True) class A: a = attr.ib(default=0) @attr.s class B(A): b = attr.ib() # E: Non keyword-only attributes are not allowed after a keyword-only attribute. 
@attr.s class C: a = attr.ib(kw_only=True) b = attr.ib(15) # E: Non keyword-only attributes are not allowed after a keyword-only attribute. [builtins fixtures/attr.pyi] [case testAttrsKwOnlyPy2] # flags: --py2 import attr @attr.s(kw_only=True) # E: kw_only is not supported in Python 2 class A(object): x = attr.ib() @attr.s class B(object): x = attr.ib(kw_only=True) # E: kw_only is not supported in Python 2 [builtins_py2 fixtures/bool.pyi] [case testAttrsDisallowUntypedWorksForward] # flags: --disallow-untyped-defs import attr from typing import List @attr.s class B: x: C = attr.ib() class C(List[C]): pass reveal_type(B) # N: Revealed type is 'def (x: __main__.C) -> __main__.B' [builtins fixtures/list.pyi] [case testDisallowUntypedWorksForwardBad] # flags: --disallow-untyped-defs import attr @attr.s class B: x = attr.ib() # E: Need type annotation for 'x' reveal_type(B) # N: Revealed type is 'def (x: Any) -> __main__.B' [builtins fixtures/list.pyi] [case testAttrsDefaultDecoratorDeferred] defer: Yes import attr @attr.s class C(object): x: int = attr.ib(default=1) y: int = attr.ib() @y.default def inc(self): return self.x + 1 class Yes: ... [builtins fixtures/list.pyi] [case testAttrsValidatorDecoratorDeferred] defer: Yes import attr @attr.s class C(object): x = attr.ib() @x.validator def check(self, attribute, value): if value > 42: raise ValueError("x must be smaller or equal to 42") C(42) C(43) class Yes: ... [builtins fixtures/exception.pyi] [case testTypeInAttrUndefined] import attr @attr.s class C: total = attr.ib(type=Bad) # E: Name 'Bad' is not defined [builtins fixtures/bool.pyi] [case testTypeInAttrForwardInRuntime] import attr @attr.s class C: total = attr.ib(type=Forward) reveal_type(C.total) # N: Revealed type is '__main__.Forward' C('no') # E: Argument 1 to "C" has incompatible type "str"; expected "Forward" class Forward: ... 
[builtins fixtures/bool.pyi] [case testDefaultInAttrForward] import attr @attr.s class C: total = attr.ib(default=func()) def func() -> int: ... C() C(1) C(1, 2) # E: Too many arguments for "C" [builtins fixtures/bool.pyi] [case testTypeInAttrUndefinedFrozen] import attr @attr.s(frozen=True) class C: total = attr.ib(type=Bad) # E: Name 'Bad' is not defined C(0).total = 1 # E: Property "total" defined in "C" is read-only [builtins fixtures/bool.pyi] [case testTypeInAttrDeferredStar] import lib [file lib.py] import attr MYPY = False if MYPY: # Force deferral from other import * @attr.s class C: total = attr.ib(type=int) C() # E: Too few arguments for "C" C('no') # E: Argument 1 to "C" has incompatible type "str"; expected "int" [file other.py] import lib [builtins fixtures/bool.pyi] [case testAttrsDefaultsMroOtherFile] import a [file a.py] import attr from b import A1, A2 @attr.s class Asdf(A1, A2): # E: Non-default attributes not allowed after default attributes. pass [file b.py] import attr @attr.s class A1: a: str = attr.ib('test') @attr.s class A2: b: int = attr.ib() [builtins fixtures/list.pyi] mypy-0.761/test-data/unit/check-basic.test0000644€tŠÔÚ€2›s®0000002661013576752246024627 0ustar jukkaDROPBOX\Domain Users00000000000000[case testEmptyFile] [out] [case testAssignmentAndVarDef] a = None # type: A b = None # type: B if int(): a = a if int(): a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [case testConstructionAndAssignment] x = None # type: A x = A() if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: def __init__(self): pass class B: def __init__(self): pass [case testInheritInitFromObject] x = None # type: A if int(): x = A() if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A(object): pass class B(object): pass [case testImplicitInheritInitFromObject] x = None # 
type: A o = None # type: object if int(): x = o # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): x = A() if int(): o = x class A: pass class B: pass [out] [case testTooManyConstructorArgs] import typing object(object()) [out] main:2: error: Too many arguments for "object" [case testVarDefWithInit] import typing a = A() # type: A b = object() # type: A class A: pass [out] main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") [case testInheritanceBasedSubtyping] import typing x = B() # type: A y = A() # type: B # Fail class A: pass class B(A): pass [out] main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testDeclaredVariableInParentheses] (x) = None # type: int if int(): x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): x = 1 -- Simple functions and calling -- ---------------------------- [case testFunction] import typing def f(x: 'A') -> None: pass f(A()) f(B()) # Fail class A: pass class B: pass [out] main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A" [case testNotCallable] import typing A()() class A: pass [out] main:2: error: "A" not callable [case testSubtypeArgument] import typing def f(x: 'A', y: 'B') -> None: pass f(B(), A()) # Fail f(B(), B()) class A: pass class B(A): pass [out] main:3: error: Argument 2 to "f" has incompatible type "A"; expected "B" [case testInvalidArgumentCount] import typing def f(x, y) -> None: pass f(object()) f(object(), object(), object()) [out] main:3: error: Too few arguments for "f" main:4: error: Too many arguments for "f" -- Locals -- ------ [case testLocalVariables] def f() -> None: x = None # type: A y = None # type: B if int(): x = x x = y # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [case testLocalVariableScope] def f() -> 
None: x: A x = A() def g() -> None: x: B x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") class A: pass class B: pass [case testFunctionArguments] import typing def f(x: 'A', y: 'B') -> None: if int(): x = y # E: Incompatible types in assignment (expression has type "B", variable has type "A") x = x y = B() class A: pass class B: pass [case testLocalVariableInitialization] import typing def f() -> None: a = A() # type: A b = B() # type: A # Fail class A: pass class B: pass [out] main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testVariableInitializationWithSubtype] import typing x = B() # type: A y = A() # type: B # Fail class A: pass class B(A): pass [out] main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") -- Misc -- ---- [case testInvalidReturn] import typing def f() -> 'A': return B() class A: pass class B: pass [out] main:3: error: Incompatible return value type (got "B", expected "A") [case testTopLevelContextAndInvalidReturn] import typing def f() -> 'A': return B() a = B() # type: A class A: pass class B: pass [out] main:3: error: Incompatible return value type (got "B", expected "A") main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testEmptyReturnInAnyTypedFunction] from typing import Any def f() -> Any: return [case testEmptyYieldInAnyTypedFunction] from typing import Any def f() -> Any: yield [case testModule__name__] import typing x = __name__ # type: str a = __name__ # type: A # E: Incompatible types in assignment (expression has type "str", variable has type "A") class A: pass [builtins fixtures/primitives.pyi] [case testModule__doc__] import typing x = __doc__ # type: str a = __doc__ # type: A # E: Incompatible types in assignment (expression has type "str", variable has type "A") class A: pass [builtins fixtures/primitives.pyi] [case testModule__file__] 
import typing x = __file__ # type: str a = __file__ # type: A # E: Incompatible types in assignment (expression has type "str", variable has type "A") class A: pass [builtins fixtures/primitives.pyi] [case test__package__] import typing x = __package__ # type: str a = __file__ # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int") -- Scoping and shadowing -- --------------------- [case testLocalVariableShadowing] a = None # type: A if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = A() def f() -> None: a = None # type: B if int(): a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = B() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = A() class A: pass class B: pass [case testGlobalDefinedInBlockWithType] class A: pass while A: a = None # type: A if int(): a = A() a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") -- # type: signatures -- ------------------ [case testFunctionSignatureAsComment] def f(x): # type: (int) -> str return 1 f('') [out] main:2: error: Incompatible return value type (got "int", expected "str") main:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" [case testMethodSignatureAsComment] class A: def f(self, x): # type: (int) -> str self.f('') # Fail return 1 A().f('') # Fail [out] main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" main:5: error: Incompatible return value type (got "int", expected "str") main:6: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testTrailingCommaParsing-skip] x = 1 x in 1, if x in 1, : pass [out] [case testInitReturnTypeError] class C: def __init__(self): # type: () -> int pass [out] main:2: error: The return type of "__init__" must be None -- WritesCache signals to testcheck 
to do the cache validation [case testWritesCache] import a import d [file a.py] import b import c [file b.py] [file c.py] [file d.py] [case testWritesCacheErrors] import a import d [file a.py] import b import c [file b.py] [file c.py] [file d.py] import e [file e.py] 1+'no' # E: Unsupported operand types for + ("int" and "str") [case testModuleAsTypeNoCrash] import mock from typing import Union class A: ... class B: ... x: Union[mock, A] # E: Module "mock" is not valid as a type if isinstance(x, B): pass [file mock.py] [builtins fixtures/isinstance.pyi] [out] [case testModuleAsTypeNoCrash2] import mock from typing import overload, Any, Union @overload def f(x: int) -> int: ... @overload def f(x: str) -> Union[mock, str]: ... # E: Module "mock" is not valid as a type def f(x): pass x: Any f(x) [file mock.py] [builtins fixtures/isinstance.pyi] [out] [case testPartialTypeComments] def foo( a, # type: str b, args=None, ): # type: (...) -> None pass [case testNoneHasBool] none = None b = none.__bool__() reveal_type(b) # N: Revealed type is 'builtins.bool' [builtins fixtures/bool.pyi] [case testAssignmentInvariantNoteForList] from typing import List x: List[int] y: List[float] y = x # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[float]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] [case testAssignmentInvariantNoteForDict] from typing import Dict x: Dict[str, int] y: Dict[str, float] y = x # E: Incompatible types in assignment (expression has type "Dict[str, int]", variable has type "Dict[str, float]") \ # N: "Dict" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Mapping" instead, which is covariant in the value type [builtins fixtures/dict.pyi] [case testDistinctTypes] # flags: --strict-optional import b [file a.py] from 
typing import NamedTuple from typing_extensions import TypedDict from enum import Enum class A: pass N = NamedTuple('N', [('x', int)]) D = TypedDict('D', {'x': int}) class B(Enum): b = 10 [file b.py] from typing import List, Optional, Union, Sequence, NamedTuple, Tuple, Type from typing_extensions import Literal, Final, TypedDict from enum import Enum import a class A: pass N = NamedTuple('N', [('x', int)]) class B(Enum): b = 10 D = TypedDict('D', {'y': int}) def foo() -> Optional[A]: b = True return a.A() if b else None # E: Incompatible return value type (got "Optional[a.A]", expected "Optional[b.A]") def bar() -> List[A]: l = [a.A()] return l # E: Incompatible return value type (got "List[a.A]", expected "List[b.A]") def baz() -> Union[A, int]: b = True return a.A() if b else 10 # E: Incompatible return value type (got "Union[a.A, int]", expected "Union[b.A, int]") def spam() -> Optional[A]: return a.A() # E: Incompatible return value type (got "a.A", expected "Optional[b.A]") def eggs() -> Sequence[A]: x = [a.A()] return x # E: Incompatible return value type (got "List[a.A]", expected "Sequence[b.A]") def eggs2() -> Sequence[N]: x = [a.N(0)] return x # E: Incompatible return value type (got "List[a.N]", expected "Sequence[b.N]") def asdf1() -> Sequence[Tuple[a.A, A]]: x = [(a.A(), a.A())] return x # E: Incompatible return value type (got "List[Tuple[a.A, a.A]]", expected "Sequence[Tuple[a.A, b.A]]") def asdf2() -> Sequence[Tuple[A, a.A]]: x = [(a.A(), a.A())] return x # E: Incompatible return value type (got "List[Tuple[a.A, a.A]]", expected "Sequence[Tuple[b.A, a.A]]") def arg() -> Tuple[A, A]: return A() # E: Incompatible return value type (got "A", expected "Tuple[A, A]") def types() -> Sequence[Type[A]]: x = [a.A] return x # E: Incompatible return value type (got "List[Type[a.A]]", expected "Sequence[Type[b.A]]") def literal() -> Sequence[Literal[B.b]]: x = [a.B.b] # type: List[Literal[a.B.b]] return x # E: Incompatible return value type (got 
"List[Literal[a.B.b]]", expected "Sequence[Literal[b.B.b]]") def typeddict() -> Sequence[D]: x = [{'x': 0}] # type: List[a.D] return x # E: Incompatible return value type (got "List[a.D]", expected "Sequence[b.D]") a = (a.A(), A()) a.x # E: "Tuple[a.A, b.A]" has no attribute "x" [builtins fixtures/dict.pyi] [case testReturnAnyFromFunctionDeclaredToReturnObject] # flags: --warn-return-any from typing import Any def f() -> object: x: Any = 1 return x mypy-0.761/test-data/unit/check-bound.test0000644€tŠÔÚ€2›s®0000001070213576752246024650 0ustar jukkaDROPBOX\Domain Users00000000000000-- Enforcement of upper bounds -- --------------------------- [case testBoundOnGenericFunction] from typing import TypeVar class A: pass class B(A): pass class C(A): pass class D: pass T = TypeVar('T', bound=A) U = TypeVar('U') def f(x: T) -> T: pass def g(x: U) -> U: return f(x) # E: Value of type variable "T" of "f" cannot be "U" f(A()) f(B()) f(D()) # E: Value of type variable "T" of "f" cannot be "D" b = B() if int(): b = f(b) if int(): b = f(C()) # E: Incompatible types in assignment (expression has type "C", variable has type "B") [case testBoundOnGenericClass] from typing import TypeVar, Generic class A: pass class B(A): pass T = TypeVar('T', bound=A) class G(Generic[T]): def __init__(self, x: T) -> None: pass v = None # type: G[A] w = None # type: G[B] x = None # type: G[str] # E: Type argument "builtins.str" of "G" must be a subtype of "__main__.A" y = G('a') # E: Value of type variable "T" of "G" cannot be "str" z = G(A()) z = G(B()) [case testBoundVoid] from typing import TypeVar, Generic T = TypeVar('T', bound=int) class C(Generic[T]): t = None # type: T def get(self) -> T: return self.t c1 = None # type: C[None] c1.get() d = c1.get() reveal_type(d) # N: Revealed type is 'None' [case testBoundAny] from typing import TypeVar, Generic T = TypeVar('T', bound=int) class C(Generic[T]): def __init__(self, x: T) -> None: pass def f(x: T) -> T: return x def g(): pass f(g()) C(g()) z = 
None # type: C [case testBoundHigherOrderWithVoid] from typing import TypeVar, Callable class A: pass T = TypeVar('T', bound=A) def f(g: Callable[[], T]) -> T: return g() def h() -> None: pass f(h) a = f(h) reveal_type(a) # N: Revealed type is 'None' [case testBoundInheritance] from typing import TypeVar, Generic class A: pass T = TypeVar('T') TA = TypeVar('TA', bound=A) class C(Generic[TA]): pass class D0(C[TA], Generic[TA]): pass class D1(C[T], Generic[T]): pass # E: Type argument "T`1" of "C" must be a subtype of "__main__.A" class D2(C[A]): pass class D3(C[str]): pass # E: Type argument "builtins.str" of "C" must be a subtype of "__main__.A" -- Using information from upper bounds -- ----------------------------------- [case testBoundGenericFunctions] from typing import TypeVar class A: pass class B(A): pass T = TypeVar('T') TA = TypeVar('TA', bound=A) TB = TypeVar('TB', bound=B) def f(x: T) -> T: return x def g(x: TA) -> TA: return f(x) def h(x: TB) -> TB: return g(x) def g2(x: TA) -> TA: return h(x) # Fail def j(x: TA) -> A: return x def k(x: TA) -> B: return x # Fail [out] main:16: error: Value of type variable "TB" of "h" cannot be "TA" main:21: error: Incompatible return value type (got "TA", expected "B") [case testBoundMethodUsage] from typing import TypeVar class A0: def foo(self) -> None: pass class A(A0): def bar(self) -> None: pass a = 1 @property def b(self) -> int: return self.a class B(A): def baz(self) -> None: pass T = TypeVar('T', bound=A) def f(x: T) -> T: x.foo() x.bar() x.baz() # E: "T" has no attribute "baz" x.a x.b return x b = f(B()) [builtins fixtures/property.pyi] [out] [case testBoundClassMethod] from typing import TypeVar class A0: @classmethod def foo(cls, x: int) -> int: pass class A(A0): pass T = TypeVar('T', bound=A) def f(x: T) -> int: return x.foo(22) [builtins fixtures/classmethod.pyi] [case testBoundClassMethodWithNamedTupleBase] from typing import NamedTuple, Type, TypeVar class A(NamedTuple): @classmethod def foo(cls) -> 
None: ... T = TypeVar('T', bound=A) def f(x: Type[T]) -> None: reveal_type(x.foo) # N: Revealed type is 'def ()' x.foo() [builtins fixtures/classmethod.pyi] [case testBoundStaticMethod] from typing import TypeVar class A0: @staticmethod def foo(x: int) -> int: pass class A(A0): pass T = TypeVar('T', bound=A) def f(x: T) -> int: return x.foo(22) [builtins fixtures/staticmethod.pyi] [case testBoundOnDecorator] from typing import TypeVar, Callable, Any, cast T = TypeVar('T', bound=Callable[..., Any]) def twice(f: T) -> T: def result(*args, **kwargs) -> Any: f(*args, **kwargs) return f(*args, **kwargs) return cast(T, result) @twice def foo(x: int) -> int: return x a = 1 b = foo(a) if int(): b = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int") twice(a) # E: Value of type variable "T" of "twice" cannot be "int" [builtins fixtures/args.pyi] mypy-0.761/test-data/unit/check-callable.test0000644€tŠÔÚ€2›s®0000002070113576752246025300 0ustar jukkaDROPBOX\Domain Users00000000000000[case testCallableDef] def f() -> None: pass if callable(f): f() else: f += 5 [builtins fixtures/callable.pyi] [case testCallableLambda] f = lambda: None if callable(f): f() else: f += 5 [builtins fixtures/callable.pyi] [case testCallableNotCallable] x = 5 if callable(x): x() else: x += 5 [builtins fixtures/callable.pyi] [case testUnion] from typing import Callable, Union x = 5 # type: Union[int, Callable[[], str]] if callable(x): y = x() + 'test' else: z = x + 6 [builtins fixtures/callable.pyi] [case testUnionMultipleReturnTypes] from typing import Callable, Union x = 5 # type: Union[int, Callable[[], str], Callable[[], int]] if callable(x): y = x() + 2 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[str, int]" else: z = x + 6 [builtins fixtures/callable.pyi] [case testUnionMultipleNonCallableTypes] from typing import Callable, Union x = 5 # type: Union[int, str, Callable[[], str]] if callable(x): y = x() + 
'test' else: z = x + 6 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" [builtins fixtures/callable.pyi] [case testCallableThenIsinstance] from typing import Callable, Union x = 5 # type: Union[int, str, Callable[[], str], Callable[[], int]] if callable(x): y = x() if isinstance(y, int): b1 = y + 2 else: b2 = y + 'test' else: if isinstance(x, int): b3 = x + 3 else: b4 = x + 'test2' [builtins fixtures/callable.pyi] [case testIsinstanceThenCallable] from typing import Callable, Union x = 5 # type: Union[int, str, Callable[[], str], Callable[[], int]] if isinstance(x, int): b1 = x + 1 else: if callable(x): y = x() if isinstance(y, int): b2 = y + 1 else: b3 = y + 'test' else: b4 = x + 'test2' [builtins fixtures/callable.pyi] [case testCallableWithDifferentArgTypes] from typing import Callable, Union x = 5 # type: Union[int, Callable[[], None], Callable[[int], None]] if callable(x): x() # E: Too few arguments [builtins fixtures/callable.pyi] [case testClassInitializer] from typing import Callable, Union class A: x = 5 a = A # type: Union[A, Callable[[], A]] if callable(a): a = a() a.x + 6 [builtins fixtures/callable.pyi] [case testCallableVariables] from typing import Union class A: x = 5 class B: x = int x = A() # type: Union[A, B] if callable(x.x): y = x.x() else: y = x.x + 5 [builtins fixtures/callable.pyi] [case testCallableAnd] from typing import Union, Callable x = 5 # type: Union[int, Callable[[], str]] if callable(x) and x() == 'test': x() else: x + 5 # E: Unsupported left operand type for + ("Callable[[], str]") \ # N: Left operand is of type "Union[int, Callable[[], str]]" [builtins fixtures/callable.pyi] [case testCallableOr] from typing import Union, Callable x = 5 # type: Union[int, Callable[[], str]] if callable(x) or x() == 'test': # E: "int" not callable x() # E: "int" not callable else: x + 5 [builtins fixtures/callable.pyi] [case testCallableOrOtherType] from typing import Union, Callable x = 5 # 
type: Union[int, Callable[[], str]] if callable(x) or x == 2: pass else: pass [builtins fixtures/callable.pyi] [case testAnyCallable] from typing import Any x = 5 # type: Any if callable(x): reveal_type(x) # N: Revealed type is 'Any' else: reveal_type(x) # N: Revealed type is 'Any' [builtins fixtures/callable.pyi] [case testCallableCallableClasses] from typing import Union class A: pass class B: def __call__(self) -> None: pass a = A() # type: A b = B() # type: B c = A() # type: Union[A, B] if callable(a): 5 + 'test' # E: Unsupported operand types for + ("int" and "str") if not callable(b): 5 + 'test' if callable(c): reveal_type(c) # N: Revealed type is '__main__.B' else: reveal_type(c) # N: Revealed type is '__main__.A' [builtins fixtures/callable.pyi] [case testCallableNestedUnions] from typing import Callable, Union T = Union[Union[int, Callable[[], int]], Union[str, Callable[[], str]]] def f(t: T) -> None: if callable(t): reveal_type(t()) # N: Revealed type is 'Union[builtins.int, builtins.str]' else: reveal_type(t) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/callable.pyi] [case testCallableTypeVarEmpty] from typing import TypeVar T = TypeVar('T') def f(t: T) -> T: if callable(t): return 5 # E: Incompatible return value type (got "int", expected "T") else: return t [builtins fixtures/callable.pyi] [case testCallableTypeVarUnion] from typing import Callable, TypeVar, Union T = TypeVar('T', int, Callable[[], int], Union[str, Callable[[], str]]) def f(t: T) -> None: if callable(t): reveal_type(t()) # N: Revealed type is 'Any' \ # N: Revealed type is 'builtins.int' \ # N: Revealed type is 'builtins.str' else: reveal_type(t) # N: Revealed type is 'builtins.int*' # N: Revealed type is 'builtins.str' [builtins fixtures/callable.pyi] [case testCallableTypeVarBound] from typing import TypeVar class A: def __call__(self) -> str: return 'hi' T = TypeVar('T', bound=A) def f(t: T) -> str: if callable(t): return t() else: return 5 [builtins 
fixtures/callable.pyi] [case testCallableTypeType] from typing import Type class A: pass T = Type[A] def f(t: T) -> A: if callable(t): return t() else: return 5 [builtins fixtures/callable.pyi] [case testCallableTypeUnion] from abc import ABCMeta, abstractmethod from typing import Type, Union class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class B: pass x = B # type: Union[Type[A], Type[B]] if callable(x): # Abstract classes raise an error when called, but are indeed `callable` pass else: 'test' + 5 [builtins fixtures/callable.pyi] [case testCallableUnionOfTypes] from abc import ABCMeta, abstractmethod from typing import Type, Union class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class B: pass x = B # type: Type[Union[A, B]] if callable(x): # Abstract classes raise an error when called, but are indeed `callable` pass else: 'test' + 5 [builtins fixtures/callable.pyi] [case testCallableObject] def f(o: object) -> None: if callable(o): o(1,2,3) 1 + 'boom' # E: Unsupported operand types for + ("int" and "str") o('hi') + 12 reveal_type(o) # N: Revealed type is '__main__.' 
[builtins fixtures/callable.pyi] [case testCallableObject2] class Foo(object): def bar(self) -> None: pass def g(o: Foo) -> None: o.bar() if callable(o): o.foo() # E: "Foo" has no attribute "foo" o.bar() o(1,2,3) else: o.bar() [builtins fixtures/callable.pyi] [case testCallableObjectAny] from typing import Any class Foo(Any): def bar(self) -> None: pass def g(o: Foo) -> None: o.bar() o.baz() if callable(o): o('test') o.lurr(1,2,3) [builtins fixtures/callable.pyi] [case testCallableObjectGeneric] from typing import TypeVar, Generic T = TypeVar('T') class Test(Generic[T]): def __self__(self, x: T) -> None: self.x = x def g(o: Test[T], x: T) -> T: if callable(o): o.foo() # E: "Test[T]" has no attribute "foo" o(1,2,3) o.x = x o.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "T") 1 + o.x # E: Unsupported operand types for + ("int" and "T") return o.x return x [builtins fixtures/callable.pyi] [case testCallablePromote] def take_float(f: float) -> None: pass def g(o: int) -> None: if callable(o): take_float(o) o(1,2,3) [builtins fixtures/callable.pyi] [case testCallableTuple] from typing import NamedTuple Thing = NamedTuple('Thing', [('s', str), ('n', int)]) def g(o: Thing) -> None: if callable(o): o.s + o.n # E: Unsupported operand types for + ("str" and "int") i, s = o i + s # E: Unsupported operand types for + ("str" and "int") o(1,2,3) [builtins fixtures/callable.pyi] [case testCallableNoArgs] if callable(): # E: Too few arguments for "callable" pass [builtins fixtures/callable.pyi] mypy-0.761/test-data/unit/check-class-namedtuple.test0000644€tŠÔÚ€2›s®0000004653313576752246027015 0ustar jukkaDROPBOX\Domain Users00000000000000[case testNewNamedTupleOldPythonVersion] # flags: --python-version 3.5 from typing import NamedTuple class E(NamedTuple): # E: NamedTuple class syntax is only supported in Python 3.6 pass [case testNewNamedTupleNoUnderscoreFields] # flags: --python-version 3.6 from typing import NamedTuple class 
X(NamedTuple): x: int _y: int # E: NamedTuple field name cannot start with an underscore: _y _z: int # E: NamedTuple field name cannot start with an underscore: _z [case testNewNamedTupleAccessingAttributes] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str x: X x.x x.y x.z # E: "X" has no attribute "z" [case testNewNamedTupleAttributesAreReadOnly] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int x: X x.x = 5 # E: Property "x" defined in "X" is read-only x.y = 5 # E: "X" has no attribute "y" class A(X): pass a: A a.x = 5 # E: Property "x" defined in "X" is read-only [case testNewNamedTupleCreateWithPositionalArguments] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str x = X(1, '2') x.x x.z # E: "X" has no attribute "z" x = X(1) # E: Too few arguments for "X" x = X(1, '2', 3) # E: Too many arguments for "X" [case testNewNamedTupleShouldBeSingleBase] # flags: --python-version 3.6 from typing import NamedTuple class A: ... 
class X(NamedTuple, A): # E: NamedTuple should be a single base pass [case testCreateNewNamedTupleWithKeywordArguments] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str x = X(x=1, y='x') x = X(1, y='x') x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X" x = X(y='x') # E: Missing positional argument "x" in call to "X" [case testNewNamedTupleCreateAndUseAsTuple] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str x = X(1, 'x') a, b = x a, b, c = x # E: Need more than 2 values to unpack (3 expected) [case testNewNamedTupleWithItemTypes] # flags: --python-version 3.6 from typing import NamedTuple class N(NamedTuple): a: int b: str n = N(1, 'x') s: str = n.a # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i: int = n.b # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") x, y = n if int(): x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNewNamedTupleConstructorArgumentTypes] # flags: --python-version 3.6 from typing import NamedTuple class N(NamedTuple): a: int b: str n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int" n = N(1, b=2) # E: Argument "b" to "N" has incompatible type "int"; expected "str" N(1, 'x') N(b='x', a=1) [case testNewNamedTupleAsBaseClass] # flags: --python-version 3.6 from typing import NamedTuple class N(NamedTuple): a: int b: str class X(N): pass x = X(1, 2) # E: Argument 2 to "X" has incompatible type "int"; expected "str" s = '' i = 0 if int(): s = x.a # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): i, s = x if int(): s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testNewNamedTupleSelfTypeWithNamedTupleAsBase] # flags: --python-version 3.6 from typing import 
NamedTuple class A(NamedTuple): a: int b: str class B(A): def f(self, x: int) -> None: self.f(self.a) self.f(self.b) # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int" i = 0 s = '' if int(): i, s = self i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") [out] [case testNewNamedTupleTypeReferenceToClassDerivedFrom] # flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): a: int b: str class B(A): def f(self, x: 'B') -> None: i = 0 s = '' if int(): self = x i, s = x i, s = x.a, x.b i, s = x.a, x.a # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") [case testNewNamedTupleSubtyping] # flags: --python-version 3.6 from typing import NamedTuple, Tuple class A(NamedTuple): a: int b: str class B(A): pass a = A(1, '') b = B(1, '') t: Tuple[int, str] if int(): b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A") if int(): b = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B") if int(): t = a if int(): t = (1, '') if int(): t = b if int(): a = b [case testNewNamedTupleSimpleTypeInference] # flags: --python-version 3.6 from typing import NamedTuple, Tuple class A(NamedTuple): a: int l = [A(1), A(2)] a = A(1) a = l[0] (i,) = l[0] i, i = l[0] # E: Need more than 1 value to unpack (2 expected) l = [A(1)] a = (1,) # E: Incompatible types in assignment (expression has type "Tuple[int]", \ variable has type "A") [builtins fixtures/list.pyi] [case testNewNamedTupleMissingClassAttribute] # flags: --python-version 3.6 from typing import NamedTuple class MyNamedTuple(NamedTuple): a: int b: str MyNamedTuple.x # E: "Type[MyNamedTuple]" has 
no attribute "x" [case testNewNamedTupleEmptyItems] # flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): ... [case testNewNamedTupleForwardRef] # flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): b: 'B' class B: ... a = A(B()) a = A(1) # E: Argument 1 to "A" has incompatible type "int"; expected "B" [case testNewNamedTupleProperty] # flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): a: int class B(A): @property def b(self) -> int: return self.a class C(B): pass B(1).b C(2).b [builtins fixtures/property.pyi] [case testNewNamedTupleAsDict] # flags: --python-version 3.6 from typing import NamedTuple, Any class X(NamedTuple): x: Any y: Any x: X reveal_type(x._asdict()) # N: Revealed type is 'builtins.dict[builtins.str, Any]' [builtins fixtures/dict.pyi] [case testNewNamedTupleReplaceTyped] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str x: X reveal_type(x._replace()) # N: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]' x._replace(x=5) x._replace(y=5) # E: Argument "y" to "_replace" of "X" has incompatible type "int"; expected "str" [case testNewNamedTupleFields] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str reveal_type(X._fields) # N: Revealed type is 'Tuple[builtins.str, builtins.str]' reveal_type(X._field_types) # N: Revealed type is 'builtins.dict[builtins.str, Any]' reveal_type(X._field_defaults) # N: Revealed type is 'builtins.dict[builtins.str, Any]' reveal_type(X.__annotations__) # N: Revealed type is 'builtins.dict[builtins.str, Any]' [builtins fixtures/dict.pyi] [case testNewNamedTupleUnit] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): pass x: X = X() x._replace() x._fields[0] # E: Tuple index out of range [case testNewNamedTupleJoinNamedTuple] # flags: --python-version 3.6 from typing import NamedTuple class 
X(NamedTuple): x: int y: str class Y(NamedTuple): x: int y: str reveal_type([X(3, 'b'), Y(1, 'a')]) # N: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' [builtins fixtures/list.pyi] [case testNewNamedTupleJoinTuple] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str reveal_type([(3, 'b'), X(1, 'a')]) # N: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' reveal_type([X(1, 'a'), (3, 'b')]) # N: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' [builtins fixtures/list.pyi] [case testNewNamedTupleWithTooManyArguments] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y = z = 2 # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" def f(self): pass [case testNewNamedTupleWithInvalidItems2] # flags: --python-version 3.6 import typing class X(typing.NamedTuple): x: int y = 1 x.x: int z: str = 'z' aa: int [out] main:6: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" main:7: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" main:7: error: Type cannot be declared in assignment to non-self attribute main:7: error: "int" has no attribute "x" main:9: error: Non-default NamedTuple fields cannot follow default fields [builtins fixtures/list.pyi] [case testNewNamedTupleWithoutTypesSpecified] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y = 2 # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" [case testTypeUsingTypeCNamedTuple] # flags: --python-version 3.6 from typing import NamedTuple, Type class N(NamedTuple): x: int y: str def f(a: Type[N]): a() [builtins fixtures/list.pyi] [out] main:9: error: Too few arguments for "N" [case testNewNamedTupleWithDefaults] # flags: --python-version 3.6 from typing import List, NamedTuple, 
Optional class X(NamedTuple): x: int y: int = 2 reveal_type(X(1)) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.X]' reveal_type(X(1, 2)) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.X]' X(1, 'a') # E: Argument 2 to "X" has incompatible type "str"; expected "int" X(1, z=3) # E: Unexpected keyword argument "z" for "X" class HasNone(NamedTuple): x: int y: Optional[int] = None reveal_type(HasNone(1)) # N: Revealed type is 'Tuple[builtins.int, Union[builtins.int, None], fallback=__main__.HasNone]' class Parameterized(NamedTuple): x: int y: List[int] = [1] + [2] z: List[int] = [] reveal_type(Parameterized(1)) # N: Revealed type is 'Tuple[builtins.int, builtins.list[builtins.int], builtins.list[builtins.int], fallback=__main__.Parameterized]' Parameterized(1, ['not an int']) # E: List item 0 has incompatible type "str"; expected "int" class Default: pass class UserDefined(NamedTuple): x: Default = Default() reveal_type(UserDefined()) # N: Revealed type is 'Tuple[__main__.Default, fallback=__main__.UserDefined]' reveal_type(UserDefined(Default())) # N: Revealed type is 'Tuple[__main__.Default, fallback=__main__.UserDefined]' UserDefined(1) # E: Argument 1 to "UserDefined" has incompatible type "int"; expected "Default" [builtins fixtures/list.pyi] [case testNewNamedTupleWithDefaultsStrictOptional] # flags: --strict-optional --python-version 3.6 from typing import List, NamedTuple, Optional class HasNone(NamedTuple): x: int y: Optional[int] = None reveal_type(HasNone(1)) # N: Revealed type is 'Tuple[builtins.int, Union[builtins.int, None], fallback=__main__.HasNone]' HasNone(None) # E: Argument 1 to "HasNone" has incompatible type "None"; expected "int" HasNone(1, y=None) HasNone(1, y=2) class CannotBeNone(NamedTuple): x: int y: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [builtins fixtures/list.pyi] [case testNewNamedTupleWrongType] # flags: 
--python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: int = 'not an int' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNewNamedTupleErrorInDefault] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int = 1 + '1' # E: Unsupported operand types for + ("int" and "str") [case testNewNamedTupleInheritance] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: str y: int = 3 class Y(X): def method(self) -> str: self.y return self.x reveal_type(Y('a')) # N: Revealed type is 'Tuple[builtins.str, builtins.int, fallback=__main__.Y]' Y(y=1, x='1').method() class CallsBaseInit(X): def __init__(self, x: str) -> None: super().__init__(x) # E: Too many arguments for "__init__" of "object" [case testNewNamedTupleWithMethods] from typing import NamedTuple class XMeth(NamedTuple): x: int def double(self) -> int: return self.x async def asyncdouble(self) -> int: return self.x class XRepr(NamedTuple): x: int y: int = 1 def __str__(self) -> str: return 'string' def __add__(self, other: XRepr) -> int: return 0 reveal_type(XMeth(1).double()) # N: Revealed type is 'builtins.int' reveal_type(XMeth(1).asyncdouble()) # N: Revealed type is 'typing.Coroutine[Any, Any, builtins.int]' reveal_type(XMeth(42).x) # N: Revealed type is 'builtins.int' reveal_type(XRepr(42).__str__()) # N: Revealed type is 'builtins.str' reveal_type(XRepr(1, 2).__add__(XRepr(3))) # N: Revealed type is 'builtins.int' [typing fixtures/typing-full.pyi] [case testNewNamedTupleOverloading] from typing import NamedTuple, overload class Overloader(NamedTuple): x: int @overload def method(self, y: str) -> str: pass @overload def method(self, y: int) -> int: pass def method(self, y): return y reveal_type(Overloader(1).method('string')) # N: Revealed type is 'builtins.str' reveal_type(Overloader(1).method(1)) # N: Revealed type is 'builtins.int' Overloader(1).method(('tuple',)) # E: 
No overload variant of "method" of "Overloader" matches argument type "Tuple[str]" \ # N: Possible overload variants: \ # N: def method(self, y: str) -> str \ # N: def method(self, y: int) -> int [case testNewNamedTupleMethodInheritance] from typing import NamedTuple, TypeVar T = TypeVar('T') class Base(NamedTuple): x: int def copy(self: T) -> T: reveal_type(self) # N: Revealed type is 'T`-1' return self def good_override(self) -> int: reveal_type(self) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.Base]' reveal_type(self[0]) # N: Revealed type is 'builtins.int' self[0] = 3 # E: Unsupported target for indexed assignment reveal_type(self.x) # N: Revealed type is 'builtins.int' self.x = 3 # E: Property "x" defined in "Base" is read-only self[1] # E: Tuple index out of range reveal_type(self[T]) # N: Revealed type is 'builtins.int' return self.x def bad_override(self) -> int: return self.x class Child(Base): def new_method(self) -> int: reveal_type(self) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.Child]' reveal_type(self[0]) # N: Revealed type is 'builtins.int' self[0] = 3 # E: Unsupported target for indexed assignment reveal_type(self.x) # N: Revealed type is 'builtins.int' self.x = 3 # E: Property "x" defined in "Base" is read-only self[1] # E: Tuple index out of range return self.x def good_override(self) -> int: return 0 def bad_override(self) -> str: # E: Return type "str" of "bad_override" incompatible with return type "int" in supertype "Base" return 'incompatible' def takes_base(base: Base) -> int: return base.x reveal_type(Base(1).copy()) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.Base]' reveal_type(Child(1).copy()) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.Child]' reveal_type(Base(1).good_override()) # N: Revealed type is 'builtins.int' reveal_type(Child(1).good_override()) # N: Revealed type is 'builtins.int' reveal_type(Base(1).bad_override()) # N: Revealed type is 'builtins.int' 
reveal_type(takes_base(Base(1))) # N: Revealed type is 'builtins.int' reveal_type(takes_base(Child(1))) # N: Revealed type is 'builtins.int' [builtins fixtures/tuple.pyi] [case testNewNamedTupleIllegalNames] from typing import Callable, NamedTuple class XMethBad(NamedTuple): x: int def _fields(self): # E: Cannot overwrite NamedTuple attribute "_fields" return 'no chance for this' class MagicalFields(NamedTuple): x: int def __slots__(self) -> None: pass # E: Cannot overwrite NamedTuple attribute "__slots__" def __new__(cls) -> MagicalFields: pass # E: Cannot overwrite NamedTuple attribute "__new__" def _source(self) -> int: pass # E: Cannot overwrite NamedTuple attribute "_source" __annotations__ = {'x': float} # E: NamedTuple field name cannot start with an underscore: __annotations__ \ # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" \ # E: Cannot overwrite NamedTuple attribute "__annotations__" class AnnotationsAsAMethod(NamedTuple): x: int # This fails at runtime because typing.py assumes that __annotations__ is a dictionary. 
def __annotations__(self) -> float: # E: Cannot overwrite NamedTuple attribute "__annotations__" return 1.0 class ReuseNames(NamedTuple): x: int def x(self) -> str: # E: Name 'x' already defined on line 22 return '' def y(self) -> int: return 0 y: str # E: Name 'y' already defined on line 26 class ReuseCallableNamed(NamedTuple): z: Callable[[ReuseNames], int] def z(self) -> int: # E: Name 'z' already defined on line 31 return 0 [builtins fixtures/dict.pyi] [case testNewNamedTupleDocString] from typing import NamedTuple class Documented(NamedTuple): """This is a docstring.""" x: int reveal_type(Documented.__doc__) # N: Revealed type is 'builtins.str' reveal_type(Documented(1).x) # N: Revealed type is 'builtins.int' class BadDoc(NamedTuple): x: int def __doc__(self) -> str: return '' reveal_type(BadDoc(1).__doc__()) # N: Revealed type is 'builtins.str' [case testNewNamedTupleClassMethod] from typing import NamedTuple class HasClassMethod(NamedTuple): x: str @classmethod def new(cls, f: str) -> 'HasClassMethod': reveal_type(cls) # N: Revealed type is 'Type[Tuple[builtins.str, fallback=__main__.HasClassMethod]]' reveal_type(HasClassMethod) # N: Revealed type is 'def (x: builtins.str) -> Tuple[builtins.str, fallback=__main__.HasClassMethod]' return cls(x=f) [builtins fixtures/classmethod.pyi] [case testNewNamedTupleStaticMethod] from typing import NamedTuple class HasStaticMethod(NamedTuple): x: str @staticmethod def new(f: str) -> 'HasStaticMethod': return HasStaticMethod(x=f) [builtins fixtures/classmethod.pyi] [case testNewNamedTupleProperty] from typing import NamedTuple class HasStaticMethod(NamedTuple): x: str @property def size(self) -> int: reveal_type(self) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.HasStaticMethod]' return 4 [builtins fixtures/property.pyi] mypy-0.761/test-data/unit/check-classes.test0000644€tŠÔÚ€2›s®0000051442513576752246025211 0ustar jukkaDROPBOX\Domain Users00000000000000-- Methods -- ------- [case testMethodCall] a = None 
# type: A b = None # type: B a.foo(B()) # Fail a.bar(B(), A()) # Fail a.foo(A()) b.bar(B(), A()) class A: def foo(self, x: 'A') -> None: pass class B: def bar(self, x: 'B', y: A) -> None: pass [out] main:5: error: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A" main:6: error: "A" has no attribute "bar" [case testMethodCallWithSubtype] a = None # type: A a.foo(A()) a.foo(B()) a.bar(A()) # Fail a.bar(B()) class A: def foo(self, x: 'A') -> None: pass def bar(self, x: 'B') -> None: pass class B(A): pass [out] main:5: error: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B" [case testInheritingMethod] a = None # type: B a.foo(A()) # Fail a.foo(B()) class A: def foo(self, x: 'B') -> None: pass class B(A): pass [targets __main__, __main__, __main__.A.foo] [out] main:3: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B" [case testMethodCallWithInvalidNumberOfArguments] a = None # type: A a.foo() # Fail a.foo(object(), A()) # Fail class A: def foo(self, x: 'A') -> None: pass [out] main:3: error: Too few arguments for "foo" of "A" main:4: error: Too many arguments for "foo" of "A" main:4: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A" [case testMethodBody] import typing class A: def f(self) -> None: a = object() # type: A # Fail [out] main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A") [case testMethodArguments] import typing class A: def f(self, a: 'A', b: 'B') -> None: if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = A() b = B() a = a a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") class B: pass [out] [case testReturnFromMethod] import typing class A: def f(self) -> 'A': return B() # Fail return A() class B: pass [out] main:4: error: 
Incompatible return value type (got "B", expected "A") [case testSelfArgument] import typing class A: def f(self) -> None: o = self # type: B # Fail self.g() # Fail a = self # type: A self.f() class B: pass [out] main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") main:5: error: "A" has no attribute "g" [case testAssignToMethodViaInstance] import typing class A: def f(self): pass A().f = None # E: Cannot assign to a method -- Attributes -- ---------- [case testReferToInvalidAttribute] class A: def __init__(self) -> None: self.x = object() a: A a.y # E: "A" has no attribute "y" a.y = object() # E: "A" has no attribute "y" a.x a.x = object() [case testReferToInvalidAttributeUnannotatedInit] class A: def __init__(self): self.x = object() a: A a.y # E: "A" has no attribute "y" a.y = object() # E: "A" has no attribute "y" a.x a.x = object() [case testArgumentTypeInference] class A: def __init__(self, aa: 'A', bb: 'B') -> None: self.a = aa self.b = bb class B: pass a = None # type: A b = None # type: B a.a = b # Fail a.b = a # Fail b.a # Fail a.a = a a.b = b [out] main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:10: error: Incompatible types in assignment (expression has type "A", variable has type "B") main:11: error: "B" has no attribute "a" [case testExplicitAttributeInBody] a = None # type: A a.x = object() # Fail a.x = A() class A: x = None # type: A [out] main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") [case testAttributeDefinedInNonInitMethod] import typing class A: def f(self) -> None: self.x = 1 self.y = '' self.x = 1 a = A() a.x = 1 a.y = '' a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") a.z = 0 # E: "A" has no attribute "z" [case testInheritanceAndAttributeAssignment] import typing class A: def f(self) -> None: self.x = 0 class B(A): def f(self) -> None: 
self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [targets __main__, __main__.A.f, __main__.B.f] [case testAssignmentToAttributeInMultipleMethods] import typing class A: def f(self) -> None: self.x = 0 def g(self) -> None: self.x = '' # Fail def __init__(self) -> None: self.x = '' # Fail [out] main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:8: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testClassNamesDefinedOnSelUsedInClassBody] class A(object): def f(self): self.attr = 1 attr = 0 class B(object): attr = 0 def f(self): self.attr = 1 class C(object): attr = 0 def f(self): self.attr = 1 attr = 0 class D(object): def g(self): self.attr = 1 attr = 0 def f(self): self.attr = 1 [out] [case testClassNamesDefinedOnSelUsedInClassBodyReveal] class A(object): def f(self) -> None: self.attr = 1 attr # E: Name 'attr' is not defined class B(object): attr = 0 def f(self) -> None: reveal_type(self.attr) # N: Revealed type is 'builtins.int' [out] -- Method overriding -- ----------------- [case testMethodOverridingWithIdenticalSignature] import typing class A: def f(self, x: 'A') -> None: pass def g(self, x: 'B' , y: object) -> 'A': pass def h(self) -> None: pass class B(A): def f(self, x: A) -> None: pass def g(self, x: 'B' , y: object) -> A: pass def h(self) -> None: pass [out] [case testMethodOverridingWithCovariantType] import typing class A: def f(self, x: 'A', y: 'B') -> 'A': pass def g(self, x: 'A', y: 'B') -> 'A': pass class B(A): def f(self, x: A, y: 'B') -> 'B': pass def g(self, x: A, y: A) -> 'A': pass [out] [case testMethodOverridingWithIncompatibleTypes] import typing class A: def f(self, x: 'A', y: 'B') -> 'A': pass def g(self, x: 'A', y: 'B') -> 'A': pass def h(self, x: 'A', y: 'B') -> 'A': pass class B(A): def f(self, x: 'B', y: 'B') -> A: pass # Fail def g(self, x: A, y: A) -> A: pass def h(self, x: A, 
y: 'B') -> object: pass # Fail [out] main:7: error: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "A" main:9: error: Return type "object" of "h" incompatible with return type "A" in supertype "A" [case testEqMethodsOverridingWithNonObjects] class A: def __eq__(self, other: A) -> bool: pass # Fail [builtins fixtures/attr.pyi] [out] main:2: error: Argument 1 of "__eq__" is incompatible with supertype "object"; supertype defines the argument type as "object" main:2: note: It is recommended for "__eq__" to work with arbitrary objects, for example: main:2: note: def __eq__(self, other: object) -> bool: main:2: note: if not isinstance(other, A): main:2: note: return NotImplemented main:2: note: return [case testMethodOverridingWithIncompatibleArgumentCount] import typing class A: def f(self, x: 'A') -> None: pass def g(self, x: 'A', y: 'B') -> 'A': pass class B(A): def f(self, x: A, y: A) -> None: pass # Fail def g(self, x: A) -> A: pass # Fail [out] main:6: error: Signature of "f" incompatible with supertype "A" main:7: error: Signature of "g" incompatible with supertype "A" [case testMethodOverridingAcrossDeepInheritanceHierarchy1] import typing class A: def f(self, x: 'B') -> None: pass class B(A): pass class C(B): # with gap in implementations def f(self, x: 'C') -> None: # Fail pass [out] main:6: error: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "B" [case testMethodOverridingAcrossDeepInheritanceHierarchy2] import typing class A: def f(self) -> 'B': pass class B(A): def f(self) -> 'C': pass class C(B): # with multiple implementations def f(self) -> B: # Fail pass [out] main:7: error: Return type "B" of "f" incompatible with return type "C" in supertype "B" [case testMethodOverridingWithVoidReturnValue] import typing class A: def f(self) -> None: pass def g(self) -> 'A': pass class B(A): def f(self) -> A: pass # Fail def g(self) -> None: pass [out] main:6: error: Return 
type "A" of "f" incompatible with return type "None" in supertype "A" [case testOverride__new__WithDifferentSignature] class A: def __new__(cls, x: int) -> A: pass class B(A): def __new__(cls) -> B: pass [case testOverride__new__AndCallObject] from typing import TypeVar, Generic class A: def __new__(cls, x: int) -> 'A': return object.__new__(cls) T = TypeVar('T') class B(Generic[T]): def __new__(cls, foo: T) -> 'B[T]': x = object.__new__(cls) # object.__new__ doesn't have a great type :( reveal_type(x) # N: Revealed type is 'Any' return x [builtins fixtures/__new__.pyi] [case testInnerFunctionNotOverriding] class A: def f(self) -> int: pass class B(A): def g(self) -> None: def f(self) -> str: pass [case testOverride__init_subclass__WithDifferentSignature] class A: def __init_subclass__(cls, x: int) -> None: pass class B(A): # E: Too few arguments for "__init_subclass__" of "A" def __init_subclass__(cls) -> None: pass [case testOverrideWithDecorator] from typing import Callable def int_to_none(f: Callable[..., int]) -> Callable[..., None]: ... def str_to_int(f: Callable[..., str]) -> Callable[..., int]: ... class A: def f(self) -> None: pass def g(self) -> str: pass def h(self) -> None: pass class B(A): @int_to_none def f(self) -> int: pass @str_to_int def g(self) -> str: pass # E: Signature of "g" incompatible with supertype "A" @int_to_none @str_to_int def h(self) -> str: pass [case testOverrideDecorated] from typing import Callable def str_to_int(f: Callable[..., str]) -> Callable[..., int]: ... 
class A: @str_to_int def f(self) -> str: pass @str_to_int def g(self) -> str: pass @str_to_int def h(self) -> str: pass class B(A): def f(self) -> int: pass def g(self) -> str: pass # E: Signature of "g" incompatible with supertype "A" @str_to_int def h(self) -> str: pass [case testOverrideWithDecoratorReturningAny] def dec(f): pass class A: def f(self) -> str: pass class B(A): @dec def f(self) -> int: pass [case testOverrideWithDecoratorReturningInstance] def dec(f) -> str: pass class A: def f(self) -> str: pass @dec def g(self) -> int: pass @dec def h(self) -> int: pass class B(A): @dec def f(self) -> int: pass # E: Signature of "f" incompatible with supertype "A" def g(self) -> int: pass # E: Signature of "g" incompatible with supertype "A" @dec def h(self) -> str: pass [case testOverrideStaticMethodWithStaticMethod] class A: @staticmethod def f(x: int, y: str) -> None: pass @staticmethod def g(x: int, y: str) -> None: pass class B(A): @staticmethod def f(x: int, y: str) -> None: pass @staticmethod def g(x: str, y: str) -> None: pass # E: Argument 1 of "g" is incompatible with supertype "A"; supertype defines the argument type as "int" [builtins fixtures/classmethod.pyi] [case testOverrideClassMethodWithClassMethod] class A: @classmethod def f(cls, x: int, y: str) -> None: pass @classmethod def g(cls, x: int, y: str) -> None: pass class B(A): @classmethod def f(cls, x: int, y: str) -> None: pass @classmethod def g(cls, x: str, y: str) -> None: pass # E: Argument 1 of "g" is incompatible with supertype "A"; supertype defines the argument type as "int" [builtins fixtures/classmethod.pyi] [case testOverrideClassMethodWithStaticMethod] class A: @classmethod def f(cls, x: int) -> None: pass @classmethod def g(cls, x: int) -> int: pass @classmethod def h(cls) -> int: pass class B(A): @staticmethod def f(x: int) -> None: pass @staticmethod def g(x: str) -> int: pass # E: Argument 1 of "g" is incompatible with supertype "A"; supertype defines the argument type as "int" 
@staticmethod def h() -> int: pass [builtins fixtures/classmethod.pyi] [case testOverrideStaticMethodWithClassMethod] class A: @staticmethod def f(x: int) -> None: pass @staticmethod def g(x: str) -> int: pass @staticmethod def h() -> int: pass class B(A): @classmethod def f(cls, x: int) -> None: pass @classmethod def g(cls, x: int) -> int: pass # E: Argument 1 of "g" is incompatible with supertype "A"; supertype defines the argument type as "str" @classmethod def h(cls) -> int: pass [builtins fixtures/classmethod.pyi] -- Constructors -- ------------ [case testTrivialConstructor] import typing a = A() # type: A b = A() # type: B # Fail class A: def __init__(self) -> None: pass class B: pass [out] main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testConstructor] import typing a = A(B()) # type: A aa = A(object()) # type: A # Fail b = A(B()) # type: B # Fail class A: def __init__(self, x: 'B') -> None: pass class B: pass [out] main:3: error: Argument 1 to "A" has incompatible type "object"; expected "B" main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testConstructorWithTwoArguments] import typing a = A(C(), B()) # type: A # Fail class A: def __init__(self, x: 'B', y: 'C') -> None: pass class B: pass class C(B): pass [out] main:2: error: Argument 2 to "A" has incompatible type "B"; expected "C" [case testInheritedConstructor] import typing b = B(C()) # type: B a = B(D()) # type: A # Fail class A: def __init__(self, x: 'C') -> None: pass class B(A): pass class C: pass class D: pass [out] main:3: error: Argument 1 to "B" has incompatible type "D"; expected "C" [case testOverridingWithIncompatibleConstructor] import typing A() # Fail B(C()) # Fail A(C()) B() class A: def __init__(self, x: 'C') -> None: pass class B(A): def __init__(self) -> None: pass class C: pass [out] main:2: error: Too few arguments for "A" main:3: error: Too many arguments for "B" [case 
testConstructorWithReturnValueType] import typing class A: def __init__(self) -> 'A': pass [out] main:3: error: The return type of "__init__" must be None [case testConstructorWithImplicitReturnValueType] import typing class A: def __init__(self, x: int): pass [out] [case testDecoratedConstructorWithImplicitReturnValueType] import typing from typing import Callable def deco(fn: Callable) -> Callable: return fn class A: @deco def __init__(self, x: int): pass [out] [case testOverloadedConstructorWithImplicitReturnValueType] from foo import * [file foo.pyi] from typing import overload class Foo: @overload def __init__(self, a: int): pass @overload def __init__(self, a: str): pass [case testConstructorWithAnyReturnValueType] import typing from typing import Any class A: def __init__(self) -> Any: pass # E: The return type of "__init__" must be None [case testDecoratedConstructorWithAnyReturnValueType] import typing from typing import Callable, Any def deco(fn: Callable) -> Callable: return fn class A: @deco def __init__(self) -> Any: pass # E: The return type of "__init__" must be None [case testOverloadedConstructorWithAnyReturnValueType] from foo import * [file foo.pyi] from typing import overload, Any class Foo: @overload def __init__(self, a: int) -> Any: # E: The return type of "__init__" must be None pass @overload def __init__(self, a: str) -> Any: # E: The return type of "__init__" must be None pass [case testInitSubclassWithReturnValueType] import typing class A: def __init_subclass__(cls) -> 'A': pass [out] main:3: error: The return type of "__init_subclass__" must be None [case testInitSubclassWithImplicitReturnValueType] import typing class A: def __init_subclass__(cls, x: int=1): pass [out] [case testDecoratedInitSubclassWithImplicitReturnValueType] import typing from typing import Callable def deco(fn: Callable) -> Callable: return fn class A: @deco def __init_subclass__(cls, x: int=1): pass [out] [case 
testOverloadedInitSubclassWithImplicitReturnValueType] from foo import * [file foo.pyi] from typing import overload class Foo: @overload def __init_subclass__(cls, a: int): pass @overload def __init_subclass__(cls, a: str): pass [case testInitSubclassWithAnyReturnValueType] import typing from typing import Any class A: def __init_subclass__(cls) -> Any: pass # E: The return type of "__init_subclass__" must be None [case testDecoratedInitSubclassWithAnyReturnValueType] import typing from typing import Callable, Any def deco(fn: Callable) -> Callable: return fn class A: @deco def __init_subclass__(cls) -> Any: pass # E: The return type of "__init_subclass__" must be None [out] [case testOverloadedInitSubclassWithAnyReturnValueType] from foo import * [file foo.pyi] from typing import overload, Any class Foo: @overload def __init_subclass__(cls, a: int) -> Any: # E: The return type of "__init_subclass__" must be None pass @overload def __init_subclass__(cls, a: str) -> Any: # E: The return type of "__init_subclass__" must be None pass [case testGlobalFunctionInitWithReturnType] import typing a = __init__() # type: A b = __init__() # type: B # Fail def __init__() -> 'A': pass class A: pass class B: pass [out] main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testAccessingInit] from typing import Any, cast class A: def __init__(self, a: 'A') -> None: pass a = None # type: A a.__init__(a) # E: Cannot access "__init__" directly (cast(Any, a)).__init__(a) [case testDeepInheritanceHierarchy] import typing d = C() # type: D # E: Incompatible types in assignment (expression has type "C", variable has type "D") if int(): d = B() # E: Incompatible types in assignment (expression has type "B", variable has type "D") if int(): d = A() # E: Incompatible types in assignment (expression has type "A", variable has type "D") if int(): d = D2() # E: Incompatible types in assignment (expression has type "D2", variable has type "D") a = 
D() # type: A if int(): a = D2() b = D() # type: B if int(): b = D2() class A: pass class B(A): pass class C(B): pass class D(C): pass class D2(C): pass -- Attribute access in class body -- ------------------------------ [case testDataAttributeRefInClassBody] import typing class B: pass class A: x = B() y = x b = x # type: B if int(): b = x c = x # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): c = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") [out] [case testMethodRefInClassBody] from typing import Callable class B: pass class A: def f(self) -> None: pass g = f h = f # type: Callable[[A], None] if int(): h = f g = h ff = f # type: Callable[[B], None] # E: Incompatible types in assignment (expression has type "Callable[[A], None]", variable has type "Callable[[B], None]") if int(): g = ff # E: Incompatible types in assignment (expression has type "Callable[[B], None]", variable has type "Callable[[A], None]") [out] -- Arbitrary statements in class body -- ---------------------------------- [case testStatementsInClassBody] import typing class B: pass class A: for x in [A()]: y = x if int(): y = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): x = A() if int(): y = A() if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [builtins fixtures/for.pyi] [out] -- Class attributes -- ---------------- [case testAccessMethodViaClass] import typing class A: def f(self) -> None: pass A.f(A()) A.f(object()) # E: Argument 1 to "f" of "A" has incompatible type "object"; expected "A" A.f() # E: Too few arguments for "f" of "A" A.f(None, None) # E: Too many arguments for "f" of "A" [case testAccessAttributeViaClass] import typing class B: pass class A: x = None # type: A a = A.x # type: A b = A.x # type: B # E: Incompatible types in assignment (expression has type "A", variable has 
type "B") [case testAccessingUndefinedAttributeViaClass] import typing class A: pass A.x # E: "Type[A]" has no attribute "x" [case testAccessingUndefinedAttributeViaClassWithOverloadedInit] from foo import * [file foo.pyi] from typing import overload class A: @overload def __init__(self): pass @overload def __init__(self, x): pass A.x # E: "Type[A]" has no attribute "x" [case testAccessMethodOfClassWithOverloadedInit] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __init__(self) -> None: pass @overload def __init__(self, x: Any) -> None: pass def f(self) -> None: pass A.f(A()) A.f() # E: Too few arguments for "f" of "A" [case testAssignmentToClassDataAttribute] import typing class B: pass class A: x = None # type: B A.x = B() A.x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "B") [case testAssignmentToInferredClassDataAttribute] import typing class B: pass class A: x = B() A.x = B() A.x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testInitMethodUnbound] class B: pass class A: def __init__(self, b: B) -> None: pass a = None # type: A b = None # type: B A.__init__(a, b) A.__init__(b, b) # E: Argument 1 to "__init__" of "A" has incompatible type "B"; expected "A" A.__init__(a, a) # E: Argument 2 to "__init__" of "A" has incompatible type "A"; expected "B" [case testAssignToMethodViaClass] import typing class A: def f(self): pass A.f = None # E: Cannot assign to a method [case testAssignToNestedClassViaClass] import typing class A: class B: pass A.B = None # E: Cannot assign to a type [targets __main__] [case testAccessingClassAttributeWithTypeInferenceIssue] x = C.x # E: Cannot determine type of 'x' def f() -> int: return 1 class C: x = f() [builtins fixtures/list.pyi] [case testAccessingClassAttributeWithTypeInferenceIssue2] class C: x = [] x = C.x [builtins fixtures/list.pyi] [out] main:2: error: Need type annotation for 
'x' (hint: "x: List[] = ...") [case testAccessingGenericClassAttribute] from typing import Generic, TypeVar T = TypeVar('T') class A(Generic[T]): x = None # type: T A.x # E: Access to generic instance variables via class is ambiguous A[int].x # E: Access to generic instance variables via class is ambiguous [targets __main__] [case testAccessingNestedGenericClassAttribute] from typing import Generic, List, TypeVar, Union T = TypeVar('T') U = TypeVar('U') class A(Generic[T, U]): x = None # type: Union[T, List[U]] A.x # E: Access to generic instance variables via class is ambiguous A[int, int].x # E: Access to generic instance variables via class is ambiguous [builtins fixtures/list.pyi] -- Nested classes -- -------------- [case testClassWithinFunction] def f() -> None: class A: def g(self) -> None: pass a = None # type: A a.g() a.g(a) # E: Too many arguments for "g" of "A" [targets __main__, __main__.f] [case testConstructNestedClass] import typing class A: class B: pass b = B() if int(): b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b = B(b) # E: Too many arguments for "B" [out] [case testConstructNestedClassWithCustomInit] import typing class A: def f(self) -> None: class B: def __init__(self, a: 'A') -> None: pass b = B(A()) if int(): b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = B() # E: Too few arguments for "B" [out] [case testDeclareVariableWithNestedClassType] def f() -> None: class A: pass a = None # type: A if int(): a = A() a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") [out] [case testExternalReferenceToClassWithinClass] class A: class B: pass b = None # type: A.B if int(): b = A.B() if int(): b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b = A.B(b) # E: Too many arguments for "B" [case testAliasNestedClass] class Outer: class 
Inner: def make_int(self) -> int: return 1 reveal_type(Inner().make_int) # N: Revealed type is 'def () -> builtins.int' some_int = Inner().make_int() reveal_type(Outer.Inner.make_int) # N: Revealed type is 'def (self: __main__.Outer.Inner) -> builtins.int' reveal_type(Outer().some_int) # N: Revealed type is 'builtins.int' Bar = Outer.Inner reveal_type(Bar.make_int) # N: Revealed type is 'def (self: __main__.Outer.Inner) -> builtins.int' x = Bar() # type: Bar def produce() -> Bar: reveal_type(Bar().make_int) # N: Revealed type is 'def () -> builtins.int' return Bar() [case testInnerClassPropertyAccess] class Foo: class Meta: name = 'Bar' meta = Meta reveal_type(Foo.Meta) # N: Revealed type is 'def () -> __main__.Foo.Meta' reveal_type(Foo.meta) # N: Revealed type is 'def () -> __main__.Foo.Meta' reveal_type(Foo.Meta.name) # N: Revealed type is 'builtins.str' reveal_type(Foo.meta.name) # N: Revealed type is 'builtins.str' reveal_type(Foo().Meta) # N: Revealed type is 'def () -> __main__.Foo.Meta' reveal_type(Foo().meta) # N: Revealed type is 'def () -> __main__.Foo.Meta' reveal_type(Foo().meta.name) # N: Revealed type is 'builtins.str' reveal_type(Foo().Meta.name) # N: Revealed type is 'builtins.str' -- Declaring attribute type in method -- ---------------------------------- [case testDeclareAttributeTypeInInit] class A: def __init__(self): self.x = None # type: int a = None # type: A a.x = 1 a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testAccessAttributeDeclaredInInitBeforeDeclaration] a = None # type: A a.x = 1 a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") class A: def __init__(self): self.x = None # type: int -- Special cases -- ------------- [case testMultipleClassDefinition] import typing A() class A: pass class A: pass [out] main:4: error: Name 'A' already defined on line 3 [case testDocstringInClass] import typing class A: """Foo""" class B: 'x' y 
= B() [builtins fixtures/primitives.pyi] [case testErrorMessageInFunctionNestedWithinMethod] import typing class A: def f(self) -> None: def g() -> None: "" + 1 # E: Unsupported operand types for + ("str" and "int") "" + 1 # E: Unsupported operand types for + ("str" and "int") [out] -- Static methods -- -------------- [case testSimpleStaticMethod] import typing class A: @staticmethod def f(x: int) -> None: pass A.f(1) A().f(1) A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [builtins fixtures/staticmethod.pyi] [case testBuiltinStaticMethod] import typing int.from_bytes(b'', '') int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes" [builtins fixtures/staticmethod.pyi] [case testAssignStaticMethodOnInstance] import typing class A: @staticmethod def f(x: int) -> None: pass A().f = A.f # E: Cannot assign to a method [builtins fixtures/staticmethod.pyi] -- Class methods -- ------------- [case testSimpleClassMethod] import typing class A: @classmethod def f(cls, x: int) -> None: pass A.f(1) A().f(1) A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [builtins fixtures/classmethod.pyi] [targets __main__, __main__.A.f] [case testBuiltinClassMethod] import typing int.from_bytes(b'', '') int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes" [builtins fixtures/classmethod.pyi] [case testAssignClassMethodOnClass] import typing class A: @classmethod def f(cls, x: int) -> None: pass A.f = A.f # E: Cannot assign to a method [builtins fixtures/classmethod.pyi] [case testAssignClassMethodOnInstance] import typing class A: @classmethod def f(cls, x: int) -> None: pass A().f = A.f # E: Cannot assign to a method [builtins 
fixtures/classmethod.pyi] [case testClassMethodCalledInClassMethod] import typing class C: @classmethod def foo(cls) -> None: pass @classmethod def bar(cls) -> None: cls() cls(1) # E: Too many arguments for "C" cls.bar() cls.bar(1) # E: Too many arguments for "bar" of "C" cls.bozo() # E: "Type[C]" has no attribute "bozo" [builtins fixtures/classmethod.pyi] [out] [case testClassMethodCalledOnClass] import typing class C: @classmethod def foo(cls) -> None: pass C.foo() C.foo(1) # E: Too many arguments for "foo" of "C" C.bozo() # E: "Type[C]" has no attribute "bozo" [builtins fixtures/classmethod.pyi] [case testClassMethodCalledOnInstance] import typing class C: @classmethod def foo(cls) -> None: pass C().foo() C().foo(1) # E: Too many arguments for "foo" of "C" C.bozo() # E: "Type[C]" has no attribute "bozo" [builtins fixtures/classmethod.pyi] [case testClassMethodMayCallAbstractMethod] from abc import abstractmethod import typing class C: @classmethod def foo(cls) -> None: cls().bar() @abstractmethod def bar(self) -> None: pass [builtins fixtures/classmethod.pyi] [case testClassMethodSubclassing] class A: @classmethod def f(cls) -> None: pass def g(self) -> None: pass class B(A): def f(self) -> None: pass # E: Signature of "f" incompatible with supertype "A" @classmethod def g(cls) -> None: pass class C(A): @staticmethod def f() -> None: pass [builtins fixtures/classmethod.pyi] -- Properties -- ---------- [case testAccessingReadOnlyProperty] import typing class A: @property def f(self) -> str: pass a = A() reveal_type(a.f) # N: Revealed type is 'builtins.str' [builtins fixtures/property.pyi] [case testAssigningToReadOnlyProperty] import typing class A: @property def f(self) -> str: pass A().f = '' # E: Property "f" defined in "A" is read-only [builtins fixtures/property.pyi] [case testAssigningToInheritedReadOnlyProperty] class A: @property def f(self) -> str: pass class B(A): pass class C(A): @property def f(self) -> str: pass A().f = '' # E: Property "f" defined 
in "A" is read-only B().f = '' # E: Property "f" defined in "A" is read-only C().f = '' # E: Property "f" defined in "C" is read-only [builtins fixtures/property.pyi] [case testPropertyGetterBody] import typing class A: @property def f(self) -> str: self.x = 1 self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") return '' [builtins fixtures/property.pyi] [out] [case testDynamicallyTypedProperty] import typing class A: @property def f(self): pass a = A() a.f.xx a.f = '' # E: Property "f" defined in "A" is read-only [builtins fixtures/property.pyi] [case testPropertyWithSetter] import typing class A: @property def f(self) -> int: return 1 @f.setter def f(self, x: int) -> None: pass a = A() a.f = a.f a.f.x # E: "int" has no attribute "x" a.f = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") a.f = 1 reveal_type(a.f) # N: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] [case testPropertyWithDeleterButNoSetter] import typing class A: @property def f(self) -> int: return 1 @f.deleter def f(self, x) -> None: pass a = A() a.f = a.f # E: Property "f" defined in "A" is read-only a.f.x # E: "int" has no attribute "x" [builtins fixtures/property.pyi] -- Descriptors -- ----------- [case testAccessingNonDataDescriptor] from typing import Any class D: def __get__(self, inst: Any, own: Any) -> str: return 's' class A: f = D() a = A() reveal_type(a.f) # N: Revealed type is 'builtins.str' [case testSettingNonDataDescriptor] from typing import Any class D: def __get__(self, inst: Any, own: Any) -> str: return 's' class A: f = D() a = A() a.f = 'foo' a.f = D() # E: Incompatible types in assignment (expression has type "D", variable has type "str") [case testSettingDataDescriptor] from typing import Any class D: def __get__(self, inst: Any, own: Any) -> str: return 's' def __set__(self, inst: Any, value: str) -> None: pass class A: f = D() a = A() a.f = '' a.f = 1 # E: 
Incompatible types in assignment (expression has type "int", variable has type "str") [case testReadingDescriptorWithoutDunderGet] from typing import Union, Any class D: def __set__(self, inst: Any, value: str) -> None: pass class A: f = D() def __init__(self): self.f = 's' a = A() reveal_type(a.f) # N: Revealed type is '__main__.D' [case testAccessingDescriptorFromClass] # flags: --strict-optional from d import D, Base class A(Base): f = D() reveal_type(A.f) # N: Revealed type is 'd.D' reveal_type(A().f) # N: Revealed type is 'builtins.str' [file d.pyi] from typing import TypeVar, Type, Generic, overload class Base: pass class D: def __init__(self) -> None: pass @overload def __get__(self, inst: None, own: Type[Base]) -> D: pass @overload def __get__(self, inst: Base, own: Type[Base]) -> str: pass [builtins fixtures/bool.pyi] [case testAccessingDescriptorFromClassWrongBase] # flags: --strict-optional from d import D, Base class A: f = D() reveal_type(A.f) reveal_type(A().f) [file d.pyi] from typing import TypeVar, Type, Generic, overload class Base: pass class D: def __init__(self) -> None: pass @overload def __get__(self, inst: None, own: Type[Base]) -> D: pass @overload def __get__(self, inst: Base, own: Type[Base]) -> str: pass [builtins fixtures/bool.pyi] [out] main:5: error: Argument 2 to "__get__" of "D" has incompatible type "Type[A]"; expected "Type[Base]" main:5: note: Revealed type is 'd.D' main:6: error: No overload variant of "__get__" of "D" matches argument types "A", "Type[A]" main:6: note: Possible overload variants: main:6: note: def __get__(self, inst: None, own: Type[Base]) -> D main:6: note: def __get__(self, inst: Base, own: Type[Base]) -> str main:6: note: Revealed type is 'Any' [case testAccessingGenericNonDataDescriptor] from typing import TypeVar, Type, Generic, Any V = TypeVar('V') class D(Generic[V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: Any, own: Type) -> V: return self.v class A: f = D(10) g = D('10') a 
= A() reveal_type(a.f) # N: Revealed type is 'builtins.int*' reveal_type(a.g) # N: Revealed type is 'builtins.str*' [case testSettingGenericDataDescriptor] from typing import TypeVar, Type, Generic, Any V = TypeVar('V') class D(Generic[V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: Any, own: Type) -> V: return self.v def __set__(self, inst: Any, v: V) -> None: pass class A: f = D(10) g = D('10') a = A() a.f = 1 a.f = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") a.g = '' a.g = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAccessingGenericDescriptorFromClass] # flags: --strict-optional from d import D class A: f = D(10) # type: D[A, int] g = D('10') # type: D[A, str] reveal_type(A.f) # N: Revealed type is 'd.D[__main__.A*, builtins.int*]' reveal_type(A.g) # N: Revealed type is 'd.D[__main__.A*, builtins.str*]' reveal_type(A().f) # N: Revealed type is 'builtins.int*' reveal_type(A().g) # N: Revealed type is 'builtins.str*' [file d.pyi] from typing import TypeVar, Type, Generic, overload T = TypeVar('T') V = TypeVar('V') class D(Generic[T, V]): def __init__(self, v: V) -> None: pass @overload def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass @overload def __get__(self, inst: T, own: Type[T]) -> V: pass [builtins fixtures/bool.pyi] [case testAccessingGenericDescriptorFromInferredClass] # flags: --strict-optional from typing import Type from d import D class A: f = D(10) # type: D[A, int] g = D('10') # type: D[A, str] def f(some_class: Type[A]): reveal_type(some_class.f) reveal_type(some_class.g) [file d.pyi] from typing import TypeVar, Type, Generic, overload T = TypeVar('T') V = TypeVar('V') class D(Generic[T, V]): def __init__(self, v: V) -> None: pass @overload def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass @overload def __get__(self, inst: T, own: Type[T]) -> V: pass [builtins fixtures/bool.pyi] [out] 
main:8: note: Revealed type is 'd.D[__main__.A*, builtins.int*]' main:9: note: Revealed type is 'd.D[__main__.A*, builtins.str*]' [case testAccessingGenericDescriptorFromClassBadOverload] # flags: --strict-optional from d import D class A: f = D(10) # type: D[A, int] reveal_type(A.f) [file d.pyi] from typing import TypeVar, Type, Generic, overload T = TypeVar('T') V = TypeVar('V') class D(Generic[T, V]): def __init__(self, v: V) -> None: pass @overload def __get__(self, inst: None, own: None) -> 'D[T, V]': pass @overload def __get__(self, inst: T, own: Type[T]) -> V: pass [builtins fixtures/bool.pyi] [out] main:5: error: No overload variant of "__get__" of "D" matches argument types "None", "Type[A]" main:5: note: Possible overload variants: main:5: note: def __get__(self, inst: None, own: None) -> D[A, int] main:5: note: def __get__(self, inst: A, own: Type[A]) -> int main:5: note: Revealed type is 'Any' [case testAccessingNonDataDescriptorSubclass] from typing import Any class C: def __get__(self, inst: Any, own: Any) -> str: return 's' class D(C): pass class A: f = D() a = A() reveal_type(a.f) # N: Revealed type is 'builtins.str' [case testSettingDataDescriptorSubclass] from typing import Any class C: def __get__(self, inst: Any, own: Any) -> str: return 's' def __set__(self, inst: Any, v: str) -> None: pass class D(C): pass class A: f = D() a = A() a.f = '' a.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testReadingDescriptorSubclassWithoutDunderGet] from typing import Union, Any class C: def __set__(self, inst: Any, v: str) -> None: pass class D(C): pass class A: f = D() def __init__(self): self.f = 's' a = A() reveal_type(a.f) # N: Revealed type is '__main__.D' [case testAccessingGenericNonDataDescriptorSubclass] from typing import TypeVar, Type, Generic, Any V = TypeVar('V') class C(Generic[V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: Any, own: Type) -> V: return self.v 
class D(C[V], Generic[V]): pass class A: f = D(10) g = D('10') a = A() reveal_type(a.f) # N: Revealed type is 'builtins.int*' reveal_type(a.g) # N: Revealed type is 'builtins.str*' [case testSettingGenericDataDescriptorSubclass] from typing import TypeVar, Type, Generic T = TypeVar('T') V = TypeVar('V') class C(Generic[T, V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: T, own: Type[T]) -> V: return self.v def __set__(self, inst: T, v: V) -> None: pass class D(C[T, V], Generic[T, V]): pass class A: f = D(10) # type: D[A, int] g = D('10') # type: D[A, str] a = A() a.f = 1 a.f = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") a.g = '' a.g = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testSetDescriptorOnClass] from typing import TypeVar, Type, Generic T = TypeVar('T') V = TypeVar('V') class D(Generic[T, V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: T, own: Type[T]) -> V: return self.v def __set__(self, inst: T, v: V) -> None: pass class A: f = D(10) # type: D[A, int] A.f = D(20) A.f = D('some string') # E: Argument 1 to "D" has incompatible type "str"; expected "int" [case testSetDescriptorOnInferredClass] from typing import TypeVar, Type, Generic, Any V = TypeVar('V') class D(Generic[V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: Any, own: Type) -> V: return self.v def __set__(self, inst: Any, v: V) -> None: pass class A: f = D(10) def f(some_class: Type[A]): A.f = D(20) A.f = D('some string') [out] main:11: error: Argument 1 to "D" has incompatible type "str"; expected "int" [case testDescriptorUncallableDunderSet] class D: __set__ = 's' class A: f = D() A().f = 'x' # E: __main__.D.__set__ is not callable [case testDescriptorDunderSetTooFewArgs] class D: def __set__(self, inst): pass class A: f = D() A().f = 'x' # E: Too many arguments for "__set__" [case 
testDescriptorDunderSetTooManyArgs] class D: def __set__(self, inst, v, other): pass class A: f = D() A().f = 'x' # E: Too few arguments for "__set__" [case testDescriptorDunderSetWrongArgTypes] class D: def __set__(self, inst: str, v:str) -> None: pass class A: f = D() A().f = 'x' # E: Argument 1 to "__set__" of "D" has incompatible type "A"; expected "str" [case testDescriptorUncallableDunderGet] class D: __get__ = 's' class A: f = D() A().f # E: __main__.D.__get__ is not callable [case testDescriptorDunderGetTooFewArgs] class D: def __get__(self, inst): pass class A: f = D() A().f # E: Too many arguments for "__get__" [case testDescriptorDunderGetTooManyArgs] class D: def __get__(self, inst, own, other): pass class A: f = D() A().f = 'x' # E: Too few arguments for "__get__" [case testDescriptorDunderGetWrongArgTypeForInstance] from typing import Any class D: def __get__(self, inst: str, own: Any) -> Any: pass class A: f = D() A().f # E: Argument 1 to "__get__" of "D" has incompatible type "A"; expected "str" [case testDescriptorDunderGetWrongArgTypeForOwner] from typing import Any class D: def __get__(self, inst: Any, own: str) -> Any: pass class A: f = D() A().f # E: Argument 2 to "__get__" of "D" has incompatible type "Type[A]"; expected "str" [case testDescriptorGetSetDifferentTypes] from typing import Any class D: def __get__(self, inst: Any, own: Any) -> str: return 's' def __set__(self, inst: Any, v: int) -> None: pass class A: f = D() a = A() a.f = 1 reveal_type(a.f) # N: Revealed type is 'builtins.str' [case testDescriptorGetUnion] from typing import Any, Union class String: def __get__(self, inst: Any, owner: Any) -> str: return '' class A: attr: str class B: attr = String() def foo(x: Union[A, B]) -> None: reveal_type(x.attr) # N: Revealed type is 'builtins.str' -- _promote decorators -- ------------------- [case testSimpleDucktypeDecorator] from typing import _promote class A: pass @_promote(A) class B: pass a = None # type: A b = None # type: B if 
int(): b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = b [typing fixtures/typing-full.pyi] [case testDucktypeTransitivityDecorator] from typing import _promote class A: pass @_promote(A) class B: pass @_promote(B) class C: pass a = None # type: A c = None # type: C if int(): c = a # E: Incompatible types in assignment (expression has type "A", variable has type "C") a = c [typing fixtures/typing-full.pyi] -- Hard coded type promotions -- -------------------------- [case testHardCodedTypePromotions] import typing def f(x: float) -> None: pass def g(x: complex) -> None: pass f(1) g(1) g(1.1) [builtins fixtures/complex.pyi] -- Operator methods -- ---------------- [case testOperatorMethodOverrideIntroducingOverloading] from foo import * [file foo.pyi] from typing import overload class A: def __add__(self, x: int) -> int: pass class B(A): @overload # E: Signature of "__add__" incompatible with supertype "A" \ # N: Overloaded operator methods can't have wider argument types in overrides def __add__(self, x: int) -> int: pass @overload def __add__(self, x: str) -> str: pass [out] [case testOperatorMethodOverrideWideningArgumentType] import typing class A: def __add__(self, x: int) -> int: pass class B(A): def __add__(self, x: object) -> int: pass [out] [case testOperatorMethodOverrideNarrowingReturnType] import typing class A: def __add__(self, x: int) -> 'A': pass class B(A): def __add__(self, x: int) -> 'B': pass [case testOperatorMethodOverrideWithDynamicallyTyped] import typing class A: def __add__(self, x: int) -> 'A': pass class B(A): def __add__(self, x): pass [case testOperatorMethodAgainstSameType] class A: def __add__(self, x: int) -> 'A': if isinstance(x, int): return A() else: return NotImplemented def __radd__(self, x: 'A') -> 'A': if isinstance(x, A): return A() else: return NotImplemented class B(A): pass # Note: This is a runtime error. 
If we run x.__add__(y) # where x and y are *not* the same type, Python will not try # calling __radd__. A() + A() # E: Unsupported operand types for + ("A" and "A") # Here, Python *will* call __radd__(...) reveal_type(B() + A()) # N: Revealed type is '__main__.A' reveal_type(A() + B()) # N: Revealed type is '__main__.A' [builtins fixtures/isinstance.pyi] [case testOperatorMethodOverrideWithIdenticalOverloadedType] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: int) -> 'A': pass @overload def __add__(self, x: str) -> 'A': pass class B(A): @overload def __add__(self, x: int) -> 'A': pass @overload def __add__(self, x: str) -> 'A': pass [case testOverloadedOperatorMethodOverrideWithDynamicallyTypedMethod] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __add__(self, x: int) -> 'A': pass @overload def __add__(self, x: str) -> 'A': pass class B(A): def __add__(self, x): pass class C(A): def __add__(self, x: Any) -> A: pass [case testOverloadedOperatorMethodOverrideWithNewItem] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __add__(self, x: int) -> 'A': pass @overload def __add__(self, x: str) -> 'A': pass class B(A): @overload # E: Signature of "__add__" incompatible with supertype "A" \ # N: Overloaded operator methods can't have wider argument types in overrides def __add__(self, x: int) -> A: pass @overload def __add__(self, x: str) -> A: pass @overload def __add__(self, x: type) -> A: pass [case testOverloadedOperatorMethodOverrideWithSwitchedItemOrder] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __add__(self, x: 'B') -> 'B': pass @overload def __add__(self, x: 'A') -> 'A': pass class B(A): @overload def __add__(self, x: 'A') -> 'A': pass @overload def __add__(self, x: 'B') -> 'B': pass class C(A): @overload def __add__(self, x: 'B') -> 'B': pass @overload def __add__(self, x: 'A') -> 
'A': pass [out] tmp/foo.pyi:8: error: Signature of "__add__" incompatible with supertype "A" tmp/foo.pyi:8: note: Overload variants must be defined in the same order as they are in "A" tmp/foo.pyi:11: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader [case testReverseOperatorMethodArgumentType] from typing import Any class A: pass class B: def __radd__(self, x: A) -> int: pass # Error class C: def __radd__(self, x: A) -> Any: pass class D: def __radd__(self, x: A) -> object: pass [out] [case testReverseOperatorMethodArgumentType2] from typing import Any, Tuple, Callable class A: def __radd__(self, x: Tuple[int, str]) -> int: pass class B: def __radd__(self, x: Callable[[], int]) -> int: pass class C: def __radd__(self, x: Any) -> int: pass [out] [case testReverseOperatorMethodInvalid] from foo import * [file foo.pyi] class A: ... class B: def __rmul__(self) -> A: ... class C: def __radd__(self, other, oops) -> int: ... [out] tmp/foo.pyi:3: error: Invalid signature "def (foo.B) -> foo.A" tmp/foo.pyi:5: error: Invalid signature "def (foo.C, Any, Any) -> builtins.int" [case testReverseOperatorOrderingCase1] class A: def __radd__(self, other: 'A') -> int: ... # Note: Python only tries calling __add__ and never __radd__, even though it's present A() + A() # E: Unsupported left operand type for + ("A") [case testReverseOperatorOrderingCase2] class A: def __lt__(self, other: object) -> bool: ... # Not all operators have the above shortcut though. reveal_type(A() > A()) # N: Revealed type is 'builtins.bool' reveal_type(A() < A()) # N: Revealed type is 'builtins.bool' [builtins fixtures/bool.pyi] [case testReverseOperatorOrderingCase3] class A: def __add__(self, other: B) -> int: ... class B: def __radd__(self, other: A) -> str: ... 
# E: Signatures of "__radd__" of "B" and "__add__" of "A" are unsafely overlapping # Normally, we try calling __add__ before __radd__ reveal_type(A() + B()) # N: Revealed type is 'builtins.int' [case testReverseOperatorOrderingCase4] class A: def __add__(self, other: B) -> int: ... class B(A): def __radd__(self, other: A) -> str: ... # E: Signatures of "__radd__" of "B" and "__add__" of "A" are unsafely overlapping # However, if B is a subtype of A, we try calling __radd__ first. reveal_type(A() + B()) # N: Revealed type is 'builtins.str' [case testReverseOperatorOrderingCase5] # Note: these two methods are not unsafely overlapping because __radd__ is # never called -- see case 1. class A: def __add__(self, other: B) -> int: ... def __radd__(self, other: A) -> str: ... class B(A): pass # ...but only if B specifically defines a new __radd__. reveal_type(A() + B()) # N: Revealed type is 'builtins.int' [case testReverseOperatorOrderingCase6] class A: def __add__(self, other: B) -> int: ... def __radd__(self, other: A) -> str: ... class B(A): # Although A.__radd__ can never be called, B.__radd__ *can* be -- so the # unsafe overlap check kicks in here. def __radd__(self, other: A) -> str: ... # E: Signatures of "__radd__" of "B" and "__add__" of "A" are unsafely overlapping reveal_type(A() + B()) # N: Revealed type is 'builtins.str' [case testReverseOperatorOrderingCase7] class A: def __add__(self, other: B) -> int: ... def __radd__(self, other: A) -> str: ... class B(A): def __radd__(self, other: A) -> str: ... # E: Signatures of "__radd__" of "B" and "__add__" of "A" are unsafely overlapping class C(B): pass # A refinement made by a parent also counts reveal_type(A() + C()) # N: Revealed type is 'builtins.str' [case testReverseOperatorWithOverloads1] from typing import overload class A: def __add__(self, other: C) -> int: ... class B: def __add__(self, other: C) -> int: ... class C: @overload def __radd__(self, other: A) -> str: ... 
# E: Signatures of "__radd__" of "C" and "__add__" of "A" are unsafely overlapping @overload def __radd__(self, other: B) -> str: ... # E: Signatures of "__radd__" of "C" and "__add__" of "B" are unsafely overlapping def __radd__(self, other): pass reveal_type(A() + C()) # N: Revealed type is 'builtins.int' reveal_type(B() + C()) # N: Revealed type is 'builtins.int' [case testReverseOperatorWithOverloads2] from typing import overload, Union class Num1: def __add__(self, other: Num1) -> Num1: ... def __radd__(self, other: Num1) -> Num1: ... class Num2(Num1): @overload def __add__(self, other: Num2) -> Num2: ... @overload def __add__(self, other: Num1) -> Num2: ... def __add__(self, other): pass @overload def __radd__(self, other: Num2) -> Num2: ... @overload def __radd__(self, other: Num1) -> Num2: ... def __radd__(self, other): pass class Num3(Num1): def __add__(self, other: Union[Num1, Num3]) -> Num3: ... def __radd__(self, other: Union[Num1, Num3]) -> Num3: ... reveal_type(Num1() + Num2()) # N: Revealed type is '__main__.Num2' reveal_type(Num2() + Num1()) # N: Revealed type is '__main__.Num2' reveal_type(Num1() + Num3()) # N: Revealed type is '__main__.Num3' reveal_type(Num3() + Num1()) # N: Revealed type is '__main__.Num3' reveal_type(Num2() + Num3()) # N: Revealed type is '__main__.Num2' reveal_type(Num3() + Num2()) # N: Revealed type is '__main__.Num3' [case testDivReverseOperatorPython3] # No error: __div__ has no special meaning in Python 3 class A1: def __div__(self, x: B1) -> int: ... class B1: def __rdiv__(self, x: A1) -> str: ... class A2: def __truediv__(self, x: B2) -> int: ... class B2: def __rtruediv__(self, x: A2) -> str: ... 
# E: Signatures of "__rtruediv__" of "B2" and "__truediv__" of "A2" are unsafely overlapping A1() / B1() # E: Unsupported left operand type for / ("A1") reveal_type(A2() / B2()) # N: Revealed type is 'builtins.int' [case testDivReverseOperatorPython2] # flags: --python-version 2.7 # Note: if 'from __future__ import division' is called, we use # __truediv__. Otherwise, we use __div__. So, we check both: class A1: def __div__(self, x): # type: (B1) -> int pass class B1: def __rdiv__(self, x): # E: Signatures of "__rdiv__" of "B1" and "__div__" of "A1" are unsafely overlapping # type: (A1) -> str pass class A2: def __truediv__(self, x): # type: (B2) -> int pass class B2: def __rtruediv__(self, x): # E: Signatures of "__rtruediv__" of "B2" and "__truediv__" of "A2" are unsafely overlapping # type: (A2) -> str pass # That said, mypy currently doesn't handle the actual division operation very # gracefully -- it doesn't correctly switch to using __truediv__ when # 'from __future__ import division' is included, it doesn't display a very # graceful error if __div__ is missing but __truediv__ is present... 
# Also see https://github.com/python/mypy/issues/2048 reveal_type(A1() / B1()) # N: Revealed type is 'builtins.int' A2() / B2() # E: "A2" has no attribute "__div__" [case testReverseOperatorMethodForwardIsAny] from typing import Any def deco(f: Any) -> Any: return f class C: @deco def __add__(self, other: C) -> C: return C() def __radd__(self, other: C) -> C: return C() [out] [case testReverseOperatorMethodForwardIsAny2] from typing import Any def deco(f: Any) -> Any: return f class C: __add__ = None # type: Any def __radd__(self, other: C) -> C: return C() [out] [case testReverseOperatorMethodForwardIsAny3] from typing import Any def deco(f: Any) -> Any: return f class C: __add__ = 42 def __radd__(self, other: C) -> C: return C() [out] main:5: error: Forward operator "__add__" is not callable [case testOverloadedReverseOperatorMethodArgumentType] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __radd__(self, x: 'A') -> str: pass @overload def __radd__(self, x: 'A') -> Any: pass # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader [out] [case testReverseOperatorMethodArgumentTypeAndOverloadedMethod] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: int) -> int: pass @overload def __add__(self, x: str) -> int: pass def __radd__(self, x: 'A') -> str: pass [case testReverseOperatorStar] class B: def __radd__(*self) -> int: pass def __rsub__(*self: 'B') -> int: pass [case testReverseOperatorTypeVar1] from typing import TypeVar, Any T = TypeVar("T", bound='Real') class Real: def __add__(self, other: Any) -> str: ... class Fraction(Real): def __radd__(self, other: T) -> T: ... # E: Signatures of "__radd__" of "Fraction" and "__add__" of "T" are unsafely overlapping # Note: When doing A + B and if B is a subtype of A, we will always call B.__radd__(A) first # and only try A.__add__(B) second if necessary. 
reveal_type(Real() + Fraction()) # N: Revealed type is '__main__.Real*' # Note: When doing A + A, we only ever call A.__add__(A), never A.__radd__(A). reveal_type(Fraction() + Fraction()) # N: Revealed type is 'builtins.str' [case testReverseOperatorTypeVar2a] from typing import TypeVar T = TypeVar("T", bound='Real') class Real: def __add__(self, other: Fraction) -> str: ... class Fraction(Real): def __radd__(self, other: T) -> T: ... # E: Signatures of "__radd__" of "Fraction" and "__add__" of "T" are unsafely overlapping reveal_type(Real() + Fraction()) # N: Revealed type is '__main__.Real*' reveal_type(Fraction() + Fraction()) # N: Revealed type is 'builtins.str' [case testReverseOperatorTypeVar2b] from typing import TypeVar T = TypeVar("T", Real, Fraction) class Real: def __add__(self, other: Fraction) -> str: ... class Fraction(Real): def __radd__(self, other: T) -> T: ... # E: Signatures of "__radd__" of "Fraction" and "__add__" of "Real" are unsafely overlapping reveal_type(Real() + Fraction()) # N: Revealed type is '__main__.Real*' reveal_type(Fraction() + Fraction()) # N: Revealed type is 'builtins.str' [case testReverseOperatorTypeVar3] from typing import TypeVar, Any T = TypeVar("T", bound='Real') class Real: def __add__(self, other: FractionChild) -> str: ... class Fraction(Real): def __radd__(self, other: T) -> T: ... # E: Signatures of "__radd__" of "Fraction" and "__add__" of "T" are unsafely overlapping class FractionChild(Fraction): pass reveal_type(Real() + Fraction()) # N: Revealed type is '__main__.Real*' reveal_type(FractionChild() + Fraction()) # N: Revealed type is '__main__.FractionChild*' reveal_type(FractionChild() + FractionChild()) # N: Revealed type is 'builtins.str' # Runtime error: we try calling __add__, it doesn't match, and we don't try __radd__ since # the LHS and the RHS are not the same. 
Fraction() + Fraction() # E: Unsupported operand types for + ("Fraction" and "Fraction") [case testReverseOperatorTypeType] from typing import TypeVar, Type class Real(type): def __add__(self, other: FractionChild) -> str: ... class Fraction(Real): def __radd__(self, other: Type['A']) -> Real: ... # E: Signatures of "__radd__" of "Fraction" and "__add__" of "Type[A]" are unsafely overlapping class FractionChild(Fraction): pass class A(metaclass=Real): pass [case testOperatorDoubleUnionIntFloat] from typing import Union a: Union[int, float] b: int c: float reveal_type(a + a) # N: Revealed type is 'builtins.float' reveal_type(a + b) # N: Revealed type is 'builtins.float' reveal_type(b + a) # N: Revealed type is 'builtins.float' reveal_type(a + c) # N: Revealed type is 'builtins.float' reveal_type(c + a) # N: Revealed type is 'builtins.float' [builtins fixtures/ops.pyi] [case testOperatorDoubleUnionStandardSubtyping] from typing import Union class Parent: def __add__(self, x: Parent) -> Parent: pass def __radd__(self, x: Parent) -> Parent: pass class Child(Parent): def __add__(self, x: Parent) -> Child: pass def __radd__(self, x: Parent) -> Child: pass a: Union[Parent, Child] b: Parent c: Child reveal_type(a + a) # N: Revealed type is '__main__.Parent' reveal_type(a + b) # N: Revealed type is '__main__.Parent' reveal_type(b + a) # N: Revealed type is '__main__.Parent' reveal_type(a + c) # N: Revealed type is '__main__.Child' reveal_type(c + a) # N: Revealed type is '__main__.Child' [case testOperatorDoubleUnionNoRelationship1] from typing import Union class Foo: def __add__(self, x: Foo) -> Foo: pass def __radd__(self, x: Foo) -> Foo: pass class Bar: def __add__(self, x: Bar) -> Bar: pass def __radd__(self, x: Bar) -> Bar: pass a: Union[Foo, Bar] b: Foo c: Bar a + a # E: Unsupported operand types for + ("Foo" and "Bar") \ # E: Unsupported operand types for + ("Bar" and "Foo") \ # N: Both left and right operands are unions a + b # E: Unsupported operand types for + 
("Bar" and "Foo") \ # N: Left operand is of type "Union[Foo, Bar]" b + a # E: Unsupported operand types for + ("Foo" and "Bar") \ # N: Right operand is of type "Union[Foo, Bar]" a + c # E: Unsupported operand types for + ("Foo" and "Bar") \ # N: Left operand is of type "Union[Foo, Bar]" c + a # E: Unsupported operand types for + ("Bar" and "Foo") \ # N: Right operand is of type "Union[Foo, Bar]" [case testOperatorDoubleUnionNoRelationship2] from typing import Union class Foo: def __add__(self, x: Foo) -> Foo: pass def __radd__(self, x: Foo) -> Foo: pass class Bar: def __add__(self, x: Union[Foo, Bar]) -> Bar: pass def __radd__(self, x: Union[Foo, Bar]) -> Bar: pass a: Union[Foo, Bar] b: Foo c: Bar reveal_type(a + a) # N: Revealed type is 'Union[__main__.Foo, __main__.Bar]' reveal_type(a + b) # N: Revealed type is 'Union[__main__.Foo, __main__.Bar]' reveal_type(b + a) # N: Revealed type is 'Union[__main__.Foo, __main__.Bar]' reveal_type(a + c) # N: Revealed type is '__main__.Bar' reveal_type(c + a) # N: Revealed type is '__main__.Bar' [case testOperatorDoubleUnionNaiveAdd] from typing import Union class A: pass class B: pass class C: def __radd__(self, x: A) -> int: pass class D: def __radd__(self, x: B) -> str: pass x: Union[A, B] y: Union[C, D] x + y # E: Unsupported operand types for + ("A" and "D") \ # E: Unsupported operand types for + ("B" and "C") \ # N: Both left and right operands are unions [case testOperatorDoubleUnionInterwovenUnionAdd] from typing import Union class Out1: pass class Out2: pass class Out3: pass class Out4: pass class A: def __add__(self, x: D) -> Out1: pass class B: def __add__(self, x: C) -> Out2: pass class C: def __radd__(self, x: A) -> Out3: pass class D: def __radd__(self, x: B) -> Out4: pass x: Union[A, B] y: Union[C, D] reveal_type(x + y) # N: Revealed type is 'Union[__main__.Out3, __main__.Out1, __main__.Out2, __main__.Out4]' reveal_type(A() + y) # N: Revealed type is 'Union[__main__.Out3, __main__.Out1]' reveal_type(B() + y) # 
N: Revealed type is 'Union[__main__.Out2, __main__.Out4]' reveal_type(x + C()) # N: Revealed type is 'Union[__main__.Out3, __main__.Out2]' reveal_type(x + D()) # N: Revealed type is 'Union[__main__.Out1, __main__.Out4]' [case testOperatorDoubleUnionDivisionPython2] # flags: --python-version 2.7 from typing import Union def f(a): # type: (Union[int, float]) -> None a /= 1.1 b = a / 1.1 reveal_type(b) # N: Revealed type is 'builtins.float' [builtins_py2 fixtures/ops.pyi] [case testOperatorDoubleUnionDivisionPython3] from typing import Union def f(a): # type: (Union[int, float]) -> None a /= 1.1 b = a / 1.1 reveal_type(b) # N: Revealed type is 'builtins.float' [builtins fixtures/ops.pyi] [case testOperatorWithInference] from typing import TypeVar, Iterable, Union T = TypeVar('T') def sum(x: Iterable[T]) -> Union[T, int]: ... def len(x: Iterable[T]) -> int: ... x = [1.1, 2.2, 3.3] reveal_type(sum(x)) # N: Revealed type is 'builtins.float*' reveal_type(sum(x) / len(x)) # N: Revealed type is 'builtins.float' [builtins fixtures/floatdict.pyi] [case testOperatorWithEmptyListAndSum] from typing import TypeVar, Iterable, Union, overload T = TypeVar('T') S = TypeVar('S') @overload def sum(x: Iterable[T]) -> Union[T, int]: ... @overload def sum(x: Iterable[T], default: S) -> Union[T, S]: ... 
def sum(*args): pass x = ["a", "b", "c"] reveal_type(x + sum([x, x, x], [])) # N: Revealed type is 'builtins.list[builtins.str*]' [builtins fixtures/floatdict.pyi] [case testAbstractReverseOperatorMethod] import typing from abc import abstractmethod class A: @abstractmethod def __lt__(self, x: 'A') -> int: pass class B: @abstractmethod def __lt__(self, x: 'B') -> int: pass @abstractmethod def __gt__(self, x: 'B') -> int: pass [out] [case testOperatorMethodsAndOverloadingSpecialCase] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: 'A') -> int: pass @overload def __add__(self, x: str) -> int: pass class B: def __radd__(self, x: 'A') -> str: pass [out] [case testUnsafeOverlappingWithOperatorMethodsAndOverloading2] from foo import A, B from foo import * [file foo.pyi] from typing import overload class A: def __add__(self, x: 'A') -> int: pass class B: @overload def __radd__(self, x: 'X') -> str: pass # Error @overload def __radd__(self, x: A) -> str: pass # Error class X: def __add__(self, x: B) -> int: pass [out] tmp/foo.pyi:6: error: Signatures of "__radd__" of "B" and "__add__" of "X" are unsafely overlapping [case testUnsafeOverlappingWithLineNo] from typing import TypeVar class Real: def __add__(self, other) -> str: ... class Fraction(Real): def __radd__(self, other: Real) -> Real: ... 
[out] main:5: error: Signatures of "__radd__" of "Fraction" and "__add__" of "Real" are unsafely overlapping [case testOverlappingNormalAndInplaceOperatorMethod] import typing class A: # Incompatible (potential trouble with __radd__) def __add__(self, x: 'A') -> int: pass def __iadd__(self, x: 'B') -> int: pass class B: # Safe def __add__(self, x: 'C') -> int: pass def __iadd__(self, x: A) -> int: pass class C(A): pass [out] main:5: error: Signatures of "__iadd__" and "__add__" are incompatible [case testOverloadedNormalAndInplaceOperatorMethod] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: int) -> int: pass @overload def __add__(self, x: str) -> int: pass @overload # Error def __iadd__(self, x: int) -> int: pass @overload def __iadd__(self, x: object) -> int: pass class B: @overload def __add__(self, x: int) -> int: pass @overload def __add__(self, x: str) -> str: pass @overload def __iadd__(self, x: int) -> int: pass @overload def __iadd__(self, x: str) -> str: pass [out] tmp/foo.pyi:7: error: Signatures of "__iadd__" and "__add__" are incompatible [case testIntroducingInplaceOperatorInSubclass] import typing class A: def __add__(self, x: 'A') -> 'B': pass class B(A): # __iadd__ effectively partially overrides __add__ def __iadd__(self, x: 'A') -> 'A': pass # Error class C(A): def __iadd__(self, x: int) -> 'B': pass # Error class D(A): def __iadd__(self, x: 'A') -> 'B': pass [out] main:6: error: Return type "A" of "__iadd__" incompatible with return type "B" in "__add__" of supertype "A" main:8: error: Argument 1 of "__iadd__" is incompatible with "__add__" of supertype "A"; supertype defines the argument type as "A" main:8: error: Signatures of "__iadd__" and "__add__" are incompatible [case testGetattribute] a, b = None, None # type: A, B class A: def __getattribute__(self, x: str) -> A: return A() class B: pass a = a.foo b = a.bar [out] main:9: error: Incompatible types in assignment (expression has type 
"A", variable has type "B") [case testGetattributeSignature] class A: def __getattribute__(self, x: str) -> A: pass class B: def __getattribute__(self, x: A) -> B: pass class C: def __getattribute__(self, x: str, y: str) -> C: pass class D: def __getattribute__(self, x: str) -> None: pass [out] main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B" for "__getattribute__" main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C" for "__getattribute__" [case testGetattr] a, b = None, None # type: A, B class A: def __getattr__(self, x: str) -> A: return A() class B: pass a = a.foo b = a.bar [out] main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testGetattrWithGetitem] class A: def __getattr__(self, x: str) -> 'A': return A() a = A() a[0] # E: Value of type "A" is not indexable [case testGetattrWithCall] class A: def __getattr__(self, x: str) -> 'A': return A() a = A() a.y() # E: "A" not callable [case testGetattrWithCallable] from typing import Callable, Any class C: def __getattr__(self, attr: str) -> C: ... def do(cd: Callable[..., Any]) -> None: ... do(C()) # E: Argument 1 to "do" has incompatible type "C"; expected "Callable[..., Any]" [case testGetattrWithCallableTypeVar] from typing import Callable, Any, TypeVar class C: def __getattr__(self, attr: str) -> C: ... T = TypeVar('T', bound=Callable[..., Any]) def do(cd: T) -> T: ... 
do(C()) # E: Value of type variable "T" of "do" cannot be "C" [case testNestedGetattr] def foo() -> object: def __getattr__() -> None: # no error because not in a class pass return __getattr__ class X: def foo(self) -> object: def __getattr__() -> None: # no error because not directly inside a class pass return __getattr__ [case testGetattrSignature] class A: def __getattr__(self, x: str) -> A: pass class B: def __getattr__(self, x: A) -> B: pass class C: def __getattr__(self, x: str, y: str) -> C: pass class D: def __getattr__(self, x: str) -> None: pass [out] main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B" for "__getattr__" main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C" for "__getattr__" [case testSetattr] from typing import Union, Any class A: def __setattr__(self, name: str, value: Any) -> None: ... a = A() a.test = 'hello' class B: def __setattr__(self, name: str, value: Union[int, str]) -> None: ... b = B() b.both = 1 b.work = '2' class C: def __setattr__(self, name: str, value: str) -> None: ... c = C() c.fail = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "str") class D: __setattr__ = 'hello' # E: Invalid signature "builtins.str" for "__setattr__" d = D() d.crash = 4 # E: "D" has no attribute "crash" class Ex: def __setattr__(self, name: str, value: int) -> None:... test = '42' # type: str e = Ex() e.test = 'hello' e.t = 4 class Super: def __setattr__(self, name: str, value: int) -> None: ... class Sub(Super): ... s = Sub() s.success = 4 s.fail = 'fail' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testSetattrSignature] from typing import Any class Test: def __setattr__() -> None: ... # E: Method must have at least one argument # E: Invalid signature "def ()" for "__setattr__" t = Test() t.crash = 'test' # E: "Test" has no attribute "crash" class A: def __setattr__(self): ... 
# E: Invalid signature "def (self: __main__.A) -> Any" for "__setattr__" a = A() a.test = 4 # E: "A" has no attribute "test" class B: def __setattr__(self, name, value: int): ... b = B() b.integer = 5 class C: def __setattr__(self, name: int, value: int) -> None: ... # E: Invalid signature "def (__main__.C, builtins.int, builtins.int)" for "__setattr__" c = C() c.check = 13 class X: __setattr__ = ... # type: Any [case testGetattrAndSetattr] from typing import Any class A: def __setattr__(self, name: str, value: Any) -> None: ... def __getattr__(self, name: str) -> Any: ... a = A() a.test = 4 t = a.test class B: def __setattr__(self, name: str, value: int) -> None: ... def __getattr__(self, name: str) -> str: ... integer = 0 b = B() b.at = '3' # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): integer = b.at # E: Incompatible types in assignment (expression has type "str", variable has type "int") -- CallableType objects -- ---------------- [case testCallableObject] import typing a = A() b = B() a() # E: Too few arguments for "__call__" of "A" a(a, a) # E: Too many arguments for "__call__" of "A" if int(): a = a(a) if int(): a = a(b) # E: Argument 1 to "__call__" of "A" has incompatible type "B"; expected "A" if int(): b = a(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") class A: def __call__(self, x: A) -> A: pass class B: pass -- __new__ -- -------- [case testConstructInstanceWith__new__] class C: def __new__(cls, foo: int = None) -> 'C': obj = object.__new__(cls) return obj x = C(foo=12) x.a # E: "C" has no attribute "a" C(foo='') # E: Argument "foo" to "C" has incompatible type "str"; expected "Optional[int]" [builtins fixtures/__new__.pyi] [case testConstructInstanceWithDynamicallyTyped__new__] class C: def __new__(cls, foo): obj = object.__new__(cls) return obj x = C(foo=12) x = C(foo='x') x.a # E: "C" has no attribute "a" C(bar='') # E: Unexpected keyword argument 
"bar" for "C" [builtins fixtures/__new__.pyi] [case testClassWith__new__AndCompatibilityWithType] class C: def __new__(cls, foo: int = None) -> 'C': obj = object.__new__(cls) return obj def f(x: type) -> None: pass def g(x: int) -> None: pass f(C) g(C) # E: Argument 1 to "g" has incompatible type "Type[C]"; expected "int" [builtins fixtures/__new__.pyi] [case testClassWith__new__AndCompatibilityWithType2] class C: def __new__(cls, foo): obj = object.__new__(cls) return obj def f(x: type) -> None: pass def g(x: int) -> None: pass f(C) g(C) # E: Argument 1 to "g" has incompatible type "Type[C]"; expected "int" [builtins fixtures/__new__.pyi] [case testGenericClassWith__new__] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): def __new__(cls, foo: T) -> 'C[T]': obj = object.__new__(cls) return obj def set(self, x: T) -> None: pass c = C('') c.set('') c.set(1) # E: Argument 1 to "set" of "C" has incompatible type "int"; expected "str" [builtins fixtures/__new__.pyi] [case testOverloaded__new__] from foo import * [file foo.pyi] from typing import overload class C: @overload def __new__(cls, foo: int) -> 'C': obj = object.__new__(cls) return obj @overload def __new__(cls, x: str, y: str) -> 'C': obj = object.__new__(cls) return obj c = C(1) c.a # E: "C" has no attribute "a" C('', '') C('') # E: No overload variant of "C" matches argument type "str" \ # N: Possible overload variant: \ # N: def __new__(cls, foo: int) -> C \ # N: <1 more non-matching overload not shown> [builtins fixtures/__new__.pyi] -- Special cases -- ------------- [case testSubclassInt] import typing class A(int): pass n = 0 if int(): n = A() a = A() if int(): a = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "A") [case testForwardReferenceToNestedClass] def f(o: 'B.C') -> None: o.f('') # E: Argument 1 to "f" of "C" has incompatible type "str"; expected "int" class B: class C: def f(self, x: int) -> None: pass [out] [case 
testForwardReferenceToNestedClassDeep] def f(o: 'B.C.D') -> None: o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int" class B: class C: class D: def f(self, x: int) -> None: pass [out] [case testForwardReferenceToNestedClassWithinClass] class B: def f(self, o: 'C.D') -> None: o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int" class C: class D: def f(self, x: int) -> None: pass [out] [case testClassVsInstanceDisambiguation] class A: pass def f(x: A) -> None: pass f(A) # E: Argument 1 to "f" has incompatible type "Type[A]"; expected "A" [out] -- TODO -- attribute inherited from superclass; assign in __init__ -- refer to attribute before type has been inferred (the initialization in -- __init__ has not been analyzed) [case testAnyBaseClassUnconstrainedConstructor] from typing import Any B = None # type: Any class C(B): pass C(0) C(arg=0) [out] [case testErrorMapToSupertype] import typing class X(Nope): pass # E: Name 'Nope' is not defined a, b = X() # Used to crash here (#2244) -- Class-valued attributes -- ----------------------- [case testClassValuedAttributesBasics] class A: ... 
class B: a = A bad = lambda: 42 B().bad() # E: Attribute function "bad" with type "Callable[[], int]" does not accept self argument reveal_type(B.a) # N: Revealed type is 'def () -> __main__.A' reveal_type(B().a) # N: Revealed type is 'def () -> __main__.A' reveal_type(B().a()) # N: Revealed type is '__main__.A' class C: a = A def __init__(self) -> None: self.aa = self.a() reveal_type(C().aa) # N: Revealed type is '__main__.A' [out] [case testClassValuedAttributesGeneric] from typing import Generic, TypeVar, Type T = TypeVar('T') class A(Generic[T]): def __init__(self, x: T) -> None: self.x = x class B(Generic[T]): a: Type[A[T]] = A reveal_type(B[int]().a) # N: Revealed type is 'Type[__main__.A[builtins.int*]]' B[int]().a('hi') # E: Argument 1 to "A" has incompatible type "str"; expected "int" class C(Generic[T]): a = A def __init__(self) -> None: self.aa = self.a(42) reveal_type(C().aa) # N: Revealed type is '__main__.A[builtins.int]' [out] [case testClassValuedAttributesAlias] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): ... 
SameA = A[T, T] class B: a_any = SameA a_int = SameA[int] reveal_type(B().a_any) # N: Revealed type is 'def () -> __main__.A[Any, Any]' reveal_type(B().a_int()) # N: Revealed type is '__main__.A[builtins.int, builtins.int]' class C: a_int = SameA[int] def __init__(self) -> None: self.aa = self.a_int() reveal_type(C().aa) # N: Revealed type is '__main__.A[builtins.int*, builtins.int*]' [out] -- Type[C] -- ------- [case testTypeUsingTypeCBasic] from typing import Type class User: pass class ProUser(User): pass def new_user(user_class: Type[User]) -> User: return user_class() reveal_type(new_user(User)) # N: Revealed type is '__main__.User' reveal_type(new_user(ProUser)) # N: Revealed type is '__main__.User' [out] [case testTypeUsingTypeCDefaultInit] from typing import Type class B: pass def f(A: Type[B]) -> None: A(0) # E: Too many arguments for "B" A() [out] [case testTypeUsingTypeCInitWithArg] from typing import Type class B: def __init__(self, a: int) -> None: pass def f(A: Type[B]) -> None: A(0) A() # E: Too few arguments for "B" [out] [case testTypeUsingTypeCTypeVar] from typing import Type, TypeVar class User: pass class ProUser(User): pass U = TypeVar('U', bound=User) def new_user(user_class: Type[U]) -> U: user = user_class() reveal_type(user) return user pro_user = new_user(ProUser) reveal_type(pro_user) [out] main:7: note: Revealed type is 'U`-1' main:10: note: Revealed type is '__main__.ProUser*' [case testTypeUsingTypeCTypeVarDefaultInit] from typing import Type, TypeVar class B: pass T = TypeVar('T', bound=B) def f(A: Type[T]) -> None: A() A(0) # E: Too many arguments for "B" [out] [case testTypeUsingTypeCTypeVarWithInit] from typing import Type, TypeVar class B: def __init__(self, a: int) -> None: pass T = TypeVar('T', bound=B) def f(A: Type[T]) -> None: A() # E: Too few arguments for "B" A(0) [out] [case testTypeUsingTypeCTwoTypeVars] from typing import Type, TypeVar class User: pass class ProUser(User): pass class WizUser(ProUser): pass U = 
TypeVar('U', bound=User) def new_user(u_c: Type[U]) -> U: pass P = TypeVar('P', bound=ProUser) def new_pro(pro_c: Type[P]) -> P: return new_user(pro_c) wiz = new_pro(WizUser) reveal_type(wiz) def error(u_c: Type[U]) -> P: return new_pro(u_c) # Error here, see below [out] main:11: note: Revealed type is '__main__.WizUser*' main:13: error: Value of type variable "P" of "new_pro" cannot be "U" main:13: error: Incompatible return value type (got "U", expected "P") [case testTypeUsingTypeCCovariance] from typing import Type, TypeVar class User: pass class ProUser(User): pass def new_user(user_class: Type[User]) -> User: return user_class() def new_pro_user(user_class: Type[ProUser]): new_user(user_class) [out] [case testAllowCovariantArgsInConstructor] from typing import Generic, TypeVar T_co = TypeVar('T_co', covariant=True) class C(Generic[T_co]): def __init__(self, x: T_co) -> None: # This should be allowed self.x = x def meth(self) -> None: reveal_type(self.x) # N: Revealed type is 'T_co`1' reveal_type(C(1).x) # N: Revealed type is 'builtins.int*' [builtins fixtures/property.pyi] [out] [case testTypeUsingTypeCErrorCovariance] from typing import Type, TypeVar class User: pass def new_user(user_class: Type[User]): return user_class() def foo(arg: Type[int]): new_user(arg) # E: Argument 1 to "new_user" has incompatible type "Type[int]"; expected "Type[User]" [out] [case testTypeUsingTypeCUnionOverload] from foo import * [file foo.pyi] from typing import Type, Union, overload class X: @overload def __init__(self) -> None: pass @overload def __init__(self, a: int) -> None: pass class Y: def __init__(self) -> None: pass def bar(o: Type[Union[X, Y]]): pass bar(X) bar(Y) [out] [case testTypeUsingTypeCTypeAny] from typing import Type, Any def foo(arg: Type[Any]): x = arg() x = arg(0) x = arg('', ()) reveal_type(x) # N: Revealed type is 'Any' x.foo class X: pass foo(X) [out] [case testTypeUsingTypeCTypeAnyMember] from typing import Type, Any def foo(arg: Type[Any]): x = 
arg.member_name arg.new_member_name = 42 # Member access is ok and types as Any reveal_type(x) # N: Revealed type is 'Any' # But Type[Any] is distinct from Any y: int = arg # E: Incompatible types in assignment (expression has type "Type[Any]", variable has type "int") [out] [case testTypeUsingTypeCTypeAnyMemberFallback] from typing import Type, Any def foo(arg: Type[Any]): reveal_type(arg.__str__) # N: Revealed type is 'def () -> builtins.str' reveal_type(arg.mro()) # N: Revealed type is 'builtins.list[builtins.type]' [builtins fixtures/type.pyi] [out] [case testTypeUsingTypeCTypeNoArg] from typing import Type def foo(arg: Type): x = arg() reveal_type(x) # N: Revealed type is 'Any' class X: pass foo(X) [out] [case testTypeUsingTypeCBuiltinType] from typing import Type def foo(arg: type): pass class X: pass def bar(arg: Type[X]): foo(arg) foo(X) [builtins fixtures/tuple.pyi] [out] [case testTypeUsingTypeCClassMethod] from typing import Type class User: @classmethod def foo(cls) -> int: pass def bar(self) -> int: pass def process(cls: Type[User]): reveal_type(cls.foo()) # N: Revealed type is 'builtins.int' obj = cls() reveal_type(cls.bar(obj)) # N: Revealed type is 'builtins.int' cls.mro() # Defined in class type cls.error # E: "Type[User]" has no attribute "error" [builtins fixtures/classmethod.pyi] [out] [case testTypeUsingTypeCClassMethodUnion] from typing import Type, Union class User: @classmethod def foo(cls) -> int: pass def bar(self) -> int: pass class ProUser(User): pass class BasicUser(User): pass def process(cls: Type[Union[BasicUser, ProUser]]): cls.foo() obj = cls() cls.bar(obj) cls.mro() # Defined in class type cls.error # E: Item "type" of "Union[Type[BasicUser], Type[ProUser]]" has no attribute "error" [builtins fixtures/classmethod.pyi] [out] [case testTypeUsingTypeCClassMethodFromTypeVar] from typing import Type, TypeVar class User: @classmethod def foo(cls) -> int: pass def bar(self) -> int: pass U = TypeVar('U', bound=User) def process(cls: 
Type[U]): reveal_type(cls.foo()) # N: Revealed type is 'builtins.int' obj = cls() reveal_type(cls.bar(obj)) # N: Revealed type is 'builtins.int' cls.mro() # Defined in class type cls.error # E: "Type[U]" has no attribute "error" [builtins fixtures/classmethod.pyi] [out] [case testTypeUsingTypeCClassMethodFromTypeVarUnionBound] # Ideally this would work, but not worth the effort; just don't crash from typing import Type, TypeVar, Union class User: @classmethod def foo(cls) -> int: pass def bar(self) -> int: pass class ProUser(User): pass class BasicUser(User): pass U = TypeVar('U', bound=Union[ProUser, BasicUser]) def process(cls: Type[U]): cls.foo() # E: "Type[U]" has no attribute "foo" obj = cls() cls.bar(obj) # E: "Type[U]" has no attribute "bar" cls.mro() # Defined in class type cls.error # E: "Type[U]" has no attribute "error" [builtins fixtures/classmethod.pyi] [out] [case testTypeUsingTypeCErrorUnsupportedType] from typing import Type, Tuple def foo(arg: Type[Tuple[int]]): # E: Unsupported type Type["Tuple[int]"] arg() [builtins fixtures/tuple.pyi] [out] [case testTypeUsingTypeCOverloadedClass] from foo import * [file foo.pyi] from typing import Type, TypeVar, overload class User: @overload def __init__(self) -> None: pass @overload def __init__(self, arg: int) -> None: pass @classmethod def foo(cls) -> None: pass U = TypeVar('U', bound=User) def new(uc: Type[U]) -> U: uc.foo() u = uc() u.foo() if 1: u = uc(0) u.foo() u = uc('') # Error u.foo(0) # Error return uc() u = new(User) [builtins fixtures/classmethod.pyi] [out] tmp/foo.pyi:17: error: No overload variant of "User" matches argument type "str" tmp/foo.pyi:17: note: Possible overload variant: tmp/foo.pyi:17: note: def __init__(self, arg: int) -> U tmp/foo.pyi:17: note: <1 more non-matching overload not shown> tmp/foo.pyi:18: error: Too many arguments for "foo" of "User" [case testTypeUsingTypeCInUpperBound] from typing import TypeVar, Type class B: pass T = TypeVar('T', bound=Type[B]) def f(a: T): pass 
[out] [case testTypeUsingTypeCTuple] from typing import Type, Tuple def f(a: Type[Tuple[int, int]]): a() [out] main:2: error: Unsupported type Type["Tuple[int, int]"] [case testTypeUsingTypeCNamedTuple] from typing import Type, NamedTuple N = NamedTuple('N', [('x', int), ('y', int)]) def f(a: Type[N]): a() [builtins fixtures/list.pyi] [out] main:4: error: Too few arguments for "N" [case testTypeUsingTypeCJoin] from typing import Type class B: pass class C(B): pass class D(B): pass def foo(c: Type[C], d: Type[D]) -> None: x = [c, d] reveal_type(x) [builtins fixtures/list.pyi] [out] main:7: note: Revealed type is 'builtins.list[Type[__main__.B]]' [case testTypeEquivalentTypeAny] from typing import Type, Any a = None # type: Type[Any] b = a # type: type x = None # type: type y = x # type: Type[Any] class C: ... p = None # type: type q = p # type: Type[C] [builtins fixtures/list.pyi] [out] [case testTypeEquivalentTypeAny2] from typing import Type, Any, TypeVar, Generic class C: ... x = None # type: type y = None # type: Type[Any] z = None # type: Type[C] lst = [x, y, z] reveal_type(lst) # N: Revealed type is 'builtins.list[builtins.type*]' T1 = TypeVar('T1', bound=type) T2 = TypeVar('T2', bound=Type[Any]) class C1(Generic[T1]): ... class C2(Generic[T2]): ... 
C1[Type[Any]], C2[type] # both these should not fail [builtins fixtures/list.pyi] [out] [case testTypeEquivalentTypeAnyEdgeCase] class C: pass class M(type): def __init__(cls, x) -> None: type.__init__(cls, x) class Mbad(type): def __init__(cls, x) -> None: type.__init__(C(), x) # E: Argument 1 to "__init__" of "type" has incompatible type "C"; expected "type" [builtins fixtures/primitives.pyi] [out] [case testTypeMatchesOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload, Any class User: pass UserType = User # type: Type[User] @overload def f(a: int) -> Any: pass @overload def f(a: object) -> int: pass reveal_type(f(User)) # N: Revealed type is 'builtins.int' reveal_type(f(UserType)) # N: Revealed type is 'builtins.int' [builtins fixtures/classmethod.pyi] [out] [case testTypeMatchesGeneralTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass UserType = User # type: Type[User] @overload def f(a: type) -> int: return 1 @overload def f(a: int) -> str: return "a" reveal_type(f(User)) # N: Revealed type is 'builtins.int' reveal_type(f(UserType)) # N: Revealed type is 'builtins.int' reveal_type(f(1)) # N: Revealed type is 'builtins.str' [builtins fixtures/classmethod.pyi] [out] [case testTypeMatchesSpecificTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass UserType = User # type: Type[User] @overload def f(a: User) -> User: return User() @overload def f(a: Type[User]) -> int: return 1 @overload def f(a: int) -> str: return "a" reveal_type(f(User)) # N: Revealed type is 'builtins.int' reveal_type(f(UserType)) # N: Revealed type is 'builtins.int' reveal_type(f(User())) # N: Revealed type is 'foo.User' reveal_type(f(1)) # N: Revealed type is 'builtins.str' [builtins fixtures/classmethod.pyi] [out] [case testMixingTypeTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: 
pass @overload def f(a: User) -> Type[User]: return User @overload def f(a: Type[User]) -> User: return a() @overload def f(a: int) -> Type[User]: return User @overload def f(a: str) -> User: return User() reveal_type(f(User())) # N: Revealed type is 'Type[foo.User]' reveal_type(f(User)) # N: Revealed type is 'foo.User' reveal_type(f(3)) # N: Revealed type is 'Type[foo.User]' reveal_type(f("hi")) # N: Revealed type is 'foo.User' [builtins fixtures/classmethod.pyi] [out] [case testGeneralTypeMatchesSpecificTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, Any, overload class User: pass @overload def f(a: Type[User]) -> None: pass @overload def f(a: int) -> None: pass def mock_1() -> type: return User def mock_2() -> Type[Any]: return User f(User) f(mock_1()) f(mock_2()) [builtins fixtures/classmethod.pyi] [out] [case testNonTypeDoesNotMatchOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: Type[User]) -> None: pass @overload def f(a: type) -> None: pass f(3) # E: No overload variant of "f" matches argument type "int" \ # N: Possible overload variants: \ # N: def f(a: Type[User]) -> None \ # N: def f(a: type) -> None [builtins fixtures/classmethod.pyi] [out] [case testInstancesDoNotMatchTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: Type[User]) -> None: pass @overload def f(a: int) -> None: pass f(User) f(User()) # E: No overload variant of "f" matches argument type "User" \ # N: Possible overload variants: \ # N: def f(a: Type[User]) -> None \ # N: def f(a: int) -> None [builtins fixtures/classmethod.pyi] [out] [case testTypeCovarianceWithOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class A: pass class B(A): pass class C(B): pass AType = A # type: Type[A] BType = B # type: Type[B] CType = C # type: Type[C] @overload def f(a: Type[B]) -> 
None: pass @overload def f(a: int) -> None: pass f(A) # E: Argument 1 to "f" has incompatible type "Type[A]"; expected "Type[B]" f(B) f(C) f(AType) # E: Argument 1 to "f" has incompatible type "Type[A]"; expected "Type[B]" f(BType) f(CType) [builtins fixtures/classmethod.pyi] [out] [case testOverloadedCovariantTypesFail] from foo import * [file foo.pyi] from typing import Type, overload class A: pass class B(A): pass @overload def f(a: Type[B]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(a: Type[A]) -> str: pass [builtins fixtures/classmethod.pyi] [out] [case testDistinctOverloadedCovariantTypesSucceed] from foo import * [file foo.pyi] from typing import Type, overload class A: pass class AChild(A): pass class B: pass class BChild(B): pass @overload def f(a: Type[A]) -> int: pass @overload def f(a: Type[B]) -> str: pass @overload def f(a: A) -> A: pass @overload def f(a: B) -> B: pass reveal_type(f(A)) # N: Revealed type is 'builtins.int' reveal_type(f(AChild)) # N: Revealed type is 'builtins.int' reveal_type(f(B)) # N: Revealed type is 'builtins.str' reveal_type(f(BChild)) # N: Revealed type is 'builtins.str' reveal_type(f(A())) # N: Revealed type is 'foo.A' reveal_type(f(AChild())) # N: Revealed type is 'foo.A' reveal_type(f(B())) # N: Revealed type is 'foo.B' reveal_type(f(BChild())) # N: Revealed type is 'foo.B' [builtins fixtures/classmethod.pyi] [out] [case testSubtypeWithMoreOverloadsThanSupertypeSucceeds] from foo import * [file foo.pyi] from typing import overload class X: pass class Y: pass class Z: pass class A: @overload def f(self, x: X) -> X: pass @overload def f(self, y: Y) -> Y: pass class B(A): @overload def f(self, x: X) -> X: pass @overload def f(self, y: Y) -> Y: pass @overload def f(self, z: Z) -> Z: pass [builtins fixtures/classmethod.pyi] [out] [case testSubtypeOverloadCoveringMultipleSupertypeOverloadsSucceeds] from foo import * [file foo.pyi] from typing import overload 
class A: pass class B(A): pass class C(A): pass class D: pass class Super: @overload def foo(self, a: B) -> C: pass @overload def foo(self, a: C) -> A: pass @overload def foo(self, a: D) -> D: pass class Sub(Super): @overload def foo(self, a: A) -> C: pass @overload def foo(self, a: D) -> D: pass [builtins fixtures/classmethod.pyi] [out] [case testSubtypeOverloadWithOverlappingArgumentsButWrongReturnType] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass class C: pass class Super: @overload def foo(self, a: A) -> A: pass @overload def foo(self, a: C) -> C: pass class Sub(Super): @overload # E: Signature of "foo" incompatible with supertype "Super" def foo(self, a: A) -> A: pass @overload def foo(self, a: B) -> C: pass # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader @overload def foo(self, a: C) -> C: pass [builtins fixtures/classmethod.pyi] [out] [case testTypeTypeOverlapsWithObjectAndType] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: Type[User]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(a: object) -> str: pass @overload def g(a: Type[User]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g(a: type) -> str: pass [builtins fixtures/classmethod.pyi] [out] [case testTypeOverlapsWithObject] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: type) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(a: object) -> str: pass [builtins fixtures/classmethod.pyi] [out] [case testTypeConstructorReturnsTypeType] class User: @classmethod def test_class_method(cls) -> int: pass @staticmethod def test_static_method() -> str: pass def test_instance_method(self) -> None: pass u 
= User() reveal_type(type(u)) # N: Revealed type is 'Type[__main__.User]' reveal_type(type(u).test_class_method()) # N: Revealed type is 'builtins.int' reveal_type(type(u).test_static_method()) # N: Revealed type is 'builtins.str' type(u).test_instance_method() # E: Too few arguments for "test_instance_method" of "User" [builtins fixtures/classmethod.pyi] [out] [case testObfuscatedTypeConstructorReturnsTypeType] from typing import TypeVar class User: pass f1 = type A = TypeVar('A') def f2(func: A) -> A: return func u = User() reveal_type(f1(u)) # N: Revealed type is 'Type[__main__.User]' reveal_type(f2(type)(u)) # N: Revealed type is 'Type[__main__.User]' [builtins fixtures/classmethod.pyi] [out] [case testTypeConstructorLookalikeFails] class User: pass def fake1(a: object) -> type: return User def fake2(a: int) -> type: return User reveal_type(type(User())) # N: Revealed type is 'Type[__main__.User]' reveal_type(fake1(User())) # N: Revealed type is 'builtins.type' reveal_type(fake2(3)) # N: Revealed type is 'builtins.type' [builtins fixtures/classmethod.pyi] [out] [case testOtherTypeConstructorsSucceed] def foo(self) -> int: return self.attr User = type('User', (object,), {'foo': foo, 'attr': 3}) reveal_type(User) # N: Revealed type is 'builtins.type' [builtins fixtures/args.pyi] [out] [case testTypeTypeComparisonWorks] class User: pass User == User User == type(User()) type(User()) == User type(User()) == type(User()) User != User User != type(User()) type(User()) != User type(User()) != type(User()) int == int int == type(3) type(3) == int type(3) == type(3) int != int int != type(3) type(3) != int type(3) != type(3) User is User User is type(User) type(User) is User type(User) is type(User) int is int int is type(3) type(3) is int type(3) is type(3) int.__eq__(int) int.__eq__(3, 4) [builtins fixtures/args.pyi] [out] main:33: error: Too few arguments for "__eq__" of "int" main:33: error: Unsupported operand types for == ("int" and "Type[int]") [case 
testMroSetAfterError] class C(str, str): foo = 0 bar = foo [out] main:1: error: Duplicate base class "str" [case testCannotDetermineMro] class A: pass class B(A): pass class C(B): pass class D(A, B): pass # E: Cannot determine consistent method resolution order (MRO) for "D" class E(C, D): pass [case testInconsistentMroLocalRef] class A: pass class B(object, A): # E: Cannot determine consistent method resolution order (MRO) for "B" def readlines(self): pass __iter__ = readlines [case testDynamicMetaclass] class C(metaclass=int()): # E: Dynamic metaclass not supported for 'C' pass [case testDynamicMetaclassCrash] class C(metaclass=int().x): # E: Dynamic metaclass not supported for 'C' pass [case testVariableSubclass] class A: a = 1 # type: int class B(A): a = 1 [out] [case testVariableSubclassAssignMismatch] class A: a = 1 # type: int class B(A): a = "a" [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableSubclassAssignment] class A: a = None # type: int class B(A): def __init__(self) -> None: self.a = "a" [out] main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testVariableSubclassTypeOverwrite] class A: a = None # type: int class B(A): a = None # type: str class C(B): a = "a" [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableSubclassTypeOverwriteImplicit] class A: a = 1 class B(A): a = None # type: str [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableSuperUsage] class A: a = [] # type: list class B(A): a = [1, 2] class C(B): a = B.a + [3] [builtins fixtures/list.pyi] [out] [case testClassAllBases] from typing import Union class A: a = None # type: Union[int, str] class B(A): a = 1 class C(B): a = "str" class D(A): a = "str" [out] main:7: 
error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testVariableTypeVar] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: T class B(A[int]): a = 1 [case testVariableTypeVarInvalid] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: T class B(A[int]): a = "abc" [out] main:6: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableTypeVarIndirectly] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: T class B(A[int]): pass class C(B): a = "a" [out] main:8: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableTypeVarList] from typing import List, TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: List[T] b = None # type: List[T] class B(A[int]): a = [1] b = [''] [builtins fixtures/list.pyi] [out] main:8: error: List item 0 has incompatible type "str"; expected "int" [case testVariableMethod] class A: def a(self) -> None: pass b = 1 class B(A): a = 1 def b(self) -> None: pass [out] main:5: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "Callable[[A], None]") main:6: error: Signature of "b" incompatible with supertype "A" [case testVariableProperty] class A: @property def a(self) -> bool: pass class B(A): a = None # type: bool class C(A): a = True class D(A): a = 1 [builtins fixtures/property.pyi] [out] main:9: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "bool") [case testVariableOverwriteAny] from typing import Any class A: a = 1 class B(A): a = 'x' # type: Any [out] [case testInstanceMethodOverwrite] class B(): def n(self, a: int) -> None: pass class C(B): def m(self, a: int) -> None: pass n = m [out] [case 
testInstanceMethodOverwriteError] class B(): def n(self, a: int) -> None: pass class C(B): def m(self, a: str) -> None: pass n = m [out] main:5: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "B" defined the type as "Callable[[int], None]") [case testInstanceMethodOverwriteTypevar] from typing import Generic, TypeVar T = TypeVar("T") class B(Generic[T]): def n(self, a: T) -> None: pass class C(B[int]): def m(self, a: int) -> None: pass n = m [case testInstanceMethodOverwriteTwice] class I: def foo(self) -> None: pass class A(I): def foo(self) -> None: pass class B(A): def bar(self) -> None: pass foo = bar class C(B): def bar(self) -> None: pass foo = bar [case testClassMethodOverwrite] class B(): @classmethod def n(self, a: int) -> None: pass class C(B): @classmethod def m(self, a: int) -> None: pass n = m [builtins fixtures/classmethod.pyi] [out] [case testClassMethodOverwriteError] class B(): @classmethod def n(self, a: int) -> None: pass class C(B): @classmethod def m(self, a: str) -> None: pass n = m [builtins fixtures/classmethod.pyi] [out] main:7: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "B" defined the type as "Callable[[int], None]") [case testClassSpec] from typing import Callable class A(): b = None # type: Callable[[A, int], int] class B(A): def c(self, a: int) -> int: pass b = c [case testClassSpecError] from typing import Callable class A(): b = None # type: Callable[[A, int], int] class B(A): def c(self, a: str) -> int: pass b = c [out] main:6: error: Incompatible types in assignment (expression has type "Callable[[str], int]", base class "A" defined the type as "Callable[[int], int]") [case testClassStaticMethod] class A(): @staticmethod def a(a: int) -> None: pass class B(A): @staticmethod def b(a: str) -> None: pass a = b [builtins fixtures/staticmethod.pyi] [out] main:7: error: Incompatible types in assignment (expression has type 
"Callable[[str], None]", base class "A" defined the type as "Callable[[int], None]") [case testClassStaticMethodIndirect] class A(): @staticmethod def a(a: int) -> None: pass c = a class B(A): @staticmethod def b(a: str) -> None: pass c = b [builtins fixtures/staticmethod.pyi] [out] main:8: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "A" defined the type as "Callable[[int], None]") [case testClassStaticMethodSubclassing] class A: @staticmethod def a() -> None: pass def b(self) -> None: pass @staticmethod def c() -> None: pass class B(A): def a(self) -> None: pass # E: Signature of "a" incompatible with supertype "A" @classmethod def b(cls) -> None: pass @staticmethod def c() -> None: pass [builtins fixtures/classmethod.pyi] [case testTempNode] class A(): def a(self) -> None: pass class B(A): def b(self) -> None: pass a = c = b [case testListObject] from typing import List class A: x = [] # type: List[object] class B(A): x = [1] [builtins fixtures/list.pyi] [case testClassMemberObject] class A: x = object() class B(A): x = 1 class C(B): x = '' [out] main:6: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testSlots] class A: __slots__ = ("a") class B(A): __slots__ = ("a", "b") [case testClassOrderOfError] class A: x = 1 class B(A): x = "a" class C(B): x = object() [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") main:6: error: Incompatible types in assignment (expression has type "object", base class "B" defined the type as "str") [case testClassOneErrorPerLine] class A: x = 1 class B(A): x = "" x = 1.0 [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") main:5: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case 
testClassIgnoreType_RedefinedAttributeAndGrandparentAttributeTypesNotIgnored] class A: x = 0 class B(A): x = '' # type: ignore class C(B): x = '' [out] [case testClassIgnoreType_RedefinedAttributeTypeIgnoredInChildren] class A: x = 0 class B(A): x = '' # type: ignore class C(B): x = '' # type: ignore [out] [case testInvalidMetaclassStructure] class X(type): pass class Y(type): pass class A(metaclass=X): pass class B(A, metaclass=Y): pass # E: Inconsistent metaclass structure for 'B' [case testMetaclassNoTypeReveal] class M: x = 0 # type: int class A(metaclass=M): pass # E: Metaclasses not inheriting from 'type' are not supported A.x # E: "Type[A]" has no attribute "x" [case testMetaclassTypeReveal] from typing import Type class M(type): x = 0 # type: int class A(metaclass=M): pass def f(TA: Type[A]): reveal_type(TA) # N: Revealed type is 'Type[__main__.A]' reveal_type(TA.x) # N: Revealed type is 'builtins.int' [case testSubclassMetaclass] class M1(type): x = 0 class M2(M1): pass class C(metaclass=M2): pass reveal_type(C.x) # N: Revealed type is 'builtins.int' [case testMetaclassSubclass] from typing import Type class M(type): x = 0 # type: int class A(metaclass=M): pass class B(A): pass def f(TB: Type[B]): reveal_type(TB) # N: Revealed type is 'Type[__main__.B]' reveal_type(TB.x) # N: Revealed type is 'builtins.int' [case testMetaclassIterable] from typing import Iterable, Iterator class ImplicitMeta(type): def __iter__(self) -> Iterator[int]: yield 1 class Implicit(metaclass=ImplicitMeta): pass for _ in Implicit: pass reveal_type(list(Implicit)) # N: Revealed type is 'builtins.list[builtins.int*]' class ExplicitMeta(type, Iterable[int]): def __iter__(self) -> Iterator[int]: yield 1 class Explicit(metaclass=ExplicitMeta): pass for _ in Explicit: pass reveal_type(list(Explicit)) # N: Revealed type is 'builtins.list[builtins.int*]' [builtins fixtures/list.pyi] [case testMetaclassTuple] from typing import Tuple class M(Tuple[int]): pass class C(metaclass=M): pass # E: 
Invalid metaclass 'M' [builtins fixtures/tuple.pyi] [case testMetaclassOperatorBeforeReversed] class X: def __radd__(self, x: int) -> int: ... class Meta(type): def __add__(cls, x: X) -> str: ... class Concrete(metaclass=Meta): pass reveal_type(Concrete + X()) # N: Revealed type is 'builtins.str' Concrete + "hello" # E: Unsupported operand types for + ("Type[Concrete]" and "str") [case testMetaclassOperatorTypeVar] from typing import Type, TypeVar class MetaClass(type): def __mul__(cls, other: int) -> str: return "" class Test(metaclass=MetaClass): pass S = TypeVar("S", bound=Test) def f(x: Type[Test]) -> str: return x * 0 def g(x: Type[S]) -> str: return reveal_type(x * 0) # N: Revealed type is 'builtins.str' [case testMetaclassGetitem] class M(type): def __getitem__(self, key) -> int: return 1 class A(metaclass=M): pass reveal_type(A[M]) # N: Revealed type is 'builtins.int' [case testMetaclassSelfType] from typing import TypeVar, Type class M(type): pass T = TypeVar('T') class M1(M): def foo(cls: Type[T]) -> T: ... 
class A(metaclass=M1): pass reveal_type(A.foo()) # N: Revealed type is '__main__.A*' [case testMetaclassAndSkippedImport] # flags: --ignore-missing-imports from missing import M class A(metaclass=M): y = 0 reveal_type(A.y) # N: Revealed type is 'builtins.int' A.x # E: "Type[A]" has no attribute "x" [case testAnyMetaclass] from typing import Any M = None # type: Any class A(metaclass=M): y = 0 reveal_type(A.y) # N: Revealed type is 'builtins.int' A.x # E: "Type[A]" has no attribute "x" [case testInvalidVariableAsMetaclass] from typing import Any M = 0 # type: int MM = 0 class A(metaclass=M): # E: Invalid metaclass 'M' y = 0 class B(metaclass=MM): # E: Invalid metaclass 'MM' y = 0 reveal_type(A.y) # N: Revealed type is 'builtins.int' A.x # E: "Type[A]" has no attribute "x" [case testAnyAsBaseOfMetaclass] from typing import Any, Type M = None # type: Any class MM(M): pass class A(metaclass=MM): y = 0 @classmethod def f(cls) -> None: pass def g(self) -> None: pass def h(a: Type[A], b: Type[object]) -> None: h(a, a) h(b, a) # E: Argument 1 to "h" has incompatible type "Type[object]"; expected "Type[A]" a.f(1) # E: Too many arguments for "f" of "A" reveal_type(a.y) # N: Revealed type is 'builtins.int' x = A # type: MM reveal_type(A.y) # N: Revealed type is 'builtins.int' reveal_type(A.x) # N: Revealed type is 'Any' A.f(1) # E: Too many arguments for "f" of "A" A().g(1) # E: Too many arguments for "g" of "A" [builtins fixtures/classmethod.pyi] [case testMetaclassTypeCallable] class M(type): x = 5 class A(metaclass=M): pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' [case testMetaclassStrictSupertypeOfTypeWithClassmethods] from typing import Type, TypeVar TA = TypeVar('TA', bound='A') TTA = TypeVar('TTA', bound='Type[A]') TM = TypeVar('TM', bound='M') class M(type): def g1(cls: 'Type[A]') -> A: pass # E: The erased type of self "Type[__main__.A]" is not a supertype of its class "__main__.M" def g2(cls: Type[TA]) -> TA: pass # E: The erased type of self 
"Type[__main__.A]" is not a supertype of its class "__main__.M" def g3(cls: TTA) -> TTA: pass # E: The erased type of self "Type[__main__.A]" is not a supertype of its class "__main__.M" def g4(cls: TM) -> TM: pass m: M class A(metaclass=M): def foo(self): pass reveal_type(A.g1) # N: Revealed type is 'def () -> __main__.A' reveal_type(A.g2) # N: Revealed type is 'def () -> __main__.A*' reveal_type(A.g3) # N: Revealed type is 'def () -> def () -> __main__.A' reveal_type(A.g4) # N: Revealed type is 'def () -> def () -> __main__.A' class B(metaclass=M): def foo(self): pass B.g1 # E: Invalid self argument "Type[B]" to attribute function "g1" with type "Callable[[Type[A]], A]" B.g2 # E: Invalid self argument "Type[B]" to attribute function "g2" with type "Callable[[Type[TA]], TA]" B.g3 # E: Invalid self argument "Type[B]" to attribute function "g3" with type "Callable[[TTA], TTA]" reveal_type(B.g4) # N: Revealed type is 'def () -> def () -> __main__.B' # 4 examples of unsoundness - instantiation, classmethod, staticmethod and ClassVar: ta: Type[A] = m # E: Incompatible types in assignment (expression has type "M", variable has type "Type[A]") a: A = ta() reveal_type(ta.g1) # N: Revealed type is 'def () -> __main__.A' reveal_type(ta.g2) # N: Revealed type is 'def () -> __main__.A*' reveal_type(ta.g3) # N: Revealed type is 'def () -> Type[__main__.A]' reveal_type(ta.g4) # N: Revealed type is 'def () -> Type[__main__.A]' x: M = ta x.g1 # E: Invalid self argument "M" to attribute function "g1" with type "Callable[[Type[A]], A]" x.g2 # E: Invalid self argument "M" to attribute function "g2" with type "Callable[[Type[TA]], TA]" x.g3 # E: Invalid self argument "M" to attribute function "g3" with type "Callable[[TTA], TTA]" reveal_type(x.g4) # N: Revealed type is 'def () -> __main__.M*' def r(ta: Type[TA], tta: TTA) -> None: x: M = ta y: M = tta class Class(metaclass=M): @classmethod def f1(cls: Type[Class]) -> None: pass @classmethod def f2(cls: M) -> None: pass cl: 
Type[Class] = m # E: Incompatible types in assignment (expression has type "M", variable has type "Type[Class]") reveal_type(cl.f1) # N: Revealed type is 'def ()' reveal_type(cl.f2) # N: Revealed type is 'def ()' x1: M = cl class Static(metaclass=M): @staticmethod def f() -> None: pass s: Type[Static] = m # E: Incompatible types in assignment (expression has type "M", variable has type "Type[Static]") reveal_type(s.f) # N: Revealed type is 'def ()' x2: M = s from typing import ClassVar class Cvar(metaclass=M): x = 1 # type: ClassVar[int] cv: Type[Cvar] = m # E: Incompatible types in assignment (expression has type "M", variable has type "Type[Cvar]") cv.x x3: M = cv [builtins fixtures/classmethod.pyi] [case testMetaclassOverloadResolution] from typing import Type, overload class A: pass class EM(type): pass class E(metaclass=EM): pass class EM1(type): pass class E1(metaclass=EM1): pass @overload def f(x: EM) -> int: ... @overload def f(x: EM1) -> A: ... @overload def f(x: str) -> str: ... def f(x: object) -> object: return '' e: EM reveal_type(f(e)) # N: Revealed type is 'builtins.int' et: Type[E] reveal_type(f(et)) # N: Revealed type is 'builtins.int' e1: EM1 reveal_type(f(e1)) # N: Revealed type is '__main__.A' e1t: Type[E1] reveal_type(f(e1t)) # N: Revealed type is '__main__.A' reveal_type(f('')) # N: Revealed type is 'builtins.str' [case testTypeCErasesGenericsFromC] from typing import Generic, Type, TypeVar K = TypeVar('K') V = TypeVar('V') class ExampleDict(Generic[K, V]): ... D = TypeVar('D') def mkdict(dict_type: Type[D]) -> D: ... 
reveal_type(mkdict(ExampleDict)) # N: Revealed type is '__main__.ExampleDict*[Any, Any]' [case testTupleForwardBase] from m import a a[0]() # E: "int" not callable [file m.py] from typing import Tuple a = None # type: A class A(Tuple[int, str]): pass [builtins fixtures/tuple.pyi] -- Synthetic types crashes -- ----------------------- [case testCrashOnSelfRecursiveNamedTupleVar] from typing import NamedTuple N = NamedTuple('N', [('x', N)]) # E: Cannot resolve name "N" (possible cyclic definition) n: N reveal_type(n) # N: Revealed type is 'Tuple[Any, fallback=__main__.N]' [case testCrashOnSelfRecursiveTypedDictVar] from mypy_extensions import TypedDict A = TypedDict('A', {'a': 'A'}) # type: ignore a: A [builtins fixtures/isinstancelist.pyi] [case testCrashInJoinOfSelfRecursiveNamedTuples] from typing import NamedTuple class N(NamedTuple): x: N # type: ignore class M(NamedTuple): x: M # type: ignore n: N m: M lst = [n, m] [builtins fixtures/isinstancelist.pyi] [case testCorrectJoinOfSelfRecursiveTypedDicts] from mypy_extensions import TypedDict class N(TypedDict): x: N # E: Cannot resolve name "N" (possible cyclic definition) class M(TypedDict): x: M # E: Cannot resolve name "M" (possible cyclic definition) n: N m: M lst = [n, m] reveal_type(lst[0]['x']) # N: Revealed type is 'Any' [builtins fixtures/isinstancelist.pyi] [case testCrashInForwardRefToNamedTupleWithIsinstance] from typing import Dict, NamedTuple NameDict = Dict[str, 'NameInfo'] class NameInfo(NamedTuple): ast: bool def parse_ast(name_dict: NameDict) -> None: if isinstance(name_dict[''], int): pass reveal_type(name_dict['test']) # N: Revealed type is 'Tuple[builtins.bool, fallback=__main__.NameInfo]' [builtins fixtures/isinstancelist.pyi] [out] [case testCrashInForwardRefToTypedDictWithIsinstance] from mypy_extensions import TypedDict from typing import Dict NameDict = Dict[str, 'NameInfo'] class NameInfo(TypedDict): ast: bool def parse_ast(name_dict: NameDict) -> None: if isinstance(name_dict[''], int): 
pass reveal_type(name_dict['']['ast']) # N: Revealed type is 'builtins.bool' [builtins fixtures/isinstancelist.pyi] [out] [case testCorrectIsinstanceInForwardRefToNewType] from typing import Dict, NewType NameDict = Dict[str, 'NameInfo'] class Base: ast: bool NameInfo = NewType('NameInfo', Base) def parse_ast(name_dict: NameDict) -> None: if isinstance(name_dict[''], int): pass x = name_dict[''] reveal_type(x) # N: Revealed type is '__main__.NameInfo*' if int(): x = NameInfo(Base()) # OK x = Base() # E: Incompatible types in assignment (expression has type "Base", variable has type "NameInfo") [builtins fixtures/isinstancelist.pyi] [out] [case testNoCrashForwardRefToBrokenDoubleNewType] from typing import Any, Dict, List, NewType Foo = NewType('NotFoo', int) # E: String argument 1 'NotFoo' to NewType(...) does not match variable name 'Foo' Foos = NewType('Foos', List[Foo]) # type: ignore def frob(foos: Dict[Any, Foos]) -> None: foo = foos.get(1) dict(foo) [builtins fixtures/dict.pyi] [out] [case testNoCrashForwardRefToBrokenDoubleNewTypeClass] from typing import Any, Dict, List, NewType Foo = NewType('NotFoo', int) # type: ignore Foos = NewType('Foos', List[Foo]) # type: ignore x: C class C: def frob(self, foos: Dict[Any, Foos]) -> None: foo = foos.get(1) dict(foo) reveal_type(x.frob) # N: Revealed type is 'def (foos: builtins.dict[Any, __main__.Foos])' [builtins fixtures/dict.pyi] [out] [case testNewTypeFromForwardNamedTuple] from typing import NewType, NamedTuple, Tuple NT = NewType('NT', N) class N(NamedTuple): x: int x: NT = N(1) # E: Incompatible types in assignment (expression has type "N", variable has type "NT") x = NT(N(1)) [out] [case testNewTypeFromForwardTypedDict] from typing import NewType, Tuple from mypy_extensions import TypedDict NT = NewType('NT', N) # E: Argument 2 to NewType(...) 
must be subclassable (got "N") class N(TypedDict): x: int [builtins fixtures/dict.pyi] [out] [case testCorrectAttributeInForwardRefToNamedTuple] from typing import NamedTuple proc: Process reveal_type(proc.state) # N: Revealed type is 'builtins.int' def get_state(proc: 'Process') -> int: return proc.state class Process(NamedTuple): state: int [out] [case testCorrectItemTypeInForwardRefToTypedDict] from mypy_extensions import TypedDict proc: Process reveal_type(proc['state']) # N: Revealed type is 'builtins.int' def get_state(proc: 'Process') -> int: return proc['state'] class Process(TypedDict): state: int [builtins fixtures/isinstancelist.pyi] [out] [case testCorrectDoubleForwardNamedTuple] from typing import NamedTuple x: A class A(NamedTuple): one: 'B' other: int class B(NamedTuple): attr: str y: A y = x reveal_type(x.one.attr) # N: Revealed type is 'builtins.str' [out] [case testCrashOnDoubleForwardTypedDict] from mypy_extensions import TypedDict x: A class A(TypedDict): one: 'B' other: int class B(TypedDict): attr: str reveal_type(x['one']['attr']) # N: Revealed type is 'builtins.str' [builtins fixtures/isinstancelist.pyi] [out] [case testCrashOnForwardUnionOfNamedTuples] from typing import Union, NamedTuple Node = Union['Foo', 'Bar'] class Foo(NamedTuple): x: int class Bar(NamedTuple): x: int def foo(node: Node) -> int: x = node reveal_type(node) # N: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.Foo], Tuple[builtins.int, fallback=__main__.Bar]]' return x.x [out] [case testCrashOnForwardUnionOfTypedDicts] from mypy_extensions import TypedDict from typing import Union NodeType = Union['Foo', 'Bar'] class Foo(TypedDict): x: int class Bar(TypedDict): x: int def foo(node: NodeType) -> int: x = node return x['x'] [builtins fixtures/isinstancelist.pyi] [out] [case testSupportForwardUnionOfNewTypes] from typing import Union, NewType x: Node reveal_type(x.x) # N: Revealed type is 'builtins.int' class A: x: int class B: x: int Node = Union['Foo', 
'Bar'] Foo = NewType('Foo', A) Bar = NewType('Bar', B) def foo(node: Node) -> Node: x = node return Foo(A()) [out] [case testForwardReferencesInNewTypeMRORecomputed] from typing import NewType x: Foo Foo = NewType('Foo', B) class A: x: int class B(A): pass reveal_type(x.x) # N: Revealed type is 'builtins.int' [out] [case testCrashOnComplexNamedTupleUnionProperty] from typing import NamedTuple, Union x: AOrB AOrB = Union['A', 'B'] class A(NamedTuple): x: int class B(object): def __init__(self, a: AOrB) -> None: self.a = a @property def x(self) -> int: return self.a.x reveal_type(x.x) # N: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] [out] [case testCorrectIsinstanceWithForwardUnion] from typing import Union, NamedTuple ForwardUnion = Union['TP', int] class TP(NamedTuple('TP', [('x', int)])): pass def f(x: ForwardUnion) -> None: reveal_type(x) # N: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.TP], builtins.int]' if isinstance(x, TP): reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.TP]' [builtins fixtures/isinstance.pyi] [out] [case testCrashInvalidArgsSyntheticClassSyntax] from typing import List, NamedTuple from mypy_extensions import TypedDict class TD(TypedDict): x: List[int, str] # E: "list" expects 1 type argument, but 2 given class NM(NamedTuple): x: List[int, str] # E: "list" expects 1 type argument, but 2 given # These two should never crash, reveals are in the next test TD({'x': []}) NM(x=[]) [builtins fixtures/dict.pyi] [out] [case testCrashInvalidArgsSyntheticClassSyntaxReveals] from typing import List, NamedTuple from mypy_extensions import TypedDict class TD(TypedDict): x: List[int, str] # E: "list" expects 1 type argument, but 2 given class NM(NamedTuple): x: List[int, str] # E: "list" expects 1 type argument, but 2 given x: TD x1 = TD({'x': []}) y: NM y1 = NM(x=[]) reveal_type(x) # N: Revealed type is 'TypedDict('__main__.TD', {'x': builtins.list[Any]})' reveal_type(x1) # N: Revealed type 
is 'TypedDict('__main__.TD', {'x': builtins.list[Any]})' reveal_type(y) # N: Revealed type is 'Tuple[builtins.list[Any], fallback=__main__.NM]' reveal_type(y1) # N: Revealed type is 'Tuple[builtins.list[Any], fallback=__main__.NM]' [builtins fixtures/dict.pyi] [out] [case testCrashInvalidArgsSyntheticFunctionSyntax] from typing import List, NewType, NamedTuple from mypy_extensions import TypedDict TD = TypedDict('TD', {'x': List[int, str]}) # E: "list" expects 1 type argument, but 2 given NM = NamedTuple('NM', [('x', List[int, str])]) # E: "list" expects 1 type argument, but 2 given NT = NewType('NT', List[int, str]) # E: "list" expects 1 type argument, but 2 given # These three should not crash TD({'x': []}) NM(x=[]) NT([]) [builtins fixtures/dict.pyi] [out] [case testCrashForwardSyntheticClassSyntax] from typing import NamedTuple from mypy_extensions import TypedDict class A1(NamedTuple): b: 'B' x: int class A2(TypedDict): b: 'B' x: int class B: pass x: A1 y: A2 reveal_type(x.b) # N: Revealed type is '__main__.B' reveal_type(y['b']) # N: Revealed type is '__main__.B' [builtins fixtures/dict.pyi] [out] [case testCrashForwardSyntheticFunctionSyntax] from typing import NamedTuple from mypy_extensions import TypedDict A1 = NamedTuple('A1', [('b', 'B'), ('x', int)]) A2 = TypedDict('A2', {'b': 'B', 'x': int}) class B: pass x: A1 y: A2 reveal_type(x.b) # N: Revealed type is '__main__.B' reveal_type(y['b']) # N: Revealed type is '__main__.B' [builtins fixtures/dict.pyi] [out] -- Special support for six -- ----------------------- [case testSixMetaclass] import six class M(type): x = 5 class A(six.with_metaclass(M)): pass @six.add_metaclass(M) class B: pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' reveal_type(type(B).x) # N: Revealed type is 'builtins.int' [case testSixMetaclass_python2] import six class M(type): x = 5 class A(six.with_metaclass(M)): pass @six.add_metaclass(M) class B: pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' 
reveal_type(type(B).x) # N: Revealed type is 'builtins.int' [case testFromSixMetaclass] from six import with_metaclass, add_metaclass class M(type): x = 5 class A(with_metaclass(M)): pass @add_metaclass(M) class B: pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' reveal_type(type(B).x) # N: Revealed type is 'builtins.int' [case testSixMetaclassImportFrom] import six from metadefs import M class A(six.with_metaclass(M)): pass @six.add_metaclass(M) class B: pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' reveal_type(type(B).x) # N: Revealed type is 'builtins.int' [file metadefs.py] class M(type): x = 5 [case testSixMetaclassImport] import six import metadefs class A(six.with_metaclass(metadefs.M)): pass @six.add_metaclass(metadefs.M) class B: pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' reveal_type(type(B).x) # N: Revealed type is 'builtins.int' [file metadefs.py] class M(type): x = 5 [case testSixMetaclassAndBase] from typing import Iterable, Iterator import six class M(type, Iterable[int]): x = 5 def __iter__(self) -> Iterator[int]: ... 
class A: def foo(self): pass class B: def bar(self): pass class C1(six.with_metaclass(M, A)): pass @six.add_metaclass(M) class D1(A): pass class C2(six.with_metaclass(M, A, B)): pass @six.add_metaclass(M) class D2(A, B): pass reveal_type(type(C1).x) # N: Revealed type is 'builtins.int' reveal_type(type(D1).x) # N: Revealed type is 'builtins.int' reveal_type(type(C2).x) # N: Revealed type is 'builtins.int' reveal_type(type(D2).x) # N: Revealed type is 'builtins.int' C1().foo() D1().foo() C1().bar() # E: "C1" has no attribute "bar" D1().bar() # E: "D1" has no attribute "bar" for x in C1: reveal_type(x) # N: Revealed type is 'builtins.int*' for x in C2: reveal_type(x) # N: Revealed type is 'builtins.int*' C2().foo() D2().foo() C2().bar() D2().bar() C2().baz() # E: "C2" has no attribute "baz" D2().baz() # E: "D2" has no attribute "baz" [case testSixMetaclassGenerics] from typing import Generic, GenericMeta, TypeVar import six class DestroyableMeta(type): pass class Destroyable(six.with_metaclass(DestroyableMeta)): pass T_co = TypeVar('T_co', bound='Destroyable', covariant=True) class ArcMeta(GenericMeta, DestroyableMeta): pass class Arc(six.with_metaclass(ArcMeta, Generic[T_co], Destroyable)): pass @six.add_metaclass(ArcMeta) class Arc1(Generic[T_co], Destroyable): pass class MyDestr(Destroyable): pass reveal_type(Arc[MyDestr]()) # N: Revealed type is '__main__.Arc[__main__.MyDestr*]' reveal_type(Arc1[MyDestr]()) # N: Revealed type is '__main__.Arc1[__main__.MyDestr*]' [builtins fixtures/bool.pyi] [typing fixtures/typing-full.pyi] [case testSixMetaclassErrors] import six class M(type): pass class A(object): pass def f() -> type: return M class C1(six.with_metaclass(M), object): pass # E: Unsupported dynamic base class "six.with_metaclass" class C2(C1, six.with_metaclass(M)): pass # E: Unsupported dynamic base class "six.with_metaclass" class C3(six.with_metaclass(A)): pass # E: Metaclasses not inheriting from 'type' are not supported @six.add_metaclass(A) # E: 
Metaclasses not inheriting from 'type' are not supported \ # E: Argument 1 to "add_metaclass" has incompatible type "Type[A]"; expected "Type[type]" class D3(A): pass class C4(six.with_metaclass(M), metaclass=M): pass # E: Multiple metaclass definitions @six.add_metaclass(M) class D4(metaclass=M): pass # E: Multiple metaclass definitions class C5(six.with_metaclass(f())): pass # E: Dynamic metaclass not supported for 'C5' @six.add_metaclass(f()) # E: Dynamic metaclass not supported for 'D5' class D5: pass @six.add_metaclass(M) class CD(six.with_metaclass(M)): pass # E: Multiple metaclass definitions class M1(type): pass class Q1(metaclass=M1): pass @six.add_metaclass(M) class CQA(Q1): pass # E: Inconsistent metaclass structure for 'CQA' class CQW(six.with_metaclass(M, Q1)): pass # E: Inconsistent metaclass structure for 'CQW' [case testSixMetaclassErrors_python2] # flags: --python-version 2.7 import six class M(type): pass class C4(six.with_metaclass(M)): # E: Multiple metaclass definitions __metaclass__ = M [case testSixMetaclassAny] import t # type: ignore import six class E(metaclass=t.M): pass class F(six.with_metaclass(t.M)): pass @six.add_metaclass(t.M) class G: pass -- Special support for future.utils -- -------------------------------- [case testFutureMetaclass] import future.utils class M(type): x = 5 class A(future.utils.with_metaclass(M)): pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' [case testFutureMetaclass_python2] import future.utils class M(type): x = 5 class A(future.utils.with_metaclass(M)): pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' [case testFromFutureMetaclass] from future.utils import with_metaclass class M(type): x = 5 class A(with_metaclass(M)): pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' [case testFutureMetaclassImportFrom] import future.utils from metadefs import M class A(future.utils.with_metaclass(M)): pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' [file 
metadefs.py] class M(type): x = 5 [case testFutureMetaclassImport] import future.utils import metadefs class A(future.utils.with_metaclass(metadefs.M)): pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' [file metadefs.py] class M(type): x = 5 [case testFutureMetaclassAndBase] from typing import Iterable, Iterator import future.utils class M(type, Iterable[int]): x = 5 def __iter__(self) -> Iterator[int]: ... class A: def foo(self): pass class B: def bar(self): pass class C1(future.utils.with_metaclass(M, A)): pass class C2(future.utils.with_metaclass(M, A, B)): pass reveal_type(type(C1).x) # N: Revealed type is 'builtins.int' reveal_type(type(C2).x) # N: Revealed type is 'builtins.int' C1().foo() C1().bar() # E: "C1" has no attribute "bar" for x in C1: reveal_type(x) # N: Revealed type is 'builtins.int*' for x in C2: reveal_type(x) # N: Revealed type is 'builtins.int*' C2().foo() C2().bar() C2().baz() # E: "C2" has no attribute "baz" [case testFutureMetaclassGenerics] from typing import Generic, GenericMeta, TypeVar import future.utils class DestroyableMeta(type): pass class Destroyable(future.utils.with_metaclass(DestroyableMeta)): pass T_co = TypeVar('T_co', bound='Destroyable', covariant=True) class ArcMeta(GenericMeta, DestroyableMeta): pass class Arc(future.utils.with_metaclass(ArcMeta, Generic[T_co], Destroyable)): pass class MyDestr(Destroyable): pass reveal_type(Arc[MyDestr]()) # N: Revealed type is '__main__.Arc[__main__.MyDestr*]' [builtins fixtures/bool.pyi] [typing fixtures/typing-full.pyi] [case testFutureMetaclassErrors] import future.utils class M(type): pass class A(object): pass def f() -> type: return M class C1(future.utils.with_metaclass(M), object): pass # E: Unsupported dynamic base class "future.utils.with_metaclass" class C2(C1, future.utils.with_metaclass(M)): pass # E: Unsupported dynamic base class "future.utils.with_metaclass" class C3(future.utils.with_metaclass(A)): pass # E: Metaclasses not inheriting from 'type' are 
not supported class C4(future.utils.with_metaclass(M), metaclass=M): pass # E: Multiple metaclass definitions class C5(future.utils.with_metaclass(f())): pass # E: Dynamic metaclass not supported for 'C5' class M1(type): pass class Q1(metaclass=M1): pass class CQW(future.utils.with_metaclass(M, Q1)): pass # E: Inconsistent metaclass structure for 'CQW' [case testFutureMetaclassErrors_python2] # flags: --python-version 2.7 import future.utils class M(type): pass class C4(future.utils.with_metaclass(M)): # E: Multiple metaclass definitions __metaclass__ = M [case testFutureMetaclassAny] import t # type: ignore import future.utils class E(metaclass=t.M): pass class F(future.utils.with_metaclass(t.M)): pass -- Misc -- ---- [case testCorrectEnclosingClassPushedInDeferred] class C: def __getattr__(self, attr: str) -> int: x: F return x.f class F: def __init__(self, f: int) -> None: self.f = f [out] [case testCorrectEnclosingClassPushedInDeferred2] from typing import TypeVar T = TypeVar('T', bound=C) class C: def m(self: T) -> T: class Inner: x: F f = x.f return self class F: def __init__(self, f: int) -> None: self.f = f [out] [case testCorrectEnclosingClassPushedInDeferred3] class A: def f(self) -> None: def g(x: int) -> int: return y y = int() [out] [case testMetaclassMemberAccessViaType] from typing import Type class M(type): def m(cls, x: int) -> int: pass class C(metaclass=M): pass x = C y: Type[C] = C reveal_type(type(C).m) # N: Revealed type is 'def (cls: __main__.M, x: builtins.int) -> builtins.int' reveal_type(type(x).m) # N: Revealed type is 'def (cls: __main__.M, x: builtins.int) -> builtins.int' reveal_type(type(y).m) # N: Revealed type is 'def (cls: __main__.M, x: builtins.int) -> builtins.int' [out] [case testMetaclassMemberAccessViaType2] from typing import Any, Type class M(type): def m(cls, x: int) -> int: pass B: Any class C(B, metaclass=M): pass x: Type[C] reveal_type(x.m) # N: Revealed type is 'def (x: builtins.int) -> builtins.int' 
reveal_type(x.whatever) # N: Revealed type is 'Any' [out] [case testMetaclassMemberAccessViaType3] from typing import Any, Type, TypeVar T = TypeVar('T') class C(Any): def bar(self: T) -> Type[T]: pass def foo(self) -> None: reveal_type(self.bar()) # N: Revealed type is 'Type[__main__.C*]' reveal_type(self.bar().__name__) # N: Revealed type is 'builtins.str' [builtins fixtures/type.pyi] [out] [case testClassDecoratorIsTypeChecked] from typing import Callable, Type def decorate(x: int) -> Callable[[type], type]: # N: "decorate" defined here ... def decorate_forward_ref() -> Callable[[Type[A]], Type[A]]: ... @decorate(y=17) # E: Unexpected keyword argument "y" for "decorate" @decorate() # E: Too few arguments for "decorate" @decorate(22, 25) # E: Too many arguments for "decorate" @decorate_forward_ref() @decorate(11) class A: pass @decorate # E: Argument 1 to "decorate" has incompatible type "Type[A2]"; expected "int" class A2: pass [case testClassDecoratorIncorrect] def not_a_class_decorator(x: int) -> int: ... 
@not_a_class_decorator(7) class A3: pass # E: "int" not callable not_a_function = 17 @not_a_function() # E: "int" not callable class B: pass @not_a_function class B2: pass # E: "int" not callable b = object() @b.nothing # E: "object" has no attribute "nothing" class C: pass @undefined # E: Name 'undefined' is not defined class D: pass [case testSlotsCompatibility] class A: __slots__ = () class B(A): __slots__ = ('a', 'b') class C: __slots__ = ('x',) class D(B, C): __slots__ = ('aa', 'bb', 'cc') [case testRevealLocalsOnClassVars] class C1(object): t = 'a' y = 3.0 class Inner(object): pass reveal_locals() [out] main:5: note: Revealed local types are: main:5: note: t: builtins.str main:5: note: y: builtins.float [case testAbstractClasses] import a import b [file a.pyi] from abc import ABCMeta, abstractmethod from typing import Protocol class A: # OK, has @abstractmethod @abstractmethod def f(self) -> None: pass class B(A): # E: Class a.B has abstract attributes "f" # N: If it is meant to be abstract, add 'abc.ABCMeta' as an explicit metaclass pass class C(A, metaclass=ABCMeta): # OK, has ABCMeta as a metaclass pass class D(A): # OK, implements the abstract method def f(self) -> None: pass class E(Protocol): # OK, is a protocol @abstractmethod def f(self) -> None: pass class F(E, Protocol): # OK, is a protocol pass [file b.py] # All of these are OK because this is not a stub file. from abc import ABCMeta, abstractmethod from typing import Protocol class A: @abstractmethod def f(self) -> None: pass class B(A): pass class C(A, metaclass=ABCMeta): pass class D(A): def f(self) -> None: pass class E(Protocol): @abstractmethod def f(self) -> None: pass class F(E, Protocol): pass [case testClassMethodOverride] from typing import Callable, Any def deco(f: Callable[..., Any]) -> Callable[..., Any]: ... class B: @classmethod def meth(cls, x: int) -> int: ... class C(B): @classmethod @deco def meth(cls, x: int) -> int: ... 
[builtins fixtures/classmethod.pyi] [out] [case testGetAttrImportAnnotation] import a x: a.A y: a.A.B.C reveal_type(x) # N: Revealed type is 'Any' reveal_type(y) # N: Revealed type is 'Any' [file a.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [builtins fixtures/module.pyi] [out] [case testGetAttrImportBaseClass] import a class B(a.A): ... [file a.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [builtins fixtures/module.pyi] [out] [case testGetAttrDescriptor] from typing import TypeVar, Generic, Any T = TypeVar('T') class C(Generic[T]): normal: T def __getattr__(self, attr: str) -> T: ... class Descr: def __get__(self, inst: Any, owner: Any) -> int: ... class D(C[Descr]): other: Descr d: D reveal_type(d.normal) # N: Revealed type is 'builtins.int' reveal_type(d.dynamic) # N: Revealed type is '__main__.Descr*' reveal_type(D.other) # N: Revealed type is 'builtins.int' D.dynamic # E: "Type[D]" has no attribute "dynamic" [out] [case testSelfDescriptorAssign] from typing import Any class Descr: def __get__(self, inst: Any, owner: Any) -> int: ... class C: def __init__(self, x: Descr) -> None: self.x = x c = C(Descr()) reveal_type(c.x) # N: Revealed type is '__main__.Descr' [out] [case testForwardInstanceWithWrongArgCount] from typing import TypeVar, Generic T = TypeVar('T') class G(Generic[T]): ... A = G x: A[B[int, int]] # E: "G" expects 1 type argument, but 2 given B = G [out] [case testForwardInstanceWithNoArgs] from typing import TypeVar, Generic T = TypeVar('T') class G(Generic[T]): ... A = G x: A[B] reveal_type(x) # N: Revealed type is '__main__.G[__main__.G[Any]]' B = G [out] [case testForwardInstanceWithBound] # flags: --show-column-numbers from typing import TypeVar, Generic T = TypeVar('T', bound=str) class G(Generic[T]): ... 
A = G x: A[B[int]] # E B = G [out] main:8:4: error: Type argument "__main__.G[builtins.int]" of "G" must be a subtype of "builtins.str" main:8:6: error: Type argument "builtins.int" of "G" must be a subtype of "builtins.str" [case testExtremeForwardReferencing] from typing import TypeVar, Generic T = TypeVar('T') class B(Generic[T]): ... y: A z: A[int] x = [y, z] reveal_type(x) # N: Revealed type is 'builtins.list[__main__.B*[Any]]' A = B [builtins fixtures/list.pyi] [out] [case testNoneAnyFallback] from typing import Any dynamic: Any class C(dynamic): pass x: None = C() # E: Incompatible types in assignment (expression has type "C", variable has type "None") [out] [case testNoneAnyFallbackDescriptor] from typing import Any from d import Descr dynamic: Any class C(dynamic): id = Descr(int) name = Descr(str) c: C reveal_type(c.id) # N: Revealed type is 'builtins.int*' reveal_type(C.name) # N: Revealed type is 'd.Descr[builtins.str*]' [file d.pyi] from typing import Any, overload, Generic, TypeVar, Type T = TypeVar('T') class Descr(Generic[T]): def __init__(self, tp: Type[T]) -> None: ... @overload def __get__(self, inst: None, owner: Any) -> Descr[T]: ... @overload def __get__(self, inst: object, owner: Any) -> T: ... [out] [case testClassCustomPropertyWorks] from typing import TypeVar, Generic, Callable, Any V = TypeVar('V') class classproperty(Generic[V]): def __init__(self, getter: Callable[[Any], V]) -> None: self.getter = getter def __get__(self, instance: Any, owner: Any) -> V: return self.getter(owner) class C: @classproperty def foo(cls) -> int: return 42 reveal_type(C.foo) # N: Revealed type is 'builtins.int*' reveal_type(C().foo) # N: Revealed type is 'builtins.int*' [out] [case testMultipleInheritanceCycle] import b [file a.py] from b import B class A: ... class C(A, B): ... class D(C): ... class Other: ... [file b.py] from a import Other class B: ... [out] [case testMultipleInheritanceCycle2] import b [file a.py] from b import B class A: ... 
class C(A, B): ... class D(C): ... class Other: ... a: A b: B c: C d: D d = A() # E: Incompatible types in assignment (expression has type "A", variable has type "D") if int(): d = B() # E: Incompatible types in assignment (expression has type "B", variable has type "D") if int(): d = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D") a = D() b = D() c = D() [file b.py] from a import Other class B: ... [out] [case testAllowPropertyAndInit1] class C: def __init__(self, x: int) -> None: self.x = x @property def x(self) -> int: pass @x.setter def x(self, x: int) -> None: pass [builtins fixtures/property.pyi] [out] [case testAllowPropertyAndInit2] class C: @property def x(self) -> int: pass @x.setter def x(self, x: int) -> None: pass def __init__(self, x: int) -> None: self.x = x [builtins fixtures/property.pyi] [case testAllowPropertyAndInit3] class C: def __init__(self, x: int) -> None: self.x = x # type: ignore @property # Should be no error here def x(self) -> int: pass [builtins fixtures/property.pyi] [out] [case testClassMethodBeforeInit1] class Foo: @classmethod def bar(cls) -> Foo: return cls("bar") def __init__(self, baz: str) -> None: self.baz = baz [builtins fixtures/classmethod.pyi] [case testClassMethodBeforeInit2] class Foo: @classmethod def bar(cls) -> Foo: return cls(Bar()) def __init__(self, baz: 'Bar') -> None: self.baz = baz class Bar: pass [builtins fixtures/classmethod.pyi] [case testClassMethodBeforeInit3] from typing import overload class Foo: @classmethod @overload def bar(cls, x: int) -> Foo: ... @classmethod @overload def bar(cls, x: str) -> Foo: ... 
@classmethod def bar(cls, x: object) -> Foo: return cls(x) def __init__(self, baz: object) -> None: self.baz = baz [builtins fixtures/classmethod.pyi] [case testNewAndInit1] class A: def __init__(self, x: int) -> None: pass class B(A): def __new__(cls) -> B: pass B() [case testNewAndInit2] from typing import Any class A: def __new__(cls, *args: Any) -> 'A': ... class B(A): def __init__(self, x: int) -> None: pass reveal_type(B) # N: Revealed type is 'def (x: builtins.int) -> __main__.B' [case testNewAndInit3] from typing import Any class A: def __new__(cls, *args: Any) -> 'A': ... def __init__(self, x: int) -> None: pass reveal_type(A) # N: Revealed type is 'def (x: builtins.int) -> __main__.A' [case testCyclicDecorator] import b [file a.py] import b import c class A(b.B): @c.deco def meth(self) -> int: ... [file b.py] import a import c class B: @c.deco def meth(self) -> int: ... [file c.py] from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... [out] [case testCyclicOverload] import b [file a.pyi] import b from typing import overload class A(b.B): @overload def meth(self, x: int) -> int: ... @overload def meth(self, x: str) -> str: ... [file b.pyi] import a from typing import overload class B: @overload def meth(self, x: int) -> int: ... @overload def meth(self, x: str) -> str: ... [out] [case testCyclicOverloadDeferred] import b [file a.py] import b from typing import overload, Union class A(b.B): @overload def meth(self, x: int) -> int: ... @overload def meth(self, x: str) -> str: ... def meth(self, x) -> Union[int, str]: reveal_type(other.x) # N: Revealed type is 'builtins.int' return 0 other: Other class Other: def __init__(self) -> None: self.x = f() def f() -> int: ... [file b.py] import a from typing import overload class B: @overload def meth(self, x: int) -> int: ... @overload def meth(self, x: str) -> str: ... 
def meth(self, x): pass [out] [case testCyclicOverrideAny] import a [file b.py] import a class Sub(a.Base): def x(self) -> int: pass [file a.py] import b class Base: def __init__(self): self.x = 1 [out] [case testCyclicOverrideChecked] import a [file b.py] import a class Sub(a.Base): def x(self) -> int: pass # E: Signature of "x" incompatible with supertype "Base" [file a.py] import b class Base: def __init__(self) -> None: self.x = 1 [out] [case testCyclicOverrideCheckedDecorator] import a [file b.py] import a import c class Sub(a.Base): @c.deco def x(self) -> int: pass # E: Signature of "x" incompatible with supertype "Base" [file a.py] import b import c class Base: def __init__(self) -> None: self.x = 1 [file c.py] from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... [out] [case testCyclicOverrideCheckedDecoratorDeferred] import a [file b.py] import a import c class Sub(a.Base): @c.deco def x(self) -> int: pass # E: Signature of "x" incompatible with supertype "Base" [file a.py] import b import c class Base: def __init__(self) -> None: self.x = f() def f() -> int: ... [file c.py] from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... [out] [case testCyclicOverrideAnyDecoratorDeferred] import a [file b.py] import a import c class Sub(a.Base): @c.deco def x(self) -> int: pass [file a.py] from b import Sub import c class Base: def __init__(self) -> None: self.x = f() def f() -> int: ... [file c.py] from typing import Any, Callable def deco(f: Callable[..., Any]) -> Any: ... [out] [case testCyclicDecoratorDoubleDeferred] import b [file a.py] import b import c class A(b.B): @c.deco def meth(self) -> int: reveal_type(other.x) # N: Revealed type is 'builtins.int' return 0 other: Other class Other: def __init__(self) -> None: self.x = f() def f() -> int: ... 
[file b.py] from a import A import c class B: @c.deco def meth(self) -> int: pass [file c.py] from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... [out] [case testCyclicDecoratorSuper] import b [file a.py] import b import c class A(b.B): @c.deco def meth(self) -> int: y = super().meth() reveal_type(y) # N: Revealed type is 'Tuple[builtins.int*, builtins.int]' return 0 [file b.py] from a import A import c class B: @c.deco def meth(self) -> int: pass [file c.py] from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... [out] [case testCyclicDecoratorBothDeferred] import b [file a.py] import b import c class A(b.B): @c.deco def meth(self) -> int: pass [file b.py] from a import A import c class B: @c.deco def meth(self) -> int: reveal_type(other.x) # N: Revealed type is 'builtins.int' return 0 other: Other class Other: def __init__(self) -> None: self.x = f() def f() -> int: ... [file c.py] from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... [out] [case testCyclicDecoratorSuperDeferred] import b [file a.py] import b import c class A(b.B): @c.deco def meth(self) -> int: y = super().meth() reveal_type(y) # N: Revealed type is 'Tuple[builtins.int*, builtins.int]' reveal_type(other.x) # N: Revealed type is 'builtins.int' return 0 other: Other class Other: def __init__(self) -> None: self.x = f() def f() -> int: ... [file b.py] from a import A import c class B: @c.deco def meth(self) -> int: pass [file c.py] from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... [out] [case testOptionalDescriptorsBinder] # flags: --strict-optional from typing import Type, TypeVar, Optional T = TypeVar('T') class IntDescr: def __get__(self, obj: T, typ: Type[T]) -> Optional[int]: ... 
def __set__(self, obj: T, value: Optional[int]) -> None: ... class C: spec = IntDescr() def meth_spec(self) -> None: if self.spec is None: self.spec = 0 reveal_type(self.spec) # N: Revealed type is 'builtins.int' [builtins fixtures/bool.pyi] [case testUnionDescriptorsBinder] from typing import Type, TypeVar, Union T = TypeVar('T') class A: ... class B: ... class UnionDescr: def __get__(self, obj: T, typ: Type[T]) -> Union[A, B]: ... def __set__(self, obj: T, value: Union[A, B]) -> None: ... class C: spec = UnionDescr() def meth_spec(self) -> None: self.spec = A() reveal_type(self.spec) # N: Revealed type is '__main__.A' [builtins fixtures/bool.pyi] [case testSubclassDescriptorsBinder] from typing import Type, TypeVar, Optional T = TypeVar('T') class A: ... class B(A): ... class SubDescr: def __get__(self, obj: T, typ: Type[T]) -> A: ... def __set__(self, obj: T, value: A) -> None: ... class C: spec = SubDescr() def meth_spec(self) -> None: self.spec = B() reveal_type(self.spec) # N: Revealed type is '__main__.B' [builtins fixtures/bool.pyi] [case testClassLevelImport] # flags: --ignore-missing-imports class Test: import a def __init__(self) -> None: some_module = self.a [out] [case testIsInstanceTypeVsMetaclass] from typing import Type class Meta(type): pass class Thing(metaclass=Meta): pass def foo(x: Type[Thing]) -> Type[Thing]: assert isinstance(x, Meta) return x [builtins fixtures/isinstancelist.pyi] [case testIsInstanceTypeVsUnionOfType] from typing import Type, Union class AA: pass class AB: pass class M: pass class A(M, AA): pass class B(M, AB): pass AOrB = Union[A, B] class T(object): def __init__(self, typ: Type[AOrB] = A) -> None: assert isinstance(typ, type(M)) self.typ: Type[AOrB] = typ [builtins fixtures/isinstancelist.pyi] [case testIsInstanceTypeIsSubclass] from typing import Union, Type class C: ... 
x: Union[C, Type[C]] if isinstance(x, type) and issubclass(x, C): reveal_type(x) # N: Revealed type is 'Type[__main__.C]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceTypeByAssert] class A: x = 42 i: type = A assert issubclass(i, A) reveal_type(i.x) # N: Revealed type is 'builtins.int' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceTypeTypeVar] from typing import Type, TypeVar, Generic class Base: ... class Sub(Base): ... T = TypeVar('T', bound=Base) class C(Generic[T]): def meth(self, cls: Type[T]) -> None: if not issubclass(cls, Sub): return reveal_type(cls) # N: Revealed type is 'Type[__main__.Sub]' def other(self, cls: Type[T]) -> None: if not issubclass(cls, Sub): return reveal_type(cls) # N: Revealed type is 'Type[__main__.Sub]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceTypeSubclass] # flags: --strict-optional from typing import Type, Optional class Base: ... class One(Base): ... class Other(Base): ... def test() -> None: x: Optional[Type[Base]] if int(): x = One elif int(): x = Other else: return reveal_type(x) # N: Revealed type is 'Union[Type[__main__.One], Type[__main__.Other]]' [builtins fixtures/isinstancelist.pyi] [case testMemberRedefinition] class C: def __init__(self) -> None: self.foo = 12 self.foo: int = 12 # E: Attribute 'foo' already defined on line 3 [case testMemberRedefinitionDefinedInClass] class C: foo = 12 def __init__(self) -> None: self.foo: int = 12 # E: Attribute 'foo' already defined on line 2 [case testAbstractInit] from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod def __init__(self, a: int) -> None: pass class B(A): pass class C(B): def __init__(self, a: int) -> None: self.c = a a = A(1) # E: Cannot instantiate abstract class 'A' with abstract attribute '__init__' A.c # E: "Type[A]" has no attribute "c" b = B(2) # E: Cannot instantiate abstract class 'B' with abstract attribute '__init__' B.c # E: "Type[B]" has no attribute "c" c = C(3) c.c C.c [case 
testDecoratedConstructors] from typing import TypeVar, Callable, Any F = TypeVar('F', bound=Callable[..., Any]) def dec(f: F) -> F: ... class A: @dec def __init__(self, x: int) -> None: ... class B: @dec def __new__(cls, x: int) -> B: ... reveal_type(A) # N: Revealed type is 'def (x: builtins.int) -> __main__.A' reveal_type(B) # N: Revealed type is 'def (x: builtins.int) -> __main__.B' [case testDecoratedConstructorsBad] from typing import Callable, Any def dec(f: Callable[[Any, int], Any]) -> int: ... class A: @dec # E: Unsupported decorated constructor type def __init__(self, x: int) -> None: ... class B: @dec # E: Unsupported decorated constructor type def __new__(cls, x: int) -> B: ... [case testIgnorePrivateAttributesTypeCheck] class B: __foo_: int class C(B): __foo_: str [out] [case testIgnorePrivateMethodsTypeCheck] class B: def __foo_(self) -> int: ... class C(B): def __foo_(self) -> str: ... [out] [case testCheckForPrivateMethodsWhenPublicCheck] class B: __foo__: int class C(B): __foo__: str [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testIgnorePrivateMethodsTypeCheck] class A: def __foo_(self) -> int: ... class B: def __foo_(self) -> str: ... 
class C(A, B): pass [out] [case testAttributeDefOrder1] import a [file a.py] from b import C class D(C): def g(self) -> None: self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") def f(self) -> None: reveal_type(self.x) # N: Revealed type is 'builtins.int' [file b.py] import a class C: def __init__(self) -> None: self.x = 0 [targets b, a, b.C.__init__, a.D.g, a.D.f, __main__] [case testAttributeDefOrder2] class D(C): def g(self) -> None: self.x = '' def f(self) -> None: # https://github.com/python/mypy/issues/7162 reveal_type(self.x) # N: Revealed type is 'builtins.str' class C: def __init__(self) -> None: self.x = 0 class E(C): def g(self) -> None: self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") def f(self) -> None: reveal_type(self.x) # N: Revealed type is 'builtins.int' [targets __main__, __main__, __main__.D.g, __main__.D.f, __main__.C.__init__, __main__.E.g, __main__.E.f] [case testNewReturnType1] class A: def __new__(cls) -> B: pass class B(A): pass reveal_type(A()) # N: Revealed type is '__main__.B' reveal_type(B()) # N: Revealed type is '__main__.B' [case testNewReturnType2] from typing import Any # make sure that __new__ method that return Any are ignored when # determining the return type class A: def __new__(cls): pass class B: def __new__(cls) -> Any: pass reveal_type(A()) # N: Revealed type is '__main__.A' reveal_type(B()) # N: Revealed type is '__main__.B' [case testNewReturnType3] # Check for invalid __new__ typing class A: def __new__(cls) -> int: # E: Incompatible return type for "__new__" (returns "int", but must return a subtype of "A") pass reveal_type(A()) # N: Revealed type is '__main__.A' [case testNewReturnType4] from typing import TypeVar, Type # Check for __new__ using type vars TX = TypeVar('TX', bound='X') class X: def __new__(lol: Type[TX], x: int) -> TX: pass class Y(X): pass reveal_type(X(20)) # N: Revealed type is '__main__.X*' 
reveal_type(Y(20)) # N: Revealed type is '__main__.Y*' [case testNewReturnType5] from typing import Any, TypeVar, Generic, overload T = TypeVar('T') class O(Generic[T]): @overload def __new__(cls) -> O[int]: pass @overload def __new__(cls, x: int) -> O[str]: pass def __new__(cls, x: int = 0) -> O[Any]: pass reveal_type(O()) # N: Revealed type is '__main__.O[builtins.int]' reveal_type(O(10)) # N: Revealed type is '__main__.O[builtins.str]' [case testNewReturnType6] from typing import Tuple, Optional # Check for some cases that aren't allowed class X: def __new__(cls) -> Optional[Y]: # E: "__new__" must return a class instance (got "Optional[Y]") pass class Y: def __new__(cls) -> Optional[int]: # E: "__new__" must return a class instance (got "Optional[int]") pass [case testNewReturnType7] from typing import NamedTuple # ... test __new__ returning tuple type class A: def __new__(cls) -> 'B': pass N = NamedTuple('N', [('x', int)]) class B(A, N): pass reveal_type(A()) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.B]' [case testNewReturnType8] from typing import TypeVar, Any # test type var from a different argument TX = TypeVar('TX', bound='X') class X: def __new__(cls, x: TX) -> TX: # E: "__new__" must return a class instance (got "TX") pass [case testNewReturnType9] class A: def __new__(cls) -> A: pass class B(A): pass reveal_type(B()) # N: Revealed type is '__main__.B' [case testGenericOverride] from typing import Generic, TypeVar, Any T = TypeVar('T') class B(Generic[T]): x: T class C(B): def __init__(self) -> None: self.x: Any [case testGenericOverridePreciseInvalid] from typing import Generic, TypeVar, Any T = TypeVar('T') class B(Generic[T]): x: T class C(B[str]): def __init__(self) -> None: self.x: int # E: Incompatible types in assignment (expression has type "int", base class "B" defined the type as "str") [case testGenericOverridePreciseValid] from typing import Generic, TypeVar T = TypeVar('T') class B(Generic[T]): x: T class C(B[float]): def 
__init__(self) -> None: self.x: int # We currently allow covariant overriding. [case testGenericOverrideGeneric] from typing import Generic, TypeVar, List T = TypeVar('T') class B(Generic[T]): x: T class C(B[T]): def __init__(self) -> None: self.x: List[T] # E: Incompatible types in assignment (expression has type "List[T]", base class "B" defined the type as "T") [builtins fixtures/list.pyi] [case testGenericOverrideGenericChained] from typing import Generic, TypeVar, Tuple T = TypeVar('T') S = TypeVar('S') class A(Generic[T]): x: T class B(A[Tuple[T, S]]): ... class C(B[int, T]): def __init__(self) -> None: # TODO: error message could be better. self.x: Tuple[str, T] # E: Incompatible types in assignment (expression has type "Tuple[str, T]", base class "A" defined the type as "Tuple[int, T]") [case testInitSubclassWrongType] class Base: default_name: str def __init_subclass__(cls, default_name: str): super().__init_subclass__() cls.default_name = default_name return class Child(Base, default_name=5): # E: Argument "default_name" to "__init_subclass__" of "Base" has incompatible type "int"; expected "str" pass [builtins fixtures/object_with_init_subclass.pyi] [case testInitSubclassTooFewArgs] class Base: default_name: str def __init_subclass__(cls, default_name: str, **kwargs): super().__init_subclass__() cls.default_name = default_name return class Child(Base): # E: Too few arguments for "__init_subclass__" of "Base" pass [builtins fixtures/object_with_init_subclass.pyi] [case testInitSubclassTooFewArgs2] class Base: default_name: str def __init_subclass__(cls, default_name: str, thing: int): super().__init_subclass__() cls.default_name = default_name return # TODO implement this, so that no error is raised? 
d = {"default_name": "abc", "thing": 0} class Child(Base, **d): # E: Too few arguments for "__init_subclass__" of "Base" pass [builtins fixtures/object_with_init_subclass.pyi] [case testInitSubclassOK] class Base: default_name: str thing: int def __init_subclass__(cls, default_name: str, thing:int, **kwargs): super().__init_subclass__() cls.default_name = default_name return class Child(Base, thing=5, default_name=""): pass [builtins fixtures/object_with_init_subclass.pyi] [case testInitSubclassWithMetaclassOK] class Base: thing: int def __init_subclass__(cls, thing: int): cls.thing = thing class Child(Base, metaclass=type, thing=0): pass [builtins fixtures/object_with_init_subclass.pyi] [case testInitSubclassWithCustomMetaclassOK] class M(type): ... class Child(metaclass=M, thing=0): pass [builtins fixtures/object_with_init_subclass.pyi] [case testTooManyArgsForObject] class A(thing=5): pass [out] main:1: error: Unexpected keyword argument "thing" for "__init_subclass__" of "object" tmp/builtins.pyi:5: note: "__init_subclass__" of "object" defined here [builtins fixtures/object_with_init_subclass.pyi] [case testInitSubclassWithImports] from init_subclass.a import Base class Child(Base, thing=5): # E: Missing positional argument "default_name" in call to "__init_subclass__" of "Base" pass [file init_subclass/a.py] class Base: default_name: str thing: int def __init_subclass__(cls, default_name: str, thing:int, **kwargs): pass [file init_subclass/__init__.py] [builtins fixtures/object_with_init_subclass.pyi] [case testInitSubclassWithImportsOK] from init_subclass.a import MidBase class Main(MidBase, test=True): pass [file init_subclass/a.py] class Base: def __init_subclass__(cls, **kwargs) -> None: pass class MidBase(Base): pass [file init_subclass/__init__.py] [builtins fixtures/object_with_init_subclass.pyi] [case testInitSubclassUnannotated] class A: def __init_subclass__(cls, *args, **kwargs): super().__init_subclass__(*args, **kwargs) class B(A): pass 
reveal_type(A.__init_subclass__) # N: Revealed type is 'def (*args: Any, **kwargs: Any) -> Any' [builtins fixtures/object_with_init_subclass.pyi] [case testInitSubclassUnannotatedMulti] from typing import ClassVar, List, Type class A: registered_classes: ClassVar[List[Type[A]]] = [] def __init_subclass__(cls, *args, register=True, **kwargs): if register: cls.registered_classes.append(cls) super().__init_subclass__(*args, **kwargs) class B(A): ... class C(A, register=False): ... class D(C): ... [builtins fixtures/object_with_init_subclass.pyi] [case testClassMethodUnannotated] class C: def __new__(cls): ... @classmethod def meth(cls): ... reveal_type(C.meth) # N: Revealed type is 'def () -> Any' reveal_type(C.__new__) # N: Revealed type is 'def (cls: Type[__main__.C]) -> Any' [builtins fixtures/classmethod.pyi] [case testOverrideGenericSelfClassMethod] from typing import Generic, TypeVar, Type, List T = TypeVar('T', bound=A) class A: @classmethod def meth(cls: Type[T]) -> List[T]: ... class B(A): @classmethod def meth(cls: Type[T]) -> List[T]: ... [builtins fixtures/isinstancelist.pyi] [case testCheckUntypedDefsSelf1] # flags: --check-untyped-defs from typing import Generic, TypeVar T = TypeVar('T') class Desc: def __get__(self, x, y): # type: (...) 
-> bool pass class Foo: y = Desc() def __init__(self): self.x = 0 def foo(self): reveal_type(self.x) # N: Revealed type is 'builtins.int' reveal_type(self.y) # N: Revealed type is 'builtins.bool' self.bar() self.baz() # E: "Foo" has no attribute "baz" @classmethod def bar(cls): cls.baz() # E: "Type[Foo]" has no attribute "baz" class C(Generic[T]): x: T def meth(self): self.x + 1 # E: Unsupported left operand type for + ("T") [builtins fixtures/classmethod.pyi] [case testCheckUntypedDefsSelf2] # flags: --check-untyped-defs class Foo: def __init__(self): self.x = None self.y = [] reveal_type(Foo().x) # N: Revealed type is 'Union[Any, None]' reveal_type(Foo().y) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case testCheckUntypedDefsSelf3] # flags: --check-untyped-defs class Foo: def bad(): # E: Method must have at least one argument self.x = 0 # E: Name 'self' is not defined [case testTypeAfterAttributeAccessWithDisallowAnyExpr] # flags: --disallow-any-expr def access_before_declaration(self) -> None: obj = Foo('bar') obj.value x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' x = x + 1 class Foo: def __init__(self, value: str) -> None: self.value = value def access_after_declaration(self) -> None: obj = Foo('bar') obj.value x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' x = x + 1 [case testIsSubClassNarrowDownTypesOfTypeVariables] from typing import Type, TypeVar, Generic class Base: field: int = 42 TypeT = TypeVar("TypeT", bound=type) TypeT1 = TypeVar("TypeT1", bound=Type[Base]) class C1: def method(self, other: type) -> int: if issubclass(other, Base): reveal_type(other) # N: Revealed type is 'Type[__main__.Base]' return other.field return 0 class C2(Generic[TypeT]): def method(self, other: TypeT) -> int: if issubclass(other, Base): reveal_type(other) # N: Revealed type is 'Type[__main__.Base]' return other.field return 0 class C3(Generic[TypeT1]): def method(self, other: TypeT1) -> int: if issubclass(other, Base): 
reveal_type(other) # N: Revealed type is 'TypeT1`1' return other.field return 0 [builtins fixtures/isinstancelist.pyi] [case testPropertyWithExtraMethod] def dec(f): return f class A: @property def x(self): ... @x.setter def x(self, value) -> None: ... def x(self) -> None: ... # E: Unexpected definition for property "x" @property def y(self) -> int: ... @y.setter def y(self, value: int) -> None: ... @dec def y(self) -> None: ... # TODO: This should generate an error reveal_type(A().y) # N: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] [case testEnclosingScopeLambdaNoCrash] class C: x = lambda x: x.y.g() [case testEnclosingScopeLambdaNoCrashExplicit] from typing import Callable class C: x: Callable[[C], int] = lambda x: x.y.g() # E: "C" has no attribute "y" mypy-0.761/test-data/unit/check-classvar.test0000644€tŠÔÚ€2›s®0000001746713576752246025376 0ustar jukkaDROPBOX\Domain Users00000000000000[case testAssignmentOnClass] from typing import ClassVar class A: x = 1 # type: ClassVar[int] A.x = 2 [case testAssignmentOnInstance] from typing import ClassVar class A: x = 1 # type: ClassVar[int] A().x = 2 [out] main:4: error: Cannot assign to class variable "x" via instance [case testAssignmentOnSubclassInstance] from typing import ClassVar class A: x = 1 # type: ClassVar[int] class B(A): pass B().x = 2 [out] main:6: error: Cannot assign to class variable "x" via instance [case testOverrideOnSelf] from typing import ClassVar class A: x = None # type: ClassVar[int] def __init__(self) -> None: self.x = 0 [out] main:5: error: Cannot assign to class variable "x" via instance [case testOverrideOnSelfInSubclass] from typing import ClassVar class A: x = None # type: ClassVar[int] class B(A): def __init__(self) -> None: self.x = 0 [out] main:6: error: Cannot assign to class variable "x" via instance [case testReadingFromInstance] from typing import ClassVar class A: x = 1 # type: ClassVar[int] A().x reveal_type(A().x) [out] main:5: note: Revealed type is 
'builtins.int' [case testReadingFromSelf] from typing import ClassVar class A: x = 1 # type: ClassVar[int] def __init__(self) -> None: reveal_type(self.x) [out] main:5: note: Revealed type is 'builtins.int' [case testTypecheckSimple] from typing import ClassVar class A: x = 1 # type: ClassVar[int] y = A.x # type: int [case testTypecheckWithUserType] from typing import ClassVar class A: pass class B: x = A() # type: ClassVar[A] [case testTypeCheckOnAssignment] from typing import ClassVar class A: pass class B: pass class C: x = None # type: ClassVar[A] C.x = B() [out] main:8: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeCheckWithOverridden] from typing import ClassVar class A: pass class B(A): pass class C: x = A() # type: ClassVar[A] C.x = B() [case testRevealType] from typing import ClassVar class A: x = None # type: ClassVar[int] reveal_type(A.x) [out] main:4: note: Revealed type is 'builtins.int' [case testInfer] from typing import ClassVar class A: x = 1 # type: ClassVar[int] y = A.x reveal_type(y) [out] main:5: note: Revealed type is 'builtins.int' [case testAssignmentOnUnion] from typing import ClassVar, Union class A: x = None # type: int class B: x = None # type: ClassVar[int] c = A() # type: Union[A, B] c.x = 1 [out] main:7: error: Cannot assign to class variable "x" via instance [case testAssignmentOnInstanceFromType] from typing import ClassVar, Type class A: x = None # type: ClassVar[int] def f(a: Type[A]) -> None: a().x = 0 [out] main:5: error: Cannot assign to class variable "x" via instance [case testAssignmentOnInstanceFromSubclassType] from typing import ClassVar, Type class A: x = None # type: ClassVar[int] class B(A): pass def f(b: Type[B]) -> None: b().x = 0 [out] main:7: error: Cannot assign to class variable "x" via instance [case testClassVarWithList] from typing import ClassVar, List class A: x = None # type: ClassVar[List[int]] A.x = ['a'] A().x.append(1) A().x.append('') [builtins 
fixtures/list.pyi] [out] main:4: error: List item 0 has incompatible type "str"; expected "int" main:6: error: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [case testClassVarWithUnion] from typing import ClassVar, Union class A: x = None # type: ClassVar[Union[int, str]] class B: pass A.x = 0 A.x = 'a' A.x = B() reveal_type(A().x) [out] main:8: error: Incompatible types in assignment (expression has type "B", variable has type "Union[int, str]") main:9: note: Revealed type is 'Union[builtins.int, builtins.str]' [case testOverrideWithNarrowedUnion] from typing import ClassVar, Union class A: pass class B: pass class C: pass class D: x = None # type: ClassVar[Union[A, B, C]] class E(D): x = None # type: ClassVar[Union[A, B]] [case testOverrideWithExtendedUnion] from typing import ClassVar, Union class A: pass class B: pass class C: pass class D: x = None # type: ClassVar[Union[A, B]] class E(D): x = None # type: ClassVar[Union[A, B, C]] [out] main:8: error: Incompatible types in assignment (expression has type "Union[A, B, C]", base class "D" defined the type as "Union[A, B]") [case testAssignmentToCallableRet] from typing import ClassVar class A: x = None # type: ClassVar[int] def f() -> A: return A() f().x = 0 [out] main:6: error: Cannot assign to class variable "x" via instance [case testOverrideWithIncomatibleType] from typing import ClassVar class A: x = None # type: ClassVar[int] class B(A): x = None # type: ClassVar[str] [out] main:5: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testOverrideWithNormalAttribute] from typing import ClassVar class A: x = 1 # type: ClassVar[int] class B(A): x = 2 # type: int [out] main:5: error: Cannot override class variable (previously declared on base class "A") with instance variable [case testOverrideWithAttributeWithClassVar] from typing import ClassVar class A: x = 1 # type: int class B(A): x = 2 # type: ClassVar[int] 
[out] main:5: error: Cannot override instance variable (previously declared on base class "A") with class variable [case testOverrideClassVarManyBases] from typing import ClassVar class A: x = 1 # type: ClassVar[int] class B: x = 2 # type: int class C(A, B): x = 3 # type: ClassVar[int] [out] main:7: error: Cannot override instance variable (previously declared on base class "B") with class variable [case testOverrideClassVarWithClassVar] from typing import ClassVar class A: x = 1 # type: ClassVar[int] class B(A): x = 2 # type: ClassVar[int] [case testOverrideClassVarWithImplicitClassVar] from typing import ClassVar class A: x = 1 # type: ClassVar[int] class B(A): x = 2 [case testOverrideClassVarWithImplicitThenExplicit] from typing import ClassVar class A: x = 1 # type: ClassVar[int] class B(A): x = 2 class C(B): x = 3 # type: ClassVar[int] [case testOverrideOnABCSubclass] from abc import ABCMeta from typing import ClassVar class A(metaclass=ABCMeta): x = None # type: ClassVar[int] class B(A): x = 0 # type: ClassVar[int] [case testAcrossModules] import m reveal_type(m.A().x) m.A().x = 0 [file m.py] from typing import ClassVar class A: x = None # type: ClassVar[int] [out] main:2: note: Revealed type is 'builtins.int' main:3: error: Cannot assign to class variable "x" via instance [case testClassVarWithGeneric] from typing import ClassVar, Generic, TypeVar T = TypeVar('T') class A(Generic[T]): x: ClassVar[T] @classmethod def foo(cls) -> T: return cls.x # OK A.x # E: Access to generic class variables is ambiguous A.x = 1 # E: Access to generic class variables is ambiguous A[int].x # E: Access to generic class variables is ambiguous class Bad(A[int]): pass Bad.x # E: Access to generic class variables is ambiguous class Good(A[int]): x = 42 reveal_type(Good.x) # N: Revealed type is 'builtins.int' [builtins fixtures/classmethod.pyi] [case testClassVarWithNestedGeneric] from typing import ClassVar, Generic, Tuple, TypeVar, Union, Type T = TypeVar('T') U = TypeVar('U') 
class A(Generic[T, U]): x: ClassVar[Union[T, Tuple[U, Type[U]]]] @classmethod def foo(cls) -> Union[T, Tuple[U, Type[U]]]: return cls.x # OK A.x # E: Access to generic class variables is ambiguous A.x = 1 # E: Access to generic class variables is ambiguous A[int, str].x # E: Access to generic class variables is ambiguous class Bad(A[int, str]): pass Bad.x # E: Access to generic class variables is ambiguous class Good(A[int, str]): x = 42 reveal_type(Good.x) # N: Revealed type is 'builtins.int' [builtins fixtures/classmethod.pyi] mypy-0.761/test-data/unit/check-columns.test0000644€tŠÔÚ€2›s®0000003204313576752246025223 0ustar jukkaDROPBOX\Domain Users00000000000000# Test column numbers in messages. --show-column-numbers is enabled implicitly by test runner. [case testColumnsSyntaxError] f() 1 + [out] main:2:5: error: invalid syntax [case testColumnsNestedFunctions] import typing def f() -> 'A': def g() -> 'B': return A() # E:16: Incompatible return value type (got "A", expected "B") return B() # E:12: Incompatible return value type (got "B", expected "A") class A: pass class B: pass [case testColumnsMethodDefaultArgumentsAndSignatureAsComment] import typing class A: def f(self, x = 1, y = 'hello'): # type: (int, str) -> str pass A().f() A().f(1) A().f('') # E:7: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" A().f(1, 1) # E:10: Argument 2 to "f" of "A" has incompatible type "int"; expected "str" (A().f(1, 'hello', 'hi')) # E:2: Too many arguments for "f" of "A" [case testColumnsInvalidArgumentType] # flags: --strict-optional def f(x: int, y: str) -> None: ... 
def g(*x: int) -> None: pass def h(**x: int) -> None: pass def ff(x: int) -> None: pass class A: x: str def __neg__(self) -> str: pass def __add__(self, other: int) -> str: pass def __lt__(self, other: int) -> str: pass f( y=0, x=0) # E:4: Argument "y" to "f" has incompatible type "int"; expected "str" f(x=0, y=None) # E:6: Argument "y" to "f" has incompatible type "None"; expected "str" g(1, '', 2) # E:6: Argument 2 to "g" has incompatible type "str"; expected "int" aaa: str h(x=1, y=aaa, z=2) # E:10: Argument "y" to "h" has incompatible type "str"; expected "int" a: A ff(a.x) # E:4: Argument 1 to "ff" has incompatible type "str"; expected "int" ff([1]) # E:4: Argument 1 to "ff" has incompatible type "List[int]"; expected "int" # TODO: Different column in Python 3.8+ #ff([1 for x in [1]]) # Argument 1 to "ff" has incompatible type "List[int]"; expected "int" ff({1: 2}) # E:4: Argument 1 to "ff" has incompatible type "Dict[int, int]"; expected "int" ff(1.1) # E:4: Argument 1 to "ff" has incompatible type "float"; expected "int" # TODO: Different column in Python 3.8+ #ff( ( 1, 1)) # Argument 1 to "ff" has incompatible type "Tuple[int, int]"; expected "int" ff(-a) # E:4: Argument 1 to "ff" has incompatible type "str"; expected "int" ff(a + 1) # E:4: Argument 1 to "ff" has incompatible type "str"; expected "int" ff(a < 1) # E:4: Argument 1 to "ff" has incompatible type "str"; expected "int" ff([''][0]) # E:4: Argument 1 to "ff" has incompatible type "str"; expected "int" class B(A): def f(self) -> None: ff(super().__neg__()) # E:12: Argument 1 to "ff" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testColumnsInvalidArgumentTypeVarArgs] def f(*x: int) -> None: pass def g(**x: int) -> None: pass a = [''] f(*a) # E:4: Argument 1 to "f" has incompatible type "*List[str]"; expected "int" b = {'x': 'y'} g(**b) # E:5: Argument 1 to "g" has incompatible type "**Dict[str, str]"; expected "int" [builtins fixtures/dict.pyi] [case 
testColumnsMultipleStatementsPerLine] x = 15 y = 'hello' if int(): x = 2; y = x; y += 1 [out] main:4:16: error: Incompatible types in assignment (expression has type "int", variable has type "str") main:4:24: error: Unsupported operand types for + ("str" and "int") [case testColumnsAssignment] class A: x = 0 A().x = '' # E:9: Incompatible types in assignment (expression has type "str", variable has type "int") a = [0] a[0] = '' # E:8: Incompatible types in assignment (expression has type "str", target has type "int") b = 0 c = 0 b, c = 0, '' # E:11: Incompatible types in assignment (expression has type "str", variable has type "int") b, c = '', 0 # E:8: Incompatible types in assignment (expression has type "str", variable has type "int") t = 0, '' b, c = t # E:8: Incompatible types in assignment (expression has type "str", variable has type "int") class B(A): x = '' # E:9: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [builtins fixtures/list.pyi] [case testColumnsAttributeIncompatibleWithBaseClassUsingAnnotation] class A: x: str class B(A): x: int # E:5: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "str") [case testColumnsSimpleIsinstance] import typing def f(x: object, n: int, s: str) -> None: if int(): n = x # E:13: Incompatible types in assignment (expression has type "object", variable has type "int") if isinstance(x, int): n = x s = x # E:17: Incompatible types in assignment (expression has type "int", variable has type "str") n = x # E:13: Incompatible types in assignment (expression has type "object", variable has type "int") [builtins fixtures/isinstance.pyi] [case testColumnHasNoAttribute] import m if int(): from m import foobaz # E:5: Module 'm' has no attribute 'foobaz'; maybe "foobar"? (1).x # E:2: "int" has no attribute "x" (m.foobaz()) # E:2: Module has no attribute "foobaz"; maybe "foobar"? 
[file m.py] def foobar(): pass [builtins fixtures/module.pyi] [case testColumnUnexpectedOrMissingKeywordArg] def f(): pass # TODO: Point to "x" instead (f(x=1)) # E:2: Unexpected keyword argument "x" for "f" def g(*, x: int) -> None: pass (g()) # E:2: Missing named argument "x" for "g" [case testColumnDefinedHere] class A: pass if int(): def f(a: 'A') -> None: pass # N:5: "f" defined here (f(b=object())) # E:6: Unexpected keyword argument "b" for "f" [case testColumnInvalidType] from typing import Iterable bad = 0 def f(x: bad): # E:10: Variable "__main__.bad" is not valid as a type y: bad # E:8: Variable "__main__.bad" is not valid as a type if int(): def g(x): # E:5: Variable "__main__.bad" is not valid as a type # type: (bad) -> None y = 0 # type: bad # E:9: Variable "__main__.bad" is not valid as a type z: Iterable[bad] # E:13: Variable "__main__.bad" is not valid as a type h: bad[int] # E:4: Variable "__main__.bad" is not valid as a type [case testColumnInvalidType_python2] from typing import Iterable bad = 0 if int(): def g(x): # E:5: Variable "__main__.bad" is not valid as a type # type: (bad) -> None y = 0 # type: bad # E:9: Variable "__main__.bad" is not valid as a type z = () # type: Iterable[bad] # E:5: Variable "__main__.bad" is not valid as a type [case testColumnFunctionMissingTypeAnnotation] # flags: --disallow-untyped-defs if int(): def f(x: int): # E:5: Function is missing a return type annotation pass def g(x): # E:5: Function is missing a type annotation pass [case testColumnNameIsNotDefined] ((x)) # E:3: Name 'x' is not defined [case testColumnNeedTypeAnnotation] if 1: x = [] # E:5: Need type annotation for 'x' (hint: "x: List[] = ...") [builtins fixtures/list.pyi] [case testColumnCallToUntypedFunction] # flags: --disallow-untyped-calls def f() -> None: (g(1)) # E:6: Call to untyped function "g" in typed context def g(x): pass [case testColumnInvalidArguments] def f(x, y): pass (f()) # E:2: Too few arguments for "f" (f(y=1)) # E:2: Missing 
positional argument "x" in call to "f" [case testColumnTooFewSuperArgs_python2] class A: def f(self): pass class B(A): def f(self): # type: () -> None super().f() # E:9: Too few arguments for "super" [case testColumnListOrDictItemHasIncompatibleType] from typing import List, Dict x: List[int] = [ 'x', # E:5: List item 0 has incompatible type "str"; expected "int" 1.1] # E:7: List item 1 has incompatible type "float"; expected "int" y: Dict[int, int] = { 'x': 1 # E:5: Dict entry 0 has incompatible type "str": "int"; expected "int": "int" } [builtins fixtures/dict.pyi] [case testColumnCannotDetermineType] (x) # E:2: Cannot determine type of 'x' x = None [case testColumnInvalidIndexing] from typing import List ([1]['']) # E:6: Invalid index type "str" for "List[int]"; expected type "int" (1[1]) # E:2: Value of type "int" is not indexable def f() -> None: 1[1] = 1 # E:5: Unsupported target for indexed assignment [builtins fixtures/list.pyi] [case testColumnTypedDict] from typing import TypedDict class D(TypedDict): x: int t: D = {'x': 'y'} # E:5: Incompatible types (expression has type "str", TypedDict item "x" has type "int") if int(): del t['y'] # E:5: TypedDict "D" has no key 'y' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testColumnSignatureIncompatibleWithSuperType] class A: def f(self, x: int) -> None: pass class B(A): def f(self, x: str) -> None: pass # E:5: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "int" class C(A): def f(self, x: int) -> int: pass # E:5: Return type "int" of "f" incompatible with return type "None" in supertype "A" class D(A): def f(self) -> None: pass # E:5: Signature of "f" incompatible with supertype "A" [case testColumnMissingTypeParameters] # flags: --disallow-any-generics from typing import List, Callable def f(x: List) -> None: pass # E:10: Missing type parameters for generic type "List" def g(x: list) -> None: pass # E:10: Implicit generic "Any". 
Use "typing.List" and specify generic parameters if int(): c: Callable # E:8: Missing type parameters for generic type "Callable" [builtins fixtures/list.pyi] [case testColumnIncompatibleDefault] if int(): def f(x: int = '') -> None: # E:20: Incompatible default for argument "x" (default has type "str", argument has type "int") pass [case testColumnMissingProtocolMember] from typing import Protocol class P(Protocol): x: int y: int class C: x: int p: P if int(): p = C() # E:9: Incompatible types in assignment (expression has type "C", variable has type "P") \ # N:9: 'C' is missing following 'P' protocol member: \ # N:9: y [case testColumnRedundantCast] # flags: --warn-redundant-casts from typing import cast y = 1 x = cast(int, y) # E:5: Redundant cast to "int" [case testColumnTypeSignatureHasTooFewArguments] if int(): def f(x, y): # E:5: Type signature has too few arguments # type: (int) -> None pass [case testColumnTypeSignatureHasTooFewArguments_python2] if int(): def f(x, y): # E:5: Type signature has too few arguments # type: (int) -> None pass [case testColumnRevealedType] if int(): reveal_type(1) # N:17: Revealed type is 'Literal[1]?' [case testColumnNonOverlappingEqualityCheck] # flags: --strict-equality if 1 == '': # E:4: Non-overlapping equality check (left operand type: "Literal[1]", right operand type: "Literal['']") pass [builtins fixtures/bool.pyi] [case testColumnValueOfTypeVariableCannotBe] from typing import TypeVar, Generic T = TypeVar('T', int, str) class C(Generic[T]): pass def f(c: C[object]) -> None: pass # E:10: Value of type variable "T" of "C" cannot be "object" (C[object]()) # E:2: Value of type variable "T" of "C" cannot be "object" [case testColumnSyntaxErrorInTypeAnnotation] if int(): def f(x # type: int, ): pass [out] main:2:11: error: Syntax error in type annotation main:2:11: note: Suggestion: Is there a spurious trailing comma? 
[case testColumnSyntaxErrorInTypeAnnotation2] if int(): # TODO: It would be better to point to the type comment xyz = 0 # type: blurbnard blarb [out] main:3:5: error: syntax error in type comment 'blurbnard blarb' [case testColumnProperty] class A: @property def x(self) -> int: pass @x.setter def x(self, x: int) -> None: pass class B(A): @property # E:6: Read-only property cannot override read-write property def x(self) -> int: pass [builtins fixtures/property.pyi] [case testColumnProperty_python2] class A: @property def x(self): # type: () -> int pass @x.setter def x(self, x): # type: (int) -> None pass class B(A): @property # E:5: Read-only property cannot override read-write property def x(self): # type: () -> int pass [builtins_py2 fixtures/property_py2.pyi] [case testColumnOverloaded] from typing import overload, Any class A: @overload # E:6: An overloaded function outside a stub file must have an implementation def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass [case testColumnFunctionWithTypeVarValues] from typing import TypeVar, List T = TypeVar('T', int, str) def g(x): pass def f(x: T) -> T: (x.bad) # E:6: "int" has no attribute "bad" \ # E:6: "str" has no attribute "bad" g(y=x) # E:5: Unexpected keyword argument "y" for "g" y: List[int, str] # E:8: "list" expects 1 type argument, but 2 given del 1[0] # E:5: "int" has no attribute "__delitem__" bb: List[int] = [''] # E:22: List item 0 has incompatible type "str"; expected "int" # XXX: Disabled because the column differs in 3.8 # aa: List[int] = ['' for x in [1]] # :22: List comprehension has incompatible type List[str]; expected List[int] cc = (1).bad # E:11: "int" has no attribute "bad" n: int = '' # E:14: Incompatible types in assignment (expression has type "str", variable has type "int") return x [builtins fixtures/list.pyi] [case testColumnReturnValueExpected] def f() -> int: return # E:5: Return value expected 
mypy-0.761/test-data/unit/check-ctypes.test0000644€tŠÔÚ€2›s®0000001717713576752246025065 0ustar jukkaDROPBOX\Domain Users00000000000000[case testCtypesArrayStandardElementType] import ctypes class MyCInt(ctypes.c_int): pass intarr4 = ctypes.c_int * 4 a = intarr4(1, ctypes.c_int(2), MyCInt(3), 4) intarr4(1, 2, 3, "invalid") # E: Array constructor argument 4 of type "str" is not convertible to the array element type "c_int" reveal_type(a) # N: Revealed type is 'ctypes.Array[ctypes.c_int*]' reveal_type(a[0]) # N: Revealed type is 'builtins.int' reveal_type(a[1:3]) # N: Revealed type is 'builtins.list[builtins.int]' a[0] = 42 a[1] = ctypes.c_int(42) a[2] = MyCInt(42) a[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches argument types "int", "bytes" \ # N: Possible overload variants: \ # N: def __setitem__(self, int, Union[c_int, int]) -> None \ # N: def __setitem__(self, slice, List[Union[c_int, int]]) -> None for x in a: reveal_type(x) # N: Revealed type is 'builtins.int*' [builtins fixtures/floatdict.pyi] [case testCtypesArrayCustomElementType] import ctypes from typing import Union, List class MyCInt(ctypes.c_int): pass myintarr4 = MyCInt * 4 mya = myintarr4(1, 2, MyCInt(3), 4) myintarr4(1, ctypes.c_int(2), MyCInt(3), "invalid") # E: Array constructor argument 2 of type "c_int" is not convertible to the array element type "MyCInt" \ # E: Array constructor argument 4 of type "str" is not convertible to the array element type "MyCInt" reveal_type(mya) # N: Revealed type is 'ctypes.Array[__main__.MyCInt*]' reveal_type(mya[0]) # N: Revealed type is '__main__.MyCInt*' reveal_type(mya[1:3]) # N: Revealed type is 'builtins.list[__main__.MyCInt*]' mya[0] = 42 mya[1] = ctypes.c_int(42) # E: No overload variant of "__setitem__" of "Array" matches argument types "int", "c_int" \ # N: Possible overload variants: \ # N: def __setitem__(self, int, Union[MyCInt, int]) -> None \ # N: def __setitem__(self, slice, List[Union[MyCInt, int]]) -> None mya[2] = 
MyCInt(42) mya[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches argument types "int", "bytes" \ # N: Possible overload variants: \ # N: def __setitem__(self, int, Union[MyCInt, int]) -> None \ # N: def __setitem__(self, slice, List[Union[MyCInt, int]]) -> None for myx in mya: reveal_type(myx) # N: Revealed type is '__main__.MyCInt*' myu: Union[ctypes.Array[ctypes.c_int], List[str]] for myi in myu: reveal_type(myi) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' [builtins fixtures/floatdict.pyi] [case testCtypesArrayUnionElementType] import ctypes from typing import Union class MyCInt(ctypes.c_int): pass mya: ctypes.Array[Union[MyCInt, ctypes.c_uint]] reveal_type(mya) # N: Revealed type is 'ctypes.Array[Union[__main__.MyCInt, ctypes.c_uint]]' reveal_type(mya[0]) # N: Revealed type is 'Union[__main__.MyCInt, builtins.int]' reveal_type(mya[1:3]) # N: Revealed type is 'builtins.list[Union[__main__.MyCInt, builtins.int]]' # The behavior here is not strictly correct, but intentional. # See the comment in mypy.plugins.ctypes._autoconvertible_to_cdata for details. 
mya[0] = 42 mya[1] = ctypes.c_uint(42) mya[2] = MyCInt(42) mya[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches argument types "int", "bytes" \ # N: Possible overload variants: \ # N: def __setitem__(self, int, Union[MyCInt, int, c_uint]) -> None \ # N: def __setitem__(self, slice, List[Union[MyCInt, int, c_uint]]) -> None for myx in mya: reveal_type(myx) # N: Revealed type is 'Union[__main__.MyCInt, builtins.int]' [builtins fixtures/floatdict.pyi] [case testCtypesCharArrayAttrs] import ctypes ca = (ctypes.c_char * 4)(b'a', b'b', b'c', b'\x00') reveal_type(ca.value) # N: Revealed type is 'builtins.bytes' reveal_type(ca.raw) # N: Revealed type is 'builtins.bytes' [builtins fixtures/floatdict.pyi] [case testCtypesCharPArrayDoesNotCrash] import ctypes # The following line used to crash with "Could not find builtin symbol 'NoneType'" ca = (ctypes.c_char_p * 0)() [builtins fixtures/floatdict.pyi] [case testCtypesCharArrayAttrsPy2] # flags: --py2 import ctypes ca = (ctypes.c_char * 4)('a', 'b', 'c', '\x00') reveal_type(ca.value) # N: Revealed type is 'builtins.str' reveal_type(ca.raw) # N: Revealed type is 'builtins.str' [builtins_py2 fixtures/floatdict_python2.pyi] [case testCtypesWcharArrayAttrs] import ctypes wca = (ctypes.c_wchar * 4)('a', 'b', 'c', '\x00') reveal_type(wca.value) # N: Revealed type is 'builtins.str' wca.raw # E: Array attribute "raw" is only available with element type "c_char", not "c_wchar" [builtins fixtures/floatdict.pyi] [case testCtypesWcharArrayAttrsPy2] # flags: --py2 import ctypes wca = (ctypes.c_wchar * 4)(u'a', u'b', u'c', u'\x00') reveal_type(wca.value) # N: Revealed type is 'builtins.unicode' wca.raw # E: Array attribute "raw" is only available with element type "c_char", not "c_wchar" [builtins_py2 fixtures/floatdict_python2.pyi] [case testCtypesCharUnionArrayAttrs] import ctypes from typing import Union cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_wchar]] reveal_type(cua.value) # N: Revealed type is 
'Union[builtins.bytes, builtins.str]' cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_wchar]" [builtins fixtures/floatdict.pyi] [case testCtypesAnyUnionArrayAttrs] import ctypes from typing import Any, Union caa: ctypes.Array[Union[ctypes.c_char, Any]] reveal_type(caa.value) # N: Revealed type is 'Union[builtins.bytes, Any]' reveal_type(caa.raw) # N: Revealed type is 'builtins.bytes' [builtins fixtures/floatdict.pyi] [case testCtypesOtherUnionArrayAttrs] import ctypes from typing import Union cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_int]] cua.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "Union[c_char, c_int]" cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_int]" [builtins fixtures/floatdict.pyi] [case testCtypesAnyArrayAttrs] import ctypes aa: ctypes.Array[Any] reveal_type(aa.value) # N: Revealed type is 'Any' reveal_type(aa.raw) # N: Revealed type is 'builtins.bytes' [builtins fixtures/floatdict.pyi] [case testCtypesOtherArrayAttrs] import ctypes oa = (ctypes.c_int * 4)(1, 2, 3, 4) oa.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "c_int" oa.raw # E: Array attribute "raw" is only available with element type "c_char", not "c_int" [builtins fixtures/floatdict.pyi] [case testCtypesArrayConstructorStarargs] import ctypes intarr4 = ctypes.c_int * 4 intarr6 = ctypes.c_int * 6 int_values = [1, 2, 3, 4] c_int_values = [ctypes.c_int(1), ctypes.c_int(2), ctypes.c_int(3), ctypes.c_int(4)] reveal_type(intarr4(*int_values)) # N: Revealed type is 'ctypes.Array[ctypes.c_int*]' reveal_type(intarr4(*c_int_values)) # N: Revealed type is 'ctypes.Array[ctypes.c_int*]' reveal_type(intarr6(1, ctypes.c_int(2), *int_values)) # N: Revealed type is 'ctypes.Array[ctypes.c_int*]' reveal_type(intarr6(1, ctypes.c_int(2), *c_int_values)) # N: Revealed type is 
'ctypes.Array[ctypes.c_int*]' float_values = [1.0, 2.0, 3.0, 4.0] intarr4(*float_values) # E: Array constructor argument 1 of type "List[float]" is not convertible to the array element type "Iterable[c_int]" [builtins fixtures/floatdict.pyi] [case testCtypesArrayConstructorKwargs] import ctypes intarr4 = ctypes.c_int * 4 x = {"a": 1, "b": 2} intarr4(**x) # E: Too many arguments for "Array" [builtins fixtures/floatdict.pyi] mypy-0.761/test-data/unit/check-custom-plugin.test0000644€tŠÔÚ€2›s®0000005216213576752246026355 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for user-defined plugins -- -- Note: Plugins used by tests live under test-data/unit/plugins. Defining -- plugin files in test cases does not work reliably. [case testFunctionPluginFile] # flags: --config-file tmp/mypy.ini def f() -> str: ... reveal_type(f()) # N: Revealed type is 'builtins.int' [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/fnplugin.py [case testFunctionPlugin] # flags: --config-file tmp/mypy.ini def f() -> str: ... reveal_type(f()) # N: Revealed type is 'builtins.int' [file mypy.ini] \[mypy] plugins=fnplugin [case testFunctionPluginFullnameIsNotNone] # flags: --config-file tmp/mypy.ini from typing import Callable, TypeVar f: Callable[[], None] T = TypeVar('T') def g(x: T) -> T: return x # This strips out the name of a callable g(f)() [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/fnplugin.py [case testTwoPlugins] # flags: --config-file tmp/mypy.ini def f(): ... def g(): ... def h(): ... reveal_type(f()) # N: Revealed type is 'builtins.int' reveal_type(g()) # N: Revealed type is 'builtins.str' reveal_type(h()) # N: Revealed type is 'Any' [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/fnplugin.py, /test-data/unit/plugins/plugin2.py [case testTwoPluginsMixedType] # flags: --config-file tmp/mypy.ini def f(): ... def g(): ... def h(): ... 
reveal_type(f()) # N: Revealed type is 'builtins.int' reveal_type(g()) # N: Revealed type is 'builtins.str' reveal_type(h()) # N: Revealed type is 'Any' [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/fnplugin.py, plugin2 [case testMissingPluginFile] # flags: --config-file tmp/mypy.ini [file mypy.ini] \[mypy] plugins=missing.py [out] tmp/mypy.ini:2: error: Can't find plugin 'tmp/missing.py' --' (work around syntax highlighting) [case testMissingPlugin] # flags: --config-file tmp/mypy.ini [file mypy.ini] \[mypy] plugins=missing [out] tmp/mypy.ini:2: error: Error importing plugin 'missing': No module named 'missing' [case testMultipleSectionsDefinePlugin] # flags: --config-file tmp/mypy.ini [file mypy.ini] \[acme] plugins=acmeplugin \[mypy] plugins=missing.py \[another] plugins=another_plugin [out] tmp/mypy.ini:4: error: Can't find plugin 'tmp/missing.py' --' (work around syntax highlighting) [case testInvalidPluginExtension] # flags: --config-file tmp/mypy.ini [file mypy.ini] \[mypy] plugins=dir/badext.pyi [file dir/badext.pyi] [out] tmp/mypy.ini:2: error: Plugin 'badext.pyi' does not have a .py extension [case testMissingPluginEntryPoint] # flags: --config-file tmp/mypy.ini [file mypy.ini] \[mypy] plugins = /test-data/unit/plugins/noentry.py [out] tmp/mypy.ini:2: error: Plugin '/test-data/unit/plugins/noentry.py' does not define entry point function "plugin" [case testCustomPluginEntryPointFile] # flags: --config-file tmp/mypy.ini def f() -> str: ... reveal_type(f()) # N: Revealed type is 'builtins.int' [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/customentry.py:register [case testCustomPluginEntryPoint] # flags: --config-file tmp/mypy.ini def f() -> str: ... 
reveal_type(f()) # N: Revealed type is 'builtins.int' [file mypy.ini] \[mypy] plugins=customentry:register [case testInvalidPluginEntryPointReturnValue] # flags: --config-file tmp/mypy.ini def f(): pass f() [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/badreturn.py [out] tmp/mypy.ini:3: error: Type object expected as the return value of "plugin"; got None (in /test-data/unit/plugins/badreturn.py) [case testInvalidPluginEntryPointReturnValue2] # flags: --config-file tmp/mypy.ini def f(): pass f() [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/badreturn2.py [out] tmp/mypy.ini:2: error: Return value of "plugin" must be a subclass of "mypy.plugin.Plugin" (in /test-data/unit/plugins/badreturn2.py) [case testAttributeTypeHookPlugin] # flags: --config-file tmp/mypy.ini from typing import Callable from m import Signal, DerivedSignal s: Signal[Callable[[int], None]] = Signal() s(1) s('') # E: Argument 1 has incompatible type "str"; expected "int" ds: DerivedSignal[Callable[[int], None]] = DerivedSignal() ds('') # E: Argument 1 has incompatible type "str"; expected "int" [file m.py] from typing import TypeVar, Generic, Callable T = TypeVar('T', bound=Callable[..., None]) class Signal(Generic[T]): __call__: Callable[..., None] # This type is replaced by the plugin class DerivedSignal(Signal[T]): ... 
[file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/attrhook.py [case testAttributeHookPluginForDynamicClass] # flags: --config-file tmp/mypy.ini from m import Magic, DerivedMagic magic = Magic() reveal_type(magic.magic_field) # N: Revealed type is 'builtins.str' reveal_type(magic.non_magic_method()) # N: Revealed type is 'builtins.int' reveal_type(magic.non_magic_field) # N: Revealed type is 'builtins.int' magic.nonexistent_field # E: Field does not exist reveal_type(magic.fallback_example) # N: Revealed type is 'Any' reveal_type(DerivedMagic().magic_field) # N: Revealed type is 'builtins.str' [file m.py] from typing import Any class Magic: # Triggers plugin infrastructure: def __getattr__(self, x: Any) -> Any: ... def non_magic_method(self) -> int: ... non_magic_field: int class DerivedMagic(Magic): ... [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/attrhook2.py [case testTypeAnalyzeHookPlugin] # flags: --config-file tmp/mypy.ini from typing import Callable from mypy_extensions import DefaultArg from m import Signal s: Signal[[int, DefaultArg(str, 'x')]] = Signal() reveal_type(s) # N: Revealed type is 'm.Signal[def (builtins.int, x: builtins.str =)]' s.x # E: "Signal[Callable[[int, str], None]]" has no attribute "x" ss: Signal[int, str] # E: Invalid "Signal" type (expected "Signal[[t, ...]]") [file m.py] from typing import TypeVar, Generic, Callable T = TypeVar('T', bound=Callable[..., None]) class Signal(Generic[T]): __call__: Callable[..., None] [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/type_anal_hook.py [builtins fixtures/dict.pyi] [case testFunctionPluginHookForClass] # flags: --config-file tmp/mypy.ini import mod from mod import AttrInt Alias = AttrInt AnotherAlias = mod.Attr class C: x = Alias() y = mod.AttrInt(required=True) z = AnotherAlias(int, required=False) c = C() reveal_type(c.x) # N: Revealed type is 'Union[builtins.int, None]' reveal_type(c.y) # N: Revealed type is 'builtins.int*' reveal_type(c.z) # N: Revealed type is 
'Union[builtins.int*, None]' [file mod.py] from typing import Generic, TypeVar, Type T = TypeVar('T') class Attr(Generic[T]): def __init__(self, tp: Type[T], required: bool = False) -> None: pass def __get__(self, instance: object, owner: type) -> T: pass class AttrInt(Attr[int]): def __init__(self, required: bool = False) -> None: pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/class_callable.py [builtins fixtures/bool.pyi] [out] [case testFunctionPluginHookForReturnedCallable] # flags: --config-file tmp/mypy.ini from m import decorator1, decorator2 @decorator1() def f() -> None: pass @decorator2() def g() -> None: pass reveal_type(f) # N: Revealed type is 'def (*Any, **Any) -> builtins.str' reveal_type(g) # N: Revealed type is 'def (*Any, **Any) -> builtins.int' [file m.py] from typing import Callable def decorator1() -> Callable[..., Callable[..., int]]: pass def decorator2() -> Callable[..., Callable[..., int]]: pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/named_callable.py [case testFunctionMethodContextsHasArgNames] # flags: --config-file tmp/mypy.ini from mod import Class, func reveal_type(Class().method(arg1=1, arg2=2, classname='builtins.str')) # N: Revealed type is 'builtins.str' reveal_type(Class.myclassmethod(arg1=1, arg2=2, classname='builtins.str')) # N: Revealed type is 'builtins.str' reveal_type(Class.mystaticmethod(arg1=1, arg2=2, classname='builtins.str')) # N: Revealed type is 'builtins.str' reveal_type(Class.method(self=Class(), arg1=1, arg2=2, classname='builtins.str')) # N: Revealed type is 'builtins.str' reveal_type(func(arg1=1, arg2=2, classname='builtins.str')) # N: Revealed type is 'builtins.str' [file mod.py] from typing import Any class Class: def method(self, classname: str, arg1: Any, arg2: Any) -> Any: pass @classmethod def myclassmethod(cls, classname: str, arg1: Any, arg2: Any): pass @staticmethod def mystaticmethod(classname: str, arg1: Any, arg2: Any): pass def func(classname: str, arg1: Any, arg2: 
Any) -> Any: pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/arg_names.py [builtins fixtures/classmethod.pyi] [case testFunctionMethodContextsHasArgNamesPositionals] # flags: --config-file tmp/mypy.ini from mod import Class, func reveal_type(Class().method('builtins.str', arg1=1, arg2=2)) # N: Revealed type is 'builtins.str' reveal_type(Class.myclassmethod('builtins.str', arg1=1, arg2=2)) # N: Revealed type is 'builtins.str' reveal_type(Class.mystaticmethod('builtins.str', arg1=1, arg2=2)) # N: Revealed type is 'builtins.str' reveal_type(Class.method(Class(), 'builtins.str', arg1=1, arg2=2)) # N: Revealed type is 'builtins.str' reveal_type(func('builtins.str', arg1=1, arg2=2)) # N: Revealed type is 'builtins.str' [file mod.py] from typing import Any class Class: def method(self, classname: str, arg1: Any, arg2: Any) -> Any: pass @classmethod def myclassmethod(cls, classname: str, arg1: Any, arg2: Any): pass @staticmethod def mystaticmethod(classname: str, arg1: Any, arg2: Any): pass def func(classname: str, arg1: Any, arg2: Any) -> Any: pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/arg_names.py [builtins fixtures/classmethod.pyi] [case testFunctionMethodContextsHasArgNamesInitMethod] # flags: --config-file tmp/mypy.ini from mod import ClassInit, Outer reveal_type(ClassInit('builtins.str')) # N: Revealed type is 'builtins.str' reveal_type(ClassInit(classname='builtins.str')) # N: Revealed type is 'builtins.str' reveal_type(Outer.NestedClassInit(classname='builtins.str')) # N: Revealed type is 'builtins.str' [file mod.py] from typing import Any class ClassInit: def __init__(self, classname: str): pass class Outer: class NestedClassInit: def __init__(self, classname: str): pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/arg_names.py [case testFunctionMethodContextsHasArgNamesUnfilledArguments] # flags: --config-file tmp/mypy.ini from mod import ClassUnfilled, func_unfilled 
reveal_type(ClassUnfilled().method(classname='builtins.str', arg1=1)) # N: Revealed type is 'builtins.str' reveal_type(ClassUnfilled().method(arg2=1, classname='builtins.str')) # N: Revealed type is 'builtins.str' reveal_type(ClassUnfilled().method('builtins.str')) # N: Revealed type is 'builtins.str' reveal_type(func_unfilled(classname='builtins.str', arg1=1)) # N: Revealed type is 'builtins.str' reveal_type(func_unfilled(arg2=1, classname='builtins.str')) # N: Revealed type is 'builtins.str' reveal_type(func_unfilled('builtins.str')) # N: Revealed type is 'builtins.str' [file mod.py] from typing import Any class ClassUnfilled: def method(self, classname: str, arg1: Any = None, arg2: Any = None) -> Any: pass def func_unfilled(classname: str, arg1: Any = None, arg2: Any = None) -> Any: pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/arg_names.py [case testFunctionMethodContextsHasArgNamesStarExpressions] # flags: --config-file tmp/mypy.ini from mod import ClassStarExpr, func_star_expr reveal_type(ClassStarExpr().method(classname='builtins.str', arg1=1)) # N: Revealed type is 'builtins.str' reveal_type(ClassStarExpr().method('builtins.str', arg1=1)) # N: Revealed type is 'builtins.str' reveal_type(ClassStarExpr().method('builtins.str', arg1=1, arg2=1)) # N: Revealed type is 'builtins.str' reveal_type(ClassStarExpr().method('builtins.str', 2, 3, 4, arg1=1, arg2=1)) # N: Revealed type is 'builtins.str' reveal_type(func_star_expr(classname='builtins.str', arg1=1)) # N: Revealed type is 'builtins.str' reveal_type(func_star_expr('builtins.str', arg1=1)) # N: Revealed type is 'builtins.str' reveal_type(func_star_expr('builtins.str', 2, 3, 4, arg1=1, arg2=2)) # N: Revealed type is 'builtins.str' [file mod.py] from typing import Any class ClassStarExpr: def method(self, classname: str, *args, **kwargs) -> Any: pass def func_star_expr(classname: str, *args, **kwargs) -> Any: pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/arg_names.py [builtins 
fixtures/dict.pyi] [case testFunctionMethodContextArgNamesForInheritedMethods] # flags: --config-file tmp/mypy.ini from mod import ClassChild reveal_type(ClassChild().method(classname='builtins.str', arg1=1, arg2=1)) # N: Revealed type is 'builtins.str' reveal_type(ClassChild().method(arg1=1, classname='builtins.str', arg2=1)) # N: Revealed type is 'builtins.str' reveal_type(ClassChild().method('builtins.str', arg1=1, arg2=1)) # N: Revealed type is 'builtins.str' reveal_type(ClassChild.myclassmethod('builtins.str')) # N: Revealed type is 'builtins.str' [file mod.py] from typing import Any class Base: def method(self, classname: str, arg1: Any, arg2: Any) -> Any: pass @classmethod def myclassmethod(cls, classname: str) -> Any: pass class ClassChild(Base): pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/arg_names.py [builtins fixtures/classmethod.pyi] [case testMethodSignatureHook] # flags: --config-file tmp/mypy.ini from typing import Iterator class Foo: # Test that method signature hooks are applied in various cases: explicit method calls, and # implicit dunder method calls through language syntax. # The plugin's method signature hook should turn all str occurrences into int. def __init__(self) -> None: ... def __getitem__(self, index: str) -> str: ... def __setitem__(self, index: str, value: str) -> None: ... def __iter__(self) -> Iterator[str]: ... def __next__(self) -> str: ... def __call__(self, *args: str) -> str: ... def m(self, arg: str) -> str: ... 
foo = Foo() reveal_type(foo.m(2)) # N: Revealed type is 'builtins.int' reveal_type(foo[3]) # N: Revealed type is 'builtins.int' reveal_type(foo(4, 5, 6)) # N: Revealed type is 'builtins.int' foo[4] = 5 for x in foo: reveal_type(x) # N: Revealed type is 'builtins.int*' [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/method_sig_hook.py [case testMethodSignatureHookNamesFullyQualified] # flags: --config-file tmp/mypy.ini from mypy_extensions import TypedDict from typing import NamedTuple class FullyQualifiedTestClass: @classmethod def class_method(self) -> str: ... def instance_method(self) -> str: ... class FullyQualifiedTestTypedDict(TypedDict): foo: str FullyQualifiedTestNamedTuple = NamedTuple('FullyQualifiedTestNamedTuple', [('foo', str)]) # Check the return types to ensure that the method signature hook is called in each case reveal_type(FullyQualifiedTestClass.class_method()) # N: Revealed type is 'builtins.int' reveal_type(FullyQualifiedTestClass().instance_method()) # N: Revealed type is 'builtins.int' reveal_type(FullyQualifiedTestNamedTuple('')._asdict()) # N: Revealed type is 'builtins.int' [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/fully_qualified_test_hook.py [builtins fixtures/classmethod.pyi] [case testDynamicClassPlugin] # flags: --config-file tmp/mypy.ini from mod import declarative_base, Column, Instr Base = declarative_base() class Model(Base): x: Column[int] class Other: x: Column[int] reveal_type(Model().x) # N: Revealed type is 'mod.Instr[builtins.int]' reveal_type(Other().x) # N: Revealed type is 'mod.Column[builtins.int]' [file mod.py] from typing import Generic, TypeVar def declarative_base(): ... T = TypeVar('T') class Column(Generic[T]): ... class Instr(Generic[T]): ... 
[file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/dyn_class.py [case testDynamicClassPluginNegatives] # flags: --config-file tmp/mypy.ini from mod import declarative_base, Column, Instr, non_declarative_base Bad1 = non_declarative_base() Bad2 = Bad3 = declarative_base() class C1(Bad1): ... # E: Variable "__main__.Bad1" is not valid as a type \ # E: Invalid base class "Bad1" class C2(Bad2): ... # E: Variable "__main__.Bad2" is not valid as a type \ # E: Invalid base class "Bad2" class C3(Bad3): ... # E: Variable "__main__.Bad3" is not valid as a type \ # E: Invalid base class "Bad3" [file mod.py] from typing import Generic, TypeVar def declarative_base(): ... def non_declarative_base(): ... T = TypeVar('T') class Column(Generic[T]): ... class Instr(Generic[T]): ... [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/dyn_class.py [case testDynamicClassHookFromClassMethod] # flags: --config-file tmp/mypy.ini from mod import QuerySet, Manager MyManager = Manager.from_queryset(QuerySet) reveal_type(MyManager()) # N: Revealed type is '__main__.MyManager' reveal_type(MyManager().attr) # N: Revealed type is 'builtins.str' def func(manager: MyManager) -> None: reveal_type(manager) # N: Revealed type is '__main__.MyManager' reveal_type(manager.attr) # N: Revealed type is 'builtins.str' func(MyManager()) [file mod.py] from typing import Generic, TypeVar, Type class QuerySet: attr: str class Manager: @classmethod def from_queryset(cls, queryset_cls: Type[QuerySet]): ... [builtins fixtures/classmethod.pyi] [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/dyn_class_from_method.py [case testBaseClassPluginHookWorksIncremental] # flags: --config-file tmp/mypy.ini import a [file a.py] from base import Base class C(Base): ... [file a.py.2] from base import Base class C(Base): ... reveal_type(C().__magic__) Base.__magic__ [file base.py] from lib import declarative_base Base = declarative_base() [file lib.py] from typing import Any def declarative_base() -> Any: ... 
[file mypy.ini] \[mypy] python_version=3.6 plugins=/test-data/unit/plugins/common_api_incremental.py [out] [out2] tmp/a.py:3: note: Revealed type is 'builtins.str' tmp/a.py:4: error: "Type[Base]" has no attribute "__magic__" [case testArgKindsMethod] # flags: --config-file tmp/mypy.ini class Class: def method(self, *args, **kwargs): pass Class().method(1, *[2], **{'a': 1}) # E: [[0, 2], [4]] [builtins fixtures/dict.pyi] [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/arg_kinds.py [case testArgKindsFunction] # flags: --config-file tmp/mypy.ini def func(*args, **kwargs): pass func(1, 2, [3, 4], *[5, 6, 7], **{'a': 1}) # E: [[0, 0, 0, 2], [4]] [builtins fixtures/dict.pyi] [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/arg_kinds.py [case testHookCallableInstance] # flags: --config-file tmp/mypy.ini from typing import Generic, TypeVar T = TypeVar("T") class Class(Generic[T]): def __init__(self, one: T): ... def __call__(self, two: T) -> int: ... reveal_type(Class("hi")("there")) # N: Revealed type is 'builtins.str*' instance = Class(3.14) reveal_type(instance(2)) # N: Revealed type is 'builtins.float*' [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/callable_instance.py [case testGetMethodHooksOnUnions] # flags: --config-file tmp/mypy.ini --no-strict-optional from typing import Union class Foo: def meth(self, x: str) -> str: ... class Bar: def meth(self, x: int) -> float: ... class Other: meth: int x: Union[Foo, Bar, Other] if isinstance(x.meth, int): reveal_type(x.meth) # N: Revealed type is 'builtins.int' else: reveal_type(x.meth(int())) # N: Revealed type is 'builtins.int' [builtins fixtures/isinstancelist.pyi] [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/union_method.py [case testGetMethodHooksOnUnionsStrictOptional] # flags: --config-file tmp/mypy.ini --strict-optional from typing import Union class Foo: def meth(self, x: str) -> str: ... class Bar: def meth(self, x: int) -> float: ... 
class Other: meth: int x: Union[Foo, Bar, Other] if isinstance(x.meth, int): reveal_type(x.meth) # N: Revealed type is 'builtins.int' else: reveal_type(x.meth(int())) # N: Revealed type is 'builtins.int' [builtins fixtures/isinstancelist.pyi] [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/union_method.py [case testGetMethodHooksOnUnionsSpecial] # flags: --config-file tmp/mypy.ini from typing import Union class Foo: def __getitem__(self, x: str) -> str: ... class Bar: def __getitem__(self, x: int) -> float: ... x: Union[Foo, Bar] reveal_type(x[int()]) # N: Revealed type is 'builtins.int' [builtins fixtures/isinstancelist.pyi] [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/union_method.py [case testPluginDependencies] # flags: --config-file tmp/mypy.ini # The top level file here doesn't do anything, but the plugin should add # a dependency on err that will cause err to be processed and an error reported. [file err.py] 1 + 'lol' # E: Unsupported operand types for + ("int" and "str") [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/depshook.py [case testCustomizeMroTrivial] # flags: --config-file tmp/mypy.ini class A: pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/customize_mro.py mypy-0.761/test-data/unit/check-dataclasses.test0000644€tŠÔÚ€2›s®0000004737413576752246026047 0ustar jukkaDROPBOX\Domain Users00000000000000[case testDataclassesBasic] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class Person: name: str age: int def summary(self): return "%s is %d years old." 
% (self.name, self.age) reveal_type(Person) # N: Revealed type is 'def (name: builtins.str, age: builtins.int) -> __main__.Person' Person('John', 32) Person('Jonh', 21, None) # E: Too many arguments for "Person" [builtins fixtures/list.pyi] [case testDataclassesCustomInit] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class A: a: int def __init__(self, a: str) -> None: pass A('1') [builtins fixtures/list.pyi] [case testDataclassesBasicInheritance] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class Mammal: age: int @dataclass class Person(Mammal): name: str def summary(self): return "%s is %d years old." % (self.name, self.age) reveal_type(Person) # N: Revealed type is 'def (age: builtins.int, name: builtins.str) -> __main__.Person' Mammal(10) Person(32, 'John') Person(21, 'Jonh', None) # E: Too many arguments for "Person" [builtins fixtures/list.pyi] [case testDataclassesDeepInheritance] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class A: a: int @dataclass class B(A): b: int @dataclass class C(B): c: int @dataclass class D(C): d: int reveal_type(A) # N: Revealed type is 'def (a: builtins.int) -> __main__.A' reveal_type(B) # N: Revealed type is 'def (a: builtins.int, b: builtins.int) -> __main__.B' reveal_type(C) # N: Revealed type is 'def (a: builtins.int, b: builtins.int, c: builtins.int) -> __main__.C' reveal_type(D) # N: Revealed type is 'def (a: builtins.int, b: builtins.int, c: builtins.int, d: builtins.int) -> __main__.D' [builtins fixtures/list.pyi] [case testDataclassesOverriding] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class Mammal: age: int @dataclass class Person(Mammal): name: str age: int @dataclass class SpecialPerson(Person): special_factor: float @dataclass class ExtraSpecialPerson(SpecialPerson): age: int special_factor: float name: str reveal_type(Person) # N: Revealed type is 'def (age: builtins.int, name: builtins.str) -> 
__main__.Person' reveal_type(SpecialPerson) # N: Revealed type is 'def (age: builtins.int, name: builtins.str, special_factor: builtins.float) -> __main__.SpecialPerson' reveal_type(ExtraSpecialPerson) # N: Revealed type is 'def (age: builtins.int, name: builtins.str, special_factor: builtins.float) -> __main__.ExtraSpecialPerson' Person(32, 'John') Person(21, 'John', None) # E: Too many arguments for "Person" SpecialPerson(21, 'John', 0.5) ExtraSpecialPerson(21, 'John', 0.5) [builtins fixtures/list.pyi] [case testDataclassesOverridingWithDefaults] # Issue #5681 https://github.com/python/mypy/issues/5681 # flags: --python-version 3.6 from dataclasses import dataclass from typing import Any @dataclass class Base: some_int: Any some_str: str = 'foo' @dataclass class C(Base): some_int: int reveal_type(C) # N: Revealed type is 'def (some_int: builtins.int, some_str: builtins.str =) -> __main__.C' [builtins fixtures/list.pyi] [case testDataclassesFreezing] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass(frozen=True) class Person: name: str john = Person('John') john.name = 'Ben' # E: Property "name" defined in "Person" is read-only [builtins fixtures/list.pyi] [case testDataclassesFields] # flags: --python-version 3.6 from dataclasses import dataclass, field @dataclass class Person: name: str age: int = field(default=0, init=False) reveal_type(Person) # N: Revealed type is 'def (name: builtins.str) -> __main__.Person' john = Person('John') john.age = 'invalid' # E: Incompatible types in assignment (expression has type "str", variable has type "int") john.age = 24 [builtins fixtures/list.pyi] [case testDataclassesBadInit] # flags: --python-version 3.6 from dataclasses import dataclass, field @dataclass class Person: name: str age: int = field(init=None) # E: No overload variant of "field" matches argument type "None" \ # N: Possible overload variant: \ # N: def field(*, init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: 
bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> Any \ # N: <2 more non-matching overloads not shown> [builtins fixtures/list.pyi] [case testDataclassesMultiInit] # flags: --python-version 3.6 from dataclasses import dataclass, field from typing import List @dataclass class Person: name: str age: int = field(init=False) friend_names: List[str] = field(init=True) enemy_names: List[str] reveal_type(Person) # N: Revealed type is 'def (name: builtins.str, friend_names: builtins.list[builtins.str], enemy_names: builtins.list[builtins.str]) -> __main__.Person' [builtins fixtures/list.pyi] [case testDataclassesMultiInitDefaults] # flags: --python-version 3.6 from dataclasses import dataclass, field from typing import List, Optional @dataclass class Person: name: str age: int = field(init=False) friend_names: List[str] = field(init=True) enemy_names: List[str] nickname: Optional[str] = None reveal_type(Person) # N: Revealed type is 'def (name: builtins.str, friend_names: builtins.list[builtins.str], enemy_names: builtins.list[builtins.str], nickname: Union[builtins.str, None] =) -> __main__.Person' [builtins fixtures/list.pyi] [case testDataclassesDefaults] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class Application: name: str = 'Unnamed' rating: int = 0 reveal_type(Application) # N: Revealed type is 'def (name: builtins.str =, rating: builtins.int =) -> __main__.Application' app = Application() [builtins fixtures/list.pyi] [case testDataclassesDefaultFactories] # flags: --python-version 3.6 from dataclasses import dataclass, field @dataclass class Application: name: str = 'Unnamed' rating: int = field(default_factory=int) rating_count: int = field() # E: Attributes without a default cannot follow attributes with one [builtins fixtures/list.pyi] [case testDataclassesDefaultFactoryTypeChecking] # flags: --python-version 3.6 from dataclasses import dataclass, field @dataclass class Application: name: str = 'Unnamed' rating: int = 
field(default_factory=str) # E: Incompatible types in assignment (expression has type "str", variable has type "int") [builtins fixtures/list.pyi] [case testDataclassesDefaultOrdering] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class Application: name: str = 'Unnamed' rating: int # E: Attributes without a default cannot follow attributes with one [builtins fixtures/list.pyi] [case testDataclassesClassmethods] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class Application: name: str @classmethod def parse(cls, request: str) -> "Application": return cls(name='...') app = Application.parse('') [builtins fixtures/list.pyi] [builtins fixtures/classmethod.pyi] [case testDataclassesOverloadsAndClassmethods] # flags: --python-version 3.6 from dataclasses import dataclass from typing import overload, Union @dataclass class A: a: int b: str @classmethod def other(cls) -> str: return "..." @overload @classmethod def foo(cls, x: int) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... 
@classmethod def foo(cls, x: Union[int, str]) -> Union[int, str]: reveal_type(cls) # N: Revealed type is 'Type[__main__.A]' reveal_type(cls.other()) # N: Revealed type is 'builtins.str' return x reveal_type(A.foo(3)) # N: Revealed type is 'builtins.int' reveal_type(A.foo("foo")) # N: Revealed type is 'builtins.str' [builtins fixtures/classmethod.pyi] [case testDataclassesClassVars] # flags: --python-version 3.6 from dataclasses import dataclass from typing import ClassVar @dataclass class Application: name: str COUNTER: ClassVar[int] = 0 reveal_type(Application) # N: Revealed type is 'def (name: builtins.str) -> __main__.Application' application = Application("example") application.COUNTER = 1 # E: Cannot assign to class variable "COUNTER" via instance Application.COUNTER = 1 [builtins fixtures/list.pyi] [case testDataclassEquality] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class Application: name: str rating: int app1 = Application("example-1", 5) app2 = Application("example-2", 5) app1 == app2 app1 != app2 app1 == None # E: Unsupported operand types for == ("Application" and "None") [builtins fixtures/list.pyi] [case testDataclassCustomEquality] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass class Application: name: str rating: int def __eq__(self, other: 'Application') -> bool: ... app1 = Application("example-1", 5) app2 = Application("example-2", 5) app1 == app2 app1 != app2 # E: Unsupported left operand type for != ("Application") app1 == None # E: Unsupported operand types for == ("Application" and "None") class SpecializedApplication(Application): ... 
app1 == SpecializedApplication("example-3", 5) [builtins fixtures/list.pyi] [case testDataclassOrdering] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass(order=True) class Application: name: str rating: int app1 = Application('example-1', 5) app2 = Application('example-2', 5) app1 < app2 app1 > app2 app1 <= app2 app1 >= app2 app1 < 5 # E: Unsupported operand types for < ("Application" and "int") app1 > 5 # E: Unsupported operand types for > ("Application" and "int") app1 <= 5 # E: Unsupported operand types for <= ("Application" and "int") app1 >= 5 # E: Unsupported operand types for >= ("Application" and "int") class SpecializedApplication(Application): ... app3 = SpecializedApplication('example-3', 5) app1 < app3 app1 > app3 app1 <= app3 app1 >= app3 [builtins fixtures/list.pyi] [case testDataclassOrderingWithoutEquality] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass(eq=False, order=True) class Application: # E: eq must be True if order is True ... [builtins fixtures/list.pyi] [case testDataclassOrderingWithCustomMethods] # flags: --python-version 3.6 from dataclasses import dataclass @dataclass(order=True) class Application: def __lt__(self, other: 'Application') -> bool: # E: You may not have a custom __lt__ method when order=True ... 
[builtins fixtures/list.pyi] [case testDataclassDefaultsInheritance] # flags: --python-version 3.6 from dataclasses import dataclass from typing import Optional @dataclass(order=True) class Application: id: Optional[int] name: str @dataclass class SpecializedApplication(Application): rating: int = 0 reveal_type(SpecializedApplication) # N: Revealed type is 'def (id: Union[builtins.int, None], name: builtins.str, rating: builtins.int =) -> __main__.SpecializedApplication' [builtins fixtures/list.pyi] [case testDataclassGenerics] # flags: --python-version 3.6 from dataclasses import dataclass from typing import Generic, List, Optional, TypeVar T = TypeVar('T') @dataclass class A(Generic[T]): x: T y: T z: List[T] def foo(self) -> List[T]: return [self.x, self.y] def bar(self) -> T: return self.z[0] def problem(self) -> T: return self.z # E: Incompatible return value type (got "List[T]", expected "T") reveal_type(A) # N: Revealed type is 'def [T] (x: T`1, y: T`1, z: builtins.list[T`1]) -> __main__.A[T`1]' A(1, 2, ["a", "b"]) # E: Cannot infer type argument 1 of "A" a = A(1, 2, [1, 2]) reveal_type(a) # N: Revealed type is '__main__.A[builtins.int*]' reveal_type(a.x) # N: Revealed type is 'builtins.int*' reveal_type(a.y) # N: Revealed type is 'builtins.int*' reveal_type(a.z) # N: Revealed type is 'builtins.list[builtins.int*]' s: str = a.bar() # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/list.pyi] [case testDataclassGenericsClassmethod] # flags: --python-version 3.6 from dataclasses import dataclass from typing import Generic, TypeVar T = TypeVar('T') @dataclass class A(Generic[T]): x: T @classmethod def foo(cls) -> None: reveal_type(cls) # N: Revealed type is 'Type[__main__.A[T`1]]' cls.x # E: Access to generic instance variables via class is ambiguous @classmethod def other(cls, x: T) -> A[T]: ... 
reveal_type(A(0).other) # N: Revealed type is 'def (x: builtins.int*) -> __main__.A[builtins.int*]' [builtins fixtures/classmethod.pyi] [case testDataclassesForwardRefs] from dataclasses import dataclass @dataclass class A: b: 'B' @dataclass class B: x: int reveal_type(A) # N: Revealed type is 'def (b: __main__.B) -> __main__.A' A(b=B(42)) A(b=42) # E: Argument "b" to "A" has incompatible type "int"; expected "B" [builtins fixtures/list.pyi] [case testDataclassesInitVars] from dataclasses import InitVar, dataclass @dataclass class Application: name: str database_name: InitVar[str] reveal_type(Application) # N: Revealed type is 'def (name: builtins.str, database_name: builtins.str) -> __main__.Application' app = Application("example", 42) # E: Argument 2 to "Application" has incompatible type "int"; expected "str" app = Application("example", "apps") app.name app.database_name # E: "Application" has no attribute "database_name" @dataclass class SpecializedApplication(Application): rating: int reveal_type(SpecializedApplication) # N: Revealed type is 'def (name: builtins.str, database_name: builtins.str, rating: builtins.int) -> __main__.SpecializedApplication' app = SpecializedApplication("example", "apps", "five") # E: Argument 3 to "SpecializedApplication" has incompatible type "str"; expected "int" app = SpecializedApplication("example", "apps", 5) app.name app.rating app.database_name # E: "SpecializedApplication" has no attribute "database_name" [builtins fixtures/list.pyi] [case testDataclassesInitVarsAndDefer] from dataclasses import InitVar, dataclass defer: Yes @dataclass class Application: name: str database_name: InitVar[str] reveal_type(Application) # N: Revealed type is 'def (name: builtins.str, database_name: builtins.str) -> __main__.Application' app = Application("example", 42) # E: Argument 2 to "Application" has incompatible type "int"; expected "str" app = Application("example", "apps") app.name app.database_name # E: "Application" has no 
attribute "database_name" class Yes: ... [builtins fixtures/list.pyi] [case testDataclassesNoInitInitVarInheritance] from dataclasses import dataclass, field, InitVar @dataclass class Super: foo: InitVar = field(init=False) @dataclass class Sub(Super): bar: int sub = Sub(5) sub.foo # E: "Sub" has no attribute "foo" sub.bar [builtins fixtures/bool.pyi] [case testDataclassFactory] from typing import Type, TypeVar from dataclasses import dataclass T = TypeVar('T', bound='A') @dataclass class A: @classmethod def make(cls: Type[T]) -> T: reveal_type(cls) # N: Revealed type is 'Type[T`-1]' reveal_type(cls()) # N: Revealed type is 'T`-1' return cls() [builtins fixtures/classmethod.pyi] [case testNoComplainFieldNone] # flags: --python-version 3.6 # flags: --no-strict-optional from dataclasses import dataclass, field from typing import Optional @dataclass class Foo: bar: Optional[int] = field(default=None) [builtins fixtures/list.pyi] [out] [case testNoComplainFieldNoneStrict] # flags: --python-version 3.6 # flags: --strict-optional from dataclasses import dataclass, field from typing import Optional @dataclass class Foo: bar: Optional[int] = field(default=None) [builtins fixtures/list.pyi] [out] [case testDisallowUntypedWorksForward] # flags: --disallow-untyped-defs from dataclasses import dataclass from typing import List @dataclass class B: x: C class C(List[C]): pass reveal_type(B) # N: Revealed type is 'def (x: __main__.C) -> __main__.B' [builtins fixtures/list.pyi] [case testDisallowUntypedWorksForwardBad] # flags: --disallow-untyped-defs from dataclasses import dataclass @dataclass class B: x: Undefined # E: Name 'Undefined' is not defined y = undefined() # E: Name 'undefined' is not defined reveal_type(B) # N: Revealed type is 'def (x: Any) -> __main__.B' [builtins fixtures/list.pyi] [case testMemberExprWorksAsField] import dataclasses @dataclasses.dataclass class A: x: int = dataclasses.field(metadata={"doc": "foo"}) y: str @dataclasses.dataclass class B: x: int = 
dataclasses.field(init=False, default=1) y: str @dataclasses.dataclass class C: x: int = dataclasses.field(default=1) y: str = dataclasses.field(metadata={"doc": "foo"}) # E: Attributes without a default cannot follow attributes with one [builtins fixtures/dict.pyi] [case testDataclassOrderingDeferred] # flags: --python-version 3.6 from dataclasses import dataclass defer: Yes @dataclass(order=True) class Application: name: str rating: int a = Application('', 0) b = Application('', 0) a < b class Yes: ... [builtins fixtures/list.pyi] [case testDataclassFieldDeferred] from dataclasses import field, dataclass @dataclass class C: x: int = field(default=func()) def func() -> int: ... C('no') # E: Argument 1 to "C" has incompatible type "str"; expected "int" [builtins fixtures/bool.pyi] [case testDataclassFieldDeferredFrozen] from dataclasses import field, dataclass @dataclass(frozen=True) class C: x: int = field(default=func()) def func() -> int: ... c: C c.x = 1 # E: Property "x" defined in "C" is read-only [builtins fixtures/bool.pyi] [case testTypeInDataclassDeferredStar] import lib [file lib.py] from dataclasses import dataclass MYPY = False if MYPY: # Force deferral from other import * @dataclass class C: total: int C() # E: Too few arguments for "C" C('no') # E: Argument 1 to "C" has incompatible type "str"; expected "int" [file other.py] import lib [builtins fixtures/bool.pyi] [case testDeferredDataclassInitSignature] from dataclasses import dataclass from typing import Optional, Type @dataclass class C: x: Optional[int] = None y: Type[Deferred] = Deferred @classmethod def default(cls) -> C: return cls(x=None, y=None) class Deferred: pass [builtins fixtures/classmethod.pyi] [case testDeferredDataclassInitSignatureSubclass] # flags: --strict-optional from dataclasses import dataclass from typing import Optional @dataclass class B: x: Optional[C] @dataclass class C(B): y: str a = C(None, 'abc') [builtins fixtures/bool.pyi] [case testDataclassesDefaultsIncremental] 
# flags: --python-version 3.6 import a [file a.py] from dataclasses import dataclass from b import Person @dataclass class Asdf(Person): c: str = 'test' [file a.py.2] from dataclasses import dataclass from b import Person @dataclass class Asdf(Person): c: str = 'test' # asdf [file b.py] from dataclasses import dataclass @dataclass class Person: b: int a: str = 'test' [builtins fixtures/list.pyi] [case testDataclassesDefaultsMroOtherFile] # flags: --python-version 3.6 import a [file a.py] from dataclasses import dataclass from b import A1, A2 @dataclass class Asdf(A1, A2): # E: Attributes without a default cannot follow attributes with one pass [file b.py] from dataclasses import dataclass # a bunch of blank lines to make sure the error doesn't accidentally line up... @dataclass class A1: a: int @dataclass class A2: b: str = 'test' [builtins fixtures/list.pyi] [case testDataclassesInheritingDuplicateField] # see mypy issue #7792 from dataclasses import dataclass @dataclass class A: # E: Name 'x' already defined (possibly by an import) x: int = 0 x: int = 0 # E: Name 'x' already defined on line 6 @dataclass class B(A): pass mypy-0.761/test-data/unit/check-default-plugin.test0000644€tŠÔÚ€2›s®0000000222213576752246026457 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for the default plugin -- -- Note that we have additional test cases in pythoneval.test (that use real typeshed stubs). 
[case testContextManagerWithGenericFunction] from contextlib import contextmanager from typing import TypeVar, Iterator T = TypeVar('T') @contextmanager def yield_id(item: T) -> Iterator[T]: yield item reveal_type(yield_id) # N: Revealed type is 'def [T] (item: T`-1) -> contextlib.GeneratorContextManager[T`-1]' with yield_id(1) as x: reveal_type(x) # N: Revealed type is 'builtins.int*' f = yield_id def g(x, y): pass f = g # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[T], GeneratorContextManager[T]]") [typing fixtures/typing-full.pyi] [case testContextManagerWithUnspecifiedArguments] from contextlib import contextmanager from typing import Callable, Iterator c: Callable[..., Iterator[int]] reveal_type(c) # N: Revealed type is 'def (*Any, **Any) -> typing.Iterator[builtins.int]' reveal_type(contextmanager(c)) # N: Revealed type is 'def (*Any, **Any) -> contextlib.GeneratorContextManager[builtins.int*]' [typing fixtures/typing-full.pyi] mypy-0.761/test-data/unit/check-dynamic-typing.test0000644€tŠÔÚ€2›s®0000004001213576752246026472 0ustar jukkaDROPBOX\Domain Users00000000000000-- Assignment -- ---------- [case testAssignmentWithDynamic] from typing import Any d = None # type: Any a = None # type: A if int(): a = d # Everything ok if int(): d = a if int(): d = d d.x = a d.x = d class A: pass [case testMultipleAssignmentWithDynamic] from typing import Any d = None # type: Any a, b = None, None # type: (A, B) if int(): d, a = b, b # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): d, d = d, d, d # E: Too many values to unpack (2 expected, 3 provided) if int(): a, b = d, d if int(): d, d = a, b if int(): a, b = d s, t = d class A: pass class B: pass -- Expressions -- ----------- [case testCallingFunctionWithDynamicArgumentTypes] from typing import Any a, b = None, None # type: (A, B) if int(): b = f(a) # E: Incompatible types in assignment (expression has 
type "A", variable has type "B") if int(): a = f(a) if int(): a = f(b) if int(): a = f(None) if int(): a = f(f) def f(x: Any) -> 'A': pass class A: pass class B: pass [case testCallingWithDynamicReturnType] from typing import Any a, b = None, None # type: (A, B) a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" a = f(a) b = f(a) def f(x: 'A') -> Any: pass class A: pass class B: pass [case testBinaryOperationsWithDynamicLeftOperand] from typing import Any d = None # type: Any a = None # type: A c = None # type: C b = None # type: bool n = 0 d in a # E: Unsupported right operand type for in ("A") d and a d or a if int(): c = d and b # E: Incompatible types in assignment (expression has type "Union[Any, bool]", variable has type "C") if int(): c = d or b # E: Incompatible types in assignment (expression has type "Union[Any, bool]", variable has type "C") if int(): c = d + a if int(): c = d - a if int(): c = d * a if int(): c = d / a if int(): c = d // a if int(): c = d % a if int(): c = d ** a if int(): b = d == a if int(): b = d != a if int(): b = d < a if int(): b = d <= a if int(): b = d > a if int(): b = d >= a if int(): b = d in c if int(): b = d and b if int(): b = d or b class A: pass class C: def __contains__(self, a: A) -> bool: pass [file builtins.py] class object: def __init__(self): pass class bool: pass class int: pass class type: pass class function: pass class str: pass [case testBinaryOperationsWithDynamicAsRightOperand] from typing import Any d = None # type: Any a = None # type: A c = None # type: C b = None # type: bool n = 0 a and d a or d if int(): c = a in d # E: Incompatible types in assignment (expression has type "bool", variable has type "C") if int(): c = b and d # E: Incompatible types in assignment (expression has type "Union[bool, Any]", variable has type "C") if int(): c = b or d # E: Incompatible types in assignment (expression has type "Union[bool, Any]", variable has type "C") if int(): b = a + d if int(): b = a 
/ d if int(): c = a + d if int(): c = a - d if int(): c = a * d if int(): c = a / d if int(): c = a // d if int(): c = a % d if int(): c = a ** d if int(): b = a in d if int(): b = b and d if int(): b = b or d class A: def __add__(self, a: 'A') -> 'C': pass def __sub__(self, a: 'A') -> 'C': pass def __mul__(self, a: 'A') -> 'C': pass def __truediv__(self, a: 'A') -> 'C': pass def __floordiv__(self, a: 'A') -> 'C': pass def __mod__(self, a: 'A') -> 'C': pass def __pow__(self, a: 'A') -> 'C': pass def _lt(self, a: 'A') -> bool: pass def _gt(self, a: 'A') -> bool: pass class C: pass [file builtins.py] class object: def __init__(self): pass class bool: pass class int: pass class type: pass class function: pass class str: pass [case testDynamicWithUnaryExpressions] from typing import Any d = None # type: Any a = None # type: A b = None # type: bool if int(): a = not d # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): b = not d a = -d class A: pass [builtins fixtures/bool.pyi] [out] [case testDynamicWithMemberAccess] from typing import Any d = None # type: Any a = None # type: A if int(): a = d.foo(a()) # E: "A" not callable if int(): a = d.x if int(): a = d.foo(a, a) d.x = a d.x.y.z # E: "A" has no attribute "y" class A: pass [out] [case testIndexingWithDynamic] from typing import Any d = None # type: Any a = None # type: A if int(): a = d[a()] # E: "A" not callable d[a()] = a # E: "A" not callable if int(): a = d[a] d[a] = a d[a], d[a] = a, a class A: pass [case testTupleExpressionsWithDynamci] from typing import Tuple, Any t2 = None # type: Tuple[A, A] d = None # type: Any if int(): t2 = (d, d, d) # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[A, A]") if int(): t2 = (d, d) class A: pass [builtins fixtures/tuple.pyi] [case testCastsWithDynamicType] from typing import Any, cast class A: pass class B: pass d = None # type: Any a = None # type: A b = None # type: 
B if int(): b = cast(A, d) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = cast(A, d) if int(): b = cast(Any, d) if int(): a = cast(Any, f()) def f() -> None: pass [case testCompatibilityOfDynamicWithOtherTypes] from typing import Any, Tuple d = None # type: Any t = None # type: Tuple[A, A] # TODO: callable types, overloaded functions d = None # All ok d = t d = g d = A t = d f = d def g(a: 'A') -> None: pass class A: pass class B: pass [builtins fixtures/tuple.pyi] -- Statements -- ---------- [case testDynamicCondition] from typing import Any d = None # type: Any while d: pass if d: pass elif d: pass [builtins fixtures/bool.pyi] [case testRaiseWithDynamic] from typing import Any d = None # type: Any raise d [builtins fixtures/exception.pyi] [case testReturnWithDynamic] from typing import Any d = None # type: Any def f() -> None: return d # Ok def g() -> 'A': return d # Ok class A: pass -- Implicit dynamic types for functions -- ------------------------------------ [case testImplicitGlobalFunctionSignature] from typing import Any, Callable x = None # type: Any a = None # type: A g = None # type: Callable[[], None] h = None # type: Callable[[A], None] f() # E: Too few arguments for "f" f(x, x) # E: Too many arguments for "f" if int(): g = f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[], None]") f(a) f(x) if int(): a = f(a) if int(): h = f def f(x): pass class A: pass [case testImplicitGlobalFunctionSignatureWithDifferentArgCounts] from typing import Callable g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] a = None # type: A if int(): g1 = f0 # E: Incompatible types in assignment (expression has type "Callable[[], Any]", variable has type "Callable[[A], None]") if int(): g2 = f0 # E: Incompatible types in assignment (expression has type "Callable[[], Any]", variable has type 
"Callable[[A, A], None]") if int(): g0 = f2 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[], None]") if int(): g1 = f2 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[A], None]") if int(): g0 = g0 if int(): g2 = f2 f0() f2(a, a) def f0(): pass def f2(x, y): pass class A: pass [case testImplicitGlobalFunctionSignatureWithDefaultArgs] from typing import Callable a, b = None, None # type: (A, B) g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] g3 = None # type: Callable[[A, A, A], None] g4 = None # type: Callable[[A, A, A, A], None] f01(a, a) # E: Too many arguments for "f01" f13() # E: Too few arguments for "f13" f13(a, a, a, a) # E: Too many arguments for "f13" if int(): g2 = f01 # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[A, A], None]") if int(): g0 = f13 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any, Any], Any]", variable has type "Callable[[], None]") if int(): g4 = f13 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any, Any], Any]", variable has type "Callable[[A, A, A, A], None]") f01() f01(a) f13(a) f13(a, a) f13(a, a, a) if int(): g0 = f01 if int(): g1 = f01 if int(): g1 = f13 if int(): g2 = f13 if int(): g3 = f13 def f01(x = b): pass def f13(x, y = b, z = b): pass class A: pass class B: pass [case testSkipTypeCheckingWithImplicitSignature] a = None # type: A def f(): a() def g(x): a() a.x a + a if a(): a() class A: pass [builtins fixtures/bool.pyi] [case testSkipTypeCheckingWithImplicitSignatureAndDefaultArgs] a = None # type: A def f(x=a()): a() def g(x, y=a, z=a()): a() class A: pass [case testImplicitMethodSignature] from typing import Callable g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] g2 = 
None # type: Callable[[A, A], None] a = None # type: A if int(): g0 = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[], None]") if int(): g2 = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[A, A], None]") if int(): a = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "A") class A: def g(self) -> None: a = self.f(a) def f(self, x): pass if int(): g1 = a.f if int(): a = a.f(a) [case testSkipTypeCheckingImplicitMethod] a = None # type: A class A: def f(self): a() def g(self, x, y=a()): a() [case testImplicitInheritedMethod] from typing import Callable g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] a = None # type: A if int(): g0 = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[], None]") if int(): g1 = a.f if int(): a = a.f(a) class B: def f(self, x): pass class A(B): def g(self) -> None: a = self.f(a) [case testEmptyReturnWithImplicitSignature] import typing def f(): return class A: def g(self): return [case testVarArgsWithImplicitSignature] from typing import Any o = None # type: Any def f(x, *a): pass f() # E: Too few arguments for "f" f(o) f(o, o) f(o, o, o) [builtins fixtures/list.pyi] -- Implicit types for constructors -- ------------------------------- [case testInitMethodWithImplicitSignature] from typing import Callable f1 = None # type: Callable[[A], A] f2 = None # type: Callable[[A, A], A] a = None # type: A A(a) # E: Too few arguments for "A" if int(): f1 = A # E: Incompatible types in assignment (expression has type "Type[A]", variable has type "Callable[[A], A]") A(a, a) if int(): f2 = A class A: def __init__(self, a, b): pass [case testUsingImplicitTypeObjectWithIs] t = None # type: type t = A t = B class A: pass class B: def __init__(self): pass -- Type compatibility 
-- ------------------ [case testTupleTypeCompatibility] from typing import Any, Tuple t1 = None # type: Tuple[Any, A] t2 = None # type: Tuple[A, Any] t3 = None # type: Tuple[Any, Any] t4 = None # type: Tuple[A, A] t5 = None # type: Tuple[Any, Any, Any] def f(): t1, t2, t3, t4, t5 # Prevent redefinition t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[Any, Any]") t5 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[Any, Any, Any]") t1 = t1 t1 = t2 t1 = t3 t1 = t4 t2 = t1 t2 = t3 t2 = t4 t3 = t1 t3 = t2 t3 = t4 t4 = t1 t4 = t2 t4 = t3 class A: pass [builtins fixtures/tuple.pyi] [case testFunctionTypeCompatibilityAndReturnTypes] from typing import Any, Callable f1 = None # type: Callable[[], Any] f11 = None # type: Callable[[], Any] f2 = None # type: Callable[[], A] f3 = None # type: Callable[[], None] f2 = f3 f1 = f2 f1 = f3 f2 = f11 f3 = f11 class A: pass [case testFunctionTypeCompatibilityAndArgumentTypes] from typing import Any, Callable f1 = None # type: Callable[[A, Any], None] f2 = None # type: Callable[[Any, A], None] f3 = None # type: Callable[[A, A], None] f1 = f1 f1 = f2 f1 = f3 f2 = f1 f2 = f2 f2 = f3 f3 = f1 f3 = f2 f3 = f3 class A: pass [case testFunctionTypeCompatibilityAndArgumentCounts] from typing import Any, Callable f1 = None # type: Callable[[Any], None] f2 = None # type: Callable[[Any, Any], None] if int(): f1 = f2 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], None]", variable has type "Callable[[Any], None]") -- Overriding -- ---------- [case testOverridingMethodWithDynamicTypes] from typing import Any a, b = None, None # type: (A, B) b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A" a = a.f(b) class B: def f(self, x: 'A') -> 'B': pass def g(self, x: 'B') -> None: pass class A(B): def f(self, x: Any) -> Any: pass def g(self, x: Any) -> None: pass [case 
testOverridingMethodWithImplicitDynamicTypes] a, b = None, None # type: (A, B) b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A" a = a.f(b) class B: def f(self, x: 'A') -> 'B': pass def g(self, x: 'B') -> None: pass class A(B): def f(self, x): pass def g(self, x): pass [case testOverridingMethodAcrossHierarchy] import typing class C: def f(self, a: 'A') -> None: pass class B(C): def f(self, a): pass class A(B): def f(self, a: 'D') -> None: # E: Argument 1 of "f" is incompatible with supertype "C"; supertype defines the argument type as "A" pass class D: pass [out] [case testInvalidOverrideArgumentCountWithImplicitSignature1] import typing class B: def f(self, x: A) -> None: pass class A(B): def f(self, x, y): # dynamic function not type checked x() [out] [case testInvalidOverrideArgumentCountWithImplicitSignature2] import typing class B: def f(self, x, y): pass class A(B): def f(self, x: 'A') -> None: # E: Signature of "f" incompatible with supertype "B" pass [out] [case testInvalidOverrideArgumentCountWithImplicitSignature3] import typing class B: def f(self, x: A) -> None: pass class A(B): def f(self, x, y) -> None: # E: Signature of "f" incompatible with supertype "B" x() [out] [case testInvalidOverrideWithImplicitSignatureAndClassMethod1] class B: @classmethod def f(cls, x, y): pass class A(B): @classmethod def f(cls, x, y, z): pass # No error since no annotations [builtins fixtures/classmethod.pyi] [case testInvalidOverrideWithImplicitSignatureAndClassMethod2] class B: @classmethod def f(cls, x: int, y): pass class A(B): @classmethod def f(cls, x, y, z): pass # No error since no annotations [builtins fixtures/classmethod.pyi] [case testInvalidOverrideWithImplicitSignatureAndStaticMethod1] class B: @staticmethod def f(x, y): pass class A(B): @staticmethod def f(x, y, z): pass # No error since no annotations [builtins fixtures/classmethod.pyi] [case testInvalidOverrideWithImplicitSignatureAndStaticMethod2] class B: @staticmethod def 
f(self, x: int, y): pass class A(B): @staticmethod def f(self, x, y, z): pass # No error since no annotations [builtins fixtures/classmethod.pyi] -- Don't complain about too few/many arguments in dynamic functions -- ---------------------------------------------------------------- [case testTooManyArgsInDynamic] def f() -> None: pass def g(): f(1) # Silent [out] [case testTooFewArgsInDynamic] def f(a: int) -> None: pass def g(): f() # Silent [out] [case testJustRightInDynamic] def f(a: int) -> None: pass def g(): f('') # Silent [out] mypy-0.761/test-data/unit/check-enum.test0000644€tŠÔÚ€2›s®0000006700213576752246024512 0ustar jukkaDROPBOX\Domain Users00000000000000-- This test file checks Enum [case testEnumBasics] from enum import Enum class Medal(Enum): gold = 1 silver = 2 bronze = 3 reveal_type(Medal.bronze) # N: Revealed type is 'Literal[__main__.Medal.bronze]?' m = Medal.gold if int(): m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Medal") [case testEnumFromEnumMetaBasics] from enum import EnumMeta class Medal(metaclass=EnumMeta): gold = 1 silver = "hello" bronze = None # Without __init__ the definition fails at runtime, but we want to verify that mypy # uses `enum.EnumMeta` and not `enum.Enum` as the definition of what is enum. def __init__(self, *args): pass reveal_type(Medal.bronze) # N: Revealed type is 'Literal[__main__.Medal.bronze]?' m = Medal.gold if int(): m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Medal") [case testEnumFromEnumMetaSubclass] from enum import EnumMeta class Achievement(metaclass=EnumMeta): pass class Medal(Achievement): gold = 1 silver = "hello" bronze = None # See comment in testEnumFromEnumMetaBasics def __init__(self, *args): pass reveal_type(Medal.bronze) # N: Revealed type is 'Literal[__main__.Medal.bronze]?' 
m = Medal.gold if int(): m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Medal") [case testEnumFromEnumMetaGeneric] from enum import EnumMeta from typing import Generic, TypeVar T = TypeVar("T") class Medal(Generic[T], metaclass=EnumMeta): # E: Enum class cannot be generic q = None [case testEnumNameAndValue] from enum import Enum class Truth(Enum): true = True false = False x = '' x = Truth.true.name reveal_type(Truth.true.name) # N: Revealed type is 'Literal['true']?' reveal_type(Truth.false.value) # N: Revealed type is 'builtins.bool' [builtins fixtures/bool.pyi] [case testEnumUnique] import enum @enum.unique class E(enum.Enum): x = 1 y = 1 # NOTE: This duplicate value is not detected by mypy at the moment x = 1 x = E.x [out] main:7: error: Incompatible types in assignment (expression has type "E", variable has type "int") [case testIntEnum_assignToIntVariable] from enum import IntEnum class N(IntEnum): x = 1 y = 1 n = 1 if int(): n = N.x # Subclass of int, so it's okay s = '' if int(): s = N.y # E: Incompatible types in assignment (expression has type "N", variable has type "str") [case testIntEnum_functionTakingIntEnum] from enum import IntEnum class SomeIntEnum(IntEnum): x = 1 def takes_some_int_enum(n: SomeIntEnum): pass takes_some_int_enum(SomeIntEnum.x) takes_some_int_enum(1) # Error takes_some_int_enum(SomeIntEnum(1)) # How to deal with the above [out] main:7: error: Argument 1 to "takes_some_int_enum" has incompatible type "int"; expected "SomeIntEnum" [case testIntEnum_functionTakingInt] from enum import IntEnum class SomeIntEnum(IntEnum): x = 1 def takes_int(i: int): pass takes_int(SomeIntEnum.x) takes_int(2) [case testIntEnum_functionReturningIntEnum] from enum import IntEnum class SomeIntEnum(IntEnum): x = 1 def returns_some_int_enum() -> SomeIntEnum: return SomeIntEnum.x an_int = 1 an_int = returns_some_int_enum() an_enum = SomeIntEnum.x an_enum = returns_some_int_enum() [out] [case testEnumMethods] from 
enum import Enum class Color(Enum): red = 1 green = 2 def m(self, x: int): pass @staticmethod def m2(x: int): pass Color.red.m('') Color.m2('') [builtins fixtures/staticmethod.pyi] [out] main:11: error: Argument 1 to "m" of "Color" has incompatible type "str"; expected "int" main:12: error: Argument 1 to "m2" of "Color" has incompatible type "str"; expected "int" [case testIntEnum_ExtendedIntEnum_functionTakingExtendedIntEnum] from enum import IntEnum class ExtendedIntEnum(IntEnum): pass class SomeExtIntEnum(ExtendedIntEnum): x = 1 def takes_int(i: int): pass takes_int(SomeExtIntEnum.x) def takes_some_ext_int_enum(s: SomeExtIntEnum): pass takes_some_ext_int_enum(SomeExtIntEnum.x) [case testNamedTupleEnum] from typing import NamedTuple from enum import Enum N = NamedTuple('N', [('bar', int)]) class E(N, Enum): X = N(1) def f(x: E) -> None: pass f(E.X) [case testEnumCall] from enum import IntEnum class E(IntEnum): a = 1 x = None # type: int reveal_type(E(x)) [out] main:5: note: Revealed type is '__main__.E*' [case testEnumIndex] from enum import IntEnum class E(IntEnum): a = 1 s = None # type: str reveal_type(E[s]) [out] main:5: note: Revealed type is '__main__.E' [case testEnumIndexError] from enum import IntEnum class E(IntEnum): a = 1 E[1] # E: Enum index should be a string (actual index type "int") x = E[1] # E: Enum index should be a string (actual index type "int") [case testEnumIndexIsNotAnAlias] from enum import Enum class E(Enum): a = 1 b = 2 reveal_type(E['a']) # N: Revealed type is '__main__.E' E['a'] x = E['a'] reveal_type(x) # N: Revealed type is '__main__.E' def get_member(name: str) -> E: val = E[name] return val reveal_type(get_member('a')) # N: Revealed type is '__main__.E' [case testGenericEnum] from enum import Enum from typing import Generic, TypeVar T = TypeVar('T') class F(Generic[T], Enum): # E: Enum class cannot be generic x: T y: T reveal_type(F[int].x) # N: Revealed type is '__main__.F[builtins.int*]' [case testEnumFlag] from enum import 
Flag class C(Flag): a = 1 b = 2 x = C.a if int(): x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "C") if int(): x = x | C.b [case testEnumIntFlag] from enum import IntFlag class C(IntFlag): a = 1 b = 2 x = C.a if int(): x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "C") if int(): x = x | C.b [case testAnonymousEnum] from enum import Enum class A: def f(self) -> None: class E(Enum): a = 1 self.x = E.a a = A() reveal_type(a.x) [out] main:8: note: Revealed type is '__main__.E@4' [case testEnumInClassBody] from enum import Enum class A: class E(Enum): a = 1 class B: class E(Enum): a = 1 x = A.E.a y = B.E.a if int(): x = y # E: Incompatible types in assignment (expression has type "__main__.B.E", variable has type "__main__.A.E") [case testFunctionalEnumString] from enum import Enum, IntEnum E = Enum('E', 'foo bar') I = IntEnum('I', ' bar, baz ') reveal_type(E.foo) reveal_type(E.bar.value) reveal_type(I.bar) reveal_type(I.baz.value) [out] main:4: note: Revealed type is 'Literal[__main__.E.foo]?' main:5: note: Revealed type is 'Any' main:6: note: Revealed type is 'Literal[__main__.I.bar]?' main:7: note: Revealed type is 'builtins.int' [case testFunctionalEnumListOfStrings] from enum import Enum, IntEnum E = Enum('E', ('foo', 'bar')) F = IntEnum('F', ['bar', 'baz']) reveal_type(E.foo) reveal_type(F.baz) [out] main:4: note: Revealed type is 'Literal[__main__.E.foo]?' main:5: note: Revealed type is 'Literal[__main__.F.baz]?' [case testFunctionalEnumListOfPairs] from enum import Enum, IntEnum E = Enum('E', [('foo', 1), ['bar', 2]]) F = IntEnum('F', (['bar', 1], ('baz', 2))) reveal_type(E.foo) reveal_type(F.baz) reveal_type(E.foo.value) reveal_type(F.bar.name) [out] main:4: note: Revealed type is 'Literal[__main__.E.foo]?' main:5: note: Revealed type is 'Literal[__main__.F.baz]?' main:6: note: Revealed type is 'Literal[1]?' main:7: note: Revealed type is 'Literal['bar']?' 
[case testFunctionalEnumDict] from enum import Enum, IntEnum E = Enum('E', {'foo': 1, 'bar': 2}) F = IntEnum('F', {'bar': 1, 'baz': 2}) reveal_type(E.foo) reveal_type(F.baz) reveal_type(E.foo.value) reveal_type(F.bar.name) [out] main:4: note: Revealed type is 'Literal[__main__.E.foo]?' main:5: note: Revealed type is 'Literal[__main__.F.baz]?' main:6: note: Revealed type is 'Literal[1]?' main:7: note: Revealed type is 'Literal['bar']?' [case testFunctionalEnumErrors] from enum import Enum, IntEnum A = Enum('A') B = Enum('B', 42) C = Enum('C', 'a b', 'x') D = Enum('D', foo) bar = 'x y z' E = Enum('E', bar) I = IntEnum('I') J = IntEnum('I', 42) K = IntEnum('I', 'p q', 'z') L = Enum('L', ' ') M = Enum('M', ()) N = IntEnum('M', []) P = Enum('P', [42]) Q = Enum('Q', [('a', 42, 0)]) R = IntEnum('R', [[0, 42]]) S = Enum('S', {1: 1}) T = Enum('T', keyword='a b') U = Enum('U', *['a']) V = Enum('U', **{'a': 1}) W = Enum('W', 'a b') W.c [typing fixtures/typing-full.pyi] [out] main:2: error: Too few arguments for Enum() main:3: error: Enum() expects a string, tuple, list or dict literal as the second argument main:4: error: Too many arguments for Enum() main:5: error: Enum() expects a string, tuple, list or dict literal as the second argument main:5: error: Name 'foo' is not defined main:7: error: Enum() expects a string, tuple, list or dict literal as the second argument main:8: error: Too few arguments for IntEnum() main:9: error: IntEnum() expects a string, tuple, list or dict literal as the second argument main:10: error: Too many arguments for IntEnum() main:11: error: Enum() needs at least one item main:12: error: Enum() needs at least one item main:13: error: IntEnum() needs at least one item main:14: error: Enum() with tuple or list expects strings or (name, value) pairs main:15: error: Enum() with tuple or list expects strings or (name, value) pairs main:16: error: IntEnum() with tuple or list expects strings or (name, value) pairs main:17: error: Enum() with dict 
literal requires string literals main:18: error: Unexpected arguments to Enum() main:19: error: Unexpected arguments to Enum() main:20: error: Unexpected arguments to Enum() main:22: error: "Type[W]" has no attribute "c" [case testFunctionalEnumFlag] from enum import Flag, IntFlag A = Flag('A', 'x y') B = IntFlag('B', 'a b') reveal_type(A.x) # N: Revealed type is 'Literal[__main__.A.x]?' reveal_type(B.a) # N: Revealed type is 'Literal[__main__.B.a]?' reveal_type(A.x.name) # N: Revealed type is 'Literal['x']?' reveal_type(B.a.name) # N: Revealed type is 'Literal['a']?' # TODO: The revealed type should be 'int' here reveal_type(A.x.value) # N: Revealed type is 'Any' reveal_type(B.a.value) # N: Revealed type is 'Any' [case testAnonymousFunctionalEnum] from enum import Enum class A: def f(self) -> None: E = Enum('E', 'a b') self.x = E.a a = A() reveal_type(a.x) [out] main:7: note: Revealed type is '__main__.A.E@4' [case testFunctionalEnumInClassBody] from enum import Enum class A: E = Enum('E', 'a b') class B: E = Enum('E', 'a b') x = A.E.a y = B.E.a if int(): x = y # E: Incompatible types in assignment (expression has type "__main__.B.E", variable has type "__main__.A.E") [case testFunctionalEnumProtocols] from enum import IntEnum Color = IntEnum('Color', 'red green blue') reveal_type(Color['green']) # N: Revealed type is '__main__.Color' for c in Color: reveal_type(c) # N: Revealed type is '__main__.Color*' reveal_type(list(Color)) # N: Revealed type is 'builtins.list[__main__.Color*]' [builtins fixtures/list.pyi] [case testEnumWorkWithForward] from enum import Enum a: E = E.x class E(Enum): x = 1 y = 2 [out] [case testEnumWorkWithForward2] from enum import Enum b: F F = Enum('F', {'x': 1, 'y': 2}) def fn(x: F) -> None: pass fn(b) [out] [case testFunctionalEnum_python2] from enum import Enum Eu = Enum(u'Eu', u'a b') Eb = Enum(b'Eb', b'a b') Gu = Enum(u'Gu', {u'a': 1}) Gb = Enum(b'Gb', {b'a': 1}) Hu = Enum(u'Hu', [u'a']) Hb = Enum(b'Hb', [b'a']) Eu.a Eb.a Gu.a Gb.a 
Hu.a Hb.a [out] [case testEnumIncremental] import m reveal_type(m.E.a) reveal_type(m.F.b) [file m.py] from enum import Enum class E(Enum): a = 1 b = 2 F = Enum('F', 'a b') [rechecked] [stale] [out1] main:2: note: Revealed type is 'Literal[m.E.a]?' main:3: note: Revealed type is 'Literal[m.F.b]?' [out2] main:2: note: Revealed type is 'Literal[m.E.a]?' main:3: note: Revealed type is 'Literal[m.F.b]?' [case testEnumAuto] from enum import Enum, auto class Test(Enum): a = auto() b = auto() reveal_type(Test.a) # N: Revealed type is 'Literal[__main__.Test.a]?' [builtins fixtures/primitives.pyi] [case testEnumAttributeAccessMatrix] from enum import Enum, IntEnum, IntFlag, Flag, EnumMeta, auto from typing_extensions import Literal def is_x(val: Literal['x']) -> None: pass A1 = Enum('A1', 'x') class A2(Enum): x = auto() class A3(Enum): x = 1 is_x(reveal_type(A1.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(A1.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(A1.x.value) # N: Revealed type is 'Any' reveal_type(A1.x._value_) # N: Revealed type is 'Any' is_x(reveal_type(A2.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(A2.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(A2.x.value) # N: Revealed type is 'Any' reveal_type(A2.x._value_) # N: Revealed type is 'Any' is_x(reveal_type(A3.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(A3.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(A3.x.value) # N: Revealed type is 'builtins.int' reveal_type(A3.x._value_) # N: Revealed type is 'builtins.int' B1 = IntEnum('B1', 'x') class B2(IntEnum): x = auto() class B3(IntEnum): x = 1 # TODO: getting B1.x._value_ and B2.x._value_ to have type 'int' requires a typeshed change is_x(reveal_type(B1.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(B1.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(B1.x.value) # N: Revealed type is 'builtins.int' reveal_type(B1.x._value_) # N: Revealed type is 
'Any' is_x(reveal_type(B2.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(B2.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(B2.x.value) # N: Revealed type is 'builtins.int' reveal_type(B2.x._value_) # N: Revealed type is 'Any' is_x(reveal_type(B3.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(B3.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(B3.x.value) # N: Revealed type is 'builtins.int' reveal_type(B3.x._value_) # N: Revealed type is 'builtins.int' # TODO: C1.x.value and C2.x.value should also be of type 'int' # This requires either a typeshed change or a plugin refinement C1 = IntFlag('C1', 'x') class C2(IntFlag): x = auto() class C3(IntFlag): x = 1 is_x(reveal_type(C1.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(C1.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(C1.x.value) # N: Revealed type is 'Any' reveal_type(C1.x._value_) # N: Revealed type is 'Any' is_x(reveal_type(C2.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(C2.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(C2.x.value) # N: Revealed type is 'Any' reveal_type(C2.x._value_) # N: Revealed type is 'Any' is_x(reveal_type(C3.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(C3.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(C3.x.value) # N: Revealed type is 'builtins.int' reveal_type(C3.x._value_) # N: Revealed type is 'builtins.int' D1 = Flag('D1', 'x') class D2(Flag): x = auto() class D3(Flag): x = 1 is_x(reveal_type(D1.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(D1.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(D1.x.value) # N: Revealed type is 'Any' reveal_type(D1.x._value_) # N: Revealed type is 'Any' is_x(reveal_type(D2.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(D2.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(D2.x.value) # N: Revealed type is 'Any' reveal_type(D2.x._value_) # N: Revealed type 
is 'Any' is_x(reveal_type(D3.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(D3.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(D3.x.value) # N: Revealed type is 'builtins.int' reveal_type(D3.x._value_) # N: Revealed type is 'builtins.int' # TODO: Generalize our enum functional API logic to work with subclasses of Enum # See https://github.com/python/mypy/issues/6037 class Parent(Enum): pass # E1 = Parent('E1', 'x') # See above TODO class E2(Parent): x = auto() class E3(Parent): x = 1 is_x(reveal_type(E2.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(E2.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(E2.x.value) # N: Revealed type is 'Any' reveal_type(E2.x._value_) # N: Revealed type is 'Any' is_x(reveal_type(E3.x.name)) # N: Revealed type is 'Literal['x']' is_x(reveal_type(E3.x._name_)) # N: Revealed type is 'Literal['x']' reveal_type(E3.x.value) # N: Revealed type is 'builtins.int' reveal_type(E3.x._value_) # N: Revealed type is 'builtins.int' # TODO: Figure out if we can construct enums using EnumMetas using the functional API. # Also figure out if we even care about supporting that use case. 
class F2(metaclass=EnumMeta): x = auto() class F3(metaclass=EnumMeta): x = 1 F2.x.name # E: "F2" has no attribute "name" F2.x._name_ # E: "F2" has no attribute "_name_" F2.x.value # E: "F2" has no attribute "value" F2.x._value_ # E: "F2" has no attribute "_value_" F3.x.name # E: "F3" has no attribute "name" F3.x._name_ # E: "F3" has no attribute "_name_" F3.x.value # E: "F3" has no attribute "value" F3.x._value_ # E: "F3" has no attribute "_value_" [builtins fixtures/primitives.pyi] [case testEnumAttributeChangeIncremental] from a import SomeEnum reveal_type(SomeEnum.a.value) [file a.py] from b import SomeEnum [file b.py] from enum import Enum class SomeEnum(Enum): a = 1 [file b.py.2] from enum import Enum class SomeEnum(Enum): a = "foo" [out] main:2: note: Revealed type is 'builtins.int' [out2] main:2: note: Revealed type is 'builtins.str' [case testEnumReachabilityChecksBasic] from enum import Enum from typing_extensions import Literal class Foo(Enum): A = 1 B = 2 C = 3 x: Literal[Foo.A, Foo.B, Foo.C] if x is Foo.A: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' elif x is Foo.B: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' elif x is Foo.C: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.C]' else: reveal_type(x) # No output here: this branch is unreachable if Foo.A is x: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' elif Foo.B is x: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' elif Foo.C is x: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.C]' else: reveal_type(x) # No output here: this branch is unreachable y: Foo if y is Foo.A: reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' elif y is Foo.B: reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' elif y is Foo.C: reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.C]' else: reveal_type(y) # No output here: this branch is unreachable if Foo.A is y: reveal_type(y) # N: Revealed type is 
'Literal[__main__.Foo.A]' elif Foo.B is y: reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' elif Foo.C is y: reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.C]' else: reveal_type(y) # No output here: this branch is unreachable [builtins fixtures/bool.pyi] [case testEnumReachabilityChecksIndirect] from enum import Enum from typing_extensions import Literal, Final class Foo(Enum): A = 1 B = 2 C = 3 def accepts_foo_a(x: Literal[Foo.A]) -> None: ... x: Foo y: Literal[Foo.A] z: Final = Foo.A if x is y: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' else: reveal_type(x) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' if y is x: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' else: reveal_type(x) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' if x is z: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' reveal_type(z) # N: Revealed type is 'Literal[__main__.Foo.A]?' accepts_foo_a(z) else: reveal_type(x) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' reveal_type(z) # N: Revealed type is 'Literal[__main__.Foo.A]?' accepts_foo_a(z) if z is x: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' reveal_type(z) # N: Revealed type is 'Literal[__main__.Foo.A]?' accepts_foo_a(z) else: reveal_type(x) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' reveal_type(z) # N: Revealed type is 'Literal[__main__.Foo.A]?' accepts_foo_a(z) if y is z: reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' reveal_type(z) # N: Revealed type is 'Literal[__main__.Foo.A]?' 
accepts_foo_a(z) else: reveal_type(y) # No output: this branch is unreachable reveal_type(z) # No output: this branch is unreachable if z is y: reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' reveal_type(z) # N: Revealed type is 'Literal[__main__.Foo.A]?' accepts_foo_a(z) else: reveal_type(y) # No output: this branch is unreachable reveal_type(z) # No output: this branch is unreachable [builtins fixtures/bool.pyi] [case testEnumReachabilityNoNarrowingForUnionMessiness] from enum import Enum from typing_extensions import Literal class Foo(Enum): A = 1 B = 2 C = 3 x: Foo y: Literal[Foo.A, Foo.B] z: Literal[Foo.B, Foo.C] # For the sake of simplicity, no narrowing is done when the narrower type is a Union. if x is y: reveal_type(x) # N: Revealed type is '__main__.Foo' reveal_type(y) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.B]]' else: reveal_type(x) # N: Revealed type is '__main__.Foo' reveal_type(y) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.B]]' if y is z: reveal_type(y) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.B]]' reveal_type(z) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' else: reveal_type(y) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.B]]' reveal_type(z) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' [builtins fixtures/bool.pyi] [case testEnumReachabilityWithNone] # flags: --strict-optional from enum import Enum from typing import Optional class Foo(Enum): A = 1 B = 2 C = 3 x: Optional[Foo] if x: reveal_type(x) # N: Revealed type is '__main__.Foo' else: reveal_type(x) # N: Revealed type is 'Union[__main__.Foo, None]' if x is not None: reveal_type(x) # N: Revealed type is '__main__.Foo' else: reveal_type(x) # N: Revealed type is 'None' if x is Foo.A: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' else: reveal_type(x) # N: Revealed type is 
'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C], None]' [builtins fixtures/bool.pyi] [case testEnumReachabilityWithMultipleEnums] from enum import Enum from typing import Union from typing_extensions import Literal class Foo(Enum): A = 1 B = 2 class Bar(Enum): A = 1 B = 2 x1: Union[Foo, Bar] if x1 is Foo.A: reveal_type(x1) # N: Revealed type is 'Literal[__main__.Foo.A]' else: reveal_type(x1) # N: Revealed type is 'Union[Literal[__main__.Foo.B], __main__.Bar]' x2: Union[Foo, Bar] if x2 is Bar.A: reveal_type(x2) # N: Revealed type is 'Literal[__main__.Bar.A]' else: reveal_type(x2) # N: Revealed type is 'Union[__main__.Foo, Literal[__main__.Bar.B]]' x3: Union[Foo, Bar] if x3 is Foo.A or x3 is Bar.A: reveal_type(x3) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Bar.A]]' else: reveal_type(x3) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Bar.B]]' [builtins fixtures/bool.pyi] [case testEnumReachabilityPEP484ExampleWithFinal] # flags: --strict-optional from typing import Union from typing_extensions import Final from enum import Enum class Empty(Enum): token = 0 _empty: Final = Empty.token def func(x: Union[int, None, Empty] = _empty) -> int: boom = x + 42 # E: Unsupported left operand type for + ("None") \ # E: Unsupported left operand type for + ("Empty") \ # N: Left operand is of type "Union[int, None, Empty]" if x is _empty: reveal_type(x) # N: Revealed type is 'Literal[__main__.Empty.token]' return 0 elif x is None: reveal_type(x) # N: Revealed type is 'None' return 1 else: # At this point typechecker knows that x can only have type int reveal_type(x) # N: Revealed type is 'builtins.int' return x + 2 [builtins fixtures/primitives.pyi] [case testEnumReachabilityPEP484ExampleWithMultipleValues] from typing import Union from enum import Enum class Reason(Enum): timeout = 1 error = 2 def process(response: Union[str, Reason] = '') -> str: if response is Reason.timeout: reveal_type(response) # N: Revealed type is 
'Literal[__main__.Reason.timeout]' return 'TIMEOUT' elif response is Reason.error: reveal_type(response) # N: Revealed type is 'Literal[__main__.Reason.error]' return 'ERROR' else: # response can be only str, all other possible values exhausted reveal_type(response) # N: Revealed type is 'builtins.str' return 'PROCESSED: ' + response [builtins fixtures/primitives.pyi] [case testEnumReachabilityPEP484ExampleSingleton] # flags: --strict-optional from typing import Union from typing_extensions import Final from enum import Enum class Empty(Enum): token = 0 _empty = Empty.token def func(x: Union[int, None, Empty] = _empty) -> int: boom = x + 42 # E: Unsupported left operand type for + ("None") \ # E: Unsupported left operand type for + ("Empty") \ # N: Left operand is of type "Union[int, None, Empty]" if x is _empty: reveal_type(x) # N: Revealed type is 'Literal[__main__.Empty.token]' return 0 elif x is None: reveal_type(x) # N: Revealed type is 'None' return 1 else: # At this point typechecker knows that x can only have type int reveal_type(x) # N: Revealed type is 'builtins.int' return x + 2 [builtins fixtures/primitives.pyi] [case testEnumReachabilityPEP484ExampleSingletonWithMethod] # flags: --strict-optional from typing import Union from typing_extensions import Final from enum import Enum class Empty(Enum): token = lambda x: x def f(self) -> int: return 1 _empty = Empty.token def func(x: Union[int, None, Empty] = _empty) -> int: boom = x + 42 # E: Unsupported left operand type for + ("None") \ # E: Unsupported left operand type for + ("Empty") \ # N: Left operand is of type "Union[int, None, Empty]" if x is _empty: reveal_type(x) # N: Revealed type is 'Literal[__main__.Empty.token]' return 0 elif x is None: reveal_type(x) # N: Revealed type is 'None' return 1 else: # At this point typechecker knows that x can only have type int reveal_type(x) # N: Revealed type is 'builtins.int' return x + 2 [builtins fixtures/primitives.pyi] [case testAssignEnumAsAttribute] from 
enum import Enum class A: def __init__(self) -> None: self.b = Enum("x", [("foo", "bar")]) # E: Enum type as attribute is not supported reveal_type(A().b) # N: Revealed type is 'Any' mypy-0.761/test-data/unit/check-errorcodes.test0000644€tŠÔÚ€2›s®0000006050713576752246025720 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for error codes and ignoring errors using error codes -- -- These implicitly use --show-error-codes. [case testErrorCodeNoAttribute] import m m.x # E: Module has no attribute "x" [attr-defined] 'x'.foobar # E: "str" has no attribute "foobar" [attr-defined] from m import xx # E: Module 'm' has no attribute 'xx' [attr-defined] from m import think # E: Module 'm' has no attribute 'think'; maybe "thing"? [attr-defined] for x in 1: # E: "int" has no attribute "__iter__" (not iterable) [attr-defined] pass [file m.py] thing = 0 [builtins fixtures/module.pyi] [case testErrorCodeUndefinedName] x # E: Name 'x' is not defined [name-defined] def f() -> None: y # E: Name 'y' is not defined [name-defined] [file m.py] [builtins fixtures/module.pyi] [case testErrorCodeUnclassifiedError] class A: def __init__(self) -> int: \ # E: The return type of "__init__" must be None [misc] pass [case testErrorCodeNoteHasNoCode] reveal_type(1) # N: Revealed type is 'Literal[1]?' [case testErrorCodeSyntaxError] 1 '' # E: invalid syntax [syntax] [case testErrorCodeSyntaxError2] def f(): # E: Type signature has too many arguments [syntax] # type: (int) -> None 1 x = 0 # type: x y # E: syntax error in type comment 'x y' [syntax] [case testErrorCodeSyntaxError3] # This is a bit inconsistent -- syntax error would be more logical? x: 'a b' # E: Invalid type comment or annotation [valid-type] for v in x: # type: int, int # E: Syntax error in type annotation [syntax] \ # N: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) pass [case testErrorCodeSyntaxErrorIgnoreNote] # This is a bit inconsistent -- syntax error would be more logical? 
x: 'a b' # type: ignore[valid-type] for v in x: # type: int, int # type: ignore[syntax] pass [case testErrorCodeSyntaxError_python2] 1 '' # E: invalid syntax [syntax] [case testErrorCodeSyntaxError2_python2] def f(): # E: Type signature has too many arguments [syntax] # type: (int) -> None 1 x = 0 # type: x y # E: syntax error in type comment 'x y' [syntax] [case testErrorCodeSyntaxError3_python2] def f(): pass for v in f(): # type: int, int # E: Syntax error in type annotation [syntax] \ # N: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) pass [case testErrorCodeIgnore1] 'x'.foobar # type: ignore[attr-defined] 'x'.foobar # type: ignore[xyz] # E: "str" has no attribute "foobar" [attr-defined] 'x'.foobar # type: ignore [case testErrorCodeIgnore2] a = 'x'.foobar # type: int # type: ignore[attr-defined] b = 'x'.foobar # type: int # type: ignore[xyz] # E: "str" has no attribute "foobar" [attr-defined] c = 'x'.foobar # type: int # type: ignore [case testErrorCodeIgnore1_python2] 'x'.foobar # type: ignore[attr-defined] 'x'.foobar # type: ignore[xyz] # E: "str" has no attribute "foobar" [attr-defined] 'x'.foobar # type: ignore [case testErrorCodeIgnore2_python2] a = 'x'.foobar # type: int # type: ignore[attr-defined] b = 'x'.foobar # type: int # type: ignore[xyz] # E: "str" has no attribute "foobar" [attr-defined] c = 'x'.foobar # type: int # type: ignore [case testErrorCodeIgnoreMultiple1] a = 'x'.foobar(b) # type: ignore[name-defined, attr-defined] a = 'x'.foobar(b) # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" [attr-defined] a = 'x'.foobar(b) # type: ignore[xyz, w, attr-defined] # E: Name 'b' is not defined [name-defined] [case testErrorCodeIgnoreMultiple2] a = 'x'.foobar(b) # type: int # type: ignore[name-defined, attr-defined] b = 'x'.foobar(b) # type: int # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" [attr-defined] [case testErrorCodeIgnoreMultiple1_python2] a = 'x'.foobar(b) # type: ignore[name-defined, 
attr-defined] a = 'x'.foobar(b) # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" [attr-defined] a = 'x'.foobar(b) # type: ignore[xyz, w, attr-defined] # E: Name 'b' is not defined [name-defined] [case testErrorCodeIgnoreMultiple2_python2] a = 'x'.foobar(b) # type: int # type: ignore[name-defined, attr-defined] b = 'x'.foobar(b) # type: int # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" [attr-defined] [case testErrorCodeIgnoreWithExtraSpace] x # type: ignore [name-defined] x2 # type: ignore [ name-defined ] x3 # type: ignore [ xyz , name-defined ] x4 # type: ignore[xyz,name-defined] y # type: ignore [xyz] # E: Name 'y' is not defined [name-defined] y # type: ignore[ xyz ] # E: Name 'y' is not defined [name-defined] y # type: ignore[ xyz , foo ] # E: Name 'y' is not defined [name-defined] a = z # type: int # type: ignore [name-defined] b = z2 # type: int # type: ignore [ name-defined ] c = z2 # type: int # type: ignore [ name-defined , xyz ] d = zz # type: int # type: ignore [xyz] # E: Name 'zz' is not defined [name-defined] e = zz # type: int # type: ignore [ xyz ] # E: Name 'zz' is not defined [name-defined] f = zz # type: int # type: ignore [ xyz,foo ] # E: Name 'zz' is not defined [name-defined] [case testErrorCodeIgnoreAfterArgComment] def f(x # type: xyz # type: ignore[name-defined] # Comment ): # type () -> None pass def g(x # type: xyz # type: ignore # Comment ): # type () -> None pass def h(x # type: xyz # type: ignore[foo] # E: Name 'xyz' is not defined [name-defined] ): # type () -> None pass [case testErrorCodeIgnoreAfterArgComment_python2] def f(x # type: xyz # type: ignore[name-defined] # Comment ): # type () -> None pass def g(x # type: xyz # type: ignore # Comment ): # type () -> None pass def h(x # type: xyz # type: ignore[foo] # E: Name 'xyz' is not defined [name-defined] ): # type () -> None pass [case testErrorCodeIgnoreWithNote] import nostub # type: ignore[import] from defusedxml import xyz # type: 
ignore[import] [case testErrorCodeIgnoreWithNote_python2] import nostub # type: ignore[import] from defusedxml import xyz # type: ignore[import] [case testErrorCodeBadIgnore] import nostub # type: ignore xyz # E: Invalid "type: ignore" comment [syntax] import nostub # type: ignore[ # E: Invalid "type: ignore" comment [syntax] import nostub # type: ignore[foo # E: Invalid "type: ignore" comment [syntax] import nostub # type: ignore[foo, # E: Invalid "type: ignore" comment [syntax] import nostub # type: ignore[foo]] # E: Invalid "type: ignore" comment [syntax] import nostub # type: ignore[foo][bar] # E: Invalid "type: ignore" comment [syntax] import nostub # type: ignore[foo] [bar] # E: Invalid "type: ignore" comment [syntax] x = 0 # type: ignore[ # E: Invalid "type: ignore" comment [syntax] def f(x, # type: int # type: ignore[ # E: Invalid "type: ignore" comment [syntax] ): # type: (...) -> None pass [case testErrorCodeBadIgnoreNoExtraComment] # Omit the E: ... comments, as they affect parsing import nostub # type: ignore xyz import nostub # type: ignore[xyz import nostub # type: ignore[xyz][xyz] x = 0 # type: ignore[ def f(x, # type: int # type: ignore[ ): # type: (...) -> None pass [out] main:2: error: Invalid "type: ignore" comment [syntax] main:3: error: Invalid "type: ignore" comment [syntax] main:4: error: Invalid "type: ignore" comment [syntax] main:5: error: Invalid "type: ignore" comment [syntax] main:6: error: Invalid "type: ignore" comment [syntax] [case testErrorCodeBadIgnore_python2] import nostub # type: ignore xyz import nostub # type: ignore[xyz # Comment [x] import nostub # type: ignore[xyz][xyz] x = 0 # type: ignore[ def f(x, # type: int # type: ignore[ ): # type: (...) 
-> None pass [out] main:1: error: Invalid "type: ignore" comment [syntax] main:2: error: Invalid "type: ignore" comment [syntax] main:3: error: Invalid "type: ignore" comment [syntax] main:4: error: Invalid "type: ignore" comment [syntax] main:5: error: Invalid "type: ignore" comment [syntax] [case testErrorCodeArgKindAndCount] def f(x: int) -> None: pass # N: "f" defined here f() # E: Too few arguments for "f" [call-arg] f(1, 2) # E: Too many arguments for "f" [call-arg] f(y=1) # E: Unexpected keyword argument "y" for "f" [call-arg] def g(*, x: int) -> None: pass g() # E: Missing named argument "x" for "g" [call-arg] def h(x: int, y: int, z: int) -> None: pass h(y=1, z=1) # E: Missing positional argument "x" in call to "h" [call-arg] h(y=1) # E: Missing positional arguments "x", "z" in call to "h" [call-arg] [case testErrorCodeSuperArgs_python2] class A: def f(self): pass class B(A): def f(self): # type: () -> None super().f() # E: Too few arguments for "super" [call-arg] [case testErrorCodeArgType] def f(x: int) -> None: pass f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [arg-type] class A: def g(self, *, x: int) -> None: pass A().g(x='') # E: Argument "x" to "g" of "A" has incompatible type "str"; expected "int" [arg-type] [case testErrorCodeInvalidType] def f(): pass x: f # E: Function "__main__.f" is not valid as a type [valid-type] \ # N: Perhaps you need "Callable[...]" or a callback protocol? 
import sys y: sys # E: Module "sys" is not valid as a type [valid-type] z: y # E: Variable "__main__.y" is not valid as a type [valid-type] [case testErrorCodeNeedTypeAnnotation] from typing import TypeVar T = TypeVar('T') def f() -> T: pass x = f() # E: Need type annotation for 'x' [var-annotated] y = [] # E: Need type annotation for 'y' (hint: "y: List[] = ...") [var-annotated] [builtins fixtures/list.pyi] [case testErrorCodeBadOverride] from typing import overload class A: def f(self) -> int: return 0 class B(A): def f(self) -> str: # E: Return type "str" of "f" incompatible with return type "int" in supertype "A" [override] return '' class C(A): def f(self, x: int) -> int: # E: Signature of "f" incompatible with supertype "A" [override] return 0 class D: def f(self, x: int) -> int: return 0 class E(D): def f(self, x: str) -> int: # E: Argument 1 of "f" is incompatible with supertype "D"; supertype defines the argument type as "int" [override] return 0 class O: @overload def f(self, x: int) -> None: pass @overload def f(self, x: str) -> None: pass def f(self, x): pass class OO(O): @overload # E: Signature of "f" incompatible with supertype "O" [override] \ # N: Overload variants must be defined in the same order as they are in "O" def f(self, x: str) -> None: pass @overload def f(self, x: int) -> None: pass def f(self, x): pass [case testErrorCodeReturnValue] def f() -> int: return '' # E: Incompatible return value type (got "str", expected "int") [return-value] [case testErrorCodeMissingReturnValueInReturnStatement] def f() -> int: return # E: Return value expected [return-value] [case testErrorCodeAssignment] x: str = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [assignment] def f(x: str = 0) -> None: # E: Incompatible default for argument "x" (default has type "int", argument has type "str") [assignment] pass class A: x = 0 class B(A): x = '' # E: Incompatible types in assignment (expression has type "str", base 
class "A" defined the type as "int") [assignment] a: A a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment] [case testErrorCodeMissingTypeArg] # flags: --disallow-any-generics from typing import List, TypeVar x: List # E: Missing type parameters for generic type "List" [type-arg] y: list # E: Implicit generic "Any". Use "typing.List" and specify generic parameters [type-arg] T = TypeVar('T') L = List[List[T]] z: L # E: Missing type parameters for generic type "L" [type-arg] [builtins fixtures/list.pyi] [case testErrorCodeUnionAttribute] from typing import Union class A: x: int class B: y: str a: Union[A, B] a.x # E: Item "B" of "Union[A, B]" has no attribute "x" [union-attr] [case testErrorCodeFunctionHasNoAnnotation] # flags: --disallow-untyped-defs def f(x): # E: Function is missing a type annotation [no-untyped-def] pass def g(x: int): # E: Function is missing a return type annotation [no-untyped-def] pass def h(x) -> None: # E: Function is missing a type annotation for one or more arguments [no-untyped-def] pass def gen(): # E: Function is missing a return type annotation [no-untyped-def] yield 1 def gen2(x: int): # E: Function is missing a return type annotation [no-untyped-def] yield 1 async def asyncf(): # E: Function is missing a return type annotation [no-untyped-def] return 0 async def asyncf2(x: int): # E: Function is missing a return type annotation [no-untyped-def] return 0 [typing fixtures/typing-full.pyi] [case testErrorCodeCallUntypedFunction] # flags: --disallow-untyped-calls def f() -> None: g() # E: Call to untyped function "g" in typed context [no-untyped-call] def g(): pass [case testErrorCodeIndexing] from typing import Dict x: Dict[int, int] x[''] # E: Invalid index type "str" for "Dict[int, int]"; expected type "int" [index] 1[''] # E: Value of type "int" is not indexable [index] 1[''] = 1 # E: Unsupported target for indexed assignment [index] [builtins fixtures/dict.pyi] [case 
testErrorCodeInvalidTypeArg] from typing import TypeVar, Generic T = TypeVar('T', int, str) TT = TypeVar('TT', int, None) S = TypeVar('S', bound=str) def f(x: T) -> T: return x f(object()) # E: Value of type variable "T" of "f" cannot be "object" [type-var] def g(x: S) -> S: return x g(1) # E: Value of type variable "S" of "g" cannot be "int" [type-var] class C(Generic[T]): pass class D(Generic[S]): pass class E(Generic[S, T]): pass x: C[object] # E: Value of type variable "T" of "C" cannot be "object" [type-var] y: D[int] # E: Type argument "builtins.int" of "D" must be a subtype of "builtins.str" [type-var] z: D[int, int] # E: "D" expects 1 type argument, but 2 given [type-arg] def h(a: TT, s: S) -> None: b: C[TT] # E: Invalid type argument value for "C" [type-var] c: C[S] # E: Type variable "S" not valid as type argument value for "C" [type-var] [case testErrorCodeOperators] class A: pass A() + 1 # E: Unsupported left operand type for + ("A") [operator] 1 in A() # E: Unsupported right operand type for in ("A") [operator] A() < 1 # E: Unsupported left operand type for < ("A") [operator] -A() # E: Unsupported operand type for unary - ("A") [operator] +A() # E: Unsupported operand type for unary + ("A") [operator] ~A() # E: Unsupported operand type for ~ ("A") [operator] class B: def __add__(self, other: int) -> 'B': return self def __radd__(self, other: int) -> 'B': return self def __contains__(self, other: int) -> int: return 0 B() + '' # E: Unsupported operand types for + ("B" and "str") [operator] '' + B() # E: Unsupported operand types for + ("str" and "B") [operator] '' in B() # E: Unsupported operand types for in ("str" and "B") [operator] 1() # E: "int" not callable [operator] [case testErrorCodeListOrDictItem] from typing import List, Dict x: List[int] = [''] # E: List item 0 has incompatible type "str"; expected "int" [list-item] y: Dict[int, int] = {1: ''} # E: Dict entry 0 has incompatible type "int": "str"; expected "int": "int" [dict-item] [builtins 
fixtures/dict.pyi] [case testErrorCodeTypedDict] from typing_extensions import TypedDict class D(TypedDict): x: int class E(TypedDict): x: int y: int a: D = {'x': ''} # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [typeddict-item] b: D = {'y': ''} # E: Extra key 'y' for TypedDict "D" [typeddict-item] c = D(x=0) if int() else E(x=0, y=0) c = {} # E: Expected TypedDict key 'x' but found no keys [typeddict-item] [builtins fixtures/dict.pyi] [case testErrorCodeCannotDetermineType] y = x # E: Cannot determine type of 'x' [has-type] reveal_type(y) # N: Revealed type is 'Any' x = None [case testErrorCodeRedundantCast] # flags: --warn-redundant-casts from typing import cast x = cast(int, int()) # E: Redundant cast to "int" [redundant-cast] [case testErrorCodeInvalidCommentSignature] def f(x): # E: Type signature has too few arguments [syntax] # type: () -> None pass def g(x): # E: Type signature has too many arguments [syntax] # type: (int, int) -> None pass [case testErrorCodeInvalidCommentSignature_python2] def f(x): # E: Type signature has too few arguments [syntax] # type: () -> None pass def g(x): # E: Type signature has too many arguments [syntax] # type: (int, int) -> None pass [case testErrorCodeNonOverlappingEquality] # flags: --strict-equality if int() == str(): # E: Non-overlapping equality check (left operand type: "int", right operand type: "str") [comparison-overlap] pass if int() != str(): # E: Non-overlapping equality check (left operand type: "int", right operand type: "str") [comparison-overlap] pass if int() is str(): # E: Non-overlapping identity check (left operand type: "int", right operand type: "str") [comparison-overlap] pass [builtins fixtures/primitives.pyi] [case testErrorCodeMissingModule] from defusedxml import xyz # E: No library stub file for module 'defusedxml' [import] \ # N: (Stub files are from https://github.com/python/typeshed) from nonexistent import foobar # E: Cannot find implementation or 
library stub for module named 'nonexistent' [import] \ # N: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports import nonexistent2 # E: Cannot find implementation or library stub for module named 'nonexistent2' [import] from nonexistent3 import * # E: Cannot find implementation or library stub for module named 'nonexistent3' [import] from pkg import bad # E: Module 'pkg' has no attribute 'bad' [attr-defined] from pkg.bad2 import bad3 # E: Cannot find implementation or library stub for module named 'pkg.bad2' [import] [file pkg/__init__.py] [case testErrorCodeAlreadyDefined] x: int x: str # E: Name 'x' already defined on line 1 [no-redef] def f(): pass def f(): # E: Name 'f' already defined on line 4 [no-redef] pass [case testErrorCodeMissingReturn] def f() -> int: # E: Missing return statement [return] x = 0 [case testErrorCodeReturnValueNotExpected] def f() -> None: return 1 # E: No return value expected [return-value] [case testErrorCodeFunctionDoesNotReturnValue] from typing import Callable def f() -> None: pass x = f() # E: "f" does not return a value [func-returns-value] class A: def g(self) -> None: pass y = A().g() # E: "g" of "A" does not return a value [func-returns-value] c: Callable[[], None] z = c() # E: Function does not return a value [func-returns-value] [case testErrorCodeInstantiateAbstract] from abc import abstractmethod class A: @abstractmethod def f(self): pass class B(A): pass B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'f' [abstract] [case testErrorCodeNewTypeNotSubclassable] from typing import Union, NewType X = NewType('X', Union[int, str]) # E: Argument 2 to NewType(...) must be subclassable (got "Union[int, str]") [valid-newtype] [case testErrorCodeOverloadVariant] from typing import overload @overload def f(x: int) -> int: ... @overload def f(x: str) -> str: ... 
def f(x): return x f(object()) # E: No overload variant of "f" matches argument type "object" [call-overload] \ # N: Possible overload variants: \ # N: def f(x: int) -> int \ # N: def f(x: str) -> str f() # E: All overload variants of "f" require at least one argument [call-overload] \ # N: Possible overload variants: \ # N: def f(x: int) -> int \ # N: def f(x: str) -> str f(1, 1) # E: No overload variant of "f" matches argument types "int", "int" [call-overload] \ # N: Possible overload variants: \ # N: def f(x: int) -> int \ # N: def f(x: str) -> str [case testErrorCodeOverloadVariantIgnore] from typing import overload @overload def f(x: int) -> int: ... @overload def f(x: str) -> str: ... def f(x): return x f(object()) # type: ignore[call-overload] [case testErrorCodeAnyFromUnfollowedImport] # flags: --disallow-any-unimported from m import C # type: ignore def f(x: C) -> None: # E: Argument 1 to "f" becomes "Any" due to an unfollowed import [no-any-unimported] pass def g() -> C: ... # E: Return type becomes "Any" due to an unfollowed import [no-any-unimported] [case testErrorCodeReturnAny] # flags: --warn-return-any def f(): pass def g() -> int: return f() # E: Returning Any from function declared to return "int" [no-any-return] [case testErrorCodeFormatCall] '{:d}'.format('no') # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") [str-format] '{!x}'.format('Hm...') # E: Invalid conversion type "x", must be one of "r", "s" or "a" [str-format] '}{'.format() # E: Invalid conversion specifier in format string: unexpected } [str-format] '%d' % 'no' # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsInt]") [str-format] '%d + %d' % (1, 2, 3) # E: Not all arguments converted during string formatting [str-format] '{}'.format(b'abc') # E: On Python 3 '{}'.format(b'abc') produces "b'abc'"; use !r if this is a desired behavior [str-bytes-safe] '%s' % 
b'abc' # E: On Python 3 '%s' % b'abc' produces "b'abc'"; use %r if this is a desired behavior [str-bytes-safe] [builtins fixtures/primitives.pyi] [case testErrorCodeIgnoreNamedDefinedNote] x: List[int] # type: ignore[name-defined] [case testErrorCodeIgnoreMiscNote] x: [int] # type: ignore[misc] [case testErrorCodeProtocolProblemsIgnore] from typing_extensions import Protocol class P(Protocol): def f(self, x: str) -> None: ... class A: def f(self, x: int) -> None: ... def g(p: P) -> None: pass p: A g(p) # type: ignore[arg-type] [case testErrorCodeNoneReturnNoteIgnore] # flags: --disallow-untyped-defs def f(): # type: ignore[no-untyped-def] pass [case testErrorCodeVarianceNoteIgnore] from typing import List def f(x: List[object]) -> None: pass a = [1] f(a) # type: ignore[arg-type] [builtins fixtures/list.pyi] [case testErrorCodeAssignToMethod] class A: def f(self) -> None: pass def g(self: A) -> None: pass A.f = g # E: Cannot assign to a method [assignment] [case testErrorCodeDefinedHereNoteIgnore] import m m.f(kw=1) # type: ignore[call-arg] [file m.py] def f() -> None: pass [case testErrorCodeUnionNoteIgnore] from typing import Union class Foo: def __add__(self, x: Foo) -> Foo: pass def __radd__(self, x: Foo) -> Foo: pass class Bar: def __add__(self, x: Bar) -> Bar: pass def __radd__(self, x: Bar) -> Bar: pass a: Union[Foo, Bar] a + a # type: ignore[operator] a + Foo() # type: ignore[operator] Foo() + a # type: ignore[operator] [case testErrorCodeTypeIgnoreMisspelled1] x = y # type: ignored[foo] xx = y # type: ignored [foo] [out] main:1: error: Name 'ignored' is not defined [name-defined] main:1: error: Name 'y' is not defined [name-defined] main:2: error: Name 'ignored' is not defined [name-defined] main:2: error: Name 'y' is not defined [name-defined] [case testErrorCodeTypeIgnoreMisspelled2] x = y # type: int # type: ignored[foo] x = y # type: int # type: ignored [foo] [out] main:1: error: syntax error in type comment 'int' [syntax] main:2: error: syntax error in 
type comment 'int' [syntax] [case testErrorCode__exit__Return] class InvalidReturn: def __exit__(self, x, y, z) -> bool: # E: "bool" is invalid as return type for "__exit__" that always returns False [exit-return] \ # N: Use "typing_extensions.Literal[False]" as the return type or change it to "None" \ # N: If return type of "__exit__" implies that it may return True, the context manager may swallow exceptions return False [builtins fixtures/bool.pyi] [case testErrorCodeOverloadedOperatorMethod] # flags: --strict-optional from typing import Optional, overload class A: @overload def __add__(self, x: int) -> A: ... @overload def __add__(self, x: str) -> str: ... def __add__(self, x): pass class B: pass x: Optional[B] A() + x # type: ignore[operator] class C: @overload def __rsub__(self, x: int) -> A: ... @overload def __rsub__(self, x: str) -> str: ... def __rsub__(self, x): pass x - C() # type: ignore[operator] mypy-0.761/test-data/unit/check-expressions.test0000644€tŠÔÚ€2›s®0000022752413576752246026137 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for simple expressions. -- -- See also: -- * check-functions.test contains test cases for calls. -- * check-varargs.test contains test cases for *args. -- * check-dynamic.test contains test cases related to 'Any' type. -- * check-generics.test contains test cases for generic values. 
-- None expression -- --------------- [case testNoneAsRvalue] import typing a = None # type: A class A: pass [out] [case testNoneAsArgument] import typing def f(x: 'A', y: 'B') -> None: pass f(None, None) class A: pass class B(A): pass [out] -- Simple expressions -- ------------------ [case testIntLiteral] a = 0 b = None # type: A if int(): b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "A") if int(): a = 1 class A: pass [case testStrLiteral] a = '' b = None # type: A if int(): b = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "A") if int(): a = 'x' if int(): a = r"x" if int(): a = """foo""" class A: pass [case testFloatLiteral] a = 0.0 b = None # type: A if str(): b = 1.1 # E: Incompatible types in assignment (expression has type "float", variable has type "A") if str(): a = 1.1 class A: pass [file builtins.py] class object: def __init__(self): pass class type: pass class function: pass class float: pass class str: pass [case testComplexLiteral] a = 0.0j b = None # type: A if str(): b = 1.1j # E: Incompatible types in assignment (expression has type "complex", variable has type "A") if str(): a = 1.1j class A: pass [file builtins.py] class object: def __init__(self): pass class type: pass class function: pass class complex: pass class str: pass [case testBytesLiteral] b, a = None, None # type: (bytes, A) if str(): b = b'foo' if str(): b = br"foo" if str(): b = b'''foo''' if str(): a = b'foo' # E: Incompatible types in assignment (expression has type "bytes", variable has type "A") class A: pass [file builtins.py] class object: def __init__(self): pass class type: pass class tuple: pass class function: pass class bytes: pass class str: pass [case testUnicodeLiteralInPython3] s = None # type: str if int(): s = u'foo' b = None # type: bytes if int(): b = u'foo' # E: Incompatible types in assignment (expression has type "str", variable has type "bytes") [builtins 
fixtures/primitives.pyi] -- Binary operators -- ---------------- [case testAdd] a, b, c = None, None, None # type: (A, B, C) if int(): c = a + c # E: Unsupported operand types for + ("A" and "C") if int(): a = a + b # E: Incompatible types in assignment (expression has type "C", variable has type "A") if int(): c = b + a # E: Unsupported left operand type for + ("B") if int(): c = a + b class A: def __add__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testSub] a, b, c = None, None, None # type: (A, B, C) if int(): c = a - c # E: Unsupported operand types for - ("A" and "C") if int(): a = a - b # E: Incompatible types in assignment (expression has type "C", variable has type "A") if int(): c = b - a # E: Unsupported left operand type for - ("B") if int(): c = a - b class A: def __sub__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testMul] a, b, c = None, None, None # type: (A, B, C) if int(): c = a * c # E: Unsupported operand types for * ("A" and "C") if int(): a = a * b # E: Incompatible types in assignment (expression has type "C", variable has type "A") if int(): c = b * a # E: Unsupported left operand type for * ("B") if int(): c = a * b class A: def __mul__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testMatMul] a, b, c = None, None, None # type: (A, B, C) if int(): c = a @ c # E: Unsupported operand types for @ ("A" and "C") if int(): a = a @ b # E: Incompatible types in assignment (expression has type "C", variable has type "A") if int(): c = b @ a # E: Unsupported left operand type for @ ("B") if int(): c = a @ b class A: def __matmul__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testDiv] a, b, c = None, None, None # type: (A, B, C) if int(): c = a / c # E: Unsupported operand types for / ("A" and "C") a = a / b # E: Incompatible types in assignment (expression has type "C", variable has type "A") if int(): c = b / a # E: Unsupported left operand type for / ("B") if int(): c = a / b class A: 
def __truediv__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testIntDiv] a, b, c = None, None, None # type: (A, B, C) if int(): c = a // c # E: Unsupported operand types for // ("A" and "C") a = a // b # E: Incompatible types in assignment (expression has type "C", variable has type "A") if int(): c = b // a # E: Unsupported left operand type for // ("B") if int(): c = a // b class A: def __floordiv__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testMod] a, b, c = None, None, None # type: (A, B, C) if int(): c = a % c # E: Unsupported operand types for % ("A" and "C") if int(): a = a % b # E: Incompatible types in assignment (expression has type "C", variable has type "A") if int(): c = b % a # E: Unsupported left operand type for % ("B") if int(): c = a % b class A: def __mod__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testPow] a, b, c = None, None, None # type: (A, B, C) if int(): c = a ** c # E: Unsupported operand types for ** ("A" and "C") if int(): a = a ** b # E: Incompatible types in assignment (expression has type "C", variable has type "A") if int(): c = b ** a # E: Unsupported left operand type for ** ("B") if int(): c = a ** b class A: def __pow__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testMiscBinaryOperators] a, b = None, None # type: (A, B) b = a & a # Fail b = a | b # Fail b = a ^ a # Fail b = a << b # Fail b = a >> a # Fail b = a & b b = a | a b = a ^ b b = a << a b = a >> b class A: def __and__(self, x: 'B') -> 'B': pass def __or__(self, x: 'A') -> 'B': pass def __xor__(self, x: 'B') -> 'B': pass def __lshift__(self, x: 'A') -> 'B': pass def __rshift__(self, x: 'B') -> 'B': pass class B: pass [out] main:3: error: Unsupported operand types for & ("A" and "A") main:4: error: Unsupported operand types for | ("A" and "B") main:5: error: Unsupported operand types for ^ ("A" and "A") main:6: error: Unsupported operand types for << ("A" and "B") main:7: error: Unsupported operand 
types for >> ("A" and "A") [case testBooleanAndOr] a, b = None, None # type: (A, bool) if int(): b = b and b if int(): b = b or b if int(): b = b and a # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool") if int(): b = a and b # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool") if int(): b = b or a # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool") if int(): b = a or b # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool") class A: pass [builtins fixtures/bool.pyi] [case testRestrictedTypeAnd] b = None # type: bool i = None # type: str j = not b and i if j: reveal_type(j) # N: Revealed type is 'builtins.str' [builtins fixtures/bool.pyi] [case testRestrictedTypeOr] b = None # type: bool i = None # type: str j = b or i if not j: reveal_type(j) # N: Revealed type is 'builtins.str' [builtins fixtures/bool.pyi] [case testAndOr] s = "" b = bool() reveal_type(s and b or b) # N: Revealed type is 'builtins.bool' [builtins fixtures/bool.pyi] [case testRestrictedBoolAndOrWithGenerics] from typing import List def f(a: List[str], b: bool) -> bool: x = a and b y: bool return reveal_type(x or y) # N: Revealed type is 'builtins.bool' [builtins fixtures/list.pyi] [case testNonBooleanOr] c, d, b = None, None, None # type: (C, D, bool) if int(): c = c or c if int(): c = c or d if int(): c = d or c if int(): b = c or c # E: Incompatible types in assignment (expression has type "C", variable has type "bool") if int(): d = c or d # E: Incompatible types in assignment (expression has type "C", variable has type "D") if int(): d = d or c # E: Incompatible types in assignment (expression has type "C", variable has type "D") class C: pass class D(C): pass [builtins fixtures/bool.pyi] [case testInOperator] from typing import Iterator, Iterable, Any a, b, c, d, e = None, None, None, None, None # 
type: (A, B, bool, D, Any) if int(): c = c in a # E: Unsupported operand types for in ("bool" and "A") if int(): a = b in a # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): c = a in b # E: Unsupported right operand type for in ("B") if int(): c = b in d # E: Unsupported operand types for in ("B" and "D") if int(): c = b in a if int(): c = a in d if int(): c = e in d if int(): c = a in e class A: def __contains__(self, x: 'B') -> bool: pass class B: pass class D(Iterable[A]): def __iter__(self) -> Iterator[A]: pass [builtins fixtures/bool.pyi] [case testNotInOperator] from typing import Iterator, Iterable, Any a, b, c, d, e = None, None, None, None, None # type: (A, B, bool, D, Any) if int(): c = c not in a # E: Unsupported operand types for in ("bool" and "A") if int(): a = b not in a # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): c = a not in b # E: Unsupported right operand type for in ("B") if int(): c = b not in d # E: Unsupported operand types for in ("B" and "D") if int(): c = b not in a if int(): c = a not in d if int(): c = e in d if int(): c = a in e class A: def __contains__(self, x: 'B') -> bool: pass class B: pass class D(Iterable[A]): def __iter__(self) -> Iterator[A]: pass [builtins fixtures/bool.pyi] [case testNonBooleanContainsReturnValue] a, b, c = None, None, None # type: (A, bool, int) if int(): b = a not in a if int(): b = a in a if int(): c = a not in a # E: Incompatible types in assignment (expression has type "bool", variable has type "int") if int(): c = a in a # E: Incompatible types in assignment (expression has type "bool", variable has type "int") class A: def __contains__(self, x: 'A') -> int: pass [builtins fixtures/bool.pyi] [case testInWithInvalidArgs] a = 1 in ([1] + ['x']) # E: List item 0 has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testEq] a, b = None, None # type: (A, bool) if int(): a = a == 
b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): a = a != b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): b = a == b if int(): b = a != b class A: def __eq__(self, o: object) -> bool: pass def __ne__(self, o: object) -> bool: pass [builtins fixtures/bool.pyi] [case testLtAndGt] a, b, bo = None, None, None # type: (A, B, bool) if int(): a = a < b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): a = a > b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): bo = a < b if int(): bo = a > b class A: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass class B: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass [builtins fixtures/bool.pyi] [case testCmp_python2] a, b, c, bo = None, None, None, None # type: (A, B, C, bool) bo = a == a # E: Unsupported operand types for == ("A" and "A") bo = a != a # E: Unsupported operand types for comparison ("A" and "A") bo = a < b bo = a > b bo = b <= b bo = b <= c bo = b >= c # E: Unsupported operand types for comparison ("C" and "B") bo = a >= b bo = c >= b bo = c <= b # E: Unsupported operand types for comparison ("B" and "C") bo = a == c bo = b == c # E: Unsupported operand types for == ("C" and "B") class A: def __cmp__(self, o): # type: ('B') -> bool pass def __eq__(self, o): # type: ('int') -> bool pass class B: def __cmp__(self, o): # type: ('B') -> bool pass def __le__(self, o): # type: ('C') -> bool pass class C: def __cmp__(self, o): # type: ('A') -> bool pass def __eq__(self, o): # type: ('int') -> bool pass [builtins_py2 fixtures/bool_py2.pyi] [case testDiv_python2] 10 / 'no' # E: Unsupported operand types for / ("int" and "str") 'no' / 10 # E: Unsupported operand types for / ("str" and "int") [builtins_py2 fixtures/ops.pyi] [case cmpIgnoredPy3] a, b, bo = None, None, 
None # type: (A, B, bool) bo = a <= b # E: Unsupported left operand type for <= ("A") class A: def __cmp__(self, o: 'B') -> bool: pass class B: pass [builtins fixtures/bool.pyi] [case testLeAndGe] a, b, bo = None, None, None # type: (A, B, bool) if int(): a = a <= b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): a = a >= b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): bo = a <= b if int(): bo = a >= b class A: def __le__(self, o: 'B') -> bool: pass def __ge__(self, o: 'B') -> bool: pass class B: def __le__(self, o: 'B') -> bool: pass def __ge__(self, o: 'B') -> bool: pass [builtins fixtures/bool.pyi] [case testChainedComp] a, b, bo = None, None, None # type: (A, B, bool) a < a < b < b # Fail a < b < b < b a < a > a < b # Fail class A: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass class B: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass [builtins fixtures/bool.pyi] [out] main:3: error: Unsupported operand types for < ("A" and "A") main:5: error: Unsupported operand types for < ("A" and "A") main:5: error: Unsupported operand types for > ("A" and "A") [case testChainedCompBoolRes] a, b, bo = None, None, None # type: (A, B, bool) if int(): bo = a < b < b if int(): a = a < b < b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") class A: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass class B: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass [builtins fixtures/bool.pyi] [case testChainedCompResTyp] x, y = None, None # type: (X, Y) a, b, p, bo = None, None, None, None # type: (A, B, P, bool) if int(): b = y == y == y if int(): bo = y == y == y # E: Incompatible types in assignment (expression has type "B", variable has type "bool") if int(): a = x < y if int(): a = x < y == y # E: Incompatible types in 
assignment (expression has type "P", variable has type "A") if int(): p = x < y == y class P: pass class A(P): pass class B(P): pass class X: def __lt__(self, o: 'Y') -> A: pass def __gt__(self, o: 'Y') -> A: pass class Y: def __lt__(self, o: 'Y') -> A: pass def __gt__(self, o: 'Y') -> A: pass def __eq__(self, o: 'Y') -> B: pass # type: ignore [builtins fixtures/bool.pyi] [case testIs] a, b = None, None # type: (A, bool) if int(): a = a is b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): b = a is b if int(): b = b is a if int(): b = a is None class A: pass [builtins fixtures/bool.pyi] [case testIsNot] a, b = None, None # type: (A, bool) if int(): a = a is not b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): b = a is not b if int(): b = b is not a if int(): b = a is not None class A: pass [builtins fixtures/bool.pyi] [case testIsRightOperand] 1 is 1() [builtins fixtures/bool.pyi] [out] main:2: error: "int" not callable [case testReverseBinaryOperator] class A: def __add__(self, x: int) -> int: pass class B: def __radd__(self, x: A) -> str: pass s = None # type: str n = None # type: int if int(): n = A() + 1 if int(): s = A() + B() if int(): n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testReverseBinaryOperator2] class A: def __add__(self, x: 'A') -> object: pass class B: def __radd__(self, x: A) -> str: pass s = None # type: str n = None # type: int if int(): s = A() + B() n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testReverseBinaryOperator3] class N: def __add__(self, x: 'N') -> object: pass class A: def __add__(self, x: N) -> int: pass class B: def __radd__(self, x: N) -> str: pass s = None # type: str s = A() + B() # E: Unsupported operand types for + ("A" and "B") [case testBinaryOperatorWithAnyRightOperand] from typing import 
Any, cast class A: pass A() + cast(Any, 1) [case testReverseComparisonOperator] class C: def __gt__(self, x: 'A') -> object: pass class A: def __lt__(self, x: C) -> int: pass # E: Signatures of "__lt__" of "A" and "__gt__" of "C" are unsafely overlapping class B: def __gt__(self, x: A) -> str: pass s = None # type: str n = None # type: int if int(): n = A() < C() s = A() < B() if int(): n = A() < B() # E: Incompatible types in assignment (expression has type "str", variable has type "int") s = object() < B() # E: Unsupported operand types for > ("B" and "object") [case testReversibleComparisonWithExtraArgument] class C: def __lt__(self, o: object, x: str = "") -> int: ... [case testErrorContextAndBinaryOperators] import typing class A: def __getitem__(self, i: str) -> int: pass def f() -> None: A()[1] # Error class B: A()[1] # Error A()[1] # Error [out] main:5: error: Invalid index type "int" for "A"; expected type "str" main:7: error: Invalid index type "int" for "A"; expected type "str" main:8: error: Invalid index type "int" for "A"; expected type "str" [case testErrorContextAndBinaryOperators2] import m [file m.py] import typing class A: def __getitem__(self, i: str) -> int: pass def f() -> None: A()[1] # Error class B: A()[1] # Error A()[1] # Error [out] tmp/m.py:5: error: Invalid index type "int" for "A"; expected type "str" tmp/m.py:7: error: Invalid index type "int" for "A"; expected type "str" tmp/m.py:8: error: Invalid index type "int" for "A"; expected type "str" [case testDivmod] from typing import Tuple, Union, SupportsInt _Decimal = Union[Decimal, int] class Decimal(SupportsInt): def __init__(self, int) -> None: ... def __divmod__(self, other: _Decimal) -> Tuple[Decimal, Decimal]: ... def __rdivmod__(self, other: _Decimal) -> Tuple[Decimal, Decimal]: ... 
i = 8 f = 8.0 d = Decimal(8) reveal_type(divmod(i, i)) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(divmod(f, i)) # N: Revealed type is 'Tuple[builtins.float, builtins.float]' reveal_type(divmod(d, i)) # N: Revealed type is 'Tuple[__main__.Decimal, __main__.Decimal]' reveal_type(divmod(i, f)) # N: Revealed type is 'Tuple[builtins.float, builtins.float]' reveal_type(divmod(f, f)) # N: Revealed type is 'Tuple[builtins.float, builtins.float]' divmod(d, f) # E: Unsupported operand types for divmod ("Decimal" and "float") reveal_type(divmod(i, d)) # N: Revealed type is 'Tuple[__main__.Decimal, __main__.Decimal]' divmod(f, d) # E: Unsupported operand types for divmod ("float" and "Decimal") reveal_type(divmod(d, d)) # N: Revealed type is 'Tuple[__main__.Decimal, __main__.Decimal]' # Now some bad calls divmod() # E: 'divmod' expects 2 arguments \ # E: Too few arguments for "divmod" divmod(7) # E: 'divmod' expects 2 arguments \ # E: Too few arguments for "divmod" divmod(7, 8, 9) # E: 'divmod' expects 2 arguments \ # E: Too many arguments for "divmod" divmod(_x=7, _y=9) # E: 'divmod' must be called with 2 positional arguments divmod('foo', 'foo') # E: Unsupported left operand type for divmod ("str") divmod(i, 'foo') # E: Unsupported operand types for divmod ("int" and "str") divmod(f, 'foo') # E: Unsupported operand types for divmod ("float" and "str") divmod(d, 'foo') # E: Unsupported operand types for divmod ("Decimal" and "str") divmod('foo', i) # E: Unsupported operand types for divmod ("str" and "int") divmod('foo', f) # E: Unsupported operand types for divmod ("str" and "float") divmod('foo', d) # E: Unsupported operand types for divmod ("str" and "Decimal") [builtins fixtures/divmod.pyi] [typing fixtures/typing-full.pyi] -- Unary operators -- --------------- [case testUnaryMinus] a, b = None, None # type: (A, B) if int(): a = -a # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = -b # E: 
Unsupported operand type for unary - ("B") if int(): b = -a class A: def __neg__(self) -> 'B': pass class B: pass [case testUnaryPlus] a, b = None, None # type: (A, B) if int(): a = +a # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = +b # E: Unsupported operand type for unary + ("B") if int(): b = +a class A: def __pos__(self) -> 'B': pass class B: pass [case testUnaryNot] a, b = None, None # type: (A, bool) if int(): a = not b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if int(): b = not a if int(): b = not b class A: pass [builtins fixtures/bool.pyi] [case testUnaryBitwiseNeg] a, b = None, None # type: (A, B) if int(): a = ~a # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = ~b # E: Unsupported operand type for ~ ("B") if int(): b = ~a class A: def __invert__(self) -> 'B': pass class B: pass -- Indexing -- -------- [case testIndexing] a, b, c = None, None, None # type: (A, B, C) if int(): c = a[c] # E: Invalid index type "C" for "A"; expected type "B" if int(): a = a[b] # E: Incompatible types in assignment (expression has type "C", variable has type "A") if int(): c = b[a] # E: Value of type "B" is not indexable if int(): c = a[b] class A: def __getitem__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testIndexingAsLvalue] a, b, c = None, None, None # type: (A, B, C) a[c] = c # Fail a[b] = a # Fail b[a] = c # Fail a[b] = c class A: def __setitem__(self, x: 'B', y: 'C') -> None: pass class B: pass class C: pass [out] main:3: error: Invalid index type "C" for "A"; expected type "B" main:4: error: Incompatible types in assignment (expression has type "A", target has type "C") main:5: error: Unsupported target for indexed assignment [case testOverloadedIndexing] from foo import * [file foo.pyi] from typing import overload a, b, c = None, None, None # type: (A, B, C) a[b] a[c] a[1] # E: No overload variant 
of "__getitem__" of "A" matches argument type "int" \ # N: Possible overload variants: \ # N: def __getitem__(self, B) -> int \ # N: def __getitem__(self, C) -> str i, s = None, None # type: (int, str) if int(): i = a[b] if int(): s = a[b] # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): i = a[c] # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): s = a[c] class A: @overload def __getitem__(self, x: 'B') -> int: pass @overload def __getitem__(self, x: 'C') -> str: pass class B: pass class C: pass [out] -- Cast expression -- --------------- [case testCastExpressions] from typing import cast, Any class A: pass class B: pass class C(A): pass a, b, c = None, None, None # type: (A, B, C) if int(): a = cast(A, a()) # E: "A" not callable if int(): a = cast(Any, a()) # E: "A" not callable b = cast(A, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = cast(A, b) if int(): a = cast(A, a) c = cast(C, a) if int(): a = cast(A, c) if int(): a = cast(Any, b) b = cast(Any, a) [out] [case testAnyCast] from typing import cast, Any a, b = None, None # type: (A, B) a = cast(Any, a()) # Fail a = cast(Any, b) b = cast(Any, a) class A: pass class B: pass [out] main:3: error: "A" not callable -- None return type -- ---------------- [case testNoneReturnTypeBasics] a, o = None, None # type: (A, object) if int(): a = f() # E: "f" does not return a value if int(): o = a() # E: Function does not return a value if int(): o = A().g(a) # E: "g" of "A" does not return a value if int(): o = A.g(a, a) # E: "g" of "A" does not return a value A().g(f()) # E: "f" does not return a value x: A = f() # E: "f" does not return a value f() A().g(a) def f() -> None: pass class A: def g(self, x: object) -> None: pass def __call__(self) -> None: pass [case testNoneReturnTypeWithStatements] import typing if f(): # Fail pass elif f(): # Fail pass while f(): # 
Fail pass def g() -> object: return f() # Fail raise f() # Fail def f() -> None: pass [builtins fixtures/exception.pyi] [out] main:2: error: "f" does not return a value main:4: error: "f" does not return a value main:6: error: "f" does not return a value main:9: error: "f" does not return a value main:10: error: "f" does not return a value [case testNoneReturnTypeWithExpressions] from typing import cast a = None # type: A [f()] # E: "f" does not return a value f() + a # E: "f" does not return a value a + f() # E: "f" does not return a value f() == a # E: "f" does not return a value a != f() # E: "f" does not return a value cast(A, f()) f().foo # E: "f" does not return a value def f() -> None: pass class A: def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/list.pyi] [case testNoneReturnTypeWithExpressions2] import typing a, b = None, None # type: (A, bool) f() in a # E: "f" does not return a value # E: Unsupported right operand type for in ("A") a < f() # E: "f" does not return a value f() <= a # E: "f" does not return a value a in f() # E: "f" does not return a value -f() # E: "f" does not return a value not f() # E: "f" does not return a value f() and b # E: "f" does not return a value b or f() # E: "f" does not return a value def f() -> None: pass class A: def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/bool.pyi] -- Slicing -- ------- [case testGetSlice] a, b = None, None # type: (A, B) if int(): a = a[1:2] # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = a[1:] # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = a[:2] # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = a[:] # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = a[1:2] if int(): b = a[1:] if int(): b = a[:2] if int(): b = a[:] class A: def __getitem__(self, s: slice) -> 'B': pass class 
B: pass [builtins fixtures/slice.pyi] [case testSlicingWithInvalidBase] a = None # type: A a[1:2] # E: Invalid index type "slice" for "A"; expected type "int" a[:] # E: Invalid index type "slice" for "A"; expected type "int" class A: def __getitem__(self, n: int) -> 'A': pass [builtins fixtures/slice.pyi] [case testSlicingWithNonindexable] o = None # type: object o[1:2] # E: Value of type "object" is not indexable o[:] # E: Value of type "object" is not indexable [builtins fixtures/slice.pyi] [case testNonIntSliceBounds] from typing import Any a, o = None, None # type: (Any, object) a[o:1] # E: Slice index must be an integer or None a[1:o] # E: Slice index must be an integer or None a[o:] # E: Slice index must be an integer or None a[:o] # E: Slice index must be an integer or None [builtins fixtures/slice.pyi] [case testNoneSliceBounds] from typing import Any a = None # type: Any a[None:1] a[1:None] a[None:] a[:None] [builtins fixtures/slice.pyi] [case testNoneSliceBoundsWithStrictOptional] # flags: --strict-optional from typing import Any a = None # type: Any a[None:1] a[1:None] a[None:] a[:None] [builtins fixtures/slice.pyi] -- String interpolation -- -------------------- [case testStringInterpolationType] from typing import Tuple i, f, s, t = None, None, None, None # type: (int, float, str, Tuple[int]) '%d' % i '%f' % f '%s' % s '%d' % (f,) '%d' % (s,) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsInt]") '%d' % t '%d' % s # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsInt]") '%f' % s # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsFloat]") '%x' % f # E: Incompatible types in string interpolation (expression has type "float", placeholder has type "int") '%i' % f '%o' % f # E: Incompatible types in string interpolation (expression has 
type "float", placeholder has type "int") [builtins fixtures/primitives.pyi] [case testStringInterpolationSAcceptsAnyType] from typing import Any i, o, s = None, None, None # type: (int, object, str) '%s %s %s' % (i, o, s) [builtins fixtures/primitives.pyi] [case testStringInterpolationSBytesVsStrErrorPy3] xb: bytes xs: str '%s' % xs # OK '%s' % xb # E: On Python 3 '%s' % b'abc' produces "b'abc'"; use %r if this is a desired behavior '%(name)s' % {'name': b'value'} # E: On Python 3 '%s' % b'abc' produces "b'abc'"; use %r if this is a desired behavior [builtins fixtures/primitives.pyi] [case testStringInterpolationSBytesVsStrResultsPy2] # flags: --python-version 2.7 xs = 'x' xu = u'x' reveal_type('%s' % xu) # N: Revealed type is 'builtins.unicode' reveal_type('%s, %d' % (u'abc', 42)) # N: Revealed type is 'builtins.unicode' reveal_type('%(key)s' % {'key': xu}) # N: Revealed type is 'builtins.unicode' reveal_type('%r' % xu) # N: Revealed type is 'builtins.str' reveal_type('%s' % xs) # N: Revealed type is 'builtins.str' [builtins fixtures/primitives.pyi] [case testStringInterpolationCount] '%d %d' % 1 # E: Not enough arguments for format string '%d %d' % (1, 2) '%d %d' % (1, 2, 3) # E: Not all arguments converted during string formatting t = 1, 's' '%d %s' % t '%s %d' % t # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsInt]") '%d' % t # E: Not all arguments converted during string formatting [builtins fixtures/primitives.pyi] [case testStringInterpolationWithAnyType] from typing import Any a = None # type: Any '%d %d' % a [builtins fixtures/primitives.pyi] [case testStringInterpolationInvalidPlaceholder] '%W' % 1 # E: Unsupported format character 'W' '%b' % 1 # E: Format character 'b' is only supported on bytes patterns [case testStringInterPolationPython2] # flags: --python-version 2.7 b'%b' % 1 # E: Format character 'b' is only supported in Python 3.5 and later b'%s' % 1 b'%a' % 1 # E: 
Format character 'a' is only supported in Python 3 [case testBytesInterpolationBefore35] # flags: --python-version 3.4 b'%b' % 1 # E: Unsupported left operand type for % ("bytes") [case testBytesInterpolation] b'%b' % 1 # E: Incompatible types in string interpolation (expression has type "int", placeholder has type "bytes") b'%b' % b'1' b'%a' % 3 [case testStringInterpolationWidth] '%2f' % 3.14 '%*f' % 3.14 # E: Not enough arguments for format string '%*f' % (4, 3.14) '%*f' % (1.1, 3.14) # E: * wants int [builtins fixtures/primitives.pyi] [case testStringInterpolationPrecision] '%.2f' % 3.14 '%.*f' % 3.14 # E: Not enough arguments for format string '%.*f' % (4, 3.14) '%.*f' % (1.1, 3.14) # E: * wants int [builtins fixtures/primitives.pyi] [case testStringInterpolationWidthAndPrecision] '%4.2f' % 3.14 '%4.*f' % 3.14 # E: Not enough arguments for format string '%*.2f' % 3.14 # E: Not enough arguments for format string '%*.*f' % 3.14 # E: Not enough arguments for format string '%*.*f' % (4, 2, 3.14) [builtins fixtures/primitives.pyi] [case testStringInterpolationFlagsAndLengthModifiers] '%04hd' % 1 '%-.4ld' % 1 '%+*Ld' % (1, 1) '% .*ld' % (1, 1) [builtins fixtures/primitives.pyi] [case testStringInterpolationDoublePercentage] '%% %d' % 1 '%3% %d' % 1 '%*%' % 1 '%*% %d' % 1 # E: Not enough arguments for format string [builtins fixtures/primitives.pyi] [case testStringInterpolationC] '%c' % 1 '%c' % 's' '%c' % '' # E: "%c" requires int or char '%c' % 'ab' # E: "%c" requires int or char [builtins fixtures/primitives.pyi] [case testStringInterpolationMappingTypes] '%(a)d %(b)s' % {'a': 1, 'b': 's'} '%(a)d %(b)s' % {'a': 's', 'b': 1} # E: Incompatible types in string interpolation (expression has type "str", placeholder with key 'a' has type "Union[int, float, SupportsInt]") b'%(x)s' % {b'x': b'data'} [builtins fixtures/primitives.pyi] [case testStringInterpolationMappingKeys] '%()d' % {'': 2} '%(a)d' % {'a': 1, 'b': 2, 'c': 3} '%(q)d' % {'a': 1, 'b': 2, 'c': 3} # E: Key 
'q' not found in mapping '%(a)d %%' % {'a': 1} [builtins fixtures/primitives.pyi] [case testStringInterpolationMappingDictTypes] from typing import Any, Dict a = None # type: Any ds, do, di = None, None, None # type: Dict[str, int], Dict[object, int], Dict[int, int] '%(a)' % 1 # E: Format requires a mapping (expression has type "int", expected type for mapping is "Mapping[str, Any]") '%()d' % a '%()d' % ds '%()d' % do # E: Format requires a mapping (expression has type "Dict[object, int]", expected type for mapping is "Mapping[str, Any]") b'%()d' % ds # E: Format requires a mapping (expression has type "Dict[str, int]", expected type for mapping is "Mapping[bytes, Any]") [builtins fixtures/primitives.pyi] [case testStringInterpolationMappingInvalidDictTypesPy2] # flags: --py2 --no-strict-optional from typing import Any, Dict di = None # type: Dict[int, int] '%()d' % di # E: Format requires a mapping (expression has type "Dict[int, int]", expected type for mapping is "Union[Mapping[str, Any], Mapping[unicode, Any]]") [builtins_py2 fixtures/python2.pyi] [case testStringInterpolationMappingInvalidSpecifiers] '%(a)d %d' % 1 # E: String interpolation mixes specifier with and without mapping keys '%(b)*d' % 1 # E: String interpolation contains both stars and mapping keys '%(b).*d' % 1 # E: String interpolation contains both stars and mapping keys [case testStringInterpolationMappingFlagsAndLengthModifiers] '%(a)1d' % {'a': 1} '%(a).1d' % {'a': 1} '%(a)#1.1ld' % {'a': 1} [builtins fixtures/primitives.pyi] [case testStringInterpolationFloatPrecision] '%.f' % 1.2 '%.3f' % 1.2 '%.f' % 'x' '%.3f' % 'x' [builtins fixtures/primitives.pyi] [out] main:3: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsFloat]") main:4: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsFloat]") [case testStringInterpolationSpaceKey] '%( )s' % {' 
': 'foo'} [case testByteByteInterpolation] def foo(a: bytes, b: bytes): b'%s:%s' % (a, b) foo(b'a', b'b') == b'a:b' [case testStringInterpolationStarArgs] x = (1, 2) "%d%d" % (*x,) [case testBytePercentInterpolationSupported] b'%s' % (b'xyz',) b'%(name)s' % {'name': b'jane'} # E: Dictionary keys in bytes formatting must be bytes, not strings b'%(name)s' % {b'name': 'jane'} # E: On Python 3 b'%s' requires bytes, not string b'%c' % (123) [case testUnicodeInterpolation_python2] u'%s' % (u'abc',) [case testStringInterpolationVariableLengthTuple] from typing import Tuple def f(t: Tuple[int, ...]) -> None: '%d %d' % t '%d %d %d' % t [builtins fixtures/primitives.pyi] [case testStringInterpolationUnionType] from typing import Tuple, Union a: Union[Tuple[int, str], Tuple[str, int]] = ('A', 1) '%s %s' % a '%s' % a # E: Not all arguments converted during string formatting b: Union[Tuple[int, str], Tuple[int, int], Tuple[str, int]] = ('A', 1) '%s %s' % b '%s %s %s' % b # E: Not enough arguments for format string c: Union[Tuple[str, int], Tuple[str, int, str]] = ('A', 1) '%s %s' % c # E: Not all arguments converted during string formatting -- str.format() calls -- ------------------ [case testFormatCallParseErrors] '}'.format() # E: Invalid conversion specifier in format string: unexpected } '{'.format() # E: Invalid conversion specifier in format string: unmatched { '}}'.format() # OK '{{'.format() # OK '{{}}}'.format() # E: Invalid conversion specifier in format string: unexpected } '{{{}}'.format() # E: Invalid conversion specifier in format string: unexpected } '{}}{{}'.format() # E: Invalid conversion specifier in format string: unexpected } '{{{}:{}}}'.format(0) # E: Cannot find replacement for positional format specifier 1 [builtins fixtures/primitives.pyi] [case testFormatCallValidationErrors] '{!}}'.format(0) # E: Invalid conversion specifier in format string: unexpected } '{!x}'.format(0) # E: Invalid conversion type "x", must be one of "r", "s" or "a" 
'{!:}'.format(0) # E: Invalid conversion specifier in format string '{{}:s}'.format(0) # E: Invalid conversion specifier in format string: unexpected } '{{}.attr}'.format(0) # E: Invalid conversion specifier in format string: unexpected } '{{}[key]}'.format(0) # E: Invalid conversion specifier in format string: unexpected } '{ {}:s}'.format() # E: Conversion value must not contain { or } '{ {}.attr}'.format() # E: Conversion value must not contain { or } '{ {}[key]}'.format() # E: Conversion value must not contain { or } [builtins fixtures/primitives.pyi] [case testFormatCallEscaping] '{}'.format() # E: Cannot find replacement for positional format specifier 0 '{}'.format(0) # OK '{{}}'.format() # OK '{{}}'.format(0) # E: Not all arguments converted during string formatting '{{{}}}'.format() # E: Cannot find replacement for positional format specifier 0 '{{{}}}'.format(0) # OK '{{}} {} {{}}'.format(0) # OK '{{}} {:d} {{}} {:d}'.format('a', 'b') # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") 'foo({}, {}) == {{}} ({{}} expected)'.format(0) # E: Cannot find replacement for positional format specifier 1 'foo({}, {}) == {{}} ({{}} expected)'.format(0, 1) # OK 'foo({}, {}) == {{}} ({{}} expected)'.format(0, 1, 2) # E: Not all arguments converted during string formatting [builtins fixtures/primitives.pyi] [case testFormatCallNestedFormats] '{:{}{}}'.format(42, '*') # E: Cannot find replacement for positional format specifier 2 '{:{}{}}'.format(42, '*', '^') # OK '{:{}{}}'.format(42, '*', '^', 0) # E: Not all arguments converted during string formatting # NOTE: we don't check format specifiers that contain { or } at all '{:{{}}}'.format() # E: Cannot find replacement for positional format specifier 0 '{:{:{}}}'.format() # E: Formatting nesting must be at most two levels deep '{:{{}:{}}}'.format() # E: Invalid conversion specifier in format string: unexpected } '{!s:{fill:d}{align}}'.format(42, fill='*', align='^') # 
E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") [builtins fixtures/primitives.pyi] [case testFormatCallAutoNumbering] '{}, {{}}, {0}'.format() # E: Cannot combine automatic field numbering and manual field specification '{0}, {1}, {}'.format() # E: Cannot combine automatic field numbering and manual field specification '{0}, {1}, {0}'.format(1, 2, 3) # E: Not all arguments converted during string formatting '{}, {other:+d}, {}'.format(1, 2, other='no') # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") '{0}, {other}, {}'.format() # E: Cannot combine automatic field numbering and manual field specification '{:{}}, {:{:.5d}{}}'.format(1, 2, 3, 'a', 5) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") [builtins fixtures/primitives.pyi] [case testFormatCallMatchingPositional] '{}'.format(positional='no') # E: Cannot find replacement for positional format specifier 0 \ # E: Not all arguments converted during string formatting '{.x}, {}, {}'.format(1, 'two', 'three') # E: "int" has no attribute "x" 'Reverse {2.x}, {1}, {0}'.format(1, 2, 'three') # E: "str" has no attribute "x" ''.format(1, 2) # E: Not all arguments converted during string formatting [builtins fixtures/primitives.pyi] [case testFormatCallMatchingNamed] '{named}'.format(0) # E: Cannot find replacement for named format specifier "named" \ # E: Not all arguments converted during string formatting '{one.x}, {two}'.format(one=1, two='two') # E: "int" has no attribute "x" '{one}, {two}, {.x}'.format(1, one='two', two='three') # E: "int" has no attribute "x" ''.format(stuff='yes') # E: Not all arguments converted during string formatting [builtins fixtures/primitives.pyi] [case testFormatCallMatchingVarArg] from typing import List args: List[int] = [] '{}, {}'.format(1, 2, *args) # Don't flag this because args may be empty strings: List[str] '{:d}, 
{[0].x}'.format(*strings) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") \ # E: "str" has no attribute "x" # TODO: this is a runtime error, but error message is confusing '{[0][:]:d}'.format(*strings) # E: Syntax error in format specifier "0[0][" [builtins fixtures/primitives.pyi] [case testFormatCallMatchingKwArg] from typing import Dict kwargs: Dict[str, str] = {} '{one}, {two}'.format(one=1, two=2, **kwargs) # Don't flag this because args may be empty '{stuff:.3d}'.format(**kwargs) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") '{stuff[0]:f}, {other}'.format(**kwargs) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]") '{stuff[0]:c}'.format(**kwargs) [builtins fixtures/primitives.pyi] [case testFormatCallCustomFormatSpec] from typing import Union class Bad: ... class Good: def __format__(self, spec: str) -> str: ... 
'{:OMG}'.format(Good()) '{:OMG}'.format(Bad()) # E: Unrecognized format specification "OMG" '{!s:OMG}'.format(Good()) # E: Unrecognized format specification "OMG" '{:{}OMG{}}'.format(Bad(), 'too', 'dynamic') x: Union[Good, Bad] '{:OMG}'.format(x) # E: Unrecognized format specification "OMG" [builtins fixtures/primitives.pyi] [case testFormatCallFormatTypes] '{:x}'.format(42) '{:E}'.format(42) '{:g}'.format(42) '{:x}'.format('no') # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") '{:E}'.format('no') # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]") '{:g}'.format('no') # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]") '{:n}'.format(3.14) '{:d}'.format(3.14) # E: Incompatible types in string interpolation (expression has type "float", placeholder has type "int") '{:s}'.format(42) '{:s}'.format('yes') '{:z}'.format('what') # E: Unsupported format character 'z' '{:Z}'.format('what') # E: Unsupported format character 'Z' [builtins fixtures/primitives.pyi] [case testFormatCallFormatTypesChar] '{:c}'.format(42) '{:c}'.format('no') # E: ":c" requires int or char '{:c}'.format('c') class C: ... '{:c}'.format(C()) # E: Incompatible types in string interpolation (expression has type "C", placeholder has type "Union[int, float, str]") x: str '{:c}'.format(x) [builtins fixtures/primitives.pyi] [case testFormatCallFormatTypesCustomFormat] from typing import Union class Bad: ... class Good: def __format__(self, spec: str) -> str: ... 
x: Union[Good, Bad] y: Union[Good, int] z: Union[Bad, int] t: Union[Good, str] '{:d}'.format(x) # E: Incompatible types in string interpolation (expression has type "Bad", placeholder has type "int") '{:d}'.format(y) '{:d}'.format(z) # E: Incompatible types in string interpolation (expression has type "Bad", placeholder has type "int") '{:d}'.format(t) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") [builtins fixtures/primitives.pyi] [case testFormatCallFormatTypesBytes] from typing import Union, TypeVar, NewType, Generic A = TypeVar('A', str, bytes) B = TypeVar('B', bound=bytes) x: Union[str, bytes] a: str b: bytes N = NewType('N', bytes) n: N '{}'.format(a) '{}'.format(b) # E: On Python 3 '{}'.format(b'abc') produces "b'abc'"; use !r if this is a desired behavior '{}'.format(x) # E: On Python 3 '{}'.format(b'abc') produces "b'abc'"; use !r if this is a desired behavior '{}'.format(n) # E: On Python 3 '{}'.format(b'abc') produces "b'abc'"; use !r if this is a desired behavior class C(Generic[B]): x: B def meth(self) -> None: '{}'.format(self.x) # E: On Python 3 '{}'.format(b'abc') produces "b'abc'"; use !r if this is a desired behavior def func(x: A) -> A: '{}'.format(x) # E: On Python 3 '{}'.format(b'abc') produces "b'abc'"; use !r if this is a desired behavior return x '{!r}'.format(b) '{!r}'.format(x) '{!r}'.format(n) [builtins fixtures/primitives.pyi] [case testFormatCallFormatTypesBytesNotPy2] # flags: --py2 from typing import Union, TypeVar, NewType, Generic A = TypeVar('A', str, unicode) B = TypeVar('B', bound=str) x = '' # type: Union[str, unicode] a = '' b = b'' N = NewType('N', str) n = N(b'') '{}'.format(a) '{}'.format(b) '{}'.format(x) '{}'.format(n) u'{}'.format(a) u'{}'.format(b) u'{}'.format(x) u'{}'.format(n) class C(Generic[B]): x = None # type: B def meth(self): # type: () -> None '{}'.format(self.x) def func(x): # type: (A) -> A '{}'.format(x) return x '{!r}'.format(b) '{!r}'.format(x) 
'{!r}'.format(n) [builtins_py2 fixtures/python2.pyi] [case testFormatCallFinal] from typing_extensions import Final FMT: Final = '{.x}, {:{:d}}' FMT.format(1, 2, 'no') # E: "int" has no attribute "x" \ # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") [builtins fixtures/primitives.pyi] [case testFormatCallFinalChar] from typing_extensions import Final GOOD: Final = 'c' BAD: Final = 'no' OK: Final[str] = '...' '{:c}'.format(GOOD) '{:c}'.format(BAD) # E: ":c" requires int or char '{:c}'.format(OK) [builtins fixtures/primitives.pyi] [case testFormatCallForcedConversions] '{!r}'.format(42) '{!s}'.format(42) '{!s:d}'.format(42) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "int") '{!s:s}'.format('OK') '{} and {!x}'.format(0, 1) # E: Invalid conversion type "x", must be one of "r", "s" or "a" [builtins fixtures/primitives.pyi] [case testFormatCallAccessorsBasic] from typing import Any x: Any '{.x:{[0]}}'.format('yes', 42) # E: "str" has no attribute "x" \ # E: Value of type "int" is not indexable '{.1+}'.format(x) # E: Syntax error in format specifier "0.1+" '{name.x[x]()[x]:.2f}'.format(name=x) # E: Only index and member expressions are allowed in format field accessors; got "name.x[x]()[x]" [builtins fixtures/primitives.pyi] [case testFormatCallAccessorsIndices] from typing_extensions import TypedDict class User(TypedDict): id: int name: str u: User '{user[name]:.3f}'.format(user=u) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]") def f() -> str: ... '{[f()]}'.format(u) # E: Invalid index expression in format field accessor "[f()]" [builtins fixtures/primitives.pyi] [case testFormatCallFlags] from typing import Union class Good: def __format__(self, spec: str) -> str: ... 
'{:#}'.format(42) '{:#}'.format('no') # E: Numeric flags are only allowed for numeric types '{!s:#}'.format(42) # E: Numeric flags are only allowed for numeric types '{:#s}'.format(42) # E: Numeric flags are only allowed for numeric types '{:+s}'.format(42) # E: Numeric flags are only allowed for numeric types '{:+d}'.format(42) '{:#d}'.format(42) x: Union[float, Good] '{:+f}'.format(x) [builtins fixtures/primitives.pyi] [case testFormatCallSpecialCases] '{:08b}'.format(int('3')) class S: def __int__(self) -> int: ... '{:+d}'.format(S()) # E: Incompatible types in string interpolation (expression has type "S", placeholder has type "int") '%d' % S() # This is OK however '{:%}'.format(0.001) [builtins fixtures/primitives.pyi] -- Lambdas -- ------- [case testTrivialLambda] from typing import Callable f = lambda: 1 # type: Callable[[], int] if int(): f = lambda: ''.x # E: "str" has no attribute "x" if int(): f = lambda: '' \ # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "Callable[[], int]") \ # E: Incompatible return value type (got "str", expected "int") [case testVoidLambda] import typing def void() -> None: pass x = lambda: void() # type: typing.Callable[[], None] [case testNoCrashOnLambdaGenerator] from typing import Iterator, Callable # These should not crash lambda: (yield) gen: Callable[[], Iterator[str]] gen = (lambda: (yield 1)) # E: Incompatible types in "yield" (actual type "int", expected type "str") def fun(cb: Callable[[], Iterator[str]]) -> None: pass fun(lambda: (yield from [1])) # E: Incompatible types in "yield from" (actual type "int", expected type "str") [builtins fixtures/list.pyi] [out] [case testLambdaAndReachability] def f() -> None: aa = [] y = lambda x: 1 aa.append(1) 1() # E: "int" not callable [builtins fixtures/list.pyi] -- List comprehensions -- ------------------- [case testSimpleListComprehension] from typing import List a = None # type: List[A] a = [x for x in a] b = [x for x in a] # 
type: List[B] # E: List comprehension has incompatible type List[A]; expected List[B] class A: pass class B: pass [builtins fixtures/for.pyi] [case testSimpleListComprehensionNestedTuples] from typing import List, Tuple l = None # type: List[Tuple[A, Tuple[A, B]]] a = [a2 for a1, (a2, b1) in l] # type: List[A] b = [a2 for a1, (a2, b1) in l] # type: List[B] # E: List comprehension has incompatible type List[A]; expected List[B] class A: pass class B: pass [builtins fixtures/for.pyi] [case testSimpleListComprehensionNestedTuples2] from typing import List, Tuple l = None # type: List[Tuple[int, Tuple[int, str]]] a = [f(d) for d, (i, s) in l] b = [f(s) for d, (i, s) in l] # E: Argument 1 to "f" has incompatible type "str"; expected "int" def f(x: int): pass [builtins fixtures/for.pyi] [case testListComprehensionWithNonDirectMapping] from typing import List a: List[A] b: List[B] if int(): b = [f(x) for x in a] if int(): a = [f(x) for x in a] # E: List comprehension has incompatible type List[B]; expected List[A] ([f(x) for x in b]) # E: Argument 1 to "f" has incompatible type "B"; expected "A" class A: pass class B: pass def f(a: A) -> B: pass [builtins fixtures/for.pyi] [case testErrorInListComprehensionCondition] from typing import List a = None # type: List[A] a = [x for x in a if x()] # E: "A" not callable class A: pass [builtins fixtures/for.pyi] [case testTypeInferenceOfListComprehension] from typing import List a = None # type: List[A] o = [x for x in a] # type: List[object] class A: pass [builtins fixtures/for.pyi] [case testSimpleListComprehensionInClassBody] from typing import List class A: a = None # type: List[A] a = [x for x in a] b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]; expected List[B] class B: pass [builtins fixtures/for.pyi] [out] -- Set comprehension -- ----------------- [case testSimpleSetComprehension] from typing import Set a = None # type: Set[A] a = {x for x in a} b = {x for x in a} # type: Set[B] # 
E: Set comprehension has incompatible type Set[A]; expected Set[B] class A: pass class B: pass [builtins fixtures/set.pyi] -- Dictionary comprehension -- ------------------------ [case testSimpleDictionaryComprehension] from typing import Dict, List, Tuple abd = None # type: Dict[A, B] abl = None # type: List[Tuple[A, B]] abd = {a: b for a, b in abl} x = {a: b for a, b in abl} # type: Dict[B, A] y = {a: b for a, b in abl} # type: A class A: pass class B: pass [builtins fixtures/dict.pyi] [out] main:5: error: Key expression in dictionary comprehension has incompatible type "A"; expected type "B" main:5: error: Value expression in dictionary comprehension has incompatible type "B"; expected type "A" main:6: error: Incompatible types in assignment (expression has type "Dict[A, B]", variable has type "A") [case testDictionaryComprehensionWithNonDirectMapping] from typing import Dict, List, Tuple abd: Dict[A, B] abl = None # type: List[Tuple[A, B]] abd = {a: f(b) for a, b in abl} class A: pass class B: pass class C: pass def f(b: A) -> C: pass [builtins fixtures/dict.pyi] [out] main:4: error: Value expression in dictionary comprehension has incompatible type "C"; expected type "B" main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A" -- Generator expressions -- --------------------- [case testSimpleGeneratorExpression] from typing import Iterator # The implementation is mostly identical to list comprehensions, so only a few # test cases is ok. 
a = None # type: Iterator[int] if int(): a = (x for x in a) b = None # type: Iterator[str] if int(): b = (x for x in a) # E: Generator has incompatible item type "int"; expected "str" [builtins fixtures/for.pyi] [case testGeneratorIncompatibleErrorMessage] from typing import Callable, Iterator, List a = [] # type: List[Callable[[], str]] b = None # type: Iterator[Callable[[], int]] if int(): b = (x for x in a) # E: Generator has incompatible item type "Callable[[], str]"; expected "Callable[[], int]" [builtins fixtures/list.pyi] -- Conditional expressions -- ----------------------- [case testSimpleConditionalExpression] import typing y = '' x = 1 if y else 2 if int(): x = 3 if int(): x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testConditionalExpressionWithEmptyCondition] import typing def f() -> None: pass x = 1 if f() else 2 # E: "f" does not return a value [case testConditionalExpressionWithSubtyping] import typing class A: pass class B(A): pass x = B() if bool() else A() if int(): x = A() if int(): x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A") y = A() if bool() else B() if int(): y = A() if int(): y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A") [builtins fixtures/bool.pyi] [case testConditionalExpressionAndTypeContext] import typing x = [1] if bool() else [] if int(): x = [1] if int(): x = ['x'] # E: List item 0 has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testConditionalExpressionUnion] from typing import Union reveal_type(1 if bool() else 2) # N: Revealed type is 'builtins.int' reveal_type(1 if bool() else '') # N: Revealed type is 'builtins.object' x: Union[int, str] = reveal_type(1 if bool() else '') \ # N: Revealed type is 'Union[Literal[1]?, Literal['']?]' class A: pass class B(A): pass class C: pass class D(A): pass a = A() b = B() c = C() d = D() reveal_type(a if 
bool() else b) # N: Revealed type is '__main__.A' reveal_type(b if bool() else c) # N: Revealed type is 'builtins.object' reveal_type(c if bool() else b) # N: Revealed type is 'builtins.object' reveal_type(c if bool() else a) # N: Revealed type is 'builtins.object' reveal_type(d if bool() else b) # N: Revealed type is '__main__.A' [builtins fixtures/bool.pyi] [case testConditionalExpressionUnionWithAny] from typing import Union, Any a: Any x: Union[int, str] = reveal_type(a if int() else 1) # N: Revealed type is 'Union[Any, Literal[1]?]' reveal_type(a if int() else 1) # N: Revealed type is 'Any' -- Special cases -- ------------- [case testOperationsWithNonInstanceTypes] from typing import cast class A: def __add__(self, a: 'A') -> 'A': pass a = None # type: A None + a # Fail f + a # Fail a + f # Fail cast(A, f) def f() -> None: pass [out] main:5: error: Unsupported left operand type for + ("None") main:6: error: Unsupported left operand type for + ("Callable[[], None]") main:7: error: Unsupported operand types for + ("A" and "Callable[[], None]") [case testOperatorMethodWithInvalidArgCount] a = None # type: A a + a # Fail class A: def __add__(self) -> 'A': pass [out] main:3: error: Too many arguments for "__add__" of "A" [case testOperatorMethodAsVar] from typing import Any class A: def __init__(self, _add: Any) -> None: self.__add__ = _add a = None # type: A a + a [out] [case testOperatorMethodAsVar2] class A: def f(self, x: int) -> str: pass __add__ = f s = None # type: str s = A() + 1 A() + (A() + 1) [out] main:7: error: Argument 1 has incompatible type "str"; expected "int" [case testIndexedLvalueWithSubtypes] a, b, c = None, None, None # type: (A, B, C) a[c] = c a[b] = c a[c] = b class A: def __setitem__(self, x: 'B', y: 'B') -> None: pass class B: pass class C(B): pass [out] -- Ellipsis -- -------- [case testEllipsis] a = None # type: A if str(): a = ... # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "A") b = ... 
c = ... if str(): b = c ....__class__ ....a # E: "ellipsis" has no attribute "a" class A: pass [file builtins.py] class object: def __init__(self): pass class ellipsis: def __init__(self): pass __class__ = object() class type: pass class function: pass class str: pass [out] -- Yield expression -- ---------------- [case testYieldExpression] def f(x: int) -> None: x = yield f('') x = 1 [builtins fixtures/for.pyi] [out] main:1: error: The return type of a generator function should be "Generator" or one of its supertypes main:2: error: "f" does not return a value main:2: error: Argument 1 to "f" has incompatible type "str"; expected "int" [case testYieldExpressionWithNone] from typing import Iterator def f(x: int) -> Iterator[None]: (yield) [builtins fixtures/for.pyi] [out] -- Yield from expression -- ---------------- [case testYieldFromIteratorHasNoValue] from typing import Iterator def f() -> Iterator[int]: yield 5 def g() -> Iterator[int]: a = yield from f() # E: Function does not return a value [case testYieldFromGeneratorHasValue] from typing import Iterator, Generator def f() -> Generator[int, None, str]: yield 5 return "ham" def g() -> Iterator[int]: a = "string" a = yield from f() [out] [case testYieldFromTupleExpression] from typing import Generator def g() -> Generator[int, None, None]: x = yield from () # E: Function does not return a value x = yield from (0, 1, 2) # E: Function does not return a value x = yield from (0, "ERROR") # E: Incompatible types in "yield from" (actual type "object", expected type "int") \ # E: Function does not return a value x = yield from ("ERROR",) # E: Incompatible types in "yield from" (actual type "str", expected type "int") \ # E: Function does not return a value [builtins fixtures/tuple.pyi] -- dict(...) -- --------- -- Note that the stub used in unit tests does not have all overload -- variants, but it should not matter. 
[case testDictWithKeywordArgsOnly] from typing import Dict, Any d1 = dict(a=1, b=2) # type: Dict[str, int] d2 = dict(a=1, b='') # type: Dict[str, int] # E: Dict entry 1 has incompatible type "str": "str"; expected "str": "int" d3 = dict(a=1) # type: Dict[int, int] # E: Dict entry 0 has incompatible type "str": "int"; expected "int": "int" d4 = dict(a=1, b=1) d4.xyz # E: "Dict[str, int]" has no attribute "xyz" d5 = dict(a=1, b='') # type: Dict[str, Any] [builtins fixtures/dict.pyi] [case testDictWithoutKeywordArgs] from typing import Dict d = dict() # E: Need type annotation for 'd' (hint: "d: Dict[, ] = ...") d2 = dict() # type: Dict[int, str] dict(undefined) # E: Name 'undefined' is not defined [builtins fixtures/dict.pyi] [case testDictFromList] from typing import Dict d = dict([(1, 'x'), (2, 'y')]) d() # E: "Dict[int, str]" not callable d2 = dict([(1, 'x')]) # type: Dict[str, str] # E: List item 0 has incompatible type "Tuple[int, str]"; expected "Tuple[str, str]" [builtins fixtures/dict.pyi] [case testDictFromIterableAndKeywordArg] from typing import Dict it = [('x', 1)] d = dict(it, x=1) d() # E: "Dict[str, int]" not callable d2 = dict(it, x='') d2() # E: "Dict[str, object]" not callable d3 = dict(it, x='') # type: Dict[str, int] # E: Argument "x" to "dict" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testDictFromIterableAndKeywordArg2] it = [(1, 'x')] dict(it, x='y') # E: Keyword argument only valid with "str" key type in call to "dict" [builtins fixtures/dict.pyi] [case testDictFromIterableAndKeywordArg3] d = dict([], x=1) d() # E: "Dict[str, int]" not callable [builtins fixtures/dict.pyi] [case testDictFromIterableAndStarStarArgs] from typing import Dict it = [('x', 1)] kw = {'x': 1} d = dict(it, **kw) d() # E: "Dict[str, int]" not callable kw2 = {'x': ''} d2 = dict(it, **kw2) d2() # E: "Dict[str, object]" not callable d3 = dict(it, **kw2) # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type "**Dict[str, 
str]"; expected "int" [builtins fixtures/dict.pyi] [case testDictFromIterableAndStarStarArgs2] it = [(1, 'x')] kw = {'x': 'y'} d = dict(it, **kw) # E: Keyword argument only valid with "str" key type in call to "dict" d() # E: "Dict[int, str]" not callable [builtins fixtures/dict.pyi] [case testUserDefinedClassNamedDict] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class dict(Generic[T, S]): def __init__(self, x: T, **kwargs: T) -> None: pass dict(1, y=1) [builtins fixtures/dict.pyi] [case testSpecialSignatureForSubclassOfDict] from typing import TypeVar, Dict, Generic T = TypeVar('T') S = TypeVar('S') class D1(dict): pass # Implicit base class Dict[Any, Any] D1([(1, 2)], x=1) class D2(Dict[T, S], Generic[T, S]): pass da = D2([('x', 2)], x=1) da() # E: "D2[str, int]" not callable D2([(1, 2)], x=1) # E: Keyword argument only valid with "str" key type in call to "dict" db = D2(x=1) db() # E: "D2[str, int]" not callable [builtins fixtures/dict.pyi] [case testSpecialSignatureForSubclassOfDict2] from typing import TypeVar, Dict, Generic T = TypeVar('T') class D(Dict[str, T], Generic[T]): pass D([('x', 1)], x=1) [builtins fixtures/dict.pyi] [case testOverridingSpecialSignatureInSubclassOfDict] from typing import TypeVar, Dict, Generic T = TypeVar('T') S = TypeVar('S') class D(Dict[T, S], Generic[T, S]): def __init__(self, x: S, y: T) -> None: pass d = D(1, y='') d() # E: "D[str, int]" not callable [builtins fixtures/dict.pyi] [case testRevealType] reveal_type(1) # N: Revealed type is 'Literal[1]?' 
[case testRevealLocals] x = 1 y = 2 z = x + y reveal_locals() [out] main:4: note: Revealed local types are: main:4: note: x: builtins.int main:4: note: y: builtins.int main:4: note: z: builtins.int [case testUndefinedRevealType] reveal_type(x) [out] main:1: error: Name 'x' is not defined main:1: note: Revealed type is 'Any' [case testUserDefinedRevealType] def reveal_type(x: int) -> None: pass reveal_type("foo") # E: Argument 1 to "reveal_type" has incompatible type "str"; expected "int" [case testRevealTypeVar] reveal_type = 1 1 + "foo" # E: Unsupported operand types for + ("int" and "str") [case testRevealForward] def f() -> None: reveal_type(x) x = 1 + 1 [out] main:2: note: Revealed type is 'builtins.int' [case testRevealUncheckedFunction] def f(): x = 42 reveal_type(x) [out] main:3: note: Revealed type is 'Any' main:3: note: 'reveal_type' always outputs 'Any' in unchecked functions [case testRevealCheckUntypedDefs] # flags: --check-untyped-defs def f(): x = 42 reveal_type(x) [out] main:4: note: Revealed type is 'builtins.int' [case testRevealTypedDef] def f() -> None: x = 42 reveal_type(x) [out] main:3: note: Revealed type is 'builtins.int' [case testEqNone] None == None [builtins fixtures/ops.pyi] [case testLtNone] None < None # E: Unsupported left operand type for < ("None") [builtins fixtures/ops.pyi] [case testDictWithStarExpr] b = {'z': 26, *a} # E: invalid syntax [builtins fixtures/dict.pyi] [case testDictWithStarStarExpr] from typing import Dict a = {'a': 1} b = {'z': 26, **a} c = {**b} d = {**a, **b, 'c': 3} e = {1: 'a', **a} # E: Argument 1 to "update" of "dict" has incompatible type "Dict[str, int]"; expected "Mapping[int, str]" f = {**b} # type: Dict[int, int] # E: List item 0 has incompatible type "Dict[str, int]"; expected "Mapping[int, int]" [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testDictIncompatibleTypeErrorMessage] from typing import Dict, Callable def things() -> int: return 42 stuff: Dict[int, Callable[[], str]] = 
{ 1: things # E: Dict entry 0 has incompatible type "int": "Callable[[], int]"; expected "int": "Callable[[], str]" } [builtins fixtures/dict.pyi] [case testDictIncompatibleKeyVerbosity] from typing import Dict import mod class A: ... class B(A): ... d: Dict[A, B] = {A(): mod.B()} # E: Dict entry 0 has incompatible type "A": "mod.B"; expected "A": "__main__.B" [file mod.py] class B: ... [builtins fixtures/dict.pyi] [case testDictIncompatibleValueVerbosity] from typing import Dict import mod class A: ... class B(A): ... d: Dict[B, A] = {mod.B(): A()} # E: Dict entry 0 has incompatible type "mod.B": "A"; expected "__main__.B": "A" [file mod.py] class B: ... [builtins fixtures/dict.pyi] [case testTypeAnnotationNeededMultipleAssignment] x, y = [], [] # E: Need type annotation for 'x' (hint: "x: List[] = ...") \ # E: Need type annotation for 'y' (hint: "y: List[] = ...") [builtins fixtures/list.pyi] [case testStrictEqualityEq] # flags: --strict-equality class A: ... class B: ... class C(B): ... A() == B() # E: Non-overlapping equality check (left operand type: "A", right operand type: "B") B() == C() C() == B() A() != B() # E: Non-overlapping equality check (left operand type: "A", right operand type: "B") B() != C() C() != B() [builtins fixtures/bool.pyi] [case testStrictEqualityIs] # flags: --strict-equality class A: ... class B: ... class C(B): ... A() is B() # E: Non-overlapping identity check (left operand type: "A", right operand type: "B") B() is C() C() is B() A() is not B() # E: Non-overlapping identity check (left operand type: "A", right operand type: "B") B() is not C() C() is not B() [builtins fixtures/bool.pyi] [case testStrictEqualityContains] # flags: --strict-equality class A: ... class B: ... class C(B): ... 
A() in [B()] # E: Non-overlapping container check (element type: "A", container item type: "B") B() in [C()] C() in [B()] A() not in [B()] # E: Non-overlapping container check (element type: "A", container item type: "B") B() not in [C()] C() not in [B()] [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] [case testStrictEqualityUnions] # flags: --strict-equality from typing import Container, Union class A: ... class B: ... a: Union[int, str] b: Union[A, B] a == int() b == int() # E: Non-overlapping equality check (left operand type: "Union[A, B]", right operand type: "int") a is int() b is int() # E: Non-overlapping identity check (left operand type: "Union[A, B]", right operand type: "int") ca: Union[Container[int], Container[str]] cb: Union[Container[A], Container[B]] 42 in ca 42 in cb # E: Non-overlapping container check (element type: "int", container item type: "Union[A, B]") [builtins fixtures/bool.pyi] [typing fixtures/typing-full.pyi] [case testStrictEqualityBytesSpecial] # flags: --strict-equality b'abc' in b'abcde' [builtins fixtures/primitives.pyi] [typing fixtures/typing-full.pyi] [case testStrictEqualityBytesSpecialUnion] # flags: --strict-equality from typing import Union x: Union[bytes, str] b'abc' in x x in b'abc' [builtins fixtures/primitives.pyi] [typing fixtures/typing-full.pyi] [case testStrictEqualityByteArraySpecial] # flags: --strict-equality b'abc' in bytearray(b'abcde') bytearray(b'abc') in b'abcde' # OK on Python 3 [builtins fixtures/primitives.pyi] [typing fixtures/typing-full.pyi] [case testBytesVsByteArray_python2] # flags: --strict-equality --py2 b'hi' in bytearray(b'hi') [builtins_py2 fixtures/python2.pyi] [typing fixtures/typing-full.pyi] [case testStrictEqualityNoPromotePy3] # flags: --strict-equality 'a' == b'a' # E: Non-overlapping equality check (left operand type: "Literal['a']", right operand type: "Literal[b'a']") b'a' in 'abc' # E: Non-overlapping container check (element type: "bytes", container item type: 
"str") x: str y: bytes x != y # E: Non-overlapping equality check (left operand type: "str", right operand type: "bytes") [builtins fixtures/primitives.pyi] [typing fixtures/typing-full.pyi] [case testStrictEqualityOkPromote] # flags: --strict-equality from typing import Container c: Container[int] 1 == 1.0 # OK 1.0 in c # OK [builtins fixtures/primitives.pyi] [typing fixtures/typing-full.pyi] [case testStrictEqualityAny] # flags: --strict-equality from typing import Any, Container x: Any c: Container[str] x in c x == 42 x is 42 [builtins fixtures/bool.pyi] [typing fixtures/typing-full.pyi] [case testStrictEqualityStrictOptional] # flags: --strict-equality --strict-optional x: str if x is not None: # OK even with strict-optional pass [builtins fixtures/bool.pyi] [case testStrictEqualityNoStrictOptional] # flags: --strict-equality --no-strict-optional x: str if x is not None: # OK without strict-optional pass [builtins fixtures/bool.pyi] [case testStrictEqualityEqNoOptionalOverlap] # flags: --strict-equality --strict-optional from typing import Optional x: Optional[str] y: Optional[int] if x == y: # E: Non-overlapping equality check (left operand type: "Optional[str]", right operand type: "Optional[int]") ... [builtins fixtures/bool.pyi] [case testCustomEqCheckStrictEquality] # flags: --strict-equality class A: def __eq__(self, other: A) -> bool: # type: ignore ... class B: def __eq__(self, other: B) -> bool: # type: ignore ... # Don't report non-overlapping check if there is already and error. A() == B() # E: Unsupported operand types for == ("A" and "B") [builtins fixtures/bool.pyi] [case testCustomEqCheckStrictEqualityOKInstance] # flags: --strict-equality class A: def __eq__(self, other: object) -> bool: ... class B: def __eq__(self, other: object) -> bool: ... 
A() == int() # OK int() != B() # OK [builtins fixtures/bool.pyi] [case testCustomEqCheckStrictEqualityOKUnion] # flags: --strict-equality from typing import Union class A: def __eq__(self, other: object) -> bool: ... x: Union[A, str] x == int() [builtins fixtures/bool.pyi] [case testCustomEqCheckStrictEqualityTuple] # flags: --strict-equality from typing import NamedTuple class Base(NamedTuple): attr: int class Custom(Base): def __eq__(self, other: object) -> bool: ... Base(int()) == int() # E: Non-overlapping equality check (left operand type: "Base", right operand type: "int") Base(int()) == tuple() Custom(int()) == int() [builtins fixtures/bool.pyi] [case testCustomEqCheckStrictEqualityMeta] # flags: --strict-equality class CustomMeta(type): def __eq__(self, other: object) -> bool: ... class Normal: ... class Custom(metaclass=CustomMeta): ... Normal == int() # E: Non-overlapping equality check (left operand type: "Type[Normal]", right operand type: "int") Normal == Normal Custom == int() [builtins fixtures/bool.pyi] [case testCustomContainsCheckStrictEquality] # flags: --strict-equality class A: def __contains__(self, other: A) -> bool: ... # Don't report non-overlapping check if there is already and error. 42 in A() # E: Unsupported operand types for in ("int" and "A") [builtins fixtures/bool.pyi] [case testStrictEqualityTypeVsCallable] # flags: --strict-equality from typing import Type, List class C: ... class D(C): ... class Bad: ... subclasses: List[Type[C]] object in subclasses D in subclasses Bad in subclasses # E: Non-overlapping container check (element type: "Type[Bad]", container item type: "Type[C]") [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] [case testStrictEqualityMetaclass] # flags: --strict-equality from typing import List, Type, Any class Meta(type): ... class OtherMeta(type): ... class A(metaclass=Meta): ... class B(metaclass=Meta): ... class C(metaclass=OtherMeta): ... 
o: Type[object] a: Type[Any] aa: type exp: List[Meta] A in exp B in exp C in exp # E: Non-overlapping container check (element type: "Type[C]", container item type: "Meta") o in exp a in exp aa in exp a in [A, B] aa in [A, B] class AA: ... class BB: ... a in [AA, BB] aa in [AA, BB] [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] [case testEmptyListOverlap] # mypy: strict-equality from typing import List x: List[int] x == [] [builtins fixtures/isinstancelist.pyi] [case testCustomEqDecoratedStrictEquality] # flags: --strict-equality from typing import TypeVar, Callable, Any F = TypeVar('F', bound=Callable[..., Any]) def deco(f: F) -> F: ... class Custom: @deco def __eq__(self, other: object) -> bool: ... Custom() == int() [builtins fixtures/bool.pyi] [case testCustomEqVarStrictEquality] # flags: --strict-equality class Custom: def compare(self, other: object) -> bool: ... __eq__ = compare Custom() == int() [builtins fixtures/bool.pyi] [case testStrictEqualityDisabledWithTypeVarRestrictions] # flags: --strict-equality from typing import TypeVar T = TypeVar('T', str, int) def f(x: T) -> T: if x == int(): # OK ... return x [builtins fixtures/bool.pyi] [case testStrictEqualityWithALiteral] # flags: --strict-equality from typing_extensions import Literal, Final def returns_a_or_b() -> Literal['a', 'b']: ... def returns_1_or_2() -> Literal[1, 2]: ... THREE: Final = 3 if returns_a_or_b() == 'c': # E: Non-overlapping equality check (left operand type: "Union[Literal['a'], Literal['b']]", right operand type: "Literal['c']") ... if returns_1_or_2() is THREE: # E: Non-overlapping identity check (left operand type: "Union[Literal[1], Literal[2]]", right operand type: "Literal[3]") ... [builtins fixtures/bool.pyi] [case testStrictEqualityWithALiteralNewType] # flags: --strict-equality from typing import NewType UserId = NewType('UserId', int) FileId = NewType('FileId', str) u: UserId f: FileId if u == 0: # OK ... 
if f == 0: # E: Non-overlapping equality check (left operand type: "FileId", right operand type: "Literal[0]") ... [builtins fixtures/bool.pyi] [case testStrictEqualityPromotionsLiterals] # flags: --strict-equality --py2 from typing import Final U_FOO = u'foo' # type: Final if str() == U_FOO: pass assert u'foo' == 'foo' assert u'foo' == u'bar' # E: Non-overlapping equality check (left operand type: "Literal[u'foo']", right operand type: "Literal[u'bar']") [builtins_py2 fixtures/python2.pyi] [case testUnimportedHintAny] def f(x: Any) -> None: # E: Name 'Any' is not defined \ # N: Did you forget to import it from "typing"? (Suggestion: "from typing import Any") pass [case testUnimportedHintAnyLower] def f(x: any) -> None: # E: Name 'any' is not defined \ # N: Did you forget to import it from "typing"? (Suggestion: "from typing import Any") pass [case testUnimportedHintOptional] def f(x: Optional[str]) -> None: # E: Name 'Optional' is not defined \ # N: Did you forget to import it from "typing"? (Suggestion: "from typing import Optional") pass [case testAssertionLazilyWithIsNone] from typing import Optional, List li: Optional[List] = [] assert li is None, li[0] [builtins fixtures/list.pyi] [case testAssertionLazilyWithIsInstance] from typing import Optional, List li: Optional[List] = [] assert not isinstance(li,list), li[0] [builtins fixtures/isinstancelist.pyi] [case testAssertCurrentFrameIsNotUnreachable] def f() -> int: # E: Missing return statement x: int assert isinstance(x, int), '...' 
[builtins fixtures/isinstance.pyi] mypy-0.761/test-data/unit/check-fastparse.test0000644€tŠÔÚ€2›s®0000003225113576752246025534 0ustar jukkaDROPBOX\Domain Users00000000000000[case testFastParseSyntaxError] 1 + # E: invalid syntax [case testFastParseTypeCommentSyntaxError] x = None # type: a : b # E: syntax error in type comment 'a : b' [case testFastParseInvalidTypeComment] x = None # type: a + b # E: Invalid type comment or annotation -- Function type comments are attributed to the function def line. -- This happens in both parsers. [case testFastParseFunctionAnnotationSyntaxError] def f(): # E: syntax error in type comment 'None -> None' # N: Suggestion: wrap argument types in parentheses # type: None -> None pass [case testFastParseFunctionAnnotationSyntaxErrorSpaces] def f(): # E: syntax error in type comment 'None -> None' # N: Suggestion: wrap argument types in parentheses # type: None -> None pass [case testFastParseInvalidFunctionAnnotation] def f(x): # E: Invalid type comment or annotation # type: (a + b) -> None pass [case testFastParseInvalidTypes2] # flags: --py2 # All of these should not crash from typing import Callable, Tuple, Iterable x = None # type: Tuple[int, str].x # E: Invalid type comment or annotation a = None # type: Iterable[x].x # E: Invalid type comment or annotation b = None # type: Tuple[x][x] # E: Invalid type comment or annotation c = None # type: Iterable[x][x] # E: Invalid type comment or annotation d = None # type: Callable[..., int][x] # E: Invalid type comment or annotation e = None # type: Callable[..., int].x # E: Invalid type comment or annotation def f1(x): # E: Invalid type comment or annotation # type: (Tuple[int, str].x) -> None pass def f2(x): # E: Invalid type comment or annotation # type: (Iterable[x].x) -> None pass def f3(x): # E: Invalid type comment or annotation # type: (Tuple[x][x]) -> None pass def f4(x): # E: Invalid type comment or annotation # type: (Iterable[x][x]) -> None pass def f5(x): # E: Invalid type 
comment or annotation # type: (Callable[..., int][x]) -> None pass def f6(x): # E: Invalid type comment or annotation # type: (Callable[..., int].x) -> None pass [case testFastParseInvalidTypes3] # flags: --python-version 3.6 # All of these should not crash from typing import Callable, Tuple, Iterable x: Tuple[int, str].x # E: Invalid type comment or annotation a: Iterable[x].x # E: Invalid type comment or annotation b: Tuple[x][x] # E: Invalid type comment or annotation c: Iterable[x][x] # E: Invalid type comment or annotation d: Callable[..., int][x] # E: Invalid type comment or annotation e: Callable[..., int].x # E: Invalid type comment or annotation f = None # type: Tuple[int, str].x # E: Invalid type comment or annotation g = None # type: Iterable[x].x # E: Invalid type comment or annotation h = None # type: Tuple[x][x] # E: Invalid type comment or annotation i = None # type: Iterable[x][x] # E: Invalid type comment or annotation j = None # type: Callable[..., int][x] # E: Invalid type comment or annotation k = None # type: Callable[..., int].x # E: Invalid type comment or annotation def f1(x: Tuple[int, str].x) -> None: pass # E: Invalid type comment or annotation def f2(x: Iterable[x].x) -> None: pass # E: Invalid type comment or annotation def f3(x: Tuple[x][x]) -> None: pass # E: Invalid type comment or annotation def f4(x: Iterable[x][x]) -> None: pass # E: Invalid type comment or annotation def f5(x: Callable[..., int][x]) -> None: pass # E: Invalid type comment or annotation def f6(x: Callable[..., int].x) -> None: pass # E: Invalid type comment or annotation [case testFastParseTypeWithIgnore] def f(x, # type: x # type: ignore ): # type: (...) 
-> None pass [case testFastParseVariableTypeWithIgnore] x = 1 # type: str # type: ignore [case testFastParseVariableTypeWithIgnoreNoSpace] x = 1 # type: str #type:ignore [case testFastParseVariableTypeWithIgnoreAndComment] x = 1 # type: str # type: ignore # comment [case testFastParseTypeWithIgnoreWithStmt] with open('test', 'r') as f: # type: int # type: ignore pass [case testFastParseTypeWithIgnoreForStmt] for i in (1, 2, 3, 100): # type: str # type: ignore pass [case testFastParseVariableCommentThenIgnore] a="test" # type: int #comment # type: ignore # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testFastParseProperty] class C: @property def x(self) -> str: pass @x.setter def x(self, value: str) -> None: pass [builtins fixtures/property.pyi] [case testFastParseConditionalProperty] class C: if bool(): @property def x(self) -> str: pass @x.setter def x(self, value: str) -> None: pass [builtins fixtures/property.pyi] [case testFastParsePerArgumentAnnotations] class A: pass class B: pass class C: pass class D: pass class E: pass class F: pass def f(a, # type: A b = None, # type: B *args, # type: C d = None, # type: D e, # type: E **kwargs # type: F ): reveal_type(a) # N: Revealed type is '__main__.A' reveal_type(b) # N: Revealed type is 'Union[__main__.B, None]' reveal_type(args) # N: Revealed type is 'builtins.tuple[__main__.C]' reveal_type(d) # N: Revealed type is 'Union[__main__.D, None]' reveal_type(e) # N: Revealed type is '__main__.E' reveal_type(kwargs) # N: Revealed type is 'builtins.dict[builtins.str, __main__.F]' [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotationsWithReturn] class A: pass class B: pass class C: pass class D: pass class E: pass class F: pass def f(a, # type: A b = None, # type: B *args, # type: C d = None, # type: D e, # type: E **kwargs # type: F ): # type: (...) 
-> int reveal_type(a) # N: Revealed type is '__main__.A' reveal_type(b) # N: Revealed type is 'Union[__main__.B, None]' reveal_type(args) # N: Revealed type is 'builtins.tuple[__main__.C]' reveal_type(d) # N: Revealed type is 'Union[__main__.D, None]' reveal_type(e) # N: Revealed type is '__main__.E' reveal_type(kwargs) # N: Revealed type is 'builtins.dict[builtins.str, __main__.F]' return "not an int" # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotationsWithAnnotatedBareStar] def f(*, # type: int # E: bare * has associated type comment x # type: str ): # type: (...) -> int pass [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotationsWithReturnAndBareStar] def f(*, x # type: str ): # type: (...) -> int reveal_type(x) # N: Revealed type is 'builtins.str' return "not an int" # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotations_python2] class A: pass class B: pass class C: pass class D: pass def f(a, # type: A b = None, # type: B *args # type: C # kwargs not tested due to lack of 2.7 dict fixtures ): reveal_type(a) # N: Revealed type is '__main__.A' reveal_type(b) # N: Revealed type is 'Union[__main__.B, None]' reveal_type(args) # N: Revealed type is 'builtins.tuple[__main__.C]' [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotationsWithReturn_python2] class A: pass class B: pass class C: pass class D: pass def f(a, # type: A b = None, # type: B *args # type: C # kwargs not tested due to lack of 2.7 dict fixtures ): # type: (...) 
-> int reveal_type(a) # N: Revealed type is '__main__.A' reveal_type(b) # N: Revealed type is 'Union[__main__.B, None]' reveal_type(args) # N: Revealed type is 'builtins.tuple[__main__.C]' return "not an int" # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/dict.pyi] [out] [case testFasterParseTooManyArgumentsAnnotation] def f(): # E: Type signature has too many arguments # type: (int) -> None pass f() f(1) # E: Too many arguments for "f" [case testFasterParseTooFewArgumentsAnnotation] def f(x, y): # E: Type signature has too few arguments # type: (int) -> None x() y() f(1, 2) f(1) # E: Too few arguments for "f" [case testFasterParseTooManyArgumentsAnnotation_python2] def f(): # E: Type signature has too many arguments # type: (int) -> None pass f() f(1) # E: Too many arguments for "f" [case testFasterParseTooFewArgumentsAnnotation_python2] def f(x, y): # E: Type signature has too few arguments # type: (int) -> None x() y() f(1, 2) f(1) # E: Too few arguments for "f" [case testFasterParseTypeCommentError_python2] from typing import Tuple def f(a): # type: (Tuple(int, int)) -> int pass [out] main:2: error: Invalid type comment or annotation main:2: note: Suggestion: use Tuple[...] instead of Tuple(...) [case testFasterParseTypeErrorList_python2] from typing import List def f(a): # type: (List(int)) -> int pass [builtins_py2 fixtures/floatdict_python2.pyi] [out] main:2: error: Invalid type comment or annotation main:2: note: Suggestion: use List[...] instead of List(...) [case testFasterParseTypeErrorCustom] from typing import TypeVar, Generic T = TypeVar('T') class Foo(Generic[T]): pass def f(a: Foo(int)) -> int: pass [out] main:7: error: Invalid type comment or annotation main:7: note: Suggestion: use Foo[...] instead of Foo(...) 
[case testFastParseMatMul] from typing import Any x = None # type: Any x @ 1 x @= 1 [case testIncorrectTypeCommentIndex] from typing import Dict x = None # type: Dict[x: y] [out] main:3: error: syntax error in type comment [case testPrintStatementTrailingCommaFastParser_python2] print 0, print 1, 2, [case testFastParserShowsMultipleErrors] def f(x): # E: Type signature has too few arguments # type: () -> None pass def g(): # E: Type signature has too many arguments # type: (int) -> None pass [case testFastParseMalformedAssert] assert 1, 2 assert (1, 2) # E: Assertion is always true, perhaps remove parentheses? assert (1, 2), 3 # E: Assertion is always true, perhaps remove parentheses? assert () assert (1,) # E: Assertion is always true, perhaps remove parentheses? [case testFastParseAssertMessage] assert 1 assert 1, 2 assert 1, 1+2 assert 1, 1+'test' # E: Unsupported operand types for + ("int" and "str") assert 1, f() # E: Name 'f' is not defined [case testFastParserConsistentFunctionTypes] def f(x, y, z): # type: (int, int, int) -> int pass def f(x, # type: int # E: Function has duplicate type signatures y, # type: int z # type: int ): # type: (int, int, int) -> int pass def f(x, # type: int y, # type: int z # type: int ): # type: (...) -> int pass def f(x, y, z): # type: (int, int, int) -> int pass def f(x) -> int: # E: Function has duplicate type signatures # type: (int) -> int pass def f(x: int, y: int, z: int): # type: (...) 
-> int pass def f(x: int): # E: Function has duplicate type signatures # type: (int) -> int pass [case testFastParserDuplicateNames] def f(x, y, z): pass def g(x, y, x): # E: Duplicate argument 'x' in function definition pass def h(x, y, *x): # E: Duplicate argument 'x' in function definition pass def i(x, y, *z, **z): # E: Duplicate argument 'z' in function definition pass def j(x: int, y: int, *, x: int = 3): # E: Duplicate argument 'x' in function definition pass def k(*, y, z, y): # E: Duplicate argument 'y' in function definition pass lambda x, y, x: ... # E: Duplicate argument 'x' in function definition [case testFastParserDuplicateNames_python2] def f(x, y, z): pass def g(x, y, x): # E: Duplicate argument 'x' in function definition pass def h(x, y, *x): # E: Duplicate argument 'x' in function definition pass def i(x, y, *z, **z): # E: Duplicate argument 'z' in function definition pass def j(x, (y, y), z): # E: Duplicate argument 'y' in function definition pass def k(x, (y, x)): # E: Duplicate argument 'x' in function definition pass def l((x, y), (z, x)): # E: Duplicate argument 'x' in function definition pass def m(x, ((x, y), z)): # E: Duplicate argument 'x' in function definition pass lambda x, (y, x): None # E: Duplicate argument 'x' in function definition [case testNoCrashOnImportFromStar] from pack import * [file pack/__init__.py] from . import * [case testNoCrashOnImportFromStarNested] import blamodule [file blamodule/__init__.py] from . import command from . import backends [file blamodule/backends/__init__.py] from .Bla import Bla Bla().method() [file blamodule/backends/Bla.py] from .. import * class Bla: def method(self) -> str: return command.call() [file blamodule/command.py] def call() -> str: pass [builtins fixtures/module.pyi] [case testNoCrashOnImportFromStarPython2] # flags: --py2 from . 
import * # E: No parent module -- cannot perform relative import [case testSpuriousTrailingComma_python2] from typing import Optional def update_state(tid, # type: int vid, # type: int update_ts=None, # type: Optional[float], ): # type: (...) -> str pass [out] main:3: error: Syntax error in type annotation main:3: note: Suggestion: Is there a spurious trailing comma? mypy-0.761/test-data/unit/check-final.test0000644€tŠÔÚ€2›s®0000007134513576752246024644 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for final qualifier -- -- Definitions [case testFinalDefiningModuleVar] from typing import Final x: Final = int() y: Final[float] = int() z: Final[int] = int() bad: Final[str] = int() # E: Incompatible types in assignment (expression has type "int", variable has type "str") reveal_type(x) # N: Revealed type is 'builtins.int' reveal_type(y) # N: Revealed type is 'builtins.float' reveal_type(z) # N: Revealed type is 'builtins.int' [out] [case testFinalDefiningInstanceVar] from typing import Final class C: x: Final = int() y: Final[float] = int() z: Final[int] = int() bad: Final[str] = int() # E: Incompatible types in assignment (expression has type "int", variable has type "str") class D(C): pass reveal_type(D.x) # N: Revealed type is 'builtins.int' reveal_type(D.y) # N: Revealed type is 'builtins.float' reveal_type(D.z) # N: Revealed type is 'builtins.int' reveal_type(D().x) # N: Revealed type is 'builtins.int' reveal_type(D().y) # N: Revealed type is 'builtins.float' reveal_type(D().z) # N: Revealed type is 'builtins.int' [out] [case testFinalDefiningInstanceVarImplicit] from typing import Final, Tuple, Any class C: def __init__(self, x: Tuple[int, Any]) -> None: self.x: Final = x self.y: Final[float] = 1 reveal_type(C((1, 2)).x) # N: Revealed type is 'Tuple[builtins.int, Any]' reveal_type(C((1, 2)).y) # N: Revealed type is 'builtins.float' [out] [case testFinalBadDefinitionTooManyArgs] from typing import Final x: Final[int, str] # E: Final name must be 
initialized with a value \ # E: Final[...] takes at most one type argument reveal_type(x) # N: Revealed type is 'builtins.int' class C: def __init__(self) -> None: self.x: Final[float, float] = 1 # E: Final[...] takes at most one type argument reveal_type(C().x) # N: Revealed type is 'builtins.float' [out] [case testFinalInvalidDefinitions] # Errors are shown in a different order with the new analyzer. from typing import Final, Any x = y = 1 # type: Final[float] # E: Invalid final declaration z: Any z[0]: Final[int] # E: Invalid final declaration \ # E: Unexpected type declaration [out] [case testFinalDefiningInstanceVarStubs] # Allow skipping r.h.s. import mod [file mod.pyi] from typing import Final x: Final # E: Type in Final[...] can only be omitted if there is an initializer y: Final[int] class C: x: Final # E: Type in Final[...] can only be omitted if there is an initializer y: Final[int] def __init__(self) -> None: self.z: Final # E: Type in Final[...] can only be omitted if there is an initializer reveal_type(x) # N: Revealed type is 'Any' reveal_type(C.x) # N: Revealed type is 'Any' v: C reveal_type(v.z) # N: Revealed type is 'Any' [out] [case testFinalDefiningFunc] from typing import final @final # E: @final cannot be used with non-method functions def f(x: int) -> None: ... [out] [case testFinalDefiningFuncOverloaded] from typing import final, overload @overload def f(x: int) -> int: ... @overload def f(x: str) -> str: ... @final # E: @final cannot be used with non-method functions def f(x): pass [out] [case testFinalDefiningMeth] from typing import final class C: @final def f(self, x: int) -> None: ... reveal_type(C().f) # N: Revealed type is 'def (x: builtins.int)' [out] [case testFinalDefiningMethOverloaded] from typing import final, overload class C: @overload def f(self, x: int) -> int: ... @overload def f(self, x: str) -> str: ... @final def f(self, x): pass @overload def bad(self, x: int) -> int: ... 
@final # E: @final should be applied only to overload implementation @overload def bad(self, x: str) -> str: ... def bad(self, x): pass reveal_type(C().f) # N: Revealed type is 'Overload(def (x: builtins.int) -> builtins.int, def (x: builtins.str) -> builtins.str)' [out] [case testFinalDefiningMethOverloadedStubs] from mod import C reveal_type(C().f) [file mod.pyi] from typing import final, overload class C: @final @overload def f(self, x: int) -> int: ... @overload def f(self, x: str) -> str: ... @overload def bad(self, x: int) -> int: ... @final # Error! @overload def bad(self, x: str) -> str: ... [out] tmp/mod.pyi:12: error: In a stub file @final must be applied only to the first overload main:3: note: Revealed type is 'Overload(def (x: builtins.int) -> builtins.int, def (x: builtins.str) -> builtins.str)' [case testFinalDefiningProperty] from typing import final class C: @final @property def f(self) -> int: pass @property @final def g(self) -> int: pass reveal_type(C().f) # N: Revealed type is 'builtins.int' reveal_type(C().g) # N: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] [out] [case testFinalDefiningOuterOnly] from typing import Final, Callable, Tuple, Any x: Tuple[Final] # E: Final can be only used as an outermost qualifier in a variable annotation y: Callable[[], Tuple[Final[int]]] # E: Final can be only used as an outermost qualifier in a variable annotation [out] [case testFinalDefiningNotInMethod] from typing import Final def f(x: Final[int]) -> int: ... # E: Final can be only used as an outermost qualifier in a variable annotation def g(x: int) -> Final[int]: ... # E: Final can be only used as an outermost qualifier in a variable annotation [out] [case testFinalDefiningNotInMethodExtensions] from typing_extensions import Final def f(x: Final[int]) -> int: ... # E: Final can be only used as an outermost qualifier in a variable annotation def g(x: int) -> Final[int]: ... 
# E: Final can be only used as an outermost qualifier in a variable annotation [out] [case testFinalDefiningNoRhs] from typing import Final x: Final # E: Type in Final[...] can only be omitted if there is an initializer y: Final[int] # E: Final name must be initialized with a value class C: x: Final # E: Type in Final[...] can only be omitted if there is an initializer y: Final[int] # E: Final name must be initialized with a value def __init__(self) -> None: self.z: Final # E: Type in Final[...] can only be omitted if there is an initializer reveal_type(x) # N: Revealed type is 'Any' reveal_type(y) # N: Revealed type is 'builtins.int' reveal_type(C().x) # N: Revealed type is 'Any' reveal_type(C().y) # N: Revealed type is 'builtins.int' reveal_type(C().z) # N: Revealed type is 'Any' [out] [case testFinalDefiningNoRhsSubclass] from typing import Final class A: x: Final[int] # E: Final name must be initialized with a value class B(A): x = 1 # E: Cannot assign to final name "x" def __init__(self) -> None: self.x = 1 # E: Cannot assign to final attribute "x" [out] [case testFinalDefiningNoTypevarsExplicit] from typing import Final, TypeVar, Generic, Tuple, Any T = TypeVar('T') d: Any class C(Generic[T]): x: Final[Tuple[T, T]] = d # E: Final name declared in class body cannot depend on type variables [out] [case testFinalDefiningTypevarsImplicit] from typing import Final, TypeVar, Generic, Tuple, Any T = TypeVar('T') class C(Generic[T]): def __init__(self, x: Tuple[T, T]) -> None: self.x: Final = x self.y: Final = 1 reveal_type(C((1, 2)).x) # N: Revealed type is 'Tuple[builtins.int*, builtins.int*]' C.x # E: Cannot access final instance attribute "x" on class object \ # E: Access to generic instance variables via class is ambiguous C.y # E: Cannot access final instance attribute "y" on class object [out] [case testFinalDefiningNotInOtherMethod] from typing import Final, Any, Tuple class C: def meth(self, x: Tuple[int, Any]) -> None: self.x: Final = x # E: Can only 
declare a final attribute in class body or __init__ self.y: Final[float] = 1 # E: Can only declare a final attribute in class body or __init__ [out] [case testFinalDefiningOnlyOnSelf] from typing import Final, Any, Tuple class U: x: Any y: Any class C: def __init__(self, x: Tuple[int, Any]) -> None: slf = U() slf.x: Final = x # E: Final can be only applied to a name or an attribute on self slf.y: Final[float] = 1 # E: Type cannot be declared in assignment to non-self attribute \ # E: Final can be only applied to a name or an attribute on self [out] [case testFinalNotInProtocol] from typing import Final, final, Protocol, overload class P(Protocol): x: Final[float] = 1 # E: Protocol member cannot be final @final # E: Protocol member cannot be final def meth(self, x) -> int: pass @overload def other(self, x: int) -> int: ... @overload def other(self, x: str) -> str: ... @final # E: Protocol member cannot be final def other(self, x): pass [out] [case testFinalNotInLoops] from typing import Final for i in [1, 2, 3]: x: Final = i # E: Cannot use Final inside a loop while True: y: Final = True # E: Cannot use Final inside a loop [builtins fixtures/list.pyi] [out] [case testFinalDelayedDefinition] from typing import Final class C: x: Final[int] # OK, defined in __init__ bad: Final[int] # E: Final name must be initialized with a value def __init__(self, x: int) -> None: self.x = x # OK, deferred definition self.x = 2 # E: Cannot assign to final attribute "x" def meth(self) -> None: self.x = 2 # E: Cannot assign to final attribute "x" c: C c.x = 3 # E: Cannot assign to final attribute "x" class D(C): x = 4 # E: Cannot assign to final name "x" d: D d.x = 5 # E: Cannot assign to final attribute "x" [out] [case testFinalDelayedDefinitionOtherMethod] from typing import Final class C: x: Final[int] # E: Final name must be initialized with a value def meth(self) -> None: self.x = 2 # E: Cannot assign to final attribute "x" [out] -- Reassignments [case testFinalReassignModuleVar] # 
flags: --allow-redefinition from typing import Final x: Final = 1 x x = 2 # E: Cannot assign to final name "x" def f() -> int: global x x = 3 # No error here is okay since we reported an error above return x x2: Final = 1 x2 def f2() -> None: global x2 x2 = 1 # E: Cannot assign to final name "x2" y = 1 y y: Final = 2 # E: Cannot redefine an existing name as final y = 3 # E: Cannot assign to final name "y" z: Final = 1 z: Final = 2 # E: Cannot redefine an existing name as final z = 3 # E: Cannot assign to final name "z" [case testFinalReassignModuleVar2] # flags: --allow-redefinition from typing import Final x: Final = 1 x def f() -> int: global x x = 3 # E: Cannot assign to final name "x" return x y = 1 y y = 2 y y: Final = 3 # E: Cannot redefine an existing name as final [case testFinalReassignModuleVar3] # flags: --disallow-redefinition # Error formatting is subtly different with new analyzer. from typing import Final x: Final = 1 x x = 2 # E: Cannot assign to final name "x" def f() -> int: global x x = 3 # E: Cannot assign to final name "x" return x x2: Final = 1 x2 def f2() -> None: global x2 x2 = 1 # E: Cannot assign to final name "x2" y = 1 # E: Cannot assign to final name "y" y y: Final = 2 # E: Cannot redefine an existing name as final y = 3 # E: Cannot assign to final name "y" z: Final = 1 z: Final = 2 # E: Cannot redefine an existing name as final z = 3 # E: Cannot assign to final name "z" [case testFinalReassignModuleReexport] # Error formatting is subtly different with the new analyzer. from typing import Final from lib import X from lib.mod import ID X = 1 # Error! ID: Final = 1 # Two errors! ID = 1 # Error! [file lib/__init__.pyi] from lib.const import X as X [file lib/mod.pyi] from lib.const import * [file lib/const.pyi] from typing import Final ID: Final # Error! X: Final[int] [out] tmp/lib/const.pyi:3: error: Type in Final[...] 
can only be omitted if there is an initializer main:8: error: Cannot assign to final name "X" main:9: error: Cannot redefine an existing name as final main:10: error: Cannot assign to final name "ID" [case testFinalReassignFuncScope] from typing import Final def f() -> None: nl: Final = 0 x: Final = 1 x = 1 # E: Cannot assign to final name "x" y: Final = 1 y: Final = 2 # E: Cannot redefine an existing name as final def nested() -> None: nonlocal nl nl = 1 # E: Cannot assign to final name "nl" [out] [case testFinalReassignModuleVarExternal] import mod mod.x = 2 # E: Cannot assign to final name "x" [file mod.pyi] from typing import Final x: Final[int] [out] [case testFinalReassignInstanceVarClassBody] from typing import Final class C: x: Final = 1 x = 2 # E: Cannot assign to final name "x" y = 1 # E: Cannot assign to final name "y" y: Final = 2 # E: Cannot redefine an existing name as final [out] [case testFinalReassignInstanceVarInit] from typing import Final class C: def __init__(self) -> None: self.x: Final = 1 self.y = 1 self.y: Final = 2 # E: Cannot redefine an existing name as final def meth(self) -> None: self.x = 2 # E: Cannot assign to final attribute "x" [out] [case testFinalReassignInstanceVarClassVsInit] from typing import Final class C: y: Final = 1 def __init__(self) -> None: # Methods are processed after top-level in new analyzer. 
self.x: Final = 1 # E: Cannot redefine an existing name as final self.y = 2 # E: Cannot assign to final attribute "y" x = 2 [out] [case testFinalReassignInstanceVarMethod] from typing import Final class C: x: Final = 1 def __init__(self) -> None: self.y: Final = 1 def meth(self) -> None: self.x = 2 # E: Cannot assign to final attribute "x" self.y = 2 # E: Cannot assign to final attribute "y" def other(self) -> None: self.x = 2 # E: Cannot assign to final attribute "x" self.y = 2 # E: Cannot assign to final attribute "y" @classmethod def cm(cls) -> None: cls.x = 2 # E: Cannot assign to final attribute "x" cls.y # E: Cannot access final instance attribute "y" on class object [builtins fixtures/classmethod.pyi] [out] [case testFinalReassignInstanceVarExternalClass] from typing import Final class C: x: Final = 1 def __init__(self) -> None: self.y: Final = 1 class D(C): pass C.x = 2 # E: Cannot assign to final attribute "x" D.x = 2 # E: Cannot assign to final attribute "x" D.y = 2 # E: Cannot access final instance attribute "y" on class object \ # E: Cannot assign to final attribute "y" [out] [case testFinalReassignInstanceVarExternalInstance] from typing import Final class C: x: Final = 1 def __init__(self) -> None: self.y: Final = 1 class D(C): pass C().x = 2 # E: Cannot assign to final attribute "x" D().x = 2 # E: Cannot assign to final attribute "x" D().y = 2 # E: Cannot assign to final attribute "y" [out] [case testFinalWorksWithComplexTargets] from typing import Final, Any y: Final[Any] = 1 x = a, (b, y), c = 2, (2, 2), 2 # E: Cannot assign to final name "y" t, *y, s = u = [2, 2, 2] # E: Cannot assign to final name "y" [builtins fixtures/list.pyi] [out] [case testFinalInplaceAssign] from typing import Final class A: # no such things in fixtures def __add__(self, other: A) -> A: ... class B: def __add__(self, other: B) -> B: ... def __iadd__(self, other: B) -> B: ... 
a: Final = A() b: Final = B() class C: a: Final = A() b: Final = B() class D(C): pass a += A() # E: Cannot assign to final name "a" b += B() # E: Cannot assign to final name "b" D().a += A() # E: Cannot assign to final attribute "a" D().b += B() # E: Cannot assign to final attribute "b" [out] -- Overriding [case testFinalOverridingVarClassBody] from typing import Final # We use properties in this tests and below because we want to check # that any existing variable before final doesn't affect logic of # subsequent overrides but writable attributes cannot be overridden by final. class A: @property def x(self) -> int: ... @property def y(self) -> int: ... class B(A): x: Final = 1 def __init__(self) -> None: self.y: Final = 1 class C(B): x: int = 2 # E: Cannot assign to final name "x" y: int = 2 # E: Cannot assign to final name "y" x = 3 # E: Cannot assign to final name "x" y = 3 # E: Cannot assign to final name "y" class D(C): pass D.x = 4 # E: Cannot assign to final attribute "x" D.y = 4 # E: Cannot assign to final attribute "y" [builtins fixtures/property.pyi] [out] [case testFinalOverridingVarClassBodyExplicit] from typing import Final class A: @property def x(self) -> int: ... @property def y(self) -> int: ... class B(A): x: Final = 1 def __init__(self) -> None: self.y: Final = 1 class C(B): x: Final = 2 # E: Cannot override final attribute "x" (previously declared in base class "B") y: Final = 2 # E: Cannot override final attribute "y" (previously declared in base class "B") [builtins fixtures/property.pyi] [out] [case testFinalOverridingVarInit] from typing import Final class A: @property def x(self) -> int: ... @property def y(self) -> int: ... 
class B(A): x: Final = 1 def __init__(self) -> None: self.y: Final = 1 class C(B): def __init__(self) -> None: self.x = 2 # E: Cannot assign to final attribute "x" self.y = 2 # E: Cannot assign to final attribute "y" def meth(self) -> None: self.x = 3 # E: Cannot assign to final attribute "x" self.y = 3 # E: Cannot assign to final attribute "y" [builtins fixtures/property.pyi] [out] [case testFinalOverridingVarInit2] from typing import Final class A: @property def x(self) -> int: ... @property def y(self) -> int: ... class B(A): x: Final = 1 def __init__(self) -> None: self.y: Final = 1 class C(B): def __init__(self) -> None: self.x: Final = 2 # E: Cannot override final attribute "x" (previously declared in base class "B") self.y: Final = 2 # E: Cannot override final attribute "y" (previously declared in base class "B") [builtins fixtures/property.pyi] [out] [case testFinalOverridingVarOtherMethod] from typing import Final class A: @property def x(self) -> int: ... @property def y(self) -> int: ... class B(A): x: Final = 1 def __init__(self) -> None: self.y: Final = 1 class C(B): def meth(self) -> None: self.x: int = 2 # E: Cannot assign to final attribute "x" self.y: int = 2 # E: Cannot assign to final attribute "y" self.x = 3 # E: Cannot assign to final attribute "x" self.y = 3 # E: Cannot assign to final attribute "y" [builtins fixtures/property.pyi] [out] [case testFinalOverridingVarMultipleInheritanceClass] from typing import Final, Any class A: x: Final[Any] = 1 class B: @property def x(self) -> int: ... class C(A, B): ... class D(B, A): ... 
# E: Cannot override final attribute "x" (previously declared in base class "A") C.x = 3 # E: Cannot assign to final attribute "x" C().x = 4 # E: Cannot assign to final attribute "x" D().x = 4 # E: Cannot assign to final attribute "x" \ # E: Property "x" defined in "B" is read-only [builtins fixtures/property.pyi] [out] [case testFinalOverridingVarMultipleInheritanceInit] from typing import Final, Any class A: def __init__(self) -> None: self.x: Final[Any] = 1 class B: @property def x(self) -> int: ... class C(A, B): ... class D(B, A): ... # E: Cannot override final attribute "x" (previously declared in base class "A") C.x = 3 # E: Cannot access final instance attribute "x" on class object \ # E: Cannot assign to final attribute "x" C().x = 4 # E: Cannot assign to final attribute "x" [builtins fixtures/property.pyi] [out] [case testFinalOverridingVarMultipleInheritanceMixed] from typing import Final class A: x: Final = 1 class B: def __init__(self) -> None: self.x = 2 class C(A, B): ... # E: Cannot override writable attribute "x" with a final one class D(B, A): ... 
# E: Cannot override final attribute "x" (previously declared in base class "A") C.x = 3 # E: Cannot assign to final attribute "x" D.x = 3 # E: Cannot assign to final attribute "x" C().x = 4 # E: Cannot assign to final attribute "x" D().x = 4 # E: Cannot assign to final attribute "x" [out] [case testFinalOverridingVarWithMethod] from typing import Final, Any class A: x: Final[Any] = 1 def __init__(self) -> None: self.y: Final[Any] = 1 class B(A): def x(self) -> None: pass # E: Cannot override final attribute "x" (previously declared in base class "A") def y(self) -> None: pass # E: Cannot override final attribute "y" (previously declared in base class "A") class C(A): @property # E: Cannot override final attribute "x" (previously declared in base class "A") def x(self) -> None: pass @property # E: Cannot override final attribute "y" (previously declared in base class "A") def y(self) -> None: pass [builtins fixtures/property.pyi] [out] [case testFinalOverridingVarWithMethodClass] from typing import Final, Any class A: x: Final[Any] = 1 def __init__(self) -> None: self.y: Final[Any] = 1 class B(A): @classmethod # E: Cannot override final attribute "x" (previously declared in base class "A") def x(self) -> None: pass @classmethod # E: Cannot override final attribute "y" (previously declared in base class "A") def y(self) -> None: pass [builtins fixtures/classmethod.pyi] [out] [case testFinalOverridingMethodRegular] from typing import final class B: @final def meth(self) -> None: ... class C(B): def meth(self) -> None: ... # E: Cannot override final attribute "meth" (previously declared in base class "B") [out] [case testFinalOverridingMethodInitNew] from typing import final class B: @final def __init__(self) -> None: ... @final def __new__(cls) -> B: ... class C(B): def __init__(self) -> None: ... # E: Cannot override final attribute "__init__" (previously declared in base class "B") def __new__(cls) -> C: ... 
# E: Cannot override final attribute "__new__" (previously declared in base class "B") [out] [case testFinalOverridingMethodWithVar] from typing import final, Final, Any a: Any class A: @final def f(self) -> None: pass @final @property def p(self) -> int: pass class B(A): f = a # E: Cannot override final attribute "f" (previously declared in base class "A") p = a # E: Cannot override final attribute "p" (previously declared in base class "A") class C(A): f: Any # E: Cannot override final attribute "f" (previously declared in base class "A") p: Any # E: Cannot override final attribute "p" (previously declared in base class "A") class D(A): f: Final = a # E: Cannot override final attribute "f" (previously declared in base class "A") p: Final = a # E: Cannot override final attribute "p" (previously declared in base class "A") [builtins fixtures/property.pyi] [out] [case testFinalOverridingMethodWithVarImplicit] from typing import final, Any, Final a: Any class A: @final def f(self) -> None: pass @final @classmethod def c(cls) -> int: pass class B(A): def __init__(self) -> None: self.f: Any # E: Cannot assign to final attribute "f" \ # E: Cannot override final attribute "f" (previously declared in base class "A") self.c: Any # E: Cannot assign to final attribute "c" \ # E: Cannot override final attribute "c" (previously declared in base class "A") B().f = a # E: Cannot assign to final attribute "f" B().c = a # E: Cannot assign to final attribute "c" class C(A): def __init__(self) -> None: self.f: Final = a # E: Cannot override final attribute "f" (previously declared in base class "A") self.c: Final = a # E: Cannot override final attribute "c" (previously declared in base class "A") [builtins fixtures/classmethod.pyi] [out] [case testFinalCanOverrideMethodWithFinal] from typing import final class B: def meth(self) -> None: ... class C(B): @final # OK def meth(self) -> None: ... 
[out] [case testFinalOverridingMethodMultipleInheritance] from typing import final class A: def m(self) -> int: pass class B: @final def m(self) -> int: pass class C(A, B): pass # E: Cannot override final attribute "m" (previously declared in base class "B") class D(B, A): pass [out] [case testFinalOverridingMethodMultipleInheritanceVar] from typing import final, Any class A: m: Any class B: @final def m(self) -> int: pass class C(A, B): pass # E: Cannot override final attribute "m" (previously declared in base class "B") class D(B, A): pass # E: Cannot override writable attribute "m" with a final one [out] [case testFinalOverridingClassMethod] from typing import final class B: @classmethod @final def f(cls) -> int: pass class C(B): @classmethod # E: Cannot override final attribute "f" (previously declared in base class "B") def f(cls) -> int: pass [builtins fixtures/classmethod.pyi] [out] [case testFinalOverridingStaticMethod] from typing import final class B: @staticmethod @final def f() -> int: pass @final @staticmethod def g() -> int: pass class C(B): @staticmethod # E: Cannot override final attribute "f" (previously declared in base class "B") def f() -> int: pass @staticmethod # E: Cannot override final attribute "g" (previously declared in base class "B") def g() -> int: pass [builtins fixtures/staticmethod.pyi] [out] [case testFinalOverridingProperty] from typing import final class B: @final @property def f(self) -> int: pass @property @final def g(self) -> int: pass class C(B): @property # E: Cannot override final attribute "f" (previously declared in base class "B") def f(self) -> int: pass @property # E: Cannot override final attribute "g" (previously declared in base class "B") def g(self) -> int: pass [builtins fixtures/property.pyi] [out] [case testFinalOverridingMethodOverloads] from typing import final, overload class B: @overload def f(self, x: int) -> int: ... @overload def f(self, x: str) -> str: ... 
@final def f(self, x): pass class C(B): @overload # E: Cannot override final attribute "f" (previously declared in base class "B") def f(self, x: int) -> int: ... @overload def f(self, x: str) -> str: ... def f(self, x): pass [out] [case testFinalClassNoInheritance] from typing import final @final class B: ... class C(B): # E: Cannot inherit from final class "B" pass class D(C): # E: Cannot inherit from final class "B" pass [out] [case testFinalClassNoInheritanceMulti] from typing import final class A: ... @final class B: ... class C(B, A): # E: Cannot inherit from final class "B" pass class D(A, B): # E: Cannot inherit from final class "B" pass [out] [case testFinalCantOverrideWriteable] from typing import Any, Final, final class B: x: Any @property def y(self) -> Any: ... @y.setter def y(self, x: Any) -> None: ... class C(B): x: Final = 1 # E: Cannot override writable attribute "x" with a final one y: Final = 1 # E: Cannot override writable attribute "y" with a final one class D(B): @final # E: Cannot override writable attribute "x" with a final one def x(self) -> int: ... @final # E: Cannot override writable attribute "y" with a final one def y(self) -> int: ... [builtins fixtures/property.pyi] [out] [case testFinalCanUseTypingExtensions] from typing_extensions import final, Final x: Final = 1 x = 2 # E: Cannot assign to final name "x" class S: x: Final = 1 S.x = 2 # E: Cannot assign to final attribute "x" class B: @final def meth(self) -> None: ... class C(B): def meth(self) -> None: ... # E: Cannot override final attribute "meth" (previously declared in base class "B") @final class F: ... class E(F): ... # E: Cannot inherit from final class "F" [out] [case testFinalCanUseTypingExtensionsAliased] from typing_extensions import final as f, Final as F x: F = 1 x = 2 # E: Cannot assign to final name "x" class S: x: F = 1 S.x = 2 # E: Cannot assign to final attribute "x" class B: @f def meth(self) -> None: ... class C(B): def meth(self) -> None: ... 
# E: Cannot override final attribute "meth" (previously declared in base class "B") @f class D(C): ... class E(D): ... # E: Cannot inherit from final class "D" [out] [case testFinalMultiassignAllowed] from typing import Final class A: x: Final[int] y: Final[int] def __init__(self) -> None: self.x, self.y = 1, 2 class B: x: Final[int] y: Final[int] def __init__(self) -> None: self.x = self.y = 1 [out] [case testFinalInDeferredMethod] from typing_extensions import Final class A: def __init__(self) -> None: self.x = 10 # type: Final undefined # type: ignore mypy-0.761/test-data/unit/check-flags.test0000644€tŠÔÚ€2›s®0000007544413576752246024653 0ustar jukkaDROPBOX\Domain Users00000000000000[case testUnannotatedFunction] # flags: --disallow-untyped-defs def f(x): pass [out] main:2: error: Function is missing a type annotation [case testUnannotatedArgument] # flags: --disallow-untyped-defs def f(x) -> int: pass [out] main:2: error: Function is missing a type annotation for one or more arguments [case testNoArgumentFunction] # flags: --disallow-untyped-defs def f() -> int: pass [out] [case testUnannotatedReturn] # flags: --disallow-untyped-defs def f(x: int): pass [out] main:2: error: Function is missing a return type annotation [case testUnannotatedReturnWithFastParser] # flags: --disallow-untyped-defs def f(x: int): pass [out] main:2: error: Function is missing a return type annotation [case testLambda] # flags: --disallow-untyped-defs lambda x: x [out] [case testUntypedDef] # flags: --disallow-untyped-defs def f(): 1 + "str" [out] main:2: error: Function is missing a return type annotation main:2: note: Use "-> None" if function does not return a value [case testUnannotatedReturnWithOnlySelfArgument] # flags: --disallow-untyped-defs def f(self): pass [out] main:2: error: Function is missing a return type annotation main:2: note: Use "-> None" if function does not return a value [case testUnannotatedReturnWithNontrivialReturn] # flags: --disallow-untyped-defs def f(): 
return 1 [out] main:2: error: Function is missing a return type annotation [case testUntypedAsyncDef] # flags: --disallow-untyped-defs async def f(): # E: Function is missing a return type annotation \ # N: Use "-> None" if function does not return a value pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncUnannotatedArgument] # flags: --disallow-untyped-defs async def f(x) -> None: # E: Function is missing a type annotation for one or more arguments pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncUnannotatedReturn] # flags: --disallow-untyped-defs from typing import Any async def f(x: int): # E: Function is missing a return type annotation pass # Make sure explicit Any is allowed. async def g(x: int) -> Any: pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testDisallowUntypedDefsUntypedDecorator] # flags: --disallow-untyped-decorators def d(p): return p @d # E: Untyped decorator makes function "f" untyped def f(i: int) -> int: return i [case testDisallowUntypedDecoratorsUnresolvedDecorator] # flags: --disallow-untyped-decorators --ignore-missing-imports from nonexistent import d @d # E: Untyped decorator makes function "f" untyped def f(i: int) -> int: return i [case testDisallowUntypedDecoratorUntypedDef] # flags: --disallow-untyped-decorators def d(p): return p @d # no error def f(): pass [case testDisallowUntypedDecoratorsPartialFunction] # flags: --disallow-untyped-decorators def d(p): return p @d # E: Untyped decorator makes function "f" untyped def f(x) -> None: pass @d # E: Untyped decorator makes function "g" untyped def g(x, y: int): pass @d # E: Untyped decorator makes function "h" untyped def h(x: int): pass [case testDisallowUntypedDecoratorsImpreciseDecorator] # flags: --disallow-untyped-decorators from typing import Any def d(p) -> Any: return p @d # no error def f() -> None: pass [case testDisallowUntypedDecoratorsMultipleDecorators] # 
flags: --disallow-untyped-decorators from typing import Any def d1(p): return p def d2(p): return p def d3(p) -> Any: return p @d1 # E: Untyped decorator makes function "f" untyped @d2 # E: Untyped decorator makes function "f" untyped @d3 # no error @d1 # E: Untyped decorator makes function "f" untyped def f() -> None: pass [case testDisallowUntypedDecoratorsCallableInstance] # flags: --disallow-untyped-decorators from typing import Callable class TypedDecorator: def __call__(self, c: Callable) -> Callable: return function class UntypedDecorator: def __call__(self, c): return function @TypedDecorator() def f() -> None: pass @UntypedDecorator() # E: Untyped decorator makes function "g" untyped def g() -> None: pass @TypedDecorator() @UntypedDecorator() # E: Untyped decorator makes function "h" untyped def h() -> None: pass @UntypedDecorator() # E: Untyped decorator makes function "i" untyped @TypedDecorator() def i() -> None: pass reveal_type(f) # N: Revealed type is 'def (*Any, **Any) -> Any' reveal_type(g) # N: Revealed type is 'Any' reveal_type(h) # N: Revealed type is 'def (*Any, **Any) -> Any' reveal_type(i) # N: Revealed type is 'Any' [case testDisallowUntypedDecoratorsNonCallableInstance] # flags: --disallow-untyped-decorators class Decorator: pass @Decorator() # E: "Decorator" not callable def f() -> None: pass [case testSubclassingAny] # flags: --disallow-subclassing-any from typing import Any FakeClass = None # type: Any class Foo(FakeClass): pass # E: Class cannot subclass 'FakeClass' (has type 'Any') [out] [case testSubclassingAnyMultipleBaseClasses] # flags: --disallow-subclassing-any from typing import Any FakeClass = None # type: Any class ActualClass: pass class Foo(ActualClass, FakeClass): pass # E: Class cannot subclass 'FakeClass' (has type 'Any') [out] [case testSubclassingAnySilentImports] # flags: --disallow-subclassing-any --follow-imports=skip # cmd: mypy -m main [file main.py] from ignored_module import BaseClass class Foo(BaseClass): pass 
[file ignored_module.py] class BaseClass: pass [out] tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any') [case testSubclassingAnySilentImports2] # flags: --disallow-subclassing-any --follow-imports=skip # cmd: mypy -m main [file main.py] import ignored_module class Foo(ignored_module.BaseClass): pass [file ignored_module.py] class BaseClass: pass [out] tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any') [case testWarnNoReturnIgnoresTrivialFunctions] # flags: --warn-no-return def f() -> int: pass def g() -> int: ... def h() -> int: """with docstring""" pass def i() -> int: """with docstring""" ... def j() -> int: u"""with unicode docstring""" pass def k() -> int: """docstring only""" [case testWarnNoReturnWorksWithAlwaysTrue] # flags: --warn-no-return PY3 = True def f() -> int: if PY3: return 0 else: return 0 [builtins fixtures/bool.pyi] [case testWarnNoReturnWorksWithAlwaysFalse] # flags: --warn-no-return PY2 = False def f() -> int: if PY2: return 0 else: return 0 [builtins fixtures/bool.pyi] [case testWarnNoReturnWorksWithMypyTrue] # flags: --warn-no-return MYPY = False def f() -> int: if MYPY: return 0 else: return 0 [builtins fixtures/bool.pyi] [case testNoReturnDisallowsReturn] # flags: --warn-no-return from mypy_extensions import NoReturn def f() -> NoReturn: if bool(): return 5 # E: Return statement in function which does not return else: return # E: Return statement in function which does not return [builtins fixtures/dict.pyi] [case testNoReturnWithoutImplicitReturn] # flags: --warn-no-return from mypy_extensions import NoReturn def no_return() -> NoReturn: pass def f() -> NoReturn: no_return() [builtins fixtures/dict.pyi] [case testNoReturnDisallowsImplicitReturn] # flags: --warn-no-return from mypy_extensions import NoReturn def f() -> NoReturn: # E: Implicit return in function which does not return non_trivial_function = 1 [builtins fixtures/dict.pyi] [case testNoReturnNoWarnNoReturn] # flags: --warn-no-return 
from mypy_extensions import NoReturn def no_return() -> NoReturn: pass def f() -> int: if bool(): return 0 else: no_return() [builtins fixtures/dict.pyi] [case testNoReturnInExpr] # flags: --warn-no-return from mypy_extensions import NoReturn def no_return() -> NoReturn: pass def f() -> int: return 0 reveal_type(f() or no_return()) # N: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [case testNoReturnVariable] # flags: --warn-no-return from mypy_extensions import NoReturn x = 0 # type: NoReturn # E: Incompatible types in assignment (expression has type "int", variable has type "NoReturn") [builtins fixtures/dict.pyi] [case testNoReturnImportFromTyping] from typing import NoReturn def h() -> NoReturn: if bool(): return 5 # E: Return statement in function which does not return else: return # E: Return statement in function which does not return def no_return() -> NoReturn: pass def f() -> NoReturn: no_return() x: NoReturn = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "NoReturn") [builtins fixtures/dict.pyi] [case testShowErrorContextFunction] # flags: --show-error-context def f() -> None: 0 + "" [out] main: note: In function "f": main:3: error: Unsupported operand types for + ("int" and "str") [case testShowErrorContextClass] # flags: --show-error-context class A: 0 + "" [out] main: note: In class "A": main:3: error: Unsupported operand types for + ("int" and "str") [case testShowErrorContextMember] # flags: --show-error-context class A: def f(self, x: int) -> None: self.f("") [out] main: note: In member "f" of class "A": main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testShowErrorContextModule] # flags: --show-error-context import m [file m.py] 0 + "" [out] main:2: note: In module imported here: tmp/m.py:1: error: Unsupported operand types for + ("int" and "str") [case testShowErrorContextTopLevel] # flags: --show-error-context def f() -> None: 0 + "" 0 + "" [out] 
main: note: In function "f": main:3: error: Unsupported operand types for + ("int" and "str") main: note: At top level: main:4: error: Unsupported operand types for + ("int" and "str") [case testShowErrorContextFromHere] # flags: --show-error-context import a [file a.py] import b [file b.py] 0 + "" [out] tmp/a.py:1: note: In module imported here, main:2: note: ... from here: tmp/b.py:1: error: Unsupported operand types for + ("int" and "str") [case testFollowImportsNormal] # flags: --follow-imports=normal from mod import x x + "" [file mod.py] 1 + "" x = 0 [out] tmp/mod.py:1: error: Unsupported operand types for + ("int" and "str") main:3: error: Unsupported operand types for + ("int" and "str") [case testFollowImportsSilent] # flags: --follow-imports=silent from mod import x x + "" # E: Unsupported operand types for + ("int" and "str") [file mod.py] 1 + "" x = 0 [case testFollowImportsSilentTypeIgnore] # flags: --warn-unused-ignores --follow-imports=silent import mod [file mod.py] x = 3 # type: ignore [case testFollowImportsSkip] # flags: --follow-imports=skip from mod import x x + "" [file mod.py] this deliberate syntax error will not be reported [out] [case testFollowImportsError] # flags: --follow-imports=error from mod import x x + "" [file mod.py] deliberate syntax error [out] main:2: error: Import of 'mod' ignored main:2: note: (Using --follow-imports=error, module not passed on command line) [case testIgnoreMissingImportsFalse] from mod import x [out] main:1: error: Cannot find implementation or library stub for module named 'mod' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testIgnoreMissingImportsTrue] # flags: --ignore-missing-imports from mod import x [out] [case testPerFileIncompleteDefsBasic] # flags: --config-file tmp/mypy.ini import standard, incomplete [file standard.py] def incomplete(x) -> int: return 0 [file incomplete.py] def incomplete(x) -> int: # E: Function is missing a type annotation for 
one or more arguments return 0 [file mypy.ini] \[mypy] disallow_incomplete_defs = False \[mypy-incomplete] disallow_incomplete_defs = True [case testPerFileStrictOptionalBasic] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] x = 0 if int(): x = None [file optional.py] x = 0 if int(): x = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [file mypy.ini] \[mypy] strict_optional = False \[mypy-optional] strict_optional = True [case testPerFileStrictOptionalBasicImportStandard] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] from typing import Optional def f(x: int) -> None: pass an_int = 0 # type: int optional_int = None # type: Optional[int] f(an_int) # ints can be used as ints f(optional_int) # optional ints can be used as ints in this file [file optional.py] import standard def f(x: int) -> None: pass standard.an_int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") standard.optional_int = None # OK -- explicitly declared as optional f(standard.an_int) # ints can be used as ints f(standard.optional_int) # E: Argument 1 to "f" has incompatible type "None"; expected "int" [file mypy.ini] \[mypy] strict_optional = False \[mypy-optional] strict_optional = True [case testPerFileStrictOptionalBasicImportOptional] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] import optional def f(x: int) -> None: pass f(optional.x) # OK -- in non-strict Optional context f(optional.y) # OK -- in non-strict Optional context [file optional.py] from typing import Optional def f(x: int) -> None: pass x = 0 # type: Optional[int] y = None # type: None [file mypy.ini] \[mypy] strict_optional = False \[mypy-optional] strict_optional = True [case testPerFileStrictOptionalListItemImportOptional] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] import optional from typing 
import List def f(x: List[int]) -> None: pass f(optional.x) # OK -- in non-strict Optional context f(optional.y) # OK -- in non-strict Optional context [file optional.py] from typing import Optional, List def f(x: List[int]) -> None: pass x = [] # type: List[Optional[int]] y = [] # type: List[int] [file mypy.ini] \[mypy] strict_optional = False \[mypy-optional] strict_optional = True [builtins fixtures/list.pyi] [case testPerFileStrictOptionalComplicatedList] from typing import Union, Optional, List def f() -> None: x = [] # type: Union[List[Optional[str]], str] [builtins fixtures/list.pyi] [case testPerFileStrictOptionalNoneArguments] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] def f(x: int = None) -> None: pass [file optional.py] import standard def f(x: int = None) -> None: pass standard.f(None) [file mypy.ini] \[mypy] strict_optional = False \[mypy-optional] strict_optional = True [case testDisallowImplicitTypesIgnoreMissingTypes] # flags: --ignore-missing-imports --disallow-any-unimported from missing import MyType def f(x: MyType) -> None: # E: Argument 1 to "f" becomes "Any" due to an unfollowed import pass [case testDisallowImplicitTypes] # flags: --disallow-any-unimported from missing import MyType def f(x: MyType) -> None: pass [out] main:2: error: Cannot find implementation or library stub for module named 'missing' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:4: error: Argument 1 to "f" becomes "Any" due to an unfollowed import [case testDisallowImplicitAnyVariableDefinition] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked t: Unchecked = 12 # E: Type of variable becomes "Any" due to an unfollowed import [case testDisallowImplicitAnyGeneric] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked from typing import List def foo(l: List[Unchecked]) -> List[Unchecked]: t = [] # type: 
List[Unchecked] return l [builtins fixtures/list.pyi] [out] main:5: error: Return type becomes "List[Any]" due to an unfollowed import main:5: error: Argument 1 to "foo" becomes "List[Any]" due to an unfollowed import main:6: error: Type of variable becomes "List[Any]" due to an unfollowed import [case testDisallowImplicitAnyInherit] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked from typing import List class C(Unchecked): # E: Base type Unchecked becomes "Any" due to an unfollowed import pass class A(List[Unchecked]): # E: Base type becomes "List[Any]" due to an unfollowed import pass [builtins fixtures/list.pyi] [case testDisallowImplicitAnyAlias] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked from typing import List X = List[Unchecked] def f(x: X) -> None: # E: Argument 1 to "f" becomes "List[Any]" due to an unfollowed import pass [builtins fixtures/list.pyi] [case testDisallowImplicitAnyCast] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked from typing import List, cast foo = [1, 2, 3] cast(List[Unchecked], foo) # E: Target type of cast becomes "List[Any]" due to an unfollowed import cast(Unchecked, foo) # E: Target type of cast becomes "Any" due to an unfollowed import [builtins fixtures/list.pyi] [case testDisallowImplicitAnyNamedTuple] # flags: --ignore-missing-imports --disallow-any-unimported from typing import List, NamedTuple from missing import Unchecked Point = NamedTuple('Point', [('x', List[Unchecked]), ('y', Unchecked)]) [builtins fixtures/list.pyi] [out] main:5: error: NamedTuple type becomes "Tuple[List[Any], Any]" due to an unfollowed import [case testDisallowImplicitAnyTypeVarConstraints] # flags: --ignore-missing-imports --disallow-any-unimported from typing import List, NamedTuple, TypeVar, Any from missing import Unchecked T = TypeVar('T', Unchecked, List[Unchecked], str) [builtins fixtures/list.pyi] [out] main:5: 
error: Constraint 1 becomes "Any" due to an unfollowed import main:5: error: Constraint 2 becomes "List[Any]" due to an unfollowed import [case testDisallowImplicitAnyNewType] # flags: --ignore-missing-imports --disallow-any-unimported from typing import NewType, List from missing import Unchecked Baz = NewType('Baz', Unchecked) # E: Argument 2 to NewType(...) must be subclassable (got "Any") Bar = NewType('Bar', List[Unchecked]) # E: Argument 2 to NewType(...) becomes "List[Any]" due to an unfollowed import [builtins fixtures/list.pyi] [case testDisallowImplicitAnyCallableAndTuple] # flags: --ignore-missing-imports --disallow-any-unimported from typing import Callable, Tuple from missing import Unchecked def foo(f: Callable[[], Unchecked]) -> Tuple[Unchecked]: return f() [builtins fixtures/list.pyi] [out] main:5: error: Return type becomes "Tuple[Any]" due to an unfollowed import main:5: error: Argument 1 to "foo" becomes "Callable[[], Any]" due to an unfollowed import [case testDisallowImplicitAnySubclassingExplicitAny] # flags: --ignore-missing-imports --disallow-any-unimported --disallow-subclassing-any from typing import Any class C(Any): # E: Class cannot subclass 'Any' (has type 'Any') pass [case testDisallowImplicitAnyVarDeclaration] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked foo: Unchecked = "" foo = "" x, y = 1, 2 # type: Unchecked, Unchecked [out] main:4: error: Type of variable becomes "Any" due to an unfollowed import main:6: error: A type on this line becomes "Any" due to an unfollowed import [case testDisallowUnimportedAnyTypedDictSimple] # flags: --ignore-missing-imports --disallow-any-unimported from mypy_extensions import TypedDict from x import Unchecked M = TypedDict('M', {'x': str, 'y': Unchecked}) # E: Type of a TypedDict key becomes "Any" due to an unfollowed import def f(m: M) -> M: pass # no error [builtins fixtures/dict.pyi] [case testDisallowUnimportedAnyTypedDictGeneric] # flags: 
--ignore-missing-imports --disallow-any-unimported from mypy_extensions import TypedDict from typing import List from x import Unchecked M = TypedDict('M', {'x': str, 'y': List[Unchecked]}) # E: Type of a TypedDict key becomes "List[Any]" due to an unfollowed import def f(m: M) -> M: pass # no error [builtins fixtures/dict.pyi] [case testDisallowAnyDecoratedUnannotatedDecorator] # flags: --disallow-any-decorated from typing import Any def d(f): return f @d def f(x: Any) -> Any: # E: Function is untyped after decorator transformation pass @d def h(x): # E: Function is untyped after decorator transformation pass [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedErrorIsReportedOnlyOnce] # flags: --disallow-any-decorated def d(f): return f def d2(f): return f @d @d2 @d def f(x: int) -> None: pass # E: Function is untyped after decorator transformation [case testDisallowAnyDecoratedReturnAny] # flags: --disallow-any-decorated from typing import Any def d(f) -> Any: return f @d def f() -> None: pass # E: Function is untyped after decorator transformation [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedReturnCallable] # flags: --disallow-any-decorated from typing import Any, Callable def d(f) -> Callable[..., None]: return f @d def g(i: int, s: str) -> None: pass # E: Type of decorated function contains type "Any" ("Callable[..., None]") [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedNonexistentDecorator] # flags: --disallow-any-decorated --ignore-missing-imports from nonexistent import d @d def f() -> None: pass # E: Function is untyped after decorator transformation [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedPartlyTypedCallable] # flags: --disallow-any-decorated --ignore-missing-imports from typing import Callable, Any, List def d(f) -> Callable[[int, Any], Any]: pass def d2(f) -> Callable[[int], List[Any]]: pass def d3(f) -> Callable[[Any], List[str]]: pass @d def f(i: int, s: str) -> None: # E: Type of decorated function 
contains type "Any" ("Callable[[int, Any], Any]") pass @d2 def g(i: int) -> None: # E: Type of decorated function contains type "Any" ("Callable[[int], List[Any]]") pass @d3 def h(i: int) -> None: # E: Type of decorated function contains type "Any" ("Callable[[Any], List[str]]") pass [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedReturnsCallableNoParams] # flags: --disallow-any-decorated from typing import Callable def d(p) -> Callable[[], int]: return p @d def f(i): return i [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedDecoratorReturnsNonCallable] # flags: --disallow-any-decorated def d(p) -> int: return p(0) @d def f(i): return i [case testDisallowAnyDecoratedUntypedUndecoratedFunction] # flags: --disallow-any-decorated from typing import Callable def f(i): # no error return i [case testDisallowAnyDecoratedTwoDecorators] # flags: --disallow-any-decorated from typing import Callable def typed_dec(f) -> Callable[[], int]: pass def untyped_dec(f): pass @typed_dec @untyped_dec def f(): # no error return i @untyped_dec @typed_dec def g(): # E: Function is untyped after decorator transformation return i [case testDisallowAnyExprSimple] # flags: --disallow-any-expr from typing import Any def f(s): yield s x = f(0) # E: Expression has type "Any" for x in f(0): # E: Expression has type "Any" g(x) # E: Expression has type "Any" def g(x) -> Any: yield x # E: Expression has type "Any" l = [1, 2, 3] l[f(0)] # E: Expression has type "Any" f(l) f(f(0)) # E: Expression has type "Any" [builtins fixtures/list.pyi] [case testDisallowAnyExprUnannotatedFunction] # flags: --disallow-any-expr def g(s): return s g(0) w: int = g(1) [case testDisallowAnyExprExplicitAnyParam] # flags: --disallow-any-expr from typing import Any, List def f(s: Any) -> None: pass def g(s: List[Any]) -> None: pass f(0) # type of list below is inferred with expected type of "List[Any]", so that becomes it's type # instead of List[str] g(['']) # E: Expression type contains "Any" (has 
type "List[Any]") [builtins fixtures/list.pyi] [case testDisallowAnyExprAllowsAnyInCast] # flags: --disallow-any-expr from typing import Any, cast class Foo: g: Any = 2 z = cast(int, Foo().g) m = cast(Any, Foo().g) # E: Expression has type "Any" k = Foo.g # E: Expression has type "Any" [builtins fixtures/list.pyi] [case testDisallowAnyExprAllowsAnyInVariableAssignmentWithExplicitTypeAnnotation] # flags: --disallow-any-expr from typing import Any class Foo: g: Any = 2 z: int = Foo().g x = Foo().g # type: int m: Any = Foo().g # E: Expression has type "Any" n = Foo().g # type: Any # E: Expression has type "Any" [builtins fixtures/list.pyi] [case testDisallowAnyExprGeneric] # flags: --disallow-any-expr from typing import List l: List = [] l.append(1) # E: Expression type contains "Any" (has type "List[Any]") k = l[0] # E: Expression type contains "Any" (has type "List[Any]") # E: Expression has type "Any" [builtins fixtures/list.pyi] [case testDisallowAnyExprTypeVar] # flags: --disallow-any-expr from typing import TypeVar T = TypeVar('T') # no error def f(t: T) -> T: return t [builtins fixtures/list.pyi] [case testDisallowAnyExprNamedTuple] # flags: --disallow-any-expr from typing import NamedTuple Point = NamedTuple('Point', [('x', int), ('y', int)]) # no error def origin() -> Point: return Point(x=0, y=0) [builtins fixtures/list.pyi] [case testDisallowAnyExprNewType] # flags: --disallow-any-expr from typing import NewType NT = NewType('NT', int) # no error def nt() -> NT: return NT(1) [builtins fixtures/list.pyi] [case testDisallowAnyExprEnum] # flags: --disallow-any-expr from enum import Enum E = Enum('E', '1, 2, 3') # no error def k(s: E) -> None: pass [builtins fixtures/list.pyi] [case testDisallowAnyExprTypedDict] # flags: --disallow-any-expr from mypy_extensions import TypedDict Movie = TypedDict('Movie', {'name': str, 'year': int}) def g(m: Movie) -> Movie: return m [builtins fixtures/dict.pyi] [case testDisallowIncompleteDefs] # flags: 
--disallow-incomplete-defs def f(i: int): # E: Function is missing a return type annotation pass def g(i) -> None: # E: Function is missing a type annotation for one or more arguments pass def h(i: int) -> int: # no error return i def i() -> None: # no error pass [case testDisallowIncompleteDefsNoReturn] # flags: --disallow-incomplete-defs --disallow-untyped-defs def f(i: int): # E: Function is missing a return type annotation pass [case testDisallowIncompleteDefsSelf] # flags: --disallow-incomplete-defs class C: def foo(self) -> None: # no error pass [case testDisallowIncompleteDefsPartiallyAnnotatedParams] # flags: --disallow-incomplete-defs def f(i: int, s): pass [out] main:3: error: Function is missing a return type annotation main:3: error: Function is missing a type annotation for one or more arguments [case testDisallowIncompleteDefsAttrsNoAnnotations] # flags: --disallow-incomplete-defs import attr @attr.s() class Unannotated: foo = attr.ib() [case testDisallowIncompleteDefsAttrsWithAnnotations] # flags: --disallow-incomplete-defs import attr @attr.s() class Annotated: bar: int = attr.ib() [case testDisallowIncompleteDefsAttrsPartialAnnotations] # flags: --disallow-incomplete-defs import attr @attr.s() class PartiallyAnnotated: # E: Function is missing a type annotation for one or more arguments bar: int = attr.ib() baz = attr.ib() [case testAlwaysTrueAlwaysFalseFlags] # flags: --always-true=YOLO --always-true=YOLO1 --always-false=BLAH1 --always-false BLAH --ignore-missing-imports from somewhere import YOLO, BLAH if not YOLO: 1+() if BLAH: 1+() [builtins fixtures/bool.pyi] [case testAlwaysTrueAlwaysFalseConfigFile] # flags: --config-file tmp/mypy.ini from somewhere import YOLO, BLAH if not YOLO: 1+() if BLAH: 1+() [file mypy.ini] \[mypy] ignore_missing_imports = True always_true = YOLO1, YOLO always_false = BLAH, BLAH1 [builtins fixtures/bool.pyi] [case testCheckDisallowAnyGenericsNamedTuple] # flags: --disallow-any-generics from typing import NamedTuple N 
= NamedTuple('N', [('x', N)]) # type: ignore n: N [out] [case testCheckDisallowAnyGenericsTypedDict] # flags: --disallow-any-generics from typing import Dict, Any, Optional from mypy_extensions import TypedDict VarsDict = Dict[str, Any] HostsDict = Dict[str, Optional[VarsDict]] GroupDataDict = TypedDict( "GroupDataDict", {"children": "GroupsDict", # type: ignore "vars": VarsDict, "hosts": HostsDict}, total=False ) GroupsDict = Dict[str, GroupDataDict] # type: ignore [builtins fixtures/dict.pyi] [case testCheckDefaultAllowAnyGeneric] from typing import TypeVar, Callable T = TypeVar('T') C = Callable[[], T] def f(c: C): pass [out] [case testCheckAllowAnyGenericAnyGeneric] # flags: --strict --allow-any-generics from typing import TypeVar, Callable T = TypeVar('T') C = Callable[[], T] def f(c: C) -> None: pass [out] [case testCheckDisallowAnyGenericsAnyGeneric] # flags: --disallow-any-generics from typing import TypeVar, Callable T = TypeVar('T') C = Callable[[], T] def f(c: C): # E: Missing type parameters for generic type "C" pass [out] [case testStrictAnyGeneric] # flags: --strict from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass def f(c: A) -> None: # E: Missing type parameters for generic type "A" pass [out] [case testStrictEqualityPerFile] # flags: --config-file tmp/mypy.ini import b 42 == 'no' # E: Non-overlapping equality check (left operand type: "Literal[42]", right operand type: "Literal['no']") [file b.py] 42 == 'no' [file mypy.ini] \[mypy] strict_equality = True \[mypy-b] strict_equality = False [builtins fixtures/bool.pyi] [case testNoImplicitReexport] # flags: --no-implicit-reexport from other_module_2 import a [file other_module_1.py] a = 5 [file other_module_2.py] from other_module_1 import a [out] main:2: error: Module 'other_module_2' has no attribute 'a' [case testNoImplicitReexportRespectsAll] # flags: --no-implicit-reexport from other_module_2 import a from other_module_2 import b [file other_module_1.py] a = 5 b = 6 
[file other_module_2.py] from other_module_1 import a, b __all__ = ('b',) [out] main:2: error: Module 'other_module_2' has no attribute 'a' [case testNoImplicitReexportStarConsideredImplicit] # flags: --no-implicit-reexport from other_module_2 import a [file other_module_1.py] a = 5 [file other_module_2.py] from other_module_1 import * [out] main:2: error: Module 'other_module_2' has no attribute 'a' [case testNoImplicitReexportStarCanBeReexportedWithAll] # flags: --no-implicit-reexport from other_module_2 import a from other_module_2 import b [file other_module_1.py] a = 5 b = 6 [file other_module_2.py] from other_module_1 import * __all__ = ('b',) [out] main:2: error: Module 'other_module_2' has no attribute 'a' [case testNoImplicitReexportMypyIni] # flags: --config-file tmp/mypy.ini from other_module_2 import a [file other_module_1.py] a = 5 [file other_module_2.py] from other_module_1 import a [file mypy.ini] \[mypy] implicit_reexport = True \[mypy-other_module_2] implicit_reexport = False [out] main:2: error: Module 'other_module_2' has no attribute 'a' [case testImplicitAnyOKForNoArgs] # flags: --disallow-any-generics --show-column-numbers from typing import List A = List # OK B = List[A] # E:10: Missing type parameters for generic type "A" x: A # E:4: Missing type parameters for generic type "A" [builtins fixtures/list.pyi] mypy-0.761/test-data/unit/check-functions.test0000644€tŠÔÚ€2›s®0000020463313576752246025561 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for the type checker related to functions, function types and -- calls. -- See also check-varargs.test. 
-- Callable type basics -- -------------------- [case testCallingVariableWithFunctionType] from typing import Callable f = None # type: Callable[[A], B] a, b = None, None # type: (A, B) if int(): a = f(a) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = f(b) # E: Argument 1 has incompatible type "B"; expected "A" if int(): b = f() # E: Too few arguments if int(): b = f(a, a) # E: Too many arguments if int(): b = f(a) class A: pass class B: pass [case testKeywordOnlyArgumentOrderInsensitivity] import typing class A(object): def f(self, *, a: int, b: str) -> None: pass class B(A): def f(self, *, b: str, a: int) -> None: pass class C(A): def f(self, *, b: int, a: str) -> None: pass # E: Signature of "f" incompatible with supertype "A" [case testPositionalOverridingArgumentNameInsensitivity] import typing class A(object): def f(self, a: int, b: str) -> None: pass class B(A): def f(self, b: str, a: int) -> None: pass # E: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "int" # E: Argument 2 of "f" is incompatible with supertype "A"; supertype defines the argument type as "str" class C(A): def f(self, foo: int, bar: str) -> None: pass [case testPositionalOverridingArgumentNamesCheckedWhenMismatchingPos] import typing class A(object): def f(self, a: int, b: str) -> None: pass class B(A): def f(self, b: int, a: str) -> None: pass # E: Signature of "f" incompatible with supertype "A" [case testSubtypingFunctionTypes] from typing import Callable class A: pass class B(A): pass f = None # type: Callable[[B], A] g = None # type: Callable[[A], A] # subtype of f h = None # type: Callable[[B], B] # subtype of f if int(): g = h # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[A], A]") if int(): h = f # E: Incompatible types in assignment (expression has type "Callable[[B], A]", variable has type "Callable[[B], B]") if int(): h = 
g # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[B], B]") if int(): g = f # E: Incompatible types in assignment (expression has type "Callable[[B], A]", variable has type "Callable[[A], A]") if int(): f = g if int(): f = h if int(): f = f if int(): g = g if int(): h = h [case testSubtypingFunctionsDoubleCorrespondence] def l(x) -> None: ... def r(__, *, x) -> None: ... r = l # E: Incompatible types in assignment (expression has type "Callable[[Any], None]", variable has type "Callable[[Any, NamedArg(Any, 'x')], None]") [case testSubtypingFunctionsRequiredLeftArgNotPresent] def l(x, y) -> None: ... def r(x) -> None: ... r = l # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], None]", variable has type "Callable[[Any], None]") [case testSubtypingFunctionsImplicitNames] from typing import Any def f(a, b): pass def g(c: Any, d: Any) -> Any: pass ff = f gg = g gg = f ff = g [case testSubtypingFunctionsDefaultsNames] from typing import Callable def f(a: int, b: str) -> None: pass f_nonames = None # type: Callable[[int, str], None] def g(a: int, b: str = "") -> None: pass def h(aa: int, b: str = "") -> None: pass ff_nonames = f_nonames ff = f gg = g hh = h if int(): ff = gg if int(): ff_nonames = ff if int(): ff_nonames = f_nonames # reset if int(): ff = ff_nonames # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]") if int(): ff = f # reset if int(): gg = ff # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]", variable has type "Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None]") if int(): gg = hh # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'aa'), DefaultArg(str, 'b')], None]", variable has type "Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None]") [case 
testSubtypingFunctionsArgsKwargs] from typing import Any, Callable def everything(*args: Any, **kwargs: Any) -> None: pass everywhere = None # type: Callable[..., None] def specific_1(a: int, b: str) -> None: pass def specific_2(a: int, *, b: str) -> None: pass ss_1 = specific_1 ss_2 = specific_2 ee_def = everything ee_var = everywhere if int(): ss_1 = ee_def if int(): ss_1 = specific_1 if int(): ss_2 = ee_def if int(): ss_2 = specific_2 if int(): ee_def = everywhere if int(): ee_def = everything if int(): ee_var = everything if int(): ee_var = everywhere if int(): ee_var = specific_1 # The difference between Callable[..., blah] and one with a *args: Any, **kwargs: Any is that the ... goes loosely both ways. if int(): ee_def = specific_1 # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[VarArg(Any), KwArg(Any)], None]") [builtins fixtures/dict.pyi] [case testSubtypingFunctionsDecorated] from typing import Any # untyped decorator def deco(f): pass class A: @deco def f(self) -> Any: pass class B(A): @deco def f(self) -> Any: pass [builtins fixtures/list.pyi] [case testLackOfNames] def f(__a: int, __b: str) -> None: pass def g(a: int, b: str) -> None: pass ff = f gg = g if int(): ff = g if int(): gg = f # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]") [case testLackOfNamesFastparse] def f(__a: int, __b: str) -> None: pass def g(a: int, b: str) -> None: pass ff = f gg = g if int(): ff = g if int(): gg = f # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]") [case testFunctionTypeCompatibilityWithOtherTypes] from typing import Callable f = None # type: Callable[[], None] a, o = None, None # type: (A, object) if int(): a = f # E: Incompatible types in assignment (expression has type "Callable[[], 
None]", variable has type "A") if int(): f = a # E: Incompatible types in assignment (expression has type "A", variable has type "Callable[[], None]") if int(): f = o # E: Incompatible types in assignment (expression has type "object", variable has type "Callable[[], None]") if int(): f = f() # E: Function does not return a value if int(): f = f if int(): f = None if int(): o = f class A: pass [case testFunctionSubtypingWithVoid] from typing import Callable f = None # type: Callable[[], None] g = None # type: Callable[[], object] if int(): f = g # E: Incompatible types in assignment (expression has type "Callable[[], object]", variable has type "Callable[[], None]") if int(): g = f # OK if int(): f = f if int(): g = g [case testFunctionSubtypingWithMultipleArgs] from typing import Callable f = None # type: Callable[[A, A], None] g = None # type: Callable[[A, B], None] h = None # type: Callable[[B, B], None] if int(): f = g # E: Incompatible types in assignment (expression has type "Callable[[A, B], None]", variable has type "Callable[[A, A], None]") if int(): f = h # E: Incompatible types in assignment (expression has type "Callable[[B, B], None]", variable has type "Callable[[A, A], None]") if int(): g = h # E: Incompatible types in assignment (expression has type "Callable[[B, B], None]", variable has type "Callable[[A, B], None]") if int(): g = f if int(): h = f if int(): h = g if int(): f = f if int(): g = g if int(): h = h class A: pass class B(A): pass [case testFunctionTypesWithDifferentArgumentCounts] from typing import Callable f = None # type: Callable[[], None] g = None # type: Callable[[A], None] h = None # type: Callable[[A, A], None] if int(): f = g # E: Incompatible types in assignment (expression has type "Callable[[A], None]", variable has type "Callable[[], None]") if int(): f = h # E: Incompatible types in assignment (expression has type "Callable[[A, A], None]", variable has type "Callable[[], None]") if int(): h = f # E: Incompatible types in 
assignment (expression has type "Callable[[], None]", variable has type "Callable[[A, A], None]") if int(): h = g # E: Incompatible types in assignment (expression has type "Callable[[A], None]", variable has type "Callable[[A, A], None]") if int(): f = f if int(): g = g if int(): h = h class A: pass [out] [case testCompatibilityOfSimpleTypeObjectWithStdType] t = None # type: type a = None # type: A if int(): a = A # E: Incompatible types in assignment (expression has type "Type[A]", variable has type "A") if int(): t = f # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "type") if int(): t = A class A: def __init__(self, a: 'A') -> None: pass def f() -> None: pass [case testFunctionTypesWithOverloads] from foo import * [file foo.pyi] from typing import Callable, overload f = None # type: Callable[[AA], A] g = None # type: Callable[[B], B] h = None # type: Callable[[A], AA] if int(): h = i # E: Incompatible types in assignment (expression has type overloaded function, variable has type "Callable[[A], AA]") if int(): f = j if int(): f = i if int(): g = i if int(): g = j class A: pass class AA(A): pass class B: pass @overload def i(x: AA) -> A: pass @overload def i(x: B) -> B: pass @overload def j(x: B) -> B: pass @overload def j(x: A) -> AA: pass [case testOverloadWithThreeItems] from foo import * [file foo.pyi] from typing import Callable, overload g1 = None # type: Callable[[A], A] g2 = None # type: Callable[[B], B] g3 = None # type: Callable[[C], C] g4 = None # type: Callable[[A], B] a, b, c = None, None, None # type: (A, B, C) if int(): b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = f(c) # E: Incompatible types in assignment (expression has type "C", variable has type "B") if int(): g4 = f # E: Incompatible types in assignment (expression has type 
overloaded function, variable has type "Callable[[A], B]") if int(): g1 = f if int(): g2 = f if int(): g3 = f if int(): a = f(a) if int(): b = f(b) if int(): c = f(c) class A: pass class B: pass class C: pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass @overload def f(x: C) -> C: pass [case testInferConstraintsUnequalLengths] from typing import Any, Callable, List def f(fields: List[Callable[[Any], Any]]): pass class C: pass f([C]) # E: List item 0 has incompatible type "Type[C]"; expected "Callable[[Any], Any]" class D: def __init__(self, a, b): pass f([D]) # E: List item 0 has incompatible type "Type[D]"; expected "Callable[[Any], Any]" [builtins fixtures/list.pyi] [case testSubtypingTypeTypeAsCallable] from typing import Callable, Type class A: pass x = None # type: Callable[..., A] y = None # type: Type[A] x = y [case testSubtypingCallableAsTypeType] from typing import Callable, Type class A: pass x = None # type: Callable[..., A] y = None # type: Type[A] if int(): y = x # E: Incompatible types in assignment (expression has type "Callable[..., A]", variable has type "Type[A]") -- Default argument values -- ----------------------- [case testCallingFunctionsWithDefaultArgumentValues] a, b = None, None # type: (A, B) if int(): a = f() # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "Optional[A]" if int(): b = f(a, a) # E: Too many arguments for "f" if int(): b = f() if int(): b = f(a) if int(): b = f(AA()) def f(x: 'A' = None) -> 'B': pass class A: pass class AA(A): pass class B: pass [case testDefaultArgumentExpressions] import typing def f(x: 'A' = A()) -> None: b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x # type: A class B: pass class A: pass [out] [case testDefaultArgumentExpressions2] import typing def f(x: 'A' = B()) -> None: # E: Incompatible default for 
argument "x" (default has type "B", argument has type "A") b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x # type: A class B: pass class A: pass [case testDefaultArgumentExpressionsGeneric] from typing import TypeVar T = TypeVar('T', bound='A') def f(x: T = B()) -> None: # E: Incompatible default for argument "x" (default has type "B", argument has type "T") b = x # type: B # E: Incompatible types in assignment (expression has type "T", variable has type "B") a = x # type: A class B: pass class A: pass [case testDefaultArgumentExpressionsPython2] # flags: --python-version 2.7 from typing import Tuple def f(x = B()): # E: Incompatible default for argument "x" (default has type "B", argument has type "A") # type: (A) -> None b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x # type: A class B: pass class A: pass [case testDefaultTupleArgumentExpressionsPython2] # flags: --python-version 2.7 from typing import Tuple def f((x, y) = (A(), B())): # E: Incompatible default for tuple argument 1 (default has type "Tuple[A, B]", argument has type "Tuple[B, B]") # type: (Tuple[B, B]) -> None b = x # type: B a = x # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") def g(a, (x, y) = (A(),)): # E: Incompatible default for tuple argument 2 (default has type "Tuple[A]", argument has type "Tuple[B, B]") # type: (int, Tuple[B, B]) -> None pass def h((x, y) = (A(), B(), A())): # E: Incompatible default for tuple argument 1 (default has type "Tuple[A, B, A]", argument has type "Tuple[B, B]") # type: (Tuple[B, B]) -> None pass class B: pass class A: pass [case testDefaultArgumentsWithSubtypes] import typing def f(x: 'B' = A()) -> None: # E: Incompatible default for argument "x" (default has type "A", argument has type "B") pass def g(x: 'A' = B()) -> None: pass class A: pass class B(A): pass [out] [case 
testMultipleDefaultArgumentExpressions] import typing def f(x: 'A' = B(), y: 'B' = B()) -> None: # E: Incompatible default for argument "x" (default has type "B", argument has type "A") pass def h(x: 'A' = A(), y: 'B' = B()) -> None: pass class A: pass class B: pass [out] [case testMultipleDefaultArgumentExpressions2] import typing def g(x: 'A' = A(), y: 'B' = A()) -> None: # E: Incompatible default for argument "y" (default has type "A", argument has type "B") pass class A: pass class B: pass [out] [case testDefaultArgumentsAndSignatureAsComment] import typing def f(x = 1): # type: (int) -> str pass f() f(1) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testMethodDefaultArgumentsAndSignatureAsComment] import typing class A: def f(self, x = 1): # type: (int) -> str pass A().f() A().f(1) A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" -- Access to method defined as a data attribute -- -------------------------------------------- [case testMethodAsDataAttribute] from typing import Any, Callable class B: pass x = None # type: Any class A: f = x # type: Callable[[A], None] g = x # type: Callable[[A, B], None] a = None # type: A a.f() a.g(B()) a.f(a) # E: Too many arguments a.g() # E: Too few arguments [case testMethodWithInvalidMethodAsDataAttribute] from typing import Any, Callable class B: pass x = None # type: Any class A: f = x # type: Callable[[], None] g = x # type: Callable[[B], None] a = None # type: A a.f() # E: Attribute function "f" with type "Callable[[], None]" does not accept self argument a.g() # E: Invalid self argument "A" to attribute function "g" with type "Callable[[B], None]" [case testMethodWithDynamicallyTypedMethodAsDataAttribute] from typing import Any, Callable class B: pass x = None # type: Any class A: f = x # type: Callable[[Any], Any] a = None # type: A a.f() a.f(a) # E: Too many arguments [case testOverloadedMethodAsDataAttribute] from foo import * [file foo.pyi] from 
typing import overload class B: pass class A: @overload def f(self) -> None: pass @overload def f(self, b: B) -> None: pass g = f a = None # type: A a.g() a.g(B()) a.g(a) # E: No overload variant matches argument type "A" \ # N: Possible overload variant: \ # N: def f(self, b: B) -> None \ # N: <1 more non-matching overload not shown> [case testMethodAsDataAttributeInferredFromDynamicallyTypedMethod] class A: def f(self, x): pass g = f a = None # type: A a.g(object()) a.g(a, a) # E: Too many arguments a.g() # E: Too few arguments [case testMethodAsDataAttributeInGenericClass] from typing import TypeVar, Generic t = TypeVar('t') class B: pass class A(Generic[t]): def f(self, x: t) -> None: pass g = f a = None # type: A[B] a.g(B()) a.g(a) # E: Argument 1 has incompatible type "A[B]"; expected "B" [case testInvalidMethodAsDataAttributeInGenericClass] from typing import Any, TypeVar, Generic, Callable t = TypeVar('t') class B: pass class C: pass x = None # type: Any class A(Generic[t]): f = x # type: Callable[[A[B]], None] ab = None # type: A[B] ac = None # type: A[C] ab.f() ac.f() # E: Invalid self argument "A[C]" to attribute function "f" with type "Callable[[A[B]], None]" [case testPartiallyTypedSelfInMethodDataAttribute] from typing import Any, TypeVar, Generic, Callable t = TypeVar('t') class B: pass class C: pass x = None # type: Any class A(Generic[t]): f = x # type: Callable[[A], None] ab = None # type: A[B] ac = None # type: A[C] ab.f() ac.f() [case testCallableDataAttribute] from typing import Callable class A: g = None # type: Callable[[A], None] def __init__(self, f: Callable[[], None]) -> None: self.f = f a = A(None) a.f() a.g() a.f(a) # E: Too many arguments a.g(a) # E: Too many arguments -- Nested functions -- ---------------- [case testSimpleNestedFunction] import typing def f(a: 'A') -> None: def g(b: 'B') -> None: if int(): b = a \ # E: Incompatible types in assignment (expression has type "A", variable has type "B") aa = a # type: A # ok b = B() g(a) 
# E: Argument 1 to "g" has incompatible type "A"; expected "B" g(B()) class A: pass class B: pass [case testReturnAndNestedFunction] import typing def f() -> 'A': def g() -> 'B': return A() # fail return B() return B() # fail return A() class A: pass class B: pass [out] main:4: error: Incompatible return value type (got "A", expected "B") main:6: error: Incompatible return value type (got "B", expected "A") [case testDynamicallyTypedNestedFunction] import typing def f(x: object) -> None: def g(y): pass g() # E: Too few arguments for "g" g(x) [out] [case testNestedFunctionInMethod] import typing class A: def f(self) -> None: def g(x: int) -> None: y = x # type: int a = x # type: A # fail g(2) g(A()) # fail [out] main:6: error: Incompatible types in assignment (expression has type "int", variable has type "A") main:8: error: Argument 1 to "g" has incompatible type "A"; expected "int" [case testNestedFunctionInMethodWithTooFewArgumentsInTypeComment] class A: def f(self): # type: () -> None def g(x): # E: Type signature has too few arguments # type: () -> None pass [case testDeepNestedFunctionWithTooFewArgumentsInTypeComment] class A: def f(self): # type: () -> None class B: def g(self): # type: () -> None def h(x): # E: Type signature has too few arguments # type: () -> None pass [case testDeepNestedMethodInTypeComment] class A: def f(self): # type: () -> None class B: class C: def g(self): # type: () -> None pass [case testMutuallyRecursiveNestedFunctions] def f() -> None: def g() -> None: h(1) h('') # E def h(x: int) -> None: g() g(1) # E [out] main:4: error: Argument 1 to "h" has incompatible type "str"; expected "int" main:7: error: Too many arguments for "g" [case testMutuallyRecursiveDecoratedFunctions] from typing import Callable, Any def dec(f) -> Callable[..., Any]: pass def f() -> None: @dec def g() -> None: h() h.x # E @dec def h(x: int) -> None: g(1) g.x # E [out] main:7: error: "Callable[..., Any]" has no attribute "x" main:11: error: "Callable[..., Any]" 
has no attribute "x" [case testNestedGenericFunctions] from typing import TypeVar T = TypeVar('T') U = TypeVar('U') def outer(x: T) -> T: def inner(y: U) -> T: ... return inner(1) -- Casts -- ----- [case testCastsToAndFromFunctionTypes] from typing import TypeVar, Callable, Any, cast t = TypeVar('t') def f(x: t, f1: Callable[[], None], f2: Callable[[Any], None], o: object) -> None: x = cast(t, f1) f1 = cast(Callable[[], None], x) f1 = cast(Callable[[], None], f2) f1 = cast(Callable[[], None], o) -- Function decorators -- ------------------- [case testTrivialStaticallyTypedFunctionDecorator] from typing import TypeVar t = TypeVar('t') def dec(f: t) -> t: return f @dec def f(x: int) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testTrivialStaticallyTypedMethodDecorator] from typing import TypeVar t = TypeVar('t') def dec(f: t) -> t: return f class A: @dec def f(self, x: int) -> None: pass A().f(1) A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" class B: pass [case testTrivialDecoratedNestedFunction] from typing import TypeVar t = TypeVar('t') def dec(f: t) -> t: return f def g() -> None: @dec def f(x: int) -> None: pass f(1) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [out] [case testCheckingDecoratedFunction] import typing def dec(f): pass @dec def f(x: 'A') -> None: a = x # type: A if int(): x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") class A: pass [out] [case testDecoratorThatSwitchesType] from typing import Callable def dec(x) -> Callable[[], None]: pass @dec def f(y): pass f() f(None) # E: Too many arguments for "f" [case testDecoratorThatSwitchesTypeWithMethod] from typing import Any, Callable def dec(x) -> Callable[[Any], None]: pass class A: @dec def f(self, a, b, c): pass a = None # type: A a.f() a.f(None) # E: Too many arguments for "f" of "A" [case testNestedDecorators] from 
typing import Any, Callable def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass @dec1 @dec2 def f(x, y): pass f() f(None) # E: Too many arguments for "f" [case testInvalidDecorator1] from typing import Any, Callable def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass @dec1 # E: Argument 1 to "dec2" has incompatible type "Callable[[Any], Any]"; expected "Callable[[Any, Any], None]" @dec2 def f(x): pass [case testInvalidDecorator2] from typing import Any, Callable def dec1(f: Callable[[Any, Any], None]) -> Callable[[], None]: pass def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass @dec1 # E: Argument 1 to "dec1" has incompatible type "Callable[[Any], None]"; expected "Callable[[Any, Any], None]" @dec2 def f(x, y): pass [case testNoTypeCheckDecoratorOnMethod1] from typing import no_type_check @no_type_check def foo(x: 'bar', y: {'x': 4}) -> 42: 1 + 'x' [typing fixtures/typing-full.pyi] [case testNoTypeCheckDecoratorOnMethod2] import typing @typing.no_type_check def foo(x: 's', y: {'x': 4}) -> 42: 1 + 'x' @typing.no_type_check def bar() -> None: 1 + 'x' [typing fixtures/typing-full.pyi] [case testCallingNoTypeCheckFunction] import typing @typing.no_type_check def foo(x: {1:2}) -> [1]: 1 + 'x' foo() foo(1, 'b') [typing fixtures/typing-full.pyi] [case testCallingNoTypeCheckFunction2] import typing def f() -> None: foo() @typing.no_type_check def foo(x: {1:2}) -> [1]: 1 + 'x' [typing fixtures/typing-full.pyi] [case testNoTypeCheckDecoratorSemanticError] import typing @typing.no_type_check def foo(x: {1:2}) -> [1]: x = y [typing fixtures/typing-full.pyi] -- Forward references to decorated functions -- ----------------------------------------- [case testForwardReferenceToDynamicallyTypedDecorator] def f(self) -> None: g() g(1) def dec(f): return f @dec def g(): pass [case 
testForwardReferenceToDecoratorWithAnyReturn] from typing import Any def f(self) -> None: g() g(1) def dec(f) -> Any: return f @dec def g(): pass [case testForwardReferenceToDecoratorWithIdentityMapping] from typing import TypeVar def f(self) -> None: g() g(1) # E: Too many arguments for "g" h(1).x # E: "str" has no attribute "x" h('') # E: Argument 1 to "h" has incompatible type "str"; expected "int" T = TypeVar('T') def dec(f: T) -> T: return f @dec def g(): pass @dec def h(x: int) -> str: pass [out] [case testForwardReferenceToDynamicallyTypedDecoratedMethod] def f(self) -> None: A().f(1).y A().f() class A: @dec def f(self, x): pass def dec(f): return f [builtins fixtures/staticmethod.pyi] [case testForwardReferenceToStaticallyTypedDecoratedMethod] from typing import TypeVar def f(self) -> None: A().f(1).y # E: "str" has no attribute "y" A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" class A: @dec def f(self, a: int) -> str: return '' T = TypeVar('T') def dec(f: T) -> T: return f [builtins fixtures/staticmethod.pyi] [out] [case testForwardReferenceToDynamicallyTypedProperty] def f(self) -> None: A().x.y class A: @property def x(self): pass [builtins fixtures/property.pyi] [case testForwardReferenceToStaticallyTypedProperty] def f(self) -> None: A().x.y # E: "int" has no attribute "y" class A: @property def x(self) -> int: return 1 [builtins fixtures/property.pyi] [out] [case testForwardReferenceToDynamicallyTypedStaticMethod] def f(self) -> None: A.x(1).y A.x() # E: Too few arguments for "x" class A: @staticmethod def x(x): pass [builtins fixtures/staticmethod.pyi] [out] [case testForwardReferenceToStaticallyTypedStaticMethod] def f(self) -> None: A.x(1).y # E: "str" has no attribute "y" A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int" class A: @staticmethod def x(a: int) -> str: return '' [builtins fixtures/staticmethod.pyi] [out] [case testForwardReferenceToDynamicallyTypedClassMethod] def 
f(self) -> None: A.x(1).y A.x() # E: Too few arguments for "x" class A: @classmethod def x(cls, a): pass [builtins fixtures/classmethod.pyi] [out] [case testForwardReferenceToStaticallyTypedClassMethod] def f(self) -> None: A.x(1).y # E: "str" has no attribute "y" A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int" class A: @classmethod def x(cls, x: int) -> str: return '' [builtins fixtures/classmethod.pyi] [out] [case testForwardReferenceToDecoratedFunctionUsingMemberExpr] import m def f(self) -> None: g(1).x # E: "str" has no attribute "x" @m.dec def g(x: int) -> str: pass [file m.py] from typing import TypeVar T = TypeVar('T') def dec(f: T) -> T: return f [out] [case testForwardReferenceToFunctionWithMultipleDecorators] def f(self) -> None: g() g(1) def dec(f): return f @dec @dec2 def g(): pass def dec2(f): return f [case testForwardReferenceToDynamicallyTypedDecoratedStaticMethod] def f(self) -> None: A().f(1).y A().f() A().g(1).y A().g() class A: @dec @staticmethod def f(self, x): pass @staticmethod @dec def g(self, x): pass def dec(f): return f [builtins fixtures/staticmethod.pyi] [case testForwardRefereceToDecoratedFunctionWithCallExpressionDecorator] def f(self) -> None: g() g(1) @dec(1) def g(): pass def dec(f): pass -- Decorator functions in import cycles -- ------------------------------------ [case testDecoratorWithIdentityTypeInImportCycle] import a [file a.py] import b from d import dec @dec def f(x: int) -> None: pass b.g(1) # E [file b.py] import a from d import dec @dec def g(x: str) -> None: pass a.f('') [file d.py] from typing import TypeVar T = TypeVar('T') def dec(f: T) -> T: return f [out] tmp/b.py:5: error: Argument 1 to "f" has incompatible type "str"; expected "int" tmp/a.py:5: error: Argument 1 to "g" has incompatible type "int"; expected "str" [case testDecoratorWithNoAnnotationInImportCycle] import a [file a.py] import b from d import dec @dec def f(x: int) -> None: pass b.g(1, z=4) [file b.py] import a 
from d import dec @dec def g(x: str) -> None: pass a.f('', y=2) [file d.py] def dec(f): return f [case testDecoratorWithFixedReturnTypeInImportCycle] import a [file a.py] import b from d import dec @dec def f(x: int) -> str: pass b.g(1)() [file b.py] import a from d import dec @dec def g(x: int) -> str: pass a.f(1)() [file d.py] from typing import Callable def dec(f: Callable[[int], str]) -> Callable[[int], str]: return f [out] tmp/b.py:5: error: "str" not callable tmp/a.py:5: error: "str" not callable [case testDecoratorWithCallAndFixedReturnTypeInImportCycle] import a [file a.py] import b from d import dec @dec() def f(x: int) -> str: pass b.g(1)() [file b.py] import a from d import dec @dec() def g(x: int) -> str: pass a.f(1)() [file d.py] from typing import Callable def dec() -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass [out] tmp/b.py:5: error: "str" not callable tmp/a.py:5: error: "str" not callable [case testDecoratorWithCallAndFixedReturnTypeInImportCycleAndDecoratorArgs] import a [file a.py] import b from d import dec @dec(1) def f(x: int) -> str: pass b.g(1)() [file b.py] import a from d import dec @dec(1) def g(x: int) -> str: pass a.f(1)() [file d.py] from typing import Callable def dec(x: str) -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass [out] tmp/b.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str" tmp/b.py:5: error: "str" not callable tmp/a.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str" tmp/a.py:5: error: "str" not callable [case testUndefinedDecoratorInImportCycle] # cmd: mypy -m foo.base [file foo/__init__.py] import foo.base class Derived(foo.base.Base): def method(self) -> None: pass [file foo/base.py] import foo class Base: @decorator def method(self) -> None: pass [out] tmp/foo/base.py:3: error: Name 'decorator' is not defined -- Conditional function definition -- ------------------------------- [case testTypeCheckBodyOfConditionalFunction] from typing 
import Any x = None # type: Any if x: def f(x: int) -> None: if int(): x = 1 x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [out] [case testCallConditionalFunction] from typing import Any x = None # type: Any if x: def f(x: int) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testConditionalFunctionDefinitionWithIfElse] from typing import Any x = None # type: Any if x: def f(x: int) -> None: 'x' + x # fail if int(): x = 1 else: def f(x: int) -> None: x + 'x' # fail if int(): x = 1 f(1) f('x') # fail [out] main:5: error: Unsupported operand types for + ("str" and "int") main:10: error: Unsupported operand types for + ("int" and "str") main:14: error: Argument 1 to "f" has incompatible type "str"; expected "int" [case testNestedConditionalFunctionDefinitionWithIfElse] from typing import Any x = None # type: Any def top() -> None: if x: def f(x: int) -> None: if int(): x = 'x' # E: Incompatible types in assignment \ (expression has type "str", variable has type "int") x = 1 else: def f(x: int) -> None: x + 'x' # E: Unsupported operand types for + ("int" and "str") x = 1 f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testUnconditionalRedefinitionOfConditionalFunction] from typing import Any x = None # type: Any if x: def f(): pass def f(): pass # E: Name 'f' already defined on line 4 [case testIncompatibleConditionalFunctionDefinition] from typing import Any x = None # type: Any if x: def f(x: int) -> None: pass else: def f(x): pass # E: All conditional function variants must have identical signatures [case testIncompatibleConditionalFunctionDefinition2] from typing import Any x = None # type: Any if x: def f(x: int) -> None: pass else: def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures [case 
testIncompatibleConditionalFunctionDefinition3] from typing import Any x = None # type: Any if x: def f(x: int) -> None: pass else: def f(x: int = 0) -> None: pass # E: All conditional function variants must have identical signatures [case testConditionalFunctionDefinitionUsingDecorator1] from typing import Callable def dec(f) -> Callable[[int], None]: pass x = int() if x: @dec def f(): pass else: def f(x: int) -> None: pass [case testConditionalFunctionDefinitionUsingDecorator2] from typing import Callable def dec(f) -> Callable[[int], None]: pass x = int() if x: @dec def f(): pass else: def f(x: str) -> None: pass # E: Incompatible redefinition (redefinition with type "Callable[[str], None]", original type "Callable[[int], None]") [case testConditionalFunctionDefinitionUsingDecorator3] from typing import Callable def dec(f) -> Callable[[int], None]: pass x = int() if x: def f(x: int) -> None: pass else: @dec def f(): pass [case testConditionalFunctionDefinitionUsingDecorator4] from typing import Callable def dec(f) -> Callable[[int], None]: pass x = int() if x: def f(x: str) -> None: pass else: # TODO: Complain about incompatible redefinition @dec def f(): pass [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1] from typing import Any def f(x: str) -> None: pass x = None # type: Any if x: def f(x: int) -> None: pass # E: All conditional function variants must have identical signatures [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1] from typing import Any def f(x: int) -> None: pass # N: "f" defined here x = None # type: Any if x: def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures f(x=1) # The first definition takes precedence. 
f(y=1) # E: Unexpected keyword argument "y" for "f" [case testRedefineFunctionDefinedAsVariable] def g(): pass f = g if g(): def f(): pass f() f(1) # E: Too many arguments [case testRedefineFunctionDefinedAsVariableInitializedToNone] def g(): pass f = None if g(): def f(): pass f() f(1) # E: Too many arguments for "f" [case testRedefineNestedFunctionDefinedAsVariableInitializedToNone] def g() -> None: f = None if object(): def f(x: int) -> None: pass f() # E: Too few arguments for "f" f(1) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [out] [case testRedefineFunctionDefinedAsVariableWithInvalidSignature] def g(): pass f = g if g(): def f(x): pass # E: Incompatible redefinition (redefinition with type "Callable[[Any], Any]", original type "Callable[[], Any]") [case testRedefineFunctionDefinedAsVariableWithVariance1] class B: pass class C(B): pass def g(x: C) -> B: pass f = g if g(C()): def f(x: C) -> C: pass [case testRedefineFunctionDefinedAsVariableWithVariance2] class B: pass class C(B): pass def g(x: C) -> B: pass f = g if g(C()): def f(x: B) -> B: pass [case testRedefineFunctionDefinedAsVariableInitializedToEmptyList] f = [] # E: Need type annotation for 'f' (hint: "f: List[] = ...") if object(): def f(): pass # E: Incompatible redefinition f() # E: "List[Any]" not callable f(1) # E: "List[Any]" not callable [builtins fixtures/list.pyi] [case testDefineConditionallyAsImportedAndDecorated] from typing import Callable def dec(f: Callable[[], None]) -> Callable[[], None]: ... 
if int(): from m import f else: @dec def f(): yield [file m.py] def f(): pass [case testDefineConditionallyAsImportedAndDecoratedWithInference] if int(): from m import f else: from contextlib import contextmanager @contextmanager def f(): yield [file m.py] from contextlib import contextmanager @contextmanager def f(): yield [typing fixtures/typing-full.pyi] -- Conditional method definition -- ----------------------------- [case testTypeCheckBodyOfConditionalMethod] from typing import Any x = None # type: Any class A: if x: def f(self, x: int) -> None: if int(): x = 1 x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [out] [case testCallConditionalMethodInClassBody] from typing import Any x = None # type: Any class A: if x: def f(self, x: int) -> None: pass f(x, 1) f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int" f(x, 1) f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int" [out] [case testCallConditionalMethodViaInstance] from typing import Any x = None # type: Any class A: if x: def f(self, x: int) -> None: pass A().f(1) A().f('x') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testConditionalMethodDefinitionWithIfElse] from typing import Any x = None # type: Any class A: if x: def f(self, x: int) -> None: 'x' + x # fail if int(): x = 1 else: def f(self, x: int) -> None: x + 'x' # fail if int(): x = 1 A().f(1) A().f('x') # fail [out] main:6: error: Unsupported operand types for + ("str" and "int") main:11: error: Unsupported operand types for + ("int" and "str") main:15: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testUnconditionalRedefinitionOfConditionalMethod] from typing import Any x = None # type: Any class A: if x: def f(self): pass def f(self): pass # E: Name 'f' already defined on line 5 [case testIncompatibleConditionalMethodDefinition] from typing import Any x = None # 
type: Any class A: if x: def f(self, x: int) -> None: pass else: def f(self, x): pass # E: All conditional function variants must have identical signatures [out] [case testConditionalFunctionDefinitionInTry] import typing try: def f(x: int) -> None: pass except: def g(x: str) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" g('x') g(1) # E: Argument 1 to "g" has incompatible type "int"; expected "str" [case testConditionalMethodDefinitionUsingDecorator] from typing import Callable def dec(f) -> Callable[['A', int], None]: pass class A: x = int() if x: @dec def f(self): pass else: def f(self, x: int) -> None: pass -- Callable with specific arg list -- ------------------------------- [case testCallableWithNamedArg] from typing import Callable from mypy_extensions import Arg def a(f: Callable[[Arg(int, 'x')], int]): f(x=4) f(5) f(y=3) # E: Unexpected keyword argument "y" [builtins fixtures/dict.pyi] [case testCallableWithOptionalArg] from typing import Callable from mypy_extensions import DefaultArg def a(f: Callable[[DefaultArg(int, 'x')], int]): f(x=4) f(2) f() f(y=3) # E: Unexpected keyword argument "y" f("foo") # E: Argument 1 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableWithNamedArgFromExpr] from typing import Callable from mypy_extensions import Arg F = Callable[[Arg(int, 'x')], int] def a(f: F): f(x=4) f(5) f(y=3) # E: Unexpected keyword argument "y" [builtins fixtures/dict.pyi] [case testCallableWithOptionalArgFromExpr] from typing import Callable from mypy_extensions import DefaultArg F = Callable[[DefaultArg(int, 'x')], int] def a(f: F): f(x=4) f(2) f() f(y=3) # E: Unexpected keyword argument "y" f("foo") # E: Argument 1 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableParsingInInheritence] from collections import namedtuple class C(namedtuple('t', 'x')): pass [case testCallableParsingSameName] from typing import Callable def 
Arg(x, y): pass F = Callable[[Arg(int, 'x')], int] # E: Invalid argument constructor "__main__.Arg" [case testCallableParsingFromExpr] from typing import Callable, List from mypy_extensions import Arg, VarArg, KwArg import mypy_extensions def WrongArg(x, y): return y # Note that for this test, the 'Value of type "int" is not indexable' errors are silly, # and a consequence of Callable being set to an int in the test stub. We can't set it to # something else sensible, because other tests require the stub not have anything # that looks like a function call. F = Callable[[WrongArg(int, 'x')], int] # E: Invalid argument constructor "__main__.WrongArg" G = Callable[[Arg(1, 'x')], int] # E: Invalid type: try using Literal[1] instead? H = Callable[[VarArg(int, 'x')], int] # E: VarArg arguments should not have names I = Callable[[VarArg(int)], int] # ok J = Callable[[VarArg(), KwArg()], int] # ok K = Callable[[VarArg(), int], int] # E: Required positional args may not appear after default, named or var args L = Callable[[Arg(name='x', type=int)], int] # ok # I have commented out the following test because I don't know how to expect the "defined here" note part of the error. # M = Callable[[Arg(gnome='x', type=int)], int] E: Invalid type alias: expression is not a valid type E: Unexpected keyword argument "gnome" for "Arg" N = Callable[[Arg(name=None, type=int)], int] # ok O = Callable[[List[Arg(int)]], int] # E: Invalid type alias: expression is not a valid type # E: Value of type "int" is not indexable # E: Type expected within [...] 
# E: The type "Type[List[Any]]" is not generic and not indexable P = Callable[[mypy_extensions.VarArg(int)], int] # ok Q = Callable[[Arg(int, type=int)], int] # E: Invalid type alias: expression is not a valid type # E: Value of type "int" is not indexable # E: "Arg" gets multiple values for keyword argument "type" R = Callable[[Arg(int, 'x', name='y')], int] # E: Invalid type alias: expression is not a valid type # E: Value of type "int" is not indexable # E: "Arg" gets multiple values for keyword argument "name" [builtins fixtures/dict.pyi] [case testCallableParsing] from typing import Callable from mypy_extensions import Arg, VarArg, KwArg def WrongArg(x, y): return y def b(f: Callable[[Arg(1, 'x')], int]): pass # Invalid type. Try using Literal[1] instead? def d(f: Callable[[VarArg(int)], int]): pass # ok def e(f: Callable[[VarArg(), KwArg()], int]): pass # ok def g(f: Callable[[Arg(name='x', type=int)], int]): pass # ok def h(f: Callable[[Arg(gnome='x', type=int)], int]): pass # E: Unexpected argument "gnome" for argument constructor def i(f: Callable[[Arg(name=None, type=int)], int]): pass # ok def j(f: Callable[[Arg(int, 'x', name='y')], int]): pass # E: "Arg" gets multiple values for keyword argument "name" def k(f: Callable[[Arg(int, type=int)], int]): pass # E: "Arg" gets multiple values for keyword argument "type" [builtins fixtures/dict.pyi] [case testCallableTypeAnalysis] from typing import Callable from mypy_extensions import Arg, VarArg as VARG, KwArg import mypy_extensions as ext def WrongArg(x, y): return y def a(f: Callable[[WrongArg(int, 'x')], int]): pass # E: Invalid argument constructor "__main__.WrongArg" def b(f: Callable[[BadArg(int, 'x')], int]): pass # E: Name 'BadArg' is not defined def d(f: Callable[[ext.VarArg(int)], int]): pass # ok def e(f: Callable[[VARG(), ext.KwArg()], int]): pass # ok def g(f: Callable[[ext.Arg(name='x', type=int)], int]): pass # ok def i(f: Callable[[Arg(name=None, type=int)], int]): pass # ok def f1(*args) -> 
int: pass def f2(*args, **kwargs) -> int: pass d(f1) e(f2) d(f2) e(f1) # E: Argument 1 to "e" has incompatible type "Callable[[VarArg(Any)], int]"; expected "Callable[[VarArg(Any), KwArg(Any)], int]" [builtins fixtures/dict.pyi] [case testCallableWrongTypeType] from typing import Callable from mypy_extensions import Arg def b(f: Callable[[Arg(1, 'x')], int]): pass # E: Invalid type: try using Literal[1] instead? [builtins fixtures/dict.pyi] [case testCallableTooManyVarArg] from typing import Callable from mypy_extensions import VarArg def c(f: Callable[[VarArg(int, 'x')], int]): pass # E: VarArg arguments should not have names [builtins fixtures/dict.pyi] [case testCallableFastParseGood] from typing import Callable from mypy_extensions import VarArg, Arg, KwArg def d(f: Callable[[VarArg(int)], int]): pass # ok def e(f: Callable[[VarArg(), KwArg()], int]): pass # ok def g(f: Callable[[Arg(name='x', type=int)], int]): pass # ok def i(f: Callable[[Arg(name=None, type=int)], int]): pass # ok [builtins fixtures/dict.pyi] [case testCallableFastParseBadArgArgName] from typing import Callable from mypy_extensions import Arg def h(f: Callable[[Arg(gnome='x', type=int)], int]): pass # E: Unexpected argument "gnome" for argument constructor [builtins fixtures/dict.pyi] [case testCallableKindsOrdering] from typing import Callable, Any from mypy_extensions import Arg, VarArg, KwArg, DefaultArg, NamedArg def f(f: Callable[[VarArg(), int], int]): pass # E: Required positional args may not appear after default, named or var args def g(f: Callable[[VarArg(), VarArg()], int]): pass # E: Var args may not appear after named or var args def h(f: Callable[[KwArg(), KwArg()], int]): pass # E: You may only have one **kwargs argument def i(f: Callable[[DefaultArg(), int], int]): pass # E: Required positional args may not appear after default, named or var args def j(f: Callable[[NamedArg(Any, 'x'), DefaultArg(int, 'y')], int]): pass # E: Positional default args may not appear after named 
or var args def k(f: Callable[[KwArg(), NamedArg(Any, 'x')], int]): pass # E: A **kwargs argument must be the last argument [builtins fixtures/dict.pyi] [case testCallableDuplicateNames] from typing import Callable from mypy_extensions import Arg, VarArg, KwArg, DefaultArg def f(f: Callable[[Arg(int, 'x'), int, Arg(int, 'x')], int]): pass # E: Duplicate argument 'x' in Callable [builtins fixtures/dict.pyi] [case testCallableWithKeywordOnlyArg] from typing import Callable from mypy_extensions import NamedArg def a(f: Callable[[NamedArg(int, 'x')], int]): f(x=4) f(2) # E: Too many positional arguments f() # E: Missing named argument "x" f(y=3) # E: Unexpected keyword argument "y" f(x="foo") # E: Argument "x" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableWithKeywordOnlyOptionalArg] from typing import Callable from mypy_extensions import DefaultNamedArg def a(f: Callable[[DefaultNamedArg(int, 'x')], int]): f(x=4) f(2) # E: Too many positional arguments f() f(y=3) # E: Unexpected keyword argument "y" f(x="foo") # E: Argument "x" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableWithKwargs] from typing import Callable from mypy_extensions import KwArg def a(f: Callable[[KwArg(int)], int]): f(x=4) f(2) # E: Too many arguments f() f(y=3) f(x=4, y=3, z=10) f(x="foo") # E: Argument "x" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableWithVarArg] from typing import Callable from mypy_extensions import VarArg def a(f: Callable[[VarArg(int)], int]): f(x=4) # E: Unexpected keyword argument "x" f(2) f() f(3, 4, 5) f("a") # E: Argument 1 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableArgKindSubtyping] from typing import Callable from mypy_extensions import Arg, DefaultArg int_str_fun = None # type: Callable[[int, str], str] int_opt_str_fun = None # type: Callable[[int, DefaultArg(str, None)], str] 
int_named_str_fun = None # type: Callable[[int, Arg(str, 's')], str] def isf(ii: int, ss: str) -> str: return ss def iosf(i: int, s: str = "bar") -> str: return s def isf_unnamed(__i: int, __s: str) -> str: return __s int_str_fun = isf int_str_fun = isf_unnamed int_named_str_fun = isf_unnamed # E: Incompatible types in assignment (expression has type "Callable[[int, str], str]", variable has type "Callable[[int, Arg(str, 's')], str]") int_opt_str_fun = iosf int_str_fun = iosf int_opt_str_fun = isf # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str]", variable has type "Callable[[int, DefaultArg(str)], str]") int_named_str_fun = isf # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str]", variable has type "Callable[[int, Arg(str, 's')], str]") int_named_str_fun = iosf [builtins fixtures/dict.pyi] -- Callable[..., T] -- ---------------- [case testCallableWithArbitraryArgs] from typing import Callable def f(x: Callable[..., int]) -> None: x() x(1) x(z=1) x() + '' # E: Unsupported operand types for + ("int" and "str") [out] [case testCallableWithArbitraryArgs2] from typing import Callable def f(x: Callable[..., int]) -> None: x(*[1], **{'x': 2}) [builtins fixtures/dict.pyi] [case testCastWithCallableAndArbitraryArgs] from typing import Callable, cast f = cast(Callable[..., int], None) f(x=4) + '' # E: Unsupported operand types for + ("int" and "str") [case testCallableWithArbitraryArgsInErrorMessage] from typing import Callable def f(x: Callable[..., int]) -> None: if int(): x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[..., int]") [out] [case testCallableWithArbitraryArgsInGenericFunction] from typing import Callable, TypeVar T = TypeVar('T') def f(x: Callable[..., T]) -> T: pass def g(*x: int) -> str: pass x = f(g) x + 1 # E: Unsupported left operand type for + ("str") [builtins fixtures/list.pyi] [case 
testCallableWithArbitraryArgsSubtyping] from typing import Callable def f(x: Callable[..., int]) -> None: pass def g1(): pass def g2(x, y) -> int: pass def g3(*, y: str) -> int: pass def g4(*, y: int) -> str: pass f(g1) f(g2) f(g3) f(g4) # E: Argument 1 to "f" has incompatible type "Callable[[NamedArg(int, 'y')], str]"; expected "Callable[..., int]" [case testCallableWithArbitraryArgsSubtypingWithGenericFunc] from typing import Callable, TypeVar T = TypeVar('T') def f(x: Callable[..., int]) -> None: pass def g1(x: T) -> int: pass def g2(*x: T) -> int: pass def g3(*x: T) -> T: pass f(g1) f(g2) f(g3) -- (...) -> T -- ---------------- [case testEllipsisWithArbitraryArgsOnBareFunction] def f(x, y, z): # type: (...) -> None pass f(1, "hello", []) f(x=1, y="hello", z=[]) [builtins fixtures/dict.pyi] [case testEllipsisWithArbitraryArgsOnBareFunctionWithDefaults] def f(x, y=1, z="hey"): # type: (...) -> None pass f(1, "hello", []) f(x=1, y="hello", z=[]) [builtins fixtures/dict.pyi] [case testEllipsisWithArbitraryArgsOnBareFunctionWithKwargs] from typing import Dict def f(x, **kwargs): # type: (...) -> None success_dict_type = kwargs # type: Dict[str, str] failure_dict_type = kwargs # type: Dict[int, str] # E: Incompatible types in assignment (expression has type "Dict[str, Any]", variable has type "Dict[int, str]") f(1, thing_in_kwargs=["hey"]) [builtins fixtures/dict.pyi] [out] [case testEllipsisWithArbitraryArgsOnBareFunctionWithVarargs] from typing import Tuple, Any def f(x, *args): # type: (...) -> None success_tuple_type = args # type: Tuple[Any, ...] fail_tuple_type = args # type: None # E: Incompatible types in assignment (expression has type "Tuple[Any, ...]", variable has type "None") f(1, "hello") [builtins fixtures/tuple.pyi] [out] [case testEllipsisWithArbitraryArgsOnInstanceMethod] class A: def f(self, x, y, z): # type: (...) -> None pass [case testEllipsisWithArbitraryArgsOnClassMethod] class A: @classmethod def f(cls, x, y, z): # type: (...) 
-> None pass [builtins fixtures/classmethod.pyi] [case testEllipsisWithArbitraryArgsOnStaticMethod] class A: @staticmethod def f(x, y, z): # type: (...) -> None pass [builtins fixtures/staticmethod.pyi] [case testEllipsisWithSomethingAfterItFails] def f(x, y, z): # type: (..., int) -> None pass [out] main:1: error: Ellipses cannot accompany other argument types in function type signature [case testEllipsisWithSomethingBeforeItFails] def f(x, y, z): # type: (int, ...) -> None pass [out] main:1: error: Ellipses cannot accompany other argument types in function type signature [case testRejectCovariantArgument] from typing import TypeVar, Generic t = TypeVar('t', covariant=True) class A(Generic[t]): def foo(self, x: t) -> None: return None [builtins fixtures/bool.pyi] [out] main:5: error: Cannot use a covariant type variable as a parameter [case testRejectCovariantArgumentSplitLine] from typing import TypeVar, Generic t = TypeVar('t', covariant=True) class A(Generic[t]): def foo(self, x: t) -> None: return None [builtins fixtures/bool.pyi] [out] main:6: error: Cannot use a covariant type variable as a parameter [case testRejectCovariantArgumentInLambda] from typing import TypeVar, Generic, Callable t = TypeVar('t', covariant=True) class Thing(Generic[t]): def chain(self, func: Callable[[t], None]) -> None: pass def end(self) -> None: return self.chain( # Note that lambda args have no line numbers lambda _: None) [builtins fixtures/bool.pyi] [out] main:8: error: Cannot use a covariant type variable as a parameter [case testRejectCovariantArgumentInLambdaSplitLine] from typing import TypeVar, Generic, Callable [case testRejectContravariantReturnType] from typing import TypeVar, Generic t = TypeVar('t', contravariant=True) class A(Generic[t]): def foo(self) -> t: return None [builtins fixtures/bool.pyi] [out] main:5: error: Cannot use a contravariant type variable as return type [case testAcceptCovariantReturnType] from typing import TypeVar, Generic t = TypeVar('t', 
covariant=True) class A(Generic[t]): def foo(self) -> t: return None [builtins fixtures/bool.pyi] [case testAcceptContravariantArgument] from typing import TypeVar, Generic t = TypeVar('t', contravariant=True) class A(Generic[t]): def foo(self, x: t) -> None: return None [builtins fixtures/bool.pyi] -- Redefining functions -- -------------------- [case testRedefineFunction] from typing import Any def f(x) -> Any: pass def g(x, y): pass def h(x): pass def j(y) -> Any: pass f = h f = j # E: Incompatible types in assignment (expression has type "Callable[[Arg(Any, 'y')], Any]", variable has type "Callable[[Arg(Any, 'x')], Any]") f = g # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[Any], Any]") [case testRedefineFunction2] def f() -> None: pass def f() -> None: pass # E: Name 'f' already defined on line 1 -- Special cases -- ------------- [case testFunctionDefinitionWithForStatement] for _ in [1]: def f(): pass else: def g(): pass f() g() [builtins fixtures/list.pyi] [case testFunctionDefinitionWithWhileStatement] while bool(): def f(): pass else: def g(): pass f() g() [builtins fixtures/bool.pyi] [case testBareCallable] from typing import Callable, Any def foo(f: Callable) -> bool: return f() def f1() -> bool: return False foo(f1) [builtins fixtures/bool.pyi] [case testFunctionNestedWithinWith] from typing import Any a = 1 # type: Any with a: def f() -> None: pass f(1) # E: Too many arguments for "f" [case testNameForDecoratorMethod] from typing import Callable class A: def f(self) -> None: # In particular, test that the error message contains "g" of "A". 
self.g() # E: Too few arguments for "g" of "A" self.g(1) @dec def g(self, x: str) -> None: pass def dec(f: Callable[[A, str], None]) -> Callable[[A, int], None]: pass [out] [case testUnknownFunctionNotCallable] def f() -> None: pass def g(x: int) -> None: pass h = f if bool() else g reveal_type(h) # N: Revealed type is 'builtins.function' h(7) # E: Cannot call function of unknown type [builtins fixtures/bool.pyi] -- Positional-only arguments -- ------------------------- [case testPositionalOnlyArg] def f(__a: int) -> None: pass def g(__a__: int) -> None: pass f(1) f(__a=1) # E: Unexpected keyword argument "__a" for "f" g(1) # Argument names that also end with __ are not positional-only. g(__a__=1) [builtins fixtures/bool.pyi] [out] main:1: note: "f" defined here [case testMagicMethodPositionalOnlyArg] class A(object): def __eq__(self, other) -> bool: return True # We are all equal. # N: "__eq__" of "A" defined here a = A() a.__eq__(a) a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A" [builtins fixtures/bool.pyi] [case testMagicMethodPositionalOnlyArgFastparse] class A(object): def __eq__(self, other) -> bool: return True # We are all equal. 
# N: "__eq__" of "A" defined here a = A() a.__eq__(a) a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A" [builtins fixtures/bool.pyi] [case testTupleArguments] # flags: --python-version 2.7 def f(a, (b, c), d): pass [case testTupleArgumentsFastparse] # flags: --python-version 2.7 def f(a, (b, c), d): pass -- Type variable shenanagins -- ------------------------- [case testGenericFunctionTypeDecl] from typing import Callable, TypeVar T = TypeVar('T') f: Callable[[T], T] reveal_type(f) # N: Revealed type is 'def [T] (T`-1) -> T`-1' def g(__x: T) -> T: pass f = g reveal_type(f) # N: Revealed type is 'def [T] (T`-1) -> T`-1' i = f(3) reveal_type(i) # N: Revealed type is 'builtins.int*' [case testFunctionReturningGenericFunction] from typing import Callable, TypeVar T = TypeVar('T') def deco() -> Callable[[T], T]: pass reveal_type(deco) # N: Revealed type is 'def () -> def [T] (T`-1) -> T`-1' f = deco() reveal_type(f) # N: Revealed type is 'def [T] (T`-1) -> T`-1' i = f(3) reveal_type(i) # N: Revealed type is 'builtins.int*' [case testFunctionReturningGenericFunctionPartialBinding] from typing import Callable, TypeVar T = TypeVar('T') U = TypeVar('U') def deco(x: U) -> Callable[[T, U], T]: pass reveal_type(deco) # N: Revealed type is 'def [U] (x: U`-1) -> def [T] (T`-2, U`-1) -> T`-2' f = deco("foo") reveal_type(f) # N: Revealed type is 'def [T] (T`-2, builtins.str*) -> T`-2' i = f(3, "eggs") reveal_type(i) # N: Revealed type is 'builtins.int*' [case testFunctionReturningGenericFunctionTwoLevelBinding] from typing import Callable, TypeVar T = TypeVar('T') R = TypeVar('R') def deco() -> Callable[[T], Callable[[T, R], R]]: pass f = deco() reveal_type(f) # N: Revealed type is 'def [T] (T`-1) -> def [R] (T`-1, R`-2) -> R`-2' g = f(3) reveal_type(g) # N: Revealed type is 'def [R] (builtins.int*, R`-2) -> R`-2' s = g(4, "foo") reveal_type(s) # N: Revealed type is 'builtins.str*' [case testGenericFunctionReturnAsDecorator] from typing import 
Callable, TypeVar T = TypeVar('T') def deco(__i: int) -> Callable[[T], T]: pass @deco(3) def lol(x: int) -> str: ... reveal_type(lol) # N: Revealed type is 'def (x: builtins.int) -> builtins.str' s = lol(4) reveal_type(s) # N: Revealed type is 'builtins.str' [case testGenericFunctionOnReturnTypeOnly] from typing import TypeVar, List T = TypeVar('T') def make_list() -> List[T]: pass l: List[int] = make_list() bad = make_list() # E: Need type annotation for 'bad' (hint: "bad: List[] = ...") [builtins fixtures/list.pyi] [case testAnonymousArgumentError] def foo(__b: int, x: int, y: int) -> int: pass foo(x=2, y=2) # E: Missing positional argument foo(y=2) # E: Missing positional arguments [case testMissingArgumentError] def f(a, b, c, d=None) -> None: pass f(1, 2, d=3) # E: Missing positional argument "c" in call to "f" [case testMissingArgumentErrorMoreThanOneOptional] def f(a: int, b=None, c=None) -> None: pass f(b=4) # E: Missing positional argument "a" in call to "f" [case testMissingArgumentsError] def f(a, b, c, d=None) -> None: pass f(1, d=3) # E: Missing positional arguments "b", "c" in call to "f" [case testReturnTypeLineNumberWithDecorator] def dec(f): pass @dec def test(a: str) -> (str,): # E: Syntax error in type annotation # N: Suggestion: Is there a spurious trailing comma? 
return None [case testReturnTypeLineNumberNewLine] def fn(a: str ) -> badtype: # E: Name 'badtype' is not defined pass [case testArgumentTypeLineNumberWithDecorator] def dec(f): pass @dec def some_method(self: badtype): pass # E: Name 'badtype' is not defined [case TestArgumentTypeLineNumberNewline] def fn( a: badtype) -> None: # E: Name 'badtype' is not defined pass [case testInferredTypeSubTypeOfReturnType] from typing import Union, Dict, List def f() -> List[Union[str, int]]: x = ['a'] return x # E: Incompatible return value type (got "List[str]", expected "List[Union[str, int]]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant \ # N: Perhaps you need a type annotation for "x"? Suggestion: "List[Union[str, int]]" def g() -> Dict[str, Union[str, int]]: x = {'a': 'a'} return x # E: Incompatible return value type (got "Dict[str, str]", expected "Dict[str, Union[str, int]]") \ # N: "Dict" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Mapping" instead, which is covariant in the value type \ # N: Perhaps you need a type annotation for "x"? Suggestion: "Dict[str, Union[str, int]]" def h() -> Dict[Union[str, int], str]: x = {'a': 'a'} return x # E: Incompatible return value type (got "Dict[str, str]", expected "Dict[Union[str, int], str]") \ # N: Perhaps you need a type annotation for "x"? 
Suggestion: "Dict[Union[str, int], str]" def i() -> List[Union[int, float]]: x: List[int] = [1] return x # E: Incompatible return value type (got "List[int]", expected "List[Union[int, float]]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/dict.pyi] [case testInferredTypeNotSubTypeOfReturnType] from typing import Union, List def f() -> List[Union[int, float]]: x = ['a'] return x # E: Incompatible return value type (got "List[str]", expected "List[Union[int, float]]") def g() -> List[Union[str, int]]: x = ('a', 2) return x # E: Incompatible return value type (got "Tuple[str, int]", expected "List[Union[str, int]]") [builtins fixtures/list.pyi] [case testInferredTypeIsObjectMismatch] from typing import Union, Dict, List def f() -> Dict[str, Union[str, int]]: x = {'a': 'a', 'b': 2} return x # E: Incompatible return value type (got "Dict[str, object]", expected "Dict[str, Union[str, int]]") def g() -> Dict[str, Union[str, int]]: x: Dict[str, Union[str, int]] = {'a': 'a', 'b': 2} return x def h() -> List[Union[str, int]]: x = ['a', 2] return x # E: Incompatible return value type (got "List[object]", expected "List[Union[str, int]]") def i() -> List[Union[str, int]]: x: List[Union[str, int]] = ['a', 2] return x [builtins fixtures/dict.pyi] [case testLambdaSemanal] f = lambda: xyz [out] main:1: error: Name 'xyz' is not defined [case testLambdaTypeCheck] f = lambda: 1 + '1' [out] main:1: error: Unsupported operand types for + ("int" and "str") [case testLambdaTypeInference] f = lambda: 5 reveal_type(f) [out] main:2: note: Revealed type is 'def () -> builtins.int' [case testRevealLocalsFunction] a = 1.0 class A: pass def f(a: int, b: int) -> int: reveal_locals() c = a + b class C: pass reveal_locals() return c reveal_locals() [out] main:6: note: Revealed local types are: main:6: note: a: builtins.int main:6: note: b: builtins.int main:9: 
note: Revealed local types are: main:9: note: a: builtins.int main:9: note: b: builtins.int main:9: note: c: builtins.int main:12: note: Revealed local types are: main:12: note: a: builtins.float [case testNoComplainOverloadNone] # flags: --no-strict-optional from typing import overload, Optional @overload def bar(x: None) -> None: ... @overload def bar(x: int) -> str: ... def bar(x: Optional[int]) -> Optional[str]: if x is None: return None return "number" reveal_type(bar(None)) # N: Revealed type is 'None' [builtins fixtures/isinstance.pyi] [out] [case testNoComplainOverloadNoneStrict] # flags: --strict-optional from typing import overload, Optional @overload def bar(x: None) -> None: ... @overload def bar(x: int) -> str: ... def bar(x: Optional[int]) -> Optional[str]: if x is None: return None return "number" reveal_type(bar(None)) # N: Revealed type is 'None' [builtins fixtures/isinstance.pyi] [out] [case testNoComplainInferredNone] # flags: --no-strict-optional from typing import TypeVar, Optional T = TypeVar('T') def X(val: T) -> T: ... x_in = None def Y(x: Optional[str] = X(x_in)): ... xx: Optional[int] = X(x_in) [out] [case testNoComplainInferredNoneStrict] # flags: --strict-optional from typing import TypeVar, Optional T = TypeVar('T') def X(val: T) -> T: ... x_in = None def Y(x: Optional[str] = X(x_in)): ... xx: Optional[int] = X(x_in) [out] [case testNoComplainNoneReturnFromUntyped] def foo() -> None: pass def lol(): x = foo() [case testConditionalImportFunction] import p [file p/__init__.py] if int(): from p.a import f elif int(): from p.b import f else: from p.c import f [file p/a.py] def f() -> int: ... [file p/b.py] from p.d import f [file p/c.py] def f() -> int: ... [file p/d.py] import p def f() -> int: ... 
[case testLambdaDefaultTypeErrors] lambda a=nonsense: a # E: Name 'nonsense' is not defined lambda a=(1 + 'asdf'): a # E: Unsupported operand types for + ("int" and "str") def f(x: int = i): # E: Name 'i' is not defined i = 42 mypy-0.761/test-data/unit/check-generic-subtyping.test0000644€tŠÔÚ€2›s®0000005027113576752246027204 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for the type checker related to subtyping and inheritance with -- generics. -- Subtyping + inheritance -- ----------------------- [case testSubtypingAndInheritingNonGenericTypeFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') ac = None # type: A[C] ad = None # type: A[D] b = None # type: B if int(): b = ad # E: Incompatible types in assignment (expression has type "A[D]", variable has type "B") ad = b # E: Incompatible types in assignment (expression has type "B", variable has type "A[D]") if int(): b = ac # E: Incompatible types in assignment (expression has type "A[C]", variable has type "B") if int(): b = b ac = b class C: pass class A(Generic[T]): pass class B(A[C]): pass class D: pass [case testSubtypingAndInheritingGenericTypeFromNonGenericType] from typing import TypeVar, Generic T = TypeVar('T') a = None # type: A bc = None # type: B[C] bd = None # type: B[D] if int(): bc = bd # E: Incompatible types in assignment (expression has type "B[D]", variable has type "B[C]") bd = bc # E: Incompatible types in assignment (expression has type "B[C]", variable has type "B[D]") if int(): bc = a # E: Incompatible types in assignment (expression has type "A", variable has type "B[C]") bd = a # E: Incompatible types in assignment (expression has type "A", variable has type "B[D]") if int(): a = bc if int(): a = bd class A: pass class B(A, Generic[T]): pass class C: pass class D: pass [case testSubtypingAndInheritingGenericTypeFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') ac = None # type: A[C] ad = None # type: A[D] bcc = None # 
type: B[C, C] bdc = None # type: B[D, C] if int(): ad = bcc # E: Incompatible types in assignment (expression has type "B[C, C]", variable has type "A[D]") if int(): ad = bdc # E: Incompatible types in assignment (expression has type "B[D, C]", variable has type "A[D]") bcc = ac # E: Incompatible types in assignment (expression has type "A[C]", variable has type "B[C, C]") bdc = ac # E: Incompatible types in assignment (expression has type "A[C]", variable has type "B[D, C]") if int(): bcc = bcc bdc = bdc ac = bcc if int(): ac = bdc class A(Generic[T]): pass class B(A[S], Generic[T, S]): pass class C: pass class D: pass [case testSubtypingAndInheritingGenericTypeFromGenericTypeAcrossHierarchy] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') X = TypeVar('X') Y = TypeVar('Y') ae = None # type: A[A[E]] af = None # type: A[A[F]] cef = None # type: C[E, F] cff = None # type: C[F, F] cfe = None # type: C[F, E] if int(): ae = cef # E: Incompatible types in assignment (expression has type "C[E, F]", variable has type "A[A[E]]") af = cfe # E: Incompatible types in assignment (expression has type "C[F, E]", variable has type "A[A[F]]") if int(): ae = cfe af = cef if int(): af = cff class A(Generic[T]): pass class B(A[S], Generic[T, S]): pass class C(B[A[X], A[Y]], Generic[X, Y]): pass class E: pass class F: pass [case testIncludingBaseClassTwice] from typing import TypeVar, Generic t = TypeVar('t') class I(Generic[t]): pass class A(I[C], I[object]): pass # E: Duplicate base class "I" class C: pass -- Accessing inherited generic members -- ----------------------------------- [case testAccessingMethodInheritedFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') b = None # type: B[C, D] c, d = None, None # type: (C, D) b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D" b.f(d) class A(Generic[T]): def f(self, a: T) -> None: pass class B(A[S], Generic[T, S]): pass class C: pass class D: pass [case 
testAccessingMethodInheritedFromGenericTypeInNonGenericType] from typing import TypeVar, Generic T = TypeVar('T') b, c, d = None, None, None # type: (B, C, D) b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D" b.f(d) class C: pass class D: pass class A(Generic[T]): def f(self, a: T) -> None: pass class B(A[D]): pass [case testAccessingMemberVarInheritedFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T]): def __init__(self, a: T) -> None: self.a = a b = None # type: B[C, D] c, d = None, None # type: (C, D) b.a = c # E: Incompatible types in assignment (expression has type "C", variable has type "D") b.a = d class B(A[S], Generic[T, S]): pass class C: pass class D: pass -- Overriding with generic types -- ----------------------------- [case testOverridingMethodInSimpleTypeInheritingGenericType] from typing import TypeVar, Generic T = TypeVar('T') class B(Generic[T]): def f(self, a: T) -> None: pass def g(self, a: T) -> None: pass class C: pass class D: pass class A(B[C]): def f(self, a: D) -> None: pass \ # E: Argument 1 of "f" is incompatible with supertype "B"; supertype defines the argument type as "C" def g(self, a: C) -> None: pass [out] [case testOverridingMethodInGenericTypeInheritingSimpleType] from typing import TypeVar, Generic T = TypeVar('T') class C: pass class B: def f(self, a: C) -> None: pass def g(self, a: C) -> None: pass class A(B, Generic[T]): def f(self, a: T) -> None: pass \ # E: Argument 1 of "f" is incompatible with supertype "B"; supertype defines the argument type as "C" def g(self, a: 'C') -> None: pass [out] [case testOverridingMethodInGenericTypeInheritingGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def f(self, a: T) -> None: pass def g(self, a: T) -> None: pass class A(B[S], Generic[T, S]): def f(self, a: T) -> None: pass \ # E: Argument 1 of "f" is incompatible with supertype "B"; supertype defines 
the argument type as "S" def g(self, a: S) -> None: pass [out] [case testOverridingMethodInMultilevelHierarchyOfGenericTypes] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') U = TypeVar('U') V = TypeVar('V') class D: pass class C(Generic[T, U, V]): def f(self, a: V) -> None: pass def g(self, a: V) -> None: pass class B(C[D, D, T], Generic[T]): pass class A(B[S], Generic[T, S]): def f(self, a: T) -> None: pass \ # E: Argument 1 of "f" is incompatible with supertype "C"; supertype defines the argument type as "S" def g(self, a: S) -> None: pass [out] [case testOverrideGenericMethodInNonGenericClass] from typing import TypeVar T = TypeVar('T') S = TypeVar('S') class A: def f(self, x: T, y: S) -> None: pass class B(A): def f(self, x: S, y: T) -> None: pass class C(A): # Okay, because T = object allows any type for the arguments. def f(self, x: T, y: T) -> None: pass [case testOverrideGenericMethodInNonGenericClassLists] from typing import TypeVar, List T = TypeVar('T') S = TypeVar('S') class A: def f(self, x: List[T], y: List[S]) -> None: pass class B(A): def f(self, x: List[S], y: List[T]) -> None: pass class C(A): def f(self, x: List[T], y: List[T]) -> None: pass # E: Signature of "f" incompatible with supertype "A" [builtins fixtures/list.pyi] [out] [case testOverrideGenericMethodInNonGenericClassGeneralize] from typing import TypeVar T = TypeVar('T') T1 = TypeVar('T1', bound=str) S = TypeVar('S') class A: def f(self, x: int, y: S) -> None: pass class B(A): def f(self, x: T, y: S) -> None: pass class C(A): def f(self, x: T, y: str) -> None: pass class D(A): def f(self, x: T1, y: S) -> None: pass # TODO: This error could be more specific. 
[out] main:12: error: Argument 2 of "f" is incompatible with supertype "A"; supertype defines the argument type as "S" main:14: error: Signature of "f" incompatible with supertype "A" -- Inheritance from generic types with implicit dynamic supertype -- -------------------------------------------------------------- [case testInheritanceFromGenericWithImplicitDynamicAndSubtyping] from typing import TypeVar, Generic T = TypeVar('T') a = None # type: A bc = None # type: B[C] bd = None # type: B[D] if int(): a = bc # E: Incompatible types in assignment (expression has type "B[C]", variable has type "A") bc = a bd = a class B(Generic[T]): pass class A(B): pass class C: pass class D: pass [out] [case testInheritanceFromGenericWithImplicitDynamicAndExternalAccess] from typing import TypeVar, Generic T = TypeVar('T') class B(Generic[T]): def f(self, a: 'B[T]') -> None: pass def __init__(self, x: 'B[T]') -> None: self.x = x class A(B): pass class C: pass a = None # type: A c = None # type: C bc = None # type: B[C] a.x = c # E: Incompatible types in assignment (expression has type "C", variable has type "B[Any]") a.f(c) # E: Argument 1 to "f" of "B" has incompatible type "C"; expected "B[Any]" a.x = bc a.f(bc) [out] [case testInheritanceFromGenericWithImplicitDynamic] from typing import TypeVar, Generic T = TypeVar('T') a = None # type: A c = None # type: C bc = None # type: B[C] class B(Generic[T]): def f(self, a: 'B[T]') -> None: pass def __init__(self, x: 'B[T]') -> None: self.x = x class A(B): def g(self) -> None: self.x = c # E: Incompatible types in assignment (expression has type "C", variable has type "B[Any]") self.f(c) # E: Argument 1 to "f" of "B" has incompatible type "C"; expected "B[Any]" self.x = bc self.f(bc) class C: pass [out] [case testInheritanceFromGenericWithImplicitDynamicAndOverriding] from typing import TypeVar, Generic, Tuple T = TypeVar('T') class B(Generic[T]): def f(self, a: T, b: 'Tuple[T, B[T]]') -> None: pass class A(B): def f(self, a, b): pass 
[builtins fixtures/tuple.pyi] [out] -- Inheritance from generic types and super expressions -- ---------------------------------------------------- [case testSuperExpressionsWhenInheritingFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def f(self, a: T) -> None: pass class A(B[S], Generic[T, S]): def g(self, t: T, s: S) -> None: super().f(t) # E: Argument 1 to "f" of "B" has incompatible type "T"; expected "S" super().f(s) [out] [case testSuperExpressionsWhenInheritingFromGenericTypeAndDeepHierarchy] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') U = TypeVar('U') V = TypeVar('V') class C(Generic[T, U, V]): def f(self, a: V) -> None: pass class D: pass class B(C[D, D, T], Generic[T]): pass class A(B[S], Generic[T, S]): def g(self, t: T, s: S) -> None: super().f(t) # E: Argument 1 to "f" of "C" has incompatible type "T"; expected "S" super().f(s) [out] -- Type of inherited constructor -- ----------------------------- [case testInheritedConstructor] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, x: T) -> None: pass class B(A[T], Generic[T]): pass class C(A[int]): pass class D(A[A[T]], Generic[T]): pass B(1) C(1) C('a') # E: Argument 1 to "C" has incompatible type "str"; expected "int" D(A(1)) D(1) # E: Argument 1 to "D" has incompatible type "int"; expected "A[]" [case testInheritedConstructor2] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') Z = TypeVar('Z') class A(Generic[T, U]): def __init__(self, x: T, y: U, z: Z) -> None: pass class B(A[int, T], Generic[T]): pass class C(B[A[T, str]], Generic[T, U]): pass # C[T, U] <: B[A[T, str]] <: A[int, A[T, str]] C(1, A(1, 'a', 0), 'z') C(1, A('1', 'a', 0), 'z') C('1', A(1, 'a', 0), 'z') # E: Argument 1 to "C" has incompatible type "str"; expected "int" C(1, A(1, 1, 0), 'z') # E: Argument 2 to "A" has incompatible type "int"; expected "str" -- Subtyping with a generic 
abstract base class -- -------------------------------------------- [case testSubtypingWithGenericTypeSubclassingGenericAbstractClass] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T') S = TypeVar('S') acd = None # type: A[C, D] adc = None # type: A[D, C] ic = None # type: I[C] id = None # type: I[D] if int(): ic = acd # E: Incompatible types in assignment (expression has type "A[C, D]", variable has type "I[C]") id = adc # E: Incompatible types in assignment (expression has type "A[D, C]", variable has type "I[D]") adc = ic # E: Incompatible types in assignment (expression has type "I[C]", variable has type "A[D, C]") if int(): ic = adc id = acd class I(Generic[T]): @abstractmethod def f(self): pass class A(I[S], Generic[T, S]): pass class C: pass class D: pass [case testSubtypingWithTypeImplementingGenericABCViaInheritance] from typing import TypeVar, Generic S = TypeVar('S') a, b = None, None # type: (A, B) ic, id, ie = None, None, None # type: (I[C], I[D], I[E]) class I(Generic[S]): pass class B(I[C]): pass class A(B): pass if int(): ie = a # E: Incompatible types in assignment (expression has type "A", variable has type "I[E]") a = ic # E: Incompatible types in assignment (expression has type "I[C]", variable has type "A") if int(): a = id # E: Incompatible types in assignment (expression has type "I[D]", variable has type "A") if int(): a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") id = a # E: Incompatible types in assignment (expression has type "A", variable has type "I[D]") ic = a b = a class C: pass class D: pass class E: pass [out] [case testSubtypingWithTypeImplementingGenericABCViaInheritance2-skip] from typing import TypeVar, Generic T = TypeVar('T') class I(Generic[T]): pass class A(I[C]): pass class B(A, I[D]): pass # Fail class C: pass class D: pass [out] main:5: error: Class "B" has base "I" duplicated inconsistently [case testSubtypingAndABCExtension] from typing 
import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') a, i, j = None, None, None # type: (A[object], I[object], J[object]) (ii, jj) = (i, j) if int(): ii = a jj = a if int(): jj = i a = i # E: Incompatible types in assignment (expression has type "I[object]", variable has type "A[object]") if int(): a = j # E: Incompatible types in assignment (expression has type "J[object]", variable has type "A[object]") class J(Generic[t]): pass class X(metaclass=ABCMeta): pass class I(X, J[t], Generic[t]): pass class A(I[t], Generic[t]): pass -- Subclassing a generic ABC -- ------------------------- [case testSubclassingGenericABC1] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T') class I(Generic[T]): @abstractmethod def f(self, a: T) -> None: pass @abstractmethod def g(self, a: T) -> None: pass class A(I[C]): def f(self, a: 'D') -> None: pass \ # E: Argument 1 of "f" is incompatible with supertype "I"; supertype defines the argument type as "C" def g(self, a: 'C') -> None: pass class C: pass class D: pass [out] -- Extending a generic ABC with deep type hierarchy -- ------------------------------------------------ [case testSubclassingGenericABCWithDeepHierarchy] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') a = None # type: A ic, id = None, None # type: (I[C], I[D]) if int(): id = a # E: Incompatible types in assignment (expression has type "A", variable has type "I[D]") ic = a class I(Generic[T]): @abstractmethod def f(self, a: T, b: T) -> None: pass @abstractmethod def g(self, a: T, b: 'D') -> None: pass class B(I[C]): def f(self, a: 'C', b: 'C') -> None: pass def g(self, a: 'C', b: Any) -> None: pass class A(B): def g(self, a: 'C', b: 'C') -> None: pass \ # E: Argument 2 of "g" is incompatible with supertype "I"; supertype defines the argument type as "D" def f(self, a: 'C', b: 'C') -> None: pass class C: pass class D: pass [case 
testSubclassingGenericABCWithDeepHierarchy2] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') class I(Generic[T]): @abstractmethod def f(self, a: T, b: T) -> None: pass class B(I[C]): def f(self, a: 'C', b: Any) -> None: pass class A(B): def f(self, a: 'C', b: 'D') -> None: pass \ # E: Argument 2 of "f" is incompatible with supertype "I"; supertype defines the argument type as "C" class C: pass class D: pass [out] -- Implicit Any types and subclassing generic ABC -- ---------------------------------------------- [case testSubclassingGenericABCWithImplicitAny] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') a = None # type: Any ic = None # type: I[C] id = None # type: I[D] ic = a id = a class I(Generic[T]): @abstractmethod def f(self, a: T) -> None: pass class A(I): def f(self, a): pass class C: pass class D: pass [case testSubclassingGenericABCWithImplicitAnyAndDeepHierarchy] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') a = None # type: Any ic = None # type: I[C] id = None # type: I[D] ic = a id = a class I(Generic[T]): @abstractmethod def f(self, a: T, b: T) -> None: pass class B(I): def f(self, a, b): pass class A(B): def f(self, a: 'C', b: 'D') -> None: pass class C: pass class D: pass [case testImplementingGenericABCWithImplicitAnyAndDeepHierarchy2] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') a = None # type: Any jc = None # type: J[C] jd = None # type: J[D] jc = a jd = a class J(Generic[T]): @abstractmethod def f(self, a: T, b: T) -> None: pass class I(J): @abstractmethod def f(self, a, b): pass class A(I): def f(self, a: 'C', b: 'D') -> None: pass class C: pass class D: pass -- Accessing generic ABC members -- ----------------------------- [case testAccessingGenericABCMembers] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T') class I(Generic[T]): 
@abstractmethod def f(self, a: T) -> None: pass class A: pass class B: pass a, b = None, None # type: (A, B) ia = None # type: I[A] ia.f(b) # E: Argument 1 to "f" of "I" has incompatible type "B"; expected "A" ia.f(a) [case testAccessingInheritedGenericABCMembers] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T') class J(Generic[T]): @abstractmethod def f(self, a: T) -> None: pass class I(J[T], Generic[T]): pass class A: pass class B: pass a, b = None, None # type: (A, B) ia = None # type: I[A] ia.f(b) # E: Argument 1 to "f" of "J" has incompatible type "B"; expected "A" ia.f(a) -- Misc -- ---- [case testMultipleAssignmentAndGenericSubtyping] from typing import Iterable n, s = None, None # type: int, str class Nums(Iterable[int]): def __iter__(self): pass def __next__(self): pass n, n = Nums() s, s = Nums() # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/for.pyi] [out] -- Variance -- -------- [case testCovariant] from typing import TypeVar, Generic T = TypeVar('T', covariant=True) class G(Generic[T]): pass class A: pass class B(A): pass class C(B): pass a = None # type: G[A] b = None # type: G[B] c = None # type: G[C] if int(): b = a # E: Incompatible types in assignment (expression has type "G[A]", variable has type "G[B]") b = c [builtins fixtures/bool.pyi] [out] [case testContravariant] from typing import TypeVar, Generic T = TypeVar('T', contravariant=True) class G(Generic[T]): pass class A: pass class B(A): pass class C(B): pass a = None # type: G[A] b = None # type: G[B] c = None # type: G[C] if int(): b = a b = c # E: Incompatible types in assignment (expression has type "G[C]", variable has type "G[B]") [builtins fixtures/bool.pyi] [out] [case testInvariant] from typing import TypeVar, Generic T = TypeVar('T') # invariant (default) class G(Generic[T]): pass class A: pass class B(A): pass class C(B): pass a = None # type: G[A] b = None # type: G[B] c = None # 
type: G[C] if int(): b = a # E: Incompatible types in assignment (expression has type "G[A]", variable has type "G[B]") b = c # E: Incompatible types in assignment (expression has type "G[C]", variable has type "G[B]") [builtins fixtures/bool.pyi] [out] [case testTypeVarSubtypeUnion] from typing import Union, TypeVar, Generic class U: pass class W: pass T = TypeVar('T', bound=Union[U, W]) class Y(Generic[T]): def __init__(self) -> None: pass def f(self) -> T: return U() # E: Incompatible return value type (got "U", expected "T") [out] mypy-0.761/test-data/unit/check-generics.test0000644€tŠÔÚ€2›s®0000020341713576752246025347 0ustar jukkaDROPBOX\Domain Users00000000000000-- Simple generic types -- -------------------- [case testGenericMethodReturnType] from typing import TypeVar, Generic T = TypeVar('T') a, b, c = None, None, None # type: (A[B], B, C) if int(): c = a.f() # E: Incompatible types in assignment (expression has type "B", variable has type "C") b = a.f() class A(Generic[T]): def f(self) -> T: pass class B: pass class C: pass [case testGenericMethodArgument] from typing import TypeVar, Generic T = TypeVar('T') a.f(c) # Fail a.f(b) a = None # type: A[B] b = None # type: B c = None # type: C class A(Generic[T]): def f(self, a: T) -> None: pass class B: pass class C: pass [out] main:3: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" [case testGenericMemberVariable] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, v: T) -> None: self.v = v a, b, c = None, None, None # type: (A[B], B, C) a.v = c # Fail a.v = b class B: pass class C: pass [out] main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B") [case testGenericMemberVariable] from typing import TypeVar, Generic T = TypeVar('T') a, b, c = None, None, None # type: (A[B], B, C) a.v = c # Fail a.v = b class A(Generic[T]): v = None # type: T class B: pass class C: pass [out] main:4: error: Incompatible 
types in assignment (expression has type "C", variable has type "B") [case testSimpleGenericSubtyping] from typing import TypeVar, Generic T = TypeVar('T') b, bb, c = None, None, None # type: (A[B], A[B], A[C]) if int(): c = b # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]") b = c # E: Incompatible types in assignment (expression has type "A[C]", variable has type "A[B]") if int(): b = b if int(): b = bb class A(Generic[T]): pass class B: pass class C(B): pass [case testGenericTypeCompatibilityWithAny] from typing import Any, TypeVar, Generic T = TypeVar('T') b, c, d = None, None, None # type: (A[B], A[C], A[Any]) b = d c = d d = b d = c class A(Generic[T]): pass class B: pass class C(B): pass [out] [case testTypeVariableAsTypeArgument] from typing import TypeVar, Generic T = TypeVar('T') a = None # type: A[B] b = None # type: A[B] c = None # type: A[C] a.v = c # E: Incompatible types in assignment (expression has type "A[C]", variable has type "A[B]") if int(): c = a.v # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]") a.v = b if int(): b = a.v class A(Generic[T]): v = None # type: A[T] class B: pass class C: pass [case testMultipleGenericTypeParametersWithMemberVars] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[B, C] s = None # type: B t = None # type: C if int(): t = a.s # E: Incompatible types in assignment (expression has type "B", variable has type "C") s = a.t # E: Incompatible types in assignment (expression has type "C", variable has type "B") if int(): s = a.s t = a.t class A(Generic[S, T]): s = None # type: S t = None # type: T class B: pass class C: pass [case testMultipleGenericTypeParametersWithMethods] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[B, C] s = None # type: B t = None # type: C a.f(s, s) # Fail a.f(t, t) # Fail a.f(s, t) class A(Generic[S, T]): def f(self, s: S, t: T) 
-> None: pass class B: pass class C: pass [out] main:8: error: Argument 2 to "f" of "A" has incompatible type "B"; expected "C" main:9: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" [case testMultipleGenericTypeParametersAndSubtyping] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') bc = None # type: A[B, C] bb = None # type: A[B, B] cb = None # type: A[C, B] if int(): bb = bc # E: Incompatible types in assignment (expression has type "A[B, C]", variable has type "A[B, B]") if int(): bb = cb # E: Incompatible types in assignment (expression has type "A[C, B]", variable has type "A[B, B]") bc = bb # E: Incompatible types in assignment (expression has type "A[B, B]", variable has type "A[B, C]") if int(): bb = bb bc = bc class A(Generic[S, T]): s = None # type: S t = None # type: T class B: pass class C(B):pass -- Simple generic type bodies -- -------------------------- [case testGenericTypeBody1] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: T def f(self, b: T) -> T: self.f(x) # Fail d = self # type: A[B] # Fail self.a = self.f(self.a) return self.a c = self # type: A[T] x = None # type: B class B: pass [out] main:7: error: Argument 1 to "f" of "A" has incompatible type "B"; expected "T" main:8: error: Incompatible types in assignment (expression has type "A[T]", variable has type "A[B]") [case testGenericTypeBodyWithMultipleVariables] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') class A(Generic[S, T]): def f(self) -> None: s = None # type: S t = None # type: T if int(): s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S") t = s # E: Incompatible types in assignment (expression has type "S", variable has type "T") a = self # type: A[S, B] # E: Incompatible types in assignment (expression has type "A[S, T]", variable has type "A[S, B]") b = self # type: A[T, T] # E: Incompatible types in assignment (expression 
has type "A[S, T]", variable has type "A[T, T]") c = self # type: A[S, T] if int(): t = t class B: pass [out] [case testCompatibilityOfNoneWithTypeVar] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> None: a = None # type: T a = None [out] [case testCompatibilityOfTypeVarWithObject] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> T: a = object() # type: T # E: Incompatible types in assignment (expression has type "object", variable has type "T") if int(): a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "T") b = self.f() # type: object if int(): b = self.f() return None [out] -- Operations with generic types -- ----------------------------- [case testGenericOperations] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[B, C] b = None # type: B c = None # type: C if int(): b = a + b # E: Incompatible types in assignment (expression has type "C", variable has type "B") c = a + c # E: Unsupported operand types for + ("A[B, C]" and "C") if int(): c = a[c] # E: Incompatible types in assignment (expression has type "B", variable has type "C") b = a[b] # E: Invalid index type "B" for "A[B, C]"; expected type "C" if int(): c = a + b b = a[c] class A(Generic[S, T]): def __add__(self, a: S) -> T: pass def __getitem__(self, i: T) -> S: pass class B: pass class C: pass [case testOperatorAssignmentWithIndexLvalue1] from typing import TypeVar, Generic T = TypeVar('T') b = None # type: B c = None # type: C ac = None # type: A[C] ac[b] += b # Fail ac[c] += c # Fail ac[b] += c ac[b] = ac[b] + c class A(Generic[T]): def __getitem__(self, i: 'B') -> T: pass def __setitem__(self, i: 'B', v: T) -> None: pass class B: pass class C: def __add__(self, o: 'C') -> 'C': pass [out] main:7: error: Unsupported operand types for + ("C" and "B") main:8: error: Invalid index type "C" for "A[C]"; expected type "B" [case 
testOperatorAssignmentWithIndexLvalue2] from typing import TypeVar, Generic T = TypeVar('T') b = None # type: B c = None # type: C ac = None # type: A[C] ac[b] += c # Fail ac[c] += c # Fail ac[b] = ac[b] + c # Fail class A(Generic[T]): def __getitem__(self, i: 'B') -> T: pass def __setitem__(self, i: 'C', v: T) -> None: pass class B: pass class C: def __add__(self, o: 'C') -> 'C': pass [out] main:7: error: Invalid index type "B" for "A[C]"; expected type "C" main:8: error: Invalid index type "C" for "A[C]"; expected type "B" main:9: error: Invalid index type "B" for "A[C]"; expected type "C" -- Nested generic types -- -------------------- [case testNestedGenericTypes] from typing import TypeVar, Generic T = TypeVar('T') aab = None # type: A[A[B]] aac = None # type: A[A[C]] ab = None # type: A[B] ac = None # type: A[C] if int(): ac = aab.x # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]") ac.y = aab # E: Incompatible types in assignment (expression has type "A[A[B]]", variable has type "A[A[C]]") if int(): ab = aab.x ac = aac.x ab.y = aab ac.y = aac class A(Generic[T]): x = None # type: T y = None # type: A[A[T]] class B: pass class C: pass -- Generic functions -- ----------------- [case testTypeCheckingGenericFunctionBody] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') class A: pass class p(Generic[T, S]): def __init__(self, t: T, a: S) -> None: pass def f(s: S, t: T) -> p[T, A]: a = t # type: S # E: Incompatible types in assignment (expression has type "T", variable has type "S") if int(): s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S") p_s_a = None # type: p[S, A] if s: return p_s_a # E: Incompatible return value type (got "p[S, A]", expected "p[T, A]") b = t # type: T c = s # type: S p_t_a = None # type: p[T, A] return p_t_a [out] [case testTypeCheckingGenericMethodBody] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class 
p(Generic[T, S]): def __init__(self, t: T, a: S) -> None: pass class A(Generic[T]): def f(self, s: S, t: T) -> p[S, T]: if int(): s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S") p_s_s = None # type: p[S, S] if s: return p_s_s # E: Incompatible return value type (got "p[S, S]", expected "p[S, T]") p_t_t = None # type: p[T, T] if t: return p_t_t # E: Incompatible return value type (got "p[T, T]", expected "p[S, T]") if 1: t = t s = s p_s_t = None # type: p[S, T] return p_s_t [out] [case testProhibitTypeApplicationToGenericFunctions] from typing import TypeVar T = TypeVar('T') def f(x: T) -> T: pass y = f[int] # E: Type application is only supported for generic classes [out] -- Generic types in expressions -- ---------------------------- [case testTypeApplicationArgs] from typing import TypeVar, Generic T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... Node[int]() # E: Too few arguments for "Node" Node[int](1, 1, 1) # E: Too many arguments for "Node" [out] [case testTypeApplicationTvars] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): pass A[int]() # E: Type application has too few types (2 expected) A[int, str, int]() # E: Type application has too many types (2 expected) [out] [case testInvalidTypeApplicationType] a = None # type: A class A: pass a[A]() # E: Value of type "A" is not indexable A[A]() # E: The type "Type[A]" is not generic and not indexable [out] [case testTypeApplicationArgTypes] from typing import TypeVar, Generic T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... Node[int](1) Node[int]('a') # E: Argument 1 to "Node" has incompatible type "str"; expected "int" class Dummy(Generic[T]): def meth(self, x: T) -> None: ... def methout(self) -> T: ... 
Dummy[int]().meth(1) Dummy[int]().meth('a') # E: Argument 1 to "meth" of "Dummy" has incompatible type "str"; expected "int" reveal_type(Dummy[int]()) # N: Revealed type is '__main__.Dummy[builtins.int*]' reveal_type(Dummy[int]().methout()) # N: Revealed type is 'builtins.int*' [out] [case testTypeApplicationArgTypesSubclasses] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class C(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... class D(C[int, T]): ... D[str](1, 'a') D[str](1, 1) # E: Argument 2 to "D" has incompatible type "int"; expected "str" class E(D[str]): ... E(1, 'a') E(1, 1) # E: Argument 2 to "E" has incompatible type "int"; expected "str" [out] [case testTypeApplicationAlias] from typing import TypeVar, Generic T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... Alias = Node Alias[int](1) Alias[int]("a") # E: Argument 1 to "Node" has incompatible type "str"; expected "int" [out] [case testTypeApplicationCrash] type[int] # this was crashing, see #2302 (comment) # E: The type "Type[type]" is not generic and not indexable [out] -- Generic type aliases -- -------------------- [case testGenericTypeAliasesBasic] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... 
IntNode = Node[int, S] IntIntNode = Node[int, int] SameNode = Node[T, T] n = Node(1, 1) # type: IntIntNode n1 = Node(1, 'a') # type: IntIntNode # E: Argument 2 to "Node" has incompatible type "str"; expected "int" m = Node(1, 1) # type: IntNode m1 = Node('x', 1) # type: IntNode # E: Argument 1 to "Node" has incompatible type "str"; expected "int" m2 = Node(1, 1) # type: IntNode[str] # E: Argument 2 to "Node" has incompatible type "int"; expected "str" s = Node(1, 1) # type: SameNode[int] reveal_type(s) # N: Revealed type is '__main__.Node[builtins.int, builtins.int]' s1 = Node(1, 'x') # type: SameNode[int] # E: Argument 2 to "Node" has incompatible type "str"; expected "int" [out] [case testGenericTypeAliasesBasic2] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... IntNode = Node[int, S] IntIntNode = Node[int, int] SameNode = Node[T, T] def output_bad() -> IntNode[str]: return Node(1, 1) # Eroor - bad return type, see out def input(x: IntNode[str]) -> None: pass input(Node(1, 's')) input(Node(1, 1)) # E: Argument 2 to "Node" has incompatible type "int"; expected "str" def output() -> IntNode[str]: return Node(1, 'x') reveal_type(output()) # N: Revealed type is '__main__.Node[builtins.int, builtins.str]' def func(x: IntNode[T]) -> IntNode[T]: return x reveal_type(func) # N: Revealed type is 'def [T] (x: __main__.Node[builtins.int, T`-1]) -> __main__.Node[builtins.int, T`-1]' func(1) # E: Argument 1 to "func" has incompatible type "int"; expected "Node[int, ]" func(Node('x', 1)) # E: Argument 1 to "Node" has incompatible type "str"; expected "int" reveal_type(func(Node(1, 'x'))) # N: Revealed type is '__main__.Node[builtins.int, builtins.str*]' def func2(x: SameNode[T]) -> SameNode[T]: return x reveal_type(func2) # N: Revealed type is 'def [T] (x: __main__.Node[T`-1, T`-1]) -> __main__.Node[T`-1, T`-1]' func2(Node(1, 'x')) # E: Cannot infer type argument 1 of "func2" y = 
func2(Node('x', 'x')) reveal_type(y) # N: Revealed type is '__main__.Node[builtins.str*, builtins.str*]' def wrap(x: T) -> IntNode[T]: return Node(1, x) z = None # type: str reveal_type(wrap(z)) # N: Revealed type is '__main__.Node[builtins.int, builtins.str*]' [out] main:13: error: Argument 2 to "Node" has incompatible type "int"; expected "str" -- Error formatting is a bit different (and probably better) with new analyzer [case testGenericTypeAliasesWrongAliases] # flags: --show-column-numbers --python-version 3.6 --no-strict-optional from typing import TypeVar, Generic, List, Callable, Tuple, Union T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... A = Node[T] # Error B = Node[T, T] C = Node[T, T, T] # Error D = Node[T, S] E = Node[Node[T, T], List[T]] F = Node[List[T, T], S] # Error G = Callable[..., List[T, T]] # Error H = Union[int, Tuple[T, Node[T]]] # Error h: H # This was reported on previous line h1: H[int, str] # Error x = None # type: D[int, str] reveal_type(x) y = None # type: E[int] reveal_type(y) X = T # Error [builtins fixtures/list.pyi] [out] main:9:5: error: "Node" expects 2 type arguments, but 1 given main:11:5: error: "Node" expects 2 type arguments, but 3 given main:15:10: error: "list" expects 1 type argument, but 2 given main:16:19: error: "list" expects 1 type argument, but 2 given main:17:25: error: "Node" expects 2 type arguments, but 1 given main:19:5: error: Bad number of arguments for type alias, expected: 1, given: 2 main:22:13: note: Revealed type is '__main__.Node[builtins.int, builtins.str]' main:24:13: note: Revealed type is '__main__.Node[__main__.Node[builtins.int, builtins.int], builtins.list[builtins.int]]' main:26:5: error: Type variable "__main__.T" is invalid as target for type alias [case testGenericTypeAliasesForAliases] from typing import TypeVar, Generic, List, Union T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: 
pass ListedNode = Node[List[T], List[S]] Second = ListedNode[int, T] Third = Union[int, Second[str]] def f2(x: T) -> Second[T]: return Node([1], [x]) reveal_type(f2('a')) # N: Revealed type is '__main__.Node[builtins.list[builtins.int], builtins.list[builtins.str*]]' def f3() -> Third: return Node([1], ['x']) reveal_type(f3()) # N: Revealed type is 'Union[builtins.int, __main__.Node[builtins.list[builtins.int], builtins.list[builtins.str]]]' [builtins fixtures/list.pyi] [case testGenericTypeAliasesAny] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: self.x = x self.y = y IntNode = Node[int, S] AnyNode = Node[S, T] def output() -> IntNode[str]: return Node(1, 'x') x = output() # type: IntNode # This is OK (implicit Any) y = None # type: IntNode y.x = 1 y.x = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int") y.y = 1 # Both are OK (implicit Any) y.y = 'x' z = Node(1, 'x') # type: AnyNode reveal_type(z) # N: Revealed type is '__main__.Node[Any, Any]' [out] [case testGenericTypeAliasesAcessingMethods] from typing import TypeVar, Generic, List T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: self.x = x def meth(self) -> T: return self.x ListedNode = Node[List[T]] l = None # type: ListedNode[int] l.x.append(1) l.meth().append(1) reveal_type(l.meth()) # N: Revealed type is 'builtins.list*[builtins.int]' l.meth().append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" ListedNode[str]([]).x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "List[str]") [builtins fixtures/list.pyi] [case testGenericTypeAliasesSubclassing] from typing import TypeVar, Generic, Tuple, List T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... TupledNode = Node[Tuple[T, T]] class D(TupledNode[T]): ... class L(List[TupledNode[T]]): ... 
def f_bad(x: T) -> D[T]: return D(1) # Error, see out L[int]().append(Node((1, 1))) L[int]().append(5) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "Node[Tuple[int, int]]" x = D((1, 1)) # type: D[int] y = D(5) # type: D[int] # E: Argument 1 to "D" has incompatible type "int"; expected "Tuple[int, int]" def f(x: T) -> D[T]: return D((x, x)) reveal_type(f('a')) # N: Revealed type is '__main__.D[builtins.str*]' [builtins fixtures/list.pyi] [out] main:15: error: Argument 1 to "D" has incompatible type "int"; expected "Tuple[T, T]" [case testGenericTypeAliasesSubclassingBad] from typing import TypeVar, Generic, Tuple, Union T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... TupledNode = Node[Tuple[T, T]] UNode = Union[int, Node[T]] class C(TupledNode): ... # Same as TupledNode[Any] class D(TupledNode[T]): ... class E(Generic[T], UNode[T]): ... # E: Invalid base class "UNode" reveal_type(D((1, 1))) # N: Revealed type is '__main__.D[builtins.int*]' [builtins fixtures/list.pyi] [case testGenericTypeAliasesUnion] from typing import TypeVar, Generic, Union, Any T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: self.x = x UNode = Union[int, Node[T]] x = 1 # type: UNode[int] x + 1 # E: Unsupported left operand type for + ("Node[int]") \ # N: Left operand is of type "Union[int, Node[int]]" if not isinstance(x, Node): x + 1 if not isinstance(x, int): x.x = 1 x.x = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int") def f(x: T) -> UNode[T]: if int(): return Node(x) else: return 1 reveal_type(f(1)) # N: Revealed type is 'Union[builtins.int, __main__.Node[builtins.int*]]' TNode = Union[T, Node[int]] s = 1 # type: TNode[str] # E: Incompatible types in assignment (expression has type "int", variable has type "Union[str, Node[int]]") if not isinstance(s, str): s.x = 1 z = None # type: TNode # Same as TNode[Any] z.x z.foo() # E: Item "Node[int]" of "Union[Any, 
Node[int]]" has no attribute "foo" [builtins fixtures/isinstance.pyi] [case testGenericTypeAliasesTuple] from typing import TypeVar, Tuple T = TypeVar('T') SameTP = Tuple[T, T] IntTP = Tuple[int, T] def f1(x: T) -> SameTP[T]: return x, x a, b, c = f1(1) # E: Need more than 2 values to unpack (3 expected) x, y = f1(1) reveal_type(x) # N: Revealed type is 'builtins.int' def f2(x: IntTP[T]) -> IntTP[T]: return x f2((1, 2, 3)) # E: Argument 1 to "f2" has incompatible type "Tuple[int, int, int]"; expected "Tuple[int, ]" reveal_type(f2((1, 'x'))) # N: Revealed type is 'Tuple[builtins.int, builtins.str*]' [builtins fixtures/for.pyi] [case testGenericTypeAliasesCallable] from typing import TypeVar, Generic, Callable T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... BadC = Callable[T] # E: Please use "Callable[[], ]" or "Callable" C = Callable[..., T] C2 = Callable[[T, T], Node[T]] def make_cb(x: T) -> C[T]: return lambda *args: x reveal_type(make_cb(1)) # N: Revealed type is 'def (*Any, **Any) -> builtins.int*' def use_cb(arg: T, cb: C2[T]) -> Node[T]: return cb(arg, arg) use_cb(1, 1) # E: Argument 2 to "use_cb" has incompatible type "int"; expected "Callable[[int, int], Node[int]]" my_cb = None # type: C2[int] use_cb('x', my_cb) # E: Argument 2 to "use_cb" has incompatible type "Callable[[int, int], Node[int]]"; expected "Callable[[str, str], Node[str]]" reveal_type(use_cb(1, my_cb)) # N: Revealed type is '__main__.Node[builtins.int]' [out] [case testGenericTypeAliasesPEPBasedExample] from typing import TypeVar, List, Tuple T = TypeVar('T', int, bool) Vec = List[Tuple[T, T]] vec = [] # type: Vec[bool] vec.append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "Tuple[bool, bool]" reveal_type(vec[0]) # N: Revealed type is 'Tuple[builtins.bool, builtins.bool]' def fun1(v: Vec[T]) -> T: return v[0][0] def fun2(v: Vec[T], scale: T) -> Vec[T]: return v reveal_type(fun1([(1, 1)])) # N: Revealed type is 'builtins.int*' 
fun1(1) # E: Argument 1 to "fun1" has incompatible type "int"; expected "List[Tuple[bool, bool]]" fun1([(1, 'x')]) # E: Cannot infer type argument 1 of "fun1" reveal_type(fun2([(1, 1)], 1)) # N: Revealed type is 'builtins.list[Tuple[builtins.int*, builtins.int*]]' fun2([('x', 'x')], 'x') # E: Value of type variable "T" of "fun2" cannot be "str" [builtins fixtures/list.pyi] [case testGenericTypeAliasesImporting] from typing import TypeVar from a import Node, TupledNode T = TypeVar('T') n = None # type: TupledNode[int] n.x = 1 n.y = (1, 1) n.y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "Tuple[int, int]") def f(x: Node[T, T]) -> TupledNode[T]: return Node(x.x, (x.x, x.x)) f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Node[, ]" f(Node(1, 'x')) # E: Cannot infer type argument 1 of "f" reveal_type(Node('x', 'x')) # N: Revealed type is 'a.Node[builtins.str*, builtins.str*]' [file a.py] from typing import TypeVar, Generic, Tuple T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: self.x = x self.y = y TupledNode = Node[T, Tuple[T, T]] [builtins fixtures/list.pyi] [case testGenericTypeAliasesImportingWithoutTypeVar] from typing import Tuple from lib import Transform def int_tf(m: int) -> Transform[int, str]: def transform(i: int, pos: int) -> Tuple[int, str]: pass return transform var: Transform[int, str] reveal_type(var) # N: Revealed type is 'def (builtins.int, builtins.int) -> Tuple[builtins.int, builtins.str]' [file lib.py] from typing import Callable, TypeVar, Tuple T = TypeVar('T') R = TypeVar('R') Transform = Callable[[T, int], Tuple[T, R]] [out] [case testGenericTypeAliasesImportingWithoutTypeVarError] from a import Alias x: Alias[int, str] # E: Bad number of arguments for type alias, expected: 1, given: 2 reveal_type(x) # N: Revealed type is 'builtins.list[builtins.list[Any]]' [file a.py] from typing import TypeVar, List T = TypeVar('T') Alias = 
List[List[T]] [builtins fixtures/list.pyi] [out] [case testGenericAliasWithTypeVarsFromDifferentModules] from mod import Alias, TypeVar S = TypeVar('S') NewAlias = Alias[int, int, S, S] class C: pass x: NewAlias[str] reveal_type(x) # N: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.int, builtins.str, builtins.str]]' y: Alias[int, str, C, C] reveal_type(y) # N: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str, __main__.C, __main__.C]]' [file mod.py] from typing import TypeVar, List, Tuple import a import b T = TypeVar('T') Alias = List[Tuple[T, a.T, b.T, b.B.T]] # alias_tvars here will be ['T', 'a.T', 'b.T', 'b.B.T'] [file a.py] from typing import TypeVar T = TypeVar('T') [file b.py] from typing import TypeVar T = TypeVar('T') class B: T = TypeVar('T') [builtins fixtures/list.pyi] [out] [case testTypeAliasesResultingInPlainInstance] from typing import Optional, Union O = Optional[int] U = Union[int] x: O y: U reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' reveal_type(y) # N: Revealed type is 'builtins.int' U[int] # E: Type application targets a non-generic function or class O[int] # E: Bad number of arguments for type alias, expected: 0, given: 1 # E: Type application is only supported for generic classes [out] [case testAliasesInClassBodyNormalVsSubscripted] from typing import Union, Type, Iterable class A: pass class B(A): pass class C: a = A # This is a variable b = Union[int, str] # This is an alias c: Type[object] = Iterable[int] # This is however also a variable if int(): a = B if int(): b = int # E: Cannot assign multiple types to name "b" without an explicit "Type[...]" annotation if int(): c = int def f(self, x: a) -> None: pass # E: Variable "__main__.C.a" is not valid as a type def g(self, x: b) -> None: pass def h(self, x: c) -> None: pass # E: Variable "__main__.C.c" is not valid as a type x: b reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' [out] [case 
testGenericTypeAliasesRuntimeExpressionsInstance] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... IntNode = Node[int, T] IntNode[int](1, 1) IntNode[int](1, 'a') # E: Argument 2 to "Node" has incompatible type "str"; expected "int" SameNode = Node[T, T] # TODO: fix https://github.com/python/mypy/issues/7084. ff = SameNode[T](1, 1) a = SameNode(1, 'x') reveal_type(a) # N: Revealed type is '__main__.Node[Any, Any]' b = SameNode[int](1, 1) reveal_type(b) # N: Revealed type is '__main__.Node[builtins.int*, builtins.int*]' SameNode[int](1, 'x') # E: Argument 2 to "Node" has incompatible type "str"; expected "int" [out] [case testGenericTypeAliasesRuntimeExpressionsOther] from typing import TypeVar, Union, Tuple, Callable, Any T = TypeVar('T') CA = Callable[[T], int] TA = Tuple[T, int] UA = Union[T, int] cs = CA + 1 # E: The type alias to Callable is invalid in runtime context reveal_type(cs) # N: Revealed type is 'Any' ts = TA() # E: The type alias to Tuple is invalid in runtime context reveal_type(ts) # N: Revealed type is 'Any' us = UA.x # E: The type alias to Union is invalid in runtime context reveal_type(us) # N: Revealed type is 'Any' xx = CA[str] + 1 # E: Type application is only supported for generic classes yy = TA[str]() # E: Type application is only supported for generic classes zz = UA[str].x # E: Type application is only supported for generic classes [out] [case testGenericTypeAliasesTypeVarBinding] from typing import TypeVar, Generic, List T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... class B(Generic[T, S]): def __init__(self, x: List[T], y: List[S]) -> None: ... 
SameA = A[T, T] SameB = B[T, T] class C(Generic[T]): a = None # type: SameA[T] b = SameB[T]([], []) reveal_type(C[int]().a) # N: Revealed type is '__main__.A[builtins.int*, builtins.int*]' reveal_type(C[str]().b) # N: Revealed type is '__main__.B[builtins.str*, builtins.str*]' [builtins fixtures/list.pyi] [case testGenericTypeAliasesTypeVarConstraints] # flags: --show-column-numbers --no-strict-optional from typing import TypeVar, Generic T = TypeVar('T', int, list) S = TypeVar('S', int, list) class A(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... BadA = A[str, T] # One error here SameA = A[T, T] x = None # type: SameA[int] y = None # type: SameA[str] # Two errors here, for both args of A [builtins fixtures/list.pyi] [out] main:9:8: error: Value of type variable "T" of "A" cannot be "str" main:13:1: error: Value of type variable "T" of "A" cannot be "str" main:13:1: error: Value of type variable "S" of "A" cannot be "str" [case testGenericTypeAliasesIgnoredPotentialAlias] class A: ... 
Bad = A[int] # type: ignore reveal_type(Bad) # N: Revealed type is 'Any' [out] [case testNoSubscriptionOfBuiltinAliases] from typing import List, TypeVar list[int]() # E: "list" is not subscriptable ListAlias = List def fun() -> ListAlias[int]: pass reveal_type(fun()) # N: Revealed type is 'builtins.list[builtins.int]' BuiltinAlias = list BuiltinAlias[int]() # E: "list" is not subscriptable #check that error is reported only once, and type is still stored T = TypeVar('T') BadGenList = list[T] # E: "list" is not subscriptable reveal_type(BadGenList[int]()) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(BadGenList()) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [out] [case testImportedTypeAliasInRuntimeContext] from m import Alias n = Alias[int]([1]) reveal_type(n) # N: Revealed type is 'm.Node[builtins.list*[builtins.int]]' bad = Alias[str]([1]) # E: List item 0 has incompatible type "int"; expected "str" n2 = Alias([1]) # Same as Node[List[Any]] reveal_type(n2) # N: Revealed type is 'm.Node[builtins.list*[Any]]' [file m.py] from typing import TypeVar, Generic, List T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: self.x = x Alias = Node[List[T]] [builtins fixtures/list.pyi] [out] -- Simplified declaration of generics -- ---------------------------------- [case testSimplifiedGenericSimple] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def b(self) -> T: ... class C(Generic[T]): def c(self) -> T: ... class D(B[T], C[S]): ... reveal_type(D[str, int]().b()) # N: Revealed type is 'builtins.str*' reveal_type(D[str, int]().c()) # N: Revealed type is 'builtins.int*' [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericCallable] from typing import TypeVar, Generic, Callable T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def b(self) -> T: ... class D(B[Callable[[T], S]]): ... 
reveal_type(D[str, int]().b()) # N: Revealed type is 'def (builtins.str*) -> builtins.int*' [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericComplex] from typing import TypeVar, Generic, Tuple T = TypeVar('T') S = TypeVar('S') U = TypeVar('U') class A(Generic[T, S]): pass class B(Generic[T, S]): def m(self) -> Tuple[T, S]: pass class C(A[S, B[T, int]], B[U, A[int, T]]): pass c = C[object, int, str]() reveal_type(c.m()) # N: Revealed type is 'Tuple[builtins.str*, __main__.A*[builtins.int, builtins.int*]]' [builtins fixtures/tuple.pyi] [out] [case testSimplifiedGenericOrder] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def b(self) -> T: ... class C(Generic[T]): def c(self) -> T: ... class D(B[T], C[S], Generic[S, T]): ... reveal_type(D[str, int]().b()) # N: Revealed type is 'builtins.int*' reveal_type(D[str, int]().c()) # N: Revealed type is 'builtins.str*' [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericDuplicate] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T, T]): # E: Duplicate type variables in Generic[...] or Protocol[...] pass a = A[int]() [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericNotAll] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T]): pass class B(Generic[T]): pass class C(A[T], B[S], Generic[T]): # E: If Generic[...] or Protocol[...] 
is present it should list all type variables pass c = C[int, str]() [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericInvalid] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B(A[S]): # E: Name 'S' is not defined pass [builtins fixtures/list.pyi] [out] -- Multiple assignment with lists -- ------------------------------ [case testMultipleAssignmentWithLists] from typing import List class A: pass class B: pass class B2(B): pass a = None # type: A b = None # type: B b2 = None # type: B2 list_a = [a] list_b = [b] list_b2 = [b2] if int(): a, b = list_a # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b, a = list_a # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b2, b2 = list_b # E: Incompatible types in assignment (expression has type "B", variable has type "B2") a, a = list_a b, b2, b = list_b2 [builtins fixtures/for.pyi] [case testMultipleAssignmentWithListsInInitialization] from typing import List class A: pass list_object = [object()] list_a = [A()] a, b = list_object # type: (A, object) # E: Incompatible types in assignment (expression has type "object", variable has type "A") c, d = list_object # type: (object, A) # E: Incompatible types in assignment (expression has type "object", variable has type "A") e, f = list_a # type: (A, object) [builtins fixtures/for.pyi] [case testMultipleAssignmentWithListAndIndexing] from typing import List a = None # type: List[A] b = None # type: List[int] a[1], b[1] = a # E: Incompatible types in assignment (expression has type "A", target has type "int") a[1], a[2] = a class A: pass [file builtins.py] from typing import TypeVar, Generic, Iterable T = TypeVar('T') class object: pass class list(Iterable[T]): def __setitem__(self, x: int, v: T) -> None: pass class int: pass class type: pass class tuple: pass class function: pass class str: pass [case testMultipleAssignmentWithIterable] from 
typing import Iterable, TypeVar a = None # type: int b = None # type: str T = TypeVar('T') def f(x: T) -> Iterable[T]: pass a, b = f(a) # E: Incompatible types in assignment (expression has type "int", variable has type "str") b, b = f(a) # E: Incompatible types in assignment (expression has type "int", variable has type "str") a, a = f(a) b, b = f(b) [builtins fixtures/for.pyi] -- Error messages -- -------------- [case testErrorWithLongGenericTypeName] from typing import TypeVar, Generic B = TypeVar('B') C = TypeVar('C') D = TypeVar('D') E = TypeVar('E') F = TypeVar('F') G = TypeVar('G') H = TypeVar('H') I = TypeVar('I') J = TypeVar('J') K = TypeVar('K') L = TypeVar('L') M = TypeVar('M') N = TypeVar('N') O = TypeVar('O') P = TypeVar('P') Q = TypeVar('Q') R = TypeVar('R') S = TypeVar('S') T = TypeVar('T') U = TypeVar('U') V = TypeVar('V') W = TypeVar('W') X = TypeVar('X') Y = TypeVar('Y') Z = TypeVar('Z') class OO: pass a = None # type: A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object] f(a) # E: Argument 1 to "f" has incompatible type "A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]"; expected "OO" def f(a: OO) -> None: pass class A(Generic[B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z]): pass [case testErrorWithShorterGenericTypeName] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, B] f(a) # E: Argument 1 to "f" has incompatible type "A[object, B]"; expected "B" def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass [case testErrorWithShorterGenericTypeName2] from typing import Callable, TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, 
Callable[[], None]] f(a) # E: Argument 1 to "f" has incompatible type "A[object, Callable[[], None]]"; expected "B" def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass -- Overloads + generics -- -------------------- [case testGenericArgumentInOverload] from foo import * [file foo.pyi] from typing import overload, List class A: pass class B: pass a, b = None, None # type: (A, B) @overload def f(a: List[A]) -> A: pass @overload def f(a: B) -> B: pass b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f([b]) # E: List item 0 has incompatible type "B"; expected "A" a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f([a]) b = f(b) [builtins fixtures/list.pyi] [case testGenericFunctionAsOverloadItem] from foo import * [file foo.pyi] from typing import overload, TypeVar, List T = TypeVar('T') class A: pass class B: pass @overload def f(a: B) -> B: pass @overload def f(a: List[T]) -> T: pass a, b = None, None # type: (A, B) if int(): b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f([b]) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = f([a]) b = f([b]) if int(): b = f(b) [builtins fixtures/list.pyi] -- Type variable scoping -- --------------------- [case testLocalTypeVariable] from typing import TypeVar def f() -> None: T = TypeVar('T') def g(x: T) -> T: pass a = g(1) if int(): a = 1 a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [out] [case testClassLevelTypeVariable] from typing import TypeVar class A: T = TypeVar('T') def g(self, x: T) -> T: pass a = A().g(1) if int(): a = 1 if int(): a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case 
testGenericInnerClass] from typing import TypeVar, Generic T = TypeVar('T') class A: class B(Generic[T]): def meth(self) -> T: ... B[int]() reveal_type(B[int]().meth) # N: Revealed type is 'def () -> builtins.int*' A.B[int]() reveal_type(A.B[int]().meth) # N: Revealed type is 'def () -> builtins.int*' [case testGenericClassInnerFunctionTypeVariable] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, a: T) -> None: self.a = a def f(self, n: int) -> None: def g(a: T): self.a = a g(self.a) g(n) # E: Argument 1 to "g" has incompatible type "int"; expected "T" -- This is non-trivial with new analyzer (and also in fine grained incremental): -- We need to store whole tvar_scope, not only active class. [case testFunctionInGenericInnerClassTypeVariable-skip] from typing import TypeVar, Generic T = TypeVar('T') class Outer(Generic[T]): class Inner: x: T # E: Invalid type "__main__.T" def f(self, x: T) -> T: ... # E: Type variable 'T' is bound by an outer class def g(self) -> None: y: T # E: Invalid type "__main__.T" -- Callable subtyping with generic functions -- ----------------------------------------- [case testSubtypingWithGenericFunctions] from typing import TypeVar A = TypeVar('A') B = TypeVar('B') def f1(x: A) -> A: ... def f2(x: A) -> B: ... def f3(x: B) -> B: ... def f4(x: int) -> A: ... 
y1 = f1 if int(): y1 = f1 if int(): y1 = f2 if int(): y1 = f3 if int(): y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[A], A]") y2 = f2 if int(): y2 = f2 if int(): y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], B]") if int(): y2 = f3 # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[A], B]") if int(): y2 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[A], B]") y3 = f3 if int(): y3 = f3 if int(): y3 = f1 if int(): y3 = f2 if int(): y3 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[B], B]") y4 = f4 if int(): y4 = f4 if int(): y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[int], A]") if int(): y4 = f2 if int(): y4 = f3 # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[int], A]") [case testSubtypingWithGenericInnerFunctions] from typing import TypeVar A = TypeVar('A') B = TypeVar('B') T = TypeVar('T') def outer(t: T) -> None: def f1(x: A) -> A: ... def f2(x: A) -> B: ... def f3(x: T) -> A: ... def f4(x: A) -> T: ... def f5(x: T) -> T: ... 
y1 = f1 if int(): y1 = f2 y1 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A]", variable has type "Callable[[A], A]") y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A], T]", variable has type "Callable[[A], A]") y1 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[A], A]") y2 = f2 if int(): y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], B]") y3 = f3 if int(): y3 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[T], A]") y3 = f2 y3 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A], T]", variable has type "Callable[[T], A]") y3 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], A]") y4 = f4 if int(): y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], T]") y4 = f2 y4 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A]", variable has type "Callable[[A], T]") y4 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[A], T]") y5 = f5 if int(): y5 = f1 y5 = f2 y5 = f3 y5 = f4 [out] [case testSubtypingWithGenericFunctionUsingTypevarWithValues] from typing import TypeVar, Callable T = TypeVar('T', int, str) def f(x: T) -> T: pass def g1(f: Callable[[str], str]) -> None: pass g1(f) def g2(f: Callable[[int], int]) -> None: pass g2(f) def g3(f: Callable[[object], object]) -> None: pass g3(f) # E: Argument 1 to "g3" has incompatible type "Callable[[T], T]"; \ expected "Callable[[object], object]" [case testSubtypingWithGenericFunctionUsingTypevarWithValues2] from typing import TypeVar, Callable T = TypeVar('T', int, str) def f(x: T) -> T: pass g = f g = f --Operations on 
type variable types -- --------------------------------- [case testTypeVariableTypeEquality] from typing import TypeVar T = TypeVar('T') def f(a: T, b: T) -> T: a.__ne__(b) if a == b: return a else: return b [builtins fixtures/ops.pyi] [case testTypeVariableTypeIs] from typing import TypeVar T = TypeVar('T') def f(a: T, b: T) -> T: if a is b or a is 1: return a else: return b [builtins fixtures/ops.pyi] [case testTypeVarLessThan] from typing import TypeVar T = TypeVar('T') def f(a: T, b: T) -> T: if a < b: # E: Unsupported left operand type for < ("T") return a else: return b [builtins fixtures/ops.pyi] [case testTypeVarReversibleOperator] from typing import TypeVar class A: def __mul__(cls, other: int) -> str: return "" T = TypeVar("T", bound=A) def f(x: T) -> str: return reveal_type(x * 0) # N: Revealed type is 'builtins.str' [case testTypeVarReversibleOperatorTuple] from typing import TypeVar, Tuple class A(Tuple[int, int]): def __mul__(cls, other: Tuple[int, int]) -> str: return "" T = TypeVar("T", bound=A) def f(x: T) -> str: return reveal_type(x * (1, 2) ) # N: Revealed type is 'builtins.str' [builtins fixtures/tuple.pyi] -- Subtyping generic callables -- --------------------------- [case testSubtypingGenericTypeObject] from typing import Callable, Generic, TypeVar T = TypeVar('T') class C(Generic[T]): def __init__(self) -> None: pass x = C # type: Callable[[], C[int]] y = C # type: Callable[[], int] # E: Incompatible types in assignment (expression has type "Type[C[Any]]", variable has type "Callable[[], int]") -- Special cases -- ------------- [case testIdentityHigherOrderFunction] from typing import Callable, TypeVar A = TypeVar('A') B = TypeVar('B') def square(n: int) -> int: return n def id(f: Callable[[A], B]) -> Callable[[A], B]: return f g = id(square) g(1) g('x') # E: Argument 1 has incompatible type "str"; expected "int" [case testIdentityHigherOrderFunction2] from typing import Callable, TypeVar A = TypeVar('A') def voidify(n: int) -> None: pass 
def identity(f: Callable[[A], None]) -> Callable[[A], None]: return f identity(voidify)(3) [case testIdentityHigherOrderFunction3] from typing import Callable, TypeVar A = TypeVar('A') B = TypeVar('B') def fn(n: B) -> None: pass def identity(f: A) -> A: return f identity(fn) identity(fn)('x') [case testTypeVariableUnionAndCallableInTypeInference] from typing import Union, Callable, TypeVar T = TypeVar('T') def f(x: T, y: Union[T, Callable[[T], None]]) -> None: pass f('', '') [case testGenericFunctionsWithUnalignedIds] from typing import TypeVar A = TypeVar('A') B = TypeVar('B') def f1(x: int, y: A) -> A: ... def f2(x: int, y: A) -> B: ... def f3(x: A, y: B) -> B: ... g = f1 g = f2 g = f3 [case testTypeVariableWithContainerAndTuple] from typing import TypeVar, Container T = TypeVar('T') def f(x: Container[T]) -> T: ... reveal_type(f((1, 2))) # N: Revealed type is 'builtins.int*' [typing fixtures/typing-full.pyi] [case testClassMethodInGenericClassWithGenericConstructorArg] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, a: T) -> None: pass @classmethod def f(cls) -> None: pass [builtins fixtures/classmethod.pyi] [case testClassMethodInClassWithGenericConstructor] from typing import TypeVar, Generic T = TypeVar('T') class A: def __init__(self, a: T) -> None: pass @classmethod def f(cls) -> None: pass [builtins fixtures/classmethod.pyi] [case testGenericOperatorMethodOverlapping] from typing import TypeVar, Generic, Tuple T = TypeVar('T') T2 = TypeVar('T2') S = TypeVar('S', bound=str) S2 = TypeVar('S2', bound=str) class G(Generic[T]): pass class A: def __or__(self, x: G[T]) -> G[T]: pass def __ior__(self, x: G[T2]) -> G[T2]: pass class B: def __or__(self, x: G[T]) -> G[T]: pass def __ior__(self, x: G[S]) -> G[S]: pass \ # E: Signatures of "__ior__" and "__or__" are incompatible class C: def __or__(self, x: G[S]) -> G[S]: pass def __ior__(self, x: G[S2]) -> G[S2]: pass [case testGenericOperatorMethodOverlapping2] from typing 
import TypeVar, Generic, Tuple X = TypeVar('X') T = TypeVar('T', int, str) T2 = TypeVar('T2', int, str) S = TypeVar('S', float, str) S2 = TypeVar('S2', float, str) class G(Generic[X]): pass class A: def __or__(self, x: G[T]) -> G[T]: pass def __ior__(self, x: G[T2]) -> G[T2]: pass class B: def __or__(self, x: G[T]) -> G[T]: pass def __ior__(self, x: G[S]) -> G[S]: pass \ # E: Signatures of "__ior__" and "__or__" are incompatible class C: def __or__(self, x: G[S]) -> G[S]: pass def __ior__(self, x: G[S2]) -> G[S2]: pass class D: def __or__(self, x: G[X]) -> G[X]: pass def __ior__(self, x: G[S2]) -> G[S2]: pass \ # E: Signatures of "__ior__" and "__or__" are incompatible [case testConstraintInferenceForAnyAgainstTypeT] from typing import Type, Any, TypeVar T = TypeVar('T') def f(c: Type[T]) -> T: ... x: Any reveal_type(f(x)) # N: Revealed type is 'Any' [case testCallTypeTWithGenericBound] from typing import Generic, TypeVar, Type T = TypeVar('T') S = TypeVar('S', bound='A') class A(Generic[T]): pass def f(cls: Type[S]) -> None: cls() [case testQualifiedTypeVariableName] import b def f(x: b.T) -> b.T: return x reveal_type(f) reveal_type(b.g) [file b.py] from typing import TypeVar T = TypeVar('T') def g(x: T) -> T: return x [out] main:3: note: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1' main:4: note: Revealed type is 'def [T] (x: T`-1) -> T`-1' [case testPartiallyQualifiedTypeVariableName] from p import b def f(x: b.T) -> b.T: return x reveal_type(f) reveal_type(b.g) [file p/__init__.py] [file p/b.py] from typing import TypeVar T = TypeVar('T') def g(x: T) -> T: return x [out] main:3: note: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1' main:4: note: Revealed type is 'def [T] (x: T`-1) -> T`-1' [case testGenericClassMethodSimple] from typing import Generic, TypeVar T = TypeVar('T') class C(Generic[T]): @classmethod def get(cls) -> T: ... class D(C[str]): ... 
reveal_type(D.get()) # N: Revealed type is 'builtins.str*' reveal_type(D().get()) # N: Revealed type is 'builtins.str*' [builtins fixtures/classmethod.pyi] [case testGenericClassMethodExpansion] from typing import Generic, TypeVar, Tuple T = TypeVar('T') class C(Generic[T]): @classmethod def get(cls) -> T: ... class D(C[Tuple[T, T]]): ... class E(D[str]): ... reveal_type(E.get()) # N: Revealed type is 'Tuple[builtins.str*, builtins.str*]' reveal_type(E().get()) # N: Revealed type is 'Tuple[builtins.str*, builtins.str*]' [builtins fixtures/classmethod.pyi] [case testGenericClassMethodExpansionReplacingTypeVar] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class C(Generic[T]): @classmethod def get(cls) -> T: ... class D(C[S]): ... class E(D[int]): ... reveal_type(E.get()) # N: Revealed type is 'builtins.int*' reveal_type(E().get()) # N: Revealed type is 'builtins.int*' [builtins fixtures/classmethod.pyi] [case testGenericClassMethodUnboundOnClass] from typing import Generic, TypeVar T = TypeVar('T') class C(Generic[T]): @classmethod def get(cls) -> T: ... @classmethod def make_one(cls, x: T) -> C[T]: ... reveal_type(C.get) # N: Revealed type is 'def [T] () -> T`1' reveal_type(C[int].get) # N: Revealed type is 'def () -> builtins.int*' reveal_type(C.make_one) # N: Revealed type is 'def [T] (x: T`1) -> __main__.C[T`1]' reveal_type(C[int].make_one) # N: Revealed type is 'def (x: builtins.int*) -> __main__.C[builtins.int*]' [builtins fixtures/classmethod.pyi] [case testGenericClassMethodUnboundOnSubClass] from typing import Generic, TypeVar, Tuple T = TypeVar('T') S = TypeVar('S') class C(Generic[T]): @classmethod def get(cls) -> T: ... @classmethod def make_one(cls, x: T) -> C[T]: ... class D(C[Tuple[T, S]]): ... class E(D[S, str]): ... 
reveal_type(D.make_one) # N: Revealed type is 'def [T, S] (x: Tuple[T`1, S`2]) -> __main__.C[Tuple[T`1, S`2]]' reveal_type(D[int, str].make_one) # N: Revealed type is 'def (x: Tuple[builtins.int*, builtins.str*]) -> __main__.C[Tuple[builtins.int*, builtins.str*]]' reveal_type(E.make_one) # N: Revealed type is 'def [S] (x: Tuple[S`1, builtins.str*]) -> __main__.C[Tuple[S`1, builtins.str*]]' reveal_type(E[int].make_one) # N: Revealed type is 'def (x: Tuple[builtins.int*, builtins.str*]) -> __main__.C[Tuple[builtins.int*, builtins.str*]]' [builtins fixtures/classmethod.pyi] [case testGenericClassClsNonGeneric] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): @classmethod def f(cls, x: T) -> T: return x @classmethod def other(cls) -> None: reveal_type(C) # N: Revealed type is 'def [T] () -> __main__.C[T`1]' reveal_type(C[T]) # N: Revealed type is 'def () -> __main__.C[T`1]' reveal_type(C.f) # N: Revealed type is 'def [T] (x: T`1) -> T`1' reveal_type(C[T].f) # N: Revealed type is 'def (x: T`1) -> T`1' reveal_type(cls.f) # N: Revealed type is 'def (x: T`1) -> T`1' [builtins fixtures/classmethod.pyi] [case testGenericClassUnrelatedVars] from typing import TypeVar, Generic T = TypeVar('T') T2 = TypeVar('T2') class C(Generic[T]): @classmethod def f(cls, x: T) -> T: return x @classmethod def g(cls, x: T2) -> T2: cls.f(x) # E: Argument 1 to "f" of "C" has incompatible type "T2"; expected "T" return x [builtins fixtures/classmethod.pyi] [case testGenericClassInGenericFunction] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): def __init__(self, item: T) -> None: ... @classmethod def f(cls, x: T) -> T: return x def foo(x: T, y: int) -> T: C(y) # OK C[T](y) # E: Argument 1 to "C" has incompatible type "int"; expected "T" C[T].f(y) # E: Argument 1 to "f" of "C" has incompatible type "int"; expected "T" C[T].f(x) # OK return x [builtins fixtures/classmethod.pyi] # TODO: enable this when #7935 is fixed. 
[case testGenericClassInGenericFunctionOverloadedConstructor-skip] from typing import TypeVar, Generic, overload T = TypeVar('T') class C(Generic[T]): @overload def __new__(cls) -> C[None]: ... @overload def __new__(cls, item: T) -> C[T]: ... def __new__(cls, item=None): ... @classmethod def f(cls, x: T) -> T: return x def foo(x: T, y: int) -> T: C.f(y) C(y) # OK C[T](y) # E: Argument 1 to "C" has incompatible type "int"; expected "T" C[T].f(y) # E: Argument 1 to "f" of "C" has incompatible type "int"; expected "T" C[T].f(x) # OK return x [builtins fixtures/classmethod.pyi] [case testGenericClassDirectCall] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): def __init__(self, item: T) -> None: ... @classmethod def f(cls) -> None: cls(1) # E: Argument 1 to "C" has incompatible type "int"; expected "T" [builtins fixtures/classmethod.pyi] [case testGenericClassAlternativeConstructorPrecise] from typing import Generic, TypeVar, Type, Tuple T = TypeVar('T') class Base(Generic[T]): Q = TypeVar('Q', bound=Base[T]) def __init__(self, item: T) -> None: ... @classmethod def make_pair(cls: Type[Q], item: T) -> Tuple[Q, Q]: if bool(): return cls(0), cls(0) # E: Argument 1 to "Base" has incompatible type "int"; expected "T" return cls(item), cls(item) [builtins fixtures/classmethod.pyi] [case testGenericClassAlternativeConstructorPreciseOverloaded] from typing import Generic, TypeVar, Type, Tuple, overload, Union T = TypeVar('T') class Base(Generic[T]): Q = TypeVar('Q', bound=Base[T]) def __init__(self, item: T) -> None: ... @overload @classmethod def make_some(cls: Type[Q], item: T) -> Q: ... @overload @classmethod def make_some(cls: Type[Q], item: T, n: int) -> Tuple[Q, ...]: ... 
@classmethod def make_some(cls: Type[Q], item: T, n: int = 0) -> Union[Q, Tuple[Q, ...]]: if n: return (cls(item),) return cls(item) reveal_type(Base.make_some) # N: Revealed type is 'Overload(def [T] (item: T`1) -> __main__.Base[T`1], def [T] (item: T`1, n: builtins.int) -> builtins.tuple[__main__.Base[T`1]])' reveal_type(Base.make_some(1)) # N: Revealed type is '__main__.Base[builtins.int*]' reveal_type(Base.make_some(1, 1)) # N: Revealed type is 'builtins.tuple[__main__.Base[builtins.int*]]' class Sub(Base[str]): ... Sub.make_some(1) # E: No overload variant of "make_some" of "Base" matches argument type "int" \ # N: Possible overload variant: \ # N: def make_some(cls, item: str) -> Sub \ # N: <1 more non-matching overload not shown> [builtins fixtures/classmethod.pyi] [case testNoGenericAccessOnImplicitAttributes] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): def __init__(self, x: T) -> None: self.x = x @classmethod def meth(cls) -> None: cls.x # E: Access to generic instance variables via class is ambiguous [builtins fixtures/classmethod.pyi] [case testGenericClassMethodUnboundOnClassNonMatchingIdNonGeneric] from typing import Generic, TypeVar, Any, Tuple, Type T = TypeVar('T') S = TypeVar('S') Q = TypeVar('Q', bound=A[Any]) class A(Generic[T]): @classmethod def foo(cls: Type[Q]) -> Tuple[T, Q]: ... class B(A[T], Generic[T, S]): def meth(self) -> None: reveal_type(A[T].foo) # N: Revealed type is 'def () -> Tuple[T`1, __main__.A[T`1]]' @classmethod def other(cls) -> None: reveal_type(cls.foo) # N: Revealed type is 'def () -> Tuple[T`1, __main__.B[T`1, S`2]]' reveal_type(B.foo) # N: Revealed type is 'def [T, S] () -> Tuple[T`1, __main__.B[T`1, S`2]]' [builtins fixtures/classmethod.pyi] [case testGenericClassAlternativeConstructorPrecise] from typing import Generic, TypeVar, Type, Tuple, Any T = TypeVar('T') Q = TypeVar('Q') class Base(Generic[T]): def __init__(self, item: T) -> None: ... 
@classmethod def make_pair(cls: Type[Q], item: T) -> Tuple[Q, Q]: ... class Sub(Base[T]): ... reveal_type(Sub.make_pair('yes')) # N: Revealed type is 'Tuple[__main__.Sub[builtins.str*], __main__.Sub[builtins.str*]]' Sub[int].make_pair('no') # E: Argument 1 to "make_pair" of "Base" has incompatible type "str"; expected "int" [builtins fixtures/classmethod.pyi] [case testGenericClassAttrUnboundOnClass] from typing import Generic, TypeVar T = TypeVar('T') class C(Generic[T]): x: T @classmethod def get(cls) -> T: return cls.x # OK x = C.x # E: Access to generic instance variables via class is ambiguous reveal_type(x) # N: Revealed type is 'Any' xi = C[int].x # E: Access to generic instance variables via class is ambiguous reveal_type(xi) # N: Revealed type is 'builtins.int' [builtins fixtures/classmethod.pyi] [case testGenericClassAttrUnboundOnSubClass] from typing import Generic, TypeVar, Tuple T = TypeVar('T') class C(Generic[T]): x: T class D(C[int]): ... class E(C[int]): x = 42 x = D.x # E: Access to generic instance variables via class is ambiguous reveal_type(x) # N: Revealed type is 'builtins.int' E.x # OK [case testGenericClassMethodOverloaded] from typing import Generic, TypeVar, overload, Tuple T = TypeVar('T') class C(Generic[T]): @overload @classmethod def get(cls) -> T: ... @overload @classmethod def get(cls, n: int) -> Tuple[T, ...]: ... @classmethod def get(cls, n: int = 0): pass class D(C[str]): ... reveal_type(D.get()) # N: Revealed type is 'builtins.str*' reveal_type(D.get(42)) # N: Revealed type is 'builtins.tuple[builtins.str*]' [builtins fixtures/classmethod.pyi] [case testGenericClassMethodAnnotation] from typing import Generic, TypeVar, Type T = TypeVar('T') class Maker(Generic[T]): x: T @classmethod def get(cls) -> T: ... class B(Maker[B]): ... 
def f(o: Maker[T]) -> T: if bool(): return o.x return o.get() b = f(B()) reveal_type(b) # N: Revealed type is '__main__.B*' def g(t: Type[Maker[T]]) -> T: if bool(): return t.x return t.get() bb = g(B) reveal_type(bb) # N: Revealed type is '__main__.B*' [builtins fixtures/classmethod.pyi] [case testGenericClassMethodAnnotationDecorator] from typing import Generic, Callable, TypeVar, Iterator T = TypeVar('T') class Box(Generic[T]): @classmethod def wrap(cls, generator: Callable[[], T]) -> Box[T]: ... class IteratorBox(Box[Iterator[T]]): ... @IteratorBox.wrap # E: Argument 1 to "wrap" of "Box" has incompatible type "Callable[[], int]"; expected "Callable[[], Iterator[]]" def g() -> int: ... [builtins fixtures/classmethod.pyi] [case testGenericClassMethodInGenericFunction] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class C(Generic[T]): @classmethod def get(cls) -> T: ... def func(x: S) -> S: return C[S].get() [builtins fixtures/classmethod.pyi] [case testMultipleAssignmentFromAnyIterable] from typing import Any class A: def __iter__(self) -> Any: ... x, y = A() reveal_type(x) # N: Revealed type is 'Any' reveal_type(y) # N: Revealed type is 'Any' [case testSubclassingGenericSelfClassMethod] from typing import TypeVar, Type AT = TypeVar('AT', bound='A') class A: @classmethod def from_config(cls: Type[AT]) -> AT: ... 
class B(A): @classmethod def from_config(cls: Type[B]) -> B: return B() [builtins fixtures/classmethod.pyi] [case testSubclassingGenericSelfClassMethodOptional] # flags: --strict-optional from typing import TypeVar, Type, Optional AT = TypeVar('AT', bound='A') class A: @classmethod def from_config(cls: Type[AT]) -> Optional[AT]: return None class B(A): @classmethod def from_config(cls: Type[B]) -> Optional[B]: return B() [builtins fixtures/classmethod.pyi] [case testSubclassingGenericSelfClassMethodNonAnnotated] from typing import TypeVar, Type AT = TypeVar('AT', bound='A') class A: @classmethod def from_config(cls: Type[AT]) -> AT: ... class B(A): @classmethod def from_config(cls) -> B: return B() [builtins fixtures/classmethod.pyi] [case testAbstractGenericMethodInference] from abc import ABC, abstractmethod from typing import Callable, Generic, TypeVar A = TypeVar('A') B = TypeVar('B') C = TypeVar('C') class TwoTypes(Generic[A, B]): def __call__(self) -> B: pass class MakeTwoAbstract(ABC, Generic[A]): def __init__(self) -> None: pass @abstractmethod def __call__(self, b: B) -> TwoTypes[A, B]: pass class MakeTwoConcrete(Generic[A]): def __call__(self, b: B) -> TwoTypes[A, B]: pass class MakeTwoGenericSubAbstract(Generic[C], MakeTwoAbstract[C]): def __call__(self, b: B) -> TwoTypes[C, B]: pass class MakeTwoAppliedSubAbstract(MakeTwoAbstract[str]): def __call__(self, b: B) -> TwoTypes[str, B]: pass class Test(): def make_two(self, mts: MakeTwoAbstract[A], mte: MakeTwoConcrete[A], mtgsa: MakeTwoGenericSubAbstract[A], mtasa: MakeTwoAppliedSubAbstract) -> None: reveal_type(mts(2)) # N: Revealed type is '__main__.TwoTypes[A`-1, builtins.int*]' reveal_type(mte(2)) # N: Revealed type is '__main__.TwoTypes[A`-1, builtins.int*]' reveal_type(mtgsa(2)) # N: Revealed type is '__main__.TwoTypes[A`-1, builtins.int*]' reveal_type(mtasa(2)) # N: Revealed type is '__main__.TwoTypes[builtins.str, builtins.int*]' reveal_type(MakeTwoConcrete[int]()('foo')) # N: Revealed type is 
'__main__.TwoTypes[builtins.int, builtins.str*]' reveal_type(MakeTwoConcrete[str]()(2)) # N: Revealed type is '__main__.TwoTypes[builtins.str, builtins.int*]' reveal_type(MakeTwoAppliedSubAbstract()('foo')) # N: Revealed type is '__main__.TwoTypes[builtins.str, builtins.str*]' reveal_type(MakeTwoAppliedSubAbstract()(2)) # N: Revealed type is '__main__.TwoTypes[builtins.str, builtins.int*]' reveal_type(MakeTwoGenericSubAbstract[str]()('foo')) # N: Revealed type is '__main__.TwoTypes[builtins.str, builtins.str*]' reveal_type(MakeTwoGenericSubAbstract[str]()(2)) # N: Revealed type is '__main__.TwoTypes[builtins.str, builtins.int*]' [case testGenericClassPropertyBound] from typing import Generic, TypeVar, Callable, Type, List, Dict T = TypeVar('T') U = TypeVar('U') def classproperty(f: Callable[..., U]) -> U: ... class C(Generic[T]): @classproperty def test(self) -> T: ... class D(C[str]): ... class E1(C[T], Generic[T, U]): ... class E2(C[U], Generic[T, U]): ... class G(C[List[T]]): ... x: C[int] y: Type[C[int]] reveal_type(x.test) # N: Revealed type is 'builtins.int*' reveal_type(y.test) # N: Revealed type is 'builtins.int*' xd: D yd: Type[D] reveal_type(xd.test) # N: Revealed type is 'builtins.str*' reveal_type(yd.test) # N: Revealed type is 'builtins.str*' ye1: Type[E1[int, str]] ye2: Type[E2[int, str]] reveal_type(ye1.test) # N: Revealed type is 'builtins.int*' reveal_type(ye2.test) # N: Revealed type is 'builtins.str*' xg: G[int] yg: Type[G[int]] reveal_type(xg.test) # N: Revealed type is 'builtins.list*[builtins.int*]' reveal_type(yg.test) # N: Revealed type is 'builtins.list*[builtins.int*]' class Sup: attr: int S = TypeVar('S', bound=Sup) def func(tp: Type[C[S]]) -> S: reveal_type(tp.test.attr) # N: Revealed type is 'builtins.int' reg: Dict[S, G[S]] reveal_type(reg[tp.test]) # N: Revealed type is '__main__.G*[S`-1]' reveal_type(reg[tp.test].test) # N: Revealed type is 'builtins.list*[S`-1]' if bool(): return tp.test else: return reg[tp.test].test[0] [builtins 
fixtures/dict.pyi] mypy-0.761/test-data/unit/check-ignore.test0000644€tŠÔÚ€2›s®0000001220013576752246025017 0ustar jukkaDROPBOX\Domain Users00000000000000[case testIgnoreTypeError] x = 1 x() # type: ignore x() # E: "int" not callable [case testIgnoreUndefinedName] x = 1 y # type: ignore z # E: Name 'z' is not defined [case testIgnoreImportError] import xyz_m # type: ignore xyz_m.foo 1() # E: "int" not callable [case testIgnoreImportFromError] from xyz_m import a, b # type: ignore a.foo b() 1() # E: "int" not callable [case testIgnoreImportFromErrorMultiline] from xyz_m import ( # type: ignore a, b ) a.foo b() 1() # E: "int" not callable [case testIgnoreImportAllError] from xyz_m import * # type: ignore x # E: Name 'x' is not defined 1() # E: "int" not callable [case testIgnoreImportBadModule] import m # type: ignore from m import a # type: ignore [file m.py] + [out] tmp/m.py:1: error: invalid syntax [case testIgnoreAppliesOnlyToMissing] import a # type: ignore import b # type: ignore reveal_type(a.foo) # N: Revealed type is 'Any' reveal_type(b.foo) # N: Revealed type is 'builtins.int' a.bar() b.bar() # E: Module has no attribute "bar" [file b.py] foo = 3 [builtins fixtures/module_all.pyi] [out] [case testIgnoreImportStarFromBadModule] from m import * # type: ignore [file m.py] + [out] tmp/m.py:1: error: invalid syntax [case testIgnoreAssignmentTypeError] x = 1 if int(): x = '' # type: ignore if int(): x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testIgnoreInvalidOverride] class A: def f(self) -> int: pass class B(A): def f(self) -> str: pass # type: ignore [case testIgnoreMissingModuleAttribute] import m m.x = object # type: ignore m.f() # type: ignore m.y # E: Module has no attribute "y" [file m.py] [builtins fixtures/module.pyi] [case testIgnoreTypeInferenceError] x = [] # type: ignore y = x x.append(1) [builtins fixtures/list.pyi] [case testIgnoreTypeInferenceError2] def f() -> None: pass x = f() # type: 
ignore y = x x = 1 [builtins fixtures/list.pyi] [case testIgnoreTypeInferenceErrorAndMultipleAssignment] x, y = [], [] # type: ignore z = x z = y [builtins fixtures/list.pyi] [case testIgnoreSomeStarImportErrors] from m1 import * from m2 import * # type: ignore # We should still import things that don't conflict. y() # E: "str" not callable z() # E: "int" not callable x() # E: "int" not callable [file m1.py] x = 1 y = '' [file m2.py] x = '' z = 1 [case testIgnoredModuleDefinesBaseClass1] from m import B # type: ignore class C(B): def f(self) -> None: self.f(1) # E: Too many arguments for "f" of "C" self.g(1) [out] [case testIgnoredModuleDefinesBaseClass2] import m # type: ignore class C(m.B): def f(self) -> None: ... c = C() c.f(1) # E: Too many arguments for "f" of "C" c.g(1) c.x = 1 [out] [case testIgnoredModuleDefinesBaseClassAndClassAttribute] import m # type: ignore class C(m.B): @staticmethod def f() -> None: pass C.f(1) # E: Too many arguments for "f" of "C" C.g(1) C.x = 1 [builtins fixtures/staticmethod.pyi] [out] [case testIgnoredModuleDefinesBaseClassWithInheritance1] from m import B # type: ignore class C: pass class D(C, B): def f(self) -> None: self.f(1) # E: Too many arguments for "f" of "D" self.g(1) [out] [case testIgnoredModuleDefinesBaseClassWithInheritance2] from m import B # type: ignore class C(B): pass class D(C): def f(self) -> None: self.f(1) # E: Too many arguments for "f" of "D" self.g(1) [out] [case testIgnoreWithFollowingIndentedComment] if 1: # type: ignore # blah pass [out] [case testIgnoreTooManyTypeArguments] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class Base(Generic[T, U]): pass class PartialBase(Base[T, int], Generic[T]): pass class Child(PartialBase[str, int]): # type: ignore pass def foo(x: Base[str, int]) -> None: pass foo(Child()) def bar(x: Base[str, str]) -> None: pass bar(Child()) [out] main:19: error: Argument 1 to "bar" has incompatible type "Child"; expected "Base[str, str]" [case 
testTypeIgnoreLineNumberWithinFile] import m pass # type: ignore m.f(kw=1) [file m.py] pass def f() -> None: pass [out] main:3: error: Unexpected keyword argument "kw" for "f" tmp/m.py:2: note: "f" defined here [case testIgnoreUnexpectedKeywordArgument] import m m.f(kw=1) # type: ignore [file m.py] def f() -> None: pass [out] [case testCannotIgnoreBlockingError] yield # type: ignore # E: 'yield' outside function [case testIgnoreWholeModule1] # flags: --warn-unused-ignores # type: ignore IGNORE # type: ignore # E: unused 'type: ignore' comment [case testIgnoreWholeModule2] # type: ignore if True: IGNORE [case testIgnoreWholeModule3] # type: ignore @d class C: ... IGNORE [case testIgnoreWholeModule4] # type: ignore @d def f(): ... IGNORE [case testIgnoreWholeModule5] # type: ignore import MISSING [case testIgnoreWholeModulePy27] # flags: --python-version 2.7 # type: ignore IGNORE [case testDontIgnoreWholeModule1] if True: # type: ignore ERROR # E: Name 'ERROR' is not defined ERROR # E: Name 'ERROR' is not defined [case testDontIgnoreWholeModule2] @d # type: ignore class C: ... ERROR # E: Name 'ERROR' is not defined [case testDontIgnoreWholeModule3] @d # type: ignore def f(): ... ERROR # E: Name 'ERROR' is not defined mypy-0.761/test-data/unit/check-incomplete-fixture.test0000644€tŠÔÚ€2›s®0000000630013576752246027363 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for reporting errors when a test case uses a fixture with -- missing definitions. At least in the most common cases this should not -- result in an uncaught exception. These tests make sure that this behavior -- does not regress. -- -- NOTE: These tests do NOT test behavior of mypy outside tests. [case testVariableUndefinedUsingDefaultFixture] import m # This used to cause a crash since types.ModuleType is not available # by default. We fall back to 'object' now. 
m.x # E: "object" has no attribute "x" [file m.py] [case testListMissingFromStubs] from typing import List def f(x: List[int]) -> None: pass [out] main:1: error: Module 'typing' has no attribute 'List' main:1: note: Maybe your test fixture does not define "builtins.list"? main:1: note: Consider adding [builtins fixtures/list.pyi] to your test description [case testDictMissingFromStubs] from typing import Dict def f(x: Dict[int]) -> None: pass [out] main:1: error: Module 'typing' has no attribute 'Dict' main:1: note: Maybe your test fixture does not define "builtins.dict"? main:1: note: Consider adding [builtins fixtures/dict.pyi] to your test description [case testSetMissingFromStubs] from typing import Set def f(x: Set[int]) -> None: pass [out] main:1: error: Module 'typing' has no attribute 'Set' main:1: note: Maybe your test fixture does not define "builtins.set"? main:1: note: Consider adding [builtins fixtures/set.pyi] to your test description [case testBaseExceptionMissingFromStubs] e: BaseException [out] main:1: error: Name 'BaseException' is not defined main:1: note: Maybe your test fixture does not define "builtins.BaseException"? main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description [case testExceptionMissingFromStubs] e: Exception [out] main:1: error: Name 'Exception' is not defined main:1: note: Maybe your test fixture does not define "builtins.Exception"? main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description [case testIsinstanceMissingFromStubs] if isinstance(1, int): pass [out] main:1: error: Name 'isinstance' is not defined main:1: note: Maybe your test fixture does not define "builtins.isinstance"? main:1: note: Consider adding [builtins fixtures/isinstancelist.pyi] to your test description [case testInvalidTupleDefinitionFromStubs] from typing import Tuple x: Tuple[int, ...] 
x[0] for y in x: pass [out] -- These errors are pretty bad, but keeping this test anyway to -- avoid things getting worse. main:3: error: Value of type "Tuple[int, ...]" is not indexable main:4: error: "Tuple[int, ...]" has no attribute "__iter__" (not iterable) [case testClassmethodMissingFromStubs] class A: @classmethod def f(cls): pass [out] main:2: error: Name 'classmethod' is not defined main:2: note: Maybe your test fixture does not define "builtins.classmethod"? main:2: note: Consider adding [builtins fixtures/classmethod.pyi] to your test description [case testPropertyMissingFromStubs] class A: @property def f(self): pass [out] main:2: error: Name 'property' is not defined main:2: note: Maybe your test fixture does not define "builtins.property"? main:2: note: Consider adding [builtins fixtures/property.pyi] to your test description mypy-0.761/test-data/unit/check-incremental.test0000644€tŠÔÚ€2›s®0000030506613576752246026054 0ustar jukkaDROPBOX\Domain Users00000000000000-- Checks for incremental mode (see testcheck.py). -- Each test is run at least twice, once with a cold cache, once with a warm cache. -- Before the tests are run again, in step N any *.py.N files are copied to -- *.py. There are at least two runs; more as long as there are *.py.N files. -- -- You can add an empty section like `[delete mod.py.2]` to delete `mod.py` -- before the second run. -- -- Errors expected in the first run should be in the `[out1]` section, and -- errors expected in the second run should be in the `[out2]` section, and so on. -- If a section is omitted, it is expected there are no errors on that run. -- The number of runs is determined by the highest N in all [outN] sections, but -- there are always at least two runs. (Note that [out] is equivalent to [out1].) 
-- -- The list of modules to be checked can be specified using -- # cmd: mypy -m mod1 mod2 mod3 -- To check a different list on the second run, use -- # cmd2: mypy -m mod1 mod3 -- (and cmd3 for the third run, and so on). -- -- Extra command line flags may be specified using -- # flags: --some-flag -- If the second run requires different flags, those can be specified using -- # flags2: --another-flag -- (and flags3 for the third run, and so on). -- -- Incremental tests involving plugins that get updated are also supported. -- All plugin files that are updated *must* end in '_plugin', so they will -- be unloaded from 'sys.modules' between incremental steps. -- -- Any files that we expect to be rechecked should be annotated in the [rechecked] -- annotation, and any files expect to be stale (aka have a modified interface) -- should be annotated in the [stale] annotation. Note that a file that ends up -- producing an error has its caches deleted and is marked stale automatically. -- Such files do not need to be included in [stale ...] list. -- -- The test suite will automatically assume that __main__ is stale and rechecked in -- all cases so we can avoid constantly having to annotate it. The list of -- rechecked/stale files can be in any arbitrary order, or can be left empty -- if no files should be rechecked/stale. -- -- There are additional incremental mode test cases in check-serialize.test. 
[case testIncrementalEmpty] [rechecked] [stale] [case testIncrementalBasics] import m [file m.py] def foo(): pass [file m.py.2] def foo() -> None: pass [rechecked m] [stale m] [case testIncrementalError] import m [file m.py] def foo() -> None: pass [file m.py.2] def foo() -> None: bar() [rechecked m] [stale] [out2] tmp/m.py:2: error: Name 'bar' is not defined [case testIncrementalSimpleImportSequence] import mod1 mod1.func1() [file mod1.py] import mod2 def func1() -> None: mod2.func2() [file mod2.py] import mod3 def func2() -> None: mod3.func3() [file mod3.py] def func3() -> None: pass [rechecked] [stale] [case testIncrementalInternalChangeOnly] import mod1 mod1.func1() [file mod1.py] import mod2 def func1() -> None: mod2.func2() [file mod2.py] import mod3 def func2() -> None: mod3.func3() [file mod3.py] def func3() -> None: pass [file mod3.py.2] def func3() -> None: 3 + 2 [rechecked mod3] [stale] [case testIncrementalImportGone] import mod1 [file mod1.py] from mod2 import A def func1() -> A: pass [file mod2.py] class A: pass [file mod1.py.2] def func1() -> A: pass [rechecked mod1] [stale] [out2] tmp/mod1.py:1: error: Name 'A' is not defined [case testIncrementalCallable] import mod1 [file mod1.py] from typing import Callable from mypy_extensions import Arg def func1() -> Callable[[Arg(int, 'x')], int]: pass [file mod1.py.2] from typing import Callable from mypy_extensions import Arg def func1() -> Callable[[Arg(int, 'x')], int]: ... 
[rechecked mod1] [stale] [builtins fixtures/dict.pyi] [case testIncrementalSameNameChange] import mod1 [file mod1.py] from mod2 import A def func1() -> A: pass [file mod2.py] class A: pass [file mod2.py.2] class Parent: pass class A(Parent): pass [rechecked mod1, mod2] [stale mod2] [case testIncrementalPartialInterfaceChange] import mod1 mod1.func1() [file mod1.py] import mod2 def func1() -> None: mod2.func2() [file mod2.py] import mod3 def func2() -> None: mod3.func3() [file mod3.py] def func3() -> None: pass [file mod3.py.2] def func3() -> int: return 2 [rechecked mod2, mod3] [stale mod3] [case testIncrementalInternalFunctionDefinitionChange] import mod1 [file mod1.py] import mod2 def accepts_int(a: int) -> int: return a accepts_int(mod2.foo()) [file mod2.py] def foo() -> int: def inner() -> int: return 42 return inner() [file mod2.py.2] def foo() -> int: def inner2() -> str: return "foo" return inner2() [rechecked mod1, mod2] [stale] [out2] tmp/mod2.py:4: error: Incompatible return value type (got "str", expected "int") [case testIncrementalInternalScramble] import mod1 [file mod1.py] import mod2 mod2.foo() [file mod2.py] def baz() -> int: return 3 def bar() -> int: return baz() def foo() -> int: return bar() [file mod2.py.2] def foo() -> int: return baz() def bar() -> int: return bar() def baz() -> int: return 42 [rechecked mod2] [stale] [case testIncrementalMethodInterfaceChange] import mod1 [file mod1.py] import mod2 [file mod2.py] class Foo: def bar(self, a: str) -> str: return "a" [file mod2.py.2] class Foo: def bar(self, a: float) -> str: return "a" [rechecked mod1, mod2] [stale mod2] [case testIncrementalBaseClassChange] import mod1 [file mod1.py] from mod2 import Child Child().good_method() [file mod2.py] class Good: def good_method(self) -> int: return 1 class Bad: pass class Child(Good): pass [file mod2.py.2] class Good: def good_method(self) -> int: return 1 class Bad: pass class Child(Bad): pass [rechecked mod1, mod2] [stale mod2] [out2] 
tmp/mod1.py:2: error: "Child" has no attribute "good_method" [case testIncrementalCascadingChange] import mod1 [file mod1.py] from mod2 import A def accepts_int(a: int) -> None: pass accepts_int(A) [file mod2.py] from mod3 import B A = B [file mod3.py] from mod4 import C B = C [file mod4.py] C = 3 [file mod4.py.2] C = "A" [rechecked mod1, mod2, mod3, mod4] [stale mod2, mod3, mod4] [out2] tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalBrokenCascade] import mod1 [file mod1.py] import mod2 def accept_int(a: int) -> int: return a accept_int(mod2.mod3.mod4.const) [file mod2.py] import mod3 [file mod3.py] import mod4 [file mod4.py] const = 3 [file mod3.py.2] # Import to mod4 is gone! [rechecked mod1, mod2, mod3] [stale mod3] [builtins fixtures/module.pyi] [out2] tmp/mod1.py:3: error: Module has no attribute "mod4" [case testIncrementalLongBrokenCascade] import mod1 [file mod1.py] import mod2 def accept_int(a: int) -> int: return a accept_int(mod2.mod3.mod4.mod5.mod6.mod7.const) [file mod2.py] import mod3 [file mod3.py] import mod4 [file mod4.py] import mod5 [file mod5.py] import mod6 [file mod6.py] import mod7 [file mod7.py] const = 3 [file mod6.py.2] # Import to mod7 is gone! [rechecked mod1, mod5, mod6] [stale mod6] [builtins fixtures/module.pyi] [out2] tmp/mod1.py:3: error: Module has no attribute "mod7" [case testIncrementalNestedBrokenCascade] import mod1 [file mod1.py] import mod2 def accept_int(a: int) -> int: return a accept_int(mod2.mod3.mod4.const) [file mod2/__init__.py] import mod2.mod3 as mod3 [file mod2/mod3/__init__.py] import mod2.mod3.mod4 as mod4 [file mod2/mod3/__init__.py.2] # Import is gone! 
[file mod2/mod3/mod4.py] const = 3 [rechecked mod1, mod2, mod2.mod3] [stale mod2.mod3] [builtins fixtures/module.pyi] [out2] tmp/mod1.py:3: error: Module has no attribute "mod4" [case testIncrementalNestedBrokenCascadeWithType1] import mod1, mod2.mod3.mod5 [file mod1.py] import mod2 def accept_int(x: int) -> None: pass def produce() -> mod2.CustomType: return mod2.CustomType() a = produce() accept_int(a.foo()) [file mod2/__init__.py] from mod2.mod3 import CustomType [file mod2/mod3/__init__.py] from mod2.mod3.mod4 import CustomType [file mod2/mod3/__init__.py.2] # Import a different class that also happens to be called 'CustomType' from mod2.mod3.mod5 import CustomType def produce() -> CustomType: return CustomType() [file mod2/mod3/mod4.py] class CustomType: def foo(self) -> int: return 1 [file mod2/mod3/mod5.py] class CustomType: def foo(self) -> str: return "a" [rechecked mod1, mod2, mod2.mod3] [stale mod2, mod2.mod3] [builtins fixtures/module.pyi] [out1] [out2] tmp/mod1.py:6: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalNestedBrokenCascadeWithType2] import mod1, mod2.mod3.mod5 [file mod1.py] from mod2 import produce def accept_int(x: int) -> None: pass a = produce() accept_int(a.foo()) [file mod2/__init__.py] from mod2.mod3 import produce [file mod2/mod3/__init__.py] from mod2.mod3.mod4 import CustomType def produce() -> CustomType: return CustomType() [file mod2/mod3/__init__.py.2] # Import a different class that also happens to be called 'CustomType' from mod2.mod3.mod5 import CustomType def produce() -> CustomType: return CustomType() [file mod2/mod3/mod4.py] class CustomType: def foo(self) -> int: return 1 [file mod2/mod3/mod5.py] class CustomType: def foo(self) -> str: return "a" [rechecked mod1, mod2, mod2.mod3] [stale mod2.mod3] [builtins fixtures/module.pyi] [out1] [out2] tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalRemoteChange] 
import mod1 [file mod1.py] import mod2 def accepts_int(a: int) -> None: pass accepts_int(mod2.mod3.mod4.const) [file mod2.py] import mod3 [file mod3.py] import mod4 [file mod4.py] const = 3 [file mod4.py.2] const = "foo" [rechecked mod1, mod3, mod4] [stale mod4] [out2] tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalBadChange] import mod1 [file mod1.py] from mod2 import func2 def func1() -> int: return func2() [file mod2.py] def func2() -> int: return 1 [file mod2.py.2] def func2() -> str: return "foo" [rechecked mod1, mod2] [stale mod2] [out2] tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int") [case testIncrementalBadChangeWithSave] import mod0 [file mod0.py] import mod1 A = mod1.func2() [file mod1.py] from mod2 import func2 def func1() -> int: return func2() [file mod2.py] def func2() -> int: return 1 [file mod2.py.2] def func2() -> str: return "foo" [rechecked mod0, mod1, mod2] [stale mod2] [out2] tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int") [case testIncrementalOkChangeWithSave] import mod0 [file mod0.py] import mod1 A = mod1.func2() [file mod1.py] from mod2 import func2 def func1() -> int: func2() return 1 [file mod2.py] def func2() -> int: return 1 [file mod2.py.2] def func2() -> str: return "foo" [rechecked mod0, mod1, mod2] [stale mod0, mod2] [out2] [case testIncrementalWithComplexDictExpression] import mod1 [file mod1.py] import mod1_private [file mod1_private.py] my_dict = { 'a': [1, 2, 3], 'b': [4, 5, 6] } [file mod1_private.py.2] my_dict = { 'a': [1, 2, 3], 'b': [4, 5, 'a'] } [rechecked mod1, mod1_private] [stale mod1_private] [builtins fixtures/dict.pyi] [case testIncrementalWithComplexConstantExpressionNoAnnotation] import mod1 [file mod1.py] import mod1_private [file mod1_private.py] def foobar() -> int: return 1 def baz() -> int: return 2 const = 1 + foobar() [file mod1_private.py.2] def foobar() -> int: return 1 def 
baz() -> int: return 2 const = 1 + baz() [rechecked mod1_private] [stale] [case testIncrementalWithComplexConstantExpressionWithAnnotation] import mod1 [file mod1.py] import mod1_private [file mod1_private.py] def foobar() -> int: return 1 def baz() -> int: return 2 const = 1 + foobar() # type: int [file mod1_private.py.2] def foobar() -> int: return 1 def baz() -> int: return 2 const = 1 + baz() # type: int [rechecked mod1_private] [stale] [case testIncrementalSmall] import mod1 [file mod1.py] import mod1_private def accepts_int(a: int) -> None: pass accepts_int(mod1_private.some_func(12)) [file mod1_private.py] def some_func(a: int) -> int: return 1 [file mod1_private.py.2] def some_func(a: int) -> str: return "a" [rechecked mod1, mod1_private] [stale mod1_private] [builtins fixtures/ops.pyi] [out2] tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalWithDecorators] import mod1 [file mod1.py] import mod1_private def accepts_int(a: int) -> None: pass accepts_int(mod1_private.some_func(12)) [file mod1_private.py] from typing import Callable def multiply(f: Callable[[int], int]) -> Callable[[int], int]: return lambda a: f(a) * 10 def stringify(f: Callable[[int], int]) -> Callable[[int], str]: return lambda a: str(f(a)) @multiply def some_func(a: int) -> int: return a + 2 [file mod1_private.py.2] from typing import Callable def multiply(f: Callable[[int], int]) -> Callable[[int], int]: return lambda a: f(a) * 10 def stringify(f: Callable[[int], int]) -> Callable[[int], str]: return lambda a: str(f(a)) @stringify def some_func(a: int) -> int: return a + 2 [rechecked mod1, mod1_private] [stale mod1_private] [builtins fixtures/ops.pyi] [out2] tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalChangingClassAttributes] import mod1 [file mod1.py] import mod2 mod2.Foo.A [file mod2.py] class Foo: A = 3 [file mod2.py.2] class Foo: A = "hello" 
[rechecked mod1, mod2] [stale mod2] [case testIncrementalChangingFields] import mod1 [file mod1.py] import mod2 f = mod2.Foo() f.A [file mod2.py] class Foo: def __init__(self) -> None: self.A = 3 [file mod2.py.2] class Foo: def __init__(self) -> None: self.A = "hello" [rechecked mod1, mod2] [stale mod2] [out2] [case testIncrementalChangingFieldsWithAssignment] import mod1 [file mod1.py] import mod2 f = mod2.Foo() B = f.A [file mod2.py] class Foo: def __init__(self) -> None: self.A = 3 [file mod2.py.2] class Foo: def __init__(self) -> None: self.A = "hello" [rechecked mod1, mod2] [stale mod1, mod2] [case testIncrementalCheckingChangingFields] import mod1 [file mod1.py] import mod2 def accept_int(a: int) -> int: return a f = mod2.Foo() accept_int(f.A) [file mod2.py] class Foo: def __init__(self) -> None: self.A = 3 [file mod2.py.2] class Foo: def __init__(self) -> None: self.A = "hello" [rechecked mod1, mod2] [stale mod2] [out2] tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalNestedClassDefinition] import mod1 [file mod1.py] import mod2 b = mod2.Foo.Bar() b.attr [file mod2.py] class Foo: class Bar: attr = 3 [file mod2.py.2] class Foo: class Bar: attr = "foo" [rechecked mod1, mod2] [stale mod2] [case testIncrementalSimpleBranchingModules] import mod1 import mod2 [file mod1.py] def func() -> None: pass [file mod2.py] def func() -> None: pass [file mod1.py.2] def func() -> int: return 1 [rechecked mod1] [stale mod1] [case testIncrementalSubmoduleImport] from parent.childA import Foo def func1() -> Foo: return Foo() [file parent/__init__.py] from parent.childA import Foo from parent.childB import Bar __all__ = ['Foo', 'Bar'] [file parent/childA.py] import parent class Foo: def test(self) -> int: return parent.Bar().test() [file parent/childB.py] class Bar: def test(self) -> int: return 3 [builtins fixtures/module_all.pyi] [rechecked] [stale] [case testIncrementalSubmoduleWithAttr] import mod.child x = 
mod.child.Foo() x.bar() [file mod/__init__.py] [file mod/child.py] class Foo: def bar(self) -> None: pass [builtins fixtures/module.pyi] [rechecked] [stale] [case testIncrementalNestedSubmoduleImportFromWithAttr] from mod1.mod2 import mod3 def accept_int(a: int) -> None: pass accept_int(mod3.val3) [file mod1/__init__.py] val1 = 1 [file mod1/mod2/__init__.py] val2 = 1 [file mod1/mod2/mod3.py] val3 = 1 [builtins fixtures/module.pyi] [rechecked] [stale] [case testIncrementalNestedSubmoduleWithAttr] import mod1.mod2.mod3 def accept_int(a: int) -> None: pass accept_int(mod1.mod2.mod3.val3) accept_int(mod1.mod2.val2) accept_int(mod1.val1) [file mod1/__init__.py] val1 = 1 [file mod1/mod2/__init__.py] val2 = 1 [file mod1/mod2/mod3.py] val3 = 1 [builtins fixtures/module.pyi] [rechecked] [stale] [case testIncrementalSubmoduleParentWithImportFrom] import parent [file parent/__init__.py] from parent import a [file parent/a.py] val = 3 [builtins fixtures/args.pyi] [stale] [case testIncrementalSubmoduleParentBackreference] import parent [file parent/__init__.py] from parent import a [file parent/a.py] import parent.b [file parent/b.py] [builtins fixtures/args.pyi] [stale] [case testIncrementalSubmoduleParentBackreferenceComplex] import parent [file parent/__init__.py] import parent.a [file parent/a.py] import parent.b import parent.c [file parent/b.py] import parent.a [file parent/c.py] import parent.a [builtins fixtures/args.pyi] [stale] [case testIncrementalReferenceNewFileWithImportFrom] from parent import a [file parent/__init__.py] [file parent/a.py] [file parent/a.py.2] from parent import b reveal_type(b.x) [file parent/b.py.2] x = 10 [stale parent.b] [rechecked parent.a, parent.b] [out2] tmp/parent/a.py:2: note: Revealed type is 'builtins.int' [case testIncrementalReferenceExistingFileWithImportFrom] from parent import a, b [file parent/__init__.py] [file parent/a.py] [file parent/b.py] [file parent/a.py.2] from parent import b [stale parent.a] [case 
testIncrementalWithTypeIgnoreOnDirectImport] import a, b [file a.py] import b # type: ignore [file b.py] import c [file c.py] [stale] [case testIncrementalWithTypeIgnoreOnImportFrom] import a, b [file a.py] from b import something # type: ignore [file b.py] import c something = 3 [file c.py] [stale] [case testIncrementalWithPartialTypeIgnore] import a # type: ignore import a.b [file a/__init__.py] [file a/b.py] [stale] [case testIncrementalAnyIsDifferentFromIgnore] import b [file b.py] from typing import Any import a.b [file b.py.2] from typing import Any a = 3 # type: Any import a.b [file a/__init__.py] [file a/b.py] [rechecked b] [stale] [out2] tmp/b.py:4: error: Name 'a' already defined on line 3 [case testIncrementalSilentImportsAndImportsInClass] # flags: --ignore-missing-imports class MyObject(object): from bar import FooBar [stale] [case testIncrementalSameFileSize] import m [file m.py] def foo(a: int) -> None: pass def bar(a: str) -> None: pass foo(3) [file m.py.2] def foo(a: int) -> None: pass def bar(a: str) -> None: pass bar(3) [rechecked m] [stale] [out2] tmp/m.py:4: error: Argument 1 to "bar" has incompatible type "int"; expected "str" [case testIncrementalUnsilencingModule] # cmd: mypy -m main package.subpackage.mod2 # cmd2: mypy -m main package.subpackage.mod1 # flags: --follow-imports=skip [file main.py] from package.subpackage.mod1 import Class def handle(c: Class) -> None: c.some_attribute [file package/__init__.py] # empty [file package/subpackage/__init__.py] # empty [file package/subpackage/mod1.py] import collections # Any previously unloaded package works here class Class: pass [file package/subpackage/mod2.py] # empty [builtins fixtures/args.pyi] [rechecked collections, main, package.subpackage.mod1] [stale collections, package.subpackage.mod1] [out2] tmp/main.py:4: error: "Class" has no attribute "some_attribute" [case testIncrementalWithIgnores] import foo # type: ignore [builtins fixtures/module.pyi] [stale] [case 
testIncrementalWithSilentImportsAndIgnore] # cmd: mypy -m main b # cmd2: mypy -m main c c.submodule # flags: --follow-imports=skip [file main.py] import a # type: ignore import b import c a.A().foo() b.B().foo() c.C().foo() [file b.py] class B: def foo(self) -> None: pass [file b.py.2] [file c/__init__.py] class C: pass [file c/submodule.py] val = 3 # type: int if int(): val = "foo" [builtins fixtures/module_all.pyi] [rechecked main, c, c.submodule] [stale c] [out2] tmp/c/submodule.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/main.py:7: error: "C" has no attribute "foo" [case testIncrementalRemoteError] import m m.C().foo().bar() [file m.py] import n class C: def foo(self) -> n.A: pass [file n.py] class A: def bar(self): pass [file n.py.2] class A: pass [rechecked m, n] [stale n] [out2] main:2: error: "A" has no attribute "bar" [case testIncrementalRemoteErrorFixed] import m m.C().foo().bar() [file m.py] import n class C: def foo(self) -> n.A: pass [file n.py] class A: pass [file n.py.2] class A: def bar(self): pass [rechecked m, n] [stale n] [out1] main:2: error: "A" has no attribute "bar" [case testIncrementalChangedError] import m [file m.py] import n def accept_int(x: int) -> None: pass accept_int(n.foo) [file n.py] foo = "hello" reveal_type(foo) [file n.py.2] foo = 3.14 reveal_type(foo) [rechecked m, n] [stale] [out1] tmp/n.py:2: note: Revealed type is 'builtins.str' tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [out2] tmp/n.py:2: note: Revealed type is 'builtins.float' tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "float"; expected "int" [case testIncrementalReplacingImports] import good, bad, client [file good.py] def foo(a: int) -> None: pass [file bad.py] def foo(a: str) -> None: pass [file client.py] import good import bad from good import foo foo(3) [file client.py.2] import good import bad from bad import foo foo(3) [rechecked 
client] [stale] [out2] tmp/client.py:4: error: Argument 1 to "foo" has incompatible type "int"; expected "str" [case testIncrementalChangingAlias] import m1, m2, m3, m4, m5 [file m1.py] from m2 import A def accepts_int(x: int) -> None: pass accepts_int(A()) [file m2.py] from m3 import A [file m3.py] from m4 import B A = B [file m3.py.2] from m5 import C A = C [file m4.py] def B() -> int: return 42 [file m5.py] def C() -> str: return "hello" [rechecked m1, m2, m3] [stale m3] [out2] tmp/m1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalStoresAliasTypeVars] import a [file mod.py] from typing import TypeVar, Union T = TypeVar('T') Alias = Union[int, T] x: Alias[str] [file a.py] from mod import Alias, x [file a.py.2] from mod import Alias, x reveal_type(x) y: Alias[int] reveal_type(y) [out2] tmp/a.py:3: note: Revealed type is 'Union[builtins.int, builtins.str]' tmp/a.py:5: note: Revealed type is 'Union[builtins.int, builtins.int]' [case testIncrementalSilentImportsWithBlatantError] # cmd: mypy -m main # flags: --follow-imports=skip [file main.py] from evil import Hello [file main.py.2] from evil import Hello reveal_type(Hello()) [file evil.py] def accept_int(x: int) -> None: pass accept_int("not an int") [rechecked main] [stale] [out2] tmp/main.py:2: note: Revealed type is 'Any' [case testIncrementalImportIsNewlySilenced] # cmd: mypy -m main foo # cmd2: mypy -m main # flags: --follow-imports=skip [file main.py] from foo import bar def accept_int(x: int) -> None: pass accept_int(bar) [file foo.py] bar = 3 [file foo.py.2] # Empty! 
[rechecked main] [stale main] [case testIncrementalSilencedModuleNoLongerCausesError] # cmd: mypy -m main evil # cmd2: mypy -m main # flags: --follow-imports=skip [file main.py] from evil import bar def accept_int(x: int) -> None: pass accept_int(bar) reveal_type(bar) [file evil.py] bar = "str" [rechecked main] [stale] [out1] tmp/main.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" tmp/main.py:4: note: Revealed type is 'builtins.str' [out2] tmp/main.py:4: note: Revealed type is 'Any' [case testIncrementalFixedBugCausesPropagation] import mod1 [file mod1.py] from mod2 import A val = A().makeB().makeC().foo() reveal_type(val) [file mod2.py] from mod3 import B class A: def makeB(self) -> B: return B() [file mod3.py] from mod4 import C class B: def makeC(self) -> C: val = 3 # type: int if 1: val = "str" # deliberately triggering error return C() [file mod3.py.2] from mod4 import C class B: def makeC(self) -> C: return C() [file mod4.py] class C: def foo(self) -> int: return 1 [rechecked mod3, mod2, mod1] [stale mod3, mod2] [out1] tmp/mod3.py:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/mod1.py:3: note: Revealed type is 'builtins.int' [out2] tmp/mod1.py:3: note: Revealed type is 'builtins.int' [case testIncrementalIncidentalChangeWithBugCausesPropagation] import mod1 [file mod1.py] from mod2 import A val = A().makeB().makeC().foo() reveal_type(val) [file mod2.py] from mod3 import B class A: def makeB(self) -> B: return B() [file mod3.py] from mod4 import C class B: def makeC(self) -> C: val = 3 # type: int if 1: val = "str" # deliberately triggering error return C() [file mod4.py] class C: def foo(self) -> int: return 1 [file mod4.py.2] class C: def foo(self) -> str: return 'a' [rechecked mod4, mod3, mod2, mod1] [stale mod4] [out1] tmp/mod3.py:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/mod1.py:3: note: Revealed type is 
'builtins.int' [out2] tmp/mod3.py:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/mod1.py:3: note: Revealed type is 'builtins.str' [case testIncrementalIncidentalChangeWithBugFixCausesPropagation] import mod1 [file mod1.py] from mod2 import A val = A().makeB().makeC().foo() reveal_type(val) [file mod2.py] from mod3 import B class A: def makeB(self) -> B: return B() [file mod3.py] from mod4 import C class B: def makeC(self) -> C: val = 3 # type: int if 1: val = "str" # deliberately triggering error return C() [file mod3.py.2] from mod4 import C class B: def makeC(self) -> C: return C() [file mod4.py] class C: def foo(self) -> int: return 1 [file mod4.py.2] class C: def foo(self) -> str: return 'a' [rechecked mod4, mod3, mod2, mod1] [stale mod4, mod3, mod2] [out1] tmp/mod3.py:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/mod1.py:3: note: Revealed type is 'builtins.int' [out2] tmp/mod1.py:3: note: Revealed type is 'builtins.str' [case testIncrementalSilentImportsWithInnerImports] # cmd: mypy -m main foo # flags: --ignore-missing-imports [file main.py] from foo import MyClass m = MyClass() [file main.py.2] from foo import MyClass m = MyClass() reveal_type(m.val) [file foo.py] class MyClass: def __init__(self) -> None: import unrelated self.val = unrelated.test() [rechecked main] [stale] [out2] tmp/main.py:3: note: Revealed type is 'Any' [case testIncrementalSilentImportsWithInnerImportsAndNewFile] # cmd: mypy -m main foo # cmd2: mypy -m main foo unrelated # flags: --follow-imports=skip [file main.py] from foo import MyClass m = MyClass() [file main.py.2] from foo import MyClass m = MyClass() reveal_type(m.val) [file foo.py] class MyClass: def __init__(self) -> None: import unrelated self.val = unrelated.test() [file unrelated.py] def test() -> str: return "foo" [rechecked main, foo, unrelated] [stale foo, unrelated] [out2] tmp/main.py:3: note: Revealed type is 
'builtins.str' [case testIncrementalWorksWithNestedClasses] import foo [file foo.py] class MyClass: class NestedClass: pass class_attr = NestedClass() [rechecked] [stale] [case testIncrementalWorksWithBasicProtocols] import a [file a.py] from b import P x: int y: P[int] x = y.meth() class C: def meth(self) -> int: pass y = C() [file a.py.2] from b import P x: str y: P[str] x = y.meth() class C: def meth(self) -> str: pass y = C() [file b.py] from typing import Protocol, TypeVar T = TypeVar('T', covariant=True) class P(Protocol[T]): def meth(self) -> T: pass [case testIncrementalSwitchFromNominalToStructural] import a [file a.py] from b import B, fun class C(B): def x(self) -> int: pass def y(self) -> int: pass fun(C()) [file b.py] from typing import Protocol class B: def x(self) -> float: pass def fun(arg: B) -> None: arg.x() [file b.py.2] from typing import Protocol class B(Protocol): def x(self) -> float: pass def fun(arg: B) -> None: arg.x() [file a.py.3] from b import fun class C: def x(self) -> int: pass def y(self) -> int: pass fun(C()) [out1] [out2] [out3] [case testIncrementalSwitchFromStructuralToNominal] import a [file a.py] from b import fun class C: def x(self) -> int: pass def y(self) -> int: pass fun(C()) [file b.py] from typing import Protocol class B(Protocol): def x(self) -> float: pass def fun(arg: B) -> None: arg.x() [file b.py.2] from typing import Protocol class B: def x(self) -> float: pass def fun(arg: B) -> None: arg.x() [out1] [out2] tmp/a.py:5: error: Argument 1 to "fun" has incompatible type "C"; expected "B" [case testIncrementalWorksWithNamedTuple] import foo [file foo.py] from mid import MyTuple def accept_int(x: int) -> None: pass accept_int(MyTuple(1, "b", "c").a) [file mid.py] from bar import MyTuple [file bar.py] from typing import NamedTuple MyTuple = NamedTuple('MyTuple', [ ('a', int), ('b', str), ('c', str) ]) [file bar.py.2] from typing import NamedTuple MyTuple = NamedTuple('MyTuple', [ ('b', int), # a and b are swapped ('a', 
str), ('c', str) ]) [rechecked bar, mid, foo] [stale bar] [out2] tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalWorksWithNestedNamedTuple] import foo [file foo.py] from mid import Outer def accept_int(x: int) -> None: pass accept_int(Outer.MyTuple(1, "b", "c").a) [file mid.py] from bar import Outer [file bar.py] from typing import NamedTuple class Outer: MyTuple = NamedTuple('MyTuple', [ ('a', int), ('b', str), ('c', str) ]) [file bar.py.2] from typing import NamedTuple class Outer: MyTuple = NamedTuple('MyTuple', [ ('b', int), # a and b are swapped ('a', str), ('c', str) ]) [rechecked bar, mid, foo] [stale bar] [out2] tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalPartialSubmoduleUpdate] # cmd: mypy -m a # cmd2: mypy -m a a.c # flags: --follow-imports=skip [file a/__init__.py] from .b import B from .c import C [file a/b.py] class B: pass [file a/c.py] class C: pass [file a/c.py.2] class C: pass pass [rechecked a, a.c] [stale a, a.c] [out] [case testIncrementalNestedClassRef] import top [file top.py] from funcs import callee from classes import Outer def caller(a: Outer.Inner) -> None: callee(a) [file funcs.py] from classes import Outer def callee(a: Outer.Inner) -> None: pass [file classes.py] class Outer: class Inner: pass [file top.py.2] from funcs import callee from classes import Outer def caller(a: Outer.Inner) -> int: callee(a) return 0 [case testIncrementalLoadsParentAfterChild] # cmd: mypy -m r.s [file r/__init__.py] from . import s [file r/m.py] class R: pass [file r/s.py] from . import m R = m.R a = None # type: R [file r/s.py.2] from . 
import m R = m.R a = None # type: R [case testIncrementalBaseClassAttributeConflict] class A: pass class B: pass class X: attr = None # type: A class Y: attr = None # type: B class Z(X, Y): pass [stale] [out] main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y" [out2] main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y" [case testIncrementalFollowImportsSilent] # flags: --follow-imports=silent import a [file a.py] x = 0 [file a.py.2] x = 0 x + '' [case testIncrementalFollowImportsSkip] # flags: --follow-imports=skip import a reveal_type(a.x) [file a.py] / [file a.py.2] // [out] main:3: note: Revealed type is 'Any' [out2] main:3: note: Revealed type is 'Any' [case testIncrementalFollowImportsError] # flags: --follow-imports=error import a [file a.py] / [file a.py.2] // [out1] main:2: error: Import of 'a' ignored main:2: note: (Using --follow-imports=error, module not passed on command line) [out2] main:2: error: Import of 'a' ignored main:2: note: (Using --follow-imports=error, module not passed on command line) [case testIncrementalFollowImportsVariable] # flags: --config-file tmp/mypy.ini import a reveal_type(a.x) [file a.py] x = 0 [file mypy.ini] \[mypy] follow_imports = normal [file mypy.ini.2] \[mypy] follow_imports = skip [out1] main:3: note: Revealed type is 'builtins.int' [out2] main:3: note: Revealed type is 'Any' [case testIncrementalNamedTupleInMethod] from ntcrash import nope [file ntcrash.py] from typing import NamedTuple class C: def f(self) -> None: A = NamedTuple('A', [('x', int), ('y', int)]) [out1] main:1: error: Module 'ntcrash' has no attribute 'nope' [out2] main:1: error: Module 'ntcrash' has no attribute 'nope' [case testIncrementalNamedTupleInMethod2] from ntcrash import nope [file ntcrash.py] from typing import NamedTuple class C: class D: def f(self) -> None: A = NamedTuple('A', [('x', int), ('y', int)]) [out1] main:1: error: Module 
'ntcrash' has no attribute 'nope' [out2] main:1: error: Module 'ntcrash' has no attribute 'nope' [case testIncrementalNamedTupleInMethod3] from ntcrash import nope [file ntcrash.py] from typing import NamedTuple class C: def a(self): class D: def f(self) -> None: A = NamedTuple('A', [('x', int), ('y', int)]) [out1] main:1: error: Module 'ntcrash' has no attribute 'nope' [out2] main:1: error: Module 'ntcrash' has no attribute 'nope' [case testIncrementalTypedDictInMethod] from tdcrash import nope [file tdcrash.py] from mypy_extensions import TypedDict class C: def f(self) -> None: A = TypedDict('A', {'x': int, 'y': int}) [builtins fixtures/dict.pyi] [out1] main:1: error: Module 'tdcrash' has no attribute 'nope' [out2] main:1: error: Module 'tdcrash' has no attribute 'nope' [case testIncrementalTypedDictInMethod2] from tdcrash import nope [file tdcrash.py] from mypy_extensions import TypedDict class C: class D: def f(self) -> None: A = TypedDict('A', {'x': int, 'y': int}) [builtins fixtures/dict.pyi] [out1] main:1: error: Module 'tdcrash' has no attribute 'nope' [out2] main:1: error: Module 'tdcrash' has no attribute 'nope' [case testIncrementalTypedDictInMethod3] from tdcrash import nope [file tdcrash.py] from mypy_extensions import TypedDict class C: def a(self): class D: def f(self) -> None: A = TypedDict('A', {'x': int, 'y': int}) [builtins fixtures/dict.pyi] [out1] main:1: error: Module 'tdcrash' has no attribute 'nope' [out2] main:1: error: Module 'tdcrash' has no attribute 'nope' [case testIncrementalInnerClassAttrInMethod] import crash nonexisting [file crash.py] class C: def f(self) -> None: class A: pass self.a = A() [out1] main:2: error: Name 'nonexisting' is not defined [out2] main:2: error: Name 'nonexisting' is not defined [case testIncrementalInnerClassAttrInMethodReveal] import crash reveal_type(crash.C().a) reveal_type(crash.D().a) [file crash.py] from typing import TypeVar, Generic T = TypeVar('T') class C: def f(self) -> None: class A: pass self.a 
= A() reveal_type(C().a) class D: def f(self) -> None: class A: def g(self) -> None: class B(Generic[T]): pass self.b = B[int]() self.a = A().b reveal_type(D().a) [out1] tmp/crash.py:8: note: Revealed type is 'crash.A@5' tmp/crash.py:17: note: Revealed type is 'crash.B@13[builtins.int*]' main:2: note: Revealed type is 'crash.A@5' main:3: note: Revealed type is 'crash.B@13[builtins.int*]' [out2] tmp/crash.py:8: note: Revealed type is 'crash.A@5' tmp/crash.py:17: note: Revealed type is 'crash.B@13[builtins.int*]' main:2: note: Revealed type is 'crash.A@5' main:3: note: Revealed type is 'crash.B@13[builtins.int*]' [case testGenericMethodRestoreMetaLevel] from typing import Dict d = {} # type: Dict[str, int] g = d.get # This should not crash: see https://github.com/python/mypy/issues/2804 [builtins fixtures/dict.pyi] [case testGenericMethodRestoreMetaLevel2] from typing import TypeVar T = TypeVar('T') class D: def m(self, x: T) -> T: return x g = D().m # This should not crash: see https://github.com/python/mypy/issues/2804 [builtins fixtures/dict.pyi] [case testGenericMethodRestoreMetaLevel3] from typing import TypeVar T = TypeVar('T') class C: def m(self, x: T) -> T: return x class D(C): def __init__(self) -> None: self.d = super().m # This should not crash: see https://github.com/python/mypy/issues/2804 [builtins fixtures/dict.pyi] [case testIncrementalPerFileFlags] # flags: --config-file tmp/mypy.ini import a [file a.py] pass [file mypy.ini] \[mypy] warn_no_return = False \[mypy-a] warn_no_return = True [rechecked] [case testIncrementalClassVar] from typing import ClassVar class A: x = None # type: ClassVar A().x = 0 [out1] main:4: error: Cannot assign to class variable "x" via instance [out2] main:4: error: Cannot assign to class variable "x" via instance [case testIncrementalClassVarGone] import m m.A().x = 0 [file m.py] from typing import ClassVar class A: x = None # type: ClassVar[int] [file m.py.2] class A: x = None # type: int [out1] main:2: error: Cannot 
assign to class variable "x" via instance [case testCachingClassVar] import b [file a.py] from typing import ClassVar class A: x = None # type: ClassVar[int] [file b.py] import a [file b.py.2] import a a.A().x = 0 [out2] tmp/b.py:2: error: Cannot assign to class variable "x" via instance [case testSerializeTypedDict] import b reveal_type(b.x) y: b.A reveal_type(y) [file b.py] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}) x: A [builtins fixtures/dict.pyi] [out1] main:2: note: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})' main:4: note: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})' [out2] main:2: note: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})' main:4: note: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})' [case testSerializeMetaclass] import b reveal_type(b.A.f()) m: b.M = b.A reveal_type(b.a.f()) [file b.py] from typing import Type class M(type): def f(cls) -> int: return 0 class A(metaclass=M): pass a: Type[A] [out] main:2: note: Revealed type is 'builtins.int' main:4: note: Revealed type is 'builtins.int' [out2] main:2: note: Revealed type is 'builtins.int' main:4: note: Revealed type is 'builtins.int' [case testSerializeMetaclassInImportCycle1] import b import c reveal_type(b.A.f()) m: c.M = b.A reveal_type(b.a.f()) [file b.py] from typing import Type from c import M class A(metaclass=M): pass a: Type[A] [file c.py] class M(type): def f(cls) -> int: return 0 [out] main:3: note: Revealed type is 'builtins.int' main:5: note: Revealed type is 'builtins.int' [out2] main:3: note: Revealed type is 'builtins.int' main:5: note: Revealed type is 'builtins.int' [case testSerializeMetaclassInImportCycle2] import b import c reveal_type(c.A.f()) m: b.M = c.A reveal_type(c.a.f()) [file b.py] from c import a class M(type): def f(cls) -> int: return 0 [file c.py] from typing import Type import b class A(metaclass=b.M): pass a: 
Type[A] [out] main:3: note: Revealed type is 'builtins.int' main:5: note: Revealed type is 'builtins.int' [out2] main:3: note: Revealed type is 'builtins.int' main:5: note: Revealed type is 'builtins.int' [case testDeleteFile] import n [file n.py] import m [file m.py] x = 1 [delete m.py.2] [rechecked n] [stale] [out2] tmp/n.py:1: error: Cannot find implementation or library stub for module named 'm' tmp/n.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testDeleteFileWithinCycle] import a [file a.py] import b [file b.py] import c [file c.py] import a [file a.py.2] import c [delete b.py.2] [rechecked a, c] [stale a] [out2] [case testThreePassesBasic] import m [file m.py] def foo(): pass [file m.py.2] def foo() -> None: pass [file m.py.3] def foo(): pass [rechecked m] [stale m] [rechecked2 m] [stale2 m] [out3] [case testThreePassesErrorInThirdPass] import m [file m.py] def foo(): pass [file m.py.2] def foo() -> None: pass [file m.py.3] def foo() -> int: return '' [rechecked m] [stale m] [rechecked2 m] [stale2] [out3] tmp/m.py:2: error: Incompatible return value type (got "str", expected "int") [case testThreePassesThirdPassFixesError] import n [file n.py] import m x = m.foo(1) [file m.py] def foo(x): pass [file m.py.2] def foo() -> str: pass [file m.py.3] def foo(x) -> int: pass [rechecked m, n] [stale m] [rechecked2 m, n] [stale2 m, n] [out2] tmp/n.py:2: error: Too many arguments for "foo" [out3] [case testCacheDeletedAfterErrorsFound] import a [file a.py] from b import x [file b.py] from c import x [file c.py] x = 1 [file c.py.2] 1 + 1 [file a.py.3] from b import x 1 + 1 [out] [out2] tmp/b.py:1: error: Module 'c' has no attribute 'x' [out3] tmp/b.py:1: error: Module 'c' has no attribute 'x' [case testCacheDeletedAfterErrorsFound2] import a [file a.py] from b import x [file b.py] from c import C x: C [file c.py] class C: pass [file c.py.2] def C(): pass [file a.py.3] from b import x 1 + 1 [out] [out2] tmp/b.py:2: error: 
Function "c.C" is not valid as a type tmp/b.py:2: note: Perhaps you need "Callable[...]" or a callback protocol? [out3] tmp/b.py:2: error: Function "c.C" is not valid as a type tmp/b.py:2: note: Perhaps you need "Callable[...]" or a callback protocol? [case testCacheDeletedAfterErrorsFound3] import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [file b.py.2] def f(x) -> None: pass [out] [out2] tmp/a.py:2: error: Too few arguments for "f" [out3] tmp/a.py:2: error: Too few arguments for "f" [case testCacheDeletedAfterErrorsFound4] import a [file a.py] from b import x [file b.py] from c import x [file c.py] from d import x [file d.py] x = 1 [file d.py.2] 1 + 1 [file a.py.3] from b import x 1 + 1 [out] [out2] tmp/c.py:1: error: Module 'd' has no attribute 'x' [out3] tmp/c.py:1: error: Module 'd' has no attribute 'x' [case testNoCrashOnDeletedWithCacheOnCmdline] # cmd: mypy -m nonexistent # cmd2: mypy -m nonexistent [file nonexistent.py] [delete nonexistent.py.2] [out] [out2] mypy: can't read file 'tmp/nonexistent.py': No such file or directory [case testSerializeAbstractPropertyIncremental] from abc import abstractmethod import typing class A: @property def f(self) -> int: return 1 @f.setter # type: ignore @abstractmethod def f(self, x: int) -> None: pass a = A() [builtins fixtures/property.pyi] [case testSerializeAbstractPropertyDisallowUntypedIncremental] # flags: --disallow-untyped-defs from abc import abstractmethod import typing class A: @property def f(self) -> int: return 1 @f.setter # type: ignore @abstractmethod def f(self, x: int) -> None: pass a = A() [builtins fixtures/property.pyi] [case testClassNamesResolutionCrashAccess] import mod [file mod.py] class C: def __init__(self) -> None: self.int = '' def f(self, f: int) -> None: pass [file mod.py.2] class C: def __init__(self) -> None: self.int = '' def f(self, f: int) -> None: f.x [out] [out2] tmp/mod.py:6: error: "int" has no attribute "x" [case testClassNamesResolutionCrashReadCache] 
import mod [file mod.py] import submod [file mod.py.2] from submod import C c = C() reveal_type(c.int) reveal_type(c.y) [file submod.py] from typing import List class C: def __init__(self) -> None: self.int = [] # type: List[int] def f(self, f: int) -> None: self.y = f [builtins fixtures/list.pyi] [out] [out2] tmp/mod.py:4: note: Revealed type is 'builtins.list[builtins.int]' tmp/mod.py:5: note: Revealed type is 'builtins.int' [case testClassNamesResolutionCrashReveal] import mod [file mod.py] class Foo(object): def __init__(self) -> None: self.bytes = b"foo" def bar(self, f: bytes): pass foo = Foo() foo.bar(b"test") [file mod.py.2] class Foo(object): def __init__(self) -> None: self.bytes = b"foo" def bar(self, f: bytes): reveal_type(f) foo = Foo() foo.bar(b"test") [out] [out2] tmp/mod.py:7: note: Revealed type is 'builtins.bytes' [case testIncrementalWithSilentImports] # cmd: mypy -m a # cmd2: mypy -m b # flags: --follow-imports=silent [file a.py] import b b.foo(1, 2) [file b.py] def foo(a: int, b: int) -> str: return a + b [out1] [out2] tmp/b.py:2: error: Incompatible return value type (got "int", expected "str") [case testForwardNamedTupleToUnionWithOtherNamedTUple] from typing import NamedTuple, Union class Person(NamedTuple): name: Union[str, "Pair"] class Pair(NamedTuple): first: str last: str Person(name=Pair(first="John", last="Doe")) [out] [case testNoCrashForwardRefToBrokenDoubleNewTypeIncremental] from typing import Any, List, NewType Foo = NewType('NotFoo', int) # type: ignore Foos = NewType('Foos', List[Foo]) # type: ignore def frob(foos: List[Foos]) -> None: pass [builtins fixtures/list.pyi] [out] [case testNoCrashForwardRefOverloadIncremental] from typing import overload, List @overload def f(x: int) -> int: ... @overload def f(x: F) -> F: ... 
def f(x): pass F = List[int] [builtins fixtures/list.pyi] [out] [case testNoCrashForwardRefOverloadIncrementalClass] from typing import overload, Tuple, NamedTuple x: C class C: @overload def f(self, x: str) -> N: pass @overload def f(self, x: int) -> int: pass def f(self, x): pass class N(NamedTuple): x: A A = Tuple[int] [builtins fixtures/tuple.pyi] [out] [case testNewTypeFromForwardNamedTupleIncremental] from typing import NewType, NamedTuple, Tuple NT = NewType('NT', N) class N(NamedTuple): x: int x: NT = N(1) # type: ignore x = NT(N(1)) [out] [case testNewTypeFromForwardTypedDictIncremental] from typing import NewType, Tuple, Dict from mypy_extensions import TypedDict NT = NewType('NT', N) # type: ignore class N(TypedDict): x: A A = Dict[str, int] [builtins fixtures/dict.pyi] [out] -- Some crazy self-referential named tuples, types dicts, and aliases -- to be sure that everything can be _serialized_ (i.e. ForwardRef's are removed). -- For this reason errors are silenced (tests with # type: ignore have equivalents in other files) [case testForwardTypeAliasInBase1] from typing import List class C(List['A']): pass A = List[int] x: int = C()[0][0] [builtins fixtures/list.pyi] [out] [case testForwardTypeAliasInBase2] from typing import List, Generic, TypeVar, NamedTuple T = TypeVar('T') class C(A, B): # type: ignore pass class G(Generic[T]): pass A = G[C] # type: ignore class B(NamedTuple): x: int C(1).x C(1)[0] [builtins fixtures/list.pyi] [out] [case testSerializeRecursiveAliases1] from typing import Type, Callable, Union A = Union[A, int] # type: ignore B = Callable[[B], int] # type: ignore C = Type[C] # type: ignore [out] [case testSerializeRecursiveAliases2] from typing import Type, Callable, Union A = Union[B, int] # type: ignore B = Callable[[C], int] # type: ignore C = Type[A] # type: ignore [out] [case testSerializeRecursiveAliases3] from typing import Type, Callable, Union, NamedTuple A = Union[B, int] # type: ignore B = Callable[[C], int] # type: ignore 
class C(NamedTuple): # type: ignore x: A [out] [case testGenericTypeAliasesForwardAnyIncremental1] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') IntNode = Node[int, S] AnyNode = Node[S, T] class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: self.x = x self.y = y def output() -> IntNode[str]: return Node(1, 'x') x = output() # type: IntNode y = None # type: IntNode y.x = 1 y.y = 1 y.y = 'x' z = Node(1, 'x') # type: AnyNode [out] [case testGenericTypeAliasesForwardAnyIncremental2] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: self.x = x self.y = y def output() -> IntNode[str]: return Node(1, 'x') x = output() # type: IntNode y = None # type: IntNode y.x = 1 y.y = 1 y.y = 'x' z = Node(1, 'x') # type: AnyNode IntNode = Node[int, S] AnyNode = Node[S, T] [out] [case testNamedTupleForwardAsUpperBoundSerialization] from typing import NamedTuple, TypeVar, Generic T = TypeVar('T', bound='M') class G(Generic[T]): x: T yg: G[M] z: int = G[M]().x.x z = G[M]().x[0] M = NamedTuple('M', [('x', int)]) [out] [case testSelfRefNTIncremental1] from typing import Tuple, NamedTuple Node = NamedTuple('Node', [ ('name', str), ('children', Tuple['Node', ...]), # type: ignore ]) n: Node [builtins fixtures/tuple.pyi] [case testSelfRefNTIncremental2] from typing import Tuple, NamedTuple A = NamedTuple('A', [ ('x', str), ('y', Tuple['B', ...]), # type: ignore ]) class B(NamedTuple): x: A y: int n: A [builtins fixtures/tuple.pyi] [case testSelfRefNTIncremental3] from typing import NamedTuple, Tuple class B(NamedTuple): x: Tuple[A, int] # type: ignore y: int A = NamedTuple('A', [ ('x', str), ('y', 'B'), ]) n: B m: A lst = [m, n] [builtins fixtures/tuple.pyi] [case testSelfRefNTIncremental4] from typing import NamedTuple class B(NamedTuple): x: A # type: ignore y: int class A(NamedTuple): x: str y: B n: A [builtins fixtures/tuple.pyi] [case testSelfRefNTIncremental5] 
from typing import NamedTuple B = NamedTuple('B', [ ('x', A), # type: ignore ('y', int), ]) A = NamedTuple('A', [ ('x', str), ('y', 'B'), ]) n: A def f(m: B) -> None: pass [builtins fixtures/tuple.pyi] [case testCrashWithPartialGlobalAndCycle] import bar [file foo.py] import bar my_global_dict = {} # type: ignore def external_func_0() -> None: global my_global_dict bar.external_list my_global_dict[12] = 0 [file bar.py] import foo external_list = [0] [builtins fixtures/dict.pyi] [case testIncrementalCrashOnTypeWithFunction] import a [file a.py] import b [file a.py.2] from b import x [file b.py] from typing import TypeVar, Type T = TypeVar('T') def tp(arg: T) -> Type[T]: pass def func(x: int) -> int: pass x = tp(func) [out] [out2] [case testReprocessModuleEvenIfInterfaceHashDoesNotChange] import a import d [file a.py] import b x: b.c.A x = b.c.A() [file b.py] import c [file c.py] class A: x = 1 [file d.py] import a def f() -> None: pass [file a.py.2] import b x: b.c.A [file c.py.3] class A: x = 2 [file d.py.4] import a def f() -> None: from c import A a.x = [A(), a.x][0] [builtins fixtures/list.pyi] [stale] [rechecked a] [stale2] [rechecked2 c] [stale3] [rechecked3 d] [out1] [out2] [out3] [out4] [case testTreeShadowingViaParentPackage] import m.semanal [file m/__init__.py] pass [file m/nodes.py] if False: import m.types import m.semanal class Node: line: int class FuncBase(Node): type: m.types.Type class OverloadedFuncDef(FuncBase): pass [file m/types.py] from m.nodes import Node class Type(Node): pass class Overloaded(Type): pass [file m/semanal.py] from m.nodes import OverloadedFuncDef from m.types import Overloaded class C: def func(self, defn: OverloadedFuncDef): defn.type = Overloaded() defn.type.line = 0 [file m/nodes.py.2] if False: import m.types import m.semanal class Node: line: int class FuncBase(Node): type: m.types.Type class OverloadedFuncDef(FuncBase): pass extra = 1 [file m/types.py.2] from m.nodes import Node class Type(Node): pass class 
Overloaded(Type): pass extra = 1 [builtins fixtures/list.pyi] [file m/semanal.py.2] from m.nodes import OverloadedFuncDef from m.types import Overloaded class C: def func(self, defn: OverloadedFuncDef): defn.type = Overloaded() defn.type.line = 0 extra = 1 [out1] [out2] [case testErrorsAffectDependentsOnly] # cmd: mypy -m m.a m.b m.c [file m/__init__.py] [file m/a.py] 1 + '' # Deliberate error [file m/b.py] import m.a # Depends on module with error [file m/c.py] import m # No error here [rechecked m.a, m.b] [out1] tmp/m/a.py:1: error: Unsupported operand types for + ("int" and "str") [out2] tmp/m/a.py:1: error: Unsupported operand types for + ("int" and "str") [case testDisallowAnyExprIncremental] # cmd: mypy -m main # flags: --disallow-any-expr [file ns.py] class Namespace: def __init__(self): self.user = 0 [file main.py] import ns user = ns.Namespace.user [out1] tmp/main.py:2: error: Expression has type "Any" [out2] tmp/main.py:2: error: Expression has type "Any" [case testIncrementalStrictOptional] # flags: --strict-optional import a 1 + a.foo() [file a.py] def foo() -> int: return 0 [file a.py.2] from typing import Optional def foo() -> Optional[int]: return 0 [out1] [out2] main:3: error: Unsupported operand types for + ("int" and "None") main:3: note: Right operand is of type "Optional[int]" [case testAttrsIncrementalSubclassingCached] from a import A import attr @attr.s(auto_attribs=True) class B(A): e: str = 'e' a = B(5, [5], 'foo') a.a = 6 a._b = [2] a.c = 'yo' a._d = 22 a.e = 'hi' [file a.py] import attr import attr from typing import List, ClassVar @attr.s(auto_attribs=True) class A: a: int _b: List[int] c: str = '18' _d: int = attr.ib(validator=None, default=18) E = 7 F: ClassVar[int] = 22 [builtins fixtures/list.pyi] [out1] [out2] [case testAttrsIncrementalSubclassingCachedConverter] from a import A import attr @attr.s class B(A): pass reveal_type(B) [file a.py] def converter(s:int) -> str: return 'hello' import attr @attr.s class A: x: str = 
attr.ib(converter=converter) [builtins fixtures/list.pyi] [out1] main:6: note: Revealed type is 'def (x: builtins.int) -> __main__.B' [out2] main:6: note: Revealed type is 'def (x: builtins.int) -> __main__.B' [case testAttrsIncrementalSubclassingCachedType] from a import A import attr @attr.s class B(A): pass reveal_type(B) [file a.py] import attr @attr.s class A: x = attr.ib(type=int) [builtins fixtures/list.pyi] [out1] main:6: note: Revealed type is 'def (x: builtins.int) -> __main__.B' [out2] main:6: note: Revealed type is 'def (x: builtins.int) -> __main__.B' [case testAttrsIncrementalArguments] from a import Frozen, NoInit, NoCmp f = Frozen(5) f.x = 6 g = NoInit() Frozen(1) < Frozen(2) Frozen(1) <= Frozen(2) Frozen(1) > Frozen(2) Frozen(1) >= Frozen(2) NoCmp(1) < NoCmp(2) NoCmp(1) <= NoCmp(2) NoCmp(1) > NoCmp(2) NoCmp(1) >= NoCmp(2) [file a.py] import attr @attr.s(frozen=True) class Frozen: x: int = attr.ib() @attr.s(init=False) class NoInit: x: int = attr.ib() @attr.s(eq=False) class NoCmp: x: int = attr.ib() [builtins fixtures/list.pyi] [rechecked] [stale] [out1] main:3: error: Property "x" defined in "Frozen" is read-only main:12: error: Unsupported left operand type for < ("NoCmp") main:13: error: Unsupported left operand type for <= ("NoCmp") main:14: error: Unsupported left operand type for > ("NoCmp") main:15: error: Unsupported left operand type for >= ("NoCmp") [out2] main:3: error: Property "x" defined in "Frozen" is read-only main:12: error: Unsupported left operand type for < ("NoCmp") main:13: error: Unsupported left operand type for <= ("NoCmp") main:14: error: Unsupported left operand type for > ("NoCmp") main:15: error: Unsupported left operand type for >= ("NoCmp") [case testAttrsIncrementalDunder] from a import A reveal_type(A) # N: Revealed type is 'def (a: builtins.int) -> a.A' reveal_type(A.__eq__) # N: Revealed type is 'def (self: a.A, other: builtins.object) -> builtins.bool' reveal_type(A.__ne__) # N: Revealed type is 'def (self: a.A, 
other: builtins.object) -> builtins.bool' reveal_type(A.__lt__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' reveal_type(A.__le__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' reveal_type(A.__gt__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' reveal_type(A.__ge__) # N: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' A(1) < A(2) A(1) <= A(2) A(1) > A(2) A(1) >= A(2) A(1) == A(2) A(1) != A(2) A(1) < 1 # E: Unsupported operand types for < ("A" and "int") A(1) <= 1 # E: Unsupported operand types for <= ("A" and "int") A(1) > 1 # E: Unsupported operand types for > ("A" and "int") A(1) >= 1 # E: Unsupported operand types for >= ("A" and "int") A(1) == 1 A(1) != 1 1 < A(1) # E: Unsupported operand types for > ("A" and "int") 1 <= A(1) # E: Unsupported operand types for >= ("A" and "int") 1 > A(1) # E: Unsupported operand types for < ("A" and "int") 1 >= A(1) # E: Unsupported operand types for <= ("A" and "int") 1 == A(1) 1 != A(1) [file a.py] from attr import attrib, attrs @attrs(auto_attribs=True) class A: a: int [builtins fixtures/attr.pyi] [rechecked] [stale] [out2] main:2: note: Revealed type is 'def (a: builtins.int) -> a.A' main:3: note: Revealed type is 'def (self: a.A, other: builtins.object) -> builtins.bool' main:4: note: Revealed type is 'def (self: a.A, other: builtins.object) -> builtins.bool' main:5: note: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' main:6: note: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' main:7: note: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' main:8: note: Revealed type is 'def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool' main:17: error: Unsupported operand types for < ("A" and "int") main:18: error: Unsupported operand types for <= ("A" and "int") main:19: error: Unsupported operand types for > ("A" 
and "int") main:20: error: Unsupported operand types for >= ("A" and "int") main:24: error: Unsupported operand types for > ("A" and "int") main:25: error: Unsupported operand types for >= ("A" and "int") main:26: error: Unsupported operand types for < ("A" and "int") main:27: error: Unsupported operand types for <= ("A" and "int") [case testAttrsIncrementalSubclassModified] from b import B B(5, 'foo') [file a.py] import attr @attr.s(auto_attribs=True) class A: x: int [file b.py] import attr from a import A @attr.s(auto_attribs=True) class B(A): y: str [file b.py.2] import attr from a import A @attr.s(auto_attribs=True) class B(A): y: int [builtins fixtures/list.pyi] [out1] [out2] main:2: error: Argument 2 to "B" has incompatible type "str"; expected "int" [rechecked b] [case testAttrsIncrementalSubclassModifiedErrorFirst] from b import B B(5, 'foo') [file a.py] import attr @attr.s(auto_attribs=True) class A: x: int [file b.py] import attr from a import A @attr.s(auto_attribs=True) class B(A): y: int [file b.py.2] import attr from a import A @attr.s(auto_attribs=True) class B(A): y: str [builtins fixtures/list.pyi] [out1] main:2: error: Argument 2 to "B" has incompatible type "str"; expected "int" [out2] [rechecked b] [case testAttrsIncrementalThreeFiles] from c import C C(5, 'foo', True) [file a.py] import attr @attr.s class A: a: int = attr.ib() [file b.py] import attr @attr.s class B: b: str = attr.ib() [file c.py] from a import A from b import B import attr @attr.s class C(A, B): c: bool = attr.ib() [builtins fixtures/list.pyi] [out1] [out2] [case testAttrsIncrementalConverterInSubmodule] from a.a import A reveal_type(A) [file a/__init__.py] [file a/a.py] from typing import Optional def converter(s:Optional[int]) -> int: ... 
import attr @attr.s class A: x: int = attr.ib(converter=converter) [builtins fixtures/list.pyi] [out1] main:2: note: Revealed type is 'def (x: Union[builtins.int, None]) -> a.a.A' [out2] main:2: note: Revealed type is 'def (x: Union[builtins.int, None]) -> a.a.A' [case testAttrsIncrementalConverterManyStyles] import a [file a.py] from base import Base Base(1, 'str', True) Base(None, None, None) from subclass import A, B A(1, 'str', True) A(None, None, None) B(1, 'str', True, 1, 'str', True) B(None, None, None, None, None, None) from submodule.base import SubBase SubBase(1, 'str', True) SubBase(None, None, None) from submodule.subclass import AA, BB AA(1, 'str', True) AA(None, None, None) BB(1, 'str', True, 1, 'str', True) BB(None, None, None, None, None, None) from submodule.subsubclass import SubAA, SubBB SubAA(1, 'str', True) SubAA(None, None, None) SubBB(1, 'str', True, 1, 'str', True) SubBB(None, None, None, None, None, None) [file a.py.2] # Now with errors. from base import Base Base(1, 1, True) from subclass import A, B A(1, 1, True) B(1, 'str', True, 1, 1, True) from submodule.base import SubBase SubBase(1, 1, True) from submodule.subclass import AA, BB AA(1, 1, True) BB(1, 'str', True, 1, 1, True) from submodule.subsubclass import SubAA, SubBB SubAA(1, 1, True) SubBB(1, 'str', True, 1, 1, True) [file foo.py] from typing import Optional def maybe_int(x: Optional[int]) -> int: ... [file bar.py] from typing import Optional def maybe_bool(x: Optional[bool]) -> bool: ... [file base.py] from typing import Optional import attr import bar from foo import maybe_int def maybe_str(x: Optional[str]) -> str: ... @attr.s class Base: x: int = attr.ib(converter=maybe_int) y: str = attr.ib(converter=maybe_str) z: bool = attr.ib(converter=bar.maybe_bool) [file subclass.py] from typing import Optional import attr from base import Base @attr.s class A(Base): pass import bar from foo import maybe_int def maybe_str(x: Optional[str]) -> str: ... 
@attr.s class B(Base): xx: int = attr.ib(converter=maybe_int) yy: str = attr.ib(converter=maybe_str) zz: bool = attr.ib(converter=bar.maybe_bool) [file submodule/__init__.py] [file submodule/base.py] from typing import Optional import attr import bar from foo import maybe_int def maybe_str(x: Optional[str]) -> str: ... @attr.s class SubBase: x: int = attr.ib(converter=maybe_int) y: str = attr.ib(converter=maybe_str) z: bool = attr.ib(converter=bar.maybe_bool) [file submodule/subclass.py] from typing import Optional import attr from base import Base @attr.s class AA(Base): pass import bar from foo import maybe_int def maybe_str(x: Optional[str]) -> str: ... @attr.s class BB(Base): xx: int = attr.ib(converter=maybe_int) yy: str = attr.ib(converter=maybe_str) zz: bool = attr.ib(converter=bar.maybe_bool) [file submodule/subsubclass.py] from typing import Optional import attr from .base import SubBase @attr.s class SubAA(SubBase): pass import bar from foo import maybe_int def maybe_str(x: Optional[str]) -> str: ... 
@attr.s class SubBB(SubBase): xx: int = attr.ib(converter=maybe_int) yy: str = attr.ib(converter=maybe_str) zz: bool = attr.ib(converter=bar.maybe_bool) [builtins fixtures/list.pyi] [out1] [out2] tmp/a.py:3: error: Argument 2 to "Base" has incompatible type "int"; expected "Optional[str]" tmp/a.py:6: error: Argument 2 to "A" has incompatible type "int"; expected "Optional[str]" tmp/a.py:7: error: Argument 5 to "B" has incompatible type "int"; expected "Optional[str]" tmp/a.py:10: error: Argument 2 to "SubBase" has incompatible type "int"; expected "Optional[str]" tmp/a.py:13: error: Argument 2 to "AA" has incompatible type "int"; expected "Optional[str]" tmp/a.py:14: error: Argument 5 to "BB" has incompatible type "int"; expected "Optional[str]" tmp/a.py:17: error: Argument 2 to "SubAA" has incompatible type "int"; expected "Optional[str]" tmp/a.py:18: error: Argument 5 to "SubBB" has incompatible type "int"; expected "Optional[str]" [case testAttrsIncrementalConverterInFunction] import attr def foo() -> None: def foo(x: str) -> int: ... @attr.s class A: x: int = attr.ib(converter=foo) reveal_type(A) [builtins fixtures/list.pyi] [out1] main:8: note: Revealed type is 'def (x: builtins.str) -> __main__.A@6' [out2] main:8: note: Revealed type is 'def (x: builtins.str) -> __main__.A@6' -- FIXME: new analyzer busted [case testAttrsIncrementalConverterInSubmoduleForwardRef-skip] from a.a import A reveal_type(A) [file a/__init__.py] [file a/a.py] from typing import List def converter(s:F) -> int: ... 
import attr @attr.s class A: x: int = attr.ib(converter=converter) F = List[int] [builtins fixtures/list.pyi] [out1] main:3: note: Revealed type is 'def (x: builtins.list[builtins.int]) -> a.a.A' [out2] main:3: note: Revealed type is 'def (x: builtins.list[builtins.int]) -> a.a.A' -- FIXME: new analyzer busted [case testAttrsIncrementalConverterType-skip] from a import C import attr o = C("1", "2", "3", "4") o = C(1, 2, "3", 4) reveal_type(C) @attr.s class D(C): x: str = attr.ib() reveal_type(D) [file a.py] from typing import overload import attr @attr.dataclass class A: x: str @overload def parse(x: int) -> int: ... @overload def parse(x: str, y: str = '') -> int: ... def parse(x, y): ... @attr.s class C: a: complex = attr.ib(converter=complex) b: int = attr.ib(converter=int) c: A = attr.ib(converter=A) d: int = attr.ib(converter=parse) [builtins fixtures/attr.pyi] [out1] main:6: note: Revealed type is 'def (a: Union[builtins.float, builtins.str], b: Union[builtins.str, builtins.bytes, builtins.int], c: builtins.str, d: Union[builtins.int, builtins.str]) -> a.C' main:10: note: Revealed type is 'def (a: Union[builtins.float, builtins.str], b: Union[builtins.str, builtins.bytes, builtins.int], c: builtins.str, d: Union[builtins.int, builtins.str], x: builtins.str) -> __main__.D' [out2] main:6: note: Revealed type is 'def (a: Union[builtins.float, builtins.str], b: Union[builtins.str, builtins.bytes, builtins.int], c: builtins.str, d: Union[builtins.int, builtins.str]) -> a.C' main:10: note: Revealed type is 'def (a: Union[builtins.float, builtins.str], b: Union[builtins.str, builtins.bytes, builtins.int], c: builtins.str, d: Union[builtins.int, builtins.str], x: builtins.str) -> __main__.D' [case testAttrsIncrementalThreeRuns] from a import A A(5) [file a.py] import attr @attr.s(auto_attribs=True) class A: a: int [file a.py.2] import attr @attr.s(auto_attribs=True) class A: a: str [file a.py.3] import attr @attr.s(auto_attribs=True) class A: a: int = 6 [builtins 
fixtures/list.pyi] [out1] [out2] main:2: error: Argument 1 to "A" has incompatible type "int"; expected "str" [out3] [case testDeletedDepLineNumber] # The import is not on line 1 and that data should be preserved import a [file a.py] [delete a.py.2] [out1] [out2] main:2: error: Cannot find implementation or library stub for module named 'a' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testIncrementalInheritanceAddAnnotation] # flags: --strict-optional import a [file a.py] import b def foo() -> None: 1 + b.Bar().get() [file b.py] from c import Baz class Bar(Baz): pass [file c.py] class Baz: def get(self): return 1 [file c.py.2] from typing import Optional class Baz: def get(self) -> Optional[int]: return 1 [out] [out2] tmp/a.py:3: error: Unsupported operand types for + ("int" and "None") tmp/a.py:3: note: Right operand is of type "Optional[int]" [case testIncrementalMetaclassUpdate] import a [file a.py] from b import B B.x [file b.py] import c class B(metaclass=c.M): pass [file c.py] class M(type): x: int [file c.py.2] class M(type): y: int [out] [out2] tmp/a.py:2: error: "Type[B]" has no attribute "x" [case testIncrementalLotsOfInheritance] import a [file a.py] from b import B from d import D def take(d: D) -> None: pass def foo() -> None: take(B()) [file b.py] from c import C class B(C): pass [file c.py] from d import D class C(D): pass [file c.py.2] from d import D class C: pass [file d.py] class D: pass [out] [out2] tmp/a.py:5: error: Argument 1 to "take" has incompatible type "B"; expected "D" [case testIncrementalInheritanceProperty] import a [file a.py] import b def foo() -> None: 1 + b.Bar().x [file b.py] from c import Baz class Bar(Baz): pass [file c.py] class Baz: def __init__(self): self.x = 12 # type: int [file c.py.2] class Baz: def __init__(self): self.x = 'lol' # type: str [out] [out2] tmp/a.py:3: error: Unsupported operand types for + ("int" and "str") [case testIncrementalWithIgnoresTwice] import a 
[file a.py] import b import foo # type: ignore [file b.py] x = 1 [file b.py.2] x = 'hi' [file b.py.3] x = 1 [builtins fixtures/module.pyi] [out] [out2] [out3] [case testIgnoredImport2] import x [file y.py] import xyz # type: ignore B = 0 from x import A [file x.py] A = 0 from y import B [file x.py.2] A = 1 from y import B [file x.py.3] A = 2 from y import B [out] [out2] [out3] [case testDeletionOfSubmoduleTriggersImportFrom2] from p.q import f f() [file p/__init__.py] [file p/q.py] def f() -> None: pass [delete p/q.py.2] [file p/q.py.3] def f(x: int) -> None: pass [out] [out2] main:1: error: Cannot find implementation or library stub for module named 'p.q' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [out3] main:2: error: Too few arguments for "f" [case testDeleteIndirectDependency] import b b.x.foo() [file b.py] import c x = c.Foo() [file c.py] class Foo: def foo(self) -> None: pass [delete c.py.2] [file b.py.2] class Foo: def foo(self) -> None: pass x = Foo() [out] [out2] [case testImportReExportInCycle] from m import One [file m/__init__.py] from .one import One from .two import Two [file m/one.py] class One: pass [file m/two.py] import m class Two: pass [file m/one.py.2] class One: name: str [file m/two.py.2] import m reveal_type(m.One.name) class Two: pass [out2] tmp/m/two.py:2: note: Revealed type is 'builtins.str' [case testImportUnusedIgnore1] # flags: --warn-unused-ignores import a [file a.py] import b import foo # type: ignore [file b.py] x = 1 [file b.py.2] x = '2' [case testImportUnusedIgnore2] # flags: --warn-unused-ignores import a [file a.py] import b import c # type: ignore [file b.py] x = 1 [file b.py.2] x = 'hi' [file c.py.3] pass [out] [out2] [out3] tmp/a.py:2: error: unused 'type: ignore' comment -- Test that a non cache_fine_grained run can use a fine-grained cache [case testRegularUsesFgCache] # flags: --config-file tmp/mypy.ini import a [file a.py] x = 0 [file mypy.ini] \[mypy] cache_fine_grained = 
True [file mypy.ini.2] \[mypy] cache_fine_grained = False -- Nothing should get rechecked [rechecked] [stale] [case testFgCacheNeedsFgCache] # flags: --config-file tmp/mypy.ini import a [file a.py] x = 0 [file mypy.ini] \[mypy] cache_fine_grained = False [file mypy.ini.2] \[mypy] cache_fine_grained = True [rechecked a, builtins, typing] [stale a, builtins, typing] [case testIncrementalPackageNameOverload] # cmd: mypy -m main a # flags: --follow-imports=skip [file main.py] from a import x x.foo() [file a/__init__.py] pass [file a/__init__.py.2] x = 10 [file a/x.py] def foo() -> None: pass [out] [out2] tmp/main.py:2: error: "int" has no attribute "foo" [case testIncrementalFineGrainedCacheError1] # flags: --cache-fine-grained --no-sqlite-cache import a [file a.py] [file b.py] x = 0 [file a.py.2] from b import x 1 + 'lol' [out] [out2] tmp/a.py:2: error: Unsupported operand types for + ("int" and "str") [case testIncrementalBustedFineGrainedCache1] # flags: --cache-fine-grained --no-sqlite-cache import a import b [file a.py] [file b.py] -- This is a heinous hack, but we simulate having a invalid cache by clobbering -- the proto deps file with something with mtime mismatches. 
[file ../.mypy_cache/3.6/@deps.meta.json.2] {"snapshot": {"__main__": "a7c958b001a45bd6a2a320f4e53c4c16", "a": "d41d8cd98f00b204e9800998ecf8427e", "b": "d41d8cd98f00b204e9800998ecf8427e", "builtins": "c532c89da517a4b779bcf7a964478d67"}, "deps_meta": {"@root": {"path": "@root.deps.json", "mtime": 0}, "__main__": {"path": "__main__.deps.json", "mtime": 0}, "a": {"path": "a.deps.json", "mtime": 0}, "b": {"path": "b.deps.json", "mtime": 0}, "builtins": {"path": "builtins.deps.json", "mtime": 0}}} [file b.py.2] # uh -- Every file should get reloaded, since the cache was invalidated [stale a, b, builtins, typing] [rechecked a, b, builtins, typing] [case testIncrementalBustedFineGrainedCache2] # flags2: --cache-fine-grained import a import b [file a.py] [file b.py] [file b.py.2] # uh -- Every file should get reloaded, since the settings changed [stale a, b, builtins, typing] [rechecked a, b, builtins, typing] [case testIncrementalBustedFineGrainedCache3] # flags: --cache-fine-grained --no-sqlite-cache import a import b [file a.py] [file b.py] -- This is a heinous hack, but we simulate having a invalid cache by deleting -- the proto deps file. 
[delete ../.mypy_cache/3.6/@deps.meta.json.2] [file b.py.2] # uh -- Every file should get reloaded, since the cache was invalidated [stale a, b, builtins, typing] [rechecked a, b, builtins, typing] [case testIncrementalWorkingFineGrainedCache] # flags: --cache-fine-grained # flags2: --cache-fine-grained import a import b [file a.py] [file b.py] [file b.py.2] # uh -- b gets rechecked because it changed, but nothing is stale -- since the interface did not change [stale] [rechecked b] [case testIncrementalDataclassesSubclassingCached] from a import A from dataclasses import dataclass @dataclass class B(A): e: str = 'e' a = B(5, [5], 'foo') a.a = 6 a._b = [2] a.c = 'yo' a._d = 22 a.e = 'hi' [file a.py] from dataclasses import dataclass, field from typing import ClassVar, List @dataclass class A: a: int _b: List[int] c: str = '18' _d: int = field(default=False) E = 7 F: ClassVar[int] = 22 [builtins fixtures/list.pyi] [out1] [out2] [case testIncrementalDataclassesSubclassingCachedType] import b [file b.py] from a import A from dataclasses import dataclass @dataclass class B(A): pass [file b.py.2] from a import A from dataclasses import dataclass @dataclass class B(A): pass reveal_type(B) [file a.py] from dataclasses import dataclass @dataclass class A: x: int [builtins fixtures/list.pyi] [out1] [out2] tmp/b.py:8: note: Revealed type is 'def (x: builtins.int) -> b.B' [case testIncrementalDataclassesArguments] import b [file b.py] from a import Frozen, NoInit, NoCmp [file b.py.2] from a import Frozen, NoInit, NoCmp f = Frozen(5) f.x = 6 g = NoInit() Frozen(1) < Frozen(2) Frozen(1) <= Frozen(2) Frozen(1) > Frozen(2) Frozen(1) >= Frozen(2) NoCmp(1) < NoCmp(2) NoCmp(1) <= NoCmp(2) NoCmp(1) > NoCmp(2) NoCmp(1) >= NoCmp(2) [file a.py] from dataclasses import dataclass @dataclass(frozen=True, order=True) class Frozen: x: int @dataclass(init=False) class NoInit: x: int @dataclass(order=False) class NoCmp: x: int [builtins fixtures/list.pyi] [out1] [out2] tmp/b.py:4: error: 
Property "x" defined in "Frozen" is read-only tmp/b.py:13: error: Unsupported left operand type for < ("NoCmp") tmp/b.py:14: error: Unsupported left operand type for <= ("NoCmp") tmp/b.py:15: error: Unsupported left operand type for > ("NoCmp") tmp/b.py:16: error: Unsupported left operand type for >= ("NoCmp") [case testIncrementalDataclassesDunder] import b [file b.py] from a import A [file b.py.2] from a import A reveal_type(A) reveal_type(A.__eq__) reveal_type(A.__ne__) reveal_type(A.__lt__) reveal_type(A.__le__) reveal_type(A.__gt__) reveal_type(A.__ge__) A(1) < A(2) A(1) <= A(2) A(1) > A(2) A(1) >= A(2) A(1) == A(2) A(1) != A(2) A(1) < 1 A(1) <= 1 A(1) > 1 A(1) >= 1 A(1) == 1 A(1) != 1 1 < A(1) 1 <= A(1) 1 > A(1) 1 >= A(1) 1 == A(1) 1 != A(1) [file a.py] from dataclasses import dataclass @dataclass(order=True) class A: a: int [builtins fixtures/attr.pyi] [out1] [out2] tmp/b.py:3: note: Revealed type is 'def (a: builtins.int) -> a.A' tmp/b.py:4: note: Revealed type is 'def (builtins.object, builtins.object) -> builtins.bool' tmp/b.py:5: note: Revealed type is 'def (builtins.object, builtins.object) -> builtins.bool' tmp/b.py:6: note: Revealed type is 'def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool' tmp/b.py:7: note: Revealed type is 'def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool' tmp/b.py:8: note: Revealed type is 'def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool' tmp/b.py:9: note: Revealed type is 'def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool' tmp/b.py:18: error: Unsupported operand types for < ("A" and "int") tmp/b.py:19: error: Unsupported operand types for <= ("A" and "int") tmp/b.py:20: error: Unsupported operand types for > ("A" and "int") tmp/b.py:21: error: Unsupported operand types for >= ("A" and "int") tmp/b.py:25: error: Unsupported operand types for > ("A" and "int") tmp/b.py:26: error: Unsupported operand types for >= ("A" and "int") tmp/b.py:27: error: Unsupported operand types for < ("A" and "int") 
tmp/b.py:28: error: Unsupported operand types for <= ("A" and "int") [case testIncrementalDataclassesSubclassModified] from b import B B(5, 'foo') [file a.py] from dataclasses import dataclass @dataclass class A: x: int [file b.py] from a import A from dataclasses import dataclass @dataclass class B(A): y: str [file b.py.2] from a import A from dataclasses import dataclass @dataclass class B(A): y: int [builtins fixtures/list.pyi] [out1] [out2] main:2: error: Argument 2 to "B" has incompatible type "str"; expected "int" [rechecked b] [case testIncrementalDataclassesSubclassModifiedErrorFirst] from b import B B(5, 'foo') [file a.py] from dataclasses import dataclass @dataclass class A: x: int [file b.py] from a import A from dataclasses import dataclass @dataclass class B(A): y: int [file b.py.2] from a import A from dataclasses import dataclass @dataclass class B(A): y: str [builtins fixtures/list.pyi] [out1] main:2: error: Argument 2 to "B" has incompatible type "str"; expected "int" [out2] [rechecked b] [case testIncrementalDataclassesThreeFiles] from c import C C('foo', 5, True) [file a.py] from dataclasses import dataclass @dataclass class A: a: int [file b.py] from dataclasses import dataclass @dataclass class B: b: str [file b.py.2] from dataclasses import dataclass @dataclass class B: b: str c: str [file c.py] from a import A from b import B from dataclasses import dataclass @dataclass class C(A, B): c: bool [builtins fixtures/list.pyi] [out1] [out2] tmp/c.py:7: error: Incompatible types in assignment (expression has type "bool", base class "B" defined the type as "str") main:2: error: Argument 2 to "C" has incompatible type "int"; expected "bool" [case testIncrementalDataclassesThreeRuns] from a import A A(5) [file a.py] from dataclasses import dataclass @dataclass class A: a: int [file a.py.2] from dataclasses import dataclass @dataclass class A: a: str [file a.py.3] from dataclasses import dataclass @dataclass class A: a: int = 6 [builtins 
fixtures/list.pyi] [out1] [out2] main:2: error: Argument 1 to "A" has incompatible type "int"; expected "str" [out3] [case testParentPatchingMess] # flags: --ignore-missing-imports --follow-imports=skip # cmd: mypy -m d d.k d.k.a d.k.v t [file d/__init__.py] [file d/k/__init__.py] from d.k.a import x [file d/k/a.py] x = 10 [file d/k/v.py] from d.k.e import x [file t.py] from d import k [file t.py.2] from d import k # dummy change [case testCachedBadProtocolNote] import b [file a.py] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) [file b.py] from typing import Iterable from a import Point p: Point it: Iterable[int] = p [file b.py.2] from typing import Iterable from a import Point p: Point it: Iterable[int] = p # change [typing fixtures/typing-full.pyi] [builtins fixtures/dict.pyi] [out] tmp/b.py:4: error: Incompatible types in assignment (expression has type "Point", variable has type "Iterable[int]") tmp/b.py:4: note: Following member(s) of "Point" have conflicts: tmp/b.py:4: note: Expected: tmp/b.py:4: note: def __iter__(self) -> Iterator[int] tmp/b.py:4: note: Got: tmp/b.py:4: note: def __iter__(self) -> Iterator[str] [out2] tmp/b.py:4: error: Incompatible types in assignment (expression has type "Point", variable has type "Iterable[int]") tmp/b.py:4: note: Following member(s) of "Point" have conflicts: tmp/b.py:4: note: Expected: tmp/b.py:4: note: def __iter__(self) -> Iterator[int] tmp/b.py:4: note: Got: tmp/b.py:4: note: def __iter__(self) -> Iterator[str] [case testIndirectDepsAlwaysPatched-writescache] # flags: --no-incremental # flags2: --incremental from b import C def f() -> None: x: int = C().x [file b.py] from c import C [file c.pyi] class C: x: int [file c.pyi.2] class C: x: str [out] [out2] main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testBazelFlagIgnoresFileChanges-skip] -- This test fails on windows, when the mypy source in on a different drive than 
-- the run-directory. In this case os.path.relpath(...) fails with an exception -- Since the initial run wrote a cache file, the second run ignores the source # flags: --bazel from a import f f() [file a.py] def f(): pass [file a.py.2] [out] [out2] [case testModuleGetattrInitIncremental] import c [file c.py] import a.b x = a.b.f() [file c.py.2] import a.b x = a.b.f() # touch [file a/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [builtins fixtures/module.pyi] [out] [out2] [case testModuleGetattrInitIncremental2] import c [file c.py] import a.b.c [file c.py.2] import a.b.c # touch [file a/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [file a/b.pyi] # empty [builtins fixtures/module.pyi] [out] tmp/c.py:1: error: Cannot find implementation or library stub for module named 'a.b.c' tmp/c.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [out2] tmp/c.py:1: error: Cannot find implementation or library stub for module named 'a.b.c' tmp/c.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testAddedMissingStubs] # flags: --ignore-missing-imports from missing import f f(int()) [file missing.pyi.2] def f(x: str) -> None: pass [out] [out2] main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddedMissingStubsPackage] # flags: --ignore-missing-imports import package.missing package.missing.f(int()) [file package/__init__.pyi.2] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] [out2] main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddedMissingStubsPackageFrom] # flags: --ignore-missing-imports from package import missing missing.f(int()) [file package/__init__.pyi.2] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] [out2] main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddedMissingStubsPackagePartial] # 
flags: --ignore-missing-imports import package.missing package.missing.f(int()) [file package/__init__.pyi] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] [out2] main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddedMissingStubsPackagePartialGetAttr] import package.missing package.missing.f(int()) [file package/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [file package/missing.pyi.2] def f(x: str) -> None: pass [out] [out2] main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddedMissingStubsIgnore] from missing import f # type: ignore f(int()) [file missing.pyi.2] def f(x: str) -> None: pass [out] [out2] main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddedMissingStubsIgnorePackage] import package.missing # type: ignore package.missing.f(int()) [file package/__init__.pyi.2] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] [out2] main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddedMissingStubsIgnorePackageFrom] from package import missing # type: ignore missing.f(int()) [file package/__init__.pyi.2] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] [out2] main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddedMissingStubsIgnorePackagePartial] import package.missing # type: ignore package.missing.f(int()) [file package/__init__.pyi] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] [out2] main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" -- Test cases for final qualifier [case testFinalAddFinalVarAssign] import mod from a import D from mod import x mod.x = 2 # This an all below are errors. 
x = 2 d: D d.y = 2 d.z = 2 D.y = 2 [file a.py] import mod class D(mod.C): pass [file mod.py] x = 1 class C: y = 1 def __init__(self) -> None: self.z = 1 [file mod.py.2] from typing import Final x: Final = 1 class C: y: Final = 1 def __init__(self) -> None: self.z: Final = 1 [out] [out2] main:5: error: Cannot assign to final name "x" main:6: error: Cannot assign to final name "x" main:8: error: Cannot assign to final attribute "y" main:9: error: Cannot assign to final attribute "z" main:10: error: Cannot assign to final attribute "y" [case testFinalAddFinalVarOverride] from mod import C class D(C): x = 2 def __init__(self) -> None: self.y = 2 class E(C): y = 2 def __init__(self) -> None: self.x = 2 [file mod.py] class C: x = 1 def __init__(self) -> None: self.y = 1 [file mod.py.2] from typing import Final class C: x: Final = 1 def __init__(self) -> None: self.y: Final = 1 [out] [out2] main:4: error: Cannot assign to final name "x" main:6: error: Cannot assign to final attribute "y" main:8: error: Cannot assign to final name "y" main:10: error: Cannot assign to final attribute "x" [case testFinalAddFinalMethodOverride] from mod import C class D(C): def meth(self) -> int: ... [file mod.py] class C: def meth(self) -> int: ... [file mod.py.2] from typing import final class C: @final def meth(self) -> int: ... 
[out] [out2] main:4: error: Cannot override final attribute "meth" (previously declared in base class "C") -- These tests should just not crash [case testOverrideByBadVar] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] class Slow: pass s: Slow from cext import Slow # type: ignore [out] [out2] [case testOverrideByBadVarAlias] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] class Slow: pass A = Slow from cext import Slow # type: ignore [out] [out2] [case testOverrideByBadVarClass] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] class C: class Slow: pass s: Slow from cext import Slow # type: ignore [out] [out2] [case testOverrideByBadVarClassAlias] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] class C: class Slow: pass A = Slow from cext import Slow # type: ignore [out] [out2] [case testOverrideByBadVarExisting] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] class Slow: pass s: Slow from cext import Slow # type: ignore [file cext.py] Slow = 1 [out] [out2] [case testOverrideByBadVarAliasExisting] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] class Slow: pass A = Slow from cext import Slow # type: ignore [file cext.py] Slow = 1 [out] [out2] [case testOverrideByBadFunction] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] class C: class Slow: pass s: Slow def Slow() -> None: ... 
# type: ignore [out] [out2] [case testOverrideByBadVarLocal] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] def outer() -> None: class Slow: pass s: Slow from cext import Slow # type: ignore [out] [out2] [case testRecursiveAliasImported] import a [file a.py] import lib x: int [file a.py.2] import lib x: lib.A reveal_type(x) [file lib.pyi] from typing import List MYPY = False if MYPY: # Force processing order from other import B A = List[B] # type: ignore [file other.pyi] from typing import List from lib import A B = List[A] [builtins fixtures/list.pyi] [out] tmp/lib.pyi:4: error: Module 'other' has no attribute 'B' tmp/other.pyi:3: error: Cannot resolve name "B" (possible cyclic definition) [out2] tmp/lib.pyi:4: error: Module 'other' has no attribute 'B' tmp/other.pyi:3: error: Cannot resolve name "B" (possible cyclic definition) tmp/a.py:3: note: Revealed type is 'builtins.list[Any]' [case testRecursiveNamedTupleTypedDict-skip] # https://github.com/python/mypy/issues/7125 import a [file a.py] import lib x: int [file a.py.2] import lib x: lib.A reveal_type(x.x['x']) [file lib.pyi] from typing import NamedTuple from other import B A = NamedTuple('A', [('x', B)]) # type: ignore [file other.pyi] from mypy_extensions import TypedDict from lib import A B = TypedDict('B', {'x': A}) [builtins fixtures/dict.pyi] [out] [out2] tmp/a.py:3: note: Revealed type is 'Tuple[TypedDict('other.B', {'x': Any}), fallback=lib.A]' [case testFollowImportSkipNotInvalidatedOnPresent] # flags: --follow-imports=skip # cmd: mypy -m main [file main.py] import other [file other.py] x = 1 [file other.py.2] x = 'hi' [stale] [rechecked] [case testFollowImportSkipNotInvalidatedOnPresentPackage] # flags: --follow-imports=skip # cmd: mypy -m main [file main.py] import other [file other/__init__.py] x = 1 [file other/__init__.py.2] x = 'hi' [stale] [rechecked] [case testFollowImportSkipNotInvalidatedOnAdded] # flags: --follow-imports=skip --ignore-missing-imports # 
cmd: mypy -m main [file main.py] import other [file other.py.2] x = 1 [stale] [rechecked] [case testFollowImportSkipInvalidatedOnAddedStub] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy -m main [file main.py] import other [file other.pyi.2] x = 1 [stale main, other] [rechecked main, other] [case testFollowImportSkipNotInvalidatedOnAddedStubOnFollowForStubs] # flags: --follow-imports=skip --ignore-missing-imports --config-file=tmp/mypy.ini # cmd: mypy -m main [file main.py] import other [file other.pyi.2] x = 1 [file mypy.ini] \[mypy] follow_imports_for_stubs = True [stale] [rechecked] [case testAddedSkippedStubsPackageFrom] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy -m main # cmd2: mypy -m main package package.missing [file main.py] from package import missing missing.f(int()) [file package/__init__.py] [file package/missing.py] def f(x: str) -> None: pass [out] [out2] tmp/main.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testOverrideByIdemAlias] # https://github.com/python/mypy/issues/6404 import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] C = C # type: ignore class C: # type: ignore pass [out] [out2] [case testOverrideByIdemAliasReversed] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] class C: pass C = C # type: ignore x: C [out] [out2] [case testOverrideByIdemAliasGeneric] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] from typing import Generic, TypeVar T = TypeVar('T') class C(Generic[T]): pass C = C[int] # type: ignore x: C [out] [out2] [case testOverrideByIdemAliasImported] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] from other import C C = C # type: ignore x: C [file other.py] class C: pass [out] [out2] [case testOverrideByIdemAliasImportedReversed] import a [file a.py] import lib x = 1 [file a.py.2] import lib x = 2 [file lib.py] C = 
C # type: ignore from other import C [file other.py] class C: pass [out] [out2] [case testConditionalExceptionAliasOverride] import a [file a.py] import lib try: x = 1 except lib.Exception as e: pass [file a.py.2] import lib try: x = 2 except lib.Exception as e: pass [file lib.py] try: Exception = Exception except BaseException: class Exception(BaseException): pass # type: ignore try: pass except Exception as e: pass [builtins fixtures/exception.pyi] [out] [out2] [case testBadEnumLoading] import a [file a.py] from b import E x: E y = 1 [file a.py.2] from b import E x: E y = 2 [file b.py] from typing import List from enum import Enum def f() -> List[str]: ... E = Enum('E', f()) # type: ignore [builtins fixtures/list.pyi] [out] [out2] [case testChangedPluginsInvalidateCache] # flags: --config-file tmp/mypy.ini import a [file a.py] from b import x y: int = x [file a.py.2] from b import x y: int = x touch = 1 [file b.py] class C: ... def f() -> C: ... x = f() [file basic_plugin.py] from mypy.plugin import Plugin class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname.endswith('.f'): return my_hook assert fullname is not None return None def my_hook(ctx): return ctx.api.named_generic_type('builtins.int', []) def plugin(version): return MyPlugin [file basic_plugin.py.2] from mypy.plugin import Plugin class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname.endswith('.f'): return my_hook assert fullname is not None return None def my_hook(ctx): return ctx.api.named_generic_type('builtins.str', []) def plugin(version): return MyPlugin [file mypy.ini] \[mypy] plugins=basic_plugin.py [out] [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testChangedPluginsInvalidateCache2] # flags: --config-file tmp/mypy.ini import a [file a.py] from b import x y: int = x [file a.py.2] from b import x y: int = x touch = 1 [file b.py] class C: ... def f() -> C: ... 
x = f() [file basic_plugin.py] from mypy.plugin import Plugin from version_plugin import __version__, choice class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname.endswith('.f'): return my_hook assert fullname is not None return None def my_hook(ctx): if choice: return ctx.api.named_generic_type('builtins.int', []) else: return ctx.api.named_generic_type('builtins.str', []) def plugin(version): return MyPlugin [file version_plugin.py] __version__ = 0.1 choice = True [file version_plugin.py.2] __version__ = 0.2 choice = False [file mypy.ini] \[mypy] plugins=basic_plugin.py [out] [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testAddedPluginsInvalidateCache] # flags: --config-file tmp/mypy.ini import a [file a.py] from b import x y: int = x [file a.py.2] from b import x y: int = x touch = 1 [file b.py] def f() -> int: ... x = f() [file basic_plugin.py] from mypy.plugin import Plugin class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname.endswith('.f'): return my_hook assert fullname is not None return None def my_hook(ctx): return ctx.api.named_generic_type('builtins.str', []) def plugin(version): return MyPlugin [file mypy.ini] \[mypy] python_version=3.6 [file mypy.ini.2] \[mypy] python_version=3.6 plugins=basic_plugin.py [out] [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testRemovedPluginsInvalidateCache] # flags: --config-file tmp/mypy.ini import a [file a.py] from b import x y: str = x [file a.py.2] from b import x y: str = x touch = 1 [file b.py] def f() -> int: ... 
x = f() [file basic_plugin.py] from mypy.plugin import Plugin class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname.endswith('.f'): return my_hook assert fullname is not None return None def my_hook(ctx): return ctx.api.named_generic_type('builtins.str', []) def plugin(version): return MyPlugin [file mypy.ini] \[mypy] python_version=3.6 plugins=basic_plugin.py [file mypy.ini.2] \[mypy] python_version=3.6 [out] [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testPluginConfigData] # flags: --config-file tmp/mypy.ini import a import b [file a.py] [file b.py] [file test.json] {"a": false, "b": false} [file test.json.2] {"a": true, "b": false} [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/config_data.py # The config change will force a to be rechecked but not b. [rechecked a] [case testLiteralIncrementalTurningIntoLiteral] import mod reveal_type(mod.a) [file mod.py] from typing_extensions import Literal a = 1 [file mod.py.2] from typing_extensions import Literal a: Literal[2] = 2 [out] main:2: note: Revealed type is 'builtins.int' [out2] main:2: note: Revealed type is 'Literal[2]' [case testAddedSubStarImport] # cmd: mypy -m a pack pack.mod b # cmd2: mypy -m other [file a.py] from pack import * [file pack/__init__.py] [file pack/mod.py] [file b.py] import pack.mod [file other.py] import a [out] [out2] [case testNewAnalyzerIncrementalBrokenNamedTuple] import a [file a.py] from b import NT x: NT [file a.py.2] from b import NT x: NT reveal_type(x) [file b.py] from typing import NamedTuple NT = NamedTuple('BadName', [('x', int)]) [out] [out2] tmp/a.py:3: note: Revealed type is 'Tuple[builtins.int, fallback=b.BadName@2]' [case testNewAnalyzerIncrementalBrokenNamedTupleNested] import a [file a.py] from b import C x: C [file a.py.2] from b import C x: C # touch [file b.py] class C: ... 
from collections import namedtuple def test() -> None: NT = namedtuple('BadName', ['x', 'y']) [builtins fixtures/list.pyi] [out] [out2] [case testNewAnalyzerIncrementalMethodNamedTuple] import a [file a.py] from b import C x: C [file a.py.2] from b import C x: C reveal_type(x.h) [file b.py] from typing import NamedTuple class C: def __init__(self) -> None: self.h: Hidden Hidden = NamedTuple('Hidden', [('x', int)]) [out] [out2] tmp/a.py:3: note: Revealed type is 'Tuple[builtins.int, fallback=b.C.Hidden@5]' [case testIncrementalNodeCreatedFromGetattr] import a [file a.py] from b import C c: C [file b.py] from c import C [file c.pyi] def __getattr__(s): ... [file a.py.2] from b import C c: C reveal_type(c) [out] [out2] tmp/a.py:3: note: Revealed type is 'Any' [case testNewAnalyzerIncrementalNestedEnum] import a [file a.py] from b import C x: C [file a.py.2] from b import C x: C # touch [file b.py] class C: ... from enum import Enum def test() -> None: Color = Enum('Color', 'RED BLACK') [builtins fixtures/list.pyi] [out] [out2] [case testCannotDetermineTypeFromOtherModule] import aa [file aa.py] import a [file aa.py.2] import a # dummy [file a.py] from b import Sub Sub().foo Sub().foo [file b.py] from typing import Any class desc: def __get__(self, _: Any, __: Any = None) -> int: return 42 class Base: @property def foo(self) -> int: ... 
class Sub(Base): foo = desc(42) # type: ignore [builtins fixtures/property.pyi] [out] tmp/a.py:3: error: Cannot determine type of 'foo' tmp/a.py:4: error: Cannot determine type of 'foo' [out2] tmp/a.py:3: error: Cannot determine type of 'foo' tmp/a.py:4: error: Cannot determine type of 'foo' [case testRedefinitionClass] import b [file a.py] from whatever import Foo # type: ignore class Foo: # type: ignore def f(self) -> None: pass [file b.py] import a [file b.py.2] import a # a change mypy-0.761/test-data/unit/check-inference-context.test0000644€tŠÔÚ€2›s®0000010355413576752246027171 0ustar jukkaDROPBOX\Domain Users00000000000000 -- Basic test cases -- ---------------- [case testBasicContextInference] from typing import TypeVar, Generic T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B if int(): ao = f() if int(): ab = f() if int(): b = f() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") def f() -> 'A[T]': pass class A(Generic[T]): pass class B: pass [case testBasicContextInferenceForConstructor] from typing import TypeVar, Generic T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B if int(): ao = A() if int(): ab = A() if int(): b = A() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") class A(Generic[T]): pass class B: pass [case testIncompatibleContextInference] from typing import TypeVar, Generic T = TypeVar('T') b = None # type: B c = None # type: C ab = None # type: A[B] ao = None # type: A[object] ac = None # type: A[C] if int(): ac = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "C" if int(): ab = f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "B" if int(): ao = f(b) if int(): ab = f(b) if int(): ao = f(c) if int(): ac = f(c) def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass class C: pass -- Local variables -- --------------- [case 
testInferGenericLocalVariableTypeWithEmptyContext] from typing import TypeVar, Generic T = TypeVar('T') def g() -> None: ao = None # type: A[object] ab = None # type: A[B] o = None # type: object b = None # type: B x = f(o) if int(): ab = x # E: Incompatible types in assignment (expression has type "A[object]", variable has type "A[B]") ao = x y = f(b) if int(): ao = y # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ab = y def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass [out] [case testInferLocalVariableTypeWithUnderspecifiedGenericType] from typing import TypeVar, Generic T = TypeVar('T') def g() -> None: x = f() # E: Need type annotation for 'x' def f() -> 'A[T]': pass class A(Generic[T]): pass [out] [case testInferMultipleLocalVariableTypesWithTupleRvalue] from typing import TypeVar, Generic T = TypeVar('T') def g() -> None: ao = None # type: A[object] ab = None # type: A[B] b = None # type: B x, y = f(b), f(b) if int(): ao = x # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ao = y # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ab = x ab = y def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass [out] [case testInferMultipleLocalVariableTypesWithArrayRvalueAndNesting] from typing import TypeVar, List, Generic T = TypeVar('T') def h() -> None: ao = None # type: A[object] ab = None # type: A[B] b = None # type: B x, y = g(f(b)) if int(): ao = x # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ao = y # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ab = x ab = y def f(a: T) -> 'A[T]': pass def g(a: T) -> List[T]: pass class A(Generic[T]): pass class B: pass [builtins fixtures/for.pyi] [out] -- Return types with multiple tvar instances -- ----------------------------------------- [case 
testInferenceWithTypeVariableTwiceInReturnType] from typing import TypeVar, Tuple, Generic T = TypeVar('T') b = None # type: B o = None # type: object ab = None # type: A[B] ao = None # type: A[object] if int(): ab, ao = f(b) # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") if int(): ao, ab = f(b) # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") if int(): ao, ao = f(b) if int(): ab, ab = f(b) if int(): ao, ao = f(o) def f(a: T) -> 'Tuple[A[T], A[T]]': pass class A(Generic[T]): pass class B: pass [builtins fixtures/tuple.pyi] [case testInferenceWithTypeVariableTwiceInReturnTypeAndMultipleVariables] from typing import TypeVar, Tuple, Generic S = TypeVar('S') T = TypeVar('T') b = None # type: B o = None # type: object ab = None # type: A[B] ao = None # type: A[object] if int(): ao, ao, ab = f(b, b) # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") if int(): ao, ab, ao = g(b, b) # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") if int(): ao, ab, ab, ab = h(b, b) # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") if int(): ab, ab, ao, ab = h(b, b) # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") if int(): ao, ab, ab = f(b, b) if int(): ab, ab, ao = g(b, b) if int(): ab, ab, ab, ab = h(b, b) def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass class A(Generic[T]): pass class B: pass [builtins fixtures/tuple.pyi] -- Multiple tvar instances in arguments -- ------------------------------------ [case testMultipleTvatInstancesInArgs] from typing import TypeVar, Generic T = TypeVar('T') ac = None # type: A[C] ab = None # type: A[B] ao = None # type: A[object] b = None # type: B c = 
None # type: C o = None # type: object if int(): ab = f(b, o) # E: Argument 2 to "f" has incompatible type "object"; expected "B" if int(): ab = f(o, b) # E: Argument 1 to "f" has incompatible type "object"; expected "B" if int(): ac = f(b, c) # E: Argument 1 to "f" has incompatible type "B"; expected "C" if int(): ac = f(c, b) # E: Argument 2 to "f" has incompatible type "B"; expected "C" if int(): ao = f(b, c) if int(): ao = f(c, b) if int(): ab = f(c, b) def f(a: T, b: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass class C(B): pass -- Nested generic function calls -- ----------------------------- [case testNestedGenericFunctionCall1] from typing import TypeVar, Generic T = TypeVar('T') aab = None # type: A[A[B]] aao = None # type: A[A[object]] ao = None # type: A[object] b = None # type: B o = None # type: object if int(): aab = f(f(o)) # E: Argument 1 to "f" has incompatible type "object"; expected "B" if int(): aab = f(f(b)) aao = f(f(b)) ao = f(f(b)) def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass [case testNestedGenericFunctionCall2] from typing import TypeVar, Generic T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B o = None # type: object if int(): ab = f(g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B" if int(): ab = f(g(b)) ao = f(g(b)) def f(a: T) -> T: pass def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass [case testNestedGenericFunctionCall3] from typing import TypeVar, Generic T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B o = None # type: object if int(): ab = f(g(o), g(b)) # E: Argument 1 to "g" has incompatible type "object"; expected "B" if int(): ab = f(g(b), g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B" if int(): ab = f(g(b), g(b)) ao = f(g(b), g(o)) if int(): ao = f(g(o), g(b)) def f(a: T, b: T) -> T: pass def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class 
B: pass -- Method calls -- ------------ [case testMethodCallWithContextInference] from typing import TypeVar, Generic T = TypeVar('T') o = None # type: object b = None # type: B c = None # type: C ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] ab.g(f(o)) # E: Argument 1 to "f" has incompatible type "object"; expected "B" if int(): ac = f(b).g(f(c)) # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]") if int(): ac = f(c).g(f(b)) # E: Argument 1 to "f" has incompatible type "B"; expected "C" if int(): ab = f(b).g(f(c)) ab.g(f(c)) def f(a: T) -> 'A[T]': pass class A(Generic[T]): def g(self, a: 'A[T]') -> 'A[T]': pass class B: pass class C(B): pass -- List expressions -- ---------------- [case testEmptyListExpression] from typing import List aa = None # type: List[A] ao = None # type: List[object] a = None # type: A def f(): a, aa, ao # Prevent redefinition a = [] # E: Incompatible types in assignment (expression has type "List[]", variable has type "A") aa = [] ao = [] class A: pass [builtins fixtures/list.pyi] [case testSingleItemListExpressions] from typing import List aa = None # type: List[A] ab = None # type: List[B] ao = None # type: List[object] a = None # type: A b = None # type: B def f(): aa, ab, ao # Prevent redefinition aa = [b] # E: List item 0 has incompatible type "B"; expected "A" ab = [a] # E: List item 0 has incompatible type "A"; expected "B" aa = [a] ab = [b] ao = [a] aa = [None] ao = [None] class A: pass class B: pass [builtins fixtures/list.pyi] [case testMultiItemListExpressions] from typing import List aa = None # type: List[A] ab = None # type: List[B] ao = None # type: List[object] a = None # type: A b = None # type: B def f(): ab, aa, ao # Prevent redefinition ab = [b, a] # E: List item 1 has incompatible type "A"; expected "B" ab = [a, b] # E: List item 0 has incompatible type "A"; expected "B" aa = [a, b, a] ao = [a, b] class A: pass class B(A): pass [builtins 
fixtures/list.pyi] [case testLocalVariableInferenceFromEmptyList] import typing def f() -> None: a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") b = [None] c = [B()] if int(): c = [object()] # E: List item 0 has incompatible type "object"; expected "B" c = [B()] class B: pass [builtins fixtures/list.pyi] [out] [case testNestedListExpressions] from typing import List aao = None # type: List[List[object]] aab = None # type: List[List[B]] ab = None # type: List[B] b = None # type: B o = None # type: object def f(): aao, aab # Prevent redefinition aao = [[o], ab] # E: List item 1 has incompatible type "List[B]"; expected "List[object]" aab = [[], [o]] # E: List item 0 has incompatible type "object"; expected "B" aao = [[None], [b], [], [o]] aab = [[None], [b], []] aab = [ab, []] class B: pass [builtins fixtures/list.pyi] -- Complex context -- --------------- [case testParenthesesAndContext] from typing import List l = ([A()]) # type: List[object] class A: pass [builtins fixtures/list.pyi] [case testComplexTypeInferenceWithTuple] from typing import TypeVar, Tuple, Generic k = TypeVar('k') t = TypeVar('t') v = TypeVar('v') def f(x: Tuple[k]) -> 'A[k]': pass d = f((A(),)) # type: A[A[B]] class A(Generic[t]): pass class B: pass class C: pass class D(Generic[k, v]): pass [builtins fixtures/list.pyi] -- Dictionary literals -- ------------------- [case testDictionaryLiteralInContext] from typing import Dict, TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass class B: pass class C: pass a_b = A() # type: A[B] a_c = A() # type: A[C] d = {A() : a_c, a_b : A()} # type: Dict[A[B], A[C]] [builtins fixtures/dict.pyi] -- Special cases (regression tests etc.) 
-- ------------------------------------- [case testInitializationWithInferredGenericType] from typing import TypeVar, Generic T = TypeVar('T') c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected "C[A]" def f(x: T) -> T: pass class C(Generic[T]): pass class A: pass [case testInferredGenericTypeAsReturnValue] from typing import TypeVar, Generic T = TypeVar('T') def t() -> 'A[B]': return f(D()) # E: Argument 1 to "f" has incompatible type "D"; expected "B" return A() return f(C()) def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass class C(B): pass class D: pass [out] [case testIntersectionWithInferredGenericArgument] from foo import * [file foo.pyi] from typing import overload, TypeVar, Generic T = TypeVar('T') f(A()) @overload def f(x: 'A[B]') -> None: pass @overload def f(x: 'B') -> None: pass class A(Generic[T]): pass class B: pass [case testInferenceWithAbstractClassContext] from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') x = A() # type: I[int] a_object = A() # type: A[object] y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type "A[object]", variable has type "I[int]") class I(Generic[t]): @abstractmethod def f(self): pass class A(I[t], Generic[t]): def f(self): pass [case testInferenceWithAbstractClassContext2] from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') a = f(A()) # type: A[int] a_int = A() # type: A[int] aa = f(a_int) class I(Generic[t]): pass class A(I[t], Generic[t]): pass def f(i: I[t]) -> A[t]: pass [case testInferenceWithAbstractClassContext3] from typing import TypeVar, Generic, Iterable t = TypeVar('t') class set(Generic[t]): def __init__(self, iterable: Iterable[t]) -> None: pass b = bool() l = set([b]) if int(): l = set([object()]) # E: List item 0 has incompatible type "object"; expected "bool" [builtins fixtures/for.pyi] -- Infer generic type in 'Any' context -- 
----------------------------------- [case testInferGenericTypeInAnyContext] from typing import Any, TypeVar, Generic s = TypeVar('s') t = TypeVar('t') x = [] # type: Any y = C() # type: Any class C(Generic[s, t]): pass [builtins fixtures/list.pyi] -- Lambdas -- ------- [case testInferLambdaArgumentTypeUsingContext] from typing import Callable f = None # type: Callable[[B], A] if int(): f = lambda x: x.o f = lambda x: x.x # E: "B" has no attribute "x" class A: pass class B: o = None # type: A [case testInferLambdaReturnTypeUsingContext] from typing import List, Callable f = None # type: Callable[[], List[A]] if int(): f = lambda: [] f = lambda: [B()] # E: List item 0 has incompatible type "B"; expected "A" class A: pass class B: pass [builtins fixtures/list.pyi] [case testInferLambdaTypeUsingContext] x : str = (lambda x: x + 1)(1) # E: Incompatible types in assignment (expression has type "int", variable has type "str") reveal_type((lambda x, y: x + y)(1, 2)) # N: Revealed type is 'builtins.int' (lambda x, y: x + y)(1, "") # E: Unsupported operand types for + ("int" and "str") (lambda *, x, y: x + y)(x=1, y="") # E: Unsupported operand types for + ("int" and "str") reveal_type((lambda s, i: s)(i=0, s='x')) # N: Revealed type is 'Literal['x']?' reveal_type((lambda s, i: i)(i=0, s='x')) # N: Revealed type is 'Literal[0]?' 
reveal_type((lambda x, s, i: x)(1.0, i=0, s='x')) # N: Revealed type is 'builtins.float' (lambda x, s, i: x)() # E: Too few arguments (lambda: 0)(1) # E: Too many arguments -- varargs are not handled, but it should not crash reveal_type((lambda *k, s, i: i)(type, i=0, s='x')) # N: Revealed type is 'Any' reveal_type((lambda s, *k, i: i)(i=0, s='x')) # N: Revealed type is 'Any' reveal_type((lambda s, i, **k: i)(i=0, s='x')) # N: Revealed type is 'Any' [builtins fixtures/dict.pyi] [case testInferLambdaAsGenericFunctionArgument] from typing import TypeVar, List, Any, Callable t = TypeVar('t') class A: x = None # type: A def f(a: List[t], fn: Callable[[t], Any]) -> None: pass list_a = [] # type: List[A] f(list_a, lambda a: a.x) [builtins fixtures/list.pyi] [case testLambdaWithoutContext] reveal_type(lambda x: x) # N: Revealed type is 'def (x: Any) -> Any' reveal_type(lambda x: 1) # N: Revealed type is 'def (x: Any) -> Literal[1]?' [case testLambdaContextVararg] from typing import Callable def f(t: Callable[[str], str]) -> str: '' f(lambda *_: '') [case testInvalidContextForLambda] from typing import Callable f = lambda x: A() # type: Callable[[], A] f2 = lambda: A() # type: Callable[[A], A] class A: pass [out] main:2: error: Cannot infer type of lambda main:2: error: Incompatible types in assignment (expression has type "Callable[[Any], A]", variable has type "Callable[[], A]") main:3: error: Cannot infer type of lambda main:3: error: Incompatible types in assignment (expression has type "Callable[[], A]", variable has type "Callable[[A], A]") [case testEllipsisContextForLambda] from typing import Callable f1 = lambda x: 1 # type: Callable[..., int] f2 = lambda: 1 # type: Callable[..., int] f3 = lambda *args, **kwargs: 1 # type: Callable[..., int] f4 = lambda x: x # type: Callable[..., int] g = lambda x: 1 # type: Callable[..., str] [builtins fixtures/dict.pyi] [out] main:6: error: Incompatible types in assignment (expression has type "Callable[[Any], int]", variable 
has type "Callable[..., str]") main:6: error: Incompatible return value type (got "int", expected "str") [case testEllipsisContextForLambda2] from typing import TypeVar, Callable T = TypeVar('T') def foo(arg: Callable[..., T]) -> None: pass foo(lambda: 1) [case testLambdaNoneInContext] from typing import Callable def f(x: Callable[[], None]) -> None: pass def g(x: Callable[[], int]) -> None: pass f(lambda: None) g(lambda: None) [case testIsinstanceInInferredLambda] from typing import TypeVar, Callable T = TypeVar('T') S = TypeVar('S') class A: pass class B(A): pass class C(A): pass def f(func: Callable[[T], S], *z: T, r: S = None) -> S: pass f(lambda x: 0 if isinstance(x, B) else 1) # E: Cannot infer type argument 1 of "f" f(lambda x: 0 if isinstance(x, B) else 1, A())() # E: "int" not callable f(lambda x: x if isinstance(x, B) else B(), A(), r=B())() # E: "B" not callable f( lambda x: # E: Argument 1 to "f" has incompatible type "Callable[[A], A]"; expected "Callable[[A], B]" B() if isinstance(x, B) else x, # E: Incompatible return value type (got "A", expected "B") A(), r=B()) [builtins fixtures/isinstance.pyi] -- Overloads + generic functions -- ----------------------------- [case testMapWithOverloadedFunc] from foo import * [file foo.pyi] from typing import TypeVar, Callable, List, overload, Any t = TypeVar('t') s = TypeVar('s') def map(f: Callable[[t], s], seq: List[t]) -> List[s]: pass @overload def g(o: object) -> 'B': pass @overload def g(o: 'A', x: Any = None) -> 'B': pass class A: pass class B: pass m = map(g, [A()]) b = m # type: List[B] a = m # type: List[A] # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]") [builtins fixtures/list.pyi] -- Boolean operators -- ----------------- [case testOrOperationInferredFromContext] from typing import List a, b, c = None, None, None # type: (List[A], List[B], List[C]) if int(): a = a or [] if int(): a = [] or a if int(): b = b or [C()] if int(): a = a or b # E: 
Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type "List[A]") if int(): b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type "List[B]") class A: pass class B: pass class C(B): pass [builtins fixtures/list.pyi] -- Special cases -- ------------- [case testSomeTypeVarsInferredFromContext] from typing import List, TypeVar t = TypeVar('t') s = TypeVar('s') # Some type variables can be inferred using context, but not all of them. a = None # type: List[A] if int(): a = f(A(), B()) if int(): a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" def f(a: s, b: t) -> List[s]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [case testSomeTypeVarsInferredFromContext2] from typing import List, TypeVar s = TypeVar('s') t = TypeVar('t') # Like testSomeTypeVarsInferredFromContext, but tvars in different order. a = None # type: List[A] if int(): a = f(A(), B()) if int(): a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" def f(a: s, b: t) -> List[s]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') map( [lambda x: x], []) def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass class A: pass [builtins fixtures/list.pyi] [out] [case testChainedAssignmentInferenceContexts] from typing import List i = None # type: List[int] s = None # type: List[str] if int(): i = i = [] if int(): i = s = [] # E: Incompatible types in assignment (expression has type "List[str]", variable has type "List[int]") [builtins fixtures/list.pyi] [case testContextForAttributeDeclaredInInit] from typing import List class A: def __init__(self): self.x = [] # type: List[int] a = A() a.x = [] a.x = [1] a.x = [''] # E: List item 0 has incompatible type "str"; expected "int" [builtins 
fixtures/list.pyi] [case testListMultiplyInContext] from typing import List a = None # type: List[int] if int(): a = [None] * 3 a = [''] * 3 # E: List item 0 has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testUnionTypeContext] from typing import Union, List, TypeVar T = TypeVar('T') def f(x: Union[List[T], str]) -> None: pass f([1]) f('') f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Union[List[], str]" [builtins fixtures/isinstancelist.pyi] [case testIgnoringInferenceContext] from typing import TypeVar, List T = TypeVar('T') def f(x: List[T]) -> T: pass def g(y: object) -> None: pass a = [1] g(f(a)) [builtins fixtures/list.pyi] [case testStar2Context] from typing import Any, Dict, Tuple, Iterable def f1(iterable: Iterable[Tuple[str, Any]] = None) -> None: f2(**dict(iterable)) def f2(iterable: Iterable[Tuple[str, Any]], **kw: Any) -> None: pass [builtins fixtures/dict.pyi] [out] [case testInferenceInGenericFunction] from typing import TypeVar, List T = TypeVar('T') def f(a: T) -> None: l = [] # type: List[T] l.append(a) l.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "T" [builtins fixtures/list.pyi] [out] [case testInferenceInGenericClass] from typing import TypeVar, Generic, List S = TypeVar('S') T = TypeVar('T') class A(Generic[S]): def f(self, a: T, b: S) -> None: l = [] # type: List[T] l.append(a) l.append(b) # E: Argument 1 to "append" of "list" has incompatible type "S"; expected "T" [builtins fixtures/list.pyi] [out] [case testLambdaInGenericFunction] from typing import TypeVar, Callable T = TypeVar('T') S = TypeVar('S') def f(a: T, b: S) -> None: c = lambda x: x # type: Callable[[T], S] [out] main:5: error: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], S]") main:5: error: Incompatible return value type (got "T", expected "S") [case testLambdaInGenericClass] from typing import TypeVar, Callable, Generic T = 
TypeVar('T') S = TypeVar('S') class A(Generic[T]): def f(self, b: S) -> None: c = lambda x: x # type: Callable[[T], S] [out] main:6: error: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], S]") main:6: error: Incompatible return value type (got "T", expected "S") [case testRevealTypeContext] from typing import TypeVar, Callable, Generic T = TypeVar('T') class A(Generic[T]): pass reveal_type(A()) # N: Revealed type is '__main__.A[]' b = reveal_type(A()) # type: A[int] # N: Revealed type is '__main__.A[builtins.int]' [case testUnionWithGenericTypeItemContext] from typing import TypeVar, Union, List T = TypeVar('T') def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass reveal_type(f(1)) # N: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]' reveal_type(f([])) # N: Revealed type is 'builtins.list[builtins.int]' reveal_type(f(None)) # N: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testUnionWithGenericTypeItemContextAndStrictOptional] # flags: --strict-optional from typing import TypeVar, Union, List T = TypeVar('T') def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass reveal_type(f(1)) # N: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]' reveal_type(f([])) # N: Revealed type is 'builtins.list[builtins.int]' reveal_type(f(None)) # N: Revealed type is 'Union[None, builtins.list[builtins.int]]' [builtins fixtures/list.pyi] [case testUnionWithGenericTypeItemContextInMethod] from typing import TypeVar, Union, List, Generic T = TypeVar('T') S = TypeVar('S') class C(Generic[T]): def f(self, x: Union[T, S]) -> Union[T, S]: pass c = C[List[int]]() reveal_type(c.f('')) # N: Revealed type is 'Union[builtins.list[builtins.int], builtins.str*]' reveal_type(c.f([1])) # N: Revealed type is 'builtins.list[builtins.int]' reveal_type(c.f([])) # N: Revealed type is 'builtins.list[builtins.int]' reveal_type(c.f(None)) # N: Revealed type is 
'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testGenericMethodCalledInGenericContext] # flags: --strict-optional from typing import TypeVar, Generic _KT = TypeVar('_KT') _VT = TypeVar('_VT') _T = TypeVar('_T') class M(Generic[_KT, _VT]): def get(self, k: _KT, default: _T) -> _T: ... def f(d: M[_KT, _VT], k: _KT) -> _VT: return d.get(k, None) # E: Incompatible return value type (got "None", expected "_VT") [case testGenericMethodCalledInGenericContext2] from typing import TypeVar, Generic, Union _KT = TypeVar('_KT') _VT = TypeVar('_VT') _T = TypeVar('_T') class M(Generic[_KT, _VT]): def get(self, k: _KT, default: _T) -> Union[_VT, _T]: ... def f(d: M[_KT, _VT], k: _KT) -> Union[_VT, None]: return d.get(k, None) [case testLambdaDeferredCrash] from typing import Callable class C: def f(self) -> None: g: Callable[[], int] = lambda: 1 or self.x self.x = int() [case testInferTypeVariableFromTwoGenericTypes1] from typing import TypeVar, List, Sequence T = TypeVar('T') class C: ... class D(C): ... def f(x: Sequence[T], y: Sequence[T]) -> List[T]: ... reveal_type(f([C()], [D()])) # N: Revealed type is 'builtins.list[__main__.C*]' [builtins fixtures/list.pyi] [case testInferTypeVariableFromTwoGenericTypes2] from typing import TypeVar, List T = TypeVar('T') class C: ... class D(C): ... def f(x: List[T], y: List[T]) -> List[T]: ... f([C()], [D()]) # E: Cannot infer type argument 1 of "f" [builtins fixtures/list.pyi] [case testInferTypeVariableFromTwoGenericTypes3] from typing import Generic, TypeVar T = TypeVar('T') T_contra = TypeVar('T_contra', contravariant=True) class A(Generic[T_contra]): pass class B(A[T]): pass class C: ... class D(C): ... def f(x: A[T], y: A[T]) -> B[T]: ... 
c: B[C] d: B[D] reveal_type(f(c, d)) # N: Revealed type is '__main__.B[__main__.D*]' [case testInferTypeVariableFromTwoGenericTypes4] from typing import Generic, TypeVar, Callable, List T = TypeVar('T') T_contra = TypeVar('T_contra', contravariant=True) class A(Generic[T_contra]): pass class B(A[T_contra]): pass class C: ... class D(C): ... def f(x: Callable[[B[T]], None], y: Callable[[B[T]], None]) -> List[T]: ... def gc(x: A[C]) -> None: pass # B[C] def gd(x: A[D]) -> None: pass # B[C] reveal_type(f(gc, gd)) # N: Revealed type is 'builtins.list[__main__.C*]' [builtins fixtures/list.pyi] [case testWideOuterContextSubClassBound] from typing import TypeVar class A: ... class B(A): ... T = TypeVar('T', bound=B) def f(x: T) -> T: ... def outer(x: A) -> None: ... outer(f(B())) x: A = f(B()) [case testWideOuterContextSubClassBoundGenericReturn] from typing import TypeVar, Iterable, List class A: ... class B(A): ... T = TypeVar('T', bound=B) def f(x: T) -> List[T]: ... def outer(x: Iterable[A]) -> None: ... outer(f(B())) x: Iterable[A] = f(B()) [builtins fixtures/list.pyi] [case testWideOuterContextSubClassValues] from typing import TypeVar class A: ... class B(A): ... T = TypeVar('T', B, int) def f(x: T) -> T: ... def outer(x: A) -> None: ... outer(f(B())) x: A = f(B()) [case testWideOuterContextSubClassValuesGenericReturn] from typing import TypeVar, Iterable, List class A: ... class B(A): ... T = TypeVar('T', B, int) def f(x: T) -> List[T]: ... def outer(x: Iterable[A]) -> None: ... outer(f(B())) x: Iterable[A] = f(B()) [builtins fixtures/list.pyi] [case testWideOuterContextSubclassBoundGeneric] from typing import TypeVar, Generic S = TypeVar('S') class A(Generic[S]): ... class B(A[S]): ... T = TypeVar('T', bound=B[int]) def f(x: T) -> T: ... def outer(x: A[int]) -> None: ... 
y: B[int] outer(f(y)) x: A[int] = f(y) [case testWideOuterContextSubclassBoundGenericCovariant] from typing import TypeVar, Generic S_co = TypeVar('S_co', covariant=True) class A(Generic[S_co]): ... class B(A[S_co]): ... T = TypeVar('T', bound=B[int]) def f(x: T) -> T: ... def outer(x: A[int]) -> None: ... y: B[int] outer(f(y)) x: A[int] = f(y) [case testWideOuterContextSubclassValuesGeneric] from typing import TypeVar, Generic S = TypeVar('S') class A(Generic[S]): ... class B(A[S]): ... T = TypeVar('T', B[int], int) def f(x: T) -> T: ... def outer(x: A[int]) -> None: ... y: B[int] outer(f(y)) x: A[int] = f(y) [case testWideOuterContextSubclassValuesGenericCovariant] from typing import TypeVar, Generic S_co = TypeVar('S_co', covariant=True) class A(Generic[S_co]): ... class B(A[S_co]): ... T = TypeVar('T', B[int], int) def f(x: T) -> T: ... def outer(x: A[int]) -> None: ... y: B[int] outer(f(y)) x: A[int] = f(y) [case testWideOuterContextUnionBound] from typing import TypeVar, Union class A: ... class B: ... T = TypeVar('T', bound=B) def f(x: T) -> T: ... def outer(x: Union[A, B]) -> None: ... outer(f(B())) x: Union[A, B] = f(B()) [case testWideOuterContextUnionBoundGenericReturn] from typing import TypeVar, Union, Iterable, List class A: ... class B: ... T = TypeVar('T', bound=B) def f(x: T) -> List[T]: ... def outer(x: Iterable[Union[A, B]]) -> None: ... outer(f(B())) x: Iterable[Union[A, B]] = f(B()) [builtins fixtures/list.pyi] [case testWideOuterContextUnionValues] from typing import TypeVar, Union class A: ... class B: ... T = TypeVar('T', B, int) def f(x: T) -> T: ... def outer(x: Union[A, B]) -> None: ... outer(f(B())) x: Union[A, B] = f(B()) [case testWideOuterContextUnionValuesGenericReturn] from typing import TypeVar, Union, Iterable, List class A: ... class B: ... T = TypeVar('T', B, int) def f(x: T) -> List[T]: ... def outer(x: Iterable[Union[A, B]]) -> None: ... 
outer(f(B())) x: Iterable[Union[A, B]] = f(B()) [builtins fixtures/list.pyi] [case testWideOuterContextOptional] # flags: --strict-optional from typing import Optional, Type, TypeVar class Custom: pass T = TypeVar('T', bound=Custom) def a(x: T) -> Optional[T]: ... def b(x: T) -> Optional[T]: return a(x) [case testWideOuterContextOptionalGenericReturn] # flags: --strict-optional from typing import Optional, Type, TypeVar, Iterable class Custom: pass T = TypeVar('T', bound=Custom) def a(x: T) -> Iterable[Optional[T]]: ... def b(x: T) -> Iterable[Optional[T]]: return a(x) [case testWideOuterContextOptionalMethod] # flags: --strict-optional from typing import Optional, Type, TypeVar class A: pass class B: pass T = TypeVar('T', A, B) class C: def meth_a(self) -> Optional[A]: return self.meth(A) def meth(self, cls: Type[T]) -> Optional[T]: ... [case testWideOuterContextValuesOverlapping] from typing import TypeVar, List class A: pass class B(A): pass class C: pass T = TypeVar('T', A, B, C) def foo(xs: List[T]) -> T: ... S = TypeVar('S', B, C) def bar(xs: List[S]) -> S: foo(xs) return xs[0] [builtins fixtures/list.pyi] [case testWideOuterContextOptionalTypeVarReturn] # flags: --strict-optional from typing import Callable, Iterable, List, Optional, TypeVar class C: x: str T = TypeVar('T') def f(i: Iterable[T], c: Callable[[T], str]) -> Optional[T]: ... def g(l: List[C], x: str) -> Optional[C]: def pred(c: C) -> str: return c.x return f(l, pred) [builtins fixtures/list.pyi] [case testWideOuterContextOptionalTypeVarReturnLambda] # flags: --strict-optional from typing import Callable, Iterable, List, Optional, TypeVar class C: x: str T = TypeVar('T') def f(i: Iterable[T], c: Callable[[T], str]) -> Optional[T]: ... 
def g(l: List[C], x: str) -> Optional[C]: return f(l, lambda c: reveal_type(c).x) # N: Revealed type is '__main__.C' [builtins fixtures/list.pyi] [case testWideOuterContextEmpty] from typing import List, TypeVar T = TypeVar('T', bound=int) def f(x: List[T]) -> T: ... # mypy infers List[] here, and is a subtype of str y: str = f([]) [builtins fixtures/list.pyi] [case testWideOuterContextEmptyError] from typing import List, TypeVar T = TypeVar('T', bound=int) def f(x: List[T]) -> List[T]: ... # TODO: improve error message for such cases, see #3283 and #5706 y: List[str] = f([]) \ # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[str]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] [case testWideOuterContextNoArgs] # flags: --strict-optional from typing import TypeVar, Optional T = TypeVar('T', bound=int) def f(x: Optional[T] = None) -> T: ... y: str = f() [case testWideOuterContextNoArgsError] # flags: --strict-optional from typing import TypeVar, Optional, List T = TypeVar('T', bound=int) def f(x: Optional[T] = None) -> List[T]: ... y: List[str] = f() \ # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[str]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] [case testUseCovariantGenericOuterContext] from typing import TypeVar, Callable, Tuple T = TypeVar('T') def f(x: Callable[..., T]) -> T: return x() x: Tuple[str, ...] = f(tuple) [builtins fixtures/tuple.pyi] [out] [case testUseCovariantGenericOuterContextUserDefined] from typing import TypeVar, Callable, Generic T_co = TypeVar('T_co', covariant=True) T = TypeVar('T') class G(Generic[T_co]): ... 
def f(x: Callable[..., T]) -> T: return x() x: G[str] = f(G) [out] mypy-0.761/test-data/unit/check-inference.test0000644€tŠÔÚ€2›s®0000022636113576752246025511 0ustar jukkaDROPBOX\Domain Users00000000000000-- Inferring locals/globals with simple types -- ------------------------------------------ [case testInferSimpleGvarType] import typing x = A() y = B() if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): x = A() if int(): x = y # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): x = x class A: pass class B: pass [case testInferSimpleLvarType] import typing def f() -> None: x = A() y = B() if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") x = A() x = y # E: Incompatible types in assignment (expression has type "B", variable has type "A") x = x class A: pass class B: pass [out] [case testLvarInitializedToVoid] import typing def f() -> None: a = g() # E: "g" does not return a value #b, c = g() # "g" does not return a value TODO def g() -> None: pass [out] [case testInferringLvarTypeFromArgument] import typing def f(a: 'A') -> None: b = a if int(): b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = a a = b class A: pass class B: pass [out] [case testInferringLvarTypeFromGvar] g = None # type: B def f() -> None: a = g if int(): a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = B() class A: pass class B: pass [out] [case testInferringImplicitDynamicTypeForLvar] import typing def f() -> None: a = g() None(a) # E: "None" not callable a.x() def g(): pass [out] [case testInferringExplicitDynamicTypeForLvar] from typing import Any g = None # type: Any def f(a: Any) -> None: b = g None(b) # E: "None" not callable a.x() [out] -- Inferring types of local variables with complex types -- 
----------------------------------------------------- [case testInferringTupleTypeForLvar] def f() -> None: a = A(), B() aa = None # type: A bb = None # type: B if int(): bb = a[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B") aa = a[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A") aa = a[0] bb = a[1] class A: pass class B: pass [builtins fixtures/tuple.pyi] [out] [case testInferringTupleTypeForLvarWithNones] import typing def f() -> None: a = A(), None b = None, A() class A: pass [builtins fixtures/tuple.pyi] [out] [case testInferringGenericTypeForLvar] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass a_i = None # type: A[int] a_s = None # type: A[str] def f() -> None: a_int = A() # type: A[int] a = a_int if int(): a = a_s # E: Incompatible types in assignment (expression has type "A[str]", variable has type "A[int]") a = a_i [builtins fixtures/tuple.pyi] [out] [case testInferringFunctionTypeForLvar] import typing def f() -> None: a = g a(B()) # E: Argument 1 has incompatible type "B"; expected "A" a(A()) def g(a: 'A') -> None: pass class A: pass class B: pass [out] [case testInferringFunctionTypeForLvarFromTypeObject] import typing def f() -> None: a = A a(A()) # E: Too many arguments a() t = a # type: type class A: pass [out] -- Inferring variable types in multiple definition -- ----------------------------------------------- [case testInferringLvarTypesInMultiDef] import typing def f() -> None: a, b = A(), B() if int(): a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = A() b = B() class A: pass class B: pass [out] [case testInferringLvarTypesInTupleAssignment] from typing import Tuple def f() -> None: t = None # type: 
Tuple[A, B] a, b = t if int(): a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = A() b = B() class A: pass class B: pass [out] [case testInferringLvarTypesInNestedTupleAssignment1] from typing import Tuple def f() -> None: t = None # type: Tuple[A, B] a1, (a, b) = A(), t if int(): a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = A() b = B() class A: pass class B: pass [out] [case testInferringLvarTypesInNestedTupleAssignment2] import typing def f() -> None: a, (b, c) = A(), (B(), C()) if int(): a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C") a = A() b = B() c = C() class A: pass class B: pass class C: pass [out] [case testInferringLvarTypesInNestedListAssignment] import typing def f() -> None: a, (b, c) = A(), [B(), C()] if int(): a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C") a = A() b = B() c = C() class A: pass class B: pass class C: pass [out] 
[case testInferringLvarTypesInMultiDefWithNoneTypes] import typing def f() -> None: a, b = A(), None c, d = None, A() class A: pass [out] [case testInferringLvarTypesInNestedTupleAssignmentWithNoneTypes] import typing def f() -> None: a1, (a2, b) = A(), (A(), None) class A: pass [out] [case testInferringLvarTypesInMultiDefWithInvalidTuple] from typing import Tuple t = None # type: Tuple[object, object, object] def f() -> None: a, b = t # Fail c, d, e, f = t # Fail g, h, i = t [builtins fixtures/tuple.pyi] [out] main:5: error: Too many values to unpack (2 expected, 3 provided) main:6: error: Need more than 3 values to unpack (4 expected) [case testInvalidRvalueTypeInInferredMultipleLvarDefinition] import typing def f() -> None: a, b = f # E: 'def ()' object is not iterable c, d = A() # E: '__main__.A' object is not iterable class A: pass [builtins fixtures/for.pyi] [out] [case testInvalidRvalueTypeInInferredNestedTupleAssignment] import typing def f() -> None: a1, (a2, b) = A(), f # E: 'def ()' object is not iterable a3, (c, d) = A(), A() # E: '__main__.A' object is not iterable class A: pass [builtins fixtures/for.pyi] [out] [case testInferringMultipleLvarDefinitionWithListRvalue] from typing import List class C: pass class D: pass def f() -> None: list_c = [C()] list_d = [D()] a, b = list_c c, d, e = list_d if int(): a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D") b = c # E: Incompatible types in assignment (expression has type "D", variable has type "C") a = C() b = C() c = D() d = D() e = D() a = b c = d d = e [builtins fixtures/for.pyi] [out] [case testInferringNestedTupleAssignmentWithListRvalue] from typing import List class C: pass class D: pass def f() -> None: list_c = [C()] list_d = [D()] c1, (a, b) = C(), list_c c2, (c, 
d, e) = C(), list_d if int(): a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D") b = c # E: Incompatible types in assignment (expression has type "D", variable has type "C") a = C() b = C() c = D() d = D() e = D() a = b c = d d = e [builtins fixtures/for.pyi] [out] [case testInferringMultipleLvarDefinitionWithImplicitDynamicRvalue] import typing def f() -> None: a, b = g() a.x b.x def g(): pass [case testInferringMultipleLvarDefinitionWithExplicitDynamicRvalue] from typing import Any def f(d: Any) -> None: a, b = d a.x b.x [case testInferringTypesFromIterable] from typing import Iterable class Nums(Iterable[int]): def __iter__(self): pass def __next__(self): pass a, b = Nums() if int(): a = b = 1 if int(): a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): b = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [builtins fixtures/for.pyi] -- Type variable inference for generic functions -- --------------------------------------------- [case testInferSimpleGenericFunction] from typing import Tuple, TypeVar T = TypeVar('T') a = None # type: A b = None # type: B c = None # type: Tuple[A, object] if int(): b = id(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = id(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = id(c) # E: Incompatible types in assignment (expression has type "Tuple[A, object]", variable has type "A") if int(): a = id(a) b = id(b) c = id(c) def id(a: T) -> T: pass class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testInferringGenericFunctionTypeForLvar] from typing import TypeVar T = TypeVar('T') def f() -> None: 
a = id b = None # type: int c = None # type: str if int(): b = a(c) # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = a(b) c = a(c) def id(x: T) -> T: return x [out] [case testUnderspecifiedInferenceResult] # flags: --no-strict-optional from typing import TypeVar T = TypeVar('T') class A: pass a = None # type: A def ff() -> None: x = f() # E: Need type annotation for 'x' reveal_type(x) # N: Revealed type is 'Any' g(None) # Ok f() # Ok because not used to infer local variable type g(a) def f() -> T: pass def g(a: T) -> None: pass [out] [case testInferenceWithMultipleConstraints] from typing import TypeVar T = TypeVar('T') a = None # type: A b = None # type: B if int(): b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = f(a, b) if int(): a = f(b, a) def f(a: T, b: T) -> T: pass class A: pass class B(A): pass [case testInferenceWithMultipleVariables] from typing import Tuple, TypeVar T = TypeVar('T') S = TypeVar('S') a, b = None, None # type: (A, B) taa = None # type: Tuple[A, A] tab = None # type: Tuple[A, B] tba = None # type: Tuple[B, A] if int(): taa = f(a, b) # E: Argument 2 to "f" has incompatible type "B"; expected "A" if int(): taa = f(b, a) # E: Argument 1 to "f" has incompatible type "B"; expected "A" if int(): tba = f(a, b) # E: Argument 1 to "f" has incompatible type "A"; expected "B" \ # E: Argument 2 to "f" has incompatible type "B"; expected "A" if int(): tab = f(a, b) if int(): tba = f(b, a) def f(a: T, b: S) -> Tuple[T, S]: pass class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testConstraintSolvingWithSimpleGenerics] from typing import TypeVar, Generic T = TypeVar('T') ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] if int(): ab = f(ao) # E: Argument 1 to "f" has incompatible type "A[object]"; 
expected "A[B]" ao = f(ab) # E: Argument 1 to "f" has incompatible type "A[B]"; expected "A[object]" if int(): ab = f(ac) # E: Argument 1 to "f" has incompatible type "A[C]"; expected "A[B]" if int(): ab = g(ao) # E: Argument 1 to "g" has incompatible type "A[object]"; expected "A[B]" ao = g(ab) # E: Argument 1 to "g" has incompatible type "A[B]"; expected "A[object]" if int(): ab = f(ab) ac = f(ac) ao = f(ao) if int(): ab = g(ab) ao = g(ao) def f(a: 'A[T]') -> 'A[T]': pass def g(a: T) -> T: pass class A(Generic[T]): pass class B: pass class C: pass [case testConstraintSolvingFailureWithSimpleGenerics] from typing import TypeVar, Generic T = TypeVar('T') ao = None # type: A[object] ab = None # type: A[B] f(ao, ab) # E: Cannot infer type argument 1 of "f" f(ab, ao) # E: Cannot infer type argument 1 of "f" f(ao, ao) f(ab, ab) def f(a: 'A[T]', b: 'A[T]') -> None: pass class A(Generic[T]): pass class B: pass [case testTypeInferenceWithCalleeDefaultArgs] from typing import TypeVar T = TypeVar('T') a = None # type: A o = None # type: object if int(): a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): a = g(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): o = f() if int(): o = f(o) if int(): a = f(a) if int(): a = g(a) def f(a: T = None) -> T: pass def g(a: T, b: T = None) -> T: pass class A: pass -- Generic function inference with multiple inheritance -- ---------------------------------------------------- [case testGenericFunctionInferenceWithMultipleInheritance] from typing import TypeVar class I: pass class J: pass class A(I, J): pass class B(I, J): pass class C(I): pass class D(J): pass T = TypeVar('T') def f(a: T, b: T) -> T: pass def g(x: I) -> None: pass a = f(A(), C()) g(a) b = f(A(), B()) g(b) c = f(A(), D()) g(c) # E: Argument 1 to "g" has incompatible type "J"; expected "I" d = f(D(), A()) g(d) # E: Argument 1 to "g" has incompatible type "J"; 
expected "I" e = f(D(), C()) g(e) # E: Argument 1 to "g" has incompatible type "object"; expected "I" [case testGenericFunctionInferenceWithMultipleInheritance2] from typing import TypeVar class I: pass class J: pass class A(I): pass class B(A, J): pass class C(I, J): pass T = TypeVar('T') def f(a: T, b: T) -> T: pass def g(x: I) -> None: pass def h(x: J) -> None: pass a = f(B(), C()) g(a) h(a) # E: Argument 1 to "h" has incompatible type "I"; expected "J" b = f(C(), B()) g(b) h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J" c = f(A(), B()) g(a) h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J" [case testGenericFunctionInferenceWithMultipleInheritance3] from typing import TypeVar class I: pass class J: pass class K(J): pass class A(K): pass class B(A, I): pass class C(I, J): pass T = TypeVar('T') def f(a: T, b: T) -> T: pass def g(x: K) -> None: pass a = f(B(), C()) g(a) # E: Argument 1 to "g" has incompatible type "J"; expected "K" b = f(A(), C()) g(b) # E: Argument 1 to "g" has incompatible type "J"; expected "K" c = f(A(), B()) g(c) [case testPrecedenceOfFirstBaseAsInferenceResult] from typing import TypeVar from abc import abstractmethod, ABCMeta T = TypeVar('T') a, i, j = None, None, None # type: (A, I, J) a = f(B(), C()) class I(metaclass=ABCMeta): pass class J(metaclass=ABCMeta): pass def f(a: T, b: T) -> T: pass class A: pass class B(A, I, J): pass class C(A, I, J): pass -- Generic function inference with function arguments -- -------------------------------------------------- [case testNonOverloadedMapInference] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') class A: pass b = bool() def f(x: bool) -> A: pass def mymap(f: Callable[[t], s], a: List[t]) -> List[s]: pass l = mymap(f, [b]) if int(): l = [A()] lb = [b] if int(): l = lb # E: Incompatible types in assignment (expression has type "List[bool]", variable has type "List[A]") [builtins fixtures/for.pyi] [case 
testGenericFunctionWithTypeTypeAsCallable] from typing import Callable, Type, TypeVar T = TypeVar('T') def f(x: Callable[..., T]) -> T: return x() class A: pass x = None # type: Type[A] y = f(x) reveal_type(y) # N: Revealed type is '__main__.A*' -- Generic function inference with unions -- -------------------------------------- [case testUnionInference] from typing import TypeVar, Union, List T = TypeVar('T') U = TypeVar('U') def f(x: Union[T, int], y: T) -> T: pass f(1, 'a')() # E: "str" not callable f('a', 1)() # E: "object" not callable f('a', 'a')() # E: "str" not callable f(1, 1)() # E: "int" not callable def g(x: Union[T, List[T]]) -> List[T]: pass def h(x: List[str]) -> None: pass g('a')() # E: "List[str]" not callable # The next line is a case where there are multiple ways to satisfy a constraint # involving a Union. Either T = List[str] or T = str would turn out to be valid, # but mypy doesn't know how to branch on these two options (and potentially have # to backtrack later) and defaults to T = . The result is an # awkward error message. Either a better error message, or simply accepting the # call, would be preferable here. g(['a']) # E: Argument 1 to "g" has incompatible type "List[str]"; expected "List[]" h(g(['a'])) def i(x: Union[List[T], List[U]], y: List[T], z: List[U]) -> None: pass a = [1] b = ['b'] i(a, a, b) i(b, a, b) i(a, b, b) # E: Argument 1 to "i" has incompatible type "List[int]"; expected "List[str]" [builtins fixtures/list.pyi] [case testCallableListJoinInference] from typing import Any, Callable def fun() -> None: callbacks = [ callback1, callback2, ] for c in callbacks: call(c, 1234) # this must not fail def callback1(i: int) -> int: return i def callback2(i: int) -> str: return 'hello' def call(c: Callable[[int], Any], i: int) -> None: c(i) [builtins fixtures/list.pyi] [out] [case testCallableMeetAndJoin] # flags: --python-version 3.6 from typing import Callable, Any, TypeVar class A: ... class B(A): ... 
def f(c: Callable[[B], int]) -> None: ... c: Callable[[A], int] d: Callable[[B], int] lst = [c, d] reveal_type(lst) # N: Revealed type is 'builtins.list[def (__main__.B) -> builtins.int]' T = TypeVar('T') def meet_test(x: Callable[[T], int], y: Callable[[T], int]) -> T: ... CA = Callable[[A], A] CB = Callable[[B], B] ca: Callable[[CA], int] cb: Callable[[CB], int] reveal_type(meet_test(ca, cb)) # N: Revealed type is 'def (__main__.A) -> __main__.B' [builtins fixtures/list.pyi] [out] [case testUnionInferenceWithTypeVarValues] from typing import TypeVar, Union AnyStr = TypeVar('AnyStr', bytes, str) def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass f('foo') f('foo', 'bar') f('foo', b'bar') # E: Value of type variable "AnyStr" of "f" cannot be "object" f(1) f(1, 'foo') f(1, 'foo', b'bar') # E: Value of type variable "AnyStr" of "f" cannot be "object" [builtins fixtures/primitives.pyi] [case testUnionTwoPassInference-skip] from typing import TypeVar, Union, List T = TypeVar('T') U = TypeVar('U') def j(x: Union[List[T], List[U]], y: List[T]) -> List[U]: pass a = [1] b = ['b'] # We could infer: Since List[str] <: List[T], we must have T = str. # Then since List[int] <: Union[List[str], List[U]], and List[int] is # not a subtype of List[str], we must have U = int. # This is not currently implemented. 
j(a, b) [builtins fixtures/list.pyi] [case testUnionContext] from typing import TypeVar, Union, List T = TypeVar('T') def f() -> List[T]: pass d1 = f() # type: Union[List[int], str] d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type "List[]", variable has type "Union[int, str]") def g(x: T) -> List[T]: pass d3 = g(1) # type: Union[List[int], List[str]] [builtins fixtures/list.pyi] [case testGenericFunctionSubtypingWithUnions] from typing import TypeVar, Union, List T = TypeVar('T') S = TypeVar('S') def k1(x: int, y: List[T]) -> List[Union[T, int]]: pass def k2(x: S, y: List[T]) -> List[Union[T, int]]: pass a = k2 if int(): a = k2 if int(): a = k1 # E: Incompatible types in assignment (expression has type "Callable[[int, List[T]], List[Union[T, int]]]", variable has type "Callable[[S, List[T]], List[Union[T, int]]]") b = k1 if int(): b = k1 if int(): b = k2 [builtins fixtures/list.pyi] [case testAmbiguousUnionContextAndMultipleInheritance] from typing import TypeVar, Union, Generic _T = TypeVar('_T') class T(Generic[_T]): pass class U(Generic[_T]): pass class V(T[_T], U[_T]): pass def wait_for(fut: Union[T[_T], U[_T]]) -> _T: ... reveal_type(wait_for(V[str]())) # N: Revealed type is 'builtins.str*' [case testAmbiguousUnionContextAndMultipleInheritance2] from typing import TypeVar, Union, Generic _T = TypeVar('_T') _S = TypeVar('_S') class T(Generic[_T, _S]): pass class U(Generic[_T, _S]): pass class V(T[_T, _S], U[_T, _S]): pass def wait_for(fut: Union[T[_T, _S], U[_T, _S]]) -> T[_T, _S]: ... 
reveal_type(wait_for(V[int, str]())) \ # N: Revealed type is '__main__.T[builtins.int*, builtins.str*]' -- Literal expressions -- ------------------- [case testDictLiteral] from typing import Dict class A: pass class B: pass def d_ab() -> Dict[A, B]: return {} def d_aa() -> Dict[A, A]: return {} a, b = None, None # type: (A, B) d = {a:b} if int(): d = d_ab() if int(): d = d_aa() # E: Incompatible types in assignment (expression has type "Dict[A, A]", variable has type "Dict[A, B]") [builtins fixtures/dict.pyi] [case testSetLiteral] from typing import Any, Set a, x = None, None # type: (int, Any) def s_i() -> Set[int]: return set() def s_s() -> Set[str]: return set() s = {a} if int(): s = {x} if int(): s = s_i() if int(): s = s_s() # E: Incompatible types in assignment (expression has type "Set[str]", variable has type "Set[int]") [builtins fixtures/set.pyi] [case testSetWithStarExpr] s = {1, 2, *(3, 4)} t = {1, 2, *s} reveal_type(s) # N: Revealed type is 'builtins.set[builtins.int*]' reveal_type(t) # N: Revealed type is 'builtins.set[builtins.int*]' [builtins fixtures/set.pyi] [case testListLiteralWithFunctionsErasesNames] def f1(x: int) -> int: ... def g1(y: int) -> int: ... def h1(x: int) -> int: ... list_1 = [f1, g1] list_2 = [f1, h1] reveal_type(list_1) # N: Revealed type is 'builtins.list[def (builtins.int) -> builtins.int]' reveal_type(list_2) # N: Revealed type is 'builtins.list[def (x: builtins.int) -> builtins.int]' def f2(x: int, z: str) -> int: ... def g2(y: int, z: str) -> int: ... def h2(x: int, z: str) -> int: ... list_3 = [f2, g2] list_4 = [f2, h2] reveal_type(list_3) # N: Revealed type is 'builtins.list[def (builtins.int, z: builtins.str) -> builtins.int]' reveal_type(list_4) # N: Revealed type is 'builtins.list[def (x: builtins.int, z: builtins.str) -> builtins.int]' [builtins fixtures/list.pyi] [case testListLiteralWithSimilarFunctionsErasesName] from typing import Union class A: ... class B(A): ... class C: ... class D: ... 
def f(x: Union[A, C], y: B) -> A: ... def g(z: Union[B, D], y: A) -> B: ... def h(x: Union[B, D], y: A) -> B: ... list_1 = [f, g] list_2 = [f, h] reveal_type(list_1) # N: Revealed type is 'builtins.list[def (__main__.B, y: __main__.B) -> __main__.A]' reveal_type(list_2) # N: Revealed type is 'builtins.list[def (x: __main__.B, y: __main__.B) -> __main__.A]' [builtins fixtures/list.pyi] [case testListLiteralWithNameOnlyArgsDoesNotEraseNames] def f(*, x: int) -> int: ... def g(*, y: int) -> int: ... def h(*, x: int) -> int: ... list_1 = [f, g] # E: List item 0 has incompatible type "Callable[[NamedArg(int, 'x')], int]"; expected "Callable[[NamedArg(int, 'y')], int]" list_2 = [f, h] [builtins fixtures/list.pyi] -- For statements -- -------------- [case testInferenceOfFor1] a, b = None, None # type: (A, B) for x in [A()]: b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x for y in []: # E: Need type annotation for 'y' a = y reveal_type(y) # N: Revealed type is 'Any' class A: pass class B: pass [builtins fixtures/for.pyi] [case testInferenceOfFor2] a, b, c = None, None, None # type: (A, B, C) for x, (y, z) in [(A(), (B(), C()))]: b = x # Fail c = y # Fail a = z # Fail a = x b = y c = z for xx, yy, zz in [(A(), B())]: # Fail pass for xx, (yy, zz) in [(A(), B())]: # Fail pass for xxx, yyy in [(None, None)]: pass class A: pass class B: pass class C: pass [builtins fixtures/for.pyi] [out] main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") main:5: error: Incompatible types in assignment (expression has type "B", variable has type "C") main:6: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:10: error: Need more than 2 values to unpack (3 expected) main:12: error: '__main__.B' object is not iterable [case testInferenceOfFor3] a, b = None, None # type: (A, B) for x, y in [[A()]]: b = x # E: Incompatible types in assignment (expression has 
type "A", variable has type "B") b = y # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x a = y for e, f in [[]]: # E: Need type annotation for 'e' \ # E: Need type annotation for 'f' reveal_type(e) # N: Revealed type is 'Any' reveal_type(f) # N: Revealed type is 'Any' class A: pass class B: pass [builtins fixtures/for.pyi] [case testForStatementInferenceWithVoid] import typing for x in f(): # E: "f" does not return a value pass def f() -> None: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex] import typing for a in [A()]: pass a = A() if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") for a in []: pass a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex2] # flags: --allow-redefinition def f() -> None: for a in [A()]: pass a = A() a if int(): a = B() \ # E: Incompatible types in assignment (expression has type "B", variable has type "A") for a in []: pass # E: Need type annotation for 'a' a = A() if int(): a = B() \ # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [builtins fixtures/for.pyi] [out] [case testReusingInferredForIndex3] # flags: --disallow-redefinition def f() -> None: for a in [A()]: pass a = A() a if int(): a = B() \ # E: Incompatible types in assignment (expression has type "B", variable has type "A") for a in []: pass a = A() if int(): a = B() \ # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [builtins fixtures/for.pyi] [out] -- Regression tests -- ---------------- [case testMultipleAssignmentWithPartialDefinition] a = None # type: A if int(): x, a = a, a if int(): x = a a = x if int(): x = object() # E: Incompatible types in assignment (expression has type "object", 
variable has type "A") a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") class A: pass [case testMultipleAssignmentWithPartialDefinition2] a = None # type: A if int(): a, x = [a, a] if int(): x = a a = x if int(): x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") class A: pass [builtins fixtures/for.pyi] [case testMultipleAssignmentWithPartialDefinition3] from typing import Any, cast a = None # type: A if int(): x, a = cast(Any, a) if int(): x = a a = x if int(): x = object() a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") class A: pass [case testInferGlobalDefinedInBlock] import typing if A: a = A() if int(): a = A() if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [case testAssigningAnyStrToNone] from typing import Tuple, TypeVar AnyStr = TypeVar('AnyStr', str, bytes) def f(x: AnyStr) -> Tuple[AnyStr]: pass x = None (x,) = f('') reveal_type(x) # N: Revealed type is 'builtins.str' -- Inferring attribute types -- ------------------------- [case testInferAttributeType] import typing class A: a = B() class B: pass A().a = B() A().a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testInferAttributeTypeAndAssignInInit] import typing class A: a = B() def __init__(self) -> None: self.a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") self.a = B() class B: pass [out] [case testInferAttributeInInit] import typing class B: pass class A: def __init__(self) -> None: self.a = A() self.b = B() a = A() a.a = A() a.b = B() a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a.b = A() # E: 
Incompatible types in assignment (expression has type "A", variable has type "B") [case testInferAttributeInInitUsingChainedAssignment] import typing class B: pass class A: def __init__(self) -> None: self.a = self.b = A() a = A() a.a = A() a.b = A() a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a.b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -- Lambdas -- ------- [case testInferLambdaType] from typing import List, Callable li = [1] l = lambda: li f1 = l # type: Callable[[], List[int]] f2 = l # type: Callable[[], List[str]] # E: Incompatible types in assignment (expression has type "Callable[[], List[int]]", variable has type "Callable[[], List[str]]") [builtins fixtures/list.pyi] [case testInferLambdaType2] from typing import List, Callable l = lambda: [B()] f1 = l # type: Callable[[], List[B]] f2 = l # type: Callable[[], List[A]] # E: Incompatible types in assignment (expression has type "Callable[[], List[B]]", variable has type "Callable[[], List[A]]") class A: pass class B: pass [builtins fixtures/list.pyi] [case testUninferableLambda] from typing import TypeVar, Callable X = TypeVar('X') def f(x: Callable[[X], X]) -> X: pass y = f(lambda x: x) # E: Cannot infer type argument 1 of "f" [case testUninferableLambdaWithTypeError] from typing import TypeVar, Callable X = TypeVar('X') def f(x: Callable[[X], X], y: str) -> X: pass y = f(lambda x: x, 1) # Fail [out] main:4: error: Cannot infer type argument 1 of "f" main:4: error: Argument 2 to "f" has incompatible type "int"; expected "str" [case testInferLambdaNone] from typing import Callable def f(x: Callable[[], None]) -> None: pass def g(x: Callable[[], int]) -> None: pass a = lambda: None f(a) g(a) b = lambda: None # type: Callable[[], None] f(b) g(b) [case testLambdaDefaultContext] # flags: --strict-optional from typing import Callable def f(a: Callable[..., None] = lambda *a, **k: None): pass def g(a: 
Callable[..., None] = lambda *a, **k: 1): # E: Incompatible default for argument "a" (default has type "Callable[[VarArg(Any), KwArg(Any)], int]", argument has type "Callable[..., None]") pass [builtins fixtures/dict.pyi] [case testLambdaVarargContext] # Should not crash from typing import Callable def f(a: Callable[[int, int, int], int] = lambda *a, **k: 1): pass [builtins fixtures/dict.pyi] [case testLambdaDeferredSpecialCase] from typing import Callable class A: def f(self) -> None: h(lambda: self.x) def g(self) -> None: self.x = 1 def h(x: Callable[[], int]) -> None: pass -- Boolean operators -- ----------------- [case testOrOperationWithGenericOperands] from typing import List a = None # type: List[A] o = None # type: List[object] a2 = a or [] if int(): a = a2 a2 = o # E: Incompatible types in assignment (expression has type "List[object]", variable has type "List[A]") class A: pass [builtins fixtures/list.pyi] -- Accessing variable before its type has been inferred -- ---------------------------------------------------- [case testAccessGlobalVarBeforeItsTypeIsAvailable] import typing x.y # E: Cannot determine type of 'x' x = object() x.y # E: "object" has no attribute "y" [case testAccessDataAttributeBeforeItsTypeIsAvailable] a = None # type: A a.x.y # E: Cannot determine type of 'x' class A: def __init__(self) -> None: self.x = object() a.x.y # E: "object" has no attribute "y" -- Ducktype declarations -- --------------------- [case testListWithDucktypeCompatibility] from typing import List, _promote class A: pass @_promote(A) class B: pass a = None # type: List[A] x1 = [A(), B()] x2 = [B(), A()] x3 = [B(), B()] if int(): a = x1 if int(): a = x2 if int(): a = x3 \ # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] [typing 
fixtures/typing-full.pyi] [case testListWithDucktypeCompatibilityAndTransitivity] from typing import List, _promote class A: pass @_promote(A) class B: pass @_promote(B) class C: pass a = None # type: List[A] x1 = [A(), C()] x2 = [C(), A()] x3 = [B(), C()] if int(): a = x1 if int(): a = x2 if int(): a = x3 \ # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] -- Inferring type of variable when initialized to an empty collection -- ------------------------------------------------------------------ [case testInferListInitializedToEmpty] a = [] a.append(1) a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyUsingUpdate] a = [] a.extend(['']) a.append(0) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str" [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyAndNotAnnotated] a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyAndReadBeforeAppend] a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") if a: pass a.xyz # E: "List[Any]" has no attribute "xyz" a.append('') [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyAndIncompleteTypeInAppend] a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") a.append([]) a() # E: "List[Any]" not callable [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyAndMultipleAssignment] a, b = [], [] a.append(1) b.append('') a() # E: "List[int]" not callable b() # E: "List[str]" not callable [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyInFunction] def f() -> None: a = [] 
a.append(1) a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyAndNotAnnotatedInFunction] def f() -> None: a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") def g() -> None: pass a = [] a.append(1) [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyAndReadBeforeAppendInFunction] def f() -> None: a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") if a: pass a.xyz # E: "List[Any]" has no attribute "xyz" a.append('') [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyInClassBody] class A: a = [] a.append(1) a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyAndNotAnnotatedInClassBody] class A: a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") class B: a = [] a.append(1) [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyInMethod] class A: def f(self) -> None: a = [] a.append(1) a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyAndNotAnnotatedInMethod] class A: def f(self) -> None: a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyInMethodViaAttribute] class A: def f(self) -> None: # Attributes aren't supported right now. 
self.a = [] self.a.append(1) self.a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyInClassBodyAndOverriden] from typing import List class A: def __init__(self) -> None: self.x = [] # E: Need type annotation for 'x' (hint: "x: List[] = ...") class B(A): # TODO?: This error is kind of a false positive, unfortunately @property def x(self) -> List[int]: # E: Signature of "x" incompatible with supertype "A" return [123] [builtins fixtures/list.pyi] [case testInferSetInitializedToEmpty] a = set() a.add(1) a.add('') # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int" [builtins fixtures/set.pyi] [case testInferSetInitializedToEmptyUsingDiscard] a = set() a.discard('') a.add(0) # E: Argument 1 to "add" of "set" has incompatible type "int"; expected "str" [builtins fixtures/set.pyi] [case testInferSetInitializedToEmptyUsingUpdate] a = set() a.update({0}) a.add('') # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int" [builtins fixtures/set.pyi] [case testInferDictInitializedToEmpty] a = {} a[1] = '' a() # E: "Dict[int, str]" not callable [builtins fixtures/dict.pyi] [case testInferDictInitializedToEmptyUsingUpdate] a = {} a.update({'': 42}) a() # E: "Dict[str, int]" not callable [builtins fixtures/dict.pyi] [case testInferDictInitializedToEmptyUsingUpdateError] a = {} # E: Need type annotation for 'a' (hint: "a: Dict[, ] = ...") a.update([1, 2]) # E: Argument 1 to "update" of "dict" has incompatible type "List[int]"; expected "Mapping[Any, Any]" a() # E: "Dict[Any, Any]" not callable [builtins fixtures/dict.pyi] [case testInferDictInitializedToEmptyAndIncompleteTypeInUpdate] a = {} # E: Need type annotation for 'a' (hint: "a: Dict[, ] = ...") a[1] = {} b = {} # E: Need type annotation for 'b' (hint: "b: Dict[, ] = ...") b[{}] = 1 [builtins fixtures/dict.pyi] [case testInferDictInitializedToEmptyAndUpdatedFromMethod] 
map = {} def add() -> None: map[1] = 2 [builtins fixtures/dict.pyi] [case testInferDictInitializedToEmptyAndUpdatedFromMethodUnannotated] map = {} def add(): map[1] = 2 [builtins fixtures/dict.pyi] [case testSpecialCaseEmptyListInitialization] def f(blocks: Any): # E: Name 'Any' is not defined \ # N: Did you forget to import it from "typing"? (Suggestion: "from typing import Any") to_process = [] to_process = list(blocks) [builtins fixtures/list.pyi] [case testSpecialCaseEmptyListInitialization2] def f(blocks: object): to_process = [] to_process = list(blocks) # E: No overload variant of "list" matches argument type "object" \ # N: Possible overload variant: \ # N: def [T] __init__(self, x: Iterable[T]) -> List[T] \ # N: <1 more non-matching overload not shown> [builtins fixtures/list.pyi] [case testInferListInitializedToEmptyAndAssigned] a = [] if bool(): a = [1] reveal_type(a) # N: Revealed type is 'builtins.list[builtins.int*]' def f(): return [1] b = [] if bool(): b = f() reveal_type(b) # N: Revealed type is 'builtins.list[Any]' d = {} if bool(): d = {1: 'x'} reveal_type(d) # N: Revealed type is 'builtins.dict[builtins.int*, builtins.str*]' dd = {} # E: Need type annotation for 'dd' (hint: "dd: Dict[, ] = ...") if bool(): dd = [1] # E: Incompatible types in assignment (expression has type "List[int]", variable has type "Dict[Any, Any]") reveal_type(dd) # N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [case testInferOrderedDictInitializedToEmpty] from collections import OrderedDict o = OrderedDict() o[1] = 'x' reveal_type(o) # N: Revealed type is 'collections.OrderedDict[builtins.int, builtins.str]' d = {1: 'x'} oo = OrderedDict() oo.update(d) reveal_type(oo) # N: Revealed type is 'collections.OrderedDict[builtins.int*, builtins.str*]' [builtins fixtures/dict.pyi] [case testEmptyCollectionAssignedToVariableTwiceIncremental] x = [] # E: Need type annotation for 'x' (hint: "x: List[] = ...") y = x x = [] reveal_type(x) # N: Revealed 
type is 'builtins.list[Any]' d = {} # E: Need type annotation for 'd' (hint: "d: Dict[, ] = ...") z = d d = {} reveal_type(d) # N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [out2] main:1: error: Need type annotation for 'x' (hint: "x: List[] = ...") main:4: note: Revealed type is 'builtins.list[Any]' main:5: error: Need type annotation for 'd' (hint: "d: Dict[, ] = ...") main:8: note: Revealed type is 'builtins.dict[Any, Any]' [case testEmptyCollectionAssignedToVariableTwiceNoReadIncremental] x = [] # E: Need type annotation for 'x' (hint: "x: List[] = ...") x = [] [builtins fixtures/list.pyi] [out2] main:1: error: Need type annotation for 'x' (hint: "x: List[] = ...") [case testInferAttributeInitializedToEmptyAndAssigned] class C: def __init__(self) -> None: self.a = [] if bool(): self.a = [1] reveal_type(C().a) # N: Revealed type is 'builtins.list[builtins.int*]' [builtins fixtures/list.pyi] [case testInferAttributeInitializedToEmptyAndAppended] class C: def __init__(self) -> None: self.a = [] if bool(): self.a.append(1) reveal_type(C().a) # N: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testInferAttributeInitializedToEmptyAndAssignedItem] class C: def __init__(self) -> None: self.a = {} if bool(): self.a[0] = 'yes' reveal_type(C().a) # N: Revealed type is 'builtins.dict[builtins.int, builtins.str]' [builtins fixtures/dict.pyi] [case testInferAttributeInitializedToNoneAndAssigned] # flags: --strict-optional class C: def __init__(self) -> None: self.a = None if bool(): self.a = 1 reveal_type(C().a) # N: Revealed type is 'Union[builtins.int, None]' [case testInferAttributeInitializedToEmptyNonSelf] class C: def __init__(self) -> None: self.a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") if bool(): a = self a.a = [1] a.a.append(1) reveal_type(C().a) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case 
testInferAttributeInitializedToEmptyAndAssignedOtherMethod] class C: def __init__(self) -> None: self.a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") def meth(self) -> None: self.a = [1] reveal_type(C().a) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case testInferAttributeInitializedToEmptyAndAppendedOtherMethod] class C: def __init__(self) -> None: self.a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") def meth(self) -> None: self.a.append(1) reveal_type(C().a) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case testInferAttributeInitializedToEmptyAndAssignedItemOtherMethod] class C: def __init__(self) -> None: self.a = {} # E: Need type annotation for 'a' (hint: "a: Dict[, ] = ...") def meth(self) -> None: self.a[0] = 'yes' reveal_type(C().a) # N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [case testInferAttributeInitializedToNoneAndAssignedOtherMethod] # flags: --strict-optional class C: def __init__(self) -> None: self.a = None def meth(self) -> None: self.a = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "None") reveal_type(C().a) # N: Revealed type is 'None' [case testInferAttributeInitializedToEmptyAndAssignedClassBody] class C: a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") def __init__(self) -> None: self.a = [1] reveal_type(C().a) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case testInferAttributeInitializedToEmptyAndAppendedClassBody] class C: a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") def __init__(self) -> None: self.a.append(1) reveal_type(C().a) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case testInferAttributeInitializedToEmptyAndAssignedItemClassBody] class C: a = {} # E: Need type annotation for 'a' (hint: "a: Dict[, ] = ...") def __init__(self) -> None: self.a[0] = 'yes' reveal_type(C().a) 
# N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [case testInferAttributeInitializedToNoneAndAssignedClassBody] # flags: --strict-optional class C: a = None def __init__(self) -> None: self.a = 1 reveal_type(C().a) # N: Revealed type is 'Union[builtins.int, None]' -- Inferring types of variables first initialized to None (partial types) -- ---------------------------------------------------------------------- [case testLocalVariablePartiallyInitializedToNone] def f() -> None: if object(): x = None else: x = 1 x() # E: "int" not callable [out] [case testLocalVariablePartiallyTwiceInitializedToNone] def f() -> None: if object(): x = None elif object(): x = None else: x = 1 x() # E: "int" not callable [out] [case testLvarInitializedToNoneWithoutType] import typing def f() -> None: a = None a.x() # E: "None" has no attribute "x" [out] [case testGvarPartiallyInitializedToNone] x = None if object(): x = 1 x() # E: "int" not callable [case testPartiallyInitializedToNoneAndThenToPartialList] x = None if object(): # Promote from partial None to partial list. x = [] x.append(1) x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testPartiallyInitializedToNoneAndThenReadPartialList] x = None if object(): # Promote from partial None to partial list. x = [] # E: Need type annotation for 'x' (hint: "x: List[] = ...") x [builtins fixtures/list.pyi] [case testPartiallyInitializedToNoneAndPartialListAndLeftPartial] def f() -> None: x = None if object(): # Promote from partial None to partial list. 
x = [] # E: Need type annotation for 'x' (hint: "x: List[] = ...") [builtins fixtures/list.pyi] [out] [case testPartiallyInitializedToNoneAndThenToIncompleteType-skip] # TODO(ddfisher): fix partial type bug and re-enable from typing import TypeVar, Dict T = TypeVar('T') def f(*x: T) -> Dict[int, T]: pass x = None # E: Need type annotation for 'x' if object(): x = f() [builtins fixtures/dict.pyi] [case testPartiallyInitializedVariableDoesNotEscapeScope1] def f() -> None: x = None reveal_type(x) # N: Revealed type is 'None' x = 1 [out] [case testPartiallyInitializedVariableDoesNotEscapeScope2] x = None def f() -> None: x = None x = 1 x() # E: "None" not callable [case testAttributePartiallyInitializedToNone] class A: def f(self) -> None: self.x = None self.x = 1 self.x() # E: "int" not callable [out] [case testAttributePartiallyInitializedToNoneWithMissingAnnotation] class A: def f(self) -> None: self.x = None def g(self) -> None: self.x = 1 self.x() [out] main:6: error: Incompatible types in assignment (expression has type "int", variable has type "None") main:7: error: "None" not callable [case testGlobalInitializedToNoneSetFromFunction] a = None def f(): global a a = 42 [out] [case testGlobalInitializedToNoneSetFromMethod] a = None class C: def m(self): global a a = 42 [out] -- More partial type errors -- ------------------------ [case testPartialTypeErrorSpecialCase1] # This used to crash. class A: x = None def f(self) -> None: for a in self.x: pass [builtins fixtures/for.pyi] [out] main:5: error: "None" has no attribute "__iter__" (not iterable) [case testPartialTypeErrorSpecialCase2] # This used to crash. 
class A: x = [] def f(self) -> None: for a in self.x: pass [builtins fixtures/for.pyi] [out] main:3: error: Need type annotation for 'x' (hint: "x: List[] = ...") [case testPartialTypeErrorSpecialCase3] class A: x = None def f(self) -> None: for a in A.x: pass [builtins fixtures/for.pyi] [out] main:4: error: "None" has no attribute "__iter__" (not iterable) -- Multipass -- --------- [case testMultipassAndAccessVariableBeforeDefinition] def f() -> None: y = x y() # E: "int" not callable x = 1 [out] [case testMultipassAndAccessInstanceVariableBeforeDefinition] class A: def f(self) -> None: y = self.x y() # E: "int" not callable def g(self) -> None: self.x = 1 [out] [case testMultipassAndTopLevelVariable] y = x # E: Cannot determine type of 'x' y() x = 1+0 [out] [case testMultipassAndDecoratedMethod] from typing import Callable, TypeVar T = TypeVar('T') class A: def f(self) -> None: self.g() # E: Too few arguments for "g" of "A" self.g(1) @dec def g(self, x: str) -> None: pass def dec(f: Callable[[A, str], T]) -> Callable[[A, int], T]: pass [out] [case testMultipassAndDefineAttributeBasedOnNotReadyAttribute] class A: def f(self) -> None: self.y = self.x def g(self) -> None: self.x = 1 def h(self) -> None: self.y() # E: "int" not callable [out] [case testMultipassAndDefineAttributeBasedOnNotReadyAttribute2] class A: def f(self) -> None: self.y = self.x self.z = self.y self.z() # E self.y() # E def g(self) -> None: self.x = 1 def h(self) -> None: self.y() # E [out] main:5: error: "int" not callable main:6: error: "int" not callable main:12: error: "int" not callable [case testMultipassAndPartialTypes] def f() -> None: x = [] y x.append(1) x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" x.append(y) # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" y = '' [builtins fixtures/list.pyi] [out] [case testMultipassAndPartialTypes2] s = '' n = 0 def f() -> None: global s, n x = [] x.append(y) s = x[0] 
n = x[0] # E: Incompatible types in assignment (expression has type "str", variable has type "int") x.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str" y = '' [builtins fixtures/list.pyi] [out] [case testMultipassAndPartialTypes3] from typing import Dict def g(d: Dict[str, int]) -> None: pass def f() -> None: x = {} x[1] = y g(x) # E: Argument 1 to "g" has incompatible type "Dict[int, str]"; expected "Dict[str, int]" x[1] = 1 # E: Incompatible types in assignment (expression has type "int", target has type "str") x[1] = '' y = '' [builtins fixtures/dict.pyi] [out] [case testMultipassAndPartialTypes4] from typing import Dict def g(d: Dict[str, int]) -> None: pass def f() -> None: x = {} y x[1] = 1 g(x) # E: Argument 1 to "g" has incompatible type "Dict[int, int]"; expected "Dict[str, int]" y = '' [builtins fixtures/dict.pyi] [out] [case testMultipassAndCircularDependency] class A: def f(self) -> None: self.x = self.y # E: Cannot determine type of 'y' def g(self) -> None: self.y = self.x [out] [case testMultipassAndPartialTypesSpecialCase1] def f() -> None: y = o x = [] x.append(y) x() # E: "List[int]" not callable o = 1 [builtins fixtures/list.pyi] [out] [case testMultipassAndPartialTypesSpecialCase2] def f() -> None: y = o x = {} x[''] = y x() # E: "Dict[str, int]" not callable o = 1 [builtins fixtures/dict.pyi] [out] [case testMultipassAndPartialTypesSpecialCase3] def f() -> None: x = {} # E: Need type annotation for 'x' (hint: "x: Dict[, ] = ...") y = o z = {} # E: Need type annotation for 'z' (hint: "z: Dict[, ] = ...") o = 1 [builtins fixtures/dict.pyi] [out] [case testMultipassAndPartialTypesSpecialCase4] def f() -> None: y = o x = None x = y x() # E: "int" not callable o = 1 [out] [case testMultipassAndPartialTypesSpecialCase5] def f() -> None: x = None y = o x = y x() # E: "int" not callable o = 1 [out] [case testMultipassAndClassAttribute] class S: def foo(self) -> int: return R.X class R: X = 2 [case 
testMultipassAndMultipleFiles] import m def f() -> None: x() x = 0 [file m.py] def g() -> None: y() y = 0 [out] tmp/m.py:2: error: "int" not callable main:3: error: "int" not callable [case testForwardReferenceToDecoratedClassMethod] from typing import TypeVar, Callable T = TypeVar('T') def dec() -> Callable[[T], T]: pass A.g # E: Cannot determine type of 'g' class A: @classmethod def f(cls) -> None: reveal_type(cls.g) # N: Revealed type is 'def (x: builtins.str)' @classmethod @dec() def g(cls, x: str) -> None: pass @classmethod def h(cls) -> None: reveal_type(cls.g) # N: Revealed type is 'def (x: builtins.str)' reveal_type(A.g) # N: Revealed type is 'def (x: builtins.str)' [builtins fixtures/classmethod.pyi] -- Tests for special cases of unification -- -------------------------------------- [case testUnificationRedundantUnion] from typing import Union a = None # type: Union[int, str] b = None # type: Union[str, tuple] def f(): pass def g(x: Union[int, str]): pass c = a if f() else b g(c) # E: Argument 1 to "g" has incompatible type "Union[int, str, Tuple[Any, ...]]"; expected "Union[int, str]" [case testUnificationMultipleInheritance] class A: pass class B: def foo(self): pass class C(A, B): pass def f(): pass a1 = B() if f() else C() a1.foo() a2 = C() if f() else B() a2.foo() [case testUnificationMultipleInheritanceAmbiguous] # Show that join_instances_via_supertype() breaks ties using the first base class. 
class A1: pass class B1: def foo1(self): pass class C1(A1, B1): pass class A2: pass class B2: def foo2(self): pass class C2(A2, B2): pass class D1(C1, C2): pass class D2(C2, C1): pass def f(): pass a1 = D1() if f() else D2() a1.foo1() a2 = D2() if f() else D1() a2.foo2() [case testUnificationEmptyListLeft] def f(): pass a = [] if f() else [0] a() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testUnificationEmptyListRight] def f(): pass a = [0] if f() else [] a() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testUnificationEmptyListLeftInContext] from typing import List def f(): pass a = [] if f() else [0] # type: List[int] a() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testUnificationEmptyListRightInContext] # TODO Find an example that really needs the context from typing import List def f(): pass a = [0] if f() else [] # type: List[int] a() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testUnificationEmptySetLeft] def f(): pass a = set() if f() else {0} a() # E: "Set[int]" not callable [builtins fixtures/set.pyi] [case testUnificationEmptyDictLeft] def f(): pass a = {} if f() else {0: 0} a() # E: "Dict[int, int]" not callable [builtins fixtures/dict.pyi] [case testUnificationEmptyDictRight] def f(): pass a = {0: 0} if f() else {} a() # E: "Dict[int, int]" not callable [builtins fixtures/dict.pyi] [case testUnificationDictWithEmptyListLeft] def f(): pass a = {0: []} if f() else {0: [0]} a() # E: "Dict[int, List[int]]" not callable [builtins fixtures/dict.pyi] [case testUnificationDictWithEmptyListRight] def f(): pass a = {0: [0]} if f() else {0: []} a() # E: "Dict[int, List[int]]" not callable [builtins fixtures/dict.pyi] [case testMisguidedSetItem] from typing import Generic, Sequence, TypeVar T = TypeVar('T') class C(Sequence[T], Generic[T]): pass C[0] = 0 [out] main:4: error: Unsupported target for indexed assignment main:4: error: Invalid type: try using Literal[0] instead? 
[case testNoCrashOnPartialMember] class C: x = None def __init__(self) -> None: self.x = [] # E: Need type annotation for 'x' (hint: "x: List[] = ...") [builtins fixtures/list.pyi] [out] [case testNoCrashOnPartialVariable] from typing import Tuple, TypeVar T = TypeVar('T', bound=str) def f(x: T) -> Tuple[T]: ... x = None (x,) = f('') reveal_type(x) # N: Revealed type is 'builtins.str' [out] [case testNoCrashOnPartialVariable2] from typing import Tuple, TypeVar T = TypeVar('T', bound=str) def f() -> Tuple[T]: ... x = None if int(): (x,) = f() [out] [case testNoCrashOnPartialVariable3] from typing import Tuple, TypeVar T = TypeVar('T') def f(x: T) -> Tuple[T, T]: ... x = None (x, x) = f('') reveal_type(x) # N: Revealed type is 'builtins.str' [out] [case testInferenceNestedTuplesFromGenericIterable] from typing import Tuple, TypeVar T = TypeVar('T') def make_tuple(elem: T) -> Tuple[T]: return (elem,) def main() -> None: ((a, b),) = make_tuple((1, 2)) reveal_type(a) # N: Revealed type is 'builtins.int' reveal_type(b) # N: Revealed type is 'builtins.int' [builtins fixtures/tuple.pyi] [out] [case testDontMarkUnreachableAfterInferenceUninhabited] from typing import TypeVar T = TypeVar('T') def f() -> T: pass class C: x = f() # E: Need type annotation for 'x' def m(self) -> str: return 42 # E: Incompatible return value type (got "int", expected "str") if bool(): f() 1() # E: "int" not callable [builtins fixtures/list.pyi] [out] [case testDontMarkUnreachableAfterInferenceUninhabited2] # flags: --strict-optional from typing import TypeVar, Optional T = TypeVar('T') def f(x: Optional[T] = None) -> T: pass class C: x = f() # E: Need type annotation for 'x' def m(self) -> str: return 42 # E: Incompatible return value type (got "int", expected "str") if bool(): f() 1() # E: "int" not callable [builtins fixtures/list.pyi] [out] [case testDontMarkUnreachableAfterInferenceUninhabited3] from typing import TypeVar, List T = TypeVar('T') def f(x: List[T]) -> T: pass class C: x = f([]) 
# E: Need type annotation for 'x' def m(self) -> str: return 42 # E: Incompatible return value type (got "int", expected "str") if bool(): f([]) 1() # E: "int" not callable [builtins fixtures/list.pyi] [out] -- --local-partial-types -- --------------------- [case testLocalPartialTypesWithGlobalInitializedToNone] # flags: --local-partial-types x = None # E: Need type annotation for 'x' def f() -> None: global x x = 1 # TODO: "Any" could be a better type here to avoid multiple error messages reveal_type(x) # N: Revealed type is 'None' [case testLocalPartialTypesWithGlobalInitializedToNone2] # flags: --local-partial-types x = None # E: Need type annotation for 'x' def f(): global x x = 1 # TODO: "Any" could be a better type here to avoid multiple error messages reveal_type(x) # N: Revealed type is 'None' [case testLocalPartialTypesWithGlobalInitializedToNone3] # flags: --local-partial-types --no-strict-optional x = None def f() -> None: global x x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") x = '' reveal_type(x) # N: Revealed type is 'builtins.str' [case testLocalPartialTypesWithGlobalInitializedToNoneStrictOptional] # flags: --local-partial-types --strict-optional x = None def f() -> None: global x x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Optional[str]") x = '' def g() -> None: reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' [case testLocalPartialTypesWithGlobalInitializedToNone4] # flags: --local-partial-types --no-strict-optional a = None def f() -> None: reveal_type(a) # N: Revealed type is 'builtins.str' # TODO: This should probably be 'builtins.str', since there could be a # call that causes a non-None value to be assigned reveal_type(a) # N: Revealed type is 'None' a = '' reveal_type(a) # N: Revealed type is 'builtins.str' [builtins fixtures/list.pyi] [case testLocalPartialTypesWithClassAttributeInitializedToNone] # flags: --local-partial-types 
class A: x = None # E: Need type annotation for 'x' def f(self) -> None: self.x = 1 [case testLocalPartialTypesWithClassAttributeInitializedToEmptyDict] # flags: --local-partial-types class A: x = {} # E: Need type annotation for 'x' (hint: "x: Dict[, ] = ...") def f(self) -> None: self.x[0] = '' reveal_type(A().x) # N: Revealed type is 'builtins.dict[Any, Any]' reveal_type(A.x) # N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [case testLocalPartialTypesWithGlobalInitializedToEmptyList] # flags: --local-partial-types a = [] def f() -> None: a[0] reveal_type(a) # N: Revealed type is 'builtins.list[builtins.int]' a.append(1) reveal_type(a) # N: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testLocalPartialTypesWithGlobalInitializedToEmptyList2] # flags: --local-partial-types a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") def f() -> None: a.append(1) reveal_type(a) # N: Revealed type is 'builtins.list[Any]' reveal_type(a) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case testLocalPartialTypesWithGlobalInitializedToEmptyList3] # flags: --local-partial-types a = [] # E: Need type annotation for 'a' (hint: "a: List[] = ...") def f(): a.append(1) reveal_type(a) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case testLocalPartialTypesWithGlobalInitializedToEmptyDict] # flags: --local-partial-types a = {} def f() -> None: a[0] reveal_type(a) # N: Revealed type is 'builtins.dict[builtins.int, builtins.str]' a[0] = '' reveal_type(a) # N: Revealed type is 'builtins.dict[builtins.int, builtins.str]' [builtins fixtures/dict.pyi] [case testLocalPartialTypesWithGlobalInitializedToEmptyDict2] # flags: --local-partial-types a = {} # E: Need type annotation for 'a' (hint: "a: Dict[, ] = ...") def f() -> None: a[0] = '' reveal_type(a) # N: Revealed type is 'builtins.dict[Any, Any]' reveal_type(a) # N: Revealed type is 'builtins.dict[Any, Any]' 
[builtins fixtures/dict.pyi] [case testLocalPartialTypesWithGlobalInitializedToEmptyDict3] # flags: --local-partial-types a = {} # E: Need type annotation for 'a' (hint: "a: Dict[, ] = ...") def f(): a[0] = '' reveal_type(a) # N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [case testLocalPartialTypesWithNestedFunction] # flags: --local-partial-types def f() -> None: a = {} def g() -> None: a[0] = '' reveal_type(a) # N: Revealed type is 'builtins.dict[builtins.int, builtins.str]' [builtins fixtures/dict.pyi] [case testLocalPartialTypesWithNestedFunction2] # flags: --local-partial-types def f() -> None: a = [] def g() -> None: a.append(1) reveal_type(a) # N: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testLocalPartialTypesWithNestedFunction3] # flags: --local-partial-types --no-strict-optional def f() -> None: a = None def g() -> None: nonlocal a a = '' reveal_type(a) # N: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [case testLocalPartialTypesWithInheritance] # flags: --local-partial-types from typing import Optional class A: x: Optional[str] class B(A): x = None reveal_type(B.x) # N: Revealed type is 'None' [case testLocalPartialTypesWithInheritance2] # flags: --local-partial-types --strict-optional class A: x: str class B(A): x = None # E: Incompatible types in assignment (expression has type "None", base class "A" defined the type as "str") [case testLocalPartialTypesWithAnyBaseClass] # flags: --local-partial-types --strict-optional from typing import Any A: Any class B(A): x = None class C(B): y = None [case testLocalPartialTypesInMultipleMroItems] # flags: --local-partial-types --strict-optional from typing import Optional class A: x: Optional[str] class B(A): x = None class C(B): x = None # TODO: Inferring None below is unsafe (https://github.com/python/mypy/issues/3208) reveal_type(B.x) # N: Revealed type is 'None' reveal_type(C.x) # N: Revealed type is 'None' [case 
testLocalPartialTypesWithInheritance2] # flags: --local-partial-types from typing import Optional class X: pass class Y(X): pass class A: x: Optional[X] class B(A): x = None x = Y() reveal_type(B.x) # N: Revealed type is 'Union[__main__.Y, None]' [case testLocalPartialTypesBinderSpecialCase] # flags: --local-partial-types from typing import List def f(x): pass class A: x = None # E: Need type annotation for 'x' def f(self, p: List[str]) -> None: self.x = f(p) f(z for z in p) [builtins fixtures/list.pyi] [case testLocalPartialTypesAccessPartialNoneAttribute] # flags: --local-partial-types class C: a = None # E: Need type annotation for 'a' def f(self, x) -> None: C.a.y # E: Item "None" of "Optional[Any]" has no attribute "y" [case testLocalPartialTypesAccessPartialNoneAttribute] # flags: --local-partial-types class C: a = None # E: Need type annotation for 'a' def f(self, x) -> None: self.a.y # E: Item "None" of "Optional[Any]" has no attribute "y" -- Special case for assignment to '_' -- ---------------------------------- [case testUnusedTargetLocal] def foo() -> None: _ = 0 _ = '' [case testUnusedTargetNotGlobal] _ = 0 _ = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testUnusedTargetNotClass] # flags: --allow-redefinition class C: _, _ = 0, 0 _ = '' reveal_type(C._) # N: Revealed type is 'builtins.str' [case testUnusedTargetNotClass2] # flags: --disallow-redefinition class C: _, _ = 0, 0 _ = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") reveal_type(C._) # N: Revealed type is 'builtins.int' [case testUnusedTargetTupleUnpacking] def foo() -> None: _, _ = (0, '') _ = 0 _ = '' def bar() -> None: t = (0, '') _, _ = t _ = 0 _ = '' [case testUnusedTargetMultipleTargets] def foo() -> None: _ = x = 0 _ = y = '' _ = 0 _ = '' def bar() -> None: x = _ = 0 y = _ = '' _ = 0 _ = '' x + 0 y + '' x + '' # E: Unsupported operand types for + ("int" and "str") y + 0 # E: Unsupported 
operand types for + ("str" and "int") [case testUnusedTargetNotImport] import d, c, b, a [file _.py] def f(): pass [file m.py] def f(): pass _ = f _ = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [file a.py] def foo() -> None: import _ _.f() _ = 0 # E: Incompatible types in assignment (expression has type "int", variable has type Module) [file b.py] def foo() -> None: import m as _ _.f() _ = 0 # E: Incompatible types in assignment (expression has type "int", variable has type Module) [file c.py] def foo() -> None: from m import _ _() _ = '' # E: Incompatible types in assignment (expression has type "str", variable has type "Callable[[], Any]") [file d.py] def foo() -> None: from m import f as _ _() _ = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [builtins fixtures/module.pyi] [case testUnderscoreClass] def foo() -> None: class _: pass _().method() # E: "_" has no attribute "method" [case testUnusedTargetNotDef] def foo() -> None: def _() -> int: pass _() + '' # E: Unsupported operand types for + ("int" and "str") [case testUnusedTargetForLoop] def f() -> None: a = [(0, '', 0)] for _, _, x in a: x = 0 x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") _ = 0 _ = '' [builtins fixtures/list.pyi] [case testUnusedTargetWithClause] class C: def __enter__(self) -> int: pass def __exit__(self, *args): pass def f() -> None: with C() as _: pass _ = 0 _ = '' [case testUnusedTargetNotExceptClause] # Things don't work for except clauses. # This is due to the implementation, but it's just as well. 
def f() -> None: try: pass except BaseException as _: _ = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "BaseException") _ = '' # E: Incompatible types in assignment (expression has type "str", variable has type "BaseException") [builtins fixtures/exception.pyi] -- Tests for permissive toplevel checking -- -------------- [case testPermissiveAttributeOverride1] # flags: --allow-untyped-globals class A: x = None class B(A): x = 12 class C(A): x = '12' reveal_type(A.x) # N: Revealed type is 'Union[Any, None]' reveal_type(B.x) # N: Revealed type is 'builtins.int' reveal_type(C.x) # N: Revealed type is 'builtins.str' [case testPermissiveAttributeOverride2] # flags: --allow-untyped-globals class A: x = [] class B(A): x = [12] class C(A): x = ['12'] reveal_type(A.x) # N: Revealed type is 'builtins.list[Any]' reveal_type(B.x) # N: Revealed type is 'builtins.list[builtins.int]' reveal_type(C.x) # N: Revealed type is 'builtins.list[builtins.str]' [builtins fixtures/list.pyi] [case testPermissiveAttribute] # flags: --allow-untyped-globals class A: x = [] def f(self) -> None: reveal_type(self.x) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case testPermissiveGlobalContainer1] # flags: --allow-untyped-globals --local-partial-types import a [file b.py] x = [] y = {} def foo() -> None: reveal_type(x) # N: Revealed type is 'builtins.list[Any]' reveal_type(y) # N: Revealed type is 'builtins.dict[Any, Any]' [file a.py] from b import x, y reveal_type(x) # N: Revealed type is 'builtins.list[Any]' reveal_type(y) # N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [case testPermissiveGlobalContainer2] # flags: --allow-untyped-globals import a [file b.py] x = [] y = {} def foo() -> None: reveal_type(x) # N: Revealed type is 'builtins.list[Any]' reveal_type(y) # N: Revealed type is 'builtins.dict[Any, Any]' [file a.py] from b import x, y reveal_type(x) # N: Revealed type is 'builtins.list[Any]' 
reveal_type(y) # N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [case testPermissiveGlobalContainer3] # flags: --allow-untyped-globals --local-partial-types import a [file b.py] x = [] y = {} z = y [file a.py] from b import x, y reveal_type(x) # N: Revealed type is 'builtins.list[Any]' reveal_type(y) # N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [case testPermissiveGlobalContainer4] # flags: --allow-untyped-globals import a [file b.py] x = [] y = {} z = y [file a.py] from b import x, y reveal_type(x) # N: Revealed type is 'builtins.list[Any]' reveal_type(y) # N: Revealed type is 'builtins.dict[Any, Any]' [builtins fixtures/dict.pyi] [case testInheritedAttributeNoStrictOptional] # flags: --no-strict-optional class A: x: str class B(A): x = None x = '' reveal_type(x) # N: Revealed type is 'builtins.str' [case testIncompatibleInheritedAttributeNoStrictOptional] # flags: --no-strict-optional class A: x: str class B(A): x = None x = 2 # E: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "str") [case testInheritedAttributeStrictOptional] # flags: --strict-optional class A: x: str class B(A): x = None # E: Incompatible types in assignment (expression has type "None", base class "A" defined the type as "str") x = '' [case testNeedAnnotationForCallable] from typing import TypeVar, Optional, Callable T = TypeVar('T') def f(x: Optional[T] = None) -> Callable[..., T]: ... x = f() # E: Need type annotation for 'x' y = x [case testDontNeedAnnotationForCallable] from typing import TypeVar, Optional, Callable, NoReturn T = TypeVar('T') def f() -> Callable[..., NoReturn]: ... 
x = f() reveal_type(x) # N: Revealed type is 'def (*Any, **Any) -> ' [case testDeferralInNestedScopes] def g() -> None: def f() -> None: x + 'no way' # E: Unsupported operand types for + ("int" and "str") x = int() f() [case testDeferralOfMemberNested] from typing import Tuple def f() -> None: c: C t: Tuple[str, Tuple[str, str]] x, (y, c.a) = t # E: Incompatible types in assignment (expression has type "str", variable has type "int") class C: def __init__(self, a: int) -> None: self.a = a [case testUnionGenericWithBoundedVariable] from typing import Generic, TypeVar, Union T = TypeVar('T', bound=A) class Z(Generic[T]): def __init__(self, y: T) -> None: self.y = y class A: ... class B(A): ... F = TypeVar('F', bound=A) def q1(x: Union[F, Z[F]]) -> F: if isinstance(x, Z): return x.y else: return x def q2(x: Union[Z[F], F]) -> F: if isinstance(x, Z): return x.y else: return x b: B reveal_type(q1(b)) # N: Revealed type is '__main__.B*' reveal_type(q2(b)) # N: Revealed type is '__main__.B*' z: Z[B] reveal_type(q1(z)) # N: Revealed type is '__main__.B*' reveal_type(q2(z)) # N: Revealed type is '__main__.B*' reveal_type(q1(Z(b))) # N: Revealed type is '__main__.B*' reveal_type(q2(Z(b))) # N: Revealed type is '__main__.B*' [builtins fixtures/isinstancelist.pyi] [case testUnionInvariantSubClassAndCovariantBase] from typing import Union, Generic, TypeVar T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) class Cov(Generic[T_co]): ... class Inv(Cov[T]): ... X = Union[Cov[T], Inv[T]] def f(x: X[T]) -> T: ... x: Inv[int] reveal_type(f(x)) # N: Revealed type is 'builtins.int*' [case testOptionalTypeVarAgainstOptional] # flags: --strict-optional from typing import Optional, TypeVar, Iterable, Iterator, List _T = TypeVar('_T') def filter(__function: None, __iterable: Iterable[Optional[_T]]) -> List[_T]: ... 
x: Optional[str] y = filter(None, [x]) reveal_type(y) # N: Revealed type is 'builtins.list[builtins.str*]' [builtins fixtures/list.pyi] mypy-0.761/test-data/unit/check-inline-config.test0000644€tŠÔÚ€2›s®0000000620013576752246026260 0ustar jukkaDROPBOX\Domain Users00000000000000-- Checks for 'mypy: option' directives inside files [case testInlineSimple1] # mypy: disallow-any-generics, no-warn-no-return from typing import List def foo() -> List: # E: Missing type parameters for generic type "List" 20 [builtins fixtures/list.pyi] [case testInlineSimple2] # mypy: disallow-any-generics # mypy: no-warn-no-return from typing import List def foo() -> List: # E: Missing type parameters for generic type "List" 20 [builtins fixtures/list.pyi] [case testInlineSimple3] # mypy: disallow-any-generics=true, warn-no-return=0 from typing import List def foo() -> List: # E: Missing type parameters for generic type "List" 20 [builtins fixtures/list.pyi] [case testInlineSimple4] # mypy: disallow-any-generics = true, warn-no-return = 0 from typing import List def foo() -> List: # E: Missing type parameters for generic type "List" 20 [builtins fixtures/list.pyi] [case testInlineList] # mypy: disallow-any-generics,always-false="FOO,BAR" from typing import List def foo(FOO: bool, BAR: bool) -> List: # E: Missing type parameters for generic type "List" if FOO or BAR: 1+'lol' return [] [builtins fixtures/list.pyi] [case testInlineInvert1] # flags: --disallow-any-generics --allow-untyped-globals import a [file a.py] # mypy: allow-any-generics, disallow-untyped-globals x = [] # E: Need type annotation for 'x' (hint: "x: List[] = ...") from typing import List def foo() -> List: ... 
[builtins fixtures/list.pyi] [case testInlineInvert2] import a [file a.py] # mypy: no-always-true [out] tmp/a.py:1: error: Can not invert non-boolean key always_true [case testInlineIncremental1] import a [file a.py] # mypy: disallow-any-generics, no-warn-no-return from typing import List def foo() -> List: 20 [file a.py.2] # mypy: no-warn-no-return from typing import List def foo() -> List: 20 [file a.py.3] from typing import List def foo() -> List: 20 [out] tmp/a.py:4: error: Missing type parameters for generic type "List" [out2] [out3] tmp/a.py:2: error: Missing return statement [builtins fixtures/list.pyi] [case testInlineIncremental2] # flags2: --disallow-any-generics import a [file a.py] # mypy: no-warn-no-return from typing import List def foo() -> List: 20 [file b.py.2] # no changes to a.py, but flag change should cause recheck [out] [out2] tmp/a.py:4: error: Missing type parameters for generic type "List" [builtins fixtures/list.pyi] [case testInlineIncremental3] import a, b [file a.py] # mypy: no-warn-no-return def foo() -> int: 20 [file b.py] [file b.py.2] # no changes to a.py and we want to make sure it isn't rechecked [out] [out2] [rechecked b] [case testInlineError1] # mypy: invalid-whatever # mypy: no-warn-no-return; no-strict-optional # mypy: always-true=FOO,BAR # mypy: always-true="FOO,BAR [out] main:1: error: Unrecognized option: invalid_whatever = True main:2: error: Unrecognized option: no_warn_no_return; no_strict_optional = True main:3: error: Unrecognized option: bar = True main:4: error: Unterminated quote in configuration comment [case testInlineError2] # mypy: skip-file [out] main:1: error: Unrecognized option: skip_file = True mypy-0.761/test-data/unit/check-isinstance.test0000644€tŠÔÚ€2›s®0000016650313576752246025714 0ustar jukkaDROPBOX\Domain Users00000000000000[case testForcedAssignment] x = 1 # type: object y = 1 def f(): x, y # Prevent independent redefinition y = x # E: Incompatible types in assignment (expression has type "object", 
variable has type "int") x = 2 y = x [case testJoinAny] from typing import List, Any x = None # type: List[Any] def foo() -> List[int]: pass def bar() -> List[str]: pass if bool(): x = foo() else: x = bar() x * 2 [builtins fixtures/list.pyi] [case testGeneratorExpressionTypes] class A: y = 1 x = [A()] y = [x] z = [1,2] z = [a.y for b in y for a in b] [builtins fixtures/list.pyi] [case testIsinstanceNestedTuple] from typing import Union, List, Tuple, Dict def f(x: Union[int, str, List]) -> None: if isinstance(x, (str, (int,))): reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' x[1] # E: Value of type "Union[int, str]" is not indexable else: reveal_type(x) # N: Revealed type is 'builtins.list[Any]' x[1] reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' if isinstance(x, (str, (list,))): reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.list[Any]]' x[1] reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' [builtins fixtures/isinstancelist.pyi] [case testClassAttributeInitialization] class A: x = None # type: int def __init__(self) -> None: self.y = None # type: int z = self.x w = self.y [case testAssignmentSubtypes] from typing import Union def foo(x: Union[str, int]): if isinstance(x, int): x = 'a' x + 'a' z = x y = [x] y[0] + 'a' # TODO: should we allow these two lines? 
y + [1] # E: List item 0 has incompatible type "int"; expected "str" z = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") x = None # type: int y = [x] [builtins fixtures/isinstancelist.pyi] [case testFunctionDefaultArgs] class A: pass class B(A): y = 1 x = A() def foo(x: A = B()): x.y # E: "A" has no attribute "y" [builtins fixtures/isinstance.pyi] [case testIsinstanceFancyConditionals] class A: pass class B(A): y = 1 x = A() if isinstance(x, B): x.y while isinstance(x, B): x.y while isinstance(x, B): x.y x = B() [builtins fixtures/isinstance.pyi] [case testSubtypingWithAny] class A: y = 1 class B(A): z = 1 def foo(): pass x = A() if int(): x = B() x.z x = foo() x.z # E: "A" has no attribute "z" x.y [case testSingleMultiAssignment] x = 'a' (x,) = ('a',) [case testUnionMultiAssignment] from typing import Union x = None # type: Union[int, str] if int(): x = 1 x = 'a' x + 1 # E: Unsupported operand types for + ("str" and "int") x = 1 (x, y) = ('a', 1) x + 1 # E: Unsupported operand types for + ("str" and "int") [builtins fixtures/isinstancelist.pyi] [case testUnionIfZigzag] from typing import Union def f(x: Union[int, str]) -> None: if 1: # Without this, the assignment below could create a new variable "x" of type "int" x = 1 if x: x = 'a' x = 1 x + 1 x + 1 [builtins fixtures/isinstancelist.pyi] [case testTwoLoopsUnion] from typing import Union def foo() -> Union[int, str]: pass def bar() -> None: x = foo() if isinstance(x, int): return while bool(): x + 'a' while bool(): x = foo() if bool(): return x = 'a' x + 'a' [builtins fixtures/isinstancelist.pyi] [case testComplicatedBlocks] from typing import Union def foo() -> Union[int, str]: pass def bar() -> None: x = foo() if isinstance(x, int): return while bool(): x + 'a' while bool(): x = foo() if bool(): return x = 'a' x + 'a' x = foo() if isinstance(x, int): return while bool(): x + 'a' while bool(): x + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: Left 
operand is of type "Union[int, str]" x = foo() if bool(): continue x = 'a' x = 'a' x + 'a' [builtins fixtures/isinstancelist.pyi] [case testUnionTryExcept] class A: y = A() class B(A): z = 1 x = A() def f(): x # Prevent redefinition of x x = B() x.z try: x.z x = A() x = B() x.z except: pass x.z # E: "A" has no attribute "z" [case testUnionTryExcept2] class A: y = A() class B(A): z = 1 x = A() try: x.z # E: "A" has no attribute "z" x = A() x = B() x.z except: x.z # E: "A" has no attribute "z" x = B() x.z else: x = B() x.z [case testUnionTryExcept3] class A: y = A() class B(A): z = 1 x = A() def f(): x # Prevent redefinition of x x = B() try: raise BaseException() x = A() except: pass x.z x = B() try: x = A() raise BaseException() except: pass x.z # E: "A" has no attribute "z" x = B() try: pass except: x = A() raise BaseException() x.z try: x = A() except: pass x.z # E: "A" has no attribute "z" x = B() try: pass except: x = A() x.z # E: "A" has no attribute "z" [builtins fixtures/exception.pyi] [case testUnionTryExcept4] class A: pass class B(A): z = 1 x = A() while bool(): try: x.z # E: "A" has no attribute "z" x = A() except: x = B() else: x = B() x.z [builtins fixtures/exception.pyi] [case testUnionTryFinally] class A: pass class B(A): b = 1 x = A() def f(): x # Prevent redefinition x = B() try: x = A() x.b # E: "A" has no attribute "b" x = B() finally: x.b # E: "A" has no attribute "b" x.b [case testUnionTryFinally2] class A: pass class B(A): b = 1 x = A() def f(): x # Prevent redefinition x = B() try: x = A() x = B() except: pass finally: pass x.b # E: "A" has no attribute "b" [case testUnionTryFinally3] class A: pass class B(A): b = 1 x = A() def f(): x # Prevent redefinition x = B() try: x = A() x = B() except: pass finally: x = B() x.b [case testUnionTryFinally4] class A: pass class B(A): b = 1 while 2: x = A() def f(): x # Prevents redefinition x = B() try: x = A() x = B() except: pass finally: x.b # E: "A" has no attribute "b" if not isinstance(x, B): break 
x.b [builtins fixtures/isinstancelist.pyi] [case testUnionTryFinally5] class A: pass class B(A): b = 1 while 2: x = A() try: x = A() x = B() finally: x.b # E: "A" has no attribute "b" break x.b x.b [case testUnionTryFinally6] class A: pass class B(A): b = 1 def f() -> int: x = B() # type: A try: x = B() except: x = A() # An exception could occur here x = B() finally: return x.b # E: "A" has no attribute "b" [case testUnionListIsinstance] from typing import Union, List def f(x: Union[List[int], List[str], int]) -> None: if isinstance(x, list): a = x[0] if isinstance(a, int): a + 1 a + 'x' # E: Unsupported operand types for + ("int" and "str") # type of a? reveal_type(x) # N: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]' x + 1 # E: Unsupported operand types for + ("List[int]" and "int") \ # E: Unsupported operand types for + ("List[str]" and "int") \ # N: Left operand is of type "Union[List[int], List[str]]" else: x[0] # E: Value of type "int" is not indexable x + 1 x[0] # E: Value of type "Union[List[int], List[str], int]" is not indexable x + 1 # E: Unsupported operand types for + ("List[int]" and "int") \ # E: Unsupported operand types for + ("List[str]" and "int") \ # N: Left operand is of type "Union[List[int], List[str], int]" [builtins fixtures/isinstancelist.pyi] [case testUnionListIsinstance2] from typing import Union, List class A: a = 1 class B: pass class C: pass def g(x: Union[A, B]) -> A: pass def h(x: C) -> A: pass def f(x: Union[A, B, C]) -> None: if isinstance(x, C): x = h(x) else: x = g(x) x.a [builtins fixtures/isinstancelist.pyi] [case testUnionStrictDefnBasic] from typing import Union def foo() -> Union[int, str]: pass x = foo() if int(): x = 1 x = x + 1 x = foo() x = x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" if isinstance(x, str): x = x + 1 # E: Unsupported operand types for + ("str" and "int") x = 1 x = x + 1 [builtins 
fixtures/isinstancelist.pyi] [case testSubtypeRedefinitionBasic] from typing import Union class A: pass class B(A): y = 1 x = A() x.y # E: "A" has no attribute "y" x = B() x.y # OK: x is known to be a B [builtins fixtures/isinstancelist.pyi] [case testIsInstanceBasic] from typing import Union x = None # type: Union[int, str] if isinstance(x, str): x = x + 1 # E: Unsupported operand types for + ("str" and "int") x = x + 'a' else: x = x + 'a' # E: Unsupported operand types for + ("int" and "str") x = x + 1 [builtins fixtures/isinstancelist.pyi] [case testIsInstanceIndexing] from typing import Union x = None # type: Union[int, str] j = [x] if isinstance(j[0], str): j[0] = j[0] + 'a' j[0] = j[0] + 1 # E: Unsupported operand types for + ("str" and "int") else: j[0] = j[0] + 'a' # E: Unsupported operand types for + ("int" and "str") j[0] = j[0] + 1 [builtins fixtures/isinstancelist.pyi] [case testIsInstanceSubClassMember] from typing import Union class Animal: pass class Dog(Animal): paws = 4 # type: Union[int, str] def bark(self): pass class House: pet = None # type: Animal h = House() h.pet = Dog() while bool(): if isinstance(h.pet, Dog): if isinstance(h.pet.paws, str): x = h.pet.paws + 'a' y = h.pet.paws + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" z = h.pet.paws + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: Left operand is of type "Union[int, str]" if isinstance(h.pet.paws, str): x = h.pet.paws + 'a' break y = h.pet.paws + 1 z = h.pet.paws + 'a' # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/isinstancelist.pyi] [case testIsInstanceSubClassReset] class A: pass class B(A): b = 1 class C: a = A() x = C() x.a.b # E: "A" has no attribute "b" if isinstance(x.a, B): x.a.b x = C() x.a.b # E: "A" has no attribute "b" [builtins fixtures/isinstance.pyi] [case testIsinstanceTuple] from typing import Union class A: pass class B: def method2(self, arg: int): return 123 
class C: def method2(self, arg: int): return 456 def method3(self, arg: str): return 'abc' v = A() # type: Union[A, B, C] if isinstance(v, (B, C)): v.method2(123) v.method3('xyz') # E: Item "B" of "Union[B, C]" has no attribute "method3" [builtins fixtures/isinstance.pyi] [case testIsinstanceNeverWidens] from typing import Union class A: pass class B: pass class C: pass a = A() # type: A assert isinstance(a, (A, B)) reveal_type(a) # N: Revealed type is '__main__.A' b = A() # type: Union[A, B] assert isinstance(b, (A, B, C)) reveal_type(b) # N: Revealed type is 'Union[__main__.A, __main__.B]' [builtins fixtures/isinstance.pyi] [case testMemberAssignmentChanges] from typing import Union class Dog: paws = 1 # type: Union[int, str] pet = Dog() pet.paws + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: Left operand is of type "Union[int, str]" pet.paws = 'a' pet.paws + 'a' pet.paws = 1 pet.paws + 1 [builtins fixtures/isinstancelist.pyi] [case testIsInstanceSubClassMemberHard] from typing import Union class Animal: pass class Dog(Animal): paws = 4 # type: Union[int, str] def bark(self): pass class House: pet = None # type: Animal h = House() h.pet = Dog() if isinstance(h.pet, Dog): if isinstance(h.pet.paws, str): for i in [1]: # TODO: should we allow this if iterable is of length one or zero? 
h.pet.paws + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: Left operand is of type "Union[int, str]" if bool(): break h.pet.paws = 1 h.pet.paws + 1 if isinstance(h.pet.paws, str): h.pet.paws + 'a' else: h.pet.paws + 1 [builtins fixtures/isinstancelist.pyi] [case testIsInstanceReturn] from typing import Union def foo() -> None: x = 1 # type: Union[int, str] if isinstance(x, int): return y = x + 'asdad' def bar() -> None: x = 1 # type: Union[int, str] if isinstance(x, int): return else: pass y = x + 'asdad' foo() [builtins fixtures/isinstancelist.pyi] [case testIsInstanceBadBreak] from typing import Union def foo() -> None: x = None # type: Union[int, str] if isinstance(x, int): for z in [1,2]: break else: pass y = x + 'asdad' # E: Unsupported operand types for + ("int" and "str") \ # N: Left operand is of type "Union[int, str]" foo() [builtins fixtures/isinstancelist.pyi] [case testIsInstanceThreeUnion] from typing import Union, List x = None # type: Union[int, str, List[int]] while bool(): if isinstance(x, int): x + 1 elif isinstance(x, str): x + 'a' else: x + [1] x + 'a' # E: Unsupported operand types for + ("int" and "str") \ # E: Unsupported operand types for + ("List[int]" and "str") \ # N: Left operand is of type "Union[int, str, List[int]]" x + [1] # E: Unsupported operand types for + ("int" and "List[int]") \ # E: Unsupported operand types for + ("str" and "List[int]") \ # N: Left operand is of type "Union[int, str, List[int]]" [builtins fixtures/isinstancelist.pyi] [case testIsInstanceThreeUnion2] from typing import Union, List x = None # type: Union[int, str, List[int]] while bool(): if isinstance(x, int): x + 1 break elif isinstance(x, str): x + 'a' break x + [1] x + 'a' # E: Unsupported operand types for + ("List[int]" and "str") x + [1] # E: Unsupported operand types for + ("int" and "List[int]") \ # E: Unsupported operand types for + ("str" and "List[int]") \ # N: Left operand is of type "Union[int, str, List[int]]" [builtins 
fixtures/isinstancelist.pyi] [case testIsInstanceThreeUnion3] from typing import Union, List while bool(): x = None # type: Union[int, str, List[int]] def f(): x # Prevent redefinition x = 1 if isinstance(x, int): x + 1 break elif isinstance(x, str): x + 'a' break x + [1] # These lines aren't reached because x was an int x + 'a' x + [1] # E: Unsupported operand types for + ("int" and "List[int]") \ # E: Unsupported operand types for + ("str" and "List[int]") \ # N: Left operand is of type "Union[int, str, List[int]]" [builtins fixtures/isinstancelist.pyi] [case testRemovingTypeRepeatedly] from typing import Union def foo() -> Union[int, str]: pass for i in [1, 2]: x = foo() x + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: Left operand is of type "Union[int, str]" if isinstance(x, int): break x + 'a' x = foo() x + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: Left operand is of type "Union[int, str]" if isinstance(x, int): break x + 'a' x = foo() x + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: Left operand is of type "Union[int, str]" if isinstance(x, int): break x + 'a' x + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: Left operand is of type "Union[int, str]" [builtins fixtures/isinstancelist.pyi] [case testModifyRepeatedly] from typing import Union def foo() -> Union[int, str]: pass x = foo() def f(): x # Prevent redefinition x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" x + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: Left operand is of type "Union[int, str]" x = 1 x + 1 x + 'a' # E: Unsupported operand types for + ("int" and "str") x = 'a' x + 1 # E: Unsupported operand types for + ("str" and "int") x + 'a' x = foo() x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" x + 'a' # E: Unsupported operand types for + ("int" and "str") \ # N: 
Left operand is of type "Union[int, str]" [builtins fixtures/isinstancelist.pyi] [case testModifyLoop] from typing import Union def foo() -> Union[int, str]: pass x = foo() def f(): x # Prevent redefinition x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" x = 'a' x + 1 # E: Unsupported operand types for + ("str" and "int") x = 1 x + 1 while bool(): x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" x = 'a' [builtins fixtures/isinstancelist.pyi] [case testModifyLoop2] from typing import Union def foo() -> Union[int, str]: pass x = foo() def f(): x # Prevent redefinition x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" x = 'a' x + 1 # E: Unsupported operand types for + ("str" and "int") x = 1 x + 1 for i in [1]: x = 'a' x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" [builtins fixtures/isinstancelist.pyi] [case testModifyLoop3] from typing import Union def foo() -> Union[int, str]: pass x = foo() def f(): x # Prevent redefinition x = 1 while bool(): x + 1 x = 'a' break else: x + 1 x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" x = 1 for y in [1]: x + 1 x = 'a' break else: x + 1 x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" [builtins fixtures/isinstancelist.pyi] [case testModifyLoopWhile4] from typing import Union def foo() -> Union[int, str]: pass x = foo() def f(): x # Prevent redefinition x = 1 while bool(): x + 1 if bool(): x = 'a' break else: x + 1 x = 'a' x + 'a' x = 1 while bool(): x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" if bool(): x = 'a' continue else: x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left 
operand is of type "Union[int, str]" x = 'a' x + 'a' [builtins fixtures/isinstancelist.pyi] [case testModifyLoopFor4] from typing import Union def foo() -> Union[int, str]: pass x = foo() def f(): x # Prevent redefinition x = 1 for y in [1]: x + 1 if bool(): x = 'a' break else: x + 1 x = 'a' x + 'a' x = 1 for y in [1]: x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" if bool(): x = 'a' continue else: x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" x = 'a' x + 'a' [builtins fixtures/isinstancelist.pyi] [case testModifyNestedLoop] from typing import Union def foo() -> Union[int, str]: pass x = foo() def f(): x # Prevent redefinition x = 1 for y in [1]: for z in [1]: break else: x = 'a' break else: x + 1 x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" x = 1 while bool(): while bool(): break else: x = 'a' break else: x + 1 x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" [builtins fixtures/isinstancelist.pyi] [case testModifyLoopLong] from typing import Union class A: a = 1 def foo() -> Union[int, str, A]: pass def bar() -> None: x = foo() x + 1 # E: Unsupported left operand type for + ("A") \ # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str, A]" if isinstance(x, A): x.a else: if isinstance(x, int): x + 1 x + 'a' # E: Unsupported operand types for + ("int" and "str") else: x + 'a' x.a # E: "str" has no attribute "a" x = A() if isinstance(x, str): x + 'a' else: while bool(): if isinstance(x, int): x + 1 else: x.a break while bool(): if isinstance(x, int): x + 1 else: x.a continue while bool(): if isinstance(x, int): x + 1 else: x.a # E: Item "str" of "Union[str, A]" has no attribute "a" x = 'a' [builtins fixtures/isinstancelist.pyi] [case testWhileExitCondition1] from typing 
import Union x = 1 # type: Union[int, str] while isinstance(x, int): if bool(): continue x = 'a' else: reveal_type(x) # N: Revealed type is 'builtins.str' reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/isinstance.pyi] [case testWhileExitCondition2] from typing import Union x = 1 # type: Union[int, str] while isinstance(x, int): if bool(): break x = 'a' else: reveal_type(x) # N: Revealed type is 'builtins.str' reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/isinstance.pyi] [case testWhileLinkedList] from typing import Union LinkedList = Union['Cons', 'Nil'] class Nil: pass class Cons: tail = None # type: LinkedList def last(x: LinkedList) -> Nil: while isinstance(x, Cons): x = x.tail return x [builtins fixtures/isinstance.pyi] [case testReturnAndFlow] def foo() -> int: return 1 and 2 return 'a' [case testCastIsinstance] from typing import Union def foo() -> Union[int, str]: pass x = foo() y = 1 # type: int if isinstance(x, str): x = y x + 1 x + 'a' # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/isinstancelist.pyi] [case testUnreachableCode] x = 1 # type: int while bool(): x = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int") break x = 'a' # Note: no error because unreachable code [builtins fixtures/isinstancelist.pyi] [case testUnreachableCode2] x = 1 while bool(): try: pass except: continue else: continue x + 'a' [builtins fixtures/isinstance.pyi] [case testUnreachableWhileTrue] def f(x: int) -> None: while True: if x: return 1() [builtins fixtures/bool.pyi] [case testUnreachableAssertFalse] def f() -> None: assert False 1() [builtins fixtures/bool.pyi] [case testUnreachableAssertFalse2] def f() -> None: # The old parser doesn't understand the syntax below assert False, "hi" 1() [builtins fixtures/bool.pyi] [case testUnreachableReturnOrAssertFalse] def f(x: int) -> int: if x: return x else: assert False 1() [builtins 
fixtures/bool.pyi] [case testUnreachableTryExcept] def f() -> None: try: f() return except BaseException: return 1() [builtins fixtures/exception.pyi] [case testUnreachableTryExceptElse] def f() -> None: try: f() except BaseException: return else: return 1() [builtins fixtures/exception.pyi] [case testUnreachableTryReturnFinally1] def f() -> None: try: return finally: pass 1() [case testUnreachableTryReturnFinally2] def f() -> None: try: pass finally: return 1() [case testUnreachableTryReturnExceptRaise] def f() -> None: try: return except: raise 1() [case testUnreachableReturnLambda] from typing import Callable def g(t: Callable[[int], int]) -> int: pass def f() -> int: return g(lambda x: x) 1() [case testIsinstanceAnd] class A: pass class B(A): flag = 1 x = B() # type: A if isinstance(x, B) and 1: x.flag [builtins fixtures/isinstancelist.pyi] [case testIsinstanceShortcircuit] class A: pass class B(A): flag = 1 x = B() # type: A if isinstance(x, B) and x.flag: pass if isinstance(x, B) or x.flag: # E: "A" has no attribute "flag" pass if not isinstance(x, B) or x.flag: pass if not isinstance(x, B) and x.flag: # E: "A" has no attribute "flag" pass [builtins fixtures/isinstancelist.pyi] [case testIsinstanceExpression] class A: pass class B(A): flag = 1 x = B() # type: A x.flag if isinstance(x, B) else 0 0 if not isinstance(x, B) else x.flag 0 if isinstance(x, B) else x.flag # E: "A" has no attribute "flag" [builtins fixtures/isinstancelist.pyi] [case testIsinstanceMultiAnd] class A: pass class B(A): flag = 1 class C(A): glaf = 1 x = B() # type: A y = C() # type: A if isinstance(x, B) and isinstance(y, C): x.flag += 1 y.glaf += 1 x() # E: "B" not callable y() # E: "C" not callable else: x() # E: "A" not callable y() # E: "A" not callable [builtins fixtures/isinstancelist.pyi] [case testIsinstanceMultiAndSpecialCase] class A: pass class B(A): flag = 1 class C(A): glaf = 1 x = B() # type: A y = C() # type: A if isinstance(x, B) and isinstance(y, int): 1() # type checking 
skipped if isinstance(y, int) and isinstance(x, B): 1() # type checking skipped if isinstance(y, int) and y > 42: 1() # type checking skipped [builtins fixtures/isinstancelist.pyi] [case testReturnWithCallExprAndIsinstance] from typing import Union def f(x: Union[int, str]) -> None: if not isinstance(x, int): return foo() x() # E: "int" not callable def foo(): pass [builtins fixtures/isinstancelist.pyi] [case testIsinstanceOr1] from typing import Optional def f(a: bool, x: object) -> Optional[int]: if a or not isinstance(x, int): return None reveal_type(x) # N: Revealed type is 'builtins.int' return x [builtins fixtures/isinstance.pyi] [case testIsinstanceOr2] from typing import Optional def g(a: bool, x: object) -> Optional[int]: if not isinstance(x, int) or a: return None reveal_type(x) # N: Revealed type is 'builtins.int' return x [builtins fixtures/isinstance.pyi] [case testIsinstanceOr3] from typing import Optional def h(a: bool, x: object) -> Optional[int]: if a or isinstance(x, int): return None return x # E: Incompatible return value type (got "object", expected "Optional[int]") [builtins fixtures/isinstance.pyi] [case testIsinstanceWithOverlappingUnionType] from typing import Union def f(x: Union[float, int]) -> None: if isinstance(x, float): pass if not isinstance(x, int): f(x) [builtins fixtures/isinstance.pyi] [case testIsinstanceWithOverlappingUnionType2] from typing import Union class A: pass class B(A): pass def f(x: Union[A, B]) -> None: if isinstance(x, A): pass if not isinstance(x, B): f(x) [builtins fixtures/isinstance.pyi] [case testIsinstanceWithOverlappingPromotionTypes-skip] # Currently disabled: see https://github.com/python/mypy/issues/6060 for context from typing import Union class FloatLike: pass class IntLike(FloatLike): pass def f1(x: Union[float, int]) -> None: # We ignore promotions in isinstance checks if isinstance(x, float): reveal_type(x) # N: Revealed type is 'builtins.float' else: reveal_type(x) # N: Revealed type is 
'builtins.int' def f2(x: Union[FloatLike, IntLike]) -> None: # ...but not regular subtyping relationships if isinstance(x, FloatLike): reveal_type(x) # N: Revealed type is 'Union[__main__.FloatLike, __main__.IntLike]' [builtins fixtures/isinstance.pyi] [case testIsinstanceOfSuperclass] class A: pass class B(A): pass x = B() if isinstance(x, A): reveal_type(x) # N: Revealed type is '__main__.B' if not isinstance(x, A): reveal_type(x) # unreachable x = A() reveal_type(x) # N: Revealed type is '__main__.B' [builtins fixtures/isinstance.pyi] [case testIsinstanceOfNonoverlapping] class A: pass class B: pass x = B() if isinstance(x, A): reveal_type(x) # unreachable else: reveal_type(x) # N: Revealed type is '__main__.B' reveal_type(x) # N: Revealed type is '__main__.B' [builtins fixtures/isinstance.pyi] [case testAssertIsinstance] def f(x: object): assert isinstance(x, int) y = 0 # type: int y = x [builtins fixtures/isinstance.pyi] [case testUnionAssertIsinstance] from typing import Union def f(x: Union[str, int]): assert isinstance(x, int) y = 0 # type: int y = x [builtins fixtures/isinstance.pyi] [case testAnyAssertIsinstance] from typing import Any def f(x: Any): assert isinstance(x, int) # this should narrow x to type int x + "foo" # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/isinstance.pyi] [case testIsinstanceOfGenericClassRetainsParameters] from typing import List, Union def f(x: Union[List[int], str]) -> None: if isinstance(x, list): x[0]() # E: "int" not callable else: reveal_type(x) # N: Revealed type is 'builtins.str' reveal_type(x) # N: Revealed type is 'Union[builtins.list[builtins.int], builtins.str]' [builtins fixtures/isinstancelist.pyi] [case testIsinstanceOrIsinstance] class A: pass class B(A): flag = 1 class C(A): flag = 2 x1 = A() if isinstance(x1, B) or isinstance(x1, C): reveal_type(x1) # N: Revealed type is 'Union[__main__.B, __main__.C]' f = x1.flag # type: int else: reveal_type(x1) # N: Revealed type is '__main__.A' f 
= 0 reveal_type(x1) # N: Revealed type is '__main__.A' x2 = A() if isinstance(x2, A) or isinstance(x2, C): reveal_type(x2) # N: Revealed type is '__main__.A' f = x2.flag # E: "A" has no attribute "flag" else: # unreachable 1() reveal_type(x2) # N: Revealed type is '__main__.A' [builtins fixtures/isinstance.pyi] [case testComprehensionIsInstance] from typing import List, Union a = [] # type: List[Union[int, str]] l = [x for x in a if isinstance(x, int)] g = (x for x in a if isinstance(x, int)) d = {0: x for x in a if isinstance(x, int)} reveal_type(l) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(g) # N: Revealed type is 'typing.Generator[builtins.int*, None, None]' reveal_type(d) # N: Revealed type is 'builtins.dict[builtins.int*, builtins.int*]' [builtins fixtures/isinstancelist.pyi] [case testIsinstanceInWrongOrderInBooleanOp] class A: m = 1 def f(x: object) -> None: if x.m and isinstance(x, A) or False: # E: "object" has no attribute "m" pass [builtins fixtures/isinstance.pyi] [case testIsinstanceAndOr] class A: a = None # type: A def f(x: object) -> None: b = isinstance(x, A) and x.a or A() reveal_type(b) # N: Revealed type is '__main__.A' [builtins fixtures/isinstance.pyi] [case testIsInstanceWithUnknownType] from typing import Union def f(x: Union[int, str], typ: type) -> None: if isinstance(x, (typ, int)): x + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' else: reveal_type(x) # N: Revealed type is 'builtins.str' reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceWithBoundedType] from typing import Union, Type class A: pass def f(x: Union[int, A], a: Type[A]) -> None: if isinstance(x, (a, int)): reveal_type(x) # N: Revealed type is 'Union[builtins.int, __main__.A]' else: reveal_type(x) # N: Revealed type is '__main__.A' 
reveal_type(x) # N: Revealed type is 'Union[builtins.int, __main__.A]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceWithEmtpy2ndArg] from typing import Union def f(x: Union[int, str]) -> None: if isinstance(x, ()): reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' else: reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceWithTypeObject] from typing import Union, Type class A: pass def f(x: Union[int, A], a: Type[A]) -> None: if isinstance(x, a): reveal_type(x) # N: Revealed type is '__main__.A' elif isinstance(x, int): reveal_type(x) # N: Revealed type is 'builtins.int' else: reveal_type(x) # N: Revealed type is '__main__.A' reveal_type(x) # N: Revealed type is 'Union[builtins.int, __main__.A]' [builtins fixtures/isinstancelist.pyi] [case testIssubclassUnreachable] from typing import Type, Sequence, Union x: Type[str] if issubclass(x, int): reveal_type(x) # unreachable block class X: pass class Y(X): pass class Z(X): pass a: Union[Type[Y], Type[Z]] if issubclass(a, X): reveal_type(a) # N: Revealed type is 'Union[Type[__main__.Y], Type[__main__.Z]]' else: reveal_type(a) # unreachable block [builtins fixtures/isinstancelist.pyi] [case testIssubclasDestructuringUnions1] from typing import Union, List, Tuple, Dict, Type def f(x: Union[Type[int], Type[str], Type[List]]) -> None: if issubclass(x, (str, (int,))): reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str]' x()[1] # E: Value of type "Union[int, str]" is not indexable else: reveal_type(x) # N: Revealed type is 'Type[builtins.list[Any]]' reveal_type(x()) # N: Revealed type is 'builtins.list[Any]' x()[1] reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str, 
builtins.list[Any]]' if issubclass(x, (str, (list,))): reveal_type(x) # N: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.str, builtins.list[Any]]' x()[1] reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' [builtins fixtures/isinstancelist.pyi] [case testIssubclasDestructuringUnions2] from typing import Union, List, Tuple, Dict, Type def f(x: Type[Union[int, str, List]]) -> None: if issubclass(x, (str, (int,))): reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str]' x()[1] # E: Value of type "Union[int, str]" is not indexable else: reveal_type(x) # N: Revealed type is 'Type[builtins.list[Any]]' reveal_type(x()) # N: Revealed type is 'builtins.list[Any]' x()[1] reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' if issubclass(x, (str, (list,))): reveal_type(x) # N: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.str, builtins.list[Any]]' x()[1] reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' [builtins fixtures/isinstancelist.pyi] [case testIssubclasDestructuringUnions3] from typing import Union, List, Tuple, Dict, Type def f(x: Type[Union[int, str, List]]) -> None: reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str, 
builtins.list[Any]]' if issubclass(x, (str, (int,))): reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str]' x()[1] # E: Value of type "Union[int, str]" is not indexable else: reveal_type(x) # N: Revealed type is 'Type[builtins.list[Any]]' reveal_type(x()) # N: Revealed type is 'builtins.list[Any]' x()[1] reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' if issubclass(x, (str, (list,))): reveal_type(x) # N: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.str, builtins.list[Any]]' x()[1] reveal_type(x) # N: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # N: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' [builtins fixtures/isinstancelist.pyi] [case testIssubclass] from typing import Type, ClassVar class Goblin: level: int class GoblinAmbusher(Goblin): job: ClassVar[str] = 'Ranger' def test_issubclass(cls: Type[Goblin]) -> None: if issubclass(cls, GoblinAmbusher): reveal_type(cls) # N: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.level cls.job ga = cls() ga.level = 15 ga.job ga.job = "Warrior" # E: Cannot assign to class variable "job" via instance else: reveal_type(cls) # N: Revealed type is 'Type[__main__.Goblin]' cls.level cls.job # E: "Type[Goblin]" has no attribute "job" g = cls() g.level = 15 g.job # E: "Goblin" has no attribute "job" [builtins fixtures/isinstancelist.pyi] [case testIssubclassDeepHierarchy] from typing import Type, ClassVar class Mob: pass class Goblin(Mob): level: int class GoblinAmbusher(Goblin): job: ClassVar[str] = 'Ranger' def test_issubclass(cls: Type[Mob]) -> None: if issubclass(cls, Goblin): reveal_type(cls) # 
N: Revealed type is 'Type[__main__.Goblin]' cls.level cls.job # E: "Type[Goblin]" has no attribute "job" g = cls() g.level = 15 g.job # E: "Goblin" has no attribute "job" if issubclass(cls, GoblinAmbusher): reveal_type(cls) # N: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.level cls.job g = cls() g.level = 15 g.job g.job = 'Warrior' # E: Cannot assign to class variable "job" via instance else: reveal_type(cls) # N: Revealed type is 'Type[__main__.Mob]' cls.job # E: "Type[Mob]" has no attribute "job" cls.level # E: "Type[Mob]" has no attribute "level" m = cls() m.level = 15 # E: "Mob" has no attribute "level" m.job # E: "Mob" has no attribute "job" if issubclass(cls, GoblinAmbusher): reveal_type(cls) # N: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.job cls.level ga = cls() ga.level = 15 ga.job ga.job = 'Warrior' # E: Cannot assign to class variable "job" via instance if issubclass(cls, GoblinAmbusher): reveal_type(cls) # N: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.level cls.job ga = cls() ga.level = 15 ga.job ga.job = "Warrior" # E: Cannot assign to class variable "job" via instance [builtins fixtures/isinstancelist.pyi] [case testIssubclassTuple] from typing import Type, ClassVar class Mob: pass class Goblin(Mob): level: int class GoblinAmbusher(Goblin): job: ClassVar[str] = 'Ranger' class GoblinDigger(Goblin): job: ClassVar[str] = 'Thief' def test_issubclass(cls: Type[Mob]) -> None: if issubclass(cls, (Goblin, GoblinAmbusher)): reveal_type(cls) # N: Revealed type is 'Type[__main__.Goblin]' cls.level cls.job # E: "Type[Goblin]" has no attribute "job" g = cls() g.level = 15 g.job # E: "Goblin" has no attribute "job" if issubclass(cls, GoblinAmbusher): cls.level reveal_type(cls) # N: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.job ga = cls() ga.level = 15 ga.job ga.job = "Warrior" # E: Cannot assign to class variable "job" via instance else: reveal_type(cls) # N: Revealed type is 'Type[__main__.Mob]' cls.job # E: "Type[Mob]" has 
no attribute "job" cls.level # E: "Type[Mob]" has no attribute "level" m = cls() m.level = 15 # E: "Mob" has no attribute "level" m.job # E: "Mob" has no attribute "job" if issubclass(cls, GoblinAmbusher): reveal_type(cls) # N: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.job cls.level ga = cls() ga.level = 15 ga.job ga.job = "Warrior" # E: Cannot assign to class variable "job" via instance if issubclass(cls, (GoblinDigger, GoblinAmbusher)): reveal_type(cls) # N: Revealed type is 'Union[Type[__main__.GoblinDigger], Type[__main__.GoblinAmbusher]]' cls.level cls.job g = cls() g.level = 15 g.job g.job = "Warrior" # E: Cannot assign to class variable "job" via instance [builtins fixtures/isinstancelist.pyi] [case testIssubclassBuiltins] from typing import List, Type class MyList(List): pass class MyIntList(List[int]): pass def f(cls: Type[object]) -> None: if issubclass(cls, MyList): reveal_type(cls) # N: Revealed type is 'Type[__main__.MyList]' cls()[0] else: reveal_type(cls) # N: Revealed type is 'Type[builtins.object]' cls()[0] # E: Value of type "object" is not indexable if issubclass(cls, MyIntList): reveal_type(cls) # N: Revealed type is 'Type[__main__.MyIntList]' cls()[0] + 1 [builtins fixtures/isinstancelist.pyi] [case testIsinstanceTypeArgs] from typing import Iterable, TypeVar x = 1 T = TypeVar('T') isinstance(x, Iterable) isinstance(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, (str, Iterable[int]))) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstancelist.pyi] [typing fixtures/typing-full.pyi] [case testIsinstanceAnyAlias] from typing import Any A = Any isinstance(object(), A) # E: Cannot use isinstance() with Any type [builtins 
fixtures/isinstance.pyi] [case testIsinstanceTypeArgsAliases] from typing import Iterable, TypeVar x = 1 T = TypeVar('T') It = Iterable It2 = Iterable[T] isinstance(x, It[int]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, It) isinstance(x, It2[int]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, It2) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testIssubclassTypeArgs] from typing import Iterable, TypeVar x = int T = TypeVar('T') issubclass(x, Iterable) issubclass(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks issubclass(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks issubclass(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testIsinstanceAndNarrowTypeVariable] from typing import TypeVar class A: pass class B(A): pass T = TypeVar('T', bound=A) def f(x: T) -> None: if isinstance(x, B): reveal_type(x) # N: Revealed type is '__main__.B' else: reveal_type(x) # N: Revealed type is 'T`-1' reveal_type(x) # N: Revealed type is 'T`-1' [builtins fixtures/isinstance.pyi] [case testIsinstanceAndTypeType] from typing import Type def f(x: Type[int]) -> None: if isinstance(x, type): reveal_type(x) # N: Revealed type is 'Type[builtins.int]' else: reveal_type(x) # Unreachable reveal_type(x) # N: Revealed type is 'Type[builtins.int]' [builtins fixtures/isinstance.pyi] [case testIsinstanceVariableSubstitution] T = (int, str) U = (list, T) x: object = None if isinstance(x, T): reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' if isinstance(x, U): reveal_type(x) # N: Revealed type is 'Union[builtins.list[Any], builtins.int, builtins.str]' if isinstance(x, (set, (list, T))): 
reveal_type(x) # N: Revealed type is 'Union[builtins.set[Any], builtins.list[Any], builtins.int, builtins.str]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceTooFewArgs] isinstance() # E: Too few arguments for "isinstance" x: object if isinstance(): # E: Too few arguments for "isinstance" x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' if isinstance(x): # E: Too few arguments for "isinstance" x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' [builtins fixtures/isinstancelist.pyi] [case testIsSubclassTooFewArgs] from typing import Type issubclass() # E: Too few arguments for "issubclass" y: Type[object] if issubclass(): # E: Too few arguments for "issubclass" reveal_type(y) # N: Revealed type is 'Type[builtins.object]' if issubclass(y): # E: Too few arguments for "issubclass" reveal_type(y) # N: Revealed type is 'Type[builtins.object]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceTooManyArgs] isinstance(1, 1, 1) # E: Too many arguments for "isinstance" \ # E: Argument 2 to "isinstance" has incompatible type "int"; expected "Union[type, Tuple[Any, ...]]" x: object if isinstance(x, str, 1): # E: Too many arguments for "isinstance" reveal_type(x) # N: Revealed type is 'builtins.object' x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' [builtins fixtures/isinstancelist.pyi] [case testIsinstanceNarrowAny] from typing import Any def narrow_any_to_str_then_reassign_to_int() -> None: v = 1 # type: Any if isinstance(v, str): reveal_type(v) # N: Revealed type is 'builtins.str' v = 2 reveal_type(v) # N: Revealed type is 'Any' [builtins fixtures/isinstance.pyi] [case testNarrowTypeAfterInList] # flags: --strict-optional from typing import List, Optional x: List[int] y: Optional[int] if y in x: reveal_type(y) # N: Revealed type is 'builtins.int' else: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' if y not in x: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: 
Revealed type is 'builtins.int' [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInListOfOptional] # flags: --strict-optional from typing import List, Optional x: List[Optional[int]] y: Optional[int] if y not in x: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInListNonOverlapping] # flags: --strict-optional from typing import List, Optional x: List[str] y: Optional[int] if y in x: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInListNested] # flags: --strict-optional from typing import List, Optional, Any x: Optional[int] lst: Optional[List[int]] nested_any: List[List[Any]] if lst in nested_any: reveal_type(lst) # N: Revealed type is 'builtins.list[builtins.int]' if x in nested_any: reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInTuple] # flags: --strict-optional from typing import Optional class A: pass class B(A): pass class C(A): pass y: Optional[B] if y in (B(), C()): reveal_type(y) # N: Revealed type is '__main__.B' else: reveal_type(y) # N: Revealed type is 'Union[__main__.B, None]' [builtins fixtures/tuple.pyi] [out] [case testNarrowTypeAfterInNamedTuple] # flags: --strict-optional from typing import NamedTuple, Optional class NT(NamedTuple): x: int y: int nt: NT y: Optional[int] if y not in nt: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: Revealed type is 'builtins.int' [builtins fixtures/tuple.pyi] [out] [case testNarrowTypeAfterInDict] # flags: --strict-optional from typing import Dict, Optional x: Dict[str, int] y: Optional[str] if y in x: reveal_type(y) # N: Revealed type is 'builtins.str' else: reveal_type(y) # N: Revealed 
type is 'Union[builtins.str, None]' if y not in x: reveal_type(y) # N: Revealed type is 'Union[builtins.str, None]' else: reveal_type(y) # N: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [out] [case testNarrowTypeAfterInList_python2] # flags: --strict-optional from typing import List, Optional x = [] # type: List[int] y = None # type: Optional[int] # TODO: Fix running tests on Python 2: "Iterator[int]" has no attribute "next" if y in x: # type: ignore reveal_type(y) # N: Revealed type is 'builtins.int' else: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' if y not in x: # type: ignore reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: Revealed type is 'builtins.int' [builtins_py2 fixtures/python2.pyi] [out] [case testNarrowTypeAfterInNoAnyOrObject] # flags: --strict-optional from typing import Any, List, Optional x: List[Any] z: List[object] y: Optional[int] if y in x: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' if y not in z: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' [typing fixtures/typing-full.pyi] [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInUserDefined] # flags: --strict-optional from typing import Container, Optional class C(Container[int]): def __contains__(self, item: object) -> bool: return item is 'surprise' y: Optional[int] # We never trust user defined types if y in C(): reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' if y not in C(): reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' [typing fixtures/typing-full.pyi] [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInSet] # flags: --strict-optional 
from typing import Optional, Set s: Set[str] y: Optional[str] if y in {'a', 'b', 'c'}: reveal_type(y) # N: Revealed type is 'builtins.str' else: reveal_type(y) # N: Revealed type is 'Union[builtins.str, None]' if y not in s: reveal_type(y) # N: Revealed type is 'Union[builtins.str, None]' else: reveal_type(y) # N: Revealed type is 'builtins.str' [builtins fixtures/set.pyi] [out] [case testNarrowTypeAfterInTypedDict] # flags: --strict-optional from typing import Optional from mypy_extensions import TypedDict class TD(TypedDict): a: int b: str td: TD def f() -> None: x: Optional[str] if x not in td: return reveal_type(x) # N: Revealed type is 'builtins.str' [typing fixtures/typing-full.pyi] [builtins fixtures/dict.pyi] [out] [case testIsinstanceWidensWithAnyArg] from typing import Any class A: ... B: Any x: A x.foo() # E: "A" has no attribute "foo" assert isinstance(x, B) x.foo() reveal_type(x) # N: Revealed type is 'Any' [builtins fixtures/isinstance.pyi] [case testIsinstanceWidensUnionWithAnyArg] from typing import Any, Union class A: ... 
B: Any x: Union[A, B] reveal_type(x) # N: Revealed type is 'Union[__main__.A, Any]' assert isinstance(x, B) reveal_type(x) # N: Revealed type is 'Any' [builtins fixtures/isinstance.pyi] [case testIsinstanceIgnoredImport] from typing import Union from foo import A # type: ignore def f(x: Union[A, str]) -> None: x.method_only_in_a() # E: Item "str" of "Union[Any, str]" has no attribute "method_only_in_a" if isinstance(x, A): x.method_only_in_a() [builtins fixtures/isinstance.pyi] [case testIsinstanceIgnoredImportDualAny] from typing import Any from foo import Bad, OtherBad # type: ignore x: Any if isinstance(x, Bad): reveal_type(x) # N: Revealed type is 'Any' else: reveal_type(x) # N: Revealed type is 'Any' if isinstance(x, (Bad, OtherBad)): reveal_type(x) # N: Revealed type is 'Any' else: reveal_type(x) # N: Revealed type is 'Any' y: object if isinstance(y, Bad): reveal_type(y) # N: Revealed type is 'Any' else: reveal_type(y) # N: Revealed type is 'builtins.object' class Ok: pass z: Any if isinstance(z, Ok): reveal_type(z) # N: Revealed type is '__main__.Ok' else: reveal_type(z) # N: Revealed type is 'Any' [builtins fixtures/isinstance.pyi] [case testIsInstanceInitialNoneCheckSkipsImpossibleCasesNoStrictOptional] # flags: --strict-optional from typing import Optional, Union class A: pass def foo1(x: Union[A, str, None]) -> None: if x is None: reveal_type(x) # N: Revealed type is 'None' elif isinstance(x, A): reveal_type(x) # N: Revealed type is '__main__.A' else: reveal_type(x) # N: Revealed type is 'builtins.str' def foo2(x: Optional[str]) -> None: if x is None: reveal_type(x) # N: Revealed type is 'None' elif isinstance(x, A): reveal_type(x) else: reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/isinstance.pyi] [case testIsInstanceInitialNoneCheckSkipsImpossibleCasesInNoStrictOptional] # flags: --no-strict-optional from typing import Optional, Union class A: pass def foo1(x: Union[A, str, None]) -> None: if x is None: reveal_type(x) # N: 
Revealed type is 'None' elif isinstance(x, A): # Note that Union[None, A] == A in no-strict-optional reveal_type(x) # N: Revealed type is '__main__.A' else: reveal_type(x) # N: Revealed type is 'builtins.str' def foo2(x: Optional[str]) -> None: if x is None: reveal_type(x) # N: Revealed type is 'None' elif isinstance(x, A): # Mypy should, however, be able to skip impossible cases reveal_type(x) else: reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/isinstance.pyi] [case testNoneCheckDoesNotNarrowWhenUsingTypeVars] # flags: --strict-optional # Note: this test (and the following one) are testing checker.conditional_type_map: # if you set the 'prohibit_none_typevar_overlap' keyword argument to False when calling # 'is_overlapping_types', the binder will incorrectly infer that 'out' has a type of # Union[T, None] after the if statement. from typing import TypeVar T = TypeVar('T') def foo(x: T) -> T: out = None out = x if out is None: pass return out [builtins fixtures/isinstance.pyi] [case testNoneCheckDoesNotNarrowWhenUsingTypeVarsNoStrictOptional] # flags: --no-strict-optional from typing import TypeVar T = TypeVar('T') def foo(x: T) -> T: out = None out = x if out is None: pass return out [builtins fixtures/isinstance.pyi] [case testNoneAndGenericTypesOverlapNoStrictOptional] # flags: --no-strict-optional from typing import Union, Optional, List # Note: this test is indirectly making sure meet.is_overlapping_types # correctly ignores 'None' in unions. 
def foo(x: Optional[List[str]]) -> None: reveal_type(x) # N: Revealed type is 'Union[builtins.list[builtins.str], None]' assert isinstance(x, list) reveal_type(x) # N: Revealed type is 'builtins.list[builtins.str]' def bar(x: Union[List[str], List[int], None]) -> None: reveal_type(x) # N: Revealed type is 'Union[builtins.list[builtins.str], builtins.list[builtins.int], None]' assert isinstance(x, list) reveal_type(x) # N: Revealed type is 'Union[builtins.list[builtins.str], builtins.list[builtins.int]]' [builtins fixtures/isinstancelist.pyi] [case testNoneAndGenericTypesOverlapStrictOptional] # flags: --strict-optional from typing import Union, Optional, List # This test is the same as the one above, except for strict-optional. # It isn't testing anything explicitly and mostly exists for the sake # of completeness. def foo(x: Optional[List[str]]) -> None: reveal_type(x) # N: Revealed type is 'Union[builtins.list[builtins.str], None]' assert isinstance(x, list) reveal_type(x) # N: Revealed type is 'builtins.list[builtins.str]' def bar(x: Union[List[str], List[int], None]) -> None: reveal_type(x) # N: Revealed type is 'Union[builtins.list[builtins.str], builtins.list[builtins.int], None]' assert isinstance(x, list) reveal_type(x) # N: Revealed type is 'Union[builtins.list[builtins.str], builtins.list[builtins.int]]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceWithStarExpression] from typing import Union, List, Tuple def f(var: Union[List[str], Tuple[str, str], str]) -> None: reveal_type(var) # N: Revealed type is 'Union[builtins.list[builtins.str], Tuple[builtins.str, builtins.str], builtins.str]' if isinstance(var, (list, *(str, int))): reveal_type(var) # N: Revealed type is 'Union[builtins.list[builtins.str], builtins.str]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceWithStarExpressionAndVariable] from typing import Union def f(var: Union[int, str]) -> None: reveal_type(var) # N: Revealed type is 'Union[builtins.int, builtins.str]' 
some_types = (str, tuple) another_type = list if isinstance(var, (*some_types, another_type)): reveal_type(var) # N: Revealed type is 'builtins.str' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceWithWrongStarExpression] var = 'some string' if isinstance(var, *(str, int)): # E: Too many arguments for "isinstance" pass [builtins fixtures/isinstancelist.pyi] mypy-0.761/test-data/unit/check-kwargs.test0000644€tŠÔÚ€2›s®0000003432013576752246025041 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for keyword arguments. [case testTypeErrorInKeywordArgument] import typing def f(o: object) -> None: pass f(o=None()) # E: "None" not callable [case testSimpleKeywordArgument] import typing def f(a: 'A') -> None: pass f(a=A()) f(a=object()) # E: Argument "a" to "f" has incompatible type "object"; expected "A" class A: pass [case testTwoKeywordArgumentsNotInOrder] import typing def f(a: 'A', b: 'B') -> None: pass f(b=A(), a=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "B" f(b=B(), a=B()) # E: Argument "a" to "f" has incompatible type "B"; expected "A" f(a=A(), b=B()) f(b=B(), a=A()) class A: pass class B: pass [case testOneOfSeveralOptionalKeywordArguments] import typing def f(a: 'A' = None, b: 'B' = None, c: 'C' = None) -> None: pass f(a=A()) f(b=B()) f(c=C()) f(b=B(), c=C()) f(a=B()) # E: Argument "a" to "f" has incompatible type "B"; expected "Optional[A]" f(b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" f(c=B()) # E: Argument "c" to "f" has incompatible type "B"; expected "Optional[C]" f(b=B(), c=A()) # E: Argument "c" to "f" has incompatible type "A"; expected "Optional[C]" class A: pass class B: pass class C: pass [case testBothPositionalAndKeywordArguments] import typing def f(a: 'A', b: 'B') -> None: pass f(A(), b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "B" f(A(), b=B()) class A: pass class B: pass [case testContextSensitiveTypeInferenceForKeywordArg] from typing 
import List def f(a: 'A', b: 'List[A]') -> None: pass f(b=[], a=A()) class A: pass [builtins fixtures/list.pyi] [case testGivingSameKeywordArgumentTwice] import typing def f(a: 'A', b: 'B') -> None: pass f(a=A(), b=B(), a=A()) # E: keyword argument repeated class A: pass class B: pass [case testGivingArgumentAsPositionalAndKeywordArg] import typing def f(a: 'A', b: 'B' = None) -> None: pass f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass [case testGivingArgumentAsPositionalAndKeywordArg2] import typing def f(a: 'A' = None, b: 'B' = None) -> None: pass f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass [case testPositionalAndKeywordForSameArg] # This used to crash in check_argument_count(). See #1095. def f(a: int): pass def g(): f(0, a=1) [out] [case testInvalidKeywordArgument] import typing def f(a: 'A') -> None: pass # N: "f" defined here f(b=object()) # E: Unexpected keyword argument "b" for "f" class A: pass [case testKeywordMisspelling] def f(other: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass [case testMultipleKeywordsForMisspelling] def f(thing : 'A', other: 'A', atter: 'A', btter: 'B') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other" or "atter"? class A: pass class B: pass [case testKeywordMisspellingDifferentType] def f(other: 'A') -> None: pass # N: "f" defined here f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass class B: pass [case testKeywordMisspellingInheritance] def f(atter: 'A', btter: 'B', ctter: 'C') -> None: pass # N: "f" defined here f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "btter" or "atter"? 
class A: pass class B(A): pass class C: pass [case testKeywordMisspellingFloatInt] def f(atter: float, btter: int) -> None: pass # N: "f" defined here x: int = 5 f(otter=x) # E: Unexpected keyword argument "otter" for "f"; did you mean "btter" or "atter"? [case testKeywordMisspellingVarArgs] def f(other: 'A', *atter: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass [case testKeywordMisspellingOnlyVarArgs] def f(*other: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f" class A: pass [case testKeywordMisspellingVarArgsDifferentTypes] def f(other: 'B', *atter: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass class B: pass [case testKeywordMisspellingVarKwargs] def f(other: 'A', **atter: 'A') -> None: pass f(otter=A()) # E: Missing positional argument "other" in call to "f" class A: pass [builtins fixtures/dict.pyi] [case testKeywordArgumentsWithDynamicallyTypedCallable] from typing import Any f = None # type: Any f(x=f(), z=None()) # E: "None" not callable f(f, zz=None()) # E: "None" not callable f(x=None) [case testKeywordArgumentWithFunctionObject] from typing import Callable f = None # type: Callable[[A, B], None] f(a=A(), b=B()) f(A(), b=B()) class A: pass class B: pass [out] main:3: error: Unexpected keyword argument "a" main:3: error: Unexpected keyword argument "b" main:4: error: Unexpected keyword argument "b" [case testKeywordOnlyArguments] import typing def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass f(A(), b=B()) f(b=B(), a=A()) f(A()) f(A(), B()) # E: Too many positional arguments for "f" g(A(), b=B()) g(b=B(), a=A()) g(A()) # E: Missing named argument "b" for "g" g(A(), B()) # E: Too 
many positional arguments for "g" h(A()) # E: Missing named argument "b" for "h" # E: Missing named argument "aa" for "h" h(A(), b=B()) # E: Missing named argument "aa" for "h" h(A(), aa=A()) # E: Missing named argument "b" for "h" h(A(), b=B(), aa=A()) h(A(), aa=A(), b=B()) i(A()) # E: Missing named argument "b" for "i" i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) class A: pass class B: pass [case testKeywordOnlyArgumentsFastparse] import typing def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass f(A(), b=B()) f(b=B(), a=A()) f(A()) f(A(), B()) # E: Too many positional arguments for "f" g(A(), b=B()) g(b=B(), a=A()) g(A()) # E: Missing named argument "b" for "g" g(A(), B()) # E: Too many positional arguments for "g" h(A()) # E: Missing named argument "b" for "h" # E: Missing named argument "aa" for "h" h(A(), b=B()) # E: Missing named argument "aa" for "h" h(A(), aa=A()) # E: Missing named argument "b" for "h" h(A(), b=B(), aa=A()) h(A(), aa=A(), b=B()) i(A()) # E: Missing named argument "b" for "i" i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) class A: pass class B: pass [case testKwargsAfterBareArgs] from typing import Tuple, Any def f(a, *, b=None) -> None: pass a = None # type: Any b = None # type: Any f(a, **b) [builtins fixtures/dict.pyi] [case testKeywordArgAfterVarArgs] import typing def f(*a: 'A', b: 'B' = None) -> None: pass f() f(A()) f(A(), A()) f(b=B()) f(A(), b=B()) f(A(), A(), b=B()) f(B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" class A: pass class B: pass [builtins fixtures/list.pyi] [case 
testKeywordArgAfterVarArgsWithBothCallerAndCalleeVarArgs] from typing import List def f(*a: 'A', b: 'B' = None) -> None: pass a = None # type: List[A] f(*a) f(A(), *a) f(b=B()) f(*a, b=B()) f(A(), *a, b=B()) f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(A(), b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" f(*a, b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" class A: pass class B: pass [builtins fixtures/list.pyi] [case testCallingDynamicallyTypedFunctionWithKeywordArgs] import typing def f(x, y=A()): pass f(x=A(), y=A()) f(y=A(), x=A()) f(y=A()) # E: Missing positional argument "x" in call to "f" f(A(), z=A()) # E: Unexpected keyword argument "z" for "f" class A: pass [case testKwargsArgumentInFunctionBody] from typing import Dict, Any def f( **kwargs: 'A') -> None: d1 = kwargs # type: Dict[str, A] d2 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type "Dict[str, A]", variable has type "Dict[A, Any]") d3 = kwargs # type: Dict[Any, str] # E: Incompatible types in assignment (expression has type "Dict[str, A]", variable has type "Dict[Any, str]") class A: pass [builtins fixtures/dict.pyi] [out] [case testKwargsArgumentInFunctionBodyWithImplicitAny] from typing import Dict, Any def f(**kwargs) -> None: d1 = kwargs # type: Dict[str, A] d2 = kwargs # type: Dict[str, str] d3 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type "Dict[str, Any]", variable has type "Dict[A, Any]") class A: pass [builtins fixtures/dict.pyi] [out] [case testCallingFunctionThatAcceptsVarKwargs] import typing def f( **kwargs: 'A') -> None: pass f() f(x=A()) f(y=A(), z=A()) f(x=B()) # E: Argument "x" to "f" has incompatible type "B"; expected "A" f(A()) # E: Too many arguments for "f" # Perhaps a better message would be "Too many *positional* arguments..." 
class A: pass class B: pass [builtins fixtures/dict.pyi] [case testCallingFunctionWithKeywordVarArgs] from typing import Dict def f( **kwargs: 'A') -> None: pass d = None # type: Dict[str, A] f(**d) f(x=A(), **d) d2 = None # type: Dict[str, B] f(**d2) # E: Argument 1 to "f" has incompatible type "**Dict[str, B]"; expected "A" f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type "**Dict[str, B]"; expected "A" class A: pass class B: pass [builtins fixtures/dict.pyi] [case testKwargsAllowedInDunderCall] class Formatter: def __call__(self, message: str, bold: bool = False) -> str: pass formatter = Formatter() formatter("test", bold=True) reveal_type(formatter.__call__) # N: Revealed type is 'def (message: builtins.str, bold: builtins.bool =) -> builtins.str' [builtins fixtures/bool.pyi] [out] [case testKwargsAllowedInDunderCallKwOnly] class Formatter: def __call__(self, message: str, *, bold: bool = False) -> str: pass formatter = Formatter() formatter("test", bold=True) reveal_type(formatter.__call__) # N: Revealed type is 'def (message: builtins.str, *, bold: builtins.bool =) -> builtins.str' [builtins fixtures/bool.pyi] [out] [case testPassingMappingForKeywordVarArg] from typing import Mapping def f(**kwargs: 'A') -> None: pass b = None # type: Mapping d = None # type: Mapping[A, A] m = None # type: Mapping[str, A] f(**d) # E: Keywords must be strings f(**m) f(**b) class A: pass [builtins fixtures/dict.pyi] [case testPassingMappingSubclassForKeywordVarArg] from typing import Mapping class MappingSubclass(Mapping[str, str]): pass def f(**kwargs: 'A') -> None: pass d = None # type: MappingSubclass f(**d) class A: pass [builtins fixtures/dict.pyi] [case testInvalidTypeForKeywordVarArg] from typing import Dict def f(**kwargs: 'A') -> None: pass d = None # type: Dict[A, A] f(**d) # E: Keywords must be strings f(**A()) # E: Argument after ** must be a mapping, not "A" class A: pass [builtins fixtures/dict.pyi] [case testPassingKeywordVarArgsToNonVarArgsFunction] 
from typing import Any, Dict def f(a: 'A', b: 'B') -> None: pass d = None # type: Dict[str, Any] f(**d) d2 = None # type: Dict[str, A] f(**d2) # E: Argument 1 to "f" has incompatible type "**Dict[str, A]"; expected "B" class A: pass class B: pass [builtins fixtures/dict.pyi] [case testBothKindsOfVarArgs] from typing import Any, List, Dict def f(a: 'A', b: 'A') -> None: pass l = None # type: List[Any] d = None # type: Dict[Any, Any] f(*l, **d) class A: pass [builtins fixtures/dict.pyi] [case testKeywordArgumentAndCommentSignature] import typing def f(x): # type: (int) -> str # N: "f" defined here pass f(x='') # E: Argument "x" to "f" has incompatible type "str"; expected "int" f(x=0) f(y=0) # E: Unexpected keyword argument "y" for "f" [case testKeywordArgumentAndCommentSignature2] import typing class A: def f(self, x): # type: (int) -> str # N: "f" of "A" defined here pass A().f(x='') # E: Argument "x" to "f" of "A" has incompatible type "str"; expected "int" A().f(x=0) A().f(y=0) # E: Unexpected keyword argument "y" for "f" of "A" [case testKeywordVarArgsAndCommentSignature] import typing def f(**kwargs): # type: (**int) -> None pass f(z=1) f(x=1, y=1) f(x='', y=1) # E: Argument "x" to "f" has incompatible type "str"; expected "int" f(x=1, y='') # E: Argument "y" to "f" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallsWithStars] def f(a: int) -> None: pass s = ('',) f(*s) # E: Argument 1 to "f" has incompatible type "*Tuple[str]"; expected "int" a = {'': 0} f(a) # E: Argument 1 to "f" has incompatible type "Dict[str, int]"; expected "int" f(**a) # okay b = {'': ''} f(b) # E: Argument 1 to "f" has incompatible type "Dict[str, str]"; expected "int" f(**b) # E: Argument 1 to "f" has incompatible type "**Dict[str, str]"; expected "int" c = {0: 0} f(**c) # E: Keywords must be strings [builtins fixtures/dict.pyi] [case testCallStar2WithStar] def f(**k): pass f(*(1, 2)) # E: Too many arguments for "f" [builtins fixtures/dict.pyi] 
[case testUnexpectedMethodKwargInNestedClass] class A: class B: def __init__(self) -> None: # N: "B" defined here pass A.B(x=1) # E: Unexpected keyword argument "x" for "B" [case testUnexpectedMethodKwargFromOtherModule] import m m.A(x=1) [file m.py] 1+'asdf' class A: def __init__(self) -> None: pass [out] -- Note that the messages appear "out of order" because the m.py:3 -- message is really an attachment to the main:2 error and should be -- reported with it. tmp/m.py:1: error: Unsupported operand types for + ("int" and "str") main:2: error: Unexpected keyword argument "x" for "A" tmp/m.py:3: note: "A" defined here [case testStarArgsAndKwArgsSpecialCase] from typing import Dict, Mapping def f(*vargs: int, **kwargs: object) -> None: pass def g(arg: int = 0, **kwargs: object) -> None: pass d = {} # type: Dict[str, object] f(**d) g(**d) # E: Argument 1 to "g" has incompatible type "**Dict[str, object]"; expected "int" m = {} # type: Mapping[str, object] f(**m) g(**m) # TODO: Should be an error [builtins fixtures/dict.pyi] mypy-0.761/test-data/unit/check-lists.test0000644€tŠÔÚ€2›s®0000000454313576752246024705 0ustar jukkaDROPBOX\Domain Users00000000000000-- Nested list assignment -- ----------------------------- [case testNestedListAssignment] from typing import List a1, b1, c1 = None, None, None # type: (A, B, C) a2, b2, c2 = None, None, None # type: (A, B, C) if int(): a1, [b1, c1] = a2, [b2, c2] if int(): a1, [a1, [b1, c1]] = a2, [a2, [b2, c2]] if int(): a1, [a1, [a1, b1]] = a1, [a1, [a1, c1]] # E: Incompatible types in assignment (expression has type "C", variable has type "B") class A: pass class B: pass class C: pass [builtins fixtures/list.pyi] [out] [case testNestedListAssignmentToTuple] from typing import List a, b, c = None, None, None # type: (A, B, C) a, b = [a, b] a, b = [a] # E: Need more than 1 value to unpack (2 expected) a, b = [a, b, c] # E: Too many values to unpack (2 expected, 3 provided) class A: pass class B: pass class C: pass [builtins 
fixtures/list.pyi] [out] [case testListAssignmentFromTuple] from typing import List a, b, c = None, None, None # type: (A, B, C) t = a, b if int(): [a, b], c = t, c if int(): [a, c], c = t, c # E: Incompatible types in assignment (expression has type "B", variable has type "C") if int(): [a, a, a], c = t, c # E: Need more than 2 values to unpack (3 expected) if int(): [a], c = t, c # E: Too many values to unpack (1 expected, 2 provided) class A: pass class B: pass class C: pass [builtins fixtures/list.pyi] [out] [case testListAssignmentUnequalAmountToUnpack] from typing import List a, b, c = None, None, None # type: (A, B, C) def f() -> None: # needed because test parser tries to parse [a, b] as section header [a, b] = [a, b] [a, b] = [a] # E: Need more than 1 value to unpack (2 expected) [a, b] = [a, b, c] # E: Too many values to unpack (2 expected, 3 provided) class A: pass class B: pass class C: pass [builtins fixtures/list.pyi] [out] [case testListWithStarExpr] (x, *a) = [1, 2, 3] a = [1, *[2, 3]] reveal_type(a) # N: Revealed type is 'builtins.list[builtins.int*]' b = [0, *a] reveal_type(b) # N: Revealed type is 'builtins.list[builtins.int*]' c = [*a, 0] reveal_type(c) # N: Revealed type is 'builtins.list[builtins.int*]' [builtins fixtures/list.pyi] [case testComprehensionShadowBinder] # flags: --strict-optional def foo(x: object) -> None: if isinstance(x, str): [reveal_type(x) for x in [1, 2, 3]] # N: Revealed type is 'builtins.int*' [builtins fixtures/isinstancelist.pyi] mypy-0.761/test-data/unit/check-literal.test0000644€tŠÔÚ€2›s®0000035110013576752246025175 0ustar jukkaDROPBOX\Domain Users00000000000000-- -- Check to see how we handle raw types, error handling, and other -- semantic analysis shenanigans -- [case testLiteralInvalidString] from typing_extensions import Literal def f1(x: 'A[') -> None: pass # E: Invalid type comment or annotation def g1(x: Literal['A[']) -> None: pass reveal_type(f1) # N: Revealed type is 'def (x: Any)' reveal_type(g1) # N: 
Revealed type is 'def (x: Literal['A['])' def f2(x: 'A B') -> None: pass # E: Invalid type comment or annotation def g2(x: Literal['A B']) -> None: pass reveal_type(f2) # N: Revealed type is 'def (x: Any)' reveal_type(g2) # N: Revealed type is 'def (x: Literal['A B'])' [out] [case testLiteralInvalidTypeComment] from typing_extensions import Literal def f(x): # E: syntax error in type comment '(A[) -> None' # type: (A[) -> None pass [case testLiteralInvalidTypeComment2] from typing_extensions import Literal def f(x): # E: Invalid type comment or annotation # type: ("A[") -> None pass def g(x): # type: (Literal["A["]) -> None pass reveal_type(f) # N: Revealed type is 'def (x: Any)' reveal_type(g) # N: Revealed type is 'def (x: Literal['A['])' [out] [case testLiteralFromTypingWorks] from typing import Literal x: Literal[42] x = 43 # E: Incompatible types in assignment (expression has type "Literal[43]", variable has type "Literal[42]") y: Literal[43] y = 43 [typing fixtures/typing-full.pyi] [case testLiteralParsingPython2] # flags: --python-version 2.7 from typing import Optional from typing_extensions import Literal def f(x): # E: Invalid type comment or annotation # type: ("A[") -> None pass def g(x): # type: (Literal["A["]) -> None pass x = None # type: Optional[1] # E: Invalid type: try using Literal[1] instead? y = None # type: Optional[Literal[1]] reveal_type(x) # N: Revealed type is 'Union[Any, None]' reveal_type(y) # N: Revealed type is 'Union[Literal[1], None]' [out] [case testLiteralInsideOtherTypes] from typing import Tuple from typing_extensions import Literal x: Tuple[1] # E: Invalid type: try using Literal[1] instead? def foo(x: Tuple[1]) -> None: ... # E: Invalid type: try using Literal[1] instead? y: Tuple[Literal[2]] def bar(x: Tuple[Literal[2]]) -> None: ... 
reveal_type(x) # N: Revealed type is 'Tuple[Any]' reveal_type(y) # N: Revealed type is 'Tuple[Literal[2]]' reveal_type(bar) # N: Revealed type is 'def (x: Tuple[Literal[2]])' [out] [case testLiteralInsideOtherTypesPython2] # flags: --python-version 2.7 from typing import Tuple, Optional from typing_extensions import Literal x = None # type: Optional[Tuple[1]] # E: Invalid type: try using Literal[1] instead? def foo(x): # E: Invalid type: try using Literal[1] instead? # type: (Tuple[1]) -> None pass y = None # type: Optional[Tuple[Literal[2]]] def bar(x): # type: (Tuple[Literal[2]]) -> None pass reveal_type(x) # N: Revealed type is 'Union[Tuple[Any], None]' reveal_type(y) # N: Revealed type is 'Union[Tuple[Literal[2]], None]' reveal_type(bar) # N: Revealed type is 'def (x: Tuple[Literal[2]])' [out] [case testLiteralInsideOtherTypesTypeCommentsPython3] # flags: --python-version 3.7 from typing import Tuple, Optional from typing_extensions import Literal x = None # type: Optional[Tuple[1]] # E: Invalid type: try using Literal[1] instead? def foo(x): # E: Invalid type: try using Literal[1] instead? 
# type: (Tuple[1]) -> None pass y = None # type: Optional[Tuple[Literal[2]]] def bar(x): # type: (Tuple[Literal[2]]) -> None pass reveal_type(x) # N: Revealed type is 'Union[Tuple[Any], None]' reveal_type(y) # N: Revealed type is 'Union[Tuple[Literal[2]], None]' reveal_type(bar) # N: Revealed type is 'def (x: Tuple[Literal[2]])' [out] [case testLiteralValidExpressionsInStringsPython3] from wrapper import * [file wrapper.pyi] from typing_extensions import Literal alias_1 = Literal['a+b'] alias_2 = Literal['1+2'] alias_3 = Literal['3'] alias_4 = Literal['True'] alias_5 = Literal['None'] alias_6 = Literal['"foo"'] expr_of_alias_1: alias_1 expr_of_alias_2: alias_2 expr_of_alias_3: alias_3 expr_of_alias_4: alias_4 expr_of_alias_5: alias_5 expr_of_alias_6: alias_6 reveal_type(expr_of_alias_1) # N: Revealed type is 'Literal['a+b']' reveal_type(expr_of_alias_2) # N: Revealed type is 'Literal['1+2']' reveal_type(expr_of_alias_3) # N: Revealed type is 'Literal['3']' reveal_type(expr_of_alias_4) # N: Revealed type is 'Literal['True']' reveal_type(expr_of_alias_5) # N: Revealed type is 'Literal['None']' reveal_type(expr_of_alias_6) # N: Revealed type is 'Literal['"foo"']' expr_ann_1: Literal['a+b'] expr_ann_2: Literal['1+2'] expr_ann_3: Literal['3'] expr_ann_4: Literal['True'] expr_ann_5: Literal['None'] expr_ann_6: Literal['"foo"'] reveal_type(expr_ann_1) # N: Revealed type is 'Literal['a+b']' reveal_type(expr_ann_2) # N: Revealed type is 'Literal['1+2']' reveal_type(expr_ann_3) # N: Revealed type is 'Literal['3']' reveal_type(expr_ann_4) # N: Revealed type is 'Literal['True']' reveal_type(expr_ann_5) # N: Revealed type is 'Literal['None']' reveal_type(expr_ann_6) # N: Revealed type is 'Literal['"foo"']' expr_str_1: "Literal['a+b']" expr_str_2: "Literal['1+2']" expr_str_3: "Literal['3']" expr_str_4: "Literal['True']" expr_str_5: "Literal['None']" expr_str_6: "Literal['\"foo\"']" reveal_type(expr_str_1) # N: Revealed type is 'Literal['a+b']' reveal_type(expr_str_2) # N: 
Revealed type is 'Literal['1+2']' reveal_type(expr_str_3) # N: Revealed type is 'Literal['3']' reveal_type(expr_str_4) # N: Revealed type is 'Literal['True']' reveal_type(expr_str_5) # N: Revealed type is 'Literal['None']' reveal_type(expr_str_6) # N: Revealed type is 'Literal['"foo"']' expr_com_1 = ... # type: Literal['a+b'] expr_com_2 = ... # type: Literal['1+2'] expr_com_3 = ... # type: Literal['3'] expr_com_4 = ... # type: Literal['True'] expr_com_5 = ... # type: Literal['None'] expr_com_6 = ... # type: Literal['"foo"'] reveal_type(expr_com_1) # N: Revealed type is 'Literal['a+b']' reveal_type(expr_com_2) # N: Revealed type is 'Literal['1+2']' reveal_type(expr_com_3) # N: Revealed type is 'Literal['3']' reveal_type(expr_com_4) # N: Revealed type is 'Literal['True']' reveal_type(expr_com_5) # N: Revealed type is 'Literal['None']' reveal_type(expr_com_6) # N: Revealed type is 'Literal['"foo"']' [builtins fixtures/bool.pyi] [out] [case testLiteralValidExpressionsInStringsPython2] # flags: --python-version=2.7 from wrapper import * [file wrapper.pyi] from typing_extensions import Literal alias_1 = Literal['a+b'] alias_2 = Literal['1+2'] alias_3 = Literal['3'] alias_4 = Literal['True'] alias_5 = Literal['None'] alias_6 = Literal['"foo"'] expr_of_alias_1: alias_1 expr_of_alias_2: alias_2 expr_of_alias_3: alias_3 expr_of_alias_4: alias_4 expr_of_alias_5: alias_5 expr_of_alias_6: alias_6 reveal_type(expr_of_alias_1) # N: Revealed type is 'Literal['a+b']' reveal_type(expr_of_alias_2) # N: Revealed type is 'Literal['1+2']' reveal_type(expr_of_alias_3) # N: Revealed type is 'Literal['3']' reveal_type(expr_of_alias_4) # N: Revealed type is 'Literal['True']' reveal_type(expr_of_alias_5) # N: Revealed type is 'Literal['None']' reveal_type(expr_of_alias_6) # N: Revealed type is 'Literal['"foo"']' expr_com_1 = ... # type: Literal['a+b'] expr_com_2 = ... # type: Literal['1+2'] expr_com_3 = ... # type: Literal['3'] expr_com_4 = ... # type: Literal['True'] expr_com_5 = ... 
# type: Literal['None'] expr_com_6 = ... # type: Literal['"foo"'] reveal_type(expr_com_1) # N: Revealed type is 'Literal[u'a+b']' reveal_type(expr_com_2) # N: Revealed type is 'Literal[u'1+2']' reveal_type(expr_com_3) # N: Revealed type is 'Literal[u'3']' reveal_type(expr_com_4) # N: Revealed type is 'Literal[u'True']' reveal_type(expr_com_5) # N: Revealed type is 'Literal[u'None']' reveal_type(expr_com_6) # N: Revealed type is 'Literal[u'"foo"']' [builtins fixtures/bool.pyi] [out] [case testLiteralMixingUnicodeAndBytesPython3] from typing_extensions import Literal a_ann: Literal[u"foo"] b_ann: Literal["foo"] c_ann: Literal[b"foo"] a_hint = u"foo" # type: Literal[u"foo"] b_hint = "foo" # type: Literal["foo"] c_hint = b"foo" # type: Literal[b"foo"] AAlias = Literal[u"foo"] BAlias = Literal["foo"] CAlias = Literal[b"foo"] a_alias: AAlias b_alias: BAlias c_alias: CAlias def accepts_str_1(x: Literal[u"foo"]) -> None: pass def accepts_str_2(x: Literal["foo"]) -> None: pass def accepts_bytes(x: Literal[b"foo"]) -> None: pass reveal_type(a_ann) # N: Revealed type is 'Literal['foo']' reveal_type(b_ann) # N: Revealed type is 'Literal['foo']' reveal_type(c_ann) # N: Revealed type is 'Literal[b'foo']' reveal_type(a_hint) # N: Revealed type is 'Literal['foo']' reveal_type(b_hint) # N: Revealed type is 'Literal['foo']' reveal_type(c_hint) # N: Revealed type is 'Literal[b'foo']' reveal_type(a_alias) # N: Revealed type is 'Literal['foo']' reveal_type(b_alias) # N: Revealed type is 'Literal['foo']' reveal_type(c_alias) # N: Revealed type is 'Literal[b'foo']' accepts_str_1(a_ann) accepts_str_1(b_ann) accepts_str_1(c_ann) # E: Argument 1 to "accepts_str_1" has incompatible type "Literal[b'foo']"; expected "Literal['foo']" accepts_str_1(a_hint) accepts_str_1(b_hint) accepts_str_1(c_hint) # E: Argument 1 to "accepts_str_1" has incompatible type "Literal[b'foo']"; expected "Literal['foo']" accepts_str_1(a_alias) accepts_str_1(b_alias) accepts_str_1(c_alias) # E: Argument 1 to 
"accepts_str_1" has incompatible type "Literal[b'foo']"; expected "Literal['foo']" accepts_str_2(a_ann) accepts_str_2(b_ann) accepts_str_2(c_ann) # E: Argument 1 to "accepts_str_2" has incompatible type "Literal[b'foo']"; expected "Literal['foo']" accepts_str_2(a_hint) accepts_str_2(b_hint) accepts_str_2(c_hint) # E: Argument 1 to "accepts_str_2" has incompatible type "Literal[b'foo']"; expected "Literal['foo']" accepts_str_2(a_alias) accepts_str_2(b_alias) accepts_str_2(c_alias) # E: Argument 1 to "accepts_str_2" has incompatible type "Literal[b'foo']"; expected "Literal['foo']" accepts_bytes(a_ann) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" accepts_bytes(b_ann) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" accepts_bytes(c_ann) accepts_bytes(a_hint) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" accepts_bytes(b_hint) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" accepts_bytes(c_hint) accepts_bytes(a_alias) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" accepts_bytes(b_alias) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" accepts_bytes(c_alias) [out] [case testLiteralMixingUnicodeAndBytesPython2] # flags: --python-version 2.7 from typing_extensions import Literal a_hint = u"foo" # type: Literal[u"foo"] b_hint = "foo" # type: Literal["foo"] c_hint = b"foo" # type: Literal[b"foo"] AAlias = Literal[u"foo"] BAlias = Literal["foo"] CAlias = Literal[b"foo"] a_alias = u"foo" # type: AAlias b_alias = "foo" # type: BAlias c_alias = b"foo" # type: CAlias def accepts_unicode(x): # type: (Literal[u"foo"]) -> None pass def accepts_bytes_1(x): # type: (Literal["foo"]) -> None pass def accepts_bytes_2(x): # type: (Literal[b"foo"]) -> None pass 
reveal_type(a_hint) # N: Revealed type is 'Literal[u'foo']' reveal_type(b_hint) # N: Revealed type is 'Literal['foo']' reveal_type(c_hint) # N: Revealed type is 'Literal['foo']' reveal_type(a_alias) # N: Revealed type is 'Literal[u'foo']' reveal_type(b_alias) # N: Revealed type is 'Literal['foo']' reveal_type(c_alias) # N: Revealed type is 'Literal['foo']' accepts_unicode(a_hint) accepts_unicode(b_hint) # E: Argument 1 to "accepts_unicode" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" accepts_unicode(c_hint) # E: Argument 1 to "accepts_unicode" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" accepts_unicode(a_alias) accepts_unicode(b_alias) # E: Argument 1 to "accepts_unicode" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" accepts_unicode(c_alias) # E: Argument 1 to "accepts_unicode" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" accepts_bytes_1(a_hint) # E: Argument 1 to "accepts_bytes_1" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" accepts_bytes_1(b_hint) accepts_bytes_1(c_hint) accepts_bytes_1(a_alias) # E: Argument 1 to "accepts_bytes_1" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" accepts_bytes_1(b_alias) accepts_bytes_1(c_alias) accepts_bytes_2(a_hint) # E: Argument 1 to "accepts_bytes_2" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" accepts_bytes_2(b_hint) accepts_bytes_2(c_hint) accepts_bytes_2(a_alias) # E: Argument 1 to "accepts_bytes_2" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" accepts_bytes_2(b_alias) accepts_bytes_2(c_alias) [builtins fixtures/primitives.pyi] [out] [case testLiteralMixingUnicodeAndBytesPython2UnicodeLiterals] # flags: --python-version 2.7 from __future__ import unicode_literals from typing_extensions import Literal a_hint = u"foo" # type: Literal[u"foo"] b_hint = "foo" # type: Literal["foo"] c_hint = b"foo" # type: Literal[b"foo"] AAlias = Literal[u"foo"] BAlias = 
Literal["foo"] CAlias = Literal[b"foo"] a_alias = u"foo" # type: AAlias b_alias = "foo" # type: BAlias c_alias = b"foo" # type: CAlias def accepts_unicode_1(x): # type: (Literal[u"foo"]) -> None pass def accepts_unicode_2(x): # type: (Literal["foo"]) -> None pass def accepts_bytes(x): # type: (Literal[b"foo"]) -> None pass reveal_type(a_hint) # N: Revealed type is 'Literal[u'foo']' reveal_type(b_hint) # N: Revealed type is 'Literal[u'foo']' reveal_type(c_hint) # N: Revealed type is 'Literal['foo']' reveal_type(a_alias) # N: Revealed type is 'Literal[u'foo']' reveal_type(b_alias) # N: Revealed type is 'Literal[u'foo']' reveal_type(c_alias) # N: Revealed type is 'Literal['foo']' accepts_unicode_1(a_hint) accepts_unicode_1(b_hint) accepts_unicode_1(c_hint) # E: Argument 1 to "accepts_unicode_1" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" accepts_unicode_1(a_alias) accepts_unicode_1(b_alias) accepts_unicode_1(c_alias) # E: Argument 1 to "accepts_unicode_1" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" accepts_unicode_2(a_hint) accepts_unicode_2(b_hint) accepts_unicode_2(c_hint) # E: Argument 1 to "accepts_unicode_2" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" accepts_unicode_2(a_alias) accepts_unicode_2(b_alias) accepts_unicode_2(c_alias) # E: Argument 1 to "accepts_unicode_2" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" accepts_bytes(a_hint) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" accepts_bytes(b_hint) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" accepts_bytes(c_hint) accepts_bytes(a_alias) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" accepts_bytes(b_alias) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" accepts_bytes(c_alias) [builtins 
fixtures/primitives.pyi] [out] [case testLiteralMixingUnicodeAndBytesPython3ForwardStrings] from typing import TypeVar, Generic from typing_extensions import Literal a_unicode_wrapper: u"Literal[u'foo']" b_unicode_wrapper: u"Literal['foo']" c_unicode_wrapper: u"Literal[b'foo']" a_str_wrapper: "Literal[u'foo']" b_str_wrapper: "Literal['foo']" c_str_wrapper: "Literal[b'foo']" # In Python 3, forward references MUST be str, not bytes a_bytes_wrapper: b"Literal[u'foo']" # E: Invalid type comment or annotation b_bytes_wrapper: b"Literal['foo']" # E: Invalid type comment or annotation c_bytes_wrapper: b"Literal[b'foo']" # E: Invalid type comment or annotation reveal_type(a_unicode_wrapper) # N: Revealed type is 'Literal['foo']' reveal_type(b_unicode_wrapper) # N: Revealed type is 'Literal['foo']' reveal_type(c_unicode_wrapper) # N: Revealed type is 'Literal[b'foo']' reveal_type(a_str_wrapper) # N: Revealed type is 'Literal['foo']' reveal_type(b_str_wrapper) # N: Revealed type is 'Literal['foo']' reveal_type(c_str_wrapper) # N: Revealed type is 'Literal[b'foo']' T = TypeVar('T') class Wrap(Generic[T]): pass AUnicodeWrapperAlias = Wrap[u"Literal[u'foo']"] BUnicodeWrapperAlias = Wrap[u"Literal['foo']"] CUnicodeWrapperAlias = Wrap[u"Literal[b'foo']"] a_unicode_wrapper_alias: AUnicodeWrapperAlias b_unicode_wrapper_alias: BUnicodeWrapperAlias c_unicode_wrapper_alias: CUnicodeWrapperAlias AStrWrapperAlias = Wrap["Literal[u'foo']"] BStrWrapperAlias = Wrap["Literal['foo']"] CStrWrapperAlias = Wrap["Literal[b'foo']"] a_str_wrapper_alias: AStrWrapperAlias b_str_wrapper_alias: BStrWrapperAlias c_str_wrapper_alias: CStrWrapperAlias ABytesWrapperAlias = Wrap[b"Literal[u'foo']"] BBytesWrapperAlias = Wrap[b"Literal['foo']"] CBytesWrapperAlias = Wrap[b"Literal[b'foo']"] a_bytes_wrapper_alias: ABytesWrapperAlias b_bytes_wrapper_alias: BBytesWrapperAlias c_bytes_wrapper_alias: CBytesWrapperAlias # In Python 3, we assume that Literal['foo'] and Literal[u'foo'] are always # equivalent, no 
matter what. reveal_type(a_unicode_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(b_unicode_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(c_unicode_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[b'foo']]' reveal_type(a_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(b_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(c_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[b'foo']]' reveal_type(a_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(b_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(c_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[b'foo']]' [out] [case testLiteralMixingUnicodeAndBytesPython2ForwardStrings] # flags: --python-version 2.7 from typing import TypeVar, Generic from typing_extensions import Literal T = TypeVar('T') class Wrap(Generic[T]): pass AUnicodeWrapperAlias = Wrap[u"Literal[u'foo']"] BUnicodeWrapperAlias = Wrap[u"Literal['foo']"] CUnicodeWrapperAlias = Wrap[u"Literal[b'foo']"] a_unicode_wrapper_alias = Wrap() # type: AUnicodeWrapperAlias b_unicode_wrapper_alias = Wrap() # type: BUnicodeWrapperAlias c_unicode_wrapper_alias = Wrap() # type: CUnicodeWrapperAlias AStrWrapperAlias = Wrap["Literal[u'foo']"] BStrWrapperAlias = Wrap["Literal['foo']"] CStrWrapperAlias = Wrap["Literal[b'foo']"] a_str_wrapper_alias = Wrap() # type: AStrWrapperAlias b_str_wrapper_alias = Wrap() # type: BStrWrapperAlias c_str_wrapper_alias = Wrap() # type: CStrWrapperAlias ABytesWrapperAlias = Wrap[b"Literal[u'foo']"] BBytesWrapperAlias = Wrap[b"Literal['foo']"] CBytesWrapperAlias = Wrap[b"Literal[b'foo']"] a_bytes_wrapper_alias = Wrap() # type: ABytesWrapperAlias b_bytes_wrapper_alias = Wrap() # type: BBytesWrapperAlias c_bytes_wrapper_alias = Wrap() # type: CBytesWrapperAlias # Unlike Python 3, the exact 
meaning of Literal['foo'] is "inherited" from the "outer" # string. For example, the "outer" string is unicode in the first example here. So # we treat Literal['foo'] as the same as Literal[u'foo']. reveal_type(a_unicode_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[u'foo']]' reveal_type(b_unicode_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[u'foo']]' reveal_type(c_unicode_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' # However, for both of these examples, the "outer" string is bytes, so we don't treat # Literal['foo'] as a unicode Literal. reveal_type(a_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[u'foo']]' reveal_type(b_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(c_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(a_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[u'foo']]' reveal_type(b_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(c_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' [out] [case testLiteralMixingUnicodeAndBytesPython2ForwardStringsUnicodeLiterals] # flags: --python-version 2.7 from __future__ import unicode_literals from typing import TypeVar, Generic from typing_extensions import Literal T = TypeVar('T') class Wrap(Generic[T]): pass AUnicodeWrapperAlias = Wrap[u"Literal[u'foo']"] BUnicodeWrapperAlias = Wrap[u"Literal['foo']"] CUnicodeWrapperAlias = Wrap[u"Literal[b'foo']"] a_unicode_wrapper_alias = Wrap() # type: AUnicodeWrapperAlias b_unicode_wrapper_alias = Wrap() # type: BUnicodeWrapperAlias c_unicode_wrapper_alias = Wrap() # type: CUnicodeWrapperAlias AStrWrapperAlias = Wrap["Literal[u'foo']"] BStrWrapperAlias = Wrap["Literal['foo']"] CStrWrapperAlias = Wrap["Literal[b'foo']"] a_str_wrapper_alias = Wrap() # type: AStrWrapperAlias b_str_wrapper_alias = Wrap() # type: BStrWrapperAlias c_str_wrapper_alias = Wrap() # 
type: CStrWrapperAlias ABytesWrapperAlias = Wrap[b"Literal[u'foo']"] BBytesWrapperAlias = Wrap[b"Literal['foo']"] CBytesWrapperAlias = Wrap[b"Literal[b'foo']"] a_bytes_wrapper_alias = Wrap() # type: ABytesWrapperAlias b_bytes_wrapper_alias = Wrap() # type: BBytesWrapperAlias c_bytes_wrapper_alias = Wrap() # type: CBytesWrapperAlias # This example is almost identical to the previous one, except that we're using # unicode literals. The first and last examples remain the same, but the middle # one changes: reveal_type(a_unicode_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[u'foo']]' reveal_type(b_unicode_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[u'foo']]' reveal_type(c_unicode_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' # Since unicode_literals is enabled, the "outer" string in Wrap["Literal['foo']"] is now # a unicode string, so we end up treating Literal['foo'] as the same as Literal[u'foo']. reveal_type(a_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[u'foo']]' reveal_type(b_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[u'foo']]' reveal_type(c_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(a_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[u'foo']]' reveal_type(b_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(c_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' [out] [case testLiteralMixingUnicodeAndBytesInconsistentUnicodeLiterals] # flags: --python-version 2.7 import mod_unicode as u import mod_bytes as b reveal_type(u.func) # N: Revealed type is 'def (x: Literal[u'foo'])' reveal_type(u.var) # N: Revealed type is 'Literal[u'foo']' reveal_type(b.func) # N: Revealed type is 'def (x: Literal['foo'])' reveal_type(b.var) # N: Revealed type is 'Literal['foo']' from_u = u"foo" # type: u.Alias from_b = "foo" # type: b.Alias u.func(u.var) u.func(from_u) 
u.func(b.var) # E: Argument 1 to "func" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" u.func(from_b) # E: Argument 1 to "func" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" b.func(u.var) # E: Argument 1 to "func" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" b.func(from_u) # E: Argument 1 to "func" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" b.func(b.var) b.func(from_b) [file mod_unicode.py] from __future__ import unicode_literals from typing_extensions import Literal def func(x): # type: (Literal["foo"]) -> None pass Alias = Literal["foo"] var = "foo" # type: Alias [file mod_bytes.py] from typing_extensions import Literal def func(x): # type: (Literal["foo"]) -> None pass Alias = Literal["foo"] var = "foo" # type: Alias [out] [case testLiteralUnicodeWeirdCharacters] from typing import Any from typing_extensions import Literal a1: Literal["\x00\xAC\x62 \u2227 \u03bb(p)"] b1: Literal["\x00¬b ∧ λ(p)"] c1: Literal["¬b ∧ λ(p)"] d1: Literal["\U0001F600"] e1: Literal["😀"] Alias1 = Literal["\x00\xAC\x62 \u2227 \u03bb(p)"] Alias2 = Literal["\x00¬b ∧ λ(p)"] Alias3 = Literal["¬b ∧ λ(p)"] Alias4 = Literal["\U0001F600"] Alias5 = Literal["😀"] a2: Alias1 b2: Alias2 c2: Alias3 d2: Alias4 e2: Alias5 blah: Any a3 = blah # type: Literal["\x00\xAC\x62 \u2227 \u03bb(p)"] b3 = blah # type: Literal["\x00¬b ∧ λ(p)"] c3 = blah # type: Literal["¬b ∧ λ(p)"] d3 = blah # type: Literal["\U0001F600"] e3 = blah # type: Literal["😀"] reveal_type(a1) # N: Revealed type is 'Literal['\x00¬b ∧ λ(p)']' reveal_type(b1) # N: Revealed type is 'Literal['\x00¬b ∧ λ(p)']' reveal_type(c1) # N: Revealed type is 'Literal['¬b ∧ λ(p)']' reveal_type(d1) # N: Revealed type is 'Literal['😀']' reveal_type(e1) # N: Revealed type is 'Literal['😀']' reveal_type(a2) # N: Revealed type is 'Literal['\x00¬b ∧ λ(p)']' reveal_type(b2) # N: Revealed type is 'Literal['\x00¬b ∧ λ(p)']' reveal_type(c2) # N: Revealed type is 'Literal['¬b ∧ λ(p)']' 
reveal_type(d2) # N: Revealed type is 'Literal['😀']' reveal_type(e2) # N: Revealed type is 'Literal['😀']' reveal_type(a3) # N: Revealed type is 'Literal['\x00¬b ∧ λ(p)']' reveal_type(b3) # N: Revealed type is 'Literal['\x00¬b ∧ λ(p)']' reveal_type(c3) # N: Revealed type is 'Literal['¬b ∧ λ(p)']' reveal_type(d3) # N: Revealed type is 'Literal['😀']' reveal_type(e3) # N: Revealed type is 'Literal['😀']' a1 = b1 a1 = c1 # E: Incompatible types in assignment (expression has type "Literal['¬b ∧ λ(p)']", variable has type "Literal['\x00¬b ∧ λ(p)']") a1 = a2 a1 = b2 a1 = c2 # E: Incompatible types in assignment (expression has type "Literal['¬b ∧ λ(p)']", variable has type "Literal['\x00¬b ∧ λ(p)']") a1 = a3 a1 = b3 a1 = c3 # E: Incompatible types in assignment (expression has type "Literal['¬b ∧ λ(p)']", variable has type "Literal['\x00¬b ∧ λ(p)']") [out skip-path-normalization] [case testLiteralRenamingImportWorks] from typing_extensions import Literal as Foo x: Foo[3] reveal_type(x) # N: Revealed type is 'Literal[3]' y: Foo["hello"] reveal_type(y) # N: Revealed type is 'Literal['hello']' [out] [case testLiteralRenamingImportViaAnotherImportWorks] from other_module import Foo, Bar x: Foo[3] y: Bar reveal_type(x) # N: Revealed type is 'Literal[3]' reveal_type(y) # N: Revealed type is 'Literal[4]' [file other_module.py] from typing_extensions import Literal as Foo Bar = Foo[4] [out] [case testLiteralRenamingImportNameConfusion] from typing_extensions import Literal as Foo x: Foo["Foo"] reveal_type(x) # N: Revealed type is 'Literal['Foo']' y: Foo[Foo] # E: Literal[...] must have at least one parameter [out] [case testLiteralBadRawExpressionWithBadType] NotAType = 3 def f() -> NotAType['also' + 'not' + 'a' + 'type']: ... # E: Variable "__main__.NotAType" is not valid as a type \ # E: Invalid type comment or annotation # Note: this makes us re-inspect the type (e.g. via '_patch_indirect_dependencies' # in build.py) so we can confirm the RawExpressionType did not leak out. 
indirect = f() [out] -- -- Check to make sure we can construct the correct range of literal -- types (and correctly reject invalid literal types) -- -- Note: the assignment tests exercise the logic in 'fastparse.py'; -- the type alias tests exercise the logic in 'exprtotype.py'. -- [case testLiteralBasicIntUsage] from typing_extensions import Literal a1: Literal[4] b1: Literal[0x2a] c1: Literal[-300] reveal_type(a1) # N: Revealed type is 'Literal[4]' reveal_type(b1) # N: Revealed type is 'Literal[42]' reveal_type(c1) # N: Revealed type is 'Literal[-300]' a2t = Literal[4] b2t = Literal[0x2a] c2t = Literal[-300] a2: a2t b2: b2t c2: c2t reveal_type(a2) # N: Revealed type is 'Literal[4]' reveal_type(b2) # N: Revealed type is 'Literal[42]' reveal_type(c2) # N: Revealed type is 'Literal[-300]' def f1(x: Literal[4]) -> Literal[4]: pass def f2(x: Literal[0x2a]) -> Literal[0x2a]: pass def f3(x: Literal[-300]) -> Literal[-300]: pass reveal_type(f1) # N: Revealed type is 'def (x: Literal[4]) -> Literal[4]' reveal_type(f2) # N: Revealed type is 'def (x: Literal[42]) -> Literal[42]' reveal_type(f3) # N: Revealed type is 'def (x: Literal[-300]) -> Literal[-300]' [out] [case testLiteralBasicBoolUsage] from typing_extensions import Literal a1: Literal[True] b1: Literal[False] reveal_type(a1) # N: Revealed type is 'Literal[True]' reveal_type(b1) # N: Revealed type is 'Literal[False]' a2t = Literal[True] b2t = Literal[False] a2: a2t b2: b2t reveal_type(a2) # N: Revealed type is 'Literal[True]' reveal_type(b2) # N: Revealed type is 'Literal[False]' def f1(x: Literal[True]) -> Literal[True]: pass def f2(x: Literal[False]) -> Literal[False]: pass reveal_type(f1) # N: Revealed type is 'def (x: Literal[True]) -> Literal[True]' reveal_type(f2) # N: Revealed type is 'def (x: Literal[False]) -> Literal[False]' [builtins fixtures/bool.pyi] [out] [case testLiteralBasicStrUsage] from typing_extensions import Literal a: Literal[""] b: Literal[" foo bar "] c: Literal[' foo bar '] d: 
Literal["foo"] e: Literal['foo'] reveal_type(a) # N: Revealed type is 'Literal['']' reveal_type(b) # N: Revealed type is 'Literal[' foo bar ']' reveal_type(c) # N: Revealed type is 'Literal[' foo bar ']' reveal_type(d) # N: Revealed type is 'Literal['foo']' reveal_type(e) # N: Revealed type is 'Literal['foo']' def f1(x: Literal[""]) -> Literal[""]: pass def f2(x: Literal[" foo bar "]) -> Literal[" foo bar "]: pass def f3(x: Literal[' foo bar ']) -> Literal[' foo bar ']: pass def f4(x: Literal["foo"]) -> Literal["foo"]: pass def f5(x: Literal['foo']) -> Literal['foo']: pass reveal_type(f1) # N: Revealed type is 'def (x: Literal['']) -> Literal['']' reveal_type(f2) # N: Revealed type is 'def (x: Literal[' foo bar ']) -> Literal[' foo bar ']' reveal_type(f3) # N: Revealed type is 'def (x: Literal[' foo bar ']) -> Literal[' foo bar ']' reveal_type(f4) # N: Revealed type is 'def (x: Literal['foo']) -> Literal['foo']' reveal_type(f5) # N: Revealed type is 'def (x: Literal['foo']) -> Literal['foo']' [out] [case testLiteralBasicStrUsageSlashes] from typing_extensions import Literal a: Literal[r"foo\nbar"] b: Literal["foo\nbar"] reveal_type(a) reveal_type(b) [out skip-path-normalization] main:6: note: Revealed type is 'Literal['foo\\nbar']' main:7: note: Revealed type is 'Literal['foo\nbar']' [case testLiteralBasicNoneUsage] # Note: Literal[None] and None are equivalent from typing_extensions import Literal a: Literal[None] reveal_type(a) # N: Revealed type is 'None' def f1(x: Literal[None]) -> None: pass def f2(x: None) -> Literal[None]: pass def f3(x: Literal[None]) -> Literal[None]: pass reveal_type(f1) # N: Revealed type is 'def (x: None)' reveal_type(f2) # N: Revealed type is 'def (x: None)' reveal_type(f3) # N: Revealed type is 'def (x: None)' [out] [case testLiteralCallingUnionFunction] from typing_extensions import Literal def func(x: Literal['foo', 'bar', ' foo ']) -> None: ... 
func('foo') func('bar') func(' foo ') func('baz') # E: Argument 1 to "func" has incompatible type "Literal['baz']"; expected "Union[Literal['foo'], Literal['bar'], Literal[' foo ']]" a: Literal['foo'] b: Literal['bar'] c: Literal[' foo '] d: Literal['foo', 'bar'] e: Literal['foo', 'bar', ' foo '] f: Literal['foo', 'bar', 'baz'] func(a) func(b) func(c) func(d) func(e) func(f) # E: Argument 1 to "func" has incompatible type "Union[Literal['foo'], Literal['bar'], Literal['baz']]"; expected "Union[Literal['foo'], Literal['bar'], Literal[' foo ']]" [out] [case testLiteralDisallowAny] from typing import Any from typing_extensions import Literal from missing_module import BadAlias # E: Cannot find implementation or library stub for module named 'missing_module' \ # N: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports a: Literal[Any] # E: Parameter 1 of Literal[...] cannot be of type "Any" b: Literal[BadAlias] # E: Parameter 1 of Literal[...] cannot be of type "Any" reveal_type(a) # N: Revealed type is 'Any' reveal_type(b) # N: Revealed type is 'Any' [out] [case testLiteralDisallowActualTypes] from typing_extensions import Literal a: Literal[int] # E: Parameter 1 of Literal[...] is invalid b: Literal[float] # E: Parameter 1 of Literal[...] is invalid c: Literal[bool] # E: Parameter 1 of Literal[...] is invalid d: Literal[str] # E: Parameter 1 of Literal[...] is invalid reveal_type(a) # N: Revealed type is 'Any' reveal_type(b) # N: Revealed type is 'Any' reveal_type(c) # N: Revealed type is 'Any' reveal_type(d) # N: Revealed type is 'Any' [builtins fixtures/primitives.pyi] [out] [case testLiteralDisallowFloatsAndComplex] from typing_extensions import Literal a1: Literal[3.14] # E: Parameter 1 of Literal[...] cannot be of type "float" b1: 3.14 # E: Invalid type: float literals cannot be used as a type c1: Literal[3j] # E: Parameter 1 of Literal[...] 
cannot be of type "complex" d1: 3j # E: Invalid type: complex literals cannot be used as a type a2t = Literal[3.14] # E: Parameter 1 of Literal[...] cannot be of type "float" b2t = 3.14 c2t = Literal[3j] # E: Parameter 1 of Literal[...] cannot be of type "complex" d2t = 3j a2: a2t reveal_type(a2) # N: Revealed type is 'Any' b2: b2t # E: Variable "__main__.b2t" is not valid as a type c2: c2t reveal_type(c2) # N: Revealed type is 'Any' d2: d2t # E: Variable "__main__.d2t" is not valid as a type [builtins fixtures/complex_tuple.pyi] [out] [case testLiteralDisallowComplexExpressions] from typing_extensions import Literal def dummy() -> int: return 3 a: Literal[3 + 4] # E: Invalid type: Literal[...] cannot contain arbitrary expressions b: Literal[" foo ".trim()] # E: Invalid type: Literal[...] cannot contain arbitrary expressions c: Literal[+42] # E: Invalid type: Literal[...] cannot contain arbitrary expressions d: Literal[~12] # E: Invalid type: Literal[...] cannot contain arbitrary expressions e: Literal[dummy()] # E: Invalid type: Literal[...] cannot contain arbitrary expressions [out] [case testLiteralDisallowCollections] from typing_extensions import Literal a: Literal[{"a": 1, "b": 2}] # E: Invalid type: Literal[...] cannot contain arbitrary expressions b: Literal[{1, 2, 3}] # E: Invalid type: Literal[...] cannot contain arbitrary expressions c: {"a": 1, "b": 2} # E: Invalid type comment or annotation d: {1, 2, 3} # E: Invalid type comment or annotation [case testLiteralDisallowCollections2] from typing_extensions import Literal a: (1, 2, 3) # E: Syntax error in type annotation \ # N: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) b: Literal[[1, 2, 3]] # E: Parameter 1 of Literal[...] is invalid c: [1, 2, 3] # E: Bracketed expression "[...]" is not valid as a type \ # N: Did you mean "List[...]"? 
[out] [case testLiteralDisallowCollectionsTypeAlias] from typing_extensions import Literal at = Literal[{"a": 1, "b": 2}] # E: Invalid type alias: expression is not a valid type bt = {"a": 1, "b": 2} a: at # E: Variable "__main__.at" is not valid as a type b: bt # E: Variable "__main__.bt" is not valid as a type [builtins fixtures/dict.pyi] [out] [case testLiteralDisallowCollectionsTypeAlias2] from typing_extensions import Literal at = Literal[{1, 2, 3}] # E: Invalid type alias: expression is not a valid type bt = {1, 2, 3} a: at # E: Variable "__main__.at" is not valid as a type b: bt # E: Variable "__main__.bt" is not valid as a type [builtins fixtures/set.pyi] [out] [case testLiteralDisallowTypeVar] from typing import TypeVar from typing_extensions import Literal T = TypeVar('T') at = Literal[T] # E: Parameter 1 of Literal[...] is invalid a: at def foo(b: Literal[T]) -> T: pass # E: Parameter 1 of Literal[...] is invalid [out] -- -- Test mixing and matching literals with other types -- [case testLiteralMultipleValues] # flags: --strict-optional from typing_extensions import Literal a: Literal[1, 2, 3] b: Literal["a", "b", "c"] c: Literal[1, "b", True, None] d: Literal[1, 1, 1] e: Literal[None, None, None] reveal_type(a) # N: Revealed type is 'Union[Literal[1], Literal[2], Literal[3]]' reveal_type(b) # N: Revealed type is 'Union[Literal['a'], Literal['b'], Literal['c']]' reveal_type(c) # N: Revealed type is 'Union[Literal[1], Literal['b'], Literal[True], None]' # Note: I was thinking these should be simplified, but it seems like # mypy doesn't simplify unions with duplicate values with other types. reveal_type(d) # N: Revealed type is 'Union[Literal[1], Literal[1], Literal[1]]' reveal_type(e) # N: Revealed type is 'Union[None, None, None]' [builtins fixtures/bool.pyi] [out] [case testLiteralMultipleValuesExplicitTuple] from typing_extensions import Literal # Unfortunately, it seems like typed_ast is unable to distinguish this from # Literal[1, 2, 3]. 
So we treat the two as being equivalent for now. a: Literal[1, 2, 3] b: Literal[(1, 2, 3)] reveal_type(a) # N: Revealed type is 'Union[Literal[1], Literal[2], Literal[3]]' reveal_type(b) # N: Revealed type is 'Union[Literal[1], Literal[2], Literal[3]]' [out] [case testLiteralNestedUsage] # flags: --strict-optional from typing_extensions import Literal a: Literal[Literal[3], 4, Literal["foo"]] reveal_type(a) # N: Revealed type is 'Union[Literal[3], Literal[4], Literal['foo']]' alias_for_literal = Literal[5] b: Literal[alias_for_literal] reveal_type(b) # N: Revealed type is 'Literal[5]' another_alias = Literal[1, None] c: Literal[alias_for_literal, another_alias, "r"] reveal_type(c) # N: Revealed type is 'Union[Literal[5], Literal[1], None, Literal['r']]' basic_mode = Literal["r", "w", "a"] basic_with_plus = Literal["r+", "w+", "a+"] combined: Literal[basic_mode, basic_with_plus] reveal_type(combined) # N: Revealed type is 'Union[Literal['r'], Literal['w'], Literal['a'], Literal['r+'], Literal['w+'], Literal['a+']]' [out] [case testLiteralBiasTowardsAssumingForwardReference] from typing_extensions import Literal a: "Foo" reveal_type(a) # N: Revealed type is '__main__.Foo' b: Literal["Foo"] reveal_type(b) # N: Revealed type is 'Literal['Foo']' c: "Literal[Foo]" # E: Parameter 1 of Literal[...] 
is invalid d: "Literal['Foo']" reveal_type(d) # N: Revealed type is 'Literal['Foo']' class Foo: pass [out] [case testLiteralBiasTowardsAssumingForwardReferenceForTypeAliases] from typing_extensions import Literal a: "Foo" reveal_type(a) # N: Revealed type is 'Literal[5]' b: Literal["Foo"] reveal_type(b) # N: Revealed type is 'Literal['Foo']' c: "Literal[Foo]" reveal_type(c) # N: Revealed type is 'Literal[5]' d: "Literal['Foo']" reveal_type(d) # N: Revealed type is 'Literal['Foo']' e: Literal[Foo, 'Foo'] reveal_type(e) # N: Revealed type is 'Union[Literal[5], Literal['Foo']]' Foo = Literal[5] [out] [case testLiteralBiasTowardsAssumingForwardReferencesForTypeComments] from typing_extensions import Literal a = None # type: Foo reveal_type(a) # N: Revealed type is '__main__.Foo' b = None # type: "Foo" reveal_type(b) # N: Revealed type is '__main__.Foo' c = None # type: Literal["Foo"] reveal_type(c) # N: Revealed type is 'Literal['Foo']' d = None # type: Literal[Foo] # E: Parameter 1 of Literal[...] 
is invalid class Foo: pass [out] -- -- Check how we handle very basic subtyping and other useful things -- [case testLiteralCallingFunction] from typing_extensions import Literal def foo(x: Literal[3]) -> None: pass a: Literal[1] b: Literal[2] c: int foo(a) # E: Argument 1 to "foo" has incompatible type "Literal[1]"; expected "Literal[3]" foo(b) # E: Argument 1 to "foo" has incompatible type "Literal[2]"; expected "Literal[3]" foo(c) # E: Argument 1 to "foo" has incompatible type "int"; expected "Literal[3]" [out] [case testLiteralCallingFunctionWithUnionLiteral] from typing_extensions import Literal def foo(x: Literal[1, 2, 3]) -> None: pass a: Literal[1] b: Literal[2, 3] c: Literal[4, 5] d: int foo(a) foo(b) foo(c) # E: Argument 1 to "foo" has incompatible type "Union[Literal[4], Literal[5]]"; expected "Union[Literal[1], Literal[2], Literal[3]]" foo(d) # E: Argument 1 to "foo" has incompatible type "int"; expected "Union[Literal[1], Literal[2], Literal[3]]" [out] [case testLiteralCallingFunctionWithStandardBase] from typing_extensions import Literal def foo(x: int) -> None: pass a: Literal[1] b: Literal[1, -4] c: Literal[4, 'foo'] foo(a) foo(b) foo(c) # E: Argument 1 to "foo" has incompatible type "Union[Literal[4], Literal['foo']]"; expected "int" [out] [case testLiteralCheckSubtypingStrictOptional] # flags: --strict-optional from typing import Any, NoReturn from typing_extensions import Literal lit: Literal[1] def f_lit(x: Literal[1]) -> None: pass def fa(x: Any) -> None: pass def fb(x: NoReturn) -> None: pass def fc(x: None) -> None: pass a: Any b: NoReturn c: None fa(lit) fb(lit) # E: Argument 1 to "fb" has incompatible type "Literal[1]"; expected "NoReturn" fc(lit) # E: Argument 1 to "fc" has incompatible type "Literal[1]"; expected "None" f_lit(a) f_lit(b) f_lit(c) # E: Argument 1 to "f_lit" has incompatible type "None"; expected "Literal[1]" [out] [case testLiteralCheckSubtypingNoStrictOptional] # flags: --no-strict-optional from typing import Any, 
NoReturn from typing_extensions import Literal lit: Literal[1] def f_lit(x: Literal[1]) -> None: pass def fa(x: Any) -> None: pass def fb(x: NoReturn) -> None: pass def fc(x: None) -> None: pass a: Any b: NoReturn c: None fa(lit) fb(lit) # E: Argument 1 to "fb" has incompatible type "Literal[1]"; expected "NoReturn" fc(lit) # E: Argument 1 to "fc" has incompatible type "Literal[1]"; expected "None" f_lit(a) f_lit(b) f_lit(c) [out] [case testLiteralCallingOverloadedFunction] from typing import overload, Generic, TypeVar, Any from typing_extensions import Literal T = TypeVar('T') class IOLike(Generic[T]): pass @overload def foo(x: Literal[1]) -> IOLike[int]: ... @overload def foo(x: Literal[2]) -> IOLike[str]: ... @overload def foo(x: int) -> IOLike[Any]: ... def foo(x: int) -> IOLike[Any]: if x == 1: return IOLike[int]() elif x == 2: return IOLike[str]() else: return IOLike() a: Literal[1] b: Literal[2] c: int d: Literal[3] reveal_type(foo(a)) # N: Revealed type is '__main__.IOLike[builtins.int]' reveal_type(foo(b)) # N: Revealed type is '__main__.IOLike[builtins.str]' reveal_type(foo(c)) # N: Revealed type is '__main__.IOLike[Any]' foo(d) [builtins fixtures/ops.pyi] [out] [case testLiteralVariance] from typing import Generic, TypeVar from typing_extensions import Literal T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) class Invariant(Generic[T]): pass class Covariant(Generic[T_co]): pass class Contravariant(Generic[T_contra]): pass a1: Invariant[Literal[1]] a2: Invariant[Literal[1, 2]] a3: Invariant[Literal[1, 2, 3]] a2 = a1 # E: Incompatible types in assignment (expression has type "Invariant[Literal[1]]", variable has type "Invariant[Union[Literal[1], Literal[2]]]") a2 = a3 # E: Incompatible types in assignment (expression has type "Invariant[Union[Literal[1], Literal[2], Literal[3]]]", variable has type "Invariant[Union[Literal[1], Literal[2]]]") b1: Covariant[Literal[1]] b2: Covariant[Literal[1, 2]] b3: 
Covariant[Literal[1, 2, 3]] b2 = b1 b2 = b3 # E: Incompatible types in assignment (expression has type "Covariant[Union[Literal[1], Literal[2], Literal[3]]]", variable has type "Covariant[Union[Literal[1], Literal[2]]]") c1: Contravariant[Literal[1]] c2: Contravariant[Literal[1, 2]] c3: Contravariant[Literal[1, 2, 3]] c2 = c1 # E: Incompatible types in assignment (expression has type "Contravariant[Literal[1]]", variable has type "Contravariant[Union[Literal[1], Literal[2]]]") c2 = c3 [out] [case testLiteralInListAndSequence] from typing import List, Sequence from typing_extensions import Literal def foo(x: List[Literal[1, 2]]) -> None: pass def bar(x: Sequence[Literal[1, 2]]) -> None: pass a: List[Literal[1]] b: List[Literal[1, 2, 3]] foo(a) # E: Argument 1 to "foo" has incompatible type "List[Literal[1]]"; expected "List[Union[Literal[1], Literal[2]]]" \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant foo(b) # E: Argument 1 to "foo" has incompatible type "List[Union[Literal[1], Literal[2], Literal[3]]]"; expected "List[Union[Literal[1], Literal[2]]]" bar(a) bar(b) # E: Argument 1 to "bar" has incompatible type "List[Union[Literal[1], Literal[2], Literal[3]]]"; expected "Sequence[Union[Literal[1], Literal[2]]]" [builtins fixtures/list.pyi] [out] [case testLiteralRenamingDoesNotChangeTypeChecking] from typing_extensions import Literal as Foo from other_module import Bar1, Bar2, c def func(x: Foo[15]) -> None: pass a: Bar1 b: Bar2 func(a) func(b) # E: Argument 1 to "func" has incompatible type "Literal[14]"; expected "Literal[15]" func(c) [file other_module.py] from typing_extensions import Literal Bar1 = Literal[15] Bar2 = Literal[14] c: Literal[15] -- -- Check to make sure we handle inference of literal values correctly, -- especially when doing assignments or calls -- [case testLiteralInferredInAssignment] from typing_extensions import Literal int1: 
Literal[1] = 1 int2 = 1 int3: int = 1 str1: Literal["foo"] = "foo" str2 = "foo" str3: str = "foo" bool1: Literal[True] = True bool2 = True bool3: bool = True none1: Literal[None] = None none2 = None none3: None = None reveal_type(int1) # N: Revealed type is 'Literal[1]' reveal_type(int2) # N: Revealed type is 'builtins.int' reveal_type(int3) # N: Revealed type is 'builtins.int' reveal_type(str1) # N: Revealed type is 'Literal['foo']' reveal_type(str2) # N: Revealed type is 'builtins.str' reveal_type(str3) # N: Revealed type is 'builtins.str' reveal_type(bool1) # N: Revealed type is 'Literal[True]' reveal_type(bool2) # N: Revealed type is 'builtins.bool' reveal_type(bool3) # N: Revealed type is 'builtins.bool' reveal_type(none1) # N: Revealed type is 'None' reveal_type(none2) # N: Revealed type is 'None' reveal_type(none3) # N: Revealed type is 'None' [builtins fixtures/primitives.pyi] [out] [case testLiteralInferredOnlyForActualLiterals] from typing_extensions import Literal w: Literal[1] x: Literal["foo"] y: Literal[True] z: Literal[None] combined: Literal[1, "foo", True, None] a = 1 b = "foo" c = True d = None w = a # E: Incompatible types in assignment (expression has type "int", variable has type "Literal[1]") x = b # E: Incompatible types in assignment (expression has type "str", variable has type "Literal['foo']") y = c # E: Incompatible types in assignment (expression has type "bool", variable has type "Literal[True]") z = d # This is ok: Literal[None] and None are equivalent. 
combined = a # E: Incompatible types in assignment (expression has type "int", variable has type "Union[Literal[1], Literal['foo'], Literal[True], None]") combined = b # E: Incompatible types in assignment (expression has type "str", variable has type "Union[Literal[1], Literal['foo'], Literal[True], None]") combined = c # E: Incompatible types in assignment (expression has type "bool", variable has type "Union[Literal[1], Literal['foo'], Literal[True], None]") combined = d # Also ok, for similar reasons. e: Literal[1] = 1 f: Literal["foo"] = "foo" g: Literal[True] = True h: Literal[None] = None w = e x = f y = g z = h combined = e combined = f combined = g combined = h [builtins fixtures/primitives.pyi] [out] [case testLiteralInferredTypeMustMatchExpected] from typing_extensions import Literal a: Literal[1] = 2 # E: Incompatible types in assignment (expression has type "Literal[2]", variable has type "Literal[1]") b: Literal["foo"] = "bar" # E: Incompatible types in assignment (expression has type "Literal['bar']", variable has type "Literal['foo']") c: Literal[True] = False # E: Incompatible types in assignment (expression has type "Literal[False]", variable has type "Literal[True]") d: Literal[1, 2] = 3 # E: Incompatible types in assignment (expression has type "Literal[3]", variable has type "Union[Literal[1], Literal[2]]") e: Literal["foo", "bar"] = "baz" # E: Incompatible types in assignment (expression has type "Literal['baz']", variable has type "Union[Literal['foo'], Literal['bar']]") f: Literal[True, 4] = False # E: Incompatible types in assignment (expression has type "Literal[False]", variable has type "Union[Literal[True], Literal[4]]") [builtins fixtures/primitives.pyi] [out] [case testLiteralInferredInCall] from typing_extensions import Literal def f_int_lit(x: Literal[1]) -> None: pass def f_int(x: int) -> None: pass def f_str_lit(x: Literal["foo"]) -> None: pass def f_str(x: str) -> None: pass def f_bool_lit(x: Literal[True]) -> None: pass def 
f_bool(x: bool) -> None: pass def f_none_lit(x: Literal[None]) -> None: pass def f_none(x: None) -> None: pass i1: Literal[1] i2: Literal[2] f_int_lit(1) f_int_lit(2) # E: Argument 1 to "f_int_lit" has incompatible type "Literal[2]"; expected "Literal[1]" f_int(1) f_int_lit(i1) f_int_lit(i2) # E: Argument 1 to "f_int_lit" has incompatible type "Literal[2]"; expected "Literal[1]" s1: Literal["foo"] s2: Literal["bar"] f_str_lit("foo") f_str_lit("bar") # E: Argument 1 to "f_str_lit" has incompatible type "Literal['bar']"; expected "Literal['foo']" f_str("baz") f_str_lit(s1) f_str_lit(s2) # E: Argument 1 to "f_str_lit" has incompatible type "Literal['bar']"; expected "Literal['foo']" b1: Literal[True] b2: Literal[False] f_bool_lit(True) f_bool_lit(False) # E: Argument 1 to "f_bool_lit" has incompatible type "Literal[False]"; expected "Literal[True]" f_bool(True) f_bool_lit(b1) f_bool_lit(b2) # E: Argument 1 to "f_bool_lit" has incompatible type "Literal[False]"; expected "Literal[True]" n1: Literal[None] f_none_lit(None) f_none(None) f_none_lit(n1) [builtins fixtures/primitives.pyi] [out] [case testLiteralInferredInReturnContext] from typing_extensions import Literal def f1() -> int: return 1 def f2() -> Literal[1]: return 1 def f3() -> Literal[1]: return 2 # E: Incompatible return value type (got "Literal[2]", expected "Literal[1]") def f4(x: Literal[1]) -> Literal[1]: return x def f5(x: Literal[2]) -> Literal[1]: return x # E: Incompatible return value type (got "Literal[2]", expected "Literal[1]") [out] [case testLiteralInferredInListContext] from typing import List from typing_extensions import Literal a: List[Literal[1]] = [1, 1, 1] b = [1, 1, 1] c: List[Literal[1, 2, 3]] = [1, 2, 3] d = [1, 2, 3] e: List[Literal[1, "x"]] = [1, "x"] f = [1, "x"] g: List[List[List[Literal[1, 2, 3]]]] = [[[1, 2, 3], [3]]] h: List[Literal[1]] = [] reveal_type(a) # N: Revealed type is 'builtins.list[Literal[1]]' reveal_type(b) # N: Revealed type is 'builtins.list[builtins.int*]' 
reveal_type(c) # N: Revealed type is 'builtins.list[Union[Literal[1], Literal[2], Literal[3]]]' reveal_type(d) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(e) # N: Revealed type is 'builtins.list[Union[Literal[1], Literal['x']]]' reveal_type(f) # N: Revealed type is 'builtins.list[builtins.object*]' reveal_type(g) # N: Revealed type is 'builtins.list[builtins.list[builtins.list[Union[Literal[1], Literal[2], Literal[3]]]]]' reveal_type(h) # N: Revealed type is 'builtins.list[Literal[1]]' lit1: Literal[1] lit2: Literal[2] lit3: Literal["foo"] arr1 = [lit1, lit1, lit1] arr2 = [lit1, lit2] arr3 = [lit1, 4, 5] arr4 = [lit1, lit2, lit3] arr5 = [object(), lit1] reveal_type(arr1) # N: Revealed type is 'builtins.list[Literal[1]]' reveal_type(arr2) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(arr3) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(arr4) # N: Revealed type is 'builtins.list[builtins.object*]' reveal_type(arr5) # N: Revealed type is 'builtins.list[builtins.object*]' bad: List[Literal[1, 2]] = [1, 2, 3] # E: List item 2 has incompatible type "Literal[3]"; expected "Union[Literal[1], Literal[2]]" [builtins fixtures/list.pyi] [out] [case testLiteralInferredInTupleContext] # Note: most of the 'are we handling context correctly' tests should have been # handled up above, so we keep things comparatively simple for tuples and dicts. 
from typing import Tuple from typing_extensions import Literal a: Tuple[Literal[1], Literal[2]] = (1, 2) b: Tuple[int, Literal[1, 2], Literal[3], Tuple[Literal["foo"]]] = (1, 2, 3, ("foo",)) c: Tuple[Literal[1], Literal[2]] = (2, 1) # E: Incompatible types in assignment (expression has type "Tuple[Literal[2], Literal[1]]", variable has type "Tuple[Literal[1], Literal[2]]") d = (1, 2) reveal_type(d) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' [builtins fixtures/tuple.pyi] [out] [case testLiteralInferredInDictContext] from typing import Dict from typing_extensions import Literal a = {"x": 1, "y": 2} b: Dict[str, Literal[1, 2]] = {"x": 1, "y": 2} c: Dict[Literal["x", "y"], int] = {"x": 1, "y": 2} reveal_type(a) # N: Revealed type is 'builtins.dict[builtins.str*, builtins.int*]' [builtins fixtures/dict.pyi] [out] [case testLiteralInferredInOverloadContextBasic] from typing import overload from typing_extensions import Literal @overload def func(x: Literal[1]) -> str: ... @overload def func(x: Literal[2]) -> int: ... @overload def func(x: int) -> object: ... def func(x: int) -> object: pass a: Literal[1] b: Literal[2] c: Literal[1, 2] reveal_type(func(1)) # N: Revealed type is 'builtins.str' reveal_type(func(2)) # N: Revealed type is 'builtins.int' reveal_type(func(3)) # N: Revealed type is 'builtins.object' reveal_type(func(a)) # N: Revealed type is 'builtins.str' reveal_type(func(b)) # N: Revealed type is 'builtins.int' # Note: the fact that we don't do union math here is consistent # with the output we would have gotten if we replaced int and the # Literal types here with regular classes/subclasses. reveal_type(func(c)) # N: Revealed type is 'builtins.object' [out] [case testLiteralOverloadProhibitUnsafeOverlaps] from typing import overload from typing_extensions import Literal @overload def func1(x: Literal[1]) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def func1(x: int) -> int: ... 
def func1(x): pass @overload def func2(x: Literal['a']) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def func2(x: str) -> Literal[2]: ... def func2(x): pass # This one is typesafe @overload def func3(x: Literal['a']) -> Literal[2]: ... @overload def func3(x: str) -> int: ... def func3(x): pass [out] [case testLiteralInferredInOverloadContextUnionMath] from typing import overload, Union from typing_extensions import Literal class A: pass class B: pass class C: pass @overload def func(x: Literal[-40]) -> A: ... @overload def func(x: Literal[3, 4, 5, 6]) -> B: ... @overload def func(x: Literal["foo"]) -> C: ... def func(x: Union[int, str]) -> Union[A, B, C]: pass a: Literal[-40, "foo"] b: Literal[3] c: Literal[3, -40] d: Literal[6, 7] e: int f: Literal[7, "bar"] reveal_type(func(a)) # N: Revealed type is 'Union[__main__.A, __main__.C]' reveal_type(func(b)) # N: Revealed type is '__main__.B' reveal_type(func(c)) # N: Revealed type is 'Union[__main__.B, __main__.A]' reveal_type(func(d)) # N: Revealed type is '__main__.B' \ # E: Argument 1 to "func" has incompatible type "Union[Literal[6], Literal[7]]"; expected "Union[Literal[3], Literal[4], Literal[5], Literal[6]]" reveal_type(func(e)) # E: No overload variant of "func" matches argument type "int" \ # N: Possible overload variants: \ # N: def func(x: Literal[-40]) -> A \ # N: def func(x: Union[Literal[3], Literal[4], Literal[5], Literal[6]]) -> B \ # N: def func(x: Literal['foo']) -> C \ # N: Revealed type is 'Any' reveal_type(func(f)) # E: No overload variant of "func" matches argument type "Union[Literal[7], Literal['bar']]" \ # N: Possible overload variants: \ # N: def func(x: Literal[-40]) -> A \ # N: def func(x: Union[Literal[3], Literal[4], Literal[5], Literal[6]]) -> B \ # N: def func(x: Literal['foo']) -> C \ # N: Revealed type is 'Any' [out] [case testLiteralInferredInOverloadContextUnionMathOverloadingReturnsBestType] # This test is a transliteration 
of check-overloading::testUnionMathOverloadingReturnsBestType from typing import overload from typing_extensions import Literal @overload def f(x: Literal[1, 2]) -> int: ... @overload def f(x: int) -> object: ... def f(x): pass x: Literal[1, 2] y: Literal[1, 2, 3] z: Literal[1, 2, "three"] reveal_type(f(x)) # N: Revealed type is 'builtins.int' reveal_type(f(1)) # N: Revealed type is 'builtins.int' reveal_type(f(2)) # N: Revealed type is 'builtins.int' reveal_type(f(y)) # N: Revealed type is 'builtins.object' reveal_type(f(z)) # N: Revealed type is 'builtins.int' \ # E: Argument 1 to "f" has incompatible type "Union[Literal[1], Literal[2], Literal['three']]"; expected "Union[Literal[1], Literal[2]]" [out] [case testLiteralInferredInOverloadContextWithTypevars] from typing import TypeVar, overload, Union from typing_extensions import Literal T = TypeVar('T') @overload def f1(x: T, y: int) -> T: ... @overload def f1(x: T, y: str) -> Union[T, str]: ... def f1(x, y): pass a: Literal[1] reveal_type(f1(1, 1)) # N: Revealed type is 'builtins.int*' reveal_type(f1(a, 1)) # N: Revealed type is 'Literal[1]' @overload def f2(x: T, y: Literal[3]) -> T: ... @overload def f2(x: T, y: str) -> Union[T]: ... def f2(x, y): pass reveal_type(f2(1, 3)) # N: Revealed type is 'builtins.int*' reveal_type(f2(a, 3)) # N: Revealed type is 'Literal[1]' @overload def f3(x: Literal[3]) -> Literal[3]: ... @overload def f3(x: T) -> T: ... def f3(x): pass reveal_type(f3(1)) # N: Revealed type is 'builtins.int*' reveal_type(f3(a)) # N: Revealed type is 'Literal[1]' @overload def f4(x: str) -> str: ... @overload def f4(x: T) -> T: ... def f4(x): pass b: Literal['foo'] reveal_type(f4(1)) # N: Revealed type is 'builtins.int*' reveal_type(f4(a)) # N: Revealed type is 'Literal[1]' reveal_type(f4("foo")) # N: Revealed type is 'builtins.str' # Note: first overload is selected and prevents the typevar from # ever inferring a Literal["something"]. 
reveal_type(f4(b)) # N: Revealed type is 'builtins.str' [out] [case testLiteralInferredInOverloadContextUnionMathTrickyOverload] # This test is a transliteration of check-overloading::testUnionMathTrickyOverload1 from typing import overload from typing_extensions import Literal @overload def f(x: Literal['a'], y: Literal['a']) -> int: ... @overload def f(x: str, y: Literal['b']) -> str: ... def f(x): pass x: Literal['a', 'b'] y: Literal['a', 'b'] f(x, y) # E: Argument 1 to "f" has incompatible type "Union[Literal['a'], Literal['b']]"; expected "Literal['a']" \ # E: Argument 2 to "f" has incompatible type "Union[Literal['a'], Literal['b']]"; expected "Literal['a']" \ [out] --- --- Tests that make sure we're correctly using the fallback --- [case testLiteralFallbackOperatorsWorkCorrectly] from typing_extensions import Literal a: Literal[3] b: int c: Literal[4] d: Literal['foo'] e: str reveal_type(a + a) # N: Revealed type is 'builtins.int' reveal_type(a + b) # N: Revealed type is 'builtins.int' reveal_type(b + a) # N: Revealed type is 'builtins.int' reveal_type(a + 1) # N: Revealed type is 'builtins.int' reveal_type(1 + a) # N: Revealed type is 'builtins.int' reveal_type(a + c) # N: Revealed type is 'builtins.int' reveal_type(c + a) # N: Revealed type is 'builtins.int' reveal_type(d + d) # N: Revealed type is 'builtins.str' reveal_type(d + e) # N: Revealed type is 'builtins.str' reveal_type(e + d) # N: Revealed type is 'builtins.str' reveal_type(d + 'foo') # N: Revealed type is 'builtins.str' reveal_type('foo' + d) # N: Revealed type is 'builtins.str' reveal_type(a.__add__(b)) # N: Revealed type is 'builtins.int' reveal_type(b.__add__(a)) # N: Revealed type is 'builtins.int' a *= b # E: Incompatible types in assignment (expression has type "int", variable has type "Literal[3]") b *= a reveal_type(b) # N: Revealed type is 'builtins.int' [out] [case testLiteralFallbackInheritedMethodsWorkCorrectly] from typing_extensions import Literal a: Literal['foo'] b: str 
reveal_type(a.startswith(a)) # N: Revealed type is 'builtins.bool' reveal_type(b.startswith(a)) # N: Revealed type is 'builtins.bool' reveal_type(a.startswith(b)) # N: Revealed type is 'builtins.bool' reveal_type(a.strip()) # N: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testLiteralFallbackMethodsDoNotCoerceToLiteral] from typing_extensions import Literal a: Literal[3] b: int c: Literal["foo"] if int(): a = a * a # E: Incompatible types in assignment (expression has type "int", variable has type "Literal[3]") a = a * b # E: Incompatible types in assignment (expression has type "int", variable has type "Literal[3]") a = b * a # E: Incompatible types in assignment (expression has type "int", variable has type "Literal[3]") b = a * a b = a * b b = b * a c = c.strip() # E: Incompatible types in assignment (expression has type "str", variable has type "Literal['foo']") [builtins fixtures/ops.pyi] [out] -- -- Tests that check we report errors when we try using Literal[...] -- in invalid places. 
-- [case testLiteralErrorsWithIsInstanceAndIsSubclass] from typing_extensions import Literal from typing_extensions import Literal as Renamed import typing_extensions as indirect Alias = Literal[3] isinstance(3, Literal[3]) # E: Cannot use isinstance() with Literal type isinstance(3, Alias) # E: Cannot use isinstance() with Literal type \ # E: The type alias to Literal is invalid in runtime context isinstance(3, Renamed[3]) # E: Cannot use isinstance() with Literal type isinstance(3, indirect.Literal[3]) # E: Cannot use isinstance() with Literal type issubclass(int, Literal[3]) # E: Cannot use issubclass() with Literal type issubclass(int, Alias) # E: Cannot use issubclass() with Literal type \ # E: The type alias to Literal is invalid in runtime context issubclass(int, Renamed[3]) # E: Cannot use issubclass() with Literal type issubclass(int, indirect.Literal[3]) # E: Cannot use issubclass() with Literal type [builtins fixtures/isinstancelist.pyi] [out] [case testLiteralErrorsWhenSubclassed] from typing_extensions import Literal from typing_extensions import Literal as Renamed import typing_extensions as indirect Alias = Literal[3] class Bad1(Literal[3]): pass # E: Invalid base class "Literal" class Bad2(Renamed[3]): pass # E: Invalid base class "Renamed" class Bad3(indirect.Literal[3]): pass # E: Invalid base class "indirect.Literal" class Bad4(Alias): pass # E: Invalid base class "Alias" [out] [case testLiteralErrorsWhenInvoked-skip] # TODO: We don't seem to correctly handle invoking types like # 'Final' and 'Protocol' as well. When fixing this, also fix # those types? 
from typing_extensions import Literal from typing_extensions import Literal as Renamed import typing_extensions as indirect Alias = Literal[3] Literal[3]() # E: The type "Type[Literal]" is not generic and not indexable Renamed[3]() # E: The type "Type[Literal]" is not generic and not indexable indirect.Literal[3]() # E: The type "Type[Literal]" is not generic and not indexable Alias() # E: The type alias to Literal is invalid in runtime context # TODO: Add appropriate error messages to the following lines Literal() Renamed() indirect.Literal() [builtins fixtures/isinstancelist.pyi] [out] -- -- Test to make sure literals interact with generics as expected -- [case testLiteralAndGenericsWithSimpleFunctions] from typing import TypeVar from typing_extensions import Literal T = TypeVar('T') def foo(x: T) -> T: pass def expects_literal(x: Literal[3]) -> None: pass def expects_int(x: int) -> None: pass a: Literal[3] reveal_type(foo(3)) # N: Revealed type is 'builtins.int*' reveal_type(foo(a)) # N: Revealed type is 'Literal[3]' expects_literal(3) expects_literal(foo(3)) expects_literal(foo(foo(3))) expects_literal(a) expects_literal(foo(a)) expects_literal(foo(foo(a))) expects_literal(5) # E: Argument 1 to "expects_literal" has incompatible type "Literal[5]"; expected "Literal[3]" expects_literal(foo(5)) # E: Argument 1 to "foo" has incompatible type "Literal[5]"; expected "Literal[3]" expects_literal(foo(foo(5))) # E: Argument 1 to "foo" has incompatible type "Literal[5]"; expected "Literal[3]" expects_int(a) expects_int(foo(a)) expects_int(foo(foo(a))) [out] [case testLiteralAndGenericWithUnion] from typing import TypeVar, Union from typing_extensions import Literal T = TypeVar('T') def identity(x: T) -> T: return x a: Union[int, Literal['foo']] = identity('foo') b: Union[int, Literal['foo']] = identity('bar') # E: Argument 1 to "identity" has incompatible type "Literal['bar']"; expected "Union[int, Literal['foo']]" [out] [case testLiteralAndGenericsNoMatch] from typing 
import TypeVar, Union, List from typing_extensions import Literal def identity(x: T) -> T: return x Ok1 = Union[List[int], Literal['bad']] Ok2 = Union[List[Literal[42]], Literal['bad']] Bad = Union[List[Literal[43]], Literal['bad']] x: Ok1 = identity([42]) y: Ok2 = identity([42]) z: Bad = identity([42]) # E: List item 0 has incompatible type "Literal[42]"; expected "Literal[43]" [builtins fixtures/list.pyi] [out] [case testLiteralAndGenericsWithSimpleClasses] from typing import TypeVar, Generic from typing_extensions import Literal T = TypeVar('T') class Wrapper(Generic[T]): def __init__(self, val: T) -> None: self.val = val def inner(self) -> T: return self.val def expects_literal(a: Literal[3]) -> None: pass def expects_literal_wrapper(x: Wrapper[Literal[3]]) -> None: pass a: Literal[3] reveal_type(Wrapper(3)) # N: Revealed type is '__main__.Wrapper[builtins.int*]' reveal_type(Wrapper[Literal[3]](3)) # N: Revealed type is '__main__.Wrapper[Literal[3]]' reveal_type(Wrapper(a)) # N: Revealed type is '__main__.Wrapper[Literal[3]]' expects_literal(Wrapper(a).inner()) # Note: the following probably ought to type-check: it's reasonable to infer # Wrapper[Literal[3]] here. 
# TODO: Consider finding a way to handle this edge case better expects_literal(Wrapper(3).inner()) # E: Argument 1 to "expects_literal" has incompatible type "int"; expected "Literal[3]" # Note: if we handle the edge case above, we should make sure this error # message switches to warning about an incompatible type 'Literal[5]' rather # then an incompatible type 'int' expects_literal(Wrapper(5).inner()) # E: Argument 1 to "expects_literal" has incompatible type "int"; expected "Literal[3]" expects_literal_wrapper(Wrapper(a)) expects_literal_wrapper(Wrapper(3)) expects_literal_wrapper(Wrapper(5)) # E: Argument 1 to "Wrapper" has incompatible type "Literal[5]"; expected "Literal[3]" [out] [case testLiteralAndGenericsRespectsUpperBound] from typing import TypeVar from typing_extensions import Literal TLiteral = TypeVar('TLiteral', bound=Literal[3]) TInt = TypeVar('TInt', bound=int) def func1(x: TLiteral) -> TLiteral: pass def func2(x: TInt) -> TInt: pass def func3(x: TLiteral) -> TLiteral: y = func2(x) return y def func4(x: TInt) -> TInt: y = func1(x) # E: Value of type variable "TLiteral" of "func1" cannot be "TInt" return y a: Literal[3] b: Literal[4] c: int reveal_type(func1) # N: Revealed type is 'def [TLiteral <: Literal[3]] (x: TLiteral`-1) -> TLiteral`-1' reveal_type(func1(3)) # N: Revealed type is 'Literal[3]' reveal_type(func1(a)) # N: Revealed type is 'Literal[3]' reveal_type(func1(4)) # E: Value of type variable "TLiteral" of "func1" cannot be "Literal[4]" \ # N: Revealed type is 'Literal[4]' reveal_type(func1(b)) # E: Value of type variable "TLiteral" of "func1" cannot be "Literal[4]" \ # N: Revealed type is 'Literal[4]' reveal_type(func1(c)) # E: Value of type variable "TLiteral" of "func1" cannot be "int" \ # N: Revealed type is 'builtins.int*' reveal_type(func2(3)) # N: Revealed type is 'builtins.int*' reveal_type(func2(a)) # N: Revealed type is 'Literal[3]' reveal_type(func2(4)) # N: Revealed type is 'builtins.int*' reveal_type(func2(b)) # N: Revealed 
type is 'Literal[4]' reveal_type(func2(c)) # N: Revealed type is 'builtins.int*' [out] [case testLiteralAndGenericsRespectsValueRestriction] from typing import TypeVar from typing_extensions import Literal TLiteral = TypeVar('TLiteral', Literal[3], Literal['foo']) TNormal = TypeVar('TNormal', int, str) def func1(x: TLiteral) -> TLiteral: pass def func2(x: TNormal) -> TNormal: pass def func3(x: TLiteral) -> TLiteral: y = func2(x) return y # E: Incompatible return value type (got "int", expected "Literal[3]") \ # E: Incompatible return value type (got "str", expected "Literal['foo']") def func4(x: TNormal) -> TNormal: y = func1(x) # E: Value of type variable "TLiteral" of "func1" cannot be "int" \ # E: Value of type variable "TLiteral" of "func1" cannot be "str" return y i1: Literal[3] i2: Literal[4] i: int s1: Literal['foo'] s2: Literal['bar'] s: str reveal_type(func1) # N: Revealed type is 'def [TLiteral in (Literal[3], Literal['foo'])] (x: TLiteral`-1) -> TLiteral`-1' reveal_type(func1(3)) # N: Revealed type is 'Literal[3]' reveal_type(func1(i1)) # N: Revealed type is 'Literal[3]' reveal_type(func1(4)) # E: Value of type variable "TLiteral" of "func1" cannot be "Literal[4]" \ # N: Revealed type is 'Literal[4]' reveal_type(func1(i2)) # E: Value of type variable "TLiteral" of "func1" cannot be "Literal[4]" \ # N: Revealed type is 'Literal[4]' reveal_type(func1(i)) # E: Value of type variable "TLiteral" of "func1" cannot be "int" \ # N: Revealed type is 'builtins.int*' reveal_type(func1("foo")) # N: Revealed type is 'Literal['foo']' reveal_type(func1(s1)) # N: Revealed type is 'Literal['foo']' reveal_type(func1("bar")) # E: Value of type variable "TLiteral" of "func1" cannot be "Literal['bar']" \ # N: Revealed type is 'Literal['bar']' reveal_type(func1(s2)) # E: Value of type variable "TLiteral" of "func1" cannot be "Literal['bar']" \ # N: Revealed type is 'Literal['bar']' reveal_type(func1(s)) # E: Value of type variable "TLiteral" of "func1" cannot be "str" \ # N: 
Revealed type is 'builtins.str*' reveal_type(func2(3)) # N: Revealed type is 'builtins.int*' reveal_type(func2(i1)) # N: Revealed type is 'builtins.int*' reveal_type(func2(4)) # N: Revealed type is 'builtins.int*' reveal_type(func2(i2)) # N: Revealed type is 'builtins.int*' reveal_type(func2("foo")) # N: Revealed type is 'builtins.str*' reveal_type(func2(s1)) # N: Revealed type is 'builtins.str*' reveal_type(func2("bar")) # N: Revealed type is 'builtins.str*' reveal_type(func2(s2)) # N: Revealed type is 'builtins.str*' [out] [case testLiteralAndGenericsWithOverloads] from typing import TypeVar, overload, Union from typing_extensions import Literal @overload def func1(x: Literal[4]) -> Literal[19]: ... @overload def func1(x: int) -> int: ... def func1(x: int) -> int: pass T = TypeVar('T') def identity(x: T) -> T: pass a: Literal[4] b: Literal[5] reveal_type(func1(identity(4))) # N: Revealed type is 'Literal[19]' reveal_type(func1(identity(5))) # N: Revealed type is 'builtins.int' reveal_type(func1(identity(a))) # N: Revealed type is 'Literal[19]' reveal_type(func1(identity(b))) # N: Revealed type is 'builtins.int' -- -- Interactions with meets -- [case testLiteralMeets] from typing import TypeVar, List, Callable, Union from typing_extensions import Literal a: Callable[[Literal[1]], int] b: Callable[[Literal[2]], str] c: Callable[[int], str] d: Callable[[object], str] e: Callable[[Union[Literal[1], Literal[2]]], str] arr1 = [a, a] arr2 = [a, b] arr3 = [a, c] arr4 = [a, d] arr5 = [a, e] reveal_type(arr1) # N: Revealed type is 'builtins.list[def (Literal[1]) -> builtins.int]' reveal_type(arr2) # N: Revealed type is 'builtins.list[builtins.function*]' reveal_type(arr3) # N: Revealed type is 'builtins.list[def (Literal[1]) -> builtins.object]' reveal_type(arr4) # N: Revealed type is 'builtins.list[def (Literal[1]) -> builtins.object]' reveal_type(arr5) # N: Revealed type is 'builtins.list[def (Literal[1]) -> builtins.object]' # Inspect just only one interesting one lit: 
Literal[1] reveal_type(arr2[0](lit)) # E: Cannot call function of unknown type \ # N: Revealed type is 'Any' T = TypeVar('T') def unify(func: Callable[[T, T], None]) -> T: pass def f1(x: Literal[1], y: Literal[1]) -> None: pass def f2(x: Literal[1], y: Literal[2]) -> None: pass def f3(x: Literal[1], y: int) -> None: pass def f4(x: Literal[1], y: object) -> None: pass def f5(x: Literal[1], y: Union[Literal[1], Literal[2]]) -> None: pass reveal_type(unify(f1)) # N: Revealed type is 'Literal[1]' reveal_type(unify(f2)) # N: Revealed type is 'None' reveal_type(unify(f3)) # N: Revealed type is 'Literal[1]' reveal_type(unify(f4)) # N: Revealed type is 'Literal[1]' reveal_type(unify(f5)) # N: Revealed type is 'Literal[1]' [builtins fixtures/list.pyi] [out] [case testLiteralMeetsWithStrictOptional] # flags: --strict-optional from typing import TypeVar, Callable, Union from typing_extensions import Literal a: Callable[[Literal[1]], int] b: Callable[[Literal[2]], str] lit: Literal[1] arr = [a, b] reveal_type(arr) # N: Revealed type is 'builtins.list[builtins.function*]' reveal_type(arr[0](lit)) # E: Cannot call function of unknown type \ # N: Revealed type is 'Any' T = TypeVar('T') def unify(func: Callable[[T, T], None]) -> T: pass def func(x: Literal[1], y: Literal[2]) -> None: pass reveal_type(unify(func)) # N: Revealed type is '' [builtins fixtures/list.pyi] [out] -- -- Checks for intelligent indexing -- [case testLiteralIntelligentIndexingTuples] from typing import Tuple, NamedTuple from typing_extensions import Literal class A: pass class B: pass class C: pass class D: pass class E: pass idx0: Literal[0] idx1: Literal[1] idx2: Literal[2] idx3: Literal[3] idx4: Literal[4] idx5: Literal[5] idx_neg1: Literal[-1] tup1: Tuple[A, B, C, D, E] reveal_type(tup1[idx0]) # N: Revealed type is '__main__.A' reveal_type(tup1[idx1]) # N: Revealed type is '__main__.B' reveal_type(tup1[idx2]) # N: Revealed type is '__main__.C' reveal_type(tup1[idx3]) # N: Revealed type is '__main__.D' 
reveal_type(tup1[idx4]) # N: Revealed type is '__main__.E' reveal_type(tup1[idx_neg1]) # N: Revealed type is '__main__.E' tup1[idx5] # E: Tuple index out of range reveal_type(tup1[idx2:idx4]) # N: Revealed type is 'Tuple[__main__.C, __main__.D]' reveal_type(tup1[::idx2]) # N: Revealed type is 'Tuple[__main__.A, __main__.C, __main__.E]' Tup2Class = NamedTuple('Tup2Class', [('a', A), ('b', B), ('c', C), ('d', D), ('e', E)]) tup2: Tup2Class reveal_type(tup2[idx0]) # N: Revealed type is '__main__.A' reveal_type(tup2[idx1]) # N: Revealed type is '__main__.B' reveal_type(tup2[idx2]) # N: Revealed type is '__main__.C' reveal_type(tup2[idx3]) # N: Revealed type is '__main__.D' reveal_type(tup2[idx4]) # N: Revealed type is '__main__.E' reveal_type(tup2[idx_neg1]) # N: Revealed type is '__main__.E' tup2[idx5] # E: Tuple index out of range reveal_type(tup2[idx2:idx4]) # N: Revealed type is 'Tuple[__main__.C, __main__.D, fallback=__main__.Tup2Class]' reveal_type(tup2[::idx2]) # N: Revealed type is 'Tuple[__main__.A, __main__.C, __main__.E, fallback=__main__.Tup2Class]' [builtins fixtures/slice.pyi] [out] [case testLiteralIntelligentIndexingTypedDict] from typing_extensions import Literal from mypy_extensions import TypedDict class Unrelated: pass u: Unrelated class Inner(TypedDict): a: int class Outer(Inner, total=False): b: str a_key: Literal["a"] b_key: Literal["b"] c_key: Literal["c"] d: Outer reveal_type(d[a_key]) # N: Revealed type is 'builtins.int' reveal_type(d[b_key]) # N: Revealed type is 'builtins.str' d[c_key] # E: TypedDict "Outer" has no key 'c' reveal_type(d.get(a_key, u)) # N: Revealed type is 'Union[builtins.int, __main__.Unrelated]' reveal_type(d.get(b_key, u)) # N: Revealed type is 'Union[builtins.str, __main__.Unrelated]' d.get(c_key, u) # E: TypedDict "Outer" has no key 'c' reveal_type(d.pop(a_key)) # E: Key 'a' of TypedDict "Outer" cannot be deleted \ # N: Revealed type is 'builtins.int' reveal_type(d.pop(b_key)) # N: Revealed type is 'builtins.str' 
d.pop(c_key) # E: TypedDict "Outer" has no key 'c' del d[a_key] # E: Key 'a' of TypedDict "Outer" cannot be deleted del d[b_key] del d[c_key] # E: TypedDict "Outer" has no key 'c' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] [case testLiteralIntelligentIndexingUsingFinal] from typing import Tuple, NamedTuple from typing_extensions import Literal, Final from mypy_extensions import TypedDict int_key_good: Final = 0 int_key_bad: Final = 3 str_key_good: Final = "foo" str_key_bad: Final = "missing" class Unrelated: pass MyTuple = NamedTuple('MyTuple', [ ('foo', int), ('bar', str), ]) class MyDict(TypedDict): foo: int bar: str a: Tuple[int, str] b: MyTuple c: MyDict u: Unrelated reveal_type(a[int_key_good]) # N: Revealed type is 'builtins.int' reveal_type(b[int_key_good]) # N: Revealed type is 'builtins.int' reveal_type(c[str_key_good]) # N: Revealed type is 'builtins.int' reveal_type(c.get(str_key_good, u)) # N: Revealed type is 'Union[builtins.int, __main__.Unrelated]' a[int_key_bad] # E: Tuple index out of range b[int_key_bad] # E: Tuple index out of range c[str_key_bad] # E: TypedDict "MyDict" has no key 'missing' c.get(str_key_bad, u) # E: TypedDict "MyDict" has no key 'missing' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] [case testLiteralIntelligentIndexingTupleUnions] from typing import Tuple, NamedTuple from typing_extensions import Literal class A: pass class B: pass class C: pass class D: pass class E: pass idx1: Literal[1, 2] idx2: Literal[3, 4] idx_bad: Literal[1, 20] tup1: Tuple[A, B, C, D, E] Tup2Class = NamedTuple('Tup2Class', [('a', A), ('b', B), ('c', C), ('d', D), ('e', E)]) tup2: Tup2Class reveal_type(tup1[idx1]) # N: Revealed type is 'Union[__main__.B, __main__.C]' reveal_type(tup1[idx1:idx2]) # N: Revealed type is 'Union[Tuple[__main__.B, __main__.C], Tuple[__main__.B, __main__.C, __main__.D], Tuple[__main__.C], Tuple[__main__.C, __main__.D]]' reveal_type(tup1[0::idx1]) # N: Revealed type is 
'Union[Tuple[__main__.A, __main__.B, __main__.C, __main__.D, __main__.E], Tuple[__main__.A, __main__.C, __main__.E]]' tup1[idx_bad] # E: Tuple index out of range reveal_type(tup2[idx1]) # N: Revealed type is 'Union[__main__.B, __main__.C]' reveal_type(tup2[idx1:idx2]) # N: Revealed type is 'Union[Tuple[__main__.B, __main__.C, fallback=__main__.Tup2Class], Tuple[__main__.B, __main__.C, __main__.D, fallback=__main__.Tup2Class], Tuple[__main__.C, fallback=__main__.Tup2Class], Tuple[__main__.C, __main__.D, fallback=__main__.Tup2Class]]' reveal_type(tup2[0::idx1]) # N: Revealed type is 'Union[Tuple[__main__.A, __main__.B, __main__.C, __main__.D, __main__.E, fallback=__main__.Tup2Class], Tuple[__main__.A, __main__.C, __main__.E, fallback=__main__.Tup2Class]]' tup2[idx_bad] # E: Tuple index out of range [builtins fixtures/slice.pyi] [out] [case testLiteralIntelligentIndexingTypedDictUnions] from typing_extensions import Literal, Final from mypy_extensions import TypedDict class A: pass class B: pass class C: pass class D: pass class E: pass class Base(TypedDict): a: A b: B c: C class Test(Base, total=False): d: D e: E class AAndB(A, B): pass test: Test good_keys: Literal["a", "b"] optional_keys: Literal["d", "e"] bad_keys: Literal["a", "bad"] reveal_type(test[good_keys]) # N: Revealed type is 'Union[__main__.A, __main__.B]' reveal_type(test.get(good_keys)) # N: Revealed type is 'Union[__main__.A, __main__.B]' reveal_type(test.get(good_keys, 3)) # N: Revealed type is 'Union[__main__.A, Literal[3]?, __main__.B]' reveal_type(test.pop(optional_keys)) # N: Revealed type is 'Union[__main__.D, __main__.E]' reveal_type(test.pop(optional_keys, 3)) # N: Revealed type is 'Union[__main__.D, __main__.E, Literal[3]?]' reveal_type(test.setdefault(good_keys, AAndB())) # N: Revealed type is 'Union[__main__.A, __main__.B]' del test[optional_keys] test[bad_keys] # E: TypedDict "Test" has no key 'bad' test.get(bad_keys) # E: TypedDict "Test" has no key 'bad' test.get(bad_keys, 3) # E: 
TypedDict "Test" has no key 'bad' test.pop(good_keys) # E: Key 'a' of TypedDict "Test" cannot be deleted \ # E: Key 'b' of TypedDict "Test" cannot be deleted test.pop(bad_keys) # E: Key 'a' of TypedDict "Test" cannot be deleted \ # E: TypedDict "Test" has no key 'bad' test.setdefault(good_keys, 3) # E: Argument 2 to "setdefault" of "TypedDict" has incompatible type "int"; expected "A" test.setdefault(bad_keys, 3 ) # E: Argument 2 to "setdefault" of "TypedDict" has incompatible type "int"; expected "A" del test[good_keys] # E: Key 'a' of TypedDict "Test" cannot be deleted \ # E: Key 'b' of TypedDict "Test" cannot be deleted del test[bad_keys] # E: Key 'a' of TypedDict "Test" cannot be deleted \ # E: TypedDict "Test" has no key 'bad' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] [case testLiteralIntelligentIndexingTypedDictPython2-skip] # flags: --python-version 2.7 from normal_mod import NormalDict from unicode_mod import UnicodeDict from typing_extensions import Literal normal_dict = NormalDict(key=4) unicode_dict = UnicodeDict(key=4) normal_key = "key" # type: Literal["key"] unicode_key = u"key" # type: Literal[u"key"] # TODO: Make the runtime and mypy behaviors here consistent # # At runtime, all eight of the below operations will successfully return # the int because b"key" == u"key" in Python 2. # # Mypy, in contrast, will accept all the four calls to `some_dict[...]` # but will reject `normal_dict.get(unicode_key)` and `unicode_dict.get(unicode_key)` # because the signature of `.get(...)` accepts only a str, not unicode. # # We get the same behavior if we replace all of the Literal[...] types for # actual string literals. # # See https://github.com/python/mypy/issues/6123 for more details. 
reveal_type(normal_dict[normal_key]) # N: Revealed type is 'builtins.int' reveal_type(normal_dict[unicode_key]) # N: Revealed type is 'builtins.int' reveal_type(unicode_dict[normal_key]) # N: Revealed type is 'builtins.int' reveal_type(unicode_dict[unicode_key]) # N: Revealed type is 'builtins.int' reveal_type(normal_dict.get(normal_key)) # N: Revealed type is 'builtins.int' reveal_type(normal_dict.get(unicode_key)) # N: Revealed type is 'builtins.int' reveal_type(unicode_dict.get(normal_key)) # N: Revealed type is 'builtins.int' reveal_type(unicode_dict.get(unicode_key)) # N: Revealed type is 'builtins.int' [file normal_mod.py] from mypy_extensions import TypedDict NormalDict = TypedDict('NormalDict', {'key': int}) [file unicode_mod.py] from __future__ import unicode_literals from mypy_extensions import TypedDict UnicodeDict = TypedDict(b'UnicodeDict', {'key': int}) [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testLiteralIntelligentIndexingMultiTypedDict] from typing import Union from typing_extensions import Literal from mypy_extensions import TypedDict class A: pass class B: pass class C: pass class D: pass class D1(TypedDict): a: A b: B c: C class D2(TypedDict): b: B c: C d: D x: Union[D1, D2] bad_keys: Literal['a', 'b', 'c', 'd'] good_keys: Literal['b', 'c'] x[bad_keys] # E: TypedDict "D1" has no key 'd' \ # E: TypedDict "D2" has no key 'a' x.get(bad_keys) # E: TypedDict "D1" has no key 'd' \ # E: TypedDict "D2" has no key 'a' x.get(bad_keys, 3) # E: TypedDict "D1" has no key 'd' \ # E: TypedDict "D2" has no key 'a' reveal_type(x[good_keys]) # N: Revealed type is 'Union[__main__.B, __main__.C]' reveal_type(x.get(good_keys)) # N: Revealed type is 'Union[__main__.B, __main__.C]' reveal_type(x.get(good_keys, 3)) # N: Revealed type is 'Union[__main__.B, Literal[3]?, __main__.C]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- -- Interactions with 'Final' -- [case testLiteralFinalInferredAsLiteral] from typing_extensions 
import Final, Literal var1: Final = 1 var2: Final = "foo" var3: Final = True var4: Final = None class Foo: classvar1: Final = 1 classvar2: Final = "foo" classvar3: Final = True classvar4: Final = None def __init__(self) -> None: self.instancevar1: Final = 1 self.instancevar2: Final = "foo" self.instancevar3: Final = True self.instancevar4: Final = None def force1(x: Literal[1]) -> None: pass def force2(x: Literal["foo"]) -> None: pass def force3(x: Literal[True]) -> None: pass def force4(x: Literal[None]) -> None: pass reveal_type(var1) # N: Revealed type is 'Literal[1]?' reveal_type(var2) # N: Revealed type is 'Literal['foo']?' reveal_type(var3) # N: Revealed type is 'Literal[True]?' reveal_type(var4) # N: Revealed type is 'None' force1(reveal_type(var1)) # N: Revealed type is 'Literal[1]' force2(reveal_type(var2)) # N: Revealed type is 'Literal['foo']' force3(reveal_type(var3)) # N: Revealed type is 'Literal[True]' force4(reveal_type(var4)) # N: Revealed type is 'None' reveal_type(Foo.classvar1) # N: Revealed type is 'Literal[1]?' reveal_type(Foo.classvar2) # N: Revealed type is 'Literal['foo']?' reveal_type(Foo.classvar3) # N: Revealed type is 'Literal[True]?' reveal_type(Foo.classvar4) # N: Revealed type is 'None' force1(reveal_type(Foo.classvar1)) # N: Revealed type is 'Literal[1]' force2(reveal_type(Foo.classvar2)) # N: Revealed type is 'Literal['foo']' force3(reveal_type(Foo.classvar3)) # N: Revealed type is 'Literal[True]' force4(reveal_type(Foo.classvar4)) # N: Revealed type is 'None' f = Foo() reveal_type(f.instancevar1) # N: Revealed type is 'Literal[1]?' reveal_type(f.instancevar2) # N: Revealed type is 'Literal['foo']?' reveal_type(f.instancevar3) # N: Revealed type is 'Literal[True]?' 
reveal_type(f.instancevar4) # N: Revealed type is 'None' force1(reveal_type(f.instancevar1)) # N: Revealed type is 'Literal[1]' force2(reveal_type(f.instancevar2)) # N: Revealed type is 'Literal['foo']' force3(reveal_type(f.instancevar3)) # N: Revealed type is 'Literal[True]' force4(reveal_type(f.instancevar4)) # N: Revealed type is 'None' [builtins fixtures/primitives.pyi] [out] [case testLiteralFinalDirectInstanceTypesSupercedeInferredLiteral] from typing_extensions import Final, Literal var1: Final[int] = 1 var2: Final[str] = "foo" var3: Final[bool] = True var4: Final[None] = None class Foo: classvar1: Final[int] = 1 classvar2: Final[str] = "foo" classvar3: Final[bool] = True classvar4: Final[None] = None def __init__(self) -> None: self.instancevar1: Final[int] = 1 self.instancevar2: Final[str] = "foo" self.instancevar3: Final[bool] = True self.instancevar4: Final[None] = None def force1(x: Literal[1]) -> None: pass def force2(x: Literal["foo"]) -> None: pass def force3(x: Literal[True]) -> None: pass def force4(x: Literal[None]) -> None: pass reveal_type(var1) # N: Revealed type is 'builtins.int' reveal_type(var2) # N: Revealed type is 'builtins.str' reveal_type(var3) # N: Revealed type is 'builtins.bool' reveal_type(var4) # N: Revealed type is 'None' force1(var1) # E: Argument 1 to "force1" has incompatible type "int"; expected "Literal[1]" force2(var2) # E: Argument 1 to "force2" has incompatible type "str"; expected "Literal['foo']" force3(var3) # E: Argument 1 to "force3" has incompatible type "bool"; expected "Literal[True]" force4(var4) reveal_type(Foo.classvar1) # N: Revealed type is 'builtins.int' reveal_type(Foo.classvar2) # N: Revealed type is 'builtins.str' reveal_type(Foo.classvar3) # N: Revealed type is 'builtins.bool' reveal_type(Foo.classvar4) # N: Revealed type is 'None' force1(Foo.classvar1) # E: Argument 1 to "force1" has incompatible type "int"; expected "Literal[1]" force2(Foo.classvar2) # E: Argument 1 to "force2" has incompatible type 
"str"; expected "Literal['foo']" force3(Foo.classvar3) # E: Argument 1 to "force3" has incompatible type "bool"; expected "Literal[True]" force4(Foo.classvar4) f = Foo() reveal_type(f.instancevar1) # N: Revealed type is 'builtins.int' reveal_type(f.instancevar2) # N: Revealed type is 'builtins.str' reveal_type(f.instancevar3) # N: Revealed type is 'builtins.bool' reveal_type(f.instancevar4) # N: Revealed type is 'None' force1(f.instancevar1) # E: Argument 1 to "force1" has incompatible type "int"; expected "Literal[1]" force2(f.instancevar2) # E: Argument 1 to "force2" has incompatible type "str"; expected "Literal['foo']" force3(f.instancevar3) # E: Argument 1 to "force3" has incompatible type "bool"; expected "Literal[True]" force4(f.instancevar4) [builtins fixtures/primitives.pyi] [out] [case testLiteralFinalDirectLiteralTypesForceLiteral] from typing_extensions import Final, Literal var1: Final[Literal[1]] = 1 var2: Final[Literal["foo"]] = "foo" var3: Final[Literal[True]] = True var4: Final[Literal[None]] = None class Foo: classvar1: Final[Literal[1]] = 1 classvar2: Final[Literal["foo"]] = "foo" classvar3: Final[Literal[True]] = True classvar4: Final[Literal[None]] = None def __init__(self) -> None: self.instancevar1: Final[Literal[1]] = 1 self.instancevar2: Final[Literal["foo"]] = "foo" self.instancevar3: Final[Literal[True]] = True self.instancevar4: Final[Literal[None]] = None def force1(x: Literal[1]) -> None: pass def force2(x: Literal["foo"]) -> None: pass def force3(x: Literal[True]) -> None: pass def force4(x: Literal[None]) -> None: pass reveal_type(var1) # N: Revealed type is 'Literal[1]' reveal_type(var2) # N: Revealed type is 'Literal['foo']' reveal_type(var3) # N: Revealed type is 'Literal[True]' reveal_type(var4) # N: Revealed type is 'None' force1(reveal_type(var1)) # N: Revealed type is 'Literal[1]' force2(reveal_type(var2)) # N: Revealed type is 'Literal['foo']' force3(reveal_type(var3)) # N: Revealed type is 'Literal[True]' 
force4(reveal_type(var4)) # N: Revealed type is 'None' reveal_type(Foo.classvar1) # N: Revealed type is 'Literal[1]' reveal_type(Foo.classvar2) # N: Revealed type is 'Literal['foo']' reveal_type(Foo.classvar3) # N: Revealed type is 'Literal[True]' reveal_type(Foo.classvar4) # N: Revealed type is 'None' force1(reveal_type(Foo.classvar1)) # N: Revealed type is 'Literal[1]' force2(reveal_type(Foo.classvar2)) # N: Revealed type is 'Literal['foo']' force3(reveal_type(Foo.classvar3)) # N: Revealed type is 'Literal[True]' force4(reveal_type(Foo.classvar4)) # N: Revealed type is 'None' f = Foo() reveal_type(f.instancevar1) # N: Revealed type is 'Literal[1]' reveal_type(f.instancevar2) # N: Revealed type is 'Literal['foo']' reveal_type(f.instancevar3) # N: Revealed type is 'Literal[True]' reveal_type(f.instancevar4) # N: Revealed type is 'None' force1(reveal_type(f.instancevar1)) # N: Revealed type is 'Literal[1]' force2(reveal_type(f.instancevar2)) # N: Revealed type is 'Literal['foo']' force3(reveal_type(f.instancevar3)) # N: Revealed type is 'Literal[True]' force4(reveal_type(f.instancevar4)) # N: Revealed type is 'None' [builtins fixtures/primitives.pyi] [out] [case testLiteralFinalErasureInMutableDatastructures1] # flags: --strict-optional from typing_extensions import Final var1: Final = [0, None] var2: Final = (0, None) reveal_type(var1) # N: Revealed type is 'builtins.list[Union[builtins.int, None]]' reveal_type(var2) # N: Revealed type is 'Tuple[Literal[0]?, None]' [builtins fixtures/tuple.pyi] [case testLiteralFinalErasureInMutableDatastructures2] from typing_extensions import Final, Literal var1: Final = [] var1.append(0) reveal_type(var1) # N: Revealed type is 'builtins.list[builtins.int]' var2 = [] var2.append(0) reveal_type(var2) # N: Revealed type is 'builtins.list[builtins.int]' x: Literal[0] = 0 var3 = [] var3.append(x) reveal_type(var3) # N: Revealed type is 'builtins.list[Literal[0]]' [builtins fixtures/list.pyi] [case testLiteralFinalMismatchCausesError] 
from typing_extensions import Final, Literal var1: Final[Literal[4]] = 1 # E: Incompatible types in assignment (expression has type "Literal[1]", variable has type "Literal[4]") var2: Final[Literal['bad']] = "foo" # E: Incompatible types in assignment (expression has type "Literal['foo']", variable has type "Literal['bad']") var3: Final[Literal[False]] = True # E: Incompatible types in assignment (expression has type "Literal[True]", variable has type "Literal[False]") class Foo: classvar1: Final[Literal[4]] = 1 # E: Incompatible types in assignment (expression has type "Literal[1]", variable has type "Literal[4]") classvar2: Final[Literal['bad']] = "foo" # E: Incompatible types in assignment (expression has type "Literal['foo']", variable has type "Literal['bad']") classvar3: Final[Literal[False]] = True # E: Incompatible types in assignment (expression has type "Literal[True]", variable has type "Literal[False]") def __init__(self) -> None: self.instancevar1: Final[Literal[4]] = 1 # E: Incompatible types in assignment (expression has type "Literal[1]", variable has type "Literal[4]") self.instancevar2: Final[Literal['bad']] = "foo" # E: Incompatible types in assignment (expression has type "Literal['foo']", variable has type "Literal['bad']") self.instancevar3: Final[Literal[False]] = True # E: Incompatible types in assignment (expression has type "Literal[True]", variable has type "Literal[False]") # TODO: Fix the order in which these error messages are shown to be more consistent. 
var1 = 10 # E: Cannot assign to final name "var1" \ # E: Incompatible types in assignment (expression has type "Literal[10]", variable has type "Literal[4]") Foo.classvar1 = 10 # E: Cannot assign to final attribute "classvar1" \ # E: Incompatible types in assignment (expression has type "Literal[10]", variable has type "Literal[4]") Foo().instancevar1 = 10 # E: Cannot assign to final attribute "instancevar1" \ # E: Incompatible types in assignment (expression has type "Literal[10]", variable has type "Literal[4]") [builtins fixtures/primitives.pyi] [out] [case testLiteralFinalGoesOnlyOneLevelDown] from typing import Tuple from typing_extensions import Final, Literal a: Final = 1 b: Final = (1, 2) def force1(x: Literal[1]) -> None: pass def force2(x: Tuple[Literal[1], Literal[2]]) -> None: pass reveal_type(a) # N: Revealed type is 'Literal[1]?' reveal_type(b) # N: Revealed type is 'Tuple[Literal[1]?, Literal[2]?]' # TODO: This test seems somewhat broken and might need a rewrite (and a fix somewhere in mypy). # See https://github.com/python/mypy/issues/7399#issuecomment-554188073 for more context. 
force1(reveal_type(a)) # N: Revealed type is 'Literal[1]' force2(reveal_type(b)) # E: Argument 1 to "force2" has incompatible type "Tuple[int, int]"; expected "Tuple[Literal[1], Literal[2]]" \ # N: Revealed type is 'Tuple[Literal[1]?, Literal[2]?]' [builtins fixtures/tuple.pyi] [out] [case testLiteralFinalCollectionPropagation] from typing import List from typing_extensions import Final, Literal a: Final = 1 implicit = [a] explicit: List[Literal[1]] = [a] direct = [1] def force1(x: List[Literal[1]]) -> None: pass def force2(x: Literal[1]) -> None: pass reveal_type(implicit) # N: Revealed type is 'builtins.list[builtins.int*]' force1(reveal_type(implicit)) # E: Argument 1 to "force1" has incompatible type "List[int]"; expected "List[Literal[1]]" \ # N: Revealed type is 'builtins.list[builtins.int*]' force2(reveal_type(implicit[0])) # E: Argument 1 to "force2" has incompatible type "int"; expected "Literal[1]" \ # N: Revealed type is 'builtins.int*' reveal_type(explicit) # N: Revealed type is 'builtins.list[Literal[1]]' force1(reveal_type(explicit)) # N: Revealed type is 'builtins.list[Literal[1]]' force2(reveal_type(explicit[0])) # N: Revealed type is 'Literal[1]' reveal_type(direct) # N: Revealed type is 'builtins.list[builtins.int*]' force1(reveal_type(direct)) # E: Argument 1 to "force1" has incompatible type "List[int]"; expected "List[Literal[1]]" \ # N: Revealed type is 'builtins.list[builtins.int*]' force2(reveal_type(direct[0])) # E: Argument 1 to "force2" has incompatible type "int"; expected "Literal[1]" \ # N: Revealed type is 'builtins.int*' [builtins fixtures/list.pyi] [out] [case testLiteralFinalStringTypesPython3] from typing_extensions import Final, Literal a: Final = u"foo" b: Final = "foo" c: Final = b"foo" def force_unicode(x: Literal[u"foo"]) -> None: pass def force_bytes(x: Literal[b"foo"]) -> None: pass force_unicode(reveal_type(a)) # N: Revealed type is 'Literal['foo']' force_unicode(reveal_type(b)) # N: Revealed type is 'Literal['foo']' 
force_unicode(reveal_type(c)) # E: Argument 1 to "force_unicode" has incompatible type "Literal[b'foo']"; expected "Literal['foo']" \ # N: Revealed type is 'Literal[b'foo']' force_bytes(reveal_type(a)) # E: Argument 1 to "force_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" \ # N: Revealed type is 'Literal['foo']' force_bytes(reveal_type(b)) # E: Argument 1 to "force_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" \ # N: Revealed type is 'Literal['foo']' force_bytes(reveal_type(c)) # N: Revealed type is 'Literal[b'foo']' [out] [case testLiteralFinalStringTypesPython2UnicodeLiterals] # flags: --python-version 2.7 from __future__ import unicode_literals from typing_extensions import Final, Literal a = u"foo" # type: Final b = "foo" # type: Final c = b"foo" # type: Final def force_unicode(x): # type: (Literal[u"foo"]) -> None pass def force_bytes(x): # type: (Literal[b"foo"]) -> None pass force_unicode(reveal_type(a)) # N: Revealed type is 'Literal[u'foo']' force_unicode(reveal_type(b)) # N: Revealed type is 'Literal[u'foo']' force_unicode(reveal_type(c)) # E: Argument 1 to "force_unicode" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" \ # N: Revealed type is 'Literal['foo']' force_bytes(reveal_type(a)) # E: Argument 1 to "force_bytes" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" \ # N: Revealed type is 'Literal[u'foo']' force_bytes(reveal_type(b)) # E: Argument 1 to "force_bytes" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" \ # N: Revealed type is 'Literal[u'foo']' force_bytes(reveal_type(c)) # N: Revealed type is 'Literal['foo']' [out] [case testLiteralFinalStringTypesPython2] # flags: --python-version 2.7 from typing_extensions import Final, Literal a = u"foo" # type: Final b = "foo" # type: Final c = b"foo" # type: Final def force_unicode(x): # type: (Literal[u"foo"]) -> None pass def force_bytes(x): # type: (Literal[b"foo"]) -> None pass 
force_unicode(reveal_type(a)) # N: Revealed type is 'Literal[u'foo']' force_unicode(reveal_type(b)) # E: Argument 1 to "force_unicode" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" \ # N: Revealed type is 'Literal['foo']' force_unicode(reveal_type(c)) # E: Argument 1 to "force_unicode" has incompatible type "Literal['foo']"; expected "Literal[u'foo']" \ # N: Revealed type is 'Literal['foo']' force_bytes(reveal_type(a)) # E: Argument 1 to "force_bytes" has incompatible type "Literal[u'foo']"; expected "Literal['foo']" \ # N: Revealed type is 'Literal[u'foo']' force_bytes(reveal_type(b)) # N: Revealed type is 'Literal['foo']' force_bytes(reveal_type(c)) # N: Revealed type is 'Literal['foo']' [out] [case testLiteralFinalPropagatesThroughGenerics] from typing import TypeVar, Generic from typing_extensions import Final, Literal T = TypeVar('T') class WrapperClass(Generic[T]): def __init__(self, data: T) -> None: self.data = data def wrapper_func(x: T) -> T: return x def force(x: Literal[99]) -> None: pass def over_int(x: WrapperClass[int]) -> None: pass def over_literal(x: WrapperClass[Literal[99]]) -> None: pass var1: Final = 99 w1 = WrapperClass(var1) force(reveal_type(w1.data)) # E: Argument 1 to "force" has incompatible type "int"; expected "Literal[99]" \ # N: Revealed type is 'builtins.int*' force(reveal_type(WrapperClass(var1).data)) # E: Argument 1 to "force" has incompatible type "int"; expected "Literal[99]" \ # N: Revealed type is 'builtins.int*' force(reveal_type(wrapper_func(var1))) # N: Revealed type is 'Literal[99]' over_int(reveal_type(w1)) # N: Revealed type is '__main__.WrapperClass[builtins.int*]' over_literal(reveal_type(w1)) # E: Argument 1 to "over_literal" has incompatible type "WrapperClass[int]"; expected "WrapperClass[Literal[99]]" \ # N: Revealed type is '__main__.WrapperClass[builtins.int*]' over_int(reveal_type(WrapperClass(var1))) # N: Revealed type is '__main__.WrapperClass[builtins.int]' 
over_literal(reveal_type(WrapperClass(var1))) # N: Revealed type is '__main__.WrapperClass[Literal[99]]' w2 = WrapperClass(99) force(reveal_type(w2.data)) # E: Argument 1 to "force" has incompatible type "int"; expected "Literal[99]" \ # N: Revealed type is 'builtins.int*' force(reveal_type(WrapperClass(99).data)) # E: Argument 1 to "force" has incompatible type "int"; expected "Literal[99]" \ # N: Revealed type is 'builtins.int*' force(reveal_type(wrapper_func(99))) # N: Revealed type is 'Literal[99]' over_int(reveal_type(w2)) # N: Revealed type is '__main__.WrapperClass[builtins.int*]' over_literal(reveal_type(w2)) # E: Argument 1 to "over_literal" has incompatible type "WrapperClass[int]"; expected "WrapperClass[Literal[99]]" \ # N: Revealed type is '__main__.WrapperClass[builtins.int*]' over_int(reveal_type(WrapperClass(99))) # N: Revealed type is '__main__.WrapperClass[builtins.int]' over_literal(reveal_type(WrapperClass(99))) # N: Revealed type is '__main__.WrapperClass[Literal[99]]' var3: Literal[99] = 99 w3 = WrapperClass(var3) force(reveal_type(w3.data)) # N: Revealed type is 'Literal[99]' force(reveal_type(WrapperClass(var3).data)) # N: Revealed type is 'Literal[99]' force(reveal_type(wrapper_func(var3))) # N: Revealed type is 'Literal[99]' over_int(reveal_type(w3)) # E: Argument 1 to "over_int" has incompatible type "WrapperClass[Literal[99]]"; expected "WrapperClass[int]" \ # N: Revealed type is '__main__.WrapperClass[Literal[99]]' over_literal(reveal_type(w3)) # N: Revealed type is '__main__.WrapperClass[Literal[99]]' over_int(reveal_type(WrapperClass(var3))) # N: Revealed type is '__main__.WrapperClass[builtins.int]' over_literal(reveal_type(WrapperClass(var3))) # N: Revealed type is '__main__.WrapperClass[Literal[99]]' [out] [case testLiteralFinalUsedInLiteralType] from typing_extensions import Literal, Final a: Final[int] = 3 b: Final = 3 c: Final[Literal[3]] = 3 d: Literal[3] # TODO: Consider if we want to support cases 'b' and 'd' or not. 
# Probably not: we want to mostly keep the 'types' and 'value' worlds distinct. # However, according to final semantics, we ought to be able to substitute "b" with # "3" wherever it's used and get the same behavior -- so maybe we do need to support # at least case "b" for consistency? a_wrap: Literal[4, a] # E: Parameter 2 of Literal[...] is invalid \ # E: Variable "__main__.a" is not valid as a type b_wrap: Literal[4, b] # E: Parameter 2 of Literal[...] is invalid \ # E: Variable "__main__.b" is not valid as a type c_wrap: Literal[4, c] # E: Parameter 2 of Literal[...] is invalid \ # E: Variable "__main__.c" is not valid as a type d_wrap: Literal[4, d] # E: Parameter 2 of Literal[...] is invalid \ # E: Variable "__main__.d" is not valid as a type [out] [case testLiteralWithFinalPropagation] from typing_extensions import Final, Literal a: Final = 3 b: Final = a c = a def expect_3(x: Literal[3]) -> None: pass expect_3(a) expect_3(b) expect_3(c) # E: Argument 1 to "expect_3" has incompatible type "int"; expected "Literal[3]" [out] [case testLiteralWithFinalPropagationIsNotLeaking] from typing_extensions import Final, Literal final_tuple_direct: Final = (2, 3) final_tuple_indirect: Final = final_tuple_direct mutable_tuple = final_tuple_direct final_list_1: Final = [2] final_list_2: Final = [2, 2] final_dict: Final = {"foo": 2} final_set_1: Final = {2} final_set_2: Final = {2, 2} def expect_2(x: Literal[2]) -> None: pass expect_2(final_tuple_direct[0]) expect_2(final_tuple_indirect[0]) expect_2(mutable_tuple[0]) # E: Argument 1 to "expect_2" has incompatible type "int"; expected "Literal[2]" expect_2(final_list_1[0]) # E: Argument 1 to "expect_2" has incompatible type "int"; expected "Literal[2]" expect_2(final_list_2[0]) # E: Argument 1 to "expect_2" has incompatible type "int"; expected "Literal[2]" expect_2(final_dict["foo"]) # E: Argument 1 to "expect_2" has incompatible type "int"; expected "Literal[2]" expect_2(final_set_1.pop()) # E: Argument 1 to "expect_2" has 
incompatible type "int"; expected "Literal[2]" expect_2(final_set_2.pop()) # E: Argument 1 to "expect_2" has incompatible type "int"; expected "Literal[2]" [builtins fixtures/isinstancelist.pyi] [out] -- -- Tests for Literals and enums -- [case testLiteralWithEnumsBasic] from typing_extensions import Literal from enum import Enum class Color(Enum): RED = 1 GREEN = 2 BLUE = 3 def func(self) -> int: pass r: Literal[Color.RED] g: Literal[Color.GREEN] b: Literal[Color.BLUE] bad1: Literal[Color] # E: Parameter 1 of Literal[...] is invalid bad2: Literal[Color.func] # E: Function "__main__.Color.func" is not valid as a type \ # N: Perhaps you need "Callable[...]" or a callback protocol? \ # E: Parameter 1 of Literal[...] is invalid bad3: Literal[Color.func()] # E: Invalid type: Literal[...] cannot contain arbitrary expressions def expects_color(x: Color) -> None: pass def expects_red(x: Literal[Color.RED]) -> None: pass def bad_func(x: Color.RED) -> None: pass # E: Invalid type: try using Literal[Color.RED] instead? 
expects_color(r) expects_color(g) expects_color(b) expects_red(r) expects_red(g) # E: Argument 1 to "expects_red" has incompatible type "Literal[Color.GREEN]"; expected "Literal[Color.RED]" expects_red(b) # E: Argument 1 to "expects_red" has incompatible type "Literal[Color.BLUE]"; expected "Literal[Color.RED]" reveal_type(expects_red) # N: Revealed type is 'def (x: Literal[__main__.Color.RED])' reveal_type(r) # N: Revealed type is 'Literal[__main__.Color.RED]' reveal_type(r.func()) # N: Revealed type is 'builtins.int' [out] [case testLiteralWithEnumsDefinedInClass] from typing_extensions import Literal from enum import Enum class Wrapper: class Color(Enum): RED = 1 GREEN = 2 BLUE = 3 def foo(x: Literal[Wrapper.Color.RED]) -> None: pass r: Literal[Wrapper.Color.RED] g: Literal[Wrapper.Color.GREEN] foo(r) foo(g) # E: Argument 1 to "foo" has incompatible type "Literal[Color.GREEN]"; expected "Literal[Color.RED]" reveal_type(foo) # N: Revealed type is 'def (x: Literal[__main__.Wrapper.Color.RED])' reveal_type(r) # N: Revealed type is 'Literal[__main__.Wrapper.Color.RED]' [out] [case testLiteralWithEnumsSimilarDefinitions] from typing_extensions import Literal import mod_a import mod_b def f(x: Literal[mod_a.Test.FOO]) -> None: pass a: Literal[mod_a.Test.FOO] b: Literal[mod_a.Test2.FOO] c: Literal[mod_b.Test.FOO] f(a) f(b) # E: Argument 1 to "f" has incompatible type "Literal[Test2.FOO]"; expected "Literal[Test.FOO]" f(c) # E: Argument 1 to "f" has incompatible type "Literal[mod_b.Test.FOO]"; expected "Literal[mod_a.Test.FOO]" [file mod_a.py] from enum import Enum class Test(Enum): FOO = 1 BAR = 2 class Test2(Enum): FOO = 1 BAR = 2 [file mod_b.py] from enum import Enum class Test(Enum): FOO = 1 BAR = 2 [out] [case testLiteralWithEnumsDeclaredUsingCallSyntax] from typing_extensions import Literal from enum import Enum A = Enum('A', 'FOO BAR') B = Enum('B', ['FOO', 'BAR']) C = Enum('C', [('FOO', 1), ('BAR', 2)]) D = Enum('D', {'FOO': 1, 'BAR': 2}) a: Literal[A.FOO] b: 
Literal[B.FOO] c: Literal[C.FOO] d: Literal[D.FOO] reveal_type(a) # N: Revealed type is 'Literal[__main__.A.FOO]' reveal_type(b) # N: Revealed type is 'Literal[__main__.B.FOO]' reveal_type(c) # N: Revealed type is 'Literal[__main__.C.FOO]' reveal_type(d) # N: Revealed type is 'Literal[__main__.D.FOO]' [builtins fixtures/dict.pyi] [out] [case testLiteralWithEnumsDerivedEnums] from typing_extensions import Literal from enum import Enum, IntEnum, IntFlag, Flag def expects_int(x: int) -> None: pass class A(Enum): FOO = 1 class B(IntEnum): FOO = 1 class C(IntFlag): FOO = 1 class D(Flag): FOO = 1 a: Literal[A.FOO] b: Literal[B.FOO] c: Literal[C.FOO] d: Literal[D.FOO] expects_int(a) # E: Argument 1 to "expects_int" has incompatible type "Literal[A.FOO]"; expected "int" expects_int(b) expects_int(c) expects_int(d) # E: Argument 1 to "expects_int" has incompatible type "Literal[D.FOO]"; expected "int" [out] [case testLiteralWithEnumsAliases] from typing_extensions import Literal from enum import Enum class Test(Enum): FOO = 1 BAR = 2 Alias = Test x: Literal[Alias.FOO] reveal_type(x) # N: Revealed type is 'Literal[__main__.Test.FOO]' [out] [case testLiteralUsingEnumAttributesInLiteralContexts] from typing_extensions import Literal, Final from enum import Enum class Test1(Enum): FOO = 1 BAR = 2 Test2 = Enum('Test2', [('FOO', 1), ('BAR', 2)]) def expects_test1_foo(x: Literal[Test1.FOO]) -> None: ... def expects_test2_foo(x: Literal[Test2.FOO]) -> None: ... 
expects_test1_foo(Test1.FOO) expects_test1_foo(Test1.BAR) # E: Argument 1 to "expects_test1_foo" has incompatible type "Literal[Test1.BAR]"; expected "Literal[Test1.FOO]" expects_test2_foo(Test2.FOO) expects_test2_foo(Test2.BAR) # E: Argument 1 to "expects_test2_foo" has incompatible type "Literal[Test2.BAR]"; expected "Literal[Test2.FOO]" # Make sure the two 'FOO's are not interchangeable expects_test1_foo(Test2.FOO) # E: Argument 1 to "expects_test1_foo" has incompatible type "Literal[Test2.FOO]"; expected "Literal[Test1.FOO]" expects_test2_foo(Test1.FOO) # E: Argument 1 to "expects_test2_foo" has incompatible type "Literal[Test1.FOO]"; expected "Literal[Test2.FOO]" # Make sure enums follow the same semantics as 'x = 1' vs 'x: Final = 1' var1 = Test1.FOO final1: Final = Test1.FOO expects_test1_foo(var1) # E: Argument 1 to "expects_test1_foo" has incompatible type "Test1"; expected "Literal[Test1.FOO]" expects_test1_foo(final1) var2 = Test2.FOO final2: Final = Test2.FOO expects_test2_foo(var2) # E: Argument 1 to "expects_test2_foo" has incompatible type "Test2"; expected "Literal[Test2.FOO]" expects_test2_foo(final2) [out] [case testLiteralUsingEnumAttributeNamesInLiteralContexts] from typing_extensions import Literal, Final from enum import Enum class Test1(Enum): FOO = 1 BAR = 2 Test2 = Enum('Test2', [('FOO', 1), ('BAR', 2)]) Test3 = Enum('Test3', 'FOO BAR') Test4 = Enum('Test4', ['FOO', 'BAR']) Test5 = Enum('Test5', {'FOO': 1, 'BAR': 2}) def expects_foo(x: Literal['FOO']) -> None: ... 
expects_foo(Test1.FOO.name) expects_foo(Test2.FOO.name) expects_foo(Test3.FOO.name) expects_foo(Test4.FOO.name) expects_foo(Test5.FOO.name) expects_foo(Test1.BAR.name) # E: Argument 1 to "expects_foo" has incompatible type "Literal['BAR']"; expected "Literal['FOO']" expects_foo(Test2.BAR.name) # E: Argument 1 to "expects_foo" has incompatible type "Literal['BAR']"; expected "Literal['FOO']" expects_foo(Test3.BAR.name) # E: Argument 1 to "expects_foo" has incompatible type "Literal['BAR']"; expected "Literal['FOO']" expects_foo(Test4.BAR.name) # E: Argument 1 to "expects_foo" has incompatible type "Literal['BAR']"; expected "Literal['FOO']" expects_foo(Test5.BAR.name) # E: Argument 1 to "expects_foo" has incompatible type "Literal['BAR']"; expected "Literal['FOO']" reveal_type(Test1.FOO.name) # N: Revealed type is 'Literal['FOO']?' reveal_type(Test2.FOO.name) # N: Revealed type is 'Literal['FOO']?' reveal_type(Test3.FOO.name) # N: Revealed type is 'Literal['FOO']?' reveal_type(Test4.FOO.name) # N: Revealed type is 'Literal['FOO']?' reveal_type(Test5.FOO.name) # N: Revealed type is 'Literal['FOO']?' [out] [case testLiteralBinderLastValueErased] # mypy: strict-equality from typing_extensions import Literal def takes_three(x: Literal[3]) -> None: ... x: object x = 3 takes_three(x) # E: Argument 1 to "takes_three" has incompatible type "int"; expected "Literal[3]" if x == 2: # OK ... [builtins fixtures/bool.pyi] [case testLiteralBinderLastValueErasedPartialTypes] # mypy: strict-equality def test() -> None: x = None if bool(): x = 1 if x == 2: # OK ... 
[builtins fixtures/bool.pyi] [case testNegativeIntLiteral] from typing_extensions import Literal a: Literal[-2] = -2 b: Literal[-1] = -1 c: Literal[0] = 0 d: Literal[1] = 1 e: Literal[2] = 2 [out] [builtins fixtures/float.pyi] [case testNegativeIntLiteralWithFinal] from typing_extensions import Literal, Final ONE: Final = 1 x: Literal[-1] = -ONE TWO: Final = 2 THREE: Final = 3 err_code = -TWO if bool(): err_code = -THREE [builtins fixtures/float.pyi] [case testAliasForEnumTypeAsLiteral] from typing_extensions import Literal from enum import Enum class Foo(Enum): A = 1 F = Foo x: Literal[Foo.A] y: Literal[F.A] reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' mypy-0.761/test-data/unit/check-modules-case.test0000644€tŠÔÚ€2›s®0000000314713576752246026127 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checker test cases dealing with modules and imports on case-insensitive filesystems. [case testCaseSensitivityDir] from a import B # E: Module 'a' has no attribute 'B' [file a/__init__.py] [file a/b/__init__.py] [case testCaseInsensitivityDir] # flags: --config-file tmp/mypy.ini from a import B # E: Module 'a' has no attribute 'B' from other import x reveal_type(x) # N: Revealed type is 'builtins.int' [file a/__init__.py] [file a/b/__init__.py] [file FuNkY_CaSe/other.py] x = 1 [file mypy.ini] \[mypy] mypy_path = tmp/funky_case [case testPreferPackageOverFileCase] # flags: --config-file tmp/mypy.ini import a [file funky/a.py] / # Deliberate syntax error, this file should not be parsed. [file FuNkY/a/__init__.py] pass [file mypy.ini] \[mypy] mypy_path = tmp/funky [case testNotPreferPackageOverFileCase] import a [file a.py] 'no'() # E: "str" not callable [file A/__init__.py] / # Deliberate syntax error, this file should not be parsed. 
[case testNamespacePackagePickFirstOnMypyPathCase] # flags: --namespace-packages --config-file tmp/mypy.ini from foo.bar import x reveal_type(x) # N: Revealed type is 'builtins.int' [file XX/foo/bar.py] x = 0 [file yy/foo/bar.py] x = '' [file mypy.ini] \[mypy] mypy_path = tmp/xx, tmp/yy [case testClassicPackageInsideNamespacePackageCase] # flags: --namespace-packages --config-file tmp/mypy.ini from foo.bar.baz.boo import x reveal_type(x) # N: Revealed type is 'builtins.int' [file xx/foo/bar/baz/boo.py] x = '' [file xx/foo/bar/baz/__init__.py] [file yy/foo/bar/baz/boo.py] x = 0 [file yy/foo/bar/__init__.py] [file mypy.ini] \[mypy] mypy_path = TmP/xX, TmP/yY mypy-0.761/test-data/unit/check-modules.test0000644€tŠÔÚ€2›s®0000020001413576752246025206 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checker test cases dealing with modules and imports. -- Towards the end there are tests for PEP 420 (namespace packages, i.e. __init__.py-less packages). [case testAccessImportedDefinitions] import m import typing m.f() # E: Too few arguments for "f" m.f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A" m.x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") m.f(m.A()) m.x = m.A() [file m.py] class A: pass def f(a: A) -> None: pass x = A() [case testAccessImportedDefinitions] import m import typing m.f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A" m.f(m.A()) [file m.py] class A: pass def f(a: A) -> None: pass [case testAccessImportedDefinitions2] from m import f, A import typing f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A" f(A()) [file m.py] class A: pass def f(a: A) -> None: pass [case testImportedExceptionType] import m import typing try: pass except m.Err: pass except m.Bad: # E: Exception type must be derived from BaseException pass [file m.py] class Err(BaseException): pass class Bad: pass [builtins fixtures/exception.pyi] 
[case testImportedExceptionType2] from m import Err, Bad import typing try: pass except Err: pass except Bad: # E: Exception type must be derived from BaseException pass [file m.py] class Err(BaseException): pass class Bad: pass [builtins fixtures/exception.pyi] [case testImportWithinBlock] import typing if 1: import m m.a = m.b # E: Incompatible types in assignment (expression has type "B", variable has type "A") m.a = m.a m.f() m.f(m.a) # E: Too many arguments for "f" m.a = m.A() m.a = m.B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [file m.py] class A: pass class B: pass a = A() b = B() def f() -> None: pass [case testImportWithinFunction] import typing def f() -> None: from m import a, b, f, A, B if int(): a = b \ # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = a f() f(a) # E: Too many arguments for "f" a = A() a = B() \ # E: Incompatible types in assignment (expression has type "B", variable has type "A") [file m.py] class A: pass class B: pass a = A() b = B() def f() -> None: pass [out] [case testImportWithinMethod] import typing class C: def f(self) -> None: from m import * if int(): a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = a f() f(a) # E: Too many arguments for "f" a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [file m.py] class A: pass class B: pass a = A() b = B() def f() -> None: pass [out] [case testImportWithinClassBody] import typing class C: import m m.f() m.f(C) # E: Too many arguments for "f" [file m.py] def f() -> None: pass [out] [case testImportWithinClassBody2] import typing class C: from m import f f() f(C) # E: Too many arguments for "f" [file m.py] def f() -> None: pass [out] [case testImportWithStub] import _m _m.f("hola") [file _m.pyi] def f(c:str) -> None: pass [out] [case testImportWithStubIncompatibleType] import _m _m.f("hola") _m.f(12) # 
E: Argument 1 to "f" has incompatible type "int"; expected "str" [file _m.py] def f(c): print(c) [file _m.pyi] def f(c:str) -> None: pass [case testInvalidOperationsOnModules] import m import typing class A: pass m() # E: Module not callable a = m # type: A # E: Incompatible types in assignment (expression has type Module, variable has type "A") m + None # E: Unsupported left operand type for + (Module) [file m.py] [builtins fixtures/module.pyi] [case testNameDefinedInDifferentModule] import m, n import typing m.x # E: Module has no attribute "x" [file m.py] y = object() [file n.py] x = object() [builtins fixtures/module.pyi] [case testChainedAssignmentAndImports] import m i, s = None, None # type: (int, str) if int(): i = m.x if int(): i = m.y if int(): s = m.x # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): s = m.y # E: Incompatible types in assignment (expression has type "int", variable has type "str") [file m.py] x = y = 1 [builtins fixtures/primitives.pyi] [case testConditionalFunctionDefinitionAndImports] import m import typing m.f(1) m.f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [file m.py] x = object() if x: def f(x: int) -> None: pass else: def f(x: int) -> None: pass [case testTypeCheckWithUnknownModule] import nonexistent None + '' [out] main:1: error: Cannot find implementation or library stub for module named 'nonexistent' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Unsupported left operand type for + ("None") [case testTypeCheckWithUnknownModule2] import m, nonexistent None + '' m.x = 1 m.x = '' [file m.py] x = 1 [out] main:1: error: Cannot find implementation or library stub for module named 'nonexistent' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Unsupported left operand type for + ("None") main:4: error: Incompatible types in assignment 
(expression has type "str", variable has type "int") [case testTypeCheckWithUnknownModule3] import nonexistent, m None + '' m.x = 1 m.x = '' [file m.py] x = 1 [out] main:1: error: Cannot find implementation or library stub for module named 'nonexistent' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Unsupported left operand type for + ("None") main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testTypeCheckWithUnknownModule4] import nonexistent, another None + '' [out] main:1: error: Cannot find implementation or library stub for module named 'nonexistent' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'another' main:2: error: Unsupported left operand type for + ("None") [case testTypeCheckWithUnknownModule5] import nonexistent as x None + '' [out] main:1: error: Cannot find implementation or library stub for module named 'nonexistent' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Unsupported left operand type for + ("None") [case testTypeCheckWithUnknownModuleUsingFromImport] from nonexistent import x None + '' [out] main:1: error: Cannot find implementation or library stub for module named 'nonexistent' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Unsupported left operand type for + ("None") [case testTypeCheckWithUnknownModuleUsingImportStar] from nonexistent import * None + '' [out] main:1: error: Cannot find implementation or library stub for module named 'nonexistent' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Unsupported left operand type for + ("None") [case testAccessingUnknownModule] import xyz xyz.foo() xyz() [out] main:1: error: Cannot find 
implementation or library stub for module named 'xyz' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testAccessingUnknownModule2] import xyz, bar xyz.foo() bar() [out] main:1: error: Cannot find implementation or library stub for module named 'xyz' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'bar' [case testAccessingUnknownModule3] import xyz as z xyz.foo() z() [out] main:1: error: Cannot find implementation or library stub for module named 'xyz' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Name 'xyz' is not defined [case testAccessingNameImportedFromUnknownModule] from xyz import y, z y.foo() z() [out] main:1: error: Cannot find implementation or library stub for module named 'xyz' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testAccessingNameImportedFromUnknownModule2] from xyz import * y [out] main:1: error: Cannot find implementation or library stub for module named 'xyz' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Name 'y' is not defined [case testAccessingNameImportedFromUnknownModule3] from xyz import y as z y z [out] main:1: error: Cannot find implementation or library stub for module named 'xyz' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Name 'y' is not defined [case testUnknownModuleRedefinition] # Error messages differ with the new analyzer import xab # E: Cannot find implementation or library stub for module named 'xab' # N: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports def xab(): pass # E: Name 'xab' already defined (possibly by an import) [case testAccessingUnknownModuleFromOtherModule] import x x.nonexistent.foo x.z [file 
x.py] import nonexistent [builtins fixtures/module.pyi] [out] tmp/x.py:1: error: Cannot find implementation or library stub for module named 'nonexistent' tmp/x.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:3: error: Module has no attribute "z" [case testUnknownModuleImportedWithinFunction] def f(): import foobar def foobar(): pass foobar('') [out] main:2: error: Cannot find implementation or library stub for module named 'foobar' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:4: error: Too many arguments for "foobar" [case testUnknownModuleImportedWithinFunction2] def f(): from foobar import x def x(): pass x('') [out] main:2: error: Cannot find implementation or library stub for module named 'foobar' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:4: error: Too many arguments for "x" [case testRelativeImports] import typing import m.a m.a.x = m.a.y # Error [file m/__init__.py] [file m/a.py] import typing from .b import A, B, x, y z = x if int(): z = y # Error [file m/b.py] import typing class A: pass class B: pass x = A() y = B() [out] tmp/m/a.py:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testRelativeImports2] import typing import m.a m.a.x = m.a.y # E: Incompatible types in assignment (expression has type "B", variable has type "A") [file m/__init__.py] [file m/a.py] import typing from .b import A, B, x, y [file m/b.py] import typing class A: pass class B: pass x = A() y = B() [case testExportedValuesInImportAll] import typing from m import * _ = a _ = b _ = c _ = d _ = e _ = f # E: Name 'f' is not defined _ = _g # E: Name '_g' is not defined [file m.py] __all__ = ['a'] __all__ += ('b',) __all__.append('c') __all__.extend(('d', 'e')) a = b = c = d = e = f = _g = 1 [builtins 
fixtures/module_all.pyi] [case testAllMustBeSequenceStr] import typing __all__ = [1, 2, 3] [builtins fixtures/module_all.pyi] [out] main:2: error: Type of __all__ must be "Sequence[str]", not "List[int]" [case testAllMustBeSequenceStr_python2] import typing __all__ = [1, 2, 3] [builtins_py2 fixtures/module_all_python2.pyi] [out] main:2: error: Type of __all__ must be "Sequence[unicode]", not "List[int]" [case testAllUnicodeSequenceOK_python2] import typing __all__ = [u'a', u'b', u'c'] [builtins_py2 fixtures/module_all_python2.pyi] [out] [case testUnderscoreExportedValuesInImportAll] import typing from m import * _ = a _ = _b _ = __c__ _ = ___d _ = e _ = f # E: Name 'f' is not defined _ = _g # E: Name '_g' is not defined [file m.py] __all__ = ['a'] __all__ += ('_b',) __all__.append('__c__') __all__.extend(('___d', 'e')) a = _b = __c__ = ___d = e = f = _g = 1 [builtins fixtures/module_all.pyi] [case testEllipsisInitializerInStubFileWithType] import m m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [file m.pyi] x = ... # type: int [case testEllipsisInitializerInStubFileWithoutType] import m m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "ellipsis") [file m.pyi] # Ellipsis is only special with a # type: comment (not sure though if this is great) x = ... [case testEllipsisInitializerInModule] x = ... # type: int # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "int") [case testEllipsisDefaultArgValueInStub] import m m.f(1) m.f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [file m.pyi] def f(x: int = ...) -> None: pass [case testEllipsisDefaultArgValueInStub2] import m def f1(x: int = ...) -> int: return 1 def f2(x: int = '') -> int: return 1 [file m.pyi] def g1(x: int = ...) 
-> int: pass def g2(x: int = '') -> int: pass [out] tmp/m.pyi:2: error: Incompatible default for argument "x" (default has type "str", argument has type "int") main:2: error: Incompatible default for argument "x" (default has type "ellipsis", argument has type "int") main:3: error: Incompatible default for argument "x" (default has type "str", argument has type "int") [case testEllipsisDefaultArgValueInNonStub] def ok_1(x: int = ...) -> None: pass def ok_2(x: int = ...) -> None: ... def ok_3(x: int = ...) -> None: raise NotImplementedError def ok_4(x: int = ...) -> None: raise NotImplementedError() def ok_5(x: int = ...) -> None: """Docstring here""" pass def bad_1(x: int = ...) -> None: 1 # E: Incompatible default for argument "x" (default has type "ellipsis", argument has type "int") def bad_2(x: int = ...) -> None: # E: Incompatible default for argument "x" (default has type "ellipsis", argument has type "int") """Docstring here""" ok_1() def bad_3(x: int = ...) -> None: # E: Incompatible default for argument "x" (default has type "ellipsis", argument has type "int") raise Exception("Some other exception") [builtins fixtures/exception.pyi] [out] [case testEllipsisDefaultArgValueInNonStubsOverload] from typing import overload, Union Both = Union[int, str] @overload def foo(x: int, y: int = ...) -> int: ... @overload def foo(x: str, y: str = ...) -> str: ... def foo(x: Both, y: Both = ...) -> Both: # E: Incompatible default for argument "y" (default has type "ellipsis", argument has type "Union[int, str]") return x @overload def bar(x: int, y: int = ...) -> int: ... @overload def bar(x: str, y: str = ...) -> str: ... def bar(x: Both, y: Both = ...) -> Both: raise NotImplementedError [builtins fixtures/exception.pyi] [out] [case testEllipsisDefaultArgValueInNonStubsMethods] from typing import Generic, TypeVar from typing_extensions import Protocol from abc import abstractmethod T = TypeVar('T') class Wrap(Generic[T]): ... 
class MyProtocol(Protocol): def no_impl(self, x: Wrap[int] = ...) -> int: ... def default_impl(self, x: Wrap[int] = ...) -> int: return 3 # E: Incompatible default for argument "x" (default has type "ellipsis", argument has type "Wrap[int]") class MyAbstractClass: @abstractmethod def no_impl(self, x: Wrap[int] = ...) -> int: raise NotImplementedError @abstractmethod def default_impl(self, x: Wrap[int] = ...) -> int: return 3 # E: Incompatible default for argument "x" (default has type "ellipsis", argument has type "Wrap[int]") [builtins fixtures/exception.pyi] [out] [case testStarImportOverlapping] from m1 import * from m2 import * j = '' [file m1.py] x = 1 [file m2.py] x = 1 [case testStarImportOverlappingMismatch] from m1 import * from m2 import * # E: Incompatible import of "x" (imported name has type "int", local name has type "str") j = '' [file m1.py] x = '' [file m2.py] x = 1 [case testStarImportOverridingLocalImports] from m1 import * from m2 import * x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [file m1.py] x = 1 [file m2.py] x = 1 [case testAssignToFuncDefViaImport] # flags: --strict-optional # Errors differ with the new analyzer. 
(Old analyzer gave error on the # input, which is maybe better, but no error about f, which seems # wrong) from m import * f = None # E: Incompatible types in assignment (expression has type "None", variable has type "Callable[[], Any]") x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [file m.py] def f(): pass x = 1+0 [out] -- Conditional definitions and function redefinitions via module object -- -------------------------------------------------------------------- [case testConditionalImportAndAssign] try: from m import x except: x = None try: from m import x as y except: y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [file m.py] x = '' [case testAssignAndConditionalImport] x = '' try: from m import x except: pass y = 1 try: from m import x as y # E: Incompatible import of "y" (imported name has type "str", local name has type "int") except: pass [file m.py] x = '' [case testAssignAndConditionalStarImport] x = '' y = 1 try: from m import * # E: Incompatible import of "y" (imported name has type "str", local name has type "int") except: pass [file m.py] x = '' y = '' [case testRedefineImportedFunctionViaImport] try: from m import f, g except: def f(x): pass def g(x): pass # E: All conditional function variants must have identical signatures [file m.py] def f(x): pass def g(x, y): pass [case testImportedVariableViaImport] try: from m import x except: from n import x # E: Incompatible import of "x" (imported name has type "str", local name has type "int") [file m.py] x = 1 [file n.py] x = '' [case testRedefineFunctionViaImport] def f(x): pass def g(x): pass try: from m import f, g # E: Incompatible import of "g" (imported name has type "Callable[[Any, Any], Any]", local name has type "Callable[[Any], Any]") except: pass [file m.py] def f(x): pass def g(x, y): pass [case testImportVariableAndAssignNone] try: from m import x except: x = None [file m.py] x = 1 [case 
testImportFunctionAndAssignNone] try: from m import f except: f = None [file m.py] def f(): pass [case testImportFunctionAndAssignFunction] def g(x): pass try: from m import f except: f = g [file m.py] def f(x): pass [case testImportFunctionAndAssignIncompatible] try: from m import f except: f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [file m.py] def f(): pass [case testAssignToFuncDefViaGlobalDecl2] import typing from m import f def g() -> None: global f f = None if int(): f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [file m.py] def f(): pass [out] [case testAssignToFuncDefViaNestedModules] import m.n m.n.f = None m.n.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [file m/__init__.py] [file m/n.py] def f(): pass [out] [case testAssignToFuncDefViaModule] import m m.f = None m.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [file m.py] def f(): pass [out] [case testConditionalImportAndAssignNoneToModule] if object(): import m else: m = None m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str" [file m.py] def f(x: str) -> None: pass [builtins fixtures/module.pyi] [out] [case testConditionalImportAndAssignInvalidToModule] if object(): import m else: m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Module) [file m.py] [builtins fixtures/module.pyi] [out] [case testImportAndAssignToModule] import m m = None m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str" [file m.py] def f(x: str) -> None: pass [builtins fixtures/module.pyi] [out] -- Test cases that simulate 'mypy -m modname' -- -- The module name to import is encoded in a comment. 
[case testTypeCheckNamedModule] # cmd: mypy -m m.a [file m/__init__.py] None + 1 [file m/a.py] [out] tmp/m/__init__.py:1: error: Unsupported left operand type for + ("None") [case testTypeCheckNamedModule2] # cmd: mypy -m m.a [file m/__init__.py] [file m/a.py] None + 1 [out] tmp/m/a.py:1: error: Unsupported left operand type for + ("None") [case testTypeCheckNamedModule3] # cmd: mypy -m m [file m/__init__.py] None + 1 [file m/a.py] [out] tmp/m/__init__.py:1: error: Unsupported left operand type for + ("None") [case testTypeCheckNamedModule4] # cmd: mypy -m m [file m/__init__.py] [file m/a.py] None + 1 # Not analyzed. [out] [case testTypeCheckNamedModule5] # cmd: mypy -m m None + '' # Not analyzed. [file m.py] None + 1 [out] tmp/m.py:1: error: Unsupported left operand type for + ("None") [case testTypeCheckNamedModuleWithImportCycle] # cmd: mypy -m m.a None + 1 # Does not generate error, as this file won't be analyzed. [file m/__init__.py] import m.a [file m/a.py] [out] [case testCheckDecoratedFuncAsAnnotWithImportCycle] import a [file a.py] from typing import TypeVar import b T = TypeVar('T') def idf(x: T) -> T: return x @idf def Session() -> None: pass [file b.py] MYPY = False if MYPY: from a import Session def f(self, session: Session) -> None: # E: Function "a.Session" is not valid as a type \ # N: Perhaps you need "Callable[...]" or a callback protocol? 
pass [builtins fixtures/bool.pyi] -- Checks dealing with submodules and different kinds of imports -- ------------------------------------------------------------- [case testSubmoduleRegularImportAddsAllParents] import a.b.c reveal_type(a.value) # N: Revealed type is 'builtins.int' reveal_type(a.b.value) # N: Revealed type is 'builtins.str' reveal_type(a.b.c.value) # N: Revealed type is 'builtins.float' b.value # E: Name 'b' is not defined c.value # E: Name 'c' is not defined [file a/__init__.py] value = 3 [file a/b/__init__.py] value = "a" [file a/b/c.py] value = 3.2 [out] [case testSubmoduleImportAsDoesNotAddParents] import a.b.c as foo reveal_type(foo.value) # N: Revealed type is 'builtins.float' a.value # E: Name 'a' is not defined b.value # E: Name 'b' is not defined c.value # E: Name 'c' is not defined [file a/__init__.py] value = 3 [file a/b/__init__.py] value = "a" [file a/b/c.py] value = 3.2 [out] [case testSubmoduleImportFromDoesNotAddParents] from a import b reveal_type(b.value) # N: Revealed type is 'builtins.str' b.c.value # E: Module has no attribute "c" a.value # E: Name 'a' is not defined [file a/__init__.py] value = 3 [file a/b/__init__.py] value = "a" [file a/b/c.py] value = 3.2 [builtins fixtures/module.pyi] [out] [case testSubmoduleImportFromDoesNotAddParents2] from a.b import c reveal_type(c.value) # N: Revealed type is 'builtins.float' a.value # E: Name 'a' is not defined b.value # E: Name 'b' is not defined [file a/__init__.py] value = 3 [file a/b/__init__.py] value = "a" [file a/b/c.py] value = 3.2 [out] [case testSubmoduleRegularImportNotDirectlyAddedToParent] import a.b.c def accept_float(x: float) -> None: pass accept_float(a.b.c.value) [file a/__init__.py] value = 3 b.value a.b.value [file a/b/__init__.py] value = "a" c.value a.b.c.value [file a/b/c.py] value = 3.2 [out] tmp/a/b/__init__.py:2: error: Name 'c' is not defined tmp/a/b/__init__.py:3: error: Name 'a' is not defined tmp/a/__init__.py:2: error: Name 'b' is not defined 
tmp/a/__init__.py:3: error: Name 'a' is not defined [case testSubmoduleMixingLocalAndQualifiedNames] from a.b import MyClass val1 = None # type: a.b.MyClass # E: Name 'a' is not defined val2 = None # type: MyClass [file a/__init__.py] [file a/b.py] class MyClass: pass [out] [case testSubmoduleMixingImportFrom] import parent.child [file parent/__init__.py] [file parent/common.py] class SomeClass: pass [file parent/child.py] from parent.common import SomeClass from parent import common foo = parent.common.SomeClass() [builtins fixtures/module.pyi] [out] tmp/parent/child.py:3: error: Name 'parent' is not defined [case testSubmoduleMixingImportFromAndImport] import parent.child [file parent/__init__.py] [file parent/common.py] class SomeClass: pass [file parent/unrelated.py] class ShouldNotLoad: pass [file parent/child.py] from parent.common import SomeClass import parent # Note, since this might be unintuitive -- when `parent.common` is loaded in any way, # shape, or form, it's added to `parent`'s namespace, which is why the below line # succeeds. 
foo = parent.common.SomeClass() reveal_type(foo) bar = parent.unrelated.ShouldNotLoad() [builtins fixtures/module.pyi] [out] tmp/parent/child.py:8: note: Revealed type is 'parent.common.SomeClass' tmp/parent/child.py:9: error: Module has no attribute "unrelated" [case testSubmoduleMixingImportFromAndImport2] import parent.child [file parent/__init__.py] [file parent/common.py] class SomeClass: pass [file parent/child.py] from parent import common import parent foo = parent.common.SomeClass() reveal_type(foo) [builtins fixtures/module.pyi] [out] tmp/parent/child.py:4: note: Revealed type is 'parent.common.SomeClass' -- Tests repeated imports [case testIdenticalImportFromTwice] from a import x, y, z from b import x, y, z [file a.py] from common import x, y, z [file b.py] from common import x, y, z [file common.py] x = 3 def y() -> int: return 3 class z: pass [out] [case testIdenticalImportStarTwice] from a import * from b import * [file a.py] from common import x, y, z [file b.py] from common import x, y, z [file common.py] x = 3 def y() -> int: return 3 class z: pass [out] [case testDifferentImportSameNameTwice] from a import x, y, z from b import x, y, z [file a.py] x = 3 def y() -> int: return 1 class z: pass [file b.py] x = "foo" def y() -> str: return "foo" class z: pass [out] main:2: error: Incompatible import of "x" (imported name has type "str", local name has type "int") main:2: error: Incompatible import of "y" (imported name has type "Callable[[], str]", local name has type "Callable[[], int]") main:2: error: Incompatible import of "z" (imported name has type "Type[b.z]", local name has type "Type[a.z]") -- Misc [case testInheritFromBadImport] # cmd: mypy -m bar [file foo.py] pass [file bar.py] from foo import B class C(B): pass [out] tmp/bar.py:1: error: Module 'foo' has no attribute 'B' [case testImportSuppressedWhileAlmostSilent] # cmd: mypy -m main # flags: --follow-imports=error [file main.py] import mod [file mod.py] [builtins fixtures/module.pyi] 
[out] tmp/main.py:1: error: Import of 'mod' ignored tmp/main.py:1: note: (Using --follow-imports=error, module not passed on command line) [case testAncestorSuppressedWhileAlmostSilent] # cmd: mypy -m foo.bar # flags: --follow-imports=error [file foo/bar.py] [file foo/__init__.py] [builtins fixtures/module.pyi] [out] tmp/foo/bar.py: error: Ancestor package 'foo' ignored tmp/foo/bar.py: note: (Using --follow-imports=error, submodule passed on command line) [case testStubImportNonStubWhileSilent] # cmd: mypy -m main # flags: --follow-imports=skip [file main.py] from stub import x, z # Followed from other import y # Not followed x + '' # No error here y + '' # No error here z + '' # Error here [file stub.pyi] from non_stub import x as x # this import is not followed z = 42 [file non_stub.py] x = 42 x + '' # no error because file is not analyzed [file other.py] y = 42 [builtins fixtures/module.pyi] [out] tmp/main.py:5: error: Unsupported left operand type for + ("int") [case testSilentSubmoduleImport] # cmd: mypy -m foo # flags: --follow-imports=skip [file foo/__init__.py] from foo import bar [file foo/bar.py] pass [case testImportReExportFromChildrenInCycle1] # cmd: mypy -m project.root project.study.a project.neighbor [file project/__init__.py] from project.study import CustomType x = 10 [file project/root.py] [file project/study/__init__.py] from project.study.a import CustomType [file project/study/a.py] from project import root # TODO (#4498): This test is basically testing the `all_are_submodules` logic # in build, which skips generating a dependency to a module if # everything in it is a submodule. But that is still all just a # workaround for bugs in cycle handling. 
If we uncomment the next # line, we'll still break: # from project import x CustomType = str [file project/neighbor/__init__.py] from project.study import CustomType def m(arg: CustomType) -> str: return 'test' [case testImportReExportFromChildrenInCycle2] # cmd: mypy -m project project.b project.ba project.c # See comments in above test about this being a workaround. [file foo.py] def get_foo() -> int: return 12 [file project/ba.py] from . import b b.FOO [file project/b.py] import foo from . import c FOO = foo.get_foo() [file project/c.py] [file project/__init__.py] from . import ba [case testSuperclassInImportCycle] import a import d a.A().f(d.D()) [file a.py] if 0: import d class B: pass class C(B): pass class A: def f(self, x: B) -> None: pass [file d.py] import a class D(a.C): pass [case testSuperclassInImportCycleReversedImports] import d import a a.A().f(d.D()) [file a.py] if 0: import d class B: pass class C(B): pass class A: def f(self, x: B) -> None: pass [file d.py] import a class D(a.C): pass [case testPreferPackageOverFile] import a [file a.py] / # intentional syntax error -- this file shouldn't be parsed [file a/__init__.py] pass [out] [case testPreferPackageOverFile2] from a import x [file a.py] / # intentional syntax error -- this file shouldn't be parsed [file a/__init__.py] x = 0 [out] [case testImportInClass] class C: import foo reveal_type(C.foo.bar) # N: Revealed type is 'builtins.int' [file foo.py] bar = 0 [builtins fixtures/module.pyi] [out] [case testIfFalseImport] if False: import a def f(x: 'a.A') -> int: return x.f() [file a.py] class A: def f(self) -> int: return 0 [builtins fixtures/bool.pyi] -- Test stability under import cycles -- ---------------------------------- -- The first two tests are identical except one main has 'import x' -- and the other 'import y'. Previously (before build.order_ascc() -- was added) one of these would fail because the imports were -- processed in the (reverse) order in which the files were -- encountered. 
[case testImportCycleStability1] import x [file x.py] def f() -> str: return '' class Base: attr = f() def foo(): import y [file y.py] import x class Sub(x.Base): attr = x.Base.attr [out] [case testImportCycleStability2] import y [file x.py] def f() -> str: return '' class Base: attr = f() def foo(): import y [file y.py] import x class Sub(x.Base): attr = x.Base.attr [out] -- This case isn't fixed by order_ascc(), but is fixed by the -- lightweight type inference added to semanal.py -- (analyze_simple_literal_type()). [case testImportCycleStability3] import y [file x.py] class Base: pass def foo() -> int: import y reveal_type(y.Sub.attr) return y.Sub.attr [file y.py] import x class Sub(x.Base): attr = 0 [out] tmp/x.py:5: note: Revealed type is 'builtins.int' -- This case has a symmetrical cycle, so it doesn't matter in what -- order the files are processed. It depends on the lightweight type -- interference. [case testImportCycleStability4] import x [file x.py] import y class C: attr = '' def foo() -> int: return y.D.attr [file y.py] import x class D: attr = 0 def bar() -> str: return x.C.attr -- These cases test all supported literal types. [case testImportCycleStability5] import y [file x.py] class Base: pass def foo() -> None: import y i = y.Sub.iattr # type: int f = y.Sub.fattr # type: float s = y.Sub.sattr # type: str b = y.Sub.battr # type: bytes [file y.py] import x class Sub(x.Base): iattr = 0 fattr = 0.0 sattr = '' battr = b'' [out] [case testImportCycleStability6_python2] import y [file x.py] class Base: pass def foo(): # type: () -> None import y i = y.Sub.iattr # type: int f = y.Sub.fattr # type: float s = y.Sub.sattr # type: str u = y.Sub.uattr # type: unicode [file y.py] import x class Sub(x.Base): iattr = 0 fattr = 0.0 sattr = '' uattr = u'' [out] -- This case tests module-level variables. 
[case testImportCycleStability7] import x [file x.py] def foo() -> int: import y reveal_type(y.value) return y.value [file y.py] import x value = 12 [out] tmp/x.py:3: note: Revealed type is 'builtins.int' -- This is not really cycle-related but still about the lightweight -- type checker. [case testImportCycleStability8] x = 1 # type: str reveal_type(x) [out] main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") main:2: note: Revealed type is 'builtins.str' -- Tests for cross-module second_pass checking. [case testSymmetricImportCycle1] import a [file a.py] import b def f() -> int: return b.x y = 0 + 0 [file b.py] import a def g() -> int: reveal_type(a.y) return a.y x = 1 + 1 [out] tmp/b.py:3: note: Revealed type is 'builtins.int' [case testSymmetricImportCycle2] import b [file a.py] import b def f() -> int: reveal_type(b.x) return b.x y = 0 + 0 [file b.py] import a def g() -> int: return a.y x = 1 + 1 [out] tmp/a.py:3: note: Revealed type is 'builtins.int' [case testThreePassesRequired] import b [file a.py] import b class C: def f1(self) -> None: self.x2 def f2(self) -> None: self.x2 = b.b [file b.py] import a b = 1 + 1 [out] tmp/a.py:4: error: Cannot determine type of 'x2' [case testErrorInPassTwo1] import b [file a.py] import b def f() -> None: a = b.x + 1 a + '' [file b.py] import a x = 1 + 1 [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") [case testErrorInPassTwo2] import a [file a.py] import b def f() -> None: a = b.x + 1 a + '' [file b.py] import a x = 1 + 1 [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") [case testDeferredDecorator] import a [file a.py] import b def g() -> None: f('') @b.deco def f(a: str) -> int: pass reveal_type(f) x = 1 + 1 [file b.py] from typing import Callable, TypeVar import a T = TypeVar('T') def deco(f: Callable[[T], int]) -> Callable[[T], int]: a.x return f [out] tmp/a.py:6: note: Revealed type is 'def (builtins.str*) -> 
builtins.int' [case testDeferredClassContext] class A: def f(self) -> str: return 'foo' class B(A): def f(self) -> str: return self.x def initialize(self) -> None: self.x = 'bar' [case testDeferredClassContextUnannotated] class A: def f(self) -> str: return 'foo' class B(A): def f(self) -> str: return self.x def initialize(self): self.x = 'bar' -- Scripts and __main__ [case testScriptsAreModules] # flags: --scripts-are-modules [file a] pass [file b] pass -- Misc [case testScriptsAreNotModules] # cmd: mypy a b [file a] pass [file b] pass [out] [case testTypeCheckPrio] # cmd: mypy -m part1 part2 part3 part4 [file part1.py] from part3 import Thing class FirstThing: pass [file part2.py] from part4 import part4_thing as Thing [file part3.py] from part2 import Thing reveal_type(Thing) [file part4.py] from typing import TYPE_CHECKING if TYPE_CHECKING: from part1 import FirstThing def part4_thing(a: int) -> str: pass [builtins fixtures/bool.pyi] [out] tmp/part3.py:2: note: Revealed type is 'def (a: builtins.int) -> builtins.str' [case testImportStarAliasAnyList] import bar [file bar.py] from foo import * def bar(y: AnyAlias) -> None: pass l = None # type: ListAlias[int] reveal_type(l) [file foo.py] from typing import Any, List AnyAlias = Any ListAlias = List [builtins fixtures/list.pyi] [out] tmp/bar.py:5: note: Revealed type is 'builtins.list[builtins.int]' [case testImportStarAliasSimpleGeneric] from ex2a import * def do_something(dic: Row) -> None: pass def do_another() -> Row: return {} do_something({'good': 'bad'}) # E: Dict entry 0 has incompatible type "str": "str"; expected "str": "int" reveal_type(do_another()) # N: Revealed type is 'builtins.dict[builtins.str, builtins.int]' [file ex2a.py] from typing import Dict Row = Dict[str, int] [builtins fixtures/dict.pyi] [out] [case testImportStarAliasGeneric] from y import * notes = None # type: G[X] another = G[X]() second = XT[str]() last = XT[G]() reveal_type(notes) # N: Revealed type is 'y.G[y.G[builtins.int]]' 
reveal_type(another) # N: Revealed type is 'y.G[y.G*[builtins.int]]' reveal_type(second) # N: Revealed type is 'y.G[builtins.str*]' reveal_type(last) # N: Revealed type is 'y.G[y.G*[Any]]' [file y.py] from typing import Generic, TypeVar T = TypeVar('T') class G(Generic[T]): pass X = G[int] XT = G[T] [out] [case testImportStarAliasCallable] from foo import * from typing import Any def bar(x: Any, y: AnyCallable) -> Any: return 'foo' cb = None # type: AnyCallable reveal_type(cb) # N: Revealed type is 'def (*Any, **Any) -> Any' [file foo.py] from typing import Callable, Any AnyCallable = Callable[..., Any] [out] [case testRevealType] import types def f() -> types.ModuleType: return types reveal_type(f()) # N: Revealed type is 'types.ModuleType' reveal_type(types) # N: Revealed type is 'types.ModuleType' [builtins fixtures/module.pyi] [case testClassImportAccessedInMethod] class C: import m def foo(self) -> None: x = self.m.a reveal_type(x) # N: Revealed type is 'builtins.str' # ensure we distinguish self from other variables y = 'hello' z = y.m.a # E: "str" has no attribute "m" @classmethod def cmethod(cls) -> None: y = cls.m.a reveal_type(y) # N: Revealed type is 'builtins.str' @staticmethod def smethod(foo: int) -> None: # we aren't confused by first arg of a staticmethod y = foo.m.a # E: "int" has no attribute "m" [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testModuleAlias] import m m2 = m reveal_type(m2.a) # N: Revealed type is 'builtins.str' m2.b # E: Module has no attribute "b" m2.c = 'bar' # E: Module has no attribute "c" [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testClassModuleAlias] import m class C: x = m def foo(self) -> None: reveal_type(self.x.a) # N: Revealed type is 'builtins.str' [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testLocalModuleAlias] import m def foo() -> None: x = m reveal_type(x.a) # N: Revealed type is 'builtins.str' class C: def foo(self) -> None: x = m reveal_type(x.a) # N: Revealed type is 
'builtins.str' [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testChainedModuleAlias] import m m3 = m2 = m m4 = m3 m5 = m4 reveal_type(m2.a) # N: Revealed type is 'builtins.str' reveal_type(m3.a) # N: Revealed type is 'builtins.str' reveal_type(m4.a) # N: Revealed type is 'builtins.str' reveal_type(m5.a) # N: Revealed type is 'builtins.str' [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testMultiModuleAlias] import m, n m2, n2, (m3, n3) = m, n, [m, n] reveal_type(m2.a) # N: Revealed type is 'builtins.str' reveal_type(n2.b) # N: Revealed type is 'builtins.str' reveal_type(m3.a) # N: Revealed type is 'builtins.str' reveal_type(n3.b) # N: Revealed type is 'builtins.str' x, y = m # E: 'types.ModuleType' object is not iterable x, y, z = m, n # E: Need more than 2 values to unpack (3 expected) x, y = m, m, m # E: Too many values to unpack (2 expected, 3 provided) x, (y, z) = m, n # E: 'types.ModuleType' object is not iterable x, (y, z) = m, (n, n, n) # E: Too many values to unpack (2 expected, 3 provided) [file m.py] a = 'foo' [file n.py] b = 'bar' [builtins fixtures/module.pyi] [case testModuleAliasWithExplicitAnnotation] from typing import Any import types import m mod_mod: types.ModuleType = m mod_mod2: types.ModuleType mod_mod2 = m mod_mod3 = m # type: types.ModuleType mod_any: Any = m mod_int: int = m # E: Incompatible types in assignment (expression has type Module, variable has type "int") reveal_type(mod_mod) # N: Revealed type is 'types.ModuleType' mod_mod.a # E: Module has no attribute "a" reveal_type(mod_mod2) # N: Revealed type is 'types.ModuleType' mod_mod2.a # E: Module has no attribute "a" reveal_type(mod_mod3) # N: Revealed type is 'types.ModuleType' mod_mod3.a # E: Module has no attribute "a" reveal_type(mod_any) # N: Revealed type is 'Any' [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testModuleAliasPassedToFunction] import types import m def takes_module(x: types.ModuleType): reveal_type(x.__file__) # N: Revealed 
type is 'builtins.str' n = m takes_module(m) takes_module(n) [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testModuleAliasRepeated] import m, n if bool(): x = m else: x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type Module) if bool(): y = 3 else: y = m # E: Incompatible types in assignment (expression has type Module, variable has type "int") if bool(): z = m else: z = n # E: Cannot assign multiple modules to name 'z' without explicit 'types.ModuleType' annotation [file m.py] a = 'foo' [file n.py] a = 3 [builtins fixtures/module.pyi] [case testModuleAliasRepeatedWithAnnotation] import types import m, n x: types.ModuleType if bool(): x = m else: x = n x.a # E: Module has no attribute "a" reveal_type(x.__file__) # N: Revealed type is 'builtins.str' [file m.py] a = 'foo' [file n.py] a = 3 [builtins fixtures/module.pyi] [case testModuleAliasRepeatedComplex] import m, n, o x = m if int(): x = n # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation if int(): x = o # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation y = o if int(): y, z = m, n # E: Cannot assign multiple modules to name 'y' without explicit 'types.ModuleType' annotation xx = m if int(): xx = m reveal_type(xx.a) # N: Revealed type is 'builtins.str' [file m.py] a = 'foo' [file n.py] a = 3 [file o.py] a = 'bar' [builtins fixtures/module.pyi] [case testModuleAliasToOtherModule] import m, n m = n # E: Cannot assign multiple modules to name 'm' without explicit 'types.ModuleType' annotation [file m.py] [file n.py] [builtins fixtures/module.pyi] [case testNoReExportFromStubs] from stub import Iterable # E: Module 'stub' has no attribute 'Iterable' from stub import C c = C() reveal_type(c.x) # N: Revealed type is 'builtins.int' it: Iterable[int] reveal_type(it) # N: Revealed type is 'Any' [file stub.pyi] from typing import Iterable from substub import C as C def fun(x: 
Iterable[str]) -> Iterable[int]: pass [file substub.pyi] class C: x: int [builtins fixtures/module.pyi] [case testNoReExportFromStubsMemberType] import stub c = stub.C() reveal_type(c.x) # N: Revealed type is 'builtins.int' it: stub.Iterable[int] # E: Name 'stub.Iterable' is not defined reveal_type(it) # N: Revealed type is 'Any' [file stub.pyi] from typing import Iterable from substub import C as C def fun(x: Iterable[str]) -> Iterable[int]: pass [file substub.pyi] class C: x: int [builtins fixtures/module.pyi] [case testNoReExportFromStubsMemberVar] import stub reveal_type(stub.y) # N: Revealed type is 'builtins.int' reveal_type(stub.z) # E: Module has no attribute "z" \ # N: Revealed type is 'Any' [file stub.pyi] from substub import y as y from substub import z [file substub.pyi] y = 42 z: int [builtins fixtures/module.pyi] [case testReExportChildStubs] import mod from mod import submod reveal_type(mod.x) # N: Revealed type is 'mod.submod.C' y = submod.C() reveal_type(y.a) # N: Revealed type is 'builtins.str' [file mod/__init__.pyi] from . import submod x: submod.C [file mod/submod.pyi] class C: a: str [builtins fixtures/module.pyi] [case testReExportChildStubs2] import mod.submod y = mod.submod.C() reveal_type(y.a) # N: Revealed type is 'builtins.str' [file mod/__init__.pyi] from . 
import submod x: submod.C [file mod/submod.pyi] class C: a: str [builtins fixtures/module.pyi] [case testNoReExportChildStubs] import mod from mod import C, D # E: Module 'mod' has no attribute 'C' reveal_type(mod.x) # N: Revealed type is 'mod.submod.C' mod.C # E: Module has no attribute "C" y = mod.D() reveal_type(y.a) # N: Revealed type is 'builtins.str' [file mod/__init__.pyi] from .submod import C, D as D x: C [file mod/submod.pyi] class C: pass class D: a: str [builtins fixtures/module.pyi] [case testNoReExportNestedStub] from stub import substub # E: Module 'stub' has no attribute 'substub' [file stub.pyi] import substub [file substub.pyi] x = 42 [file mod/submod.pyi] [case testModuleAliasToQualifiedImport] import package.module alias = package.module reveal_type(alias.whatever('/')) # N: Revealed type is 'builtins.str*' [file package/__init__.py] [file package/module.py] from typing import TypeVar T = TypeVar('T') def whatever(x: T) -> T: pass [builtins fixtures/module.pyi] [case testModuleAliasToQualifiedImport2] import mod import othermod alias = mod.submod reveal_type(alias.whatever('/')) # N: Revealed type is 'builtins.str*' if int(): alias = othermod # E: Cannot assign multiple modules to name 'alias' without explicit 'types.ModuleType' annotation [file mod.py] import submod [file submod.py] from typing import TypeVar T = TypeVar('T') def whatever(x: T) -> T: pass [file othermod.py] [builtins fixtures/module.pyi] [case testModuleLevelGetattr] import has_getattr reveal_type(has_getattr.any_attribute) # N: Revealed type is 'Any' [file has_getattr.pyi] from typing import Any def __getattr__(name: str) -> Any: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrReturnType] import has_getattr reveal_type(has_getattr.any_attribute) # N: Revealed type is 'builtins.str' [file has_getattr.pyi] def __getattr__(name: str) -> str: ... 
[builtins fixtures/module.pyi] [case testModuleLevelGetattrInvalidSignature] import has_getattr reveal_type(has_getattr.any_attribute) [file has_getattr.pyi] def __getattr__(x: int, y: str) -> str: ... [out] tmp/has_getattr.pyi:1: error: Invalid signature "def (builtins.int, builtins.str) -> builtins.str" for "__getattr__" main:3: note: Revealed type is 'builtins.str' [builtins fixtures/module.pyi] [case testModuleLevelGetattrNotCallable] import has_getattr reveal_type(has_getattr.any_attribute) [file has_getattr.pyi] __getattr__ = 3 [out] tmp/has_getattr.pyi:1: error: Invalid signature "builtins.int" for "__getattr__" main:3: note: Revealed type is 'Any' [builtins fixtures/module.pyi] [case testModuleLevelGetattrUntyped] import has_getattr reveal_type(has_getattr.any_attribute) # N: Revealed type is 'Any' [file has_getattr.pyi] def __getattr__(name): ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrNotStub36] # flags: --python-version 3.6 import has_getattr reveal_type(has_getattr.any_attribute) # E: Module has no attribute "any_attribute" \ # N: Revealed type is 'Any' [file has_getattr.py] def __getattr__(name) -> str: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrNotStub37] # flags: --python-version 3.7 import has_getattr reveal_type(has_getattr.any_attribute) # N: Revealed type is 'builtins.str' [file has_getattr.py] def __getattr__(name) -> str: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattribute] def __getattribute__(): ... # E: __getattribute__ is not valid at the module level [case testModuleLevelGetattrImportFrom] from has_attr import name reveal_type(name) # N: Revealed type is 'Any' [file has_attr.pyi] from typing import Any def __getattr__(name: str) -> Any: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrImportFromRetType] from has_attr import int_attr reveal_type(int_attr) # N: Revealed type is 'builtins.int' [file has_attr.pyi] def __getattr__(name: str) -> int: ... 
[builtins fixtures/module.pyi] [case testModuleLevelGetattrImportFromNotStub36] # flags: --python-version 3.6 from non_stub import name # E: Module 'non_stub' has no attribute 'name' reveal_type(name) # N: Revealed type is 'Any' [file non_stub.py] from typing import Any def __getattr__(name: str) -> Any: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrImportFromNotStub37] # flags: --python-version 3.7 from non_stub import name reveal_type(name) # N: Revealed type is 'Any' [file non_stub.py] from typing import Any def __getattr__(name: str) -> Any: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrImportFromAs] from has_attr import name as n reveal_type(name) # E: Name 'name' is not defined # N: Revealed type is 'Any' reveal_type(n) # N: Revealed type is 'Any' [file has_attr.pyi] from typing import Any def __getattr__(name: str) -> Any: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrImportFromAsTwice] from has_attr import name from has_attr import name from has_attr import x from has_attr import y as x # E: Name 'x' already defined (possibly by an import) reveal_type(name) # N: Revealed type is 'builtins.int' [file has_attr.pyi] from typing import Any def __getattr__(name: str) -> int: ... [case testModuleLevelGetattrAssignedGood] # flags: --python-version 3.7 import non_stub reveal_type(non_stub.name) # N: Revealed type is 'builtins.int' [file non_stub.py] from typing import Callable def make_getattr_good() -> Callable[[str], int]: ... __getattr__ = make_getattr_good() # OK [case testModuleLevelGetattrAssignedBad] # flags: --python-version 3.7 import non_stub reveal_type(non_stub.name) [file non_stub.py] from typing import Callable def make_getattr_bad() -> Callable[[], int]: ... 
__getattr__ = make_getattr_bad() [out] tmp/non_stub.py:4: error: Invalid signature "def () -> builtins.int" for "__getattr__" main:3: note: Revealed type is 'builtins.int' [case testModuleLevelGetattrImportedGood] # flags: --python-version 3.7 import non_stub reveal_type(non_stub.name) # N: Revealed type is 'builtins.int' [file non_stub.py] from has_getattr import __getattr__ [file has_getattr.py] def __getattr__(name: str) -> int: ... [case testModuleLevelGetattrImportedBad] # flags: --python-version 3.7 import non_stub reveal_type(non_stub.name) [file non_stub.py] from has_getattr import __getattr__ [file has_getattr.py] def __getattr__() -> int: ... [out] tmp/has_getattr.py:1: error: Invalid signature "def () -> builtins.int" for "__getattr__" main:3: note: Revealed type is 'builtins.int' [builtins fixtures/module.pyi] [case testFailedImportFromTwoModules] import c import b [file b.py] import c [out] -- TODO: it would be better for this to be in the other order tmp/b.py:1: error: Cannot find implementation or library stub for module named 'c' main:1: error: Cannot find implementation or library stub for module named 'c' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testIndirectFromImportWithinCycle1] import a [file a.py] from b import f from c import x [file b.py] from c import y from a import x def f() -> None: pass reveal_type(x) # N: Revealed type is 'builtins.str' [file c.py] x = str() y = int() [case testIndirectFromImportWithinCycle2] import a [file a.py] from c import y from b import x def f() -> None: pass reveal_type(x) # N: Revealed type is 'builtins.str' [file b.py] from a import f from c import x [file c.py] x = str() y = int() [case testIndirectFromImportWithinCycleInPackage] import p.a [file p/__init__.py] [file p/a.py] from p.b import f from p.c import x [file p/b.py] from p.c import y from p.a import x def f() -> None: pass reveal_type(x) # N: Revealed type is 'builtins.str' [file p/c.py] x = 
str() y = int() [case testIndirectFromImportWithinCycleInPackageIgnoredInit] # cmd: mypy -m p.a p.b p.c # flags: --follow-imports=skip --ignore-missing-imports [file p/__init__.py] [file p/a.py] from p.b import f from p.c import x [file p/b.py] from p.c import y from p.a import x def f() -> None: pass reveal_type(x) # N: Revealed type is 'builtins.str' [file p/c.py] x = str() y = int() [case testForwardReferenceToListAlias] x: List[int] reveal_type(x) # N: Revealed type is 'builtins.list[builtins.int]' def f() -> 'List[int]': pass reveal_type(f) # N: Revealed type is 'def () -> builtins.list[builtins.int]' class A: y: 'List[str]' def g(self, x: 'List[int]') -> None: pass reveal_type(A().y) # N: Revealed type is 'builtins.list[builtins.str]' reveal_type(A().g) # N: Revealed type is 'def (x: builtins.list[builtins.int])' from typing import List [builtins fixtures/list.pyi] [case testIndirectStarImportWithinCycle1] import a [file a.py] from b import f from c import x [file b.py] from c import y from a import * def f() -> None: pass reveal_type(x) # N: Revealed type is 'builtins.str' [file c.py] x = str() y = int() [case testIndirectStarImportWithinCycle2] import a [file a.py] from c import y from b import * def f() -> None: pass reveal_type(x) # N: Revealed type is 'builtins.str' [file b.py] from a import f from c import x [file c.py] x = str() y = int() [case testModuleGetattrInit1] from a import b x = b.f() [file a/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [builtins fixtures/module.pyi] [out] [case testModuleGetattrInit2] import a.b x = a.b.f() [file a/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [builtins fixtures/module.pyi] [out] [case testModuleGetattrInit3] import a.b x = a.b.f() [file a/__init__.py] from typing import Any def __getattr__(attr: str) -> Any: ... 
[builtins fixtures/module.pyi] [out] main:1: error: Cannot find implementation or library stub for module named 'a.b' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testModuleGetattrInit4] import a.b.c x = a.b.c.f() [file a/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [builtins fixtures/module.pyi] [out] [case testModuleGetattrInit5] from a.b import f x = f() [file a/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [builtins fixtures/module.pyi] [out] [case testModuleGetattrInit5a] from a.b import f x = f() [file a/__init__.pyi] from types import ModuleType def __getattr__(attr: str) -> ModuleType: ... [builtins fixtures/module.pyi] [out] [case testModuleGetattrInit8] import a.b.c.d x = a.b.c.d.f() [file a/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [file a/b/__init__.pyi] # empty (i.e. complete subpackage) [builtins fixtures/module.pyi] [out] main:1: error: Cannot find implementation or library stub for module named 'a.b.c.d' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'a.b.c' [case testModuleGetattrInit8a] import a.b.c # E: Cannot find implementation or library stub for module named 'a.b.c' # N: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports import a.d # OK [file a/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [file a/b/__init__.pyi] # empty (i.e. complete subpackage) [builtins fixtures/module.pyi] [case testModuleGetattrInit10] # flags: --config-file tmp/mypy.ini import a.b.c # silenced import a.b.d # error [file a/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [file a/b/__init__.pyi] # empty (i.e. 
complete subpackage) [file mypy.ini] \[mypy] \[mypy-a.b.c] ignore_missing_imports = True [builtins fixtures/module.pyi] [out] main:3: error: Cannot find implementation or library stub for module named 'a.b.d' main:3: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testIndirectFromImportWithinCycleUsedAsBaseClass] import a [file a.py] from b import f from c import B [file b.py] from c import y class A(B): pass reveal_type(A().x) # N: Revealed type is 'builtins.int' from a import B def f() -> None: pass [file c.py] class B: x: int x = str() y = int() [case testImportFromReExportInCycleUsingRelativeImport1] from m import One reveal_type(One) [file m/__init__.py] from .one import One from .two import Two reveal_type(One) [file m/one.py] class One: pass [file m/two.py] from m import One reveal_type(One) x: One reveal_type(x) class Two(One): pass y: Two y = x x = y [out] tmp/m/two.py:2: note: Revealed type is 'def () -> m.one.One' tmp/m/two.py:4: note: Revealed type is 'm.one.One' tmp/m/two.py:9: error: Incompatible types in assignment (expression has type "One", variable has type "Two") tmp/m/__init__.py:3: note: Revealed type is 'def () -> m.one.One' main:2: note: Revealed type is 'def () -> m.one.One' [case testImportReExportInCycleUsingRelativeImport2] from m import One reveal_type(One) [file m/__init__.py] from .one import One from .two import Two reveal_type(One) [file m/one.py] class One: pass [file m/two.py] import m reveal_type(m.One) x: m.One reveal_type(x) class Two: pass [out] tmp/m/two.py:2: note: Revealed type is 'def () -> m.one.One' tmp/m/two.py:4: note: Revealed type is 'm.one.One' tmp/m/__init__.py:3: note: Revealed type is 'def () -> m.one.One' main:2: note: Revealed type is 'def () -> m.one.One' [case testImportReExportedNamedTupleInCycle1] from m import One [file m/__init__.py] from .one import One from .two import Two [file m/one.py] from typing import NamedTuple class One(NamedTuple): name: str [file 
m/two.py] import m x = m.One(name="Foo") reveal_type(x.name) class Two: pass [out] tmp/m/two.py:3: note: Revealed type is 'builtins.str' [case testImportReExportedNamedTupleInCycle2] from m import One [file m/__init__.py] from .one import One from .two import Two [file m/one.py] from typing import NamedTuple One = NamedTuple('One', [('name', str)]) [file m/two.py] import m x = m.One(name="Foo") reveal_type(x.name) class Two: pass [out] tmp/m/two.py:3: note: Revealed type is 'builtins.str' [case testImportReExportedTypeAliasInCycle] from m import One [file m/__init__.py] from .one import One from .two import Two [file m/one.py] from typing import Union One = Union[int, str] [file m/two.py] import m x: m.One reveal_type(x) class Two: pass [out] tmp/m/two.py:3: note: Revealed type is 'Union[builtins.int, builtins.str]' [case testImportCycleSpecialCase] import p [file p/__init__.py] from . import a from . import b reveal_type(a.foo()) [file p/a.py] import p def foo() -> int: pass [file p/b.py] import p def run() -> None: reveal_type(p.a.foo()) [builtins fixtures/module.pyi] [out] tmp/p/b.py:4: note: Revealed type is 'builtins.int' tmp/p/__init__.py:3: note: Revealed type is 'builtins.int' [case testMissingSubmoduleImportedWithIgnoreMissingImports] # flags: --ignore-missing-imports import whatever.works import a.b x = whatever.works.f() y = a.b.f() [file a/__init__.py] # empty [out] [case testMissingSubmoduleImportedWithIgnoreMissingImportsStub] # flags: --ignore-missing-imports --follow-imports=skip import whatever.works import a.b x = whatever.works.f() y = a.b.f() xx: whatever.works.C yy: a.b.C xx2: whatever.works.C.D yy2: a.b.C.D [file a/__init__.pyi] # empty [out] [case testMissingSubmoduleImportedWithIgnoreMissingImportsNested] # flags: --ignore-missing-imports import a.b.c.d y = a.b.c.d.f() [file a/__init__.py] # empty [file a/b/__init__.py] # empty [out] [case testModuleGetattrBusted] from a import A x: A reveal_type(x) # N: Revealed type is 'Any' [file a.pyi] 
from typing import Any def __getattr__(attr: str) -> Any: ... [builtins fixtures/module.pyi] [out] [case testModuleGetattrBusted2] from a import A def f(x: A.B) -> None: ... reveal_type(f) # N: Revealed type is 'def (x: Any)' [file a.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [builtins fixtures/module.pyi] [out] [case testNoGetattrInterference] import testmod as t def f(x: t.Cls) -> None: reveal_type(x) # N: Revealed type is 'testmod.Cls' [file testmod.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... class Cls: ... [builtins fixtures/module.pyi] [out] [case testFunctionWithDunderName] def __add__(self) -> int: ... [case testFunctionWithReversibleDunderName] def __radd__(self) -> int: ... [case testFunctionWithInPlaceDunderName] def __iadd__(self) -> int: ... -- Tests for PEP 420 namespace packages. [case testClassicPackage] from foo.bar import x [file foo/__init__.py] # empty [file foo/bar.py] x = 0 [case testClassicNotPackage] from foo.bar import x [file foo/bar.py] x = 0 [out] main:1: error: Cannot find implementation or library stub for module named 'foo.bar' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testNamespacePackage] # flags: --namespace-packages from foo.bar import x reveal_type(x) # N: Revealed type is 'builtins.int' [file foo/bar.py] x = 0 [case testNamespacePackageWithMypyPath] # flags: --namespace-packages --config-file tmp/mypy.ini from foo.bax import x from foo.bay import y from foo.baz import z reveal_type(x) # N: Revealed type is 'builtins.int' reveal_type(y) # N: Revealed type is 'builtins.int' reveal_type(z) # N: Revealed type is 'builtins.int' [file xx/foo/bax.py] x = 0 [file yy/foo/bay.py] y = 0 [file foo/baz.py] z = 0 [file mypy.ini] \[mypy] mypy_path = tmp/xx, tmp/yy [case testClassicPackageIgnoresEarlierNamespacePackage] # flags: --namespace-packages --config-file tmp/mypy.ini from foo.bar import y reveal_type(y) # N: Revealed type is 
'builtins.int' [file xx/foo/bar.py] x = '' [file yy/foo/bar.py] y = 0 [file yy/foo/__init__.py] [file mypy.ini] \[mypy] mypy_path = tmp/xx, tmp/yy [case testNamespacePackagePickFirstOnMypyPath] # flags: --namespace-packages --config-file tmp/mypy.ini from foo.bar import x reveal_type(x) # N: Revealed type is 'builtins.int' [file xx/foo/bar.py] x = 0 [file yy/foo/bar.py] x = '' [file mypy.ini] \[mypy] mypy_path = tmp/xx, tmp/yy [case testNamespacePackageInsideClassicPackage] # flags: --namespace-packages --config-file tmp/mypy.ini from foo.bar.baz import x reveal_type(x) # N: Revealed type is 'builtins.int' [file xx/foo/bar/baz.py] x = '' [file yy/foo/bar/baz.py] x = 0 [file yy/foo/__init__.py] [file mypy.ini] \[mypy] mypy_path = tmp/xx, tmp/yy [case testClassicPackageInsideNamespacePackage] # flags: --namespace-packages --config-file tmp/mypy.ini from foo.bar.baz.boo import x reveal_type(x) # N: Revealed type is 'builtins.int' [file xx/foo/bar/baz/boo.py] x = '' [file xx/foo/bar/baz/__init__.py] [file yy/foo/bar/baz/boo.py] x = 0 [file yy/foo/bar/__init__.py] [file mypy.ini] \[mypy] mypy_path = tmp/xx, tmp/yy [case testNamespacePackagePlainImport] # flags: --namespace-packages import foo.bar.baz reveal_type(foo.bar.baz.x) # N: Revealed type is 'builtins.int' [file foo/bar/baz.py] x = 0 [case testModuleGetAttrAssignUnannotated] import roles # this should not crash roles.role = 1 [file roles.pyi] def __getattr__(name): ... [case testModuleGetAttrAssignUnannotatedDouble] import roles # this also should not crash roles.role.attr = 1 [file roles.pyi] def __getattr__(name): ... [case testModuleGetAttrAssignAny] import roles roles.role = 1 [file roles.pyi] from typing import Any def __getattr__(name: str) -> Any: ... [case testModuleGetAttrAssignError] import roles roles.role = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [file roles.pyi] def __getattr__(name: str) -> str: ... 
[case testModuleGetAttrAssignSubmodule] import roles roles.role = 1 roles.missing.attr = 1 [file roles/__init__.pyi] from typing import Any def __getattr__(name: str) -> Any: ... [case testModuleGetAttrAssignSubmoduleStrict] import roles roles.role = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Module) [file roles/__init__.pyi] from types import ModuleType def __getattr__(name: str) -> ModuleType: ... [builtins fixtures/module.pyi] [case testAlwaysReportMissingAttributesOnFoundModules] # flags: --ignore-missing-imports import pack.mod as alias x: alias.NonExistent # E: Name 'alias.NonExistent' is not defined [file pack/__init__.py] [file pack/mod.py] class Existent: pass [case testModuleAttributeTwoSuggestions] import m m.aaaa # E: Module has no attribute "aaaa"; maybe "aaaaa" or "aaa"? [file m.py] aaa: int aaaaa: int [builtins fixtures/module.pyi] [case testModuleAttributeThreeSuggestions] import m m.aaaaa # E: Module has no attribute "aaaaa"; maybe "aabaa", "aaaba", or "aaaab"? [file m.py] aaaab: int aaaba: int aabaa: int [builtins fixtures/module.pyi] mypy-0.761/test-data/unit/check-multiple-inheritance.test0000644€tŠÔÚ€2›s®0000004063513576752246027673 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for multiple inheritance. 
-- -- Related: check-abstract.test -- No name collisions -- ------------------ [case testSimpleMultipleInheritanceAndMethods] import typing class A: def f(self, x: int) -> None: pass class B: def g(self, x: str) -> None: pass class C(A, B): pass c = C() c.f(1) c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" c.g('') c.g(1) # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str" [case testSimpleMultipleInheritanceAndMethods2] import typing class A: def f(self, x: int) -> None: pass class B: def g(self, x): pass class C(A, B): pass c = C() c.f(1) c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" c.g('') c.g(1) [case testSimpleMultipleInheritanceAndInstanceVariables] import typing class A: def f(self) -> None: self.x = 1 class B: def g(self) -> None: self.y = '' class C(A, B): pass c = C() c.x = 1 c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") c.y = '' c.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testSimpleMultipleInheritanceAndInstanceVariableInClassBody] import typing class A: x = 1 class B: y = '' class C(A, B): pass c = C() c.x = 1 c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") c.y = '' c.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testSimpleMultipleInheritanceAndClassVariable] import typing class A: x = 1 class B: y = '' class C(A, B): pass C.x = 1 C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") C.y = '' C.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") -- Name collisions -- --------------- [case testMethodNameCollisionInMultipleInheritanceWithValidSigs] import typing class A: def f(self, x: int) -> None: pass class B: def f(self, x: int) -> None: pass class 
C(A, B): pass c = C() c.f(1) c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testInstanceVarNameOverlapInMultipleInheritanceWithCompatibleTypes] import typing class A: def f(self) -> None: self.x = 1 class B: def g(self) -> None: self.x = 1 class C(A, B): pass c = C() c.x = 1 c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testClassVarNameOverlapInMultipleInheritanceWithCompatibleTypes] import typing class A: x = 1 class B: x = 1 class C(A, B): pass c = C() c.x = 1 c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") C.x = 1 C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs] import typing class A: def f(self, x: int) -> None: pass class B: def f(self, x: str) -> None: pass class C(A, B): pass [out] main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" [case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs2] import typing class A: def f(self, x: int) -> None: pass class B: def f(self, x, y): pass class C(A, B): pass class D(B, A): pass [out] main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" main:7: error: Definition of "f" in base class "B" is incompatible with definition in base class "A" [case testMethodOverridingWithBothDynamicallyAndStaticallyTypedMethods] class A: def f(self) -> int: pass class B: def f(self): pass class C(B, A): pass class D(A, B): pass [out] [case testInstanceVarNameOverlapInMultipleInheritanceWithInvalidTypes] import typing class A: def f(self) -> None: self.x = 1 class B: def g(self) -> None: self.x = '' class C(A, B): pass [out] main:8: error: Definition of "x" in base class "A" is incompatible with definition in base class "B" [case 
testClassVarNameOverlapInMultipleInheritanceWithInvalidTypes] import typing class A: x = 1 class B: x = '' class C(A, B): pass [out] main:6: error: Definition of "x" in base class "A" is incompatible with definition in base class "B" [case testMethodOverlapsWithClassVariableInMultipleInheritance] from typing import Callable class A: def f(self) -> None: pass class B: f = '' class C(A, B): pass [out] main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" [case testMethodOverlapsWithInstanceVariableInMultipleInheritance] from typing import Callable class A: def f(self) -> None: pass class B: def g(self) -> None: self.f = '' class C(A, B): pass [out] main:7: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" [case testMultipleInheritanceAndInit] import typing class A: def __init__(self, x: int) -> None: pass class B: def __init__(self) -> None: pass class C(A, B): pass [case testMultipleInheritanceAndDifferentButCompatibleSignatures] class A: def clear(self): pass class B: def clear(self, x=None): pass class C(B, A): pass class D(A, B): pass [out] main:8: error: Definition of "clear" in base class "A" is incompatible with definition in base class "B" -- Special cases -- ------------- [case testGenericInheritanceAndOverridingWithMultipleInheritance] from typing import Generic, TypeVar T = TypeVar('T') class G(Generic[T]): def f(self, s: int) -> 'G[T]': pass class A(G[int]): def f(self, s: int) -> 'A': pass class B(A, int): pass [case testCannotDetermineTypeInMultipleInheritance] from typing import Callable, TypeVar T = TypeVar('T') class A(B, C): pass class B: @dec def f(self): pass class C: @dec def f(self): pass def dec(f: Callable[..., T]) -> Callable[..., T]: return f [out] main:3: error: Cannot determine type of 'f' in base class 'B' main:3: error: Cannot determine type of 'f' in base class 'C' [case testMultipleInheritance_NestedClassesWithSameName] class Mixin1: class Meta: 
pass class Mixin2: class Meta: pass class A(Mixin1, Mixin2): pass [out] main:7: error: Definition of "Meta" in base class "Mixin1" is incompatible with definition in base class "Mixin2" [case testMultipleInheritance_NestedClassesWithSameNameCustomMetaclass] class Metaclass(type): pass class Mixin1: class Meta(metaclass=Metaclass): pass class Mixin2: class Meta(metaclass=Metaclass): pass class A(Mixin1, Mixin2): pass [out] main:9: error: Definition of "Meta" in base class "Mixin1" is incompatible with definition in base class "Mixin2" [case testMultipleInheritance_NestedClassesWithSameNameOverloadedNew] from mixins import Mixin1, Mixin2 class A(Mixin1, Mixin2): pass [file mixins.py] class Mixin1: class Meta: pass class Mixin2: class Meta: pass [file mixins.pyi] from typing import overload, Any, Mapping, Dict class Mixin1: class Meta: @overload def __new__(cls, *args, **kwargs: None) -> Mixin1.Meta: pass @overload def __new__(cls, *args, **kwargs: Dict[str, Any]) -> Mixin1.Meta: pass class Mixin2: class Meta: pass [builtins fixtures/dict.pyi] [out] main:2: error: Definition of "Meta" in base class "Mixin1" is incompatible with definition in base class "Mixin2" [case testMultipleInheritance_NestedClassAndAttrHaveSameName] class Mixin1: class Nested1: pass class Mixin2: Nested1: str class A(Mixin1, Mixin2): pass [out] main:6: error: Definition of "Nested1" in base class "Mixin1" is incompatible with definition in base class "Mixin2" [case testMultipleInheritance_NestedClassAndFunctionHaveSameName] class Mixin1: class Nested1: pass class Mixin2: def Nested1(self) -> str: pass class A(Mixin1, Mixin2): pass [out] main:7: error: Definition of "Nested1" in base class "Mixin1" is incompatible with definition in base class "Mixin2" [case testMultipleInheritance_NestedClassAndRefToOtherClass] class Outer: pass class Mixin1: class Nested1: pass class Mixin2: Nested1 = Outer class A(Mixin2, Mixin1): pass [out] main:8: error: Definition of "Nested1" in base class "Mixin2" is 
incompatible with definition in base class "Mixin1" [case testMultipleInheritance_ReferenceToSubclassesFromSameMRO] class A: def __init__(self, arg: str) -> None: pass class B(A): pass class Base1: NestedVar = A class Base2: NestedVar = B class Combo(Base2, Base1): ... [out] [case testMultipleInheritance_ReferenceToSubclassesFromSameMROCustomMetaclass] class Metaclass(type): pass class A(metaclass=Metaclass): pass class B(A): pass class Base1: NestedVar = A class Base2: NestedVar = B class Combo(Base2, Base1): ... [out] [case testMultipleInheritance_ReferenceToSubclassesFromSameMROOverloadedNew] from mixins import A, B class Base1: NestedVar = A class Base2: NestedVar = B class Combo(Base2, Base1): ... [file mixins.py] class A: pass class B(A): pass [file mixins.pyi] from typing import overload, Dict, Any class A: @overload def __new__(cls, *args, **kwargs: None) -> A: pass @overload def __new__(cls, *args, **kwargs: Dict[str, Any]) -> A: pass class B: pass [builtins fixtures/dict.pyi] [out] main:6: error: Definition of "NestedVar" in base class "Base2" is incompatible with definition in base class "Base1" [case testMultipleInheritance_ReferenceToGenericClasses] from typing import TypeVar, Generic T = TypeVar('T') class Generic1(Generic[T]): pass class Generic2(Generic[T]): pass class Base1: Nested = Generic1 class Base2: Nested = Generic2 class A(Base1, Base2): pass [out] main:11: error: Definition of "Nested" in base class "Base1" is incompatible with definition in base class "Base2" [case testMultipleInheritance_GenericSubclasses_SuperclassFirst] from typing import TypeVar, Generic T = TypeVar('T') class ParentGeneric(Generic[T]): pass class ChildGeneric(ParentGeneric[T]): pass class Base1: Nested = ParentGeneric class Base2: Nested = ChildGeneric class A(Base1, Base2): pass [out] main:11: error: Definition of "Nested" in base class "Base1" is incompatible with definition in base class "Base2" [case testMultipleInheritance_GenericSubclasses_SubclassFirst] from 
typing import TypeVar, Generic T = TypeVar('T') class ParentGeneric(Generic[T]): pass class ChildGeneric(ParentGeneric[T]): pass class Base1: Nested = ParentGeneric class Base2: Nested = ChildGeneric class A(Base2, Base1): pass [out] [case testMultipleInheritance_RefersToNamedTuples] from typing import NamedTuple class NamedTuple1: attr1: int class NamedTuple2: attr2: int class Base1: Nested = NamedTuple1 class Base2: Nested = NamedTuple2 class A(Base1, Base2): pass [out] main:10: error: Definition of "Nested" in base class "Base1" is incompatible with definition in base class "Base2" [case testMultipleInheritance_NestedVariableRefersToSuperlassUnderSubclass] class A: def __init__(self, arg: str) -> None: pass class B(A): pass class Base1: NestedVar = B class Base2: NestedVar = A class Combo(Base2, Base1): ... [out] main:10: error: Definition of "NestedVar" in base class "Base2" is incompatible with definition in base class "Base1" [case testMultipleInheritance_NestedVariableOverriddenWithCompatibleType] from typing import TypeVar, Generic T = TypeVar('T', covariant=True) class GenericBase(Generic[T]): pass class Base1: Nested: GenericBase['Base1'] class Base2: Nested: GenericBase['Base2'] class A(Base1, Base2): Nested: GenericBase['A'] [out] [case testMultipleInheritance_NestedVariableOverriddenWithIncompatibleType1] from typing import TypeVar, Generic T = TypeVar('T', covariant=True) class GenericBase(Generic[T]): pass class Base1: Nested: GenericBase['Base1'] class Base2: Nested: GenericBase['Base2'] class A(Base1, Base2): Nested: GenericBase['Base1'] [out] main:10: error: Incompatible types in assignment (expression has type "GenericBase[Base1]", base class "Base2" defined the type as "GenericBase[Base2]") [case testMultipleInheritance_NestedVariableOverriddenWithIncompatibleType2] from typing import TypeVar, Generic T = TypeVar('T', covariant=True) class GenericBase(Generic[T]): pass class Base1: Nested: GenericBase['Base1'] class Base2: Nested: 
GenericBase['Base2'] class A(Base1, Base2): Nested: GenericBase['Base2'] [out] main:10: error: Incompatible types in assignment (expression has type "GenericBase[Base2]", base class "Base1" defined the type as "GenericBase[Base1]") [case testMultipleInheritance_NestedVariableOverriddenWithCompatibleType] from typing import TypeVar, Generic T = TypeVar('T', covariant=True) class GenericBase(Generic[T]): pass class Base1: Nested: GenericBase['Base1'] class Base2: Nested: GenericBase['Base1'] class A(Base1, Base2): Nested: GenericBase['Base1'] [out] [case testMultipleInheritance_MethodDefinitionsCompatibleWithOverride] from typing import TypeVar, Union _T = TypeVar('_T') class Flag: def __or__(self: _T, other: _T) -> _T: ... # int defines __or__ as: # def __or__(self, n: int) -> int: ... class IntFlag(int, Flag): def __or__(self: _T, other: Union[int, _T]) -> _T: ... [out] [case testMultipleInheritance_MethodDefinitionsIncompatibleOverride] from typing import TypeVar, Union _T = TypeVar('_T') class Flag: def __or__(self: _T, other: _T) -> _T: ... class IntFlag(int, Flag): def __or__(self: _T, other: str) -> _T: ... [out] main:8: error: Argument 1 of "__or__" is incompatible with supertype "Flag"; supertype defines the argument type as "IntFlag" [case testMultipleInheritance_MethodDefinitionsCompatibleNoOverride] from typing import TypeVar, Union _T = TypeVar('_T') class Flag: def __or__(self: _T, other: _T) -> _T: ... 
class IntFlag(int, Flag): pass [out] [case testMultipleInheritance_MethodsReturningSelfCompatible] class A(object): def x(self) -> 'A': return self class B(object): def x(self) -> 'B': return self class C(A, B): def x(self) -> 'C': return self [case testMultipleInheritance_MethodsReturningSelfIncompatible] class A(object): def x(self) -> 'A': return self class B(object): def x(self) -> 'B': return self class C(A, B): # E: Definition of "x" in base class "A" is incompatible with definition in base class "B" pass [case testNestedVariableRefersToSubclassOfAnotherNestedClass] class Mixin1: class Meta: pass class Outer(Mixin1.Meta): pass class Mixin2: NestedVar = Outer class Combo(Mixin2, Mixin1): ... [out] [case testNestedVariableRefersToCompletelyDifferentClasses] class A: pass class B: pass class Base1: NestedVar = A class Base2: NestedVar = B class Combo(Base2, Base1): ... [out] main:9: error: Definition of "NestedVar" in base class "Base2" is incompatible with definition in base class "Base1" [case testDoNotFailIfBothNestedClassesInheritFromAny] from typing import Any class Mixin1: class Meta(Any): pass class Mixin2: class Meta(Any): pass class A(Mixin1, Mixin2): pass [out] [case testDoNotFailIfOneOfNestedClassesIsOuterInheritedFromAny] from typing import Any class Outer(Any): pass class Mixin1: Meta = Outer class Mixin2: class Meta(Any): pass class A(Mixin1, Mixin2): pass [out] [case testGenericMultipleOverrideRemap] from typing import TypeVar, Generic, Tuple K = TypeVar('K') V = TypeVar('V') T = TypeVar('T') class ItemsView(Generic[K, V]): def __iter__(self) -> Tuple[K, V]: ... class Sequence(Generic[T]): def __iter__(self) -> T: ... # Override compatible between bases. class OrderedItemsView(ItemsView[K, V], Sequence[Tuple[K, V]]): def __iter__(self) -> Tuple[K, V]: ... 
class OrderedItemsViewDirect(ItemsView[K, V], Sequence[Tuple[K, V]]): pass [case testGenericMultipleOverrideReplace] from typing import TypeVar, Generic, Union T = TypeVar('T') class A(Generic[T]): def foo(self, x: T) -> None: ... class B(A[T]): ... class C1: def foo(self, x: str) -> None: ... class C2: def foo(self, x: Union[str, int]) -> None: ... class D1(B[str], C1): ... class D2(B[Union[int, str]], C2): ... class D3(C2, B[str]): ... class D4(B[str], C2): ... # E: Definition of "foo" in base class "A" is incompatible with definition in base class "C2" mypy-0.761/test-data/unit/check-namedtuple.test0000644€tŠÔÚ€2›s®0000006135613576752246025712 0ustar jukkaDROPBOX\Domain Users00000000000000[case testNamedTupleUsedAsTuple] from collections import namedtuple X = namedtuple('X', 'x y') x = None # type: X a, b = x b = x[0] a = x[1] a, b, c = x # E: Need more than 2 values to unpack (3 expected) x[2] # E: Tuple index out of range [case testNamedTupleWithTupleFieldNamesUsedAsTuple] from collections import namedtuple X = namedtuple('X', ('x', 'y')) x = None # type: X a, b = x b = x[0] a = x[1] a, b, c = x # E: Need more than 2 values to unpack (3 expected) x[2] # E: Tuple index out of range [case testNamedTupleUnicode_python2] from __future__ import unicode_literals from collections import namedtuple # This test is a regression test for a bug where mypyc-compiled mypy # would crash on namedtuple's with unicode arguments. Our test stubs # don't actually allow that, though, so we ignore the error and just # care we don't crash. 
X = namedtuple('X', ('x', 'y')) # type: ignore [case testNamedTupleNoUnderscoreFields] from collections import namedtuple X = namedtuple('X', 'x, _y, _z') # E: namedtuple() field names cannot start with an underscore: _y, _z [case testNamedTupleAccessingAttributes] from collections import namedtuple X = namedtuple('X', 'x y') x = None # type: X x.x x.y x.z # E: "X" has no attribute "z" [case testNamedTupleClassPython35] # flags: --python-version 3.5 from typing import NamedTuple class A(NamedTuple): x = 3 # type: int [out] main:4: error: NamedTuple class syntax is only supported in Python 3.6 [case testNamedTupleClassInStubPython35] # flags: --python-version 3.5 import foo [file foo.pyi] from typing import NamedTuple class A(NamedTuple): x: int [case testNamedTupleAttributesAreReadOnly] from collections import namedtuple X = namedtuple('X', 'x y') x = None # type: X x.x = 5 # E: Property "x" defined in "X" is read-only x.y = 5 # E: Property "y" defined in "X" is read-only x.z = 5 # E: "X" has no attribute "z" class A(X): pass a = None # type: A a.x = 5 # E: Property "x" defined in "X" is read-only a.y = 5 # E: Property "y" defined in "X" is read-only -- a.z = 5 # not supported yet [case testTypingNamedTupleAttributesAreReadOnly] from typing import NamedTuple from typing_extensions import Protocol class HasX(Protocol): x: str class A(NamedTuple): x: str a: HasX = A("foo") a.x = "bar" [out] main:10: error: Incompatible types in assignment (expression has type "A", variable has type "HasX") main:10: note: Protocol member HasX.x expected settable variable, got read-only attribute [case testNamedTupleCreateWithPositionalArguments] from collections import namedtuple X = namedtuple('X', 'x y') x = X(1, 'x') x.x x.z # E: "X" has no attribute "z" x = X(1) # E: Too few arguments for "X" x = X(1, 2, 3) # E: Too many arguments for "X" [case testCreateNamedTupleWithKeywordArguments] from collections import namedtuple X = namedtuple('X', 'x y') x = X(x=1, y='x') x = X(1, y='x') 
x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X" x = X(y=1) # E: Missing positional argument "x" in call to "X" [case testNamedTupleCreateAndUseAsTuple] from collections import namedtuple X = namedtuple('X', 'x y') x = X(1, 'x') a, b = x a, b, c = x # E: Need more than 2 values to unpack (3 expected) [case testNamedTupleAdditionalArgs] from collections import namedtuple A = namedtuple('A', 'a b') B = namedtuple('B', 'a b', rename=1) C = namedtuple('C', 'a b', rename='not a bool') D = namedtuple('D', 'a b', unrecognized_arg=False) E = namedtuple('E', 'a b', 0) [builtins fixtures/bool.pyi] [out] main:5: error: Argument "rename" to "namedtuple" has incompatible type "str"; expected "int" main:6: error: Unexpected keyword argument "unrecognized_arg" for "namedtuple" /test-data/unit/lib-stub/collections.pyi:3: note: "namedtuple" defined here main:7: error: Too many positional arguments for "namedtuple" [case testNamedTupleDefaults] # flags: --python-version 3.7 from collections import namedtuple X = namedtuple('X', ['x', 'y'], defaults=(1,)) X() # E: Too few arguments for "X" X(0) # ok X(0, 1) # ok X(0, 1, 2) # E: Too many arguments for "X" Y = namedtuple('Y', ['x', 'y'], defaults=(1, 2, 3)) # E: Too many defaults given in call to namedtuple() Z = namedtuple('Z', ['x', 'y'], defaults='not a tuple') # E: List or tuple literal expected as the defaults argument to namedtuple() # E: Argument "defaults" to "namedtuple" has incompatible type "str"; expected "Optional[Iterable[Any]]" [builtins fixtures/list.pyi] [case testNamedTupleWithItemTypes] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) n = N(1, 'x') s = n.a # type: str # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i = n.b # type: int # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") x, y = n if int(): x = y # E: Incompatible types in assignment (expression has type "str", variable has type 
"int") [targets __main__, __main__.N.__new__, __main__.N._asdict, __main__.N._make, __main__.N._replace] [case testNamedTupleWithTupleFieldNamesWithItemTypes] from typing import NamedTuple N = NamedTuple('N', (('a', int), ('b', str))) n = N(1, 'x') s = n.a # type: str # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i = n.b # type: int # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") x, y = n if int(): x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNamedTupleConstructorArgumentTypes] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int" n = N(1, b=2) # E: Argument "b" to "N" has incompatible type "int"; expected "str" N(1, 'x') N(b='x', a=1) [case testNamedTupleAsBaseClass] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) class X(N): pass x = X(1, 2) # E: Argument 2 to "X" has incompatible type "int"; expected "str" s = '' i = 0 if int(): s = x.a # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): i, s = x if int(): s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testNamedTupleAsBaseClass2] from typing import NamedTuple class X(NamedTuple('N', [('a', int), ('b', str)])): pass x = X(1, 2) # E: Argument 2 to "X" has incompatible type "int"; expected "str" s = '' i = 0 if int(): s = x.a # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): i, s = x if int(): s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testNamedTuplesTwoAsBaseClasses] from typing import NamedTuple A = NamedTuple('A', [('a', int)]) B = NamedTuple('B', [('a', int)]) class X(A, B): # E: Class has two incompatible bases 
derived from tuple pass [case testNamedTuplesTwoAsBaseClasses2] from typing import NamedTuple A = NamedTuple('A', [('a', int)]) class X(A, NamedTuple('B', [('a', int)])): # E: Class has two incompatible bases derived from tuple pass [case testNamedTupleSelfTypeWithNamedTupleAsBase] from typing import NamedTuple A = NamedTuple('A', [('a', int), ('b', str)]) class B(A): def f(self, x: int) -> None: self.f(self.a) self.f(self.b) # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int" i = 0 s = '' if int(): i, s = self i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") [out] [case testNamedTupleTypeReferenceToClassDerivedFrom] from typing import NamedTuple A = NamedTuple('A', [('a', int), ('b', str)]) class B(A): def f(self, x: 'B') -> None: i = 0 s = '' if int(): self = x i, s = x i, s = x.a, x.b i, s = x.a, x.a # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") [out] [case testNamedTupleSubtyping] from typing import NamedTuple, Tuple A = NamedTuple('A', [('a', int), ('b', str)]) class B(A): pass a = A(1, '') b = B(1, '') t = None # type: Tuple[int, str] if int(): b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A") if int(): b = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B") if int(): t = a if int(): t = (1, '') if int(): t = b if int(): a = b [case testNamedTupleSimpleTypeInference] from typing import NamedTuple, Tuple A = NamedTuple('A', [('a', int)]) l = [A(1), A(2)] a = A(1) if int(): a = l[0] (i,) = l[0] if int(): i, i = l[0] # E: Need more than 1 value to unpack (2 expected) if int(): l = [A(1)] if int(): a = (1,) # E: Incompatible types 
in assignment (expression has type "Tuple[int]", \ variable has type "A") [builtins fixtures/list.pyi] [case testNamedTupleMissingClassAttribute] import collections MyNamedTuple = collections.namedtuple('MyNamedTuple', ['spam', 'eggs']) MyNamedTuple.x # E: "Type[MyNamedTuple]" has no attribute "x" [builtins fixtures/list.pyi] [case testNamedTupleEmptyItems] from typing import NamedTuple A = NamedTuple('A', []) [case testNamedTupleProperty] from typing import NamedTuple A = NamedTuple('A', [('a', int)]) class B(A): @property def b(self) -> int: return self.a class C(B): pass B(1).b C(2).b [builtins fixtures/property.pyi] [case testNamedTupleAsDict] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = None # type: X reveal_type(x._asdict()) # N: Revealed type is 'builtins.dict[builtins.str, Any]' [builtins fixtures/dict.pyi] [case testNamedTupleReplace] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = None # type: X reveal_type(x._replace()) # N: Revealed type is 'Tuple[Any, Any, fallback=__main__.X]' x._replace(y=5) x._replace(x=3) x._replace(x=3, y=5) x._replace(z=5) # E: Unexpected keyword argument "z" for "_replace" of "X" x._replace(5) # E: Too many positional arguments for "_replace" of "X" [builtins fixtures/list.pyi] [case testNamedTupleReplaceAsClass] # flags: --no-strict-optional from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = None # type: X X._replace(x, x=1, y=2) X._replace(x=1, y=2) # E: Missing positional argument "_self" in call to "_replace" of "X" [builtins fixtures/list.pyi] [case testNamedTupleReplaceTyped] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) x = None # type: X reveal_type(x._replace()) # N: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]' x._replace(x=5) x._replace(y=5) # E: Argument "y" to "_replace" of "X" has incompatible type "int"; expected "str" [case testNamedTupleMake] from typing import NamedTuple X = 
NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._make([5, 'a'])) # N: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]' X._make('a b') # E: Argument 1 to "_make" of "X" has incompatible type "str"; expected "Iterable[Any]" -- # FIX: not a proper class method -- x = None # type: X -- reveal_type(x._make([5, 'a'])) # N: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]' -- x._make('a b') # E: Argument 1 to "_make" of "X" has incompatible type "str"; expected Iterable[Any] [builtins fixtures/list.pyi] [case testNamedTupleFields] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._fields) # N: Revealed type is 'Tuple[builtins.str, builtins.str]' [case testNamedTupleSource] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._source) # N: Revealed type is 'builtins.str' x = None # type: X reveal_type(x._source) # N: Revealed type is 'builtins.str' [case testNamedTupleUnit] from typing import NamedTuple X = NamedTuple('X', []) x = X() # type: X x._replace() x._fields[0] # E: Tuple index out of range [case testNamedTupleJoinNamedTuple] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) Y = NamedTuple('Y', [('x', int), ('y', str)]) reveal_type([X(3, 'b'), Y(1, 'a')]) # N: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' [builtins fixtures/list.pyi] [case testNamedTupleJoinTuple] from typing import NamedTuple, Tuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type([(3, 'b'), X(1, 'a')]) # N: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' reveal_type([X(1, 'a'), (3, 'b')]) # N: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' [builtins fixtures/list.pyi] [case testNamedTupleFieldTypes] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._field_types) # N: Revealed type is 'builtins.dict[builtins.str, Any]' x = None # 
type: X reveal_type(x._field_types) # N: Revealed type is 'builtins.dict[builtins.str, Any]' [builtins fixtures/dict.pyi] [case testNamedTupleAndOtherSuperclass] from typing import NamedTuple class A: pass def f(x: A) -> None: pass class B(NamedTuple('B', []), A): pass f(B()) x = None # type: A if int(): x = B() # Sanity check: fail if baseclass does not match class C: pass def g(x: C) -> None: pass class D(NamedTuple('D', []), A): pass g(D()) # E: Argument 1 to "g" has incompatible type "D"; expected "C" y = None # type: C if int(): y = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") [case testNamedTupleSelfTypeMethod] from typing import TypeVar, NamedTuple T = TypeVar('T', bound='A') class A(NamedTuple('A', [('x', str)])): def member(self: T) -> T: return self class B(A): pass a = None # type: A a = A('').member() b = None # type: B b = B('').member() a = B('') a = B('').member() [case testNamedTupleSelfTypeReplace] from typing import NamedTuple, TypeVar A = NamedTuple('A', [('x', str)]) reveal_type(A('hello')._replace(x='')) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]' a = None # type: A a = A('hello')._replace(x='') class B(A): pass reveal_type(B('hello')._replace(x='')) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]' b = None # type: B b = B('hello')._replace(x='') [case testNamedTupleSelfTypeMake] from typing import NamedTuple, TypeVar A = NamedTuple('A', [('x', str)]) reveal_type(A._make([''])) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]' a = A._make(['']) # type: A class B(A): pass reveal_type(B._make([''])) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]' b = B._make(['']) # type: B [builtins fixtures/list.pyi] [case testNamedTupleIncompatibleRedefinition] from typing import NamedTuple class Crash(NamedTuple): count: int # E: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as 
"Callable[[Tuple[int, ...], Any], int]") [builtins fixtures/tuple.pyi] [case testNamedTupleInClassNamespace] # https://github.com/python/mypy/pull/2553#issuecomment-266474341 from typing import NamedTuple class C: def f(self): A = NamedTuple('A', [('x', int)]) def g(self): A = NamedTuple('A', [('y', int)]) C.A # E: "Type[C]" has no attribute "A" [case testNamedTupleInFunction] from typing import NamedTuple def f() -> None: A = NamedTuple('A', [('x', int)]) A # E: Name 'A' is not defined [case testNamedTupleForwardAsUpperBound] from typing import NamedTuple, TypeVar, Generic T = TypeVar('T', bound='M') class G(Generic[T]): x: T yb: G[int] # E: Type argument "builtins.int" of "G" must be a subtype of "Tuple[builtins.int, fallback=__main__.M]" yg: G[M] reveal_type(G[M]().x.x) # N: Revealed type is 'builtins.int' reveal_type(G[M]().x[0]) # N: Revealed type is 'builtins.int' M = NamedTuple('M', [('x', int)]) [out] [case testNamedTupleWithImportCycle] import a [file a.py] from collections import namedtuple from b import f N = namedtuple('N', 'a') class X(N): pass [file b.py] import a def f(x: a.X) -> None: reveal_type(x) x = a.X(1) reveal_type(x) [out] tmp/b.py:4: note: Revealed type is 'Tuple[Any, fallback=a.X]' tmp/b.py:6: note: Revealed type is 'Tuple[Any, fallback=a.X]' [case testNamedTupleWithImportCycle2] import a [file a.py] from collections import namedtuple from b import f N = namedtuple('N', 'a') [file b.py] import a def f(x: a.N) -> None: reveal_type(x) if int(): x = a.N(1) reveal_type(x) [out] tmp/b.py:4: note: Revealed type is 'Tuple[Any, fallback=a.N]' tmp/b.py:7: note: Revealed type is 'Tuple[Any, fallback=a.N]' [case testSimpleSelfReferentialNamedTuple] from typing import NamedTuple class MyNamedTuple(NamedTuple): parent: 'MyNamedTuple' # E: Cannot resolve name "MyNamedTuple" (possible cyclic definition) def bar(nt: MyNamedTuple) -> MyNamedTuple: return nt x: MyNamedTuple reveal_type(x.parent) # N: Revealed type is 'Any' -- Some crazy self-referential 
named tuples and types dicts -- to be sure that everything works [case testCrossFileNamedTupleForwardRefs] import a [file a.py] import b from typing import Any, NamedTuple class A: def a(self, b: 'b.B') -> str: return 'a' ATuple = NamedTuple('ATuple', [('a', Any)]) [file b.py] import a class B: def b(self, a: 'a.A') -> str: return 'b' def aWithTuple(self, atuple: 'a.ATuple') -> str: return 'a' [out] [case testSelfRefNT1] from typing import Tuple, NamedTuple Node = NamedTuple('Node', [ ('name', str), ('children', Tuple['Node', ...]), # E: Cannot resolve name "Node" (possible cyclic definition) ]) n: Node reveal_type(n) # N: Revealed type is 'Tuple[builtins.str, builtins.tuple[Any], fallback=__main__.Node]' [builtins fixtures/tuple.pyi] [case testSelfRefNT2] from typing import Tuple, NamedTuple A = NamedTuple('A', [ ('x', str), ('y', Tuple['B', ...]), # E: Cannot resolve name "B" (possible cyclic definition) ]) class B(NamedTuple): x: A y: int n: A reveal_type(n) # N: Revealed type is 'Tuple[builtins.str, builtins.tuple[Any], fallback=__main__.A]' [builtins fixtures/tuple.pyi] [case testSelfRefNT3] from typing import NamedTuple, Tuple class B(NamedTuple): x: Tuple[A, int] # E: Cannot resolve name "A" (possible cyclic definition) y: int A = NamedTuple('A', [ ('x', str), ('y', 'B'), ]) n: B m: A reveal_type(n.x) # N: Revealed type is 'Tuple[Any, builtins.int]' reveal_type(m[0]) # N: Revealed type is 'builtins.str' lst = [m, n] reveal_type(lst[0]) # N: Revealed type is 'Tuple[builtins.object, builtins.object]' [builtins fixtures/tuple.pyi] [case testSelfRefNT4] from typing import NamedTuple class B(NamedTuple): x: A # E: Cannot resolve name "A" (possible cyclic definition) y: int class A(NamedTuple): x: str y: B n: A reveal_type(n.y[0]) # N: Revealed type is 'Any' [builtins fixtures/tuple.pyi] [case testSelfRefNT5] from typing import NamedTuple B = NamedTuple('B', [ ('x', A), # E: Cannot resolve name "A" (possible cyclic definition) ('y', int), ]) A = NamedTuple('A', [ 
('x', str), ('y', 'B'), ]) n: A def f(m: B) -> None: pass reveal_type(n) # N: Revealed type is 'Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B], fallback=__main__.A]' reveal_type(f) # N: Revealed type is 'def (m: Tuple[Any, builtins.int, fallback=__main__.B])' [builtins fixtures/tuple.pyi] [case testRecursiveNamedTupleInBases] from typing import List, NamedTuple, Union Exp = Union['A', 'B'] # E: Cannot resolve name "Exp" (possible cyclic definition) \ # E: Cannot resolve name "A" (possible cyclic definition) class A(NamedTuple('A', [('attr', List[Exp])])): pass class B(NamedTuple('B', [('val', object)])): pass def my_eval(exp: Exp) -> int: reveal_type(exp) # N: Revealed type is 'Union[Any, Tuple[builtins.object, fallback=__main__.B]]' if isinstance(exp, A): my_eval(exp[0][0]) return my_eval(exp.attr[0]) if isinstance(exp, B): return exp.val # E: Incompatible return value type (got "object", expected "int") return 0 my_eval(A([B(1), B(2)])) # OK [builtins fixtures/isinstancelist.pyi] [out] [case testNamedTupleImportCycle] import b [file a.py] class C: pass from b import tp x: tp reveal_type(x.x) # N: Revealed type is 'builtins.int' reveal_type(tp) # N: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=b.tp]' tp('x') # E: Argument 1 to "tp" has incompatible type "str"; expected "int" [file b.py] from a import C from typing import NamedTuple tp = NamedTuple('tp', [('x', int)]) [out] [case testSubclassOfRecursiveNamedTuple] from typing import List, NamedTuple class Command(NamedTuple): subcommands: List['Command'] # E: Cannot resolve name "Command" (possible cyclic definition) class HelpCommand(Command): pass hc = HelpCommand(subcommands=[]) reveal_type(hc) # N: Revealed type is 'Tuple[builtins.list[Any], fallback=__main__.HelpCommand]' [builtins fixtures/list.pyi] [out] [case testUnsafeOverlappingNamedTuple] from typing import NamedTuple class Real(NamedTuple): def __sub__(self, other: Real) -> str: return "" class 
Fraction(Real): def __rsub__(self, other: Real) -> Real: return other # E: Signatures of "__rsub__" of "Fraction" and "__sub__" of "Real" are unsafely overlapping [case testForwardReferenceInNamedTuple] from typing import NamedTuple class A(NamedTuple): b: 'B' x: int class B: pass [case testTypeNamedTupleClassmethod] from typing import Type, NamedTuple class D(NamedTuple): @classmethod def f(cls) -> None: pass d: Type[D] d.g() # E: "Type[D]" has no attribute "g" d.f() [builtins fixtures/classmethod.pyi] [case testTypeNamedTupleCall] from typing import NamedTuple Thing = NamedTuple('Thing', [('s', str), ('n', int)]) class CallableTuple(Thing): def __call__(self) -> None: pass o = CallableTuple('hello ', 12) o() [case testNamedTupleSubclassMulti] from typing import NamedTuple class Base: pass class BaseTuple(NamedTuple): value: float class MyTuple(BaseTuple, Base): pass def f(o: Base) -> None: if isinstance(o, MyTuple): reveal_type(o.value) # N: Revealed type is 'builtins.float' [builtins fixtures/isinstance.pyi] [out] [case testNamedTupleNew] from typing import NamedTuple Base = NamedTuple('Base', [('param', int)]) class Child(Base): def __new__(cls, param: int = 1) -> 'Child': return Base.__new__(cls, param) Base(param=10) Child(param=10) [case testNamedTupleClassMethodWithGenericReturnValue] from typing import TypeVar, Type, NamedTuple T = TypeVar('T', bound='Parent') class Parent(NamedTuple): x: str @classmethod def class_method(cls: Type[T]) -> T: return cls(x='text') class Child(Parent): pass reveal_type(Child.class_method()) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.Child]' [builtins fixtures/classmethod.pyi] [case testNamedTupleAsConditionalStrictOptionalDisabled] # flags: --no-strict-optional from typing import NamedTuple class C(NamedTuple): a: int b: str a: C if not a: 1() # E: "int" not callable b = (1, 2) if not b: ''() # E: "str" not callable [builtins fixtures/tuple.pyi] [case testNamedTupleDoubleForward] from typing import Union, 
Mapping, NamedTuple class MyBaseTuple(NamedTuple): base_field_1: int base_field_2: int MyBaseTupleMapping = Mapping[MyBaseTuple, int] MyTupleUnion = Union[MyTupleA, MyTupleB] class MyTupleA(NamedTuple): field_1: MyBaseTupleMapping field_2: MyBaseTuple class MyTupleB(NamedTuple): field_1: MyBaseTupleMapping field_2: MyBaseTuple u: MyTupleUnion reveal_type(u.field_1) # N: Revealed type is 'typing.Mapping[Tuple[builtins.int, builtins.int, fallback=__main__.MyBaseTuple], builtins.int]' reveal_type(u.field_2) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.MyBaseTuple]' reveal_type(u[0]) # N: Revealed type is 'typing.Mapping[Tuple[builtins.int, builtins.int, fallback=__main__.MyBaseTuple], builtins.int]' reveal_type(u[1]) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.MyBaseTuple]' [case testAssignNamedTupleAsAttribute] from typing import NamedTuple class A: def __init__(self) -> None: self.b = NamedTuple('x', [('s', str), ('n', int)]) # E: NamedTuple type as an attribute is not supported reveal_type(A().b) # N: Revealed type is 'Any' mypy-0.761/test-data/unit/check-narrowing.test0000644€tŠÔÚ€2›s®0000003667313576752246025566 0ustar jukkaDROPBOX\Domain Users00000000000000[case testNarrowingParentWithEnumsBasic] from enum import Enum from dataclasses import dataclass from typing import NamedTuple, Tuple, Union from typing_extensions import Literal, TypedDict class Key(Enum): A = 1 B = 2 C = 3 class Object1: key: Literal[Key.A] foo: int class Object2: key: Literal[Key.B] bar: str @dataclass class Dataclass1: key: Literal[Key.A] foo: int @dataclass class Dataclass2: key: Literal[Key.B] foo: str class NamedTuple1(NamedTuple): key: Literal[Key.A] foo: int class NamedTuple2(NamedTuple): key: Literal[Key.B] foo: str Tuple1 = Tuple[Literal[Key.A], int] Tuple2 = Tuple[Literal[Key.B], str] class TypedDict1(TypedDict): key: Literal[Key.A] foo: int class TypedDict2(TypedDict): key: Literal[Key.B] foo: str x1: Union[Object1, 
Object2] if x1.key is Key.A: reveal_type(x1) # N: Revealed type is '__main__.Object1' reveal_type(x1.key) # N: Revealed type is 'Literal[__main__.Key.A]' else: reveal_type(x1) # N: Revealed type is '__main__.Object2' reveal_type(x1.key) # N: Revealed type is 'Literal[__main__.Key.B]' x2: Union[Dataclass1, Dataclass2] if x2.key is Key.A: reveal_type(x2) # N: Revealed type is '__main__.Dataclass1' reveal_type(x2.key) # N: Revealed type is 'Literal[__main__.Key.A]' else: reveal_type(x2) # N: Revealed type is '__main__.Dataclass2' reveal_type(x2.key) # N: Revealed type is 'Literal[__main__.Key.B]' x3: Union[NamedTuple1, NamedTuple2] if x3.key is Key.A: reveal_type(x3) # N: Revealed type is 'Tuple[Literal[__main__.Key.A], builtins.int, fallback=__main__.NamedTuple1]' reveal_type(x3.key) # N: Revealed type is 'Literal[__main__.Key.A]' else: reveal_type(x3) # N: Revealed type is 'Tuple[Literal[__main__.Key.B], builtins.str, fallback=__main__.NamedTuple2]' reveal_type(x3.key) # N: Revealed type is 'Literal[__main__.Key.B]' if x3[0] is Key.A: reveal_type(x3) # N: Revealed type is 'Tuple[Literal[__main__.Key.A], builtins.int, fallback=__main__.NamedTuple1]' reveal_type(x3[0]) # N: Revealed type is 'Literal[__main__.Key.A]' else: reveal_type(x3) # N: Revealed type is 'Tuple[Literal[__main__.Key.B], builtins.str, fallback=__main__.NamedTuple2]' reveal_type(x3[0]) # N: Revealed type is 'Literal[__main__.Key.B]' x4: Union[Tuple1, Tuple2] if x4[0] is Key.A: reveal_type(x4) # N: Revealed type is 'Tuple[Literal[__main__.Key.A], builtins.int]' reveal_type(x4[0]) # N: Revealed type is 'Literal[__main__.Key.A]' else: reveal_type(x4) # N: Revealed type is 'Tuple[Literal[__main__.Key.B], builtins.str]' reveal_type(x4[0]) # N: Revealed type is 'Literal[__main__.Key.B]' x5: Union[TypedDict1, TypedDict2] if x5["key"] is Key.A: reveal_type(x5) # N: Revealed type is 'TypedDict('__main__.TypedDict1', {'key': Literal[__main__.Key.A], 'foo': builtins.int})' else: reveal_type(x5) # N: Revealed 
type is 'TypedDict('__main__.TypedDict2', {'key': Literal[__main__.Key.B], 'foo': builtins.str})' [case testNarrowingParentWithIsInstanceBasic] from dataclasses import dataclass from typing import NamedTuple, Tuple, Union from typing_extensions import TypedDict class Object1: key: int class Object2: key: str @dataclass class Dataclass1: key: int @dataclass class Dataclass2: key: str class NamedTuple1(NamedTuple): key: int class NamedTuple2(NamedTuple): key: str Tuple1 = Tuple[int] Tuple2 = Tuple[str] class TypedDict1(TypedDict): key: int class TypedDict2(TypedDict): key: str x1: Union[Object1, Object2] if isinstance(x1.key, int): reveal_type(x1) # N: Revealed type is '__main__.Object1' else: reveal_type(x1) # N: Revealed type is '__main__.Object2' x2: Union[Dataclass1, Dataclass2] if isinstance(x2.key, int): reveal_type(x2) # N: Revealed type is '__main__.Dataclass1' else: reveal_type(x2) # N: Revealed type is '__main__.Dataclass2' x3: Union[NamedTuple1, NamedTuple2] if isinstance(x3.key, int): reveal_type(x3) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.NamedTuple1]' else: reveal_type(x3) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.NamedTuple2]' if isinstance(x3[0], int): reveal_type(x3) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.NamedTuple1]' else: reveal_type(x3) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.NamedTuple2]' x4: Union[Tuple1, Tuple2] if isinstance(x4[0], int): reveal_type(x4) # N: Revealed type is 'Tuple[builtins.int]' else: reveal_type(x4) # N: Revealed type is 'Tuple[builtins.str]' x5: Union[TypedDict1, TypedDict2] if isinstance(x5["key"], int): reveal_type(x5) # N: Revealed type is 'TypedDict('__main__.TypedDict1', {'key': builtins.int})' else: reveal_type(x5) # N: Revealed type is 'TypedDict('__main__.TypedDict2', {'key': builtins.str})' [builtins fixtures/isinstance.pyi] [case testNarrowingParentMultipleKeys] # flags: --warn-unreachable from enum import Enum from typing import 
Union from typing_extensions import Literal class Key(Enum): A = 1 B = 2 C = 3 D = 4 class Object1: key: Literal[Key.A, Key.C] class Object2: key: Literal[Key.B, Key.C] x: Union[Object1, Object2] if x.key is Key.A: reveal_type(x) # N: Revealed type is '__main__.Object1' else: reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2]' if x.key is Key.C: reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2]' else: reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2]' if x.key is Key.D: reveal_type(x) # E: Statement is unreachable else: reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2]' [case testNarrowingParentWithMultipleParents] from enum import Enum from typing import Union from typing_extensions import Literal class Key(Enum): A = 1 B = 2 C = 3 class Object1: key: Literal[Key.A] class Object2: key: Literal[Key.B] class Object3: key: Literal[Key.C] class Object4: key: str x: Union[Object1, Object2, Object3, Object4] if x.key is Key.A: reveal_type(x) # N: Revealed type is '__main__.Object1' else: reveal_type(x) # N: Revealed type is 'Union[__main__.Object2, __main__.Object3, __main__.Object4]' if isinstance(x.key, str): reveal_type(x) # N: Revealed type is '__main__.Object4' else: reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2, __main__.Object3]' [builtins fixtures/isinstance.pyi] [case testNarrowingParentsWithGenerics] from typing import Union, TypeVar, Generic T = TypeVar('T') class Wrapper(Generic[T]): key: T x: Union[Wrapper[int], Wrapper[str]] if isinstance(x.key, int): reveal_type(x) # N: Revealed type is '__main__.Wrapper[builtins.int]' else: reveal_type(x) # N: Revealed type is '__main__.Wrapper[builtins.str]' [builtins fixtures/isinstance.pyi] [case testNarrowingParentWithParentMixtures] from enum import Enum from typing import Union, NamedTuple from typing_extensions import Literal, TypedDict class Key(Enum): A = 1 B = 2 
C = 3 class KeyedObject: key: Literal[Key.A] class KeyedTypedDict(TypedDict): key: Literal[Key.B] class KeyedNamedTuple(NamedTuple): key: Literal[Key.C] ok_mixture: Union[KeyedObject, KeyedNamedTuple] if ok_mixture.key is Key.A: reveal_type(ok_mixture) # N: Revealed type is '__main__.KeyedObject' else: reveal_type(ok_mixture) # N: Revealed type is 'Tuple[Literal[__main__.Key.C], fallback=__main__.KeyedNamedTuple]' impossible_mixture: Union[KeyedObject, KeyedTypedDict] if impossible_mixture.key is Key.A: # E: Item "KeyedTypedDict" of "Union[KeyedObject, KeyedTypedDict]" has no attribute "key" reveal_type(impossible_mixture) # N: Revealed type is 'Union[__main__.KeyedObject, TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]})]' else: reveal_type(impossible_mixture) # N: Revealed type is 'Union[__main__.KeyedObject, TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]})]' if impossible_mixture["key"] is Key.A: # E: Value of type "Union[KeyedObject, KeyedTypedDict]" is not indexable reveal_type(impossible_mixture) # N: Revealed type is 'Union[__main__.KeyedObject, TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]})]' else: reveal_type(impossible_mixture) # N: Revealed type is 'Union[__main__.KeyedObject, TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]})]' weird_mixture: Union[KeyedTypedDict, KeyedNamedTuple] if weird_mixture["key"] is Key.B: # E: Invalid tuple index type (actual type "str", expected type "Union[int, slice]") reveal_type(weird_mixture) # N: Revealed type is 'Union[TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]}), Tuple[Literal[__main__.Key.C], fallback=__main__.KeyedNamedTuple]]' else: reveal_type(weird_mixture) # N: Revealed type is 'Union[TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]}), Tuple[Literal[__main__.Key.C], fallback=__main__.KeyedNamedTuple]]' if weird_mixture[0] is Key.B: # E: TypedDict key must be a string literal; 
expected one of ('key') reveal_type(weird_mixture) # N: Revealed type is 'Union[TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]}), Tuple[Literal[__main__.Key.C], fallback=__main__.KeyedNamedTuple]]' else: reveal_type(weird_mixture) # N: Revealed type is 'Union[TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]}), Tuple[Literal[__main__.Key.C], fallback=__main__.KeyedNamedTuple]]' [builtins fixtures/slice.pyi] [case testNarrowingParentWithProperties] from enum import Enum from typing import Union from typing_extensions import Literal class Key(Enum): A = 1 B = 2 C = 3 class Object1: key: Literal[Key.A] class Object2: @property def key(self) -> Literal[Key.A]: ... class Object3: @property def key(self) -> Literal[Key.B]: ... x: Union[Object1, Object2, Object3] if x.key is Key.A: reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2]' else: reveal_type(x) # N: Revealed type is '__main__.Object3' [builtins fixtures/property.pyi] [case testNarrowingParentWithAny] from enum import Enum from typing import Union, Any from typing_extensions import Literal class Key(Enum): A = 1 B = 2 C = 3 class Object1: key: Literal[Key.A] class Object2: key: Literal[Key.B] x: Union[Object1, Object2, Any] if x.key is Key.A: reveal_type(x.key) # N: Revealed type is 'Literal[__main__.Key.A]' reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, Any]' else: # TODO: Is this a bug? Should we skip inferring Any for singleton types? 
reveal_type(x.key) # N: Revealed type is 'Union[Any, Literal[__main__.Key.B]]' reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2, Any]' [case testNarrowingParentsHierarchy] from typing import Union from typing_extensions import Literal from enum import Enum class Key(Enum): A = 1 B = 2 C = 3 class Parent1: child: Union[Child1, Child2] class Parent2: child: Union[Child2, Child3] class Parent3: child: Union[Child3, Child1] class Child1: main: Literal[Key.A] same_for_1_and_2: Literal[Key.A] class Child2: main: Literal[Key.B] same_for_1_and_2: Literal[Key.A] class Child3: main: Literal[Key.C] same_for_1_and_2: Literal[Key.B] x: Union[Parent1, Parent2, Parent3] if x.child.main is Key.A: reveal_type(x) # N: Revealed type is 'Union[__main__.Parent1, __main__.Parent3]' reveal_type(x.child) # N: Revealed type is '__main__.Child1' else: reveal_type(x) # N: Revealed type is 'Union[__main__.Parent1, __main__.Parent2, __main__.Parent3]' reveal_type(x.child) # N: Revealed type is 'Union[__main__.Child2, __main__.Child3]' if x.child.same_for_1_and_2 is Key.A: reveal_type(x) # N: Revealed type is 'Union[__main__.Parent1, __main__.Parent2, __main__.Parent3]' reveal_type(x.child) # N: Revealed type is 'Union[__main__.Child1, __main__.Child2]' else: reveal_type(x) # N: Revealed type is 'Union[__main__.Parent2, __main__.Parent3]' reveal_type(x.child) # N: Revealed type is '__main__.Child3' y: Union[Parent1, Parent2] if y.child.main is Key.A: reveal_type(y) # N: Revealed type is '__main__.Parent1' reveal_type(y.child) # N: Revealed type is '__main__.Child1' else: reveal_type(y) # N: Revealed type is 'Union[__main__.Parent1, __main__.Parent2]' reveal_type(y.child) # N: Revealed type is 'Union[__main__.Child2, __main__.Child3]' if y.child.same_for_1_and_2 is Key.A: reveal_type(y) # N: Revealed type is 'Union[__main__.Parent1, __main__.Parent2]' reveal_type(y.child) # N: Revealed type is 'Union[__main__.Child1, __main__.Child2]' else: reveal_type(y) # N: 
Revealed type is '__main__.Parent2' reveal_type(y.child) # N: Revealed type is '__main__.Child3' [case testNarrowingParentsHierarchyGenerics] from typing import Generic, TypeVar, Union T = TypeVar('T') class Model(Generic[T]): attr: T class A: model: Model[int] class B: model: Model[str] x: Union[A, B] if isinstance(x.model.attr, int): reveal_type(x) # N: Revealed type is '__main__.A' reveal_type(x.model) # N: Revealed type is '__main__.Model[builtins.int]' else: reveal_type(x) # N: Revealed type is '__main__.B' reveal_type(x.model) # N: Revealed type is '__main__.Model[builtins.str]' [builtins fixtures/isinstance.pyi] [case testNarrowingParentsHierarchyTypedDict] # flags: --warn-unreachable from typing import Union from typing_extensions import TypedDict, Literal from enum import Enum class Key(Enum): A = 1 B = 2 C = 3 class Parent1(TypedDict): model: Model1 foo: int class Parent2(TypedDict): model: Model2 bar: str class Model1(TypedDict): key: Literal[Key.A] class Model2(TypedDict): key: Literal[Key.B] x: Union[Parent1, Parent2] if x["model"]["key"] is Key.A: reveal_type(x) # N: Revealed type is 'TypedDict('__main__.Parent1', {'model': TypedDict('__main__.Model1', {'key': Literal[__main__.Key.A]}), 'foo': builtins.int})' reveal_type(x["model"]) # N: Revealed type is 'TypedDict('__main__.Model1', {'key': Literal[__main__.Key.A]})' else: reveal_type(x) # N: Revealed type is 'TypedDict('__main__.Parent2', {'model': TypedDict('__main__.Model2', {'key': Literal[__main__.Key.B]}), 'bar': builtins.str})' reveal_type(x["model"]) # N: Revealed type is 'TypedDict('__main__.Model2', {'key': Literal[__main__.Key.B]})' y: Union[Parent1, Parent2] if y["model"]["key"] is Key.C: reveal_type(y) # E: Statement is unreachable reveal_type(y["model"]) else: reveal_type(y) # N: Revealed type is 'Union[TypedDict('__main__.Parent1', {'model': TypedDict('__main__.Model1', {'key': Literal[__main__.Key.A]}), 'foo': builtins.int}), TypedDict('__main__.Parent2', {'model': 
TypedDict('__main__.Model2', {'key': Literal[__main__.Key.B]}), 'bar': builtins.str})]' reveal_type(y["model"]) # N: Revealed type is 'Union[TypedDict('__main__.Model1', {'key': Literal[__main__.Key.A]}), TypedDict('__main__.Model2', {'key': Literal[__main__.Key.B]})]' mypy-0.761/test-data/unit/check-newsemanal.test0000644€tŠÔÚ€2›s®0000022122313576752246025675 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for the new semantic analyzer [case testNewAnalyzerEmpty] [case testNewAnalyzerSimpleAssignment] x = 1 x.y # E: "int" has no attribute "y" y # E: Name 'y' is not defined [case testNewAnalyzerSimpleAnnotation] x: int = 0 y: str = 0 \ # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testNewAnalyzerSimpleClass] class A: x: int a: A a.x a.y # E: "A" has no attribute "y" [case testNewAnalyzerErrorInClassBody] class A: x # E: Name 'x' is not defined [case testNewAnalyzerTypeAnnotationForwardReference] class A: b: B class B: a: A a: A b: B a.b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") a.b = b b.a = a b.a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testNewAnalyzerTypeAnnotationCycle1] import b [file a.py] import b class A: pass y: b.B y() # E: "B" not callable [file b.py] import a class B: pass x: a.A reveal_type(x) # N: Revealed type is 'a.A' [case testNewAnalyzerTypeAnnotationCycle2] import a [file a.py] from b import B class A: pass y: B y() [file b.py] from a import A class B: pass x: A x() [out] tmp/b.py:4: error: "A" not callable tmp/a.py:4: error: "B" not callable [case testNewAnalyzerTypeAnnotationCycle3] import b [file a.py] from b import bad # E: Module 'b' has no attribute 'bad'; maybe "bad2"? [file b.py] from a import bad2 # E: Module 'a' has no attribute 'bad2'; maybe "bad"? 
[case testNewAnalyzerTypeAnnotationCycle4] import b [file a.py] from b import bad # E: Module 'b' has no attribute 'bad' [file b.py] # TODO: Could we generate an error here as well? from a import bad [targets a, b, a, b, a, b, a, b, __main__] [case testNewAnalyzerExportedValuesInImportAll] from m import * _ = a _ = b _ = c _ = d _e = e _f = f # E: Name 'f' is not defined _ = _g # E: Name '_g' is not defined reveal_type(_e) # N: Revealed type is 'm.A' [file m.py] __all__ = ['a'] __all__ += ('b',) __all__.append('c') __all__.extend(('d', 'e')) a = b = c = d = _g = 1 e: 'A' f: 'A' class A: ... [builtins fixtures/module_all.pyi] [case testNewAnalyzerSimpleFunction] def f(x: int) -> str: return 'x' def g(x: int) -> int: y = f(1) return y # E: Incompatible return value type (got "str", expected "int") [case testNewAnalyzerSimpleMethod] class A: def __init__(self, x: int) -> None: self.x = x def f(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") def g(self) -> int: return self.f() # E: Incompatible return value type (got "str", expected "int") [case testNewAnalyzerFunctionForwardRef] def f() -> None: x = g(1) # E: Argument 1 to "g" has incompatible type "int"; expected "str" reveal_type(x) # N: Revealed type is 'builtins.str' def g(x: str) -> str: return x [case testNewAnalyzerExportedImportThreePasses] import b [file a.py] from b import b1 as a2 from b import b2 as a3 def a1() -> int: pass reveal_type(a3()) # N: Revealed type is 'builtins.int' [file b.py] from a import a1 as b2 from a import a2 as b3 def b1() -> str: pass reveal_type(b3()) # N: Revealed type is 'builtins.str' [case testNewAnalyzerBool] reveal_type(True) # N: Revealed type is 'Literal[True]?' reveal_type(False) # N: Revealed type is 'Literal[False]?' 
[case testNewAnalyzerNewTypeMultiplePasses] import b [file a.py] from typing import NewType import b class A: pass N2 = NewType('N2', b.N1) def f1(x: A) -> None: pass def f2(x: b.N1) -> None: pass def f3(x: N2) -> None: pass a = A() n1 = b.N1(a) n2 = N2(n1) f1(a) f1(n1) f1(n2) f2(a) # E: Argument 1 to "f2" has incompatible type "A"; expected "N1" f2(n1) f2(n2) f3(a) # E: Argument 1 to "f3" has incompatible type "A"; expected "N2" f3(n1) # E: Argument 1 to "f3" has incompatible type "N1"; expected "N2" f3(n2) # Test N2 etc. [file b.py] from typing import NewType import a N1 = NewType('N1', a.A) [case testNewAnalyzerInheritanceForwardRef] class C(B): pass class B(A): pass class A: def __init__(self, x: str) -> None: pass def f(self, x: int) -> None: pass C(1) # E: Argument 1 to "C" has incompatible type "int"; expected "str" B(1) # E: Argument 1 to "B" has incompatible type "int"; expected "str" C('').f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" B('').f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testNewAnalyzerInheritanceMROInCycle] import a [file a.py] from b import A import b class B(A): b: int class D(b.C): d: int d = D() reveal_type(d.a) # N: Revealed type is 'builtins.int' reveal_type(d.b) # N: Revealed type is 'builtins.int' reveal_type(d.c) # N: Revealed type is 'builtins.int' reveal_type(d.d) # N: Revealed type is 'builtins.int' [file b.py] from a import B class A: a: int class C(B): c: int [targets b, a, b, a, __main__] [case testNewAnalyzerTypedDictClass] from mypy_extensions import TypedDict import a class T1(TypedDict): x: A class A: pass reveal_type(T1(x=A())) # E [file a.py] from mypy_extensions import TypedDict from b import TD1 as TD2, TD3 class T2(TD3): x: int reveal_type(T2(x=2)) # E [file b.py] from a import TypedDict as TD1 from a import TD2 as TD3 [out] tmp/a.py:5: note: Revealed type is 'TypedDict('a.T2', {'x': builtins.int})' main:6: note: Revealed type is 
'TypedDict('__main__.T1', {'x': __main__.A})' [case testNewAnalyzerTypedDictClassInheritance] from mypy_extensions import TypedDict class T2(T1): y: int class T1(TypedDict): x: str class T3(TypedDict): x: str class T4(T3): y: A class A: pass T2(x=0, y=0) # E: Incompatible types (expression has type "int", TypedDict item "x" has type "str") x: T2 reveal_type(x) # N: Revealed type is 'TypedDict('__main__.T2', {'x': builtins.str, 'y': builtins.int})' y: T4 reveal_type(y) # N: Revealed type is 'TypedDict('__main__.T4', {'x': builtins.str, 'y': __main__.A})' [case testNewAnalyzerRedefinitionAndDeferral1a] import a [file a.py] MYPY = False if MYPY: from b import x as y x = 0 def y(): pass # E: Name 'y' already defined on line 4 reveal_type(y) # N: Revealed type is 'builtins.int' y2 = y class y2: pass # E: Name 'y2' already defined on line 9 reveal_type(y2) # N: Revealed type is 'builtins.int' y3, y4 = y, y if MYPY: # Tweak processing order from b import f as y3 # E: Incompatible import of "y3" (imported name has type "Callable[[], Any]", local name has type "int") reveal_type(y3) # N: Revealed type is 'builtins.int' [file b.py] from a import x def f(): pass [targets a, b, a, a.y, b.f, __main__] [case testNewAnalyzerRedefinitionAndDeferral1b] import a [file a.py] from b import x as y x = 0 def y(): pass # E: Name 'y' already defined on line 2 reveal_type(y) # N: Revealed type is 'builtins.int' y2 = y class y2: pass # E: Name 'y2' already defined on line 7 reveal_type(y2) # N: Revealed type is 'builtins.int' y3, y4 = y, y from b import f as y3 # E: Incompatible import of "y3" (imported name has type "Callable[[], Any]", local name has type "int") reveal_type(y3) # N: Revealed type is 'builtins.int' [file b.py] MYPY = False if MYPY: # Tweak processing order from a import x def f(): pass [targets b, a, b, a, b.f, a.y, __main__] [case testNewAnalyzerRedefinitionAndDeferral2a] import a [file a.py] MYPY = False if MYPY: # Tweak processing order from b import C as C2 class C: 
pass class C2: pass # E: Name 'C2' already defined on line 4 [file b.py] from a import C [case testNewAnalyzerRedefinitionAndDeferral2b] import a [file a.py] from b import C as C2 class C: pass class C2: pass # E: Name 'C2' already defined on line 2 [file b.py] MYPY = False if MYPY: # Tweak processing order from a import C [case testNewAnalyzerRedefinitionAndDeferral3] import a [file a.py] from b import f as g def f(): pass a, *b = g() class b(): pass # E: Name 'b' already defined on line 4 reveal_type(b) # N: Revealed type is 'Any' [file b.py] from a import f [case testNewAnalyzerImportStarForwardRef1] import a [file a.py] x: A reveal_type(x) # N: Revealed type is 'b.A' from b import * class A: pass # E: Name 'A' already defined (possibly by an import) [file b.py] class A: pass MYPY = False if MYPY: # Tweak processing order from a import x [case testNewAnalyzerImportStarForwardRef2] import a [file a.py] x: A reveal_type(x) # N: Revealed type is 'b.A' MYPY = False if MYPY: # Tweak processing order from b import * class A: pass # E: Name 'A' already defined (possibly by an import) [file b.py] class A: pass from a import x [case testNewAnalyzerClassInFunction] def main() -> None: x: C class C: def __init__(self) -> None: self.x: A x() # E: "C" not callable reveal_type(x.x) # N: Revealed type is '__main__.A@8' class A: pass [case testNewAnalyzerMutuallyRecursiveFunctions] def main() -> None: def f() -> int: reveal_type(g()) # N: Revealed type is 'builtins.str' return int() def g() -> str: reveal_type(f()) # N: Revealed type is 'builtins.int' return str() [case testNewAnalyzerMissingNamesInFunctions] def main() -> None: def f() -> None: x # E: Name 'x' is not defined class C: x # E: Name 'x' is not defined [case testNewAnalyzerCyclicDefinitions] gx = gy # E: Cannot resolve name "gy" (possible cyclic definition) gy = gx def main() -> None: class C: def meth(self) -> None: lx = ly # E: Cannot resolve name "ly" (possible cyclic definition) ly = lx [case 
testNewAnalyzerCyclicDefinitionCrossModule] import b [file a.py] import b x = b.x # E: Cannot resolve attribute "x" (possible cyclic definition) \ # E: Module has no attribute "x" [file b.py] import a x = a.x [builtins fixtures/module.pyi] [case testNewAnalyzerMutuallyRecursiveOverloadedFunctions] from typing import overload, Union def main() -> None: @overload def f(x: int) -> int: ... @overload def f(x: str) -> str: ... def f(x: Union[int, str]) -> Union[int, str]: reveal_type(g(str())) # N: Revealed type is 'builtins.str' return x @overload def g(x: int) -> int: ... @overload def g(x: str) -> str: ... def g(x: Union[int, str]) -> Union[int, str]: reveal_type(f(int())) # N: Revealed type is 'builtins.int' return float() # E: Incompatible return value type (got "float", expected "Union[int, str]") [case testNewAnalyzerNestedClassInMethod] class C: class D: def meth(self) -> None: x: Out.In reveal_type(x.t) # N: Revealed type is 'builtins.int' class Out: class In: def meth(self) -> None: self.t: int [case testNewAnalyzerDeeplyNestedFunctions] class Out: class In: def meth(self) -> None: x: C.D reveal_type(x.t) # N: Revealed type is '__main__.Test@10' class C: class D: def meth(self) -> None: self.t: Test class Test: def test(self) -> None: def one() -> int: reveal_type(other()) # N: Revealed type is 'builtins.str' return int() def other() -> str: reveal_type(one()) # N: Revealed type is 'builtins.int' return str() [case testNewAnalyzerNestedClass1] class A: class B: x: int def __init__(self, x: int) -> None: self.x = x def f(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") b: A.B b = A.B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int" reveal_type(b) # N: Revealed type is '__main__.A.B' reveal_type(b.x) # N: Revealed type is 'builtins.int' reveal_type(b.f()) # N: Revealed type is 'builtins.str' [case testNewAnalyzerNestedClass2] b: A.B b = A.B('') # E: Argument 1 to "B" has incompatible type "str"; 
expected "int" reveal_type(b) # N: Revealed type is '__main__.A.B' reveal_type(b.x) # N: Revealed type is 'builtins.int' reveal_type(b.f()) # N: Revealed type is 'builtins.str' class A: class B: x: int def __init__(self, x: int) -> None: self.x = x def f(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") [case testNewAnalyzerGenerics] from typing import TypeVar, Generic c: C[int] c2: C[int, str] # E: "C" expects 1 type argument, but 2 given c3: C c = C('') # E: Argument 1 to "C" has incompatible type "str"; expected "int" reveal_type(c.get()) # N: Revealed type is 'builtins.int*' reveal_type(c2) # N: Revealed type is '__main__.C[Any]' reveal_type(c3) # N: Revealed type is '__main__.C[Any]' T = TypeVar('T') class C(Generic[T]): def __init__(self, x: T) -> None: self.x = x def get(self) -> T: return self.x [case testNewAnalyzerGenericsTypeVarForwardRef] from typing import TypeVar, Generic class C(Generic[T]): def __init__(self, x: T) -> None: self.x = x def get(self) -> T: return self.x T = TypeVar('T') c: C[int] reveal_type(c) # N: Revealed type is '__main__.C[builtins.int]' c = C('') # E: Argument 1 to "C" has incompatible type "str"; expected "int" reveal_type(c.get()) # N: Revealed type is 'builtins.int*' [case testNewAnalyzerTypeAlias] from typing import Union, TypeVar, Generic C2 = C U = Union[C, int] G = D[T, C] c: C2 reveal_type(c) # N: Revealed type is '__main__.C' u: U reveal_type(u) # N: Revealed type is 'Union[__main__.C, builtins.int]' g: G[int] reveal_type(g) # N: Revealed type is '__main__.D[builtins.int, __main__.C]' class C: pass T = TypeVar('T') S = TypeVar('S') class D(Generic[T, S]): pass [case testNewAnalyzerTypeAlias2] from typing import Union class C(D): pass A = Union[C, int] x: A reveal_type(x) # N: Revealed type is 'Union[__main__.C, builtins.int]' class D: pass [case testNewAnalyzerBuiltinTypeAliases] from typing import List x: List[C] reveal_type(x) # N: Revealed type is 'builtins.list[__main__.C]' 
class C: pass [builtins fixtures/list.pyi] [case testNewAnalyzerVersionCheck] import sys if sys.version_info[0] < 2: 1() import nonexistent else: def f(x: int) -> None: pass f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" def g() -> None: if sys.version_info[0] < 3: import nonexistent2 else: 1() # E: "int" not callable [builtins fixtures/ops.pyi] [case testNewAnalyzerVersionCheck2] import sys assert sys.version_info[0] == 3 1() # E: "int" not callable assert sys.version_info[0] < 3 ''() [builtins fixtures/ops.pyi] [case testNewAnalyzerOverload] from typing import overload, Union @overload def f(x: int) -> int: ... @overload def f(x: str) -> str: ... def f(x: Union[int, str]) -> Union[int, str]: return 1.0 # E: Incompatible return value type (got "float", expected "Union[int, str]") f(1) f('') f(1.0) # E: No overload variant of "f" matches argument type "float" \ # N: Possible overload variants: \ # N: def f(x: int) -> int \ # N: def f(x: str) -> str [case testNewAnalyzerOverload2] from typing import overload, Union class A: @overload def f(self, x: int) -> int: ... @overload def f(self, x: str) -> str: ... def f(self, x: Union[int, str]) -> Union[int, str]: return 1.0 # E: Incompatible return value type (got "float", expected "Union[int, str]") a = A() a.f(1) a.f('') a.f(1.0) # E: No overload variant of "f" of "A" matches argument type "float" \ # N: Possible overload variants: \ # N: def f(self, x: int) -> int \ # N: def f(self, x: str) -> str [case testNewAnalyzerPromotion] y: int f(y) f(1) def f(x: float) -> None: pass [builtins fixtures/primitives.pyi] [case testNewAnalyzerFunctionDecorator] from typing import Callable @dec def f1(x: int) -> int: return '' # E: Incompatible return value type (got "str", expected "int") def dec(f: Callable[[int], int]) -> Callable[[str], str]: ... 
@dec def f2(x: int) -> int: return '' # E: Incompatible return value type (got "str", expected "int") f1(1) # E: Argument 1 to "f1" has incompatible type "int"; expected "str" reveal_type(f1('')) # N: Revealed type is 'builtins.str' f2(1) # E: Argument 1 to "f2" has incompatible type "int"; expected "str" [case testNewAnalyzerTypeVarForwardReference] from typing import TypeVar, Generic T = TypeVar('T') XY = TypeVar('XY', X, Y) class C(Generic[T]): pass class D(C[XY], Generic[XY]): pass class X: pass class Y: pass x: D[int] # E: Value of type variable "XY" of "D" cannot be "int" y: D[Y] [case testNewAnalyzerTypeVarForwardReference2] from typing import TypeVar, Generic T = TypeVar('T') XY = TypeVar('XY', X, Y) class C(Generic[T]): pass class D(C[XY]): pass class X: pass class Y: pass x: D[int] # E: Value of type variable "XY" of "D" cannot be "int" y: D[Y] [case testNewAnalyzerTypeVarForwardReferenceValuesDeferred] from typing import TypeVar, Generic T = TypeVar('T') XY = TypeVar('XY', X, Y) class C(Generic[T]): pass class D(C[XY], Generic[XY]): pass class X(Defer): pass class Y(Defer): pass class Defer: ... x: D[int] # E: Value of type variable "XY" of "D" cannot be "int" y: D[Y] [builtins fixtures/list.pyi] [case testNewAnalyzerTypeVarForwardReferenceBoundDeferred] from typing import TypeVar, Generic T = TypeVar('T') TY = TypeVar('TY', bound=Y) class C(Generic[T]): pass class D(C[TY], Generic[TY]): pass class Y(Defer): pass class Defer: ... x: D[int] # E: Type argument "builtins.int" of "D" must be a subtype of "__main__.Y" y: D[Y] [case testNewAnalyzerTypeVarForwardReferenceErrors] from typing import TypeVar, Generic class C(Generic[T]): def __init__(self, x: T) -> None: ... def func(x: U) -> U: ... 
U = TypeVar('U', asdf, asdf) # E: Name 'asdf' is not defined T = TypeVar('T', bound=asdf) # E: Name 'asdf' is not defined reveal_type(C) # N: Revealed type is 'def [T <: Any] (x: T`1) -> __main__.C[T`1]' reveal_type(func) # N: Revealed type is 'def [U in (Any, Any)] (x: U`-1) -> U`-1' [case testNewAnalyzerSubModuleInCycle] import a [file a.py] MYPY = False if MYPY: from b.c import x [file b/__init__.pyi] import b.c [file b/c.pyi] x = 0 import a [case testNewAnalyzerBaseClassSelfReference] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass a1: A[C] = C() a2: A[D] = C() \ # E: Incompatible types in assignment (expression has type "C", variable has type "A[D]") class C(A[C]): pass class D(A[D]): pass [case testNewAnalyzerTypeVarBoundForwardRef] from typing import TypeVar T = TypeVar('T', bound='C') class C: pass class D(C): pass class E: pass def f(x: T) -> T: return x reveal_type(f(D())) # N: Revealed type is '__main__.D*' f(E()) # E: Value of type variable "T" of "f" cannot be "E" [case testNewAnalyzerNameExprRefersToIncompleteType] import a [file a.py] from b import f class C(D): pass class D: pass [file b.py] from a import C reveal_type(C()) # N: Revealed type is 'a.C' def f(): pass [case testNewAnalyzerMemberExprRefersToIncompleteType] import a [file a.py] from b import f class C(D): pass class D: pass [file b.py] import a reveal_type(a.C()) # N: Revealed type is 'a.C' def f(): pass [case testNewAnalyzerNamedTupleCall] from typing import NamedTuple o: Out i: In Out = NamedTuple('Out', [('x', In), ('y', Other)]) reveal_type(o) # N: Revealed type is 'Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]' reveal_type(o.x) # N: Revealed type is 'Tuple[builtins.str, __main__.Other, fallback=__main__.In]' reveal_type(o.y) # N: Revealed type is '__main__.Other' reveal_type(o.x.t) # N: Revealed type is '__main__.Other' reveal_type(i.t) # N: Revealed type is '__main__.Other' In = 
NamedTuple('In', [('s', str), ('t', Other)]) class Other: pass [case testNewAnalyzerNamedTupleClass] from typing import NamedTuple o: Out i: In class Out(NamedTuple): x: In y: Other reveal_type(o) # N: Revealed type is 'Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]' reveal_type(o.x) # N: Revealed type is 'Tuple[builtins.str, __main__.Other, fallback=__main__.In]' reveal_type(o.y) # N: Revealed type is '__main__.Other' reveal_type(o.x.t) # N: Revealed type is '__main__.Other' reveal_type(i.t) # N: Revealed type is '__main__.Other' class In(NamedTuple): s: str t: Other class Other: pass [case testNewAnalyzerNamedTupleCallNested] from typing import NamedTuple o: C.Out i: C.In reveal_type(o) # N: Revealed type is 'Tuple[Tuple[builtins.str, __main__.C.Other, fallback=__main__.C.In], __main__.C.Other, fallback=__main__.C.Out]' reveal_type(o.x) # N: Revealed type is 'Tuple[builtins.str, __main__.C.Other, fallback=__main__.C.In]' reveal_type(o.y) # N: Revealed type is '__main__.C.Other' reveal_type(o.x.t) # N: Revealed type is '__main__.C.Other' reveal_type(i.t) # N: Revealed type is '__main__.C.Other' class C: In = NamedTuple('In', [('s', str), ('t', Other)]) Out = NamedTuple('Out', [('x', In), ('y', Other)]) class Other: pass [case testNewAnalyzerNamedTupleClassNested] from typing import NamedTuple o: C.Out i: C.In reveal_type(o) # N: Revealed type is 'Tuple[Tuple[builtins.str, __main__.C.Other, fallback=__main__.C.In], __main__.C.Other, fallback=__main__.C.Out]' reveal_type(o.x) # N: Revealed type is 'Tuple[builtins.str, __main__.C.Other, fallback=__main__.C.In]' reveal_type(o.y) # N: Revealed type is '__main__.C.Other' reveal_type(o.x.t) # N: Revealed type is '__main__.C.Other' reveal_type(i.t) # N: Revealed type is '__main__.C.Other' class C: class Out(NamedTuple): x: C.In y: C.Other class In(NamedTuple): s: str t: C.Other class Other: pass [case testNewAnalyzerNamedTupleCallNestedMethod] from typing import 
NamedTuple c = C() reveal_type(c.o) # N: Revealed type is 'Tuple[Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@11], __main__.Other@12, fallback=__main__.C.Out@10]' reveal_type(c.o.x) # N: Revealed type is 'Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@11]' class C: def get_tuple(self) -> None: self.o: Out Out = NamedTuple('Out', [('x', In), ('y', Other)]) In = NamedTuple('In', [('s', str), ('t', Other)]) class Other: pass [case testNewAnalyzerNamedTupleClassNestedMethod] from typing import NamedTuple c = C() reveal_type(c.o) # N: Revealed type is 'Tuple[Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15], __main__.Other@18, fallback=__main__.C.Out@11]' reveal_type(c.o.x) # N: Revealed type is 'Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15]' reveal_type(c.o.method()) # N: Revealed type is 'Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15]' class C: def get_tuple(self) -> None: self.o: Out class Out(NamedTuple): x: In y: Other def method(self) -> In: ... 
class In(NamedTuple): s: str t: Other class Other: pass [case testNewAnalyzerNamedTupleClassForwardMethod] from typing import NamedTuple n: NT reveal_type(n.get_other()) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.Other]' reveal_type(n.get_other().s) # N: Revealed type is 'builtins.str' class NT(NamedTuple): x: int y: int def get_other(self) -> Other: pass class Other(NamedTuple): s: str [case testNewAnalyzerNamedTupleSpecialMethods] from typing import NamedTuple o: SubO reveal_type(SubO._make) # N: Revealed type is 'def (iterable: typing.Iterable[Any], *, new: Any =, len: Any =) -> Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.SubO]' reveal_type(o._replace(y=Other())) # N: Revealed type is 'Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.SubO]' class SubO(Out): pass Out = NamedTuple('Out', [('x', In), ('y', Other)]) In = NamedTuple('In', [('s', str), ('t', Other)]) class Other: pass [case testNewAnalyzerNamedTupleBaseClass] from typing import NamedTuple o: Out reveal_type(o) # N: Revealed type is 'Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]' reveal_type(o.x) # N: Revealed type is 'Tuple[builtins.str, __main__.Other, fallback=__main__.In]' reveal_type(o.x.t) # N: Revealed type is '__main__.Other' reveal_type(Out._make) # N: Revealed type is 'def (iterable: typing.Iterable[Any], *, new: Any =, len: Any =) -> Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]' class Out(NamedTuple('Out', [('x', In), ('y', Other)])): pass class In(NamedTuple): s: str t: Other class Other: pass [case testNewAnalyzerIncompleteRefShadowsBuiltin1] import a [file a.py] from typing import TypeVar, Generic from b import C as int x: int[str] reveal_type(x) # N: Revealed type is 'a.C[builtins.str]' T = TypeVar('T') class C(Generic[T]): pass [file b.py] from a 
import C [case testNewAnalyzerIncompleteRefShadowsBuiltin2] import b [file a.py] import b int = b.C class C: pass x: int reveal_type(x) # N: Revealed type is 'b.C' [file b.py] import a int = a.C class C: pass x: int reveal_type(x) # N: Revealed type is 'a.C' [case testNewAnalyzerNamespaceCompleteness] import a [file a.py] import b x: b.C [file b.py] from c import * class C: pass [file c.py] import a from b import C [targets c, b, a, c, b, __main__] [case testNewAnalyzerImportOverExistingInCycle] import a [file a.py] C = 1 from b import C # E: Incompatible import of "C" (imported name has type "Type[C]", local name has type "int") [file b.py] import a class C(B): ... class B: ... [case testNewAnalyzerImportOverExistingInCycleStar1] import a [file a.py] C = 1 MYPY = False if MYPY: # Tweak processing ordre from b import * # E: Incompatible import of "C" (imported name has type "Type[C]", local name has type "int") [file b.py] import a class C(B): ... class B: ... [case testNewAnalyzerImportOverExistingInCycleStar2] import a [file a.py] C = 1 from b import * # E: Incompatible import of "C" (imported name has type "Type[C]", local name has type "int") [file b.py] MYPY = False if MYPY: # Tweak processing order import a class C(B): ... class B: ... 
[case testNewAnalyzerIncompleteFixture] from typing import Tuple x: Tuple[int] # E: Name 'tuple' is not defined [builtins fixtures/complex.pyi] [case testNewAnalyzerMetaclass1] class A(metaclass=B): pass class B(type): def f(cls) -> int: return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' [case testNewAnalyzerMetaclass2] reveal_type(A.f()) # N: Revealed type is 'builtins.int' class A(metaclass=B): pass class AA(metaclass=C): # E: Metaclasses not inheriting from 'type' are not supported pass class B(type): def f(cls) -> int: return 0 class C: pass [case testNewAnalyzerMetaclassPlaceholder] class B(C): pass class A(metaclass=B): pass class C(type): def f(cls) -> int: return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' [case testNewAnalyzerMetaclassSix1] import six class A(six.with_metaclass(B)): pass class B(type): def f(cls) -> int: return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' [case testNewAnalyzerMetaclassSix2] import six @six.add_metaclass(B) class A: pass class B(type): def f(cls) -> int: return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' [case testNewAnalyzerMetaclassSix3] import six class A(six.with_metaclass(B, Defer)): pass class B(type): def f(cls) -> int: return 0 class Defer: x: str reveal_type(A.f()) # N: Revealed type is 'builtins.int' reveal_type(A.x) # N: Revealed type is 'builtins.str' [case testNewAnalyzerMetaclassSix4] import six class B(type): def f(cls) -> int: return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' reveal_type(A.x) # N: Revealed type is 'builtins.str' class A(six.with_metaclass(B, Defer)): pass class Defer: x: str [case testNewAnalyzerMetaclassFuture1] import future.utils class A(future.utils.with_metaclass(B)): pass class B(type): def f(cls) -> int: return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' [case testNewAnalyzerMetaclassFuture3] import future.utils class A(future.utils.with_metaclass(B, Defer)): pass class B(type): def f(cls) -> int: 
return 0 class Defer: x: str reveal_type(A.f()) # N: Revealed type is 'builtins.int' reveal_type(A.x) # N: Revealed type is 'builtins.str' [case testNewAnalyzerMetaclassFuture4] import future.utils class B(type): def f(cls) -> int: return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' reveal_type(A.x) # N: Revealed type is 'builtins.str' class A(future.utils.with_metaclass(B, Defer)): pass class Defer: x: str [case testNewAnalyzerMetaclass1_python2] class A: __metaclass__ = B reveal_type(A.f()) # N: Revealed type is 'builtins.int' class B(type): def f(cls): # type: () -> int return 0 [case testNewAnalyzerMetaclass2_python2] reveal_type(A.f()) # N: Revealed type is 'builtins.int' class A: __metaclass__ = B class AA: __metaclass__ = C # E: Metaclasses not inheriting from 'type' are not supported class B(type): def f(cls): # type: () -> int return 0 class C: pass [case testNewAnalyzerFinalDefiningModuleVar] from typing import Final x: Final = C() y: Final[C] = D() bad: Final[D] = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D") reveal_type(x) # N: Revealed type is '__main__.C' reveal_type(y) # N: Revealed type is '__main__.C' class D(C): ... class C: ... [case testNewAnalyzerFinalDefiningInstanceVar] from typing import Final class C: def __init__(self, x: D) -> None: self.x: Final = x self.y: Final[C] = E(D()) reveal_type(C(D()).x) # N: Revealed type is '__main__.D' reveal_type(C(D()).y) # N: Revealed type is '__main__.C' class D: ... class E(C): ... [case testNewAnalyzerFinalReassignModuleVar] from typing import Final x: Final = A() x = A() # E: Cannot assign to final name "x" x2: Final = A() def f2() -> None: global x2 def f() -> None: g() x2 = A() # E: Cannot assign to final name "x2" def g() -> None: f() class A: ... [case testNewAnalyzerFinalReassignModuleReexport] import a [file a.py] from b import ID, A class C(A): ... 
ID = C() # E: Cannot assign to final name "ID" [file b.py] from typing import Final from a import C class A: x: C ID: Final = A() [case testNewAnalyzerFinalOverrideInSubclass] from typing import Final class B: def __init__(self, x: int) -> None: self.x: Final = x class C(B): x = 1 # E: Cannot assign to final name "x" [case testNewAnalyzerAssignmentAfterStarImport] import a [file a.py] from b import * x = 1 def f(): ... [file b.py] from a import f x: int [case testNewAnalyzerClassLevelImport] # flags: --ignore-missing-imports class Test: import a def __init__(self) -> None: some_module = self.a [case testNewAnalyzerAliasToNotReadyClass] import a [file a.py] from b import B x: A A = B [file b.py] from typing import List from a import x class B(List[B]): pass reveal_type(x[0][0]) # N: Revealed type is 'b.B*' [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyClass2] from typing import List x: A class A(List[B]): pass B = A reveal_type(x[0][0]) # N: Revealed type is '__main__.A*' [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyClass3] from typing import List x: B B = A A = C class C(List[B]): pass reveal_type(x[0][0]) # N: Revealed type is '__main__.C*' [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyNestedClass] import a [file a.py] from b import Out x: A A = Out.B [file b.py] from typing import List from a import x class Out: class B(List[B]): pass reveal_type(x[0][0]) # N: Revealed type is 'b.Out.B*' [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyNestedClass2] from typing import List x: Out.A class Out: class A(List[B]): pass B = Out.A reveal_type(x[0][0]) # N: Revealed type is '__main__.Out.A*' [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyClassGeneric] import a [file a.py] from typing import Tuple from b import B, T x: A[int] A = B[Tuple[T, T]] [file b.py] from typing import List, Generic, TypeVar from a import x class B(List[B], Generic[T]): pass T = TypeVar('T') reveal_type(x) # 
N: Revealed type is 'b.B[Tuple[builtins.int, builtins.int]]' [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyClassInGeneric] import a [file a.py] from typing import Tuple from b import B x: A A = Tuple[B, B] [file b.py] from typing import List from a import x class B(List[B]): pass reveal_type(x) # N: Revealed type is 'Tuple[b.B, b.B]' [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyClassDoubleGeneric] from typing import List, TypeVar, Union T = TypeVar('T') x: B[int] B = A[List[T]] A = Union[int, T] class C(List[B[int]]): pass reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.list[builtins.int]]' reveal_type(y[0]) # N: Revealed type is 'Union[builtins.int, builtins.list[builtins.int]]' y: C [builtins fixtures/list.pyi] [case testNewAnalyzerForwardAliasFromUnion] from typing import Union, List A = Union['B', 'C'] class D: x: List[A] def test(self) -> None: reveal_type(self.x[0].y) # N: Revealed type is 'builtins.int' class B: y: int class C: y: int [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyTwoDeferrals] from typing import List x: B B = List[C] A = C class C(List[A]): pass reveal_type(x) # N: Revealed type is 'builtins.list[__main__.C]' reveal_type(x[0][0]) # N: Revealed type is '__main__.C*' [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBase] from typing import List x: B B = List[C] class C(B): pass reveal_type(x) reveal_type(x[0][0]) [builtins fixtures/list.pyi] [out] main:3: error: Cannot resolve name "B" (possible cyclic definition) main:4: error: Cannot resolve name "B" (possible cyclic definition) main:4: error: Cannot resolve name "C" (possible cyclic definition) main:7: note: Revealed type is 'Any' main:8: note: Revealed type is 'Any' [case testNewAnalyzerAliasToNotReadyTwoDeferralsFunction] import a [file a.py] from typing import List from b import D def f(x: B) -> List[B]: ... B = List[C] A = C class C(List[A]): pass [file b.py] from a import f class D: ... 
reveal_type(f) # N: Revealed type is 'def (x: builtins.list[a.C]) -> builtins.list[builtins.list[a.C]]' [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBaseFunction] import a [file a.py] from typing import List from b import D def f(x: B) -> List[B]: ... B = List[C] # E class C(B): pass [file b.py] from a import f class D: ... reveal_type(f) # N [builtins fixtures/list.pyi] [out] tmp/b.py:3: note: Revealed type is 'def (x: builtins.list[Any]) -> builtins.list[builtins.list[Any]]' tmp/a.py:5: error: Cannot resolve name "B" (possible cyclic definition) tmp/a.py:5: error: Cannot resolve name "C" (possible cyclic definition) [case testNewAnalyzerAliasToNotReadyMixed] from typing import List, Union x: A A = Union[B, C] class B(List[A]): pass class C(List[A]): pass reveal_type(x) # N: Revealed type is 'Union[__main__.B, __main__.C]' reveal_type(x[0]) # N: Revealed type is 'Union[__main__.B, __main__.C]' [builtins fixtures/list.pyi] [case testNewAnalyzerTrickyAliasInFuncDef] import a [file a.py] from b import B def func() -> B: ... reveal_type(func()) # N: Revealed type is 'builtins.list[Tuple[b.C, b.C]]' [file b.py] from typing import List, Tuple from a import func B = List[Tuple[C, C]] class C(A): ... class A: ... 
[builtins fixtures/list.pyi] [case testNewAnalyzerListComprehension] from typing import List a: List[A] a = [x for x in a] b: List[B] = [x for x in a] # E: List comprehension has incompatible type List[A]; expected List[B] class A: pass class B: pass [builtins fixtures/for.pyi] [case testNewAnalyzerDictionaryComprehension] from typing import Dict, List, Tuple abd: Dict[A, B] abl: List[Tuple[A, B]] abd = {a: b for a, b in abl} x: Dict[B, A] = {a: b for a, b in abl} # E: Key expression in dictionary comprehension has incompatible type "A"; expected type "B" \ # E: Value expression in dictionary comprehension has incompatible type "B"; expected type "A" y: A = {a: b for a, b in abl} # E: Incompatible types in assignment (expression has type "Dict[A, B]", variable has type "A") class A: pass class B: pass [builtins fixtures/dict.pyi] [case testNewAnalyzerTypeArgBoundCheck] from typing import TypeVar, Generic class F(E): pass class E: pass T = TypeVar('T', bound=E) class C(Generic[T]): pass class D(B): pass x: C[D] # E: Type argument "__main__.D" of "C" must be a subtype of "__main__.E" y: C[F] class B: pass [case testNewAnalyzerTypeArgValueRestriction] from typing import TypeVar, Generic class F(E): pass class E: pass T = TypeVar('T', E, str) class C(Generic[T]): pass class D(B): pass x: C[D] # E: Value of type variable "T" of "C" cannot be "D" y: C[E] z: C[str] class B: pass [case testNewAnalyzerTypeArgBoundCheckWithContext] # flags: --show-error-context import a [file a.py] from typing import TypeVar, Generic T = TypeVar('T', bound=int) class C(Generic[T]): pass def f(x: C[str]) -> None: # E y: C[str] # E class A(C[str]): # E z: C[str] # E def g(self, x: C[str]) -> None: # E a: C[str] # E [out] main:2: note: In module imported here: tmp/a.py: note: In function "f": tmp/a.py:6: error: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" tmp/a.py:7: error: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" tmp/a.py: note: In 
class "A": tmp/a.py:8: error: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" tmp/a.py:9: error: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" tmp/a.py: note: In member "g" of class "A": tmp/a.py:10: error: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" tmp/a.py:11: error: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" [case testNewAnalyzerTypeArgBoundCheckDifferentNodes] from typing import TypeVar, Generic, NamedTuple, NewType, Union, Any, cast, overload from mypy_extensions import TypedDict T = TypeVar('T', bound=int) class C(Generic[T]): pass class C2(Generic[T]): pass A = C[str] # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" \ # E: Value of type variable "T" of "C" cannot be "str" B = Union[C[str], int] # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" S = TypeVar('S', bound=C[str]) # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" U = TypeVar('U', C[str], str) # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" N = NamedTuple('N', [ ('x', C[str])]) # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" class N2(NamedTuple): x: C[str] # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" class TD(TypedDict): x: C[str] # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" class TD2(TD): y: C2[str] # E: Type argument "builtins.str" of "C2" must be a subtype of "builtins.int" NT = NewType('NT', C[str]) # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" class D( C[str]): # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" pass TD3 = TypedDict('TD3', {'x': C[str]}) # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" a: Any for i in a: # type: C[str] # E: Type argument "builtins.str" of "C" must be a subtype of 
"builtins.int" pass with a as w: # type: C[str] # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" pass cast(C[str], a) # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" C[str]() # E: Value of type variable "T" of "C" cannot be "str" def f(s: S, y: U) -> None: pass # No error here @overload def g(x: C[str]) -> int: ... # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" @overload def g(x: int) -> int: ... def g(x: Union[C[str], int]) -> int: # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" y: C[object] # E: Type argument "builtins.object" of "C" must be a subtype of "builtins.int" return 0 [case testNewAnalyzerTypeArgBoundCheckWithStrictOptional] # flags: --config-file tmp/mypy.ini import a [file b.py] from typing import TypeVar, Generic x: C[None] y: C[str] # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" z: C[int] T = TypeVar('T', bound=int) class C(Generic[T]): pass [file a.py] from b import C x: C[None] # E: Type argument "None" of "C" must be a subtype of "builtins.int" y: C[str] # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" z: C[int] [file mypy.ini] \[mypy-a] strict_optional = True \[mypy-b] strict_optional = False [case testNewAnalyzerProperty] class A: @property def x(self) -> B: return 0 # E: Incompatible return value type (got "int", expected "B") @property def y(self) -> B: pass @y.setter def y(self, x: B) -> None: pass class B: pass a = A() reveal_type(a.x) # N: Revealed type is '__main__.B' a.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "B") [builtins fixtures/property.pyi] [case testNewAnalyzerAliasesFixedFew] from typing import List, Generic, TypeVar def func(x: List[C[T]]) -> T: ... x: A A = List[C] reveal_type(x) # N: Revealed type is 'builtins.list[__main__.C[Any]]' reveal_type(func(x)) # N: Revealed type is 'Any' class C(Generic[T]): ... 
T = TypeVar('T') [builtins fixtures/list.pyi] [case testNewAnalyzerAliasesFixedMany] from typing import List, Generic, TypeVar def func(x: List[C[T]]) -> T: ... x: A A = List[C[int, str]] # E: "C" expects 1 type argument, but 2 given reveal_type(x) # N: Revealed type is 'builtins.list[__main__.C[Any]]' reveal_type(func(x)) # N: Revealed type is 'Any' class C(Generic[T]): ... T = TypeVar('T') [builtins fixtures/list.pyi] [case testNewAnalyzerBuiltinAliasesFixed] from typing import List, Optional x: Optional[List] = None y: List[str] reveal_type(x) # N: Revealed type is 'Union[builtins.list[Any], None]' x = ['a', 'b'] reveal_type(x) # N: Revealed type is 'builtins.list[Any]' x.extend(y) [builtins fixtures/list.pyi] [case testNewAnalyzerImportPriosB] import b [file a.py] from b import x reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' [file b.py] import a x = (1, 2) [case testNewAnalyzerImportPriosA] import a [file a.py] from b import x reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' [file b.py] import a x = (1, 2) [case testNewAnalyzerConditionalFunc] if int(): def f(x: int) -> None: pass def g(x: int) -> None: pass elif bool(): def f(x: int) -> None: 1() # E: "int" not callable def g(x: str) -> None: # E: All conditional function variants must have identical signatures pass else: def f(x: int) -> None: ''() # E: "str" not callable reveal_type(g) # N: Revealed type is 'def (x: builtins.int)' [case testNewAnalyzerConditionalFuncDefer] if int(): def f(x: A) -> None: pass def g(x: A) -> None: pass else: def f(x: A) -> None: 1() # E: "int" not callable def g(x: str) -> None: # E: All conditional function variants must have identical signatures pass reveal_type(g) # N: Revealed type is 'def (x: __main__.A)' class A: pass [case testNewAnalyzerConditionalDecoratedFunc] from typing import Callable def dec(f: Callable[[int], None]) -> Callable[[str], None]: pass if int(): from m import f else: @dec def f(x: int) -> None: 1() # 
E: "int" not callable reveal_type(f) # N: Revealed type is 'def (x: builtins.str)' [file m.py] def f(x: str) -> None: pass [case testNewAnalyzerConditionallyDefineFuncOverVar] from typing import Callable if int(): f: Callable[[str], None] else: def f(x: str) -> None: ... reveal_type(f) # N: Revealed type is 'def (builtins.str)' [case testNewAnalyzerConditionallyDefineFuncOverClass] class C: 1() # E: "int" not callable def C() -> None: # E: Name 'C' already defined on line 1 ''() # E: "str" not callable [case testNewAnalyzerTupleIteration] from typing import Union, Tuple, NamedTuple class T(Tuple[B, C]): pass class A: pass class B(A): pass class C(A): pass class NTInt(NamedTuple): x: int y: int class NTStr(NamedTuple): x: str y: str t1: T reveal_type(t1.__iter__) # N: Revealed type is 'def () -> typing.Iterator[__main__.A*]' t2: NTInt reveal_type(t2.__iter__) # N: Revealed type is 'def () -> typing.Iterator[builtins.int*]' nt: Union[NTInt, NTStr] reveal_type(nt.__iter__) # N: Revealed type is 'Union[def () -> typing.Iterator[builtins.int*], def () -> typing.Iterator[builtins.str*]]' for nx in nt: reveal_type(nx) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' t: Union[Tuple[int, int], Tuple[str, str]] for x in t: reveal_type(x) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' [builtins fixtures/for.pyi] [out] [case testNewAnalyzerFallbackUpperBoundCheckAndFallbacks] from typing import TypeVar, Generic, Tuple class A: pass class B: pass class C(B): pass S = TypeVar('S', bound=Tuple[G[A], ...]) class GG(Generic[S]): pass g: GG[Tuple[G[B], G[C]]] \ # E: Type argument "Tuple[__main__.G[__main__.B], __main__.G[__main__.C]]" of "GG" must be a subtype of "builtins.tuple[__main__.G[__main__.A]]" \ # E: Type argument "__main__.B" of "G" must be a subtype of "__main__.A" \ # E: Type argument "__main__.C" of "G" must be a subtype of "__main__.A" T = TypeVar('T', bound=A, covariant=True) class G(Generic[T]): pass t: Tuple[G[B], G[C]] # E: Type argument 
"__main__.B" of "G" must be a subtype of "__main__.A" \ # E: Type argument "__main__.C" of "G" must be a subtype of "__main__.A" reveal_type(t.__iter__) # N: Revealed type is 'def () -> typing.Iterator[__main__.G*[__main__.B]]' [builtins fixtures/tuple.pyi] [case testNewAnalyzerClassKeywordsForward] class C(B, other=A): ... class B: ... class A: ... [case testNewAnalyzerClassKeywordsCyclic] from typing import List class C(List[C], other=C): ... [builtins fixtures/list.pyi] [case testNewAnalyzerClassKeywordsError] class C(other=asdf): ... # E: Name 'asdf' is not defined [case testNewAnalyzerMissingImport] # flags: --ignore-missing-imports import non_existing x: C class C: ... [case testNewAnalyzerMissingImportFrom] # flags: --ignore-missing-imports from non_existing import stuff x: C class C: ... [case testNewAnalyzerFollowSkip] # flags: --follow-imports=skip from other import y x: C class C: ... [file other.py] y = 1 [case testNewAnalyzerMissingImportErrors] # flags: --ignore-missing-imports from non_existing import stuff, other_stuff stuff = 1 # OK other_stuff: int = 1 # E: Name 'other_stuff' already defined (possibly by an import) x: C class C: ... [case testNewAnalyzerMissingImportErrorsRedefinition] # flags: --ignore-missing-imports class Other: ... from non_existing import Other # E: Name 'Other' already defined on line 3 from non_existing import Cls class Cls: ... # E: Name 'Cls' already defined (possibly by an import) x: C class C: ... 
[case testNewAnalyzerTupleInit] from typing import Tuple c: C class C(Tuple[int, str]): def __init__(self) -> None: pass [case testNewAnalyzerNotAnAlias] class Meta(type): x = int() y = C.x reveal_type(y) # N: Revealed type is 'builtins.int' class C(metaclass=Meta): pass [case testNewAnalyzerFunctionError] def f(x: asdf) -> None: # E: Name 'asdf' is not defined pass [case testNewAnalyzerEnumRedefinition] from enum import Enum A = Enum('A', ['x', 'y']) A = Enum('A', ['z', 't']) # E: Name 'A' already defined on line 3 [case testNewAnalyzerNewTypeRedefinition] from typing import NewType A = NewType('A', int) A = NewType('A', str) # E: Cannot redefine 'A' as a NewType \ # E: Name 'A' already defined on line 3 [case testNewAnalyzerNewTypeForwardClass] from typing import NewType, List x: C reveal_type(x[0]) # N: Revealed type is '__main__.C*' C = NewType('C', B) class B(List[C]): pass [builtins fixtures/list.pyi] [case testNewAnalyzerNewTypeForwardClassAlias] from typing import NewType, List x: D reveal_type(x[0]) # N: Revealed type is '__main__.C*' D = C C = NewType('C', B) class B(List[D]): pass [builtins fixtures/list.pyi] [case testNewAnalyzerNewTypeForwardClassAliasReversed] from typing import NewType, List x: D reveal_type(x[0][0]) # N: Revealed type is '__main__.C*' D = C C = NewType('C', List[B]) class B(List[C]): pass [builtins fixtures/list.pyi] [case testNewAnalyzerNewTypeForwardClassAliasDirect] from typing import NewType, List x: D reveal_type(x[0][0]) D = List[C] C = NewType('C', B) class B(D): pass [builtins fixtures/list.pyi] [out] main:3: error: Cannot resolve name "D" (possible cyclic definition) main:4: note: Revealed type is 'Any' main:6: error: Cannot resolve name "D" (possible cyclic definition) main:6: error: Cannot resolve name "C" (possible cyclic definition) main:7: error: Argument 2 to NewType(...) must be a valid type main:7: error: Cannot resolve name "B" (possible cyclic definition) -- Copied from check-classes.test (tricky corner cases). 
[case testNewAnalyzerNoCrashForwardRefToBrokenDoubleNewTypeClass] from typing import Any, Dict, List, NewType Foo = NewType('NotFoo', int) # type: ignore Foos = NewType('Foos', List[Foo]) x: C class C: def frob(self, foos: Dict[Any, Foos]) -> None: foo = foos.get(1) dict(foo) [builtins fixtures/dict.pyi] [case testNewAnalyzerForwardTypeAliasInBase] from typing import List, Generic, TypeVar, NamedTuple T = TypeVar('T') class C(A, B): # E: Cannot resolve name "A" (possible cyclic definition) pass class G(Generic[T]): pass A = G[C] # E: Cannot resolve name "A" (possible cyclic definition) class B(NamedTuple): x: int y: C reveal_type(y.x) # N: Revealed type is 'builtins.int' reveal_type(y[0]) # N: Revealed type is 'builtins.int' x: A reveal_type(x) # N: Revealed type is '__main__.G[Tuple[builtins.int, fallback=__main__.C]]' [builtins fixtures/list.pyi] [case testNewAnalyzerDuplicateTypeVar] from typing import TypeVar, Generic, Any T = TypeVar('T', bound=B[Any]) # The "int" error is because of typing fixture. T = TypeVar('T', bound=C) # E: Cannot redefine 'T' as a type variable \ # E: Invalid assignment target \ # E: "int" not callable class B(Generic[T]): x: T class C: ... x: B[int] # E: Type argument "builtins.int" of "B" must be a subtype of "__main__.B[Any]" y: B[B[Any]] reveal_type(y.x) # N: Revealed type is '__main__.B*[Any]' [case testNewAnalyzerDuplicateTypeVarImportCycle] import a [file a.py] from typing import TypeVar, Any from b import B, C T = TypeVar('T', bound=B[Any]) T = TypeVar('T', bound=C) [file b.py] from typing import Generic, Any from a import T class B(Generic[T]): x: T class C: ... 
x: B[int] y: B[B[Any]] reveal_type(y.x) [out] tmp/b.py:8: error: Type argument "builtins.int" of "B" must be a subtype of "b.B[Any]" tmp/b.py:10: note: Revealed type is 'b.B*[Any]' tmp/a.py:5: error: Cannot redefine 'T' as a type variable tmp/a.py:5: error: Invalid assignment target tmp/a.py:5: error: "int" not callable [case testNewAnalyzerDuplicateTypeVarImportCycleWithAliases] import a [file a.py] from typing import TypeVar, Any from b import BA, C T = TypeVar('T', bound=BAA[Any]) T = TypeVar('T', bound=C) BAA = BA [file b.py] from typing import Generic, Any from a import T BA = B class B(Generic[T]): x: T class C: ... x: B[int] y: B[B[Any]] reveal_type(y.x) [out] tmp/b.py:9: error: Type argument "builtins.int" of "B" must be a subtype of "b.B[Any]" tmp/b.py:11: note: Revealed type is 'b.B*[Any]' tmp/a.py:5: error: Cannot redefine 'T' as a type variable tmp/a.py:5: error: Invalid assignment target [case testNewAnalyzerTypeVarBoundInCycle] import factory, box [file factory.py] from typing import Generic, Type from box import BoxT class Factory(Generic[BoxT]): value: int def create(self, boxClass: Type[BoxT]) -> BoxT: reveal_type(boxClass.create(self)) # N: Revealed type is 'BoxT`1' return boxClass.create(self) [file box.py] from typing import TYPE_CHECKING, Type, TypeVar if TYPE_CHECKING: from factory import Factory BoxT = TypeVar('BoxT', bound='Box') class Box: @classmethod def create(cls: Type[BoxT], f: Factory) -> BoxT: return cls(f.value) def __init__(self, value: int) -> None: ... [builtins fixtures/classmethod.pyi] [case testNewAnalyzerCastForward1] from typing import cast x = cast('C', None) class A: def foo(self) -> None: self.x = cast('C', None) reveal_type(x) # N: Revealed type is '__main__.C' reveal_type(A().x) # N: Revealed type is '__main__.C' class C(A): ... 
[case testNewAnalyzerCastForward2] from typing import cast x = cast('C', None) reveal_type(x) # N: Revealed type is 'builtins.int' C = int [case testNewAnalyzerCastForward2] from typing import cast, NamedTuple x = cast('C', None) reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.C]' reveal_type(x.x) # N: Revealed type is 'builtins.int' C = NamedTuple('C', [('x', int)]) [case testNewAnalyzerApplicationForward1] from typing import Generic, TypeVar x = C[int]() reveal_type(x) # N: Revealed type is '__main__.C[builtins.int*]' T = TypeVar('T') class C(Generic[T]): ... [case testNewAnalyzerApplicationForward2] from typing import Generic, TypeVar T = TypeVar('T') class C(Generic[T]): ... x = C['A']() reveal_type(x) # N: Revealed type is '__main__.C[__main__.A*]' class A: ... [case testNewAnalyzerApplicationForward3] from typing import Generic, TypeVar x = C[A]() reveal_type(x) # N: Revealed type is '__main__.C[__main__.A*]' T = TypeVar('T') class C(Generic[T]): ... class A: ... [case testNewAnalyzerApplicationForward4] from typing import Generic, TypeVar x = C[A]() # E: Value of type variable "T" of "C" cannot be "A" reveal_type(x) # N: Revealed type is '__main__.C[__main__.A*]' T = TypeVar('T', bound='D') class C(Generic[T]): ... class A: ... class D: ... [case testNewAnalyzerAddedSubStarImport_incremental] # TODO: This can be removed once testAddedSubStarImport is enabled in check-incremental.test. # cmd: mypy -m a pack pack.mod b # cmd2: mypy -m other [file a.py] from pack import * [file pack/__init__.py] [file pack/mod.py] [file b.py] import pack.mod [file other.py] import a [out] [out2] [case testNewAnalyzerModuleGetattrSerialize_incremental] import a [file a.py] import p [file a.py.2] import p reveal_type(p.y) [file p.pyi] from pp import x as y [file pp.pyi] def __getattr__(attr): ... 
[out2] tmp/a.py:2: note: Revealed type is 'Any' [case testNewAnanlyzerTrickyImportPackage] from lib import config import lib reveal_type(lib.config.x) # N: Revealed type is 'builtins.int' reveal_type(config.x) # N: Revealed type is 'builtins.int' [file lib/__init__.py] from lib.config import config [file lib/config.py] class Config: x: int config = Config() [builtins fixtures/module.pyi] [case testNewAnanlyzerTrickyImportPackageAlt] import lib.config import lib.config as tmp reveal_type(lib.config.x) # N: Revealed type is 'builtins.int' # TODO: this actually doesn't match runtime behavior, variable wins. tmp.x # E: Module has no attribute "x" [file lib/__init__.py] from lib.config import config [file lib/config.py] class Config: x: int config = Config() [builtins fixtures/module.pyi] [case testNewAnanlyzerTrickyImportPackage_incremental] import a [file a.py] from lib import config import lib [file a.py.2] from lib import config import lib reveal_type(lib.config.x) reveal_type(config.x) [file lib/__init__.py] from lib.config import config [file lib/config.py] class Config: x: int config = Config() [builtins fixtures/module.pyi] [out2] tmp/a.py:4: note: Revealed type is 'builtins.int' tmp/a.py:5: note: Revealed type is 'builtins.int' [case testNewAnalyzerRedefineAsClass] from typing import Any from other import C # type: ignore y = 'bad' class C: # E: Name 'C' already defined (possibly by an import) def meth(self, other: int) -> None: y() # E: "str" not callable [case testNewAnalyzerRedefineAsOverload] from typing import overload y = 'bad' if int(): def f(x: int) -> None: pass else: @overload # E: Name 'f' already defined on line 6 def f(x: int) -> None: ... @overload def f(x: str) -> None: ... 
def f(x) -> None: y() # E: "str" not callable [case testNewAnalyzerFirstAliasTargetWins] if int(): Alias = DesiredTarget else: class DummyTarget: pass Alias = DummyTarget # type: ignore x: Alias reveal_type(x.attr) # N: Revealed type is 'builtins.int' class DesiredTarget: attr: int [case testNewAnalyzerFirstVarDefinitionWins] x = y x = 1 # We want to check that the first definition creates the variable. def x() -> None: ... # E: Name 'x' already defined on line 1 y = 2 [case testNewAnalyzerImportStarSpecialCase] import unittest [file unittest/__init__.pyi] from unittest.suite import * def load_tests() -> TestSuite: ... [file unittest/suite.pyi] from typing import Union # Iterable not imported import unittest.case _TestType = Union[unittest.case.TestCase] class BaseTestSuite(Iterable[_TestType]): ... class TestSuite(BaseTestSuite): ... [file unittest/case.pyi] class TestCase: ... [out] tmp/unittest/suite.pyi:6: error: Name 'Iterable' is not defined tmp/unittest/suite.pyi:6: note: Did you forget to import it from "typing"? (Suggestion: "from typing import Iterable") [case testNewAnalyzerNewTypeSpecialCase] from typing import NewType from typing_extensions import Final, Literal X = NewType('X', int) var1: Final = 1 def force1(x: Literal[1]) -> None: pass force1(reveal_type(var1)) # N: Revealed type is 'Literal[1]' [case testNewAnalyzerReportLoopInMRO] class A(A): ... # E: Cannot resolve name "A" (possible cyclic definition) [out] [case testNewSemanticAnalyzerUnimportedSpecialCase] # flags: --ignore-missing-imports import other import p.u [file p/__init__.py] [file p/u.pyi] from . 
import c x: c.C [file other.py] from p.c import B [out] [case testNewSemanticAnalyzerQualifiedFunctionAsType] import m x: m.C.a.b # E: Name 'm.C.a.b' is not defined [file m.py] def C(): pass [case testNewSemanticAnalyzerModulePrivateRefInMiddleOfQualified] import m x: m.n.C # E: Name 'm.n.C' is not defined reveal_type(x) # N: Revealed type is 'Any' [file m.pyi] import n [file n.pyi] class C: pass [case testNewAnalyzerModuleGetAttrInPython36] # flags: --python-version 3.6 import m import n x: m.n.C # E: Name 'm.n.C' is not defined y: n.D # E: Name 'n.D' is not defined [file m.py] import n [file n.py] def __getattr__(x): pass [case testNewAnalyzerModuleGetAttrInPython37] # flags: --python-version 3.7 import m import n x: m.n.C y: n.D [file m.py] import n [file n.py] def __getattr__(x): pass [case testNewAnalyzerReportLoopInMRO2] def f() -> None: class A(A): ... # E: Cannot resolve name "A" (possible cyclic definition) [case testNewAnalyzerUnsupportedBaseClassInsideFunction] class C: class E: pass def f(self) -> None: # TODO: Error message could be better class D(self.E): # E: Name 'self.E' is not defined pass [case testNewAnalyzerShadowOuterDefinitionBasedOnOrderSinglePass] # Only one semantic analysis pass class X: pass class C: X = X reveal_type(X) # N: Revealed type is 'def () -> __main__.X' reveal_type(C.X) # N: Revealed type is 'def () -> __main__.X' [case testNewAnalyzerShadowOuterDefinitionBasedOnOrderTwoPasses] c: C # Force second semantic analysis pass class X: pass class C: X = X reveal_type(X) # N: Revealed type is 'def () -> __main__.X' reveal_type(C.X) # N: Revealed type is 'def () -> __main__.X' [case testNewAnalyzerAnnotationConflictsWithAttributeSinglePass] class C: def x(self) -> int: return 0 def __init__(self) -> None: self.int = '' def y(self) -> int: return 0 z: str def str(self) -> str: return 0 # E: Incompatible return value type (got "int", expected "str") zz: str # E: Function "__main__.C.str" is not valid as a type \ # N: Perhaps you need 
"Callable[...]" or a callback protocol? reveal_type(C().x()) # N: Revealed type is 'builtins.int' reveal_type(C().y()) # N: Revealed type is 'builtins.int' reveal_type(C().z) # N: Revealed type is 'builtins.str' reveal_type(C().str()) # N: Revealed type is 'builtins.str' [case testNewAnalyzerAnnotationConflictsWithAttributeTwoPasses] c: C # Force second semantic analysis pass class C: def x(self) -> int: return 0 def __init__(self) -> None: self.int = '' def y(self) -> int: return 0 z: str def str(self) -> str: return 0 # E: Incompatible return value type (got "int", expected "str") zz: str # E: Function "__main__.C.str" is not valid as a type \ # N: Perhaps you need "Callable[...]" or a callback protocol? reveal_type(C().x()) # N: Revealed type is 'builtins.int' reveal_type(C().y()) # N: Revealed type is 'builtins.int' reveal_type(C().z) # N: Revealed type is 'builtins.str' reveal_type(C().str()) # N: Revealed type is 'builtins.str' [case testNewAnalyzerNameConflictsAndMultiLineDefinition] c: C # Force second semantic analysis pass class X: pass class C: X = ( X) def str(self ) -> str: return 0 # E: Incompatible return value type (got "int", expected "str") reveal_type(C.X) # E: # N: Revealed type is 'def () -> __main__.X' reveal_type(C().str()) # N: Revealed type is 'builtins.str' [case testNewAnalyzerNameNotDefinedYetInClassBody] class C: X = Y # E: Name 'Y' is not defined Y = 1 f = g # E: Name 'g' is not defined def g(self) -> None: pass reveal_type(C.X) # N: Revealed type is 'Any' [case testNewAnalyzerImportedNameUsedInClassBody] import m [file m.py] class C: from mm import f @dec(f) def m(self): pass def dec(f): pass [file mm.py] # 1 padding to increase line number of 'f' # 2 padding # 3 padding # 4 padding # 5 padding # 6 padding def f(): pass [case testNewAnalyzerImportedNameUsedInClassBody2] import m [file m/__init__.py] class C: from m.m import f @dec(f) def m(self): pass def dec(f): pass [file m/m.py] # 1 padding to increase line number of 'f' # 2 
padding # 3 padding # 4 padding # 5 padding # 6 padding def f(): pass [case testNewAnalyzerOverrideClassWithTypeAlias] from typing import Generic, TypeVar T = TypeVar('T') class C(Generic[T]): pass # TODO: Error message is confusing C = C[int] # E: Cannot assign to a type \ # E: Incompatible types in assignment (expression has type "Type[C[Any]]", variable has type "Type[C[Any]]") x: C reveal_type(x) # N: Revealed type is '__main__.C[Any]' [out] [out2] [case testNewAnalyzerClassVariableOrdering] def foo(x: str) -> None: pass class Something: def run(self) -> None: foo(self.IDS[0]) # E: Argument 1 to "foo" has incompatible type "int"; expected "str" IDS = [87] [builtins fixtures/list.pyi] [case testNewAnalyzerPlaceholderFromOuterScope] import b [file a.py] import b class A(B): ... class B: ... [file b.py] from a import A class C: A = A # Initially rvalue will be a placeholder reveal_type(C.A) # N: Revealed type is 'def () -> a.A' [case testNewAnalyzerFinalLiteralInferredAsLiteralWithDeferral] from typing_extensions import Final, Literal defer: Yes var: Final = 42 def force(x: Literal[42]) -> None: pass force(reveal_type(var)) # N: Revealed type is 'Literal[42]' class Yes: ... [case testNewAnalyzerImportCycleWithIgnoreMissingImports] # flags: --ignore-missing-imports import p reveal_type(p.get) # N: Revealed type is 'def () -> builtins.int' [file p/__init__.pyi] from . import api get = api.get [file p/api.pyi] import p def get() -> int: ... [case testUseObsoleteNameForTypeVar3] import typing t = typing.typevar('t') # E: Module has no attribute "typevar" [builtins fixtures/module.pyi] [case testNewAnalyzerImportFromTopLevelFunction] import a.b # This works at runtime reveal_type(a.b) # N [file a/__init__.py] from .b import B from . import b as c def b() -> None: pass reveal_type(b) # N reveal_type(c.B()) # N x: Forward class Forward: ... [file a/b.py] class B: ... 
[builtins fixtures/module.pyi] [out] tmp/a/__init__.py:4: note: Revealed type is 'def ()' tmp/a/__init__.py:5: note: Revealed type is 'a.b.B' main:2: note: Revealed type is 'def ()' [case testNewAnalyzerImportFromTopLevelAlias] import a.b # This works at runtime reveal_type(a.b) # N [file a/__init__.py] from .b import B from . import b as c b = int y: b reveal_type(y) # N reveal_type(c.B) # N x: Forward class Forward: ... [file a/b.py] class B: ... [builtins fixtures/module.pyi] [out] tmp/a/__init__.py:5: note: Revealed type is 'builtins.int' tmp/a/__init__.py:6: note: Revealed type is 'def () -> a.b.B' main:2: note: Revealed type is 'def () -> builtins.int' [case testNewAnalyzerImportAmbiguousWithTopLevelFunction] import a.b # This works at runtime x: a.b.B # E reveal_type(a.b) # N [file a/__init__.py] import a.b import a.b as c def b() -> None: pass reveal_type(b) # N reveal_type(c.B()) # N x: Forward class Forward: ... [file a/b.py] class B: ... [builtins fixtures/module.pyi] [out] tmp/a/__init__.py:4: note: Revealed type is 'def ()' tmp/a/__init__.py:5: note: Revealed type is 'a.b.B' main:2: error: Name 'a.b.B' is not defined main:3: note: Revealed type is 'def ()' [case testNewAnalyzerConfusingImportConflictingNames] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy -m other a.b a [file a/__init__.py] [file a/b/__init__.py] import other import a.b.a import a.b.c [file other.py] from a.b.a import foo [builtins fixtures/module.pyi] [out] [case testNewAnalyzerNamedTupleMethod] from typing import NamedTuple g: N class N(NamedTuple): def f(self) -> None: b = ( a for a in [1] ) b [builtins fixtures/tuple.pyi] [case testWithMultipleTargetsDeferred] a: A class A: def __enter__(self) -> int: pass def __exit__(self, x, y, z): pass with A() as x, A() as y: # type: int, int pass [case testNewAnalyzerLessErrorsNeedAnnotation] from typing import TypeVar, Optional T = TypeVar('T') def f(x: Optional[T] = None) -> T: ... 
x = f() # E: Need type annotation for 'x' y = x def g() -> None: x = f() # E: Need type annotation for 'x' y = x [case testNewAnalyzerLessErrorsNeedAnnotationList] x = [] # type: ignore reveal_type(x) # N: Revealed type is 'builtins.list[Any]' def g() -> None: x = [] # type: ignore reveal_type(x) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [case testNewAnalyzerLessErrorsNeedAnnotationNested] from typing import TypeVar, Optional, Generic T = TypeVar('T') class G(Generic[T]): ... def f(x: Optional[T] = None) -> G[T]: ... x = f() # E: Need type annotation for 'x' y = x reveal_type(y) # N: Revealed type is '__main__.G[Any]' def g() -> None: x = f() # E: Need type annotation for 'x' y = x reveal_type(y) # N: Revealed type is '__main__.G[Any]' [case testNewAnalyzerRedefinedNonlocal] import typing def f(): bar = [] # type: typing.List[int] def foo(): nonlocal bar bar = [] # type: typing.List[int] def g() -> None: bar = [] # type: typing.List[int] def foo() -> None: nonlocal bar bar = [] # type: typing.List[int] # E: Name 'bar' already defined on line 11 [builtins fixtures/list.pyi] [case testNewAnalyzerMoreInvalidTypeVarArgumentsDeferred] from typing import TypeVar, Generic defer: Yes S = TypeVar('S', covariant=True, contravariant=True) # E: TypeVar cannot be both covariant and contravariant \ # E: "int" not callable class Yes: ... [builtins fixtures/bool.pyi] [case testNewAnalyzerDisallowAnyGenericsMessages] # mypy: disallow-any-generics from a import B x: B [file a.py] from typing import TypeVar, List T = TypeVar('T') A = List[T] B = A [builtins fixtures/list.pyi] [case testNewAnalyzerVarTypeVarNoCrash] from typing import Callable, TypeVar FooT = TypeVar('FooT', bound='Foo') class Foo: ... 
f = lambda x: True # type: Callable[[FooT], bool] reveal_type(f) # N: Revealed type is 'def [FooT <: __main__.Foo] (FooT`-1) -> builtins.bool' [builtins fixtures/bool.pyi] [case testNewAnalyzerVarTypeVarNoCrashImportCycle] import a [file a.py] from b import B from typing import TypeVar FooT = TypeVar('FooT', bound='Foo') class Foo: ... [file b.py] from a import FooT from typing import Callable f = lambda x: True # type: Callable[[FooT], bool] reveal_type(f) # N: Revealed type is 'def [FooT <: a.Foo] (FooT`-1) -> builtins.bool' class B: ... [builtins fixtures/bool.pyi] [case testNewAnalyzerFuncTypeVarNoCrashImportCycle] import a [file a.py] from b import B from typing import TypeVar FooT = TypeVar('FooT', bound='Foo') class Foo: ... [file b.py] from a import FooT from typing import Callable def f(x: FooT) -> bool: ... reveal_type(f) # N: Revealed type is 'def [FooT <: a.Foo] (x: FooT`-1) -> builtins.bool' class B: ... [builtins fixtures/bool.pyi] [case testNewAnalyzerNoCrashOnStarInference] from typing import Tuple def f() -> None: t: Tuple[str, Tuple[str, str, str]] x, (y, *z) = t reveal_type(z) # N: Revealed type is 'builtins.list[builtins.str*]' [builtins fixtures/list.pyi] [case testNewAnalyzerIdentityAssignment1] from foo import * try: X = X except: class X: # E: Name 'X' already defined (possibly by an import) pass reveal_type(X()) # N: Revealed type is 'foo.X' [file foo.py] class X: pass [case testNewAnalyzerIdentityAssignment2] try: int = int reveal_type(int()) # N: Revealed type is 'builtins.int' except: class int: # E: Name 'int' already defined (possibly by an import) pass reveal_type(int()) # N: Revealed type is 'builtins.int' [case testNewAnalyzerIdentityAssignment3] forwardref: C try: int = int reveal_type(int()) # N: Revealed type is 'builtins.int' except: class int: # E: Name 'int' already defined (possibly by an import) pass reveal_type(int()) # N: Revealed type is 'builtins.int' class C: pass [case testNewAnalyzerIdentityAssignment4] try: C = C C 
except: class C: pass reveal_type(C()) # N: Revealed type is '__main__.C' [case testNewAnalyzerIdentityAssignment5] forwardref: D try: C = C C except: class C: pass class D: pass reveal_type(C()) # N: Revealed type is '__main__.C' [case testNewAnalyzerIdentityAssignment6] x: C class C: pass C = C reveal_type(C()) # N: Revealed type is '__main__.C' reveal_type(x) # N: Revealed type is '__main__.C' [case testNewAnalyzerIdentityAssignment7] C = C # E: Name 'C' is not defined reveal_type(C) # E: Name 'C' is not defined \ # N: Revealed type is 'Any' [case testNewAnalyzerIdentityAssignment8] from typing import Final x: Final = 0 x = x # E: Cannot assign to final name "x" [case testNewAnalyzerClassPropertiesInAllScopes] from abc import abstractmethod, ABCMeta class TopLevel(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass TopLevel() # E: Cannot instantiate abstract class 'TopLevel' with abstract attribute 'f' def func() -> None: class Function(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass Function() # E: Cannot instantiate abstract class 'Function' with abstract attribute 'f' class C: def meth(self) -> None: class Method(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass Method() # E: Cannot instantiate abstract class 'Method' with abstract attribute 'f' [case testModulesAndFuncsTargetsInCycle] import a [file a.py] import b defer: Yes def func() -> int: ... class Yes: ... [file b.py] import a def func() -> int: ... 
[targets b, a, a, b.func, a.func, __main__] [case testNewAnalyzerForwardReferenceInFunction] def f(x: 'A') -> 'A': return A() class A: pass [targets __main__, __main__.f] [case testNewAnalyzerSimpleImportStarNoDeferral] from m import * x: A f() [file m.py] class A: pass def f() -> None: pass [targets m, m.f, __main__] [case testNewAnalyzerNoCrashOnCustomProperty] # flags: --ignore-missing-imports from unimported import custom class User: first_name: str @custom def name(self) -> str: return self.first_name @name.setter # type: ignore def name(self, value: str) -> None: self.first_name = value def __init__(self, name: str) -> None: self.name = name # E: Cannot assign to a method \ # E: Incompatible types in assignment (expression has type "str", variable has type "Callable[..., Any]") mypy-0.761/test-data/unit/check-newsyntax.test0000644€tŠÔÚ€2›s®0000001075313576752246025607 0ustar jukkaDROPBOX\Domain Users00000000000000[case testNewSyntaxRequire36] # flags: --python-version 3.5 x: int = 5 # E: Variable annotation syntax is only supported in Python 3.6 and greater [out] [case testNewSyntaxSyntaxError] # flags: --python-version 3.6 x: int: int # E: invalid syntax [out] [case testNewSyntaxBasics] # flags: --python-version 3.6 x: int x = 5 y: int = 5 a: str a = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") b: str = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") zzz: int zzz: str # E: Name 'zzz' already defined on line 10 [out] [case testNewSyntaxWithDict] # flags: --python-version 3.6 from typing import Dict, Any d: Dict[int, str] = {} d[42] = 'ab' d[42] = 42 # E: Incompatible types in assignment (expression has type "int", target has type "str") d['ab'] = 'ab' # E: Invalid index type "str" for "Dict[int, str]"; expected type "int" [builtins fixtures/dict.pyi] [out] [case testNewSyntaxWithRevealType] # flags: --python-version 3.6 from typing import Dict def tst_local(dct: 
Dict[int, T]) -> Dict[T, int]: ret: Dict[T, int] = {} return ret reveal_type(tst_local({1: 'a'})) # N: Revealed type is 'builtins.dict[builtins.str*, builtins.int]' [builtins fixtures/dict.pyi] [out] [case testNewSyntaxWithInstanceVars] # flags: --python-version 3.6 class TstInstance: a: str def __init__(self) -> None: self.x: int TstInstance().x = 5 TstInstance().x = 'ab' # E: Incompatible types in assignment (expression has type "str", variable has type "int") TstInstance().a = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") TstInstance().a = 'ab' [out] [case testNewSyntaxWithClassVars] # flags: --strict-optional --python-version 3.6 class CCC: a: str = None # E: Incompatible types in assignment (expression has type "None", variable has type "str") [out] [case testNewSyntaxWithStrictOptional] # flags: --strict-optional --python-version 3.6 strict: int strict = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") strict2: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testNewSyntaxWithStrictOptionalFunctions] # flags: --strict-optional --python-version 3.6 def f() -> None: x: int if int(): x = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testNewSyntaxWithStrictOptionalClasses] # flags: --strict-optional --python-version 3.6 class C: def meth(self) -> None: x: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") self.x: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testNewSyntaxSpecialAssign] # flags: --python-version 3.6 class X: x: str x[0]: int x.x: int [out] main:4: error: Unexpected type declaration main:4: error: Unsupported target for indexed assignment main:5: error: Type cannot be declared in assignment to non-self 
attribute main:5: error: "str" has no attribute "x" [case testNewSyntaxAsyncComprehensionError] # flags: --python-version 3.5 async def f(): results = [i async for i in aiter() if i % 2] # E: Async comprehensions are only supported in Python 3.6 and greater [case testNewSyntaxFstringError] # flags: --python-version 3.5 f'' # E: Format strings are only supported in Python 3.6 and greater [case testNewSyntaxFStringBasics] # flags: --python-version 3.6 f'foobar' f'{"foobar"}' f'foo{"bar"}' f'.{1}.' f'{type(1)}' a: str a = f'foobar' a = f'{"foobar"}' [builtins fixtures/f_string.pyi] [case testNewSyntaxFStringExpressionsOk] # flags: --python-version 3.6 f'.{1 + 1}.' f'.{1 + 1}.{"foo" + "bar"}' [builtins fixtures/f_string.pyi] [case testNewSyntaxFStringExpressionsErrors] # flags: --python-version 3.6 f'{1 + ""}' f'.{1 + ""}' [builtins fixtures/f_string.pyi] [out] main:2: error: Unsupported operand types for + ("int" and "str") main:3: error: Unsupported operand types for + ("int" and "str") [case testNewSyntaxFStringParseFormatOptions] # flags: --python-version 3.6 value = 10.5142 width = 10 precision = 4 f'result: {value:{width}.{precision}}' [builtins fixtures/f_string.pyi] [case testNewSyntaxFStringSingleField] # flags: --python-version 3.6 v = 1 reveal_type(f'{v}') # N: Revealed type is 'builtins.str' reveal_type(f'{1}') # N: Revealed type is 'builtins.str' [builtins fixtures/f_string.pyi] mypy-0.761/test-data/unit/check-newtype.test0000644€tŠÔÚ€2›s®0000002370113576752246025237 0ustar jukkaDROPBOX\Domain Users00000000000000-- Checks NewType(...) 
-- Checks for basic functionality [case testNewTypePEP484Example1] from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: return "foo" UserId('user') # E: Argument 1 to "UserId" has incompatible type "str"; expected "int" name_by_id(42) # E: Argument 1 to "name_by_id" has incompatible type "int"; expected "UserId" name_by_id(UserId(42)) id = UserId(5) num = id + 1 reveal_type(id) # N: Revealed type is '__main__.UserId' reveal_type(num) # N: Revealed type is 'builtins.int' [targets __main__, __main__.UserId.__init__, __main__.name_by_id] [case testNewTypePEP484Example2] from typing import NewType class PacketId: def __init__(self, major: int, minor: int) -> None: self._major = major self._minor = minor TcpPacketId = NewType('TcpPacketId', PacketId) packet = PacketId(100, 100) tcp_packet = TcpPacketId(packet) tcp_packet = TcpPacketId(127, 0) [out] main:12: error: Too many arguments for "TcpPacketId" main:12: error: Argument 1 to "TcpPacketId" has incompatible type "int"; expected "PacketId" [case testNewTypeWithTuples] from typing import NewType, Tuple TwoTuple = NewType('TwoTuple', Tuple[int, str]) a = TwoTuple((3, "a")) b = TwoTuple(("a", 3)) # E: Argument 1 to "TwoTuple" has incompatible type "Tuple[str, int]"; expected "Tuple[int, str]" reveal_type(a[0]) # N: Revealed type is 'builtins.int' reveal_type(a[1]) # N: Revealed type is 'builtins.str' [builtins fixtures/tuple.pyi] [out] [case testNewTypeWithLists] from typing import NewType, List UserId = NewType('UserId', int) IdList = NewType('IdList', List[UserId]) bad1 = IdList([1]) # E: List item 0 has incompatible type "int"; expected "UserId" foo = IdList([]) foo.append(3) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "UserId" foo.append(UserId(3)) foo.extend([UserId(1), UserId(2), UserId(3)]) foo.extend(IdList([UserId(1), UserId(2), UserId(3)])) bar = IdList([UserId(2)]) baz = foo + bar reveal_type(foo) # N: Revealed type is 
'__main__.IdList' reveal_type(bar) # N: Revealed type is '__main__.IdList' reveal_type(baz) # N: Revealed type is 'builtins.list[__main__.UserId*]' [builtins fixtures/list.pyi] [out] [case testNewTypeWithGenerics] from typing import TypeVar, Generic, NewType, Any T = TypeVar('T') class Base(Generic[T]): def __init__(self, item: T) -> None: self.item = item def getter(self) -> T: return self.item Derived1 = NewType('Derived1', Base[str]) Derived2 = NewType('Derived2', Base) # Implicit 'Any' Derived3 = NewType('Derived3', Base[Any]) # Explicit 'Any' Derived1(Base(1)) # E: Argument 1 to "Base" has incompatible type "int"; expected "str" Derived1(Base('a')) Derived2(Base(1)) Derived2(Base('a')) Derived3(Base(1)) Derived3(Base('a')) reveal_type(Derived1(Base('a')).getter()) # N: Revealed type is 'builtins.str*' reveal_type(Derived3(Base('a')).getter()) # N: Revealed type is 'Any' [out] [case testNewTypeWithNamedTuple] from collections import namedtuple from typing import NewType, NamedTuple Vector1 = namedtuple('Vector1', ['x', 'y']) Point1 = NewType('Point1', Vector1) p1 = Point1(Vector1(1, 2)) reveal_type(p1.x) # N: Revealed type is 'Any' reveal_type(p1.y) # N: Revealed type is 'Any' Vector2 = NamedTuple('Vector2', [('x', int), ('y', int)]) Point2 = NewType('Point2', Vector2) p2 = Point2(Vector2(1, 2)) reveal_type(p2.x) # N: Revealed type is 'builtins.int' reveal_type(p2.y) # N: Revealed type is 'builtins.int' class Vector3: def __init__(self, x: int, y: int) -> None: self.x = x self.y = y Point3 = NewType('Point3', Vector3) p3 = Point3(Vector3(1, 3)) reveal_type(p3.x) # N: Revealed type is 'builtins.int' reveal_type(p3.y) # N: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] [out] [case testNewTypeWithCasts] from typing import NewType, cast UserId = NewType('UserId', int) foo = UserId(3) foo = cast(UserId, 3) foo = cast(UserId, "foo") foo = cast(UserId, UserId(4)) [out] [case testNewTypeWithTypeAliases] from typing import NewType Foo = int Bar = 
NewType('Bar', Foo) Bar2 = Bar def func1(x: Foo) -> Bar: return Bar(x) def func2(x: int) -> Bar: return Bar(x) def func3(x: Bar2) -> Bar: return x x = Bar(42) y = Bar2(42) y = func3(x) [out] [case testNewTypeWithNewType] from typing import NewType A = NewType('A', int) B = NewType('B', A) C = A D = C E = NewType('E', D) a = A(1) b = B(a) e = E(a) def funca(a: A) -> None: ... def funcb(b: B) -> None: ... funca(a) funca(b) funca(e) funcb(a) # E: Argument 1 to "funcb" has incompatible type "A"; expected "B" funcb(b) funcb(e) # E: Argument 1 to "funcb" has incompatible type "E"; expected "B" [out] -- Make sure NewType works as expected in a variety of different scopes/across files [case testNewTypeInLocalScope] from typing import NewType A = NewType('A', int) a = A(3) def func() -> None: A = NewType('A', str) B = NewType('B', str) a = A(3) # E: Argument 1 to "A" has incompatible type "int"; expected "str" a = A('xyz') b = B('xyz') class MyClass: C = NewType('C', float) def foo(self) -> 'MyClass.C': return MyClass.C(3.2) b = A(3) c = MyClass.C(3.5) [out] [case testNewTypeInMultipleFiles] import a import b list1 = [a.UserId(1), a.UserId(2)] list1.append(b.UserId(3)) # E: Argument 1 to "append" of "list" has incompatible type "b.UserId"; expected "a.UserId" [file a.py] from typing import NewType UserId = NewType('UserId', int) [file b.py] from typing import NewType UserId = NewType('UserId', int) [builtins fixtures/list.pyi] [out] [case testNewTypeWithIncremental] import m [file m.py] from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: return "foo" name_by_id(UserId(42)) id = UserId(5) num = id + 1 [file m.py.2] from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: return "foo" name_by_id(UserId(42)) id = UserId(5) num = id + 1 reveal_type(id) reveal_type(num) [rechecked m] [stale] [out1] [out2] tmp/m.py:13: note: Revealed type is 'm.UserId' tmp/m.py:14: note: Revealed type is 
'builtins.int' -- Check misuses of NewType fail [case testNewTypeBadInitializationFails] from typing import NewType a = NewType('b', int) # E: String argument 1 'b' to NewType(...) does not match variable name 'a' b = NewType('b', 3) # E: Argument 2 to NewType(...) must be a valid type c = NewType(2, int) # E: Argument 1 to NewType(...) must be a string literal foo = "d" d = NewType(foo, int) # E: Argument 1 to NewType(...) must be a string literal e = NewType(name='e', tp=int) # E: NewType(...) expects exactly two positional arguments f = NewType('f', tp=int) # E: NewType(...) expects exactly two positional arguments [out] [case testNewTypeWithAnyFails] from typing import NewType, Any A = NewType('A', Any) # E: Argument 2 to NewType(...) must be subclassable (got "Any") [out] [case testNewTypeWithUnionsFails] from typing import NewType, Union Foo = NewType('Foo', Union[int, float]) # E: Argument 2 to NewType(...) must be subclassable (got "Union[int, float]") [out] [case testNewTypeWithTypeTypeFails] from typing import NewType, Type Foo = NewType('Foo', Type[int]) # E: Argument 2 to NewType(...) must be subclassable (got "Type[int]") a = Foo(type(3)) [builtins fixtures/args.pyi] [out] [case testNewTypeWithTypeVarsFails] from typing import NewType, TypeVar, List T = TypeVar('T') A = NewType('A', T) B = NewType('B', List[T]) [builtins fixtures/list.pyi] [out] main:4: error: Argument 2 to NewType(...) must be subclassable (got T?) 
main:4: error: Type variable "__main__.T" is unbound main:4: note: (Hint: Use "Generic[T]" or "Protocol[T]" base class to bind "T" inside a class) main:4: note: (Hint: Use "T" in function signature to bind "T" inside a function) main:5: error: Type variable "__main__.T" is unbound main:5: note: (Hint: Use "Generic[T]" or "Protocol[T]" base class to bind "T" inside a class) main:5: note: (Hint: Use "T" in function signature to bind "T" inside a function) [case testNewTypeRedefiningVariablesFails] from typing import NewType a = 3 def f(): a a = NewType('a', int) # E: Cannot redefine 'a' as a NewType \ # E: Name 'a' already defined on line 4 b = NewType('b', int) def g(): b b = NewType('b', float) # E: Cannot redefine 'b' as a NewType \ # E: Name 'b' already defined on line 8 c = NewType('c', str) # type: str # E: Cannot declare the type of a NewType declaration [case testNewTypeAddingExplicitTypesFails] from typing import NewType UserId = NewType('UserId', int) a = 3 # type: UserId # E: Incompatible types in assignment (expression has type "int", variable has type "UserId") [out] [case testNewTypeTestSubclassingFails] from typing import NewType class A: pass B = NewType('B', A) class C(B): pass # E: Cannot subclass NewType [out] [case testCannotUseNewTypeWithProtocols] from typing import Protocol, NewType class P(Protocol): attr: int = 0 class D: attr: int C = NewType('C', P) # E: NewType cannot be used with protocol classes x: C = C(D()) # We still accept this, treating 'C' as non-protocol subclass. 
reveal_type(x.attr) # N: Revealed type is 'builtins.int' x.bad_attr # E: "C" has no attribute "bad_attr" C(1) # E: Argument 1 to "C" has incompatible type "int"; expected "P" [out] [case testNewTypeAny] from typing import NewType Any = NewType('Any', int) Any(5) [case testNewTypeWithIsInstanceAndIsSubclass] from typing import NewType T = NewType('T', int) d: object if isinstance(d, T): # E: Cannot use isinstance() with NewType type reveal_type(d) # N: Revealed type is '__main__.T' issubclass(object, T) # E: Cannot use issubclass() with NewType type [builtins fixtures/isinstancelist.pyi] [case testInvalidNewTypeCrash] from typing import List, NewType, Union N = NewType('N', XXX) # E: Argument 2 to NewType(...) must be subclassable (got "Any") \ # E: Name 'XXX' is not defined x: List[Union[N, int]] [builtins fixtures/list.pyi] mypy-0.761/test-data/unit/check-optional.test0000644€tŠÔÚ€2›s®0000005005313576752246025371 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for strict Optional behavior [case testImplicitNoneType] x = None x() # E: "None" not callable [case testImplicitNoneTypeInNestedFunction] def f() -> None: def g() -> None: x = None x() # E: "None" not callable [case testExplicitNoneType] x = None # type: None x() # E: "None" not callable [case testNoneMemberOfOptional] from typing import Optional x = None # type: Optional[int] [case testTypeMemberOfOptional] from typing import Optional x = 0 # type: Optional[int] [case testNoneNotMemberOfType] x = None # type: int [out] main:1: error: Incompatible types in assignment (expression has type "None", variable has type "int") [case testTypeNotMemberOfNone] x = 0 # type: None [out] main:1: error: Incompatible types in assignment (expression has type "int", variable has type "None") [case testOptionalNotMemberOfType] from typing import Optional def f(a: int) -> None: pass x = None # type: Optional[int] f(x) # E: Argument 1 to "f" has incompatible type "Optional[int]"; expected "int" [case 
testIsinstanceCases] from typing import Optional x = None # type: Optional[int] if isinstance(x, int): reveal_type(x) # N: Revealed type is 'builtins.int' else: reveal_type(x) # N: Revealed type is 'None' [builtins fixtures/isinstance.pyi] [case testIfCases] from typing import Optional x = None # type: Optional[int] if x: reveal_type(x) # N: Revealed type is 'builtins.int' else: reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' [builtins fixtures/bool.pyi] [case testIfNotCases] from typing import Optional x = None # type: Optional[int] if not x: reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(x) # N: Revealed type is 'builtins.int' [builtins fixtures/bool.pyi] [case testIsNotNoneCases] from typing import Optional x = None # type: Optional[int] if x is not None: reveal_type(x) # N: Revealed type is 'builtins.int' else: reveal_type(x) # N: Revealed type is 'None' [builtins fixtures/bool.pyi] [case testIsNoneCases] from typing import Optional x = None # type: Optional[int] if x is None: reveal_type(x) # N: Revealed type is 'None' else: reveal_type(x) # N: Revealed type is 'builtins.int' reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' [builtins fixtures/bool.pyi] [case testAnyCanBeNone] from typing import Optional, Any x = None # type: Any if x is None: reveal_type(x) # N: Revealed type is 'None' else: reveal_type(x) # N: Revealed type is 'Any' [builtins fixtures/bool.pyi] [case testOrCases] from typing import Optional x = None # type: Optional[str] y1 = x or 'a' reveal_type(y1) # N: Revealed type is 'builtins.str' y2 = x or 1 reveal_type(y2) # N: Revealed type is 'Union[builtins.str, builtins.int]' z1 = 'a' or x reveal_type(z1) # N: Revealed type is 'Union[builtins.str, None]' z2 = int() or x reveal_type(z2) # N: Revealed type is 'Union[builtins.int, builtins.str, None]' [case testAndCases] from typing import Optional x = None # type: Optional[str] y1 = x and 'b' reveal_type(y1) # N: Revealed type is 
'Union[builtins.str, None]' y2 = x and 1 # x could be '', so... reveal_type(y2) # N: Revealed type is 'Union[builtins.str, None, builtins.int]' z1 = 'b' and x reveal_type(z1) # N: Revealed type is 'Union[builtins.str, None]' z2 = int() and x reveal_type(z2) # N: Revealed type is 'Union[builtins.int, builtins.str, None]' [case testLambdaReturningNone] f = lambda: None x = f() reveal_type(x) # N: Revealed type is 'None' [case testNoneArgumentType] def f(x: None) -> None: pass f(None) [case testInferOptionalFromDefaultNone] def f(x: int = None) -> None: x + 1 # E: Unsupported left operand type for + ("None") \ # N: Left operand is of type "Optional[int]" f(None) [out] [case testNoInferOptionalFromDefaultNone] # flags: --no-implicit-optional def f(x: int = None) -> None: # E: Incompatible default for argument "x" (default has type "None", argument has type "int") pass [out] [case testInferOptionalFromDefaultNoneComment] def f(x=None): # type: (int) -> None x + 1 # E: Unsupported left operand type for + ("None") \ # N: Left operand is of type "Optional[int]" f(None) [out] [case testNoInferOptionalFromDefaultNoneComment] # flags: --no-implicit-optional def f(x=None): # E: Incompatible default for argument "x" (default has type "None", argument has type "int") # type: (int) -> None pass [out] [case testInferOptionalType] x = None if bool(): # scope limit assignment x = 1 # in scope of the assignment, x is an int reveal_type(x) # N: Revealed type is 'builtins.int' # out of scope of the assignment, it's an Optional[int] reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' [builtins fixtures/bool.pyi] [case testInferOptionalTypeLocallyBound] x = None x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' [case testInferOptionalAnyType] from typing import Any x = None a = None # type: Any if bool(): x = a reveal_type(x) # N: Revealed type is 'Any' reveal_type(x) # N: Revealed type is 'Union[Any, None]' [builtins fixtures/bool.pyi] [case 
testInferOptionalTypeFromOptional] from typing import Optional y = None # type: Optional[int] x = None x = y reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' [case testInferOptionalListType] x = [None] x.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "None" [builtins fixtures/list.pyi] [case testInferNonOptionalListType] x = [] x.append(1) x() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testInferOptionalDictKeyValueTypes] x = {None: None} x["bar"] = 1 [builtins fixtures/dict.pyi] [out] main:2: error: Invalid index type "str" for "Dict[None, None]"; expected type "None" main:2: error: Incompatible types in assignment (expression has type "int", target has type "None") [case testInferNonOptionalDictType] x = {} x["bar"] = 1 x() # E: "Dict[str, int]" not callable [builtins fixtures/dict.pyi] [case testNoneClassVariable] from typing import Optional class C: x = None # type: int def __init__(self) -> None: self.x = 0 [case testNoneClassVariableInInit] from typing import Optional class C: x = None # type: int def __init__(self) -> None: self.x = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testMultipleAssignmentNoneClassVariableInInit] from typing import Optional class C: x, y = None, None # type: int, str def __init__(self) -> None: self.x = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") self.y = None # E: Incompatible types in assignment (expression has type "None", variable has type "str") [out] [case testOverloadWithNone] from foo import * [file foo.pyi] from typing import overload @overload def f(x: None) -> str: pass @overload def f(x: int) -> int: pass reveal_type(f(None)) # N: Revealed type is 'builtins.str' reveal_type(f(0)) # N: Revealed type is 'builtins.int' [case testOptionalTypeOrTypePlain] from typing import Optional def f(a: Optional[int]) -> int: return a or 0 [out] 
[case testOptionalTypeOrTypeTypeVar] from typing import Optional, TypeVar T = TypeVar('T') def f(a: Optional[T], b: T) -> T: return a or b [out] [case testOptionalTypeOrTypeBothOptional] from typing import Optional def f(a: Optional[int], b: Optional[int]) -> None: reveal_type(a or b) def g(a: int, b: Optional[int]) -> None: reveal_type(a or b) [out] main:3: note: Revealed type is 'Union[builtins.int, None]' main:5: note: Revealed type is 'Union[builtins.int, None]' [case testOptionalTypeOrTypeComplexUnion] from typing import Union def f(a: Union[int, str, None]) -> None: reveal_type(a or 'default') [out] main:3: note: Revealed type is 'Union[builtins.int, builtins.str]' [case testOptionalTypeOrTypeNoTriggerPlain] from typing import Optional def f(a: Optional[int], b: int) -> int: return b or a [out] main:3: error: Incompatible return value type (got "Optional[int]", expected "int") [case testOptionalTypeOrTypeNoTriggerTypeVar] from typing import Optional, TypeVar T = TypeVar('T') def f(a: Optional[T], b: T) -> T: return b or a [out] main:4: error: Incompatible return value type (got "Optional[T]", expected "T") [case testNoneOrStringIsString] def f() -> str: a = None b = '' return a or b [out] [case testNoneOrTypeVarIsTypeVar] from typing import TypeVar T = TypeVar('T') def f(b: T) -> T: a = None return a or b [out] [case testYieldNothingInFunctionReturningGenerator] from typing import Generator def f() -> Generator[None, None, None]: yield [out] [case testNoneAndStringIsNone] a = None b = "foo" reveal_type(a and b) # N: Revealed type is 'None' [case testNoneMatchesObjectInOverload] import a a.f(None) [file a.pyi] from typing import overload @overload def f() -> None: ... @overload def f(o: object) -> None: ... 
[case testGenericSubclassReturningNone] from typing import Generic, TypeVar T = TypeVar('T') class Base(Generic[T]): def f(self) -> T: pass class SubNone(Base[None]): def f(self) -> None: pass class SubInt(Base[int]): def f(self) -> int: return 1 [case testUseOfNoneReturningFunction] from typing import Optional def f() -> None: pass def g(x: Optional[int]) -> int: pass x = f() # E: "f" does not return a value f() + 1 # E: "f" does not return a value g(f()) # E: "f" does not return a value [case testEmptyReturn] def f() -> None: return [case testReturnNone] def f() -> None: return None [case testNoneCallable] from typing import Callable def f() -> None: pass x = f # type: Callable[[], None] [case testOptionalCallable] from typing import Callable, Optional T = Optional[Callable[..., None]] [case testAnyTypeInPartialTypeList] # flags: --check-untyped-defs def f(): ... def lookup_field(name, obj): try: pass except: attr = f() else: attr = None [case testTernaryWithNone] reveal_type(None if bool() else 0) # N: Revealed type is 'Union[Literal[0]?, None]' [builtins fixtures/bool.pyi] [case testListWithNone] reveal_type([0, None, 0]) # N: Revealed type is 'builtins.list[Union[builtins.int, None]]' [builtins fixtures/list.pyi] [case testOptionalWhitelistSuppressesOptionalErrors] # flags: --strict-optional-whitelist import a import b [file a.py] from typing import Optional x = None # type: Optional[str] x + "foo" [file b.py] from typing import Optional x = None # type: Optional[int] x + 1 [case testOptionalWhitelistPermitsOtherErrors] # flags: --strict-optional-whitelist import a import b [file a.py] from typing import Optional x = None # type: Optional[str] x + "foo" [file b.py] from typing import Optional x = None # type: Optional[int] x + 1 1 + "foo" [out] tmp/b.py:4: error: Unsupported operand types for + ("int" and "str") [case testOptionalWhitelistPermitsWhitelistedFiles] # flags: --strict-optional-whitelist **/a.py import a import b [file a.py] from typing import 
Optional x = None # type: Optional[str] x + "foo" [file b.py] from typing import Optional x = None # type: Optional[int] x + 1 [out] tmp/a.py:3: error: Unsupported left operand type for + ("None") tmp/a.py:3: note: Left operand is of type "Optional[str]" [case testNoneContextInference] from typing import Dict, List def f() -> List[None]: return [] def g() -> Dict[None, None]: return {} [builtins fixtures/dict.pyi] [case testRaiseFromNone] raise BaseException from None [builtins fixtures/exception.pyi] [case testOptionalNonPartialTypeWithNone] from typing import Generator def f() -> Generator[str, None, None]: pass x = f() reveal_type(x) # N: Revealed type is 'typing.Generator[builtins.str, None, None]' l = [f()] reveal_type(l) # N: Revealed type is 'builtins.list[typing.Generator*[builtins.str, None, None]]' [builtins fixtures/list.pyi] [case testNoneListTernary] x = [None] if "" else [1] # E: List item 0 has incompatible type "int"; expected "None" [builtins fixtures/list.pyi] [case testListIncompatibleErrorMessage] from typing import List, Callable def foo(l: List[Callable[[], str]]) -> None: pass def f() -> int: return 42 foo([f]) # E: List item 0 has incompatible type "Callable[[], int]"; expected "Callable[[], str]" [builtins fixtures/list.pyi] [case testInferEqualsNotOptional] from typing import Optional x = '' # type: Optional[str] if x == '': reveal_type(x) # N: Revealed type is 'builtins.str' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' [builtins fixtures/ops.pyi] [case testInferEqualsNotOptionalWithUnion] from typing import Union x = '' # type: Union[str, int, None] if x == '': reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int]' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' [builtins fixtures/ops.pyi] [case testInferEqualsNotOptionalWithOverlap] from typing import Union x = '' # type: Union[str, int, None] if x == object(): reveal_type(x) # N: Revealed type is 
'Union[builtins.str, builtins.int]' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' [builtins fixtures/ops.pyi] [case testInferEqualsStillOptionalWithNoOverlap] from typing import Optional x = '' # type: Optional[str] if x == 0: reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' [builtins fixtures/ops.pyi] [case testInferEqualsStillOptionalWithBothOptional] from typing import Union x = '' # type: Union[str, int, None] y = '' # type: Union[str, None] if x == y: reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' [builtins fixtures/ops.pyi] [case testWarnNoReturnWorksWithStrictOptional] # flags: --warn-no-return def f() -> None: 1 + 1 # no error def g() -> int: 1 + 1 # [out] main:5: error: Missing return statement [case testGenericTypeAliasesOptional] from typing import TypeVar, Generic, Optional T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: self.x = x ONode = Optional[Node[T]] def f(x: T) -> ONode[T]: if 1 > 0: return Node(x) else: return None x = None # type: ONode[int] if int(): x = f(1) if int(): x = f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" x.x = 1 # E: Item "None" of "Optional[Node[int]]" has no attribute "x" if x is not None: x.x = 1 # OK here [builtins fixtures/ops.pyi] [case testOptionalTypeNarrowedInBooleanStatement] from typing import Optional x: Optional[int] = None x is not None and x + 42 x is not None and x + '42' # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/isinstance.pyi] [case testInvalidBooleanBranchIgnored] from typing import Optional x = None x is not None and x + 42 [builtins fixtures/isinstance.pyi] [case testOptionalLambdaInference] from typing import Optional, Callable f = None # type: Optional[Callable[[int], None]] f = 
lambda x: None f(0) [builtins fixtures/function.pyi] [case testDontSimplifyNoneUnionsWithStrictOptional] from typing import Any, TypeVar, Union A = None # type: Any class C(A): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass a = None # type: Any # Test both orders reveal_type(u(C(), None)) # N: Revealed type is 'Union[None, __main__.C*]' reveal_type(u(None, C())) # N: Revealed type is 'Union[__main__.C*, None]' reveal_type(u(a, None)) # N: Revealed type is 'Union[None, Any]' reveal_type(u(None, a)) # N: Revealed type is 'Union[Any, None]' reveal_type(u(1, None)) # N: Revealed type is 'Union[None, builtins.int*]' reveal_type(u(None, 1)) # N: Revealed type is 'Union[builtins.int*, None]' [case testOptionalAndAnyBaseClass] from typing import Any, Optional A = None # type: Any class C(A): pass x = None # type: Optional[C] x.foo() # E: Item "None" of "Optional[C]" has no attribute "foo" [case testIsinstanceAndOptionalAndAnyBase] from typing import Any, Optional B = None # type: Any class A(B): pass def f(a: Optional[A]): reveal_type(a) # N: Revealed type is 'Union[__main__.A, None]' if a is not None: reveal_type(a) # N: Revealed type is '__main__.A' else: reveal_type(a) # N: Revealed type is 'None' reveal_type(a) # N: Revealed type is 'Union[__main__.A, None]' [builtins fixtures/isinstance.pyi] [case testFlattenOptionalUnion] from typing import Optional, Union x: Optional[Union[int, str]] reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str, None]' y: Optional[Union[int, None]] reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' [case testOverloadWithNoneAndOptional] from typing import overload, Optional @overload def f(x: int) -> str: ... @overload def f(x: Optional[int]) -> Optional[str]: ... 
def f(x): return x reveal_type(f(1)) # N: Revealed type is 'builtins.str' reveal_type(f(None)) # N: Revealed type is 'Union[builtins.str, None]' x: Optional[int] reveal_type(f(x)) # N: Revealed type is 'Union[builtins.str, None]' [case testUnionTruthinessTracking] from typing import Optional, Any def test_or_shortcut(value: Optional[Any]) -> None: if not value: pass if not value or value.get('foo') == 'hello': pass [builtins fixtures/bool.pyi] [case testNarrowingFromObjectToOptional] from typing import Optional x: object y: Optional[int] x = y reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' [out] [case testNarrowOptionalOutsideLambda] from typing import Optional class A: a: int def f(x: Optional[A]) -> None: assert x lambda: x.a [builtins fixtures/isinstancelist.pyi] [case testNarrowOptionalOutsideLambdaWithDeferred] from typing import Optional class A: a: int def f(self, x: Optional['A']) -> None: assert x lambda: (self.y, x.a) # E: Cannot determine type of 'y' self.y = int() [builtins fixtures/isinstancelist.pyi] [case testDeferredAndOptionalInferenceSpecialCase] def f() -> str: y x = None if int(): x = '' if x is None: x = '' g(x) return x def g(x: str) -> None: pass y = int() [builtins fixtures/bool.pyi] [case testOptionalAssignAny1] from typing import Optional def f(): return 0 def g(x: Optional[int]) -> int: if x is None: reveal_type(x) # N: Revealed type is 'None' # As a special case for Unions containing None, during x = f() reveal_type(x) # N: Revealed type is 'Union[builtins.int, Any]' reveal_type(x) # N: Revealed type is 'Union[builtins.int, Any]' return x [builtins fixtures/bool.pyi] [case testOptionalAssignAny2] from typing import Optional def f(): return 0 def g(x: Optional[int]) -> int: if x is None: reveal_type(x) # N: Revealed type is 'None' x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' # Since we've assigned to x, the special case None behavior shouldn't happen x = f() reveal_type(x) # N: Revealed type is 
'Union[builtins.int, None]' reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' return x # E: Incompatible return value type (got "Optional[int]", expected "int") [builtins fixtures/bool.pyi] [case testOptionalAssignAny3] from typing import Optional def f(): return 0 def g(x: Optional[int]) -> int: if x is not None: return x reveal_type(x) # N: Revealed type is 'None' if 1: x = f() reveal_type(x) # N: Revealed type is 'Union[builtins.int, Any]' return x [builtins fixtures/bool.pyi] [case testStrictOptionalCovarianceCrossModule] # flags: --config-file tmp/mypy.ini from a import asdf x = ["lol"] asdf(x) [file a.py] from typing import List, Optional def asdf(x: List[Optional[str]]) -> None: pass x = ["lol"] asdf(x) [file mypy.ini] \[mypy] \[mypy-a] strict_optional = False [out] main:4: error: Argument 1 to "asdf" has incompatible type "List[str]"; expected "List[Optional[str]]" main:4: note: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance main:4: note: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] mypy-0.761/test-data/unit/check-overloading.test0000644€tŠÔÚ€2›s®0000041350313576752246026060 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for function overloading [case testOverloadNotImportedNoCrash] @overload def f(a): pass @overload def f(a): pass def f(a): pass f(0) @overload # E: Name 'overload' is not defined def g(a:int): pass def g(a): pass # E: Name 'g' already defined on line 9 g(0) @something # E: Name 'something' is not defined def r(a:int): pass def r(a): pass # E: Name 'r' already defined on line 14 r(0) [out] main:2: error: Name 'overload' is not defined main:4: error: Name 'f' already defined on line 2 main:4: error: Name 'overload' is not defined main:6: error: Name 'f' already defined on line 2 [case testTypeCheckOverloadWithImplementation] from typing import overload, Any @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... 
def f(x: Any) -> Any: pass reveal_type(f(A())) # N: Revealed type is '__main__.B' reveal_type(f(B())) # N: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadNeedsImplementation] from typing import overload, Any @overload # E: An overloaded function outside a stub file must have an implementation def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... reveal_type(f(A())) # N: Revealed type is '__main__.B' reveal_type(f(B())) # N: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testSingleOverloadNoImplementation] from typing import overload, Any @overload # E: Single overload definition, multiple required def f(x: 'A') -> 'B': ... class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadByAnyOtherName] from typing import overload as rose from typing import Any @rose def f(x: 'A') -> 'B': ... @rose def f(x: 'B') -> 'A': ... def f(x: Any) -> Any: pass reveal_type(f(A())) # N: Revealed type is '__main__.B' reveal_type(f(B())) # N: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithDecoratedImplementation] from typing import overload, Any def deco(fun): ... @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... @deco def f(x: Any) -> Any: pass reveal_type(f(A())) # N: Revealed type is '__main__.B' reveal_type(f(B())) # N: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadDecoratedImplementationNotLast] from typing import overload, Any def deco(fun): ... @overload def f(x: 'A') -> 'B': ... @deco # E: The implementation for an overloaded function must come last def f(x: Any) -> Any: pass @overload def f(x: 'B') -> 'A': ... 
class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadImplementationNotLast] from typing import overload, Any @overload def f(x: 'A') -> 'B': ... def f(x: Any) -> Any: # E: The implementation for an overloaded function must come last pass @overload def f(x: 'B') -> 'A': ... class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testDecoratedRedefinitionIsNotOverload] from typing import overload, Any def deco(fun): ... @deco def f(x: 'A') -> 'B': ... @deco # E: Name 'f' already defined on line 5 def f(x: 'B') -> 'A': ... @deco # E: Name 'f' already defined on line 5 def f(x: Any) -> Any: ... class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplementationPy2] # flags: --python-version 2.7 from typing import overload @overload def f(x): # type: (A) -> B pass @overload def f(x): # type: (B) -> A pass def f(x): pass reveal_type(f(A())) # N: Revealed type is '__main__.B' reveal_type(f(B())) # N: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplementationError] from typing import overload, Any @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... def f(x: Any) -> Any: foo = 1 if int(): foo = "bar" # E: Incompatible types in assignment (expression has type "str", variable has type "int") @overload def g(x: 'A') -> 'B': ... @overload def g(x: 'B') -> 'A': ... def g(x): foo = 1 if int(): foo = "bar" reveal_type(f(A())) # N: Revealed type is '__main__.B' reveal_type(f(B())) # N: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithUntypedImplAndMultipleVariants] from typing import overload @overload def f(x: int) -> str: ... @overload def f(x: str) -> int: ... # E: Overloaded function signatures 2 and 3 overlap with incompatible return types @overload def f(x: object) -> str: ... def f(x): ... 
[case testTypeCheckOverloadWithImplTooSpecificArg] from typing import overload, Any class A: pass class B: pass a = A() @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... def f(x: 'A') -> Any: # E: Overloaded function implementation does not accept all possible arguments of signature 2 pass reveal_type(f(A())) # N: Revealed type is '__main__.B' reveal_type(f(B())) # N: Revealed type is '__main__.A' [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplTooSpecificRetType] from typing import overload, Any class A: pass class B: pass a = A() @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... def f(x: Any) -> 'B': # E: Overloaded function implementation cannot produce return type of signature 2 return B() reveal_type(f(A())) # N: Revealed type is '__main__.B' reveal_type(f(B())) # N: Revealed type is '__main__.A' [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplTypeVar] from typing import overload, Any, TypeVar T = TypeVar('T') class A: pass class B: pass a = A() @overload def f(x: 'A') -> 'A': ... @overload def f(x: 'B') -> 'B': ... def f(x: T) -> T: ... reveal_type(f(A())) # N: Revealed type is '__main__.A' reveal_type(f(B())) # N: Revealed type is '__main__.B' [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplTypeVarProblems] from typing import overload, Any, TypeVar, Union T = TypeVar('T', bound='A') class A: pass class B: pass a = A() @overload def f(x: 'A') -> 'A': ... @overload def f(x: 'B') -> 'B': ... def f(x: Union[T, B]) -> T: # E: Overloaded function implementation cannot satisfy signature 2 due to inconsistencies in how they use type variables ... 
reveal_type(f(A())) # N: Revealed type is '__main__.A' reveal_type(f(B())) # N: Revealed type is '__main__.B' [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadImplementationTypeVarWithValueRestriction] from typing import overload, TypeVar, Union class A: pass class B: pass class C: pass T = TypeVar('T', A, B) @overload def foo(x: T) -> T: ... @overload def foo(x: C) -> int: ... def foo(x: Union[A, B, C]) -> Union[A, B, int]: if isinstance(x, C): return 3 else: return x @overload def bar(x: T) -> T: ... @overload def bar(x: C) -> int: ... def bar(x: Union[T, C]) -> Union[T, int]: if isinstance(x, C): return 3 else: return x [builtins fixtures/isinstancelist.pyi] [case testTypeCheckOverloadImplementationTypeVarDifferingUsage1] from typing import overload, Union, List, TypeVar, Generic T = TypeVar('T') @overload def foo(t: List[T]) -> T: ... @overload def foo(t: T) -> T: ... def foo(t: Union[List[T], T]) -> T: if isinstance(t, list): return t[0] else: return t class Wrapper(Generic[T]): @overload def foo(self, t: List[T]) -> T: ... @overload def foo(self, t: T) -> T: ... def foo(self, t: Union[List[T], T]) -> T: if isinstance(t, list): return t[0] else: return t [builtins fixtures/isinstancelist.pyi] [case testTypeCheckOverloadImplementationTypeVarDifferingUsage2] from typing import overload, Union, List, TypeVar, Generic T = TypeVar('T') # Note: this is unsafe when T = object @overload def foo(t: List[T], s: T) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo(t: T, s: T) -> str: ... def foo(t, s): pass class Wrapper(Generic[T]): @overload def foo(self, t: List[T], s: T) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo(self, t: T, s: T) -> str: ... 
def foo(self, t, s): pass class Dummy(Generic[T]): pass # Same root issue: why does the additional constraint bound T <: T # cause the constraint solver to not infer T = object like it did in the # first example? @overload def bar(d: Dummy[T], t: List[T], s: T) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def bar(d: Dummy[T], t: T, s: T) -> str: ... def bar(d: Dummy[T], t, s): pass [builtins fixtures/isinstancelist.pyi] [case testTypeCheckOverloadedFunctionBody] from foo import * [file foo.pyi] from typing import overload @overload def f(x: 'A'): if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") x = A() @overload def f(x: 'B'): if int(): x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") x = B() class A: pass class B: pass [out] [case testTypeCheckOverloadedMethodBody] from foo import * [file foo.pyi] from typing import overload class A: @overload def f(self, x: 'A'): if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") x = A() @overload def f(self, x: 'B'): if int(): x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") x = B() class B: pass [out] [case testCallToOverloadedFunction] from foo import * [file foo.pyi] from typing import overload f(C()) # E: No overload variant of "f" matches argument type "C" \ # N: Possible overload variants: \ # N: def f(x: A) -> None \ # N: def f(x: B) -> None f(A()) f(B()) @overload def f(x: 'A') -> None: pass @overload def f(x: 'B') -> None: pass class A: pass class B: pass class C: pass [case testOverloadedFunctionReturnValue] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) if int(): b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = f(b) # E: Incompatible types in assignment 
(expression has type "B", variable has type "A") if int(): a = f(a) if int(): b = f(b) @overload def f(x: 'A') -> 'A': pass @overload def f(x: 'B') -> 'B': pass class A: pass class B: pass [case testCallToOverloadedMethod] from foo import * [file foo.pyi] from typing import overload A().f(C()) # E: No overload variant of "f" of "A" matches argument type "C" \ # N: Possible overload variants: \ # N: def f(self, x: A) -> None \ # N: def f(self, x: B) -> None A().f(A()) A().f(B()) class A: @overload def f(self, x: 'A') -> None: pass @overload def f(self, x: 'B') -> None: pass class B: pass class C: pass [case testOverloadedMethodReturnValue] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) if int(): b = a.f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = a.f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = a.f(a) if int(): b = a.f(b) class A: @overload def f(self, x: 'A') -> 'A': pass @overload def f(self, x: 'B') -> 'B': pass class B: pass [case testOverloadsWithDifferentArgumentCounts] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) if int(): a = f(a) if int(): b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") f(b) # E: No overload variant of "f" matches argument type "B" \ # N: Possible overload variant: \ # N: def f(x: A) -> A \ # N: <1 more non-matching overload not shown> if int(): b = f(b, a) if int(): a = f(b, a) # E: Incompatible types in assignment (expression has type "B", variable has type "A") f(a, a) # E: No overload variant of "f" matches argument types "A", "A" \ # N: Possible overload variant: \ # N: def f(x: B, y: A) -> B \ # N: <1 more non-matching overload not shown> f(b, b) # E: No overload variant of "f" matches argument types "B", "B" \ # N: Possible overload variant: \ # N: def f(x: B, y: A) -> B \ # 
N: <1 more non-matching overload not shown> @overload def f(x: 'A') -> 'A': pass @overload def f(x: 'B', y: 'A') -> 'B': pass class A: pass class B: pass [case testGenericOverloadVariant] from foo import * [file foo.pyi] from typing import overload, TypeVar, Generic t = TypeVar('t') ab, ac, b, c = None, None, None, None # type: (A[B], A[C], B, C) if int(): b = f(ab) c = f(ac) b = f(ac) # E: Incompatible types in assignment (expression has type "C", variable has type "B") b = f(b) c = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "C") @overload def f(x: 'A[t]') -> t: pass @overload def f(x: 'B') -> 'B': pass class A(Generic[t]): pass class B: pass class C: pass [case testOverloadedInit] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) a = A(a) a = A(b) a = A(object()) # E: No overload variant of "A" matches argument type "object" \ # N: Possible overload variants: \ # N: def __init__(self, a: A) -> A \ # N: def __init__(self, b: B) -> A class A: @overload def __init__(self, a: 'A') -> None: pass @overload def __init__(self, b: 'B') -> None: pass class B: pass [case testIntersectionTypeCompatibility] from foo import * [file foo.pyi] from typing import overload, Callable o = None # type: object a = None # type: A if int(): a = f # E: Incompatible types in assignment (expression has type overloaded function, variable has type "A") if int(): o = f @overload def f(a: 'A') -> None: pass @overload def f(a: Callable[[], None]) -> None: pass class A: pass [case testCompatibilityOfIntersectionTypeObjectWithStdType] from foo import * [file foo.pyi] from typing import overload t, a = None, None # type: (type, A) if int(): a = A # E: Incompatible types in assignment (expression has type "Type[A]", variable has type "A") t = A class A: @overload def __init__(self, a: 'A') -> None: pass @overload def __init__(self, a: 'B') -> None: pass class B: pass [case testOverloadedGetitem] from foo import * 
[file foo.pyi] from typing import overload a, b = None, None # type: int, str if int(): a = A()[a] if int(): b = A()[a] # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): b = A()[b] if int(): a = A()[b] # E: Incompatible types in assignment (expression has type "str", variable has type "int") class A: @overload def __getitem__(self, a: int) -> int: pass @overload def __getitem__(self, b: str) -> str: pass [case testOverloadedGetitemWithGenerics] from foo import * [file foo.pyi] from typing import TypeVar, Generic, overload t = TypeVar('t') a, b, c = None, None, None # type: (A, B, C[A]) if int(): a = c[a] b = c[a] # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = c[b] b = c[b] # E: Incompatible types in assignment (expression has type "A", variable has type "B") class C(Generic[t]): @overload def __getitem__(self, a: 'A') -> t: pass @overload def __getitem__(self, b: 'B') -> t: pass class A: pass class B: pass [case testImplementingOverloadedMethod] from foo import * [file foo.pyi] from typing import overload from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @overload @abstractmethod def f(self) -> None: pass @overload @abstractmethod def f(self, a: 'A') -> None: pass class A(I): @overload def f(self) -> None: pass @overload def f(self, a: 'A') -> None: pass [case testOverloadWithFunctionType] from foo import * [file foo.pyi] from typing import overload, Callable class A: pass @overload def f(x: A) -> None: pass @overload def f(x: Callable[[], None]) -> None: pass f(A()) [builtins fixtures/function.pyi] [case testVarArgsOverload] from foo import * [file foo.pyi] from typing import overload, Any @overload def f(x: 'A', *more: Any) -> 'A': pass @overload def f(x: 'B', *more: Any) -> 'A': pass f(A()) f(A(), A, A) f(B()) f(B(), B) f(B(), B, B) f(object()) # E: No overload variant of "f" matches argument type "object" \ # N: Possible overload variants: 
\ # N: def f(x: A, *more: Any) -> A \ # N: def f(x: B, *more: Any) -> A class A: pass class B: pass [builtins fixtures/list.pyi] [case testVarArgsOverload2] from foo import * [file foo.pyi] from typing import overload @overload def f(x: 'A', *more: 'B') -> 'A': pass @overload def f(x: 'B', *more: 'A') -> 'A': pass f(A(), B()) f(A(), B(), B()) f(A(), A(), B()) # E: No overload variant of "f" matches argument types "A", "A", "B" \ # N: Possible overload variants: \ # N: def f(x: A, *more: B) -> A \ # N: def f(x: B, *more: A) -> A f(A(), B(), A()) # E: No overload variant of "f" matches argument types "A", "B", "A" \ # N: Possible overload variants: \ # N: def f(x: A, *more: B) -> A \ # N: def f(x: B, *more: A) -> A class A: pass class B: pass [builtins fixtures/list.pyi] [case testOverloadWithTypeObject] from foo import * [file foo.pyi] from typing import overload @overload def f(a: 'A', t: type) -> None: pass @overload def f(a: 'B', t: type) -> None: pass f(A(), B) f(B(), A) class A: pass class B: pass [builtins fixtures/function.pyi] [case testOverloadedInitAndTypeObjectInOverload] from foo import * [file foo.pyi] from typing import overload @overload def f(t: type) -> 'A': pass @overload def f(t: 'A') -> 'B': pass a, b = None, None # type: (A, B) if int(): a = f(A) if int(): b = f(a) if int(): b = f(A) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = f(a) # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: @overload def __init__(self) -> None: pass @overload def __init__(self, a: 'A') -> None: pass class B: pass [case testOverlappingErasedSignatures] from foo import * [file foo.pyi] from typing import overload, List @overload def f(a: List[int]) -> int: pass @overload def f(a: List[str]) -> int: pass list_int = [] # type: List[int] list_str = [] # type: List[str] list_object = [] # type: List[object] n = f(list_int) m = f(list_str) def p(): n, m # Prevent redefinition n 
= 1 m = 1 n = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int") m = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int") f(list_object) # E: Argument 1 to "f" has incompatible type "List[object]"; expected "List[int]" [builtins fixtures/list.pyi] [case testOverlappingOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def f(x: B) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: A) -> str: pass [case testContravariantOverlappingOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader [case testPartiallyCovariantOverlappingOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def f(x: B) -> A: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: A) -> B: pass [case testPartiallyContravariantOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def g(x: A) -> int: pass @overload def g(x: B) -> str: pass # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader [case testCovariantOverlappingOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def g(x: B) -> B: pass @overload def g(x: A) -> A: pass [case testCovariantOverlappingOverloadSignaturesWithSomeSameArgTypes] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def g(x: int, y: B) -> B: pass @overload 
def g(x: int, y: A) -> A: pass [case testCovariantOverlappingOverloadSignaturesWithAnyType] from foo import * [file foo.pyi] from typing import Any, overload @overload def g(x: int) -> int: pass @overload def g(x: Any) -> Any: pass [case testContravariantOverlappingOverloadSignaturesWithAnyType] from foo import * [file foo.pyi] from typing import Any, overload @overload def g(x: Any) -> Any: pass @overload def g(x: int) -> int: pass # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader [case testOverloadedLtAndGtMethods] from foo import * [file foo.pyi] from typing import overload class A: def __lt__(self, x: A) -> int: pass def __gt__(self, x: A) -> int: pass class B: @overload def __lt__(self, x: B) -> int: pass @overload def __lt__(self, x: A) -> int: pass @overload def __gt__(self, x: B) -> int: pass @overload def __gt__(self, x: A) -> int: pass A() < A() A() < B() B() < A() B() < B() A() < object() # E: Unsupported operand types for < ("A" and "object") B() < object() # E: No overload variant of "__lt__" of "B" matches argument type "object" \ # N: Possible overload variants: \ # N: def __lt__(self, B) -> int \ # N: def __lt__(self, A) -> int [case testOverloadedForwardMethodAndCallingReverseMethod] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: 'A') -> int: pass @overload def __add__(self, x: int) -> int: pass class B: def __radd__(self, x: A) -> int: pass A() + A() A() + 1 A() + B() A() + '' # E: No overload variant of "__add__" of "A" matches argument type "str" \ # N: Possible overload variants: \ # N: def __add__(self, A) -> int \ # N: def __add__(self, int) -> int [case testOverrideOverloadSwapped] from foo import * [file foo.pyi] from typing import overload class Parent: @overload def f(self, x: int) -> int: ... @overload def f(self, x: str) -> str: ... 
class Child(Parent): @overload # E: Signature of "f" incompatible with supertype "Parent" \ # N: Overload variants must be defined in the same order as they are in "Parent" def f(self, x: str) -> str: ... @overload def f(self, x: int) -> int: ... [case testOverrideOverloadSwappedWithExtraVariants] from foo import * [file foo.pyi] from typing import overload class bool: pass class Parent: @overload def f(self, x: int) -> int: ... @overload def f(self, x: str) -> str: ... class Child1(Parent): @overload # E: Signature of "f" incompatible with supertype "Parent" \ # N: Overload variants must be defined in the same order as they are in "Parent" def f(self, x: bool) -> bool: ... @overload def f(self, x: str) -> str: ... @overload def f(self, x: int) -> int: ... class Child2(Parent): @overload # E: Signature of "f" incompatible with supertype "Parent" \ # N: Overload variants must be defined in the same order as they are in "Parent" def f(self, x: str) -> str: ... @overload def f(self, x: bool) -> bool: ... @overload def f(self, x: int) -> int: ... class Child3(Parent): @overload # E: Signature of "f" incompatible with supertype "Parent" \ # N: Overload variants must be defined in the same order as they are in "Parent" def f(self, x: str) -> str: ... @overload def f(self, x: int) -> int: ... @overload def f(self, x: bool) -> bool: ... [case testOverrideOverloadSwappedWithAdjustedVariants] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass class C(B): pass class Parent: @overload def f(self, x: int) -> int: ... @overload def f(self, x: B) -> B: ... class Child1(Parent): @overload # E: Signature of "f" incompatible with supertype "Parent" \ # N: Overload variants must be defined in the same order as they are in "Parent" def f(self, x: A) -> B: ... @overload def f(self, x: int) -> int: ... 
class Child2(Parent): @overload # E: Signature of "f" incompatible with supertype "Parent" \ # N: Overload variants must be defined in the same order as they are in "Parent" def f(self, x: B) -> C: ... @overload def f(self, x: int) -> int: ... class Child3(Parent): @overload # E: Signature of "f" incompatible with supertype "Parent" def f(self, x: B) -> A: ... @overload def f(self, x: int) -> int: ... [case testOverrideOverloadedMethodWithMoreGeneralArgumentTypes] from foo import * [file foo.pyi] from typing import overload class IntSub(int): pass class StrSub(str): pass class A: @overload def f(self, x: IntSub) -> int: return 0 @overload def f(self, x: StrSub) -> str: return '' class B(A): @overload def f(self, x: int) -> int: return 0 @overload def f(self, x: str) -> str: return '' [out] [case testOverrideOverloadedMethodWithMoreSpecificArgumentTypes] from foo import * [file foo.pyi] from typing import overload class IntSub(int): pass class StrSub(str): pass class A: @overload def f(self, x: int) -> int: return 0 @overload def f(self, x: str) -> str: return '' class B(A): @overload def f(self, x: IntSub) -> int: return 0 @overload def f(self, x: str) -> str: return '' class C(A): @overload def f(self, x: int) -> int: return 0 @overload def f(self, x: StrSub) -> str: return '' class D(A): @overload def f(self, x: int) -> int: return 0 @overload def f(self, x: str) -> str: return '' [out] tmp/foo.pyi:12: error: Signature of "f" incompatible with supertype "A" tmp/foo.pyi:17: error: Signature of "f" incompatible with supertype "A" [case testOverloadingAndDucktypeCompatibility] from foo import * [file foo.pyi] from typing import overload, _promote class A: pass @_promote(A) class B: pass @overload def f(n: B) -> B: return n @overload def f(n: A) -> A: return n f(B()) + 'x' # E: Unsupported left operand type for + ("B") f(A()) + 'x' # E: Unsupported left operand type for + ("A") [typing fixtures/typing-full.pyi] [case testOverloadingAndIntFloatSubtyping] from foo 
import * [file foo.pyi] from typing import overload @overload def f(x: float) -> None: pass @overload def f(x: str) -> None: pass f(1.1) f('') f(1) f(()) # E: No overload variant of "f" matches argument type "Tuple[]" \ # N: Possible overload variants: \ # N: def f(x: float) -> None \ # N: def f(x: str) -> None [builtins fixtures/primitives.pyi] [out] [case testOverloadingVariableInputs] from foo import * [file foo.pyi] from typing import overload @overload def f(x: int, y: int) -> None: pass @overload def f(x: int) -> None: pass f(1) f(1, 2) z = (1, 2) f(*z) [builtins fixtures/primitives.pyi] [out] [case testTypeInferenceSpecialCaseWithOverloading] from foo import * [file foo.pyi] from typing import overload class A: def __add__(self, x: A) -> A: pass class B: def __radd__(self, x: A) -> B: pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass f(A() + B())() # E: "B" not callable [case testKeywordArgOverload] from foo import * [file foo.pyi] from typing import overload @overload def f(x: int, y: str) -> int: pass @overload def f(x: str, y: int) -> str: pass f(x=1, y='')() # E: "int" not callable f(y=1, x='')() # E: "str" not callable [case testIgnoreOverloadVariantBasedOnKeywordArg] from foo import * [file foo.pyi] from typing import overload @overload def f(x: int) -> int: pass @overload def f(y: int) -> str: pass f(x=1)() # E: "int" not callable f(y=1)() # E: "str" not callable [case testOverloadWithTupleVarArg] from foo import * [file foo.pyi] from typing import overload @overload def f(x: int, y: str) -> int: pass @overload def f(*x: str) -> str: pass f(*(1,))() # E: No overload variant of "f" matches argument type "Tuple[int]" \ # N: Possible overload variant: \ # N: def f(*x: str) -> str \ # N: <1 more non-matching overload not shown> f(*('',))() # E: "str" not callable f(*(1, ''))() # E: "int" not callable f(*(1, '', 1))() # E: No overload variant of "f" matches argument type "Tuple[int, str, int]" \ # N: Possible overload variant: \ # N: 
def f(*x: str) -> str \ # N: <1 more non-matching overload not shown> [case testPreferExactSignatureMatchInOverload] from foo import * [file foo.pyi] from typing import overload, List @overload def f(x: int, y: List[int] = None) -> int: pass @overload def f(x: int, y: List[str] = None) -> int: pass f(y=[1], x=0)() # E: "int" not callable f(y=[''], x=0)() # E: "int" not callable a = f(y=[['']], x=0) # E: List item 0 has incompatible type "List[str]"; expected "int" reveal_type(a) # N: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] [case testOverloadWithDerivedFromAny] from foo import * [file foo.pyi] from typing import Any, overload Base = None # type: Any class C: @overload def __init__(self, a: str) -> None: pass @overload def __init__(self, a: int) -> None: pass class Derived(Base): def to_dict(self) -> C: return C(self) # fails without the fix for #1363 C(Derived()) # fails without the hack C(Base()) # Always ok [case testOverloadWithBoundedTypeVar] from foo import * [file foo.pyi] from typing import overload, TypeVar T = TypeVar('T', bound=str) @overload def f(x: T) -> T: pass @overload def f(x: int) -> bool: pass class mystr(str): pass f('x')() # E: "str" not callable f(1)() # E: "bool" not callable f(1.1) # E: No overload variant of "f" matches argument type "float" \ # N: Possible overload variants: \ # N: def [T <: str] f(x: T) -> T \ # N: def f(x: int) -> bool f(mystr())() # E: "mystr" not callable [builtins fixtures/primitives.pyi] [case testOverloadedCallWithVariableTypes] from foo import * [file foo.pyi] from typing import overload, TypeVar, List T = TypeVar('T', bound=str) @overload def f(x: T) -> T: pass @overload def f(x: List[T]) -> None: pass class mystr(str): pass U = TypeVar('U', bound=mystr) V = TypeVar('V') def g(x: U, y: V) -> None: f(x)() # E: "mystr" not callable f(y) # E: No overload variant of "f" matches argument type "V" \ # N: Possible overload variants: \ # N: def [T <: str] f(x: T) -> T \ # N: def [T <: str] f(x: 
List[T]) -> None a = f([x]) reveal_type(a) # N: Revealed type is 'None' f([y]) # E: Value of type variable "T" of "f" cannot be "V" f([x, y]) # E: Value of type variable "T" of "f" cannot be "object" [builtins fixtures/list.pyi] [out] [case testOverloadOverlapWithTypeVars] from foo import * [file foo.pyi] from typing import overload, TypeVar, Sequence, List T = TypeVar('T', bound=str) @overload def f(x: Sequence[T]) -> None: pass @overload def f(x: Sequence[int]) -> int: pass @overload def g(x: Sequence[T]) -> None: pass @overload def g(x: Sequence[str]) -> int: pass # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader @overload def h(x: Sequence[str]) -> int: pass @overload def h(x: Sequence[T]) -> None: pass # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader @overload def i(x: List[str]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def i(x: List[T]) -> None: pass [builtins fixtures/list.pyi] [case testOverloadOverlapWithTypeVarsWithValues] from foo import * [file foo.pyi] from typing import overload, TypeVar AnyStr = TypeVar('AnyStr', bytes, str) @overload def f(x: int) -> int: pass @overload def f(x: AnyStr) -> str: pass f(1)() # E: "int" not callable f('1')() # E: "str" not callable f(b'1')() # E: "str" not callable f(1.0) # E: No overload variant of "f" matches argument type "float" \ # N: Possible overload variants: \ # N: def f(x: int) -> int \ # N: def [AnyStr in (bytes, str)] f(x: AnyStr) -> str @overload def g(x: AnyStr, *a: AnyStr) -> None: pass @overload def g(x: int, *a: AnyStr) -> None: pass g('foo') g('foo', 'bar') g('foo', b'bar') # E: Value of type variable "AnyStr" of "g" cannot be "object" g(1) g(1, 'foo') g(1, 'foo', b'bar') # E: Value of type variable "AnyStr" of "g" cannot be "object" [builtins fixtures/primitives.pyi] [case 
testOverloadOverlapWithTypeVarsWithValuesOrdering] from foo import * [file foo.pyi] from typing import overload, TypeVar AnyStr = TypeVar('AnyStr', bytes, str) @overload def f(x: AnyStr) -> AnyStr: pass @overload def f(x: str) -> str: pass # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader @overload def g(x: str) -> str: pass @overload def g(x: AnyStr) -> AnyStr: pass [builtins fixtures/primitives.pyi] [case testOverloadsUsingAny] from typing import overload, List, Any, Union @overload def foo(x: List[int]) -> int: ... @overload def foo(x: List[str]) -> str: ... def foo(x): pass a: List[int] b: List[str] c: List[Any] d: Union[List[int], List[str]] e: List[bool] f: List[object] g: List[Union[int, str]] reveal_type(foo(a)) reveal_type(foo(b)) reveal_type(foo(c)) reveal_type(foo(d)) foo(e) foo(f) foo(g) [builtins fixtures/list.pyi] [out] main:17: note: Revealed type is 'builtins.int' main:18: note: Revealed type is 'builtins.str' main:19: note: Revealed type is 'Any' main:20: note: Revealed type is 'Union[builtins.int, builtins.str]' main:21: error: Argument 1 to "foo" has incompatible type "List[bool]"; expected "List[int]" main:21: note: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance main:21: note: Consider using "Sequence" instead, which is covariant main:22: error: Argument 1 to "foo" has incompatible type "List[object]"; expected "List[int]" main:23: error: Argument 1 to "foo" has incompatible type "List[Union[int, str]]"; expected "List[int]" [case testOverloadAgainstEmptyCollections] from typing import overload, List @overload def f(x: List[int]) -> int: ... @overload def f(x: List[str]) -> str: ... 
def f(x): pass reveal_type(f([])) # N: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] [case testOverloadAgainstEmptyCovariantCollections] from typing import overload, TypeVar, Generic T = TypeVar('T', covariant=True) class Wrapper(Generic[T]): pass class A: pass class B(A): pass class C: pass @overload def f(x: Wrapper[A]) -> int: ... @overload def f(x: Wrapper[C]) -> str: ... def f(x): pass reveal_type(f(Wrapper())) # N: Revealed type is 'builtins.int' reveal_type(f(Wrapper[C]())) # N: Revealed type is 'builtins.str' reveal_type(f(Wrapper[B]())) # N: Revealed type is 'builtins.int' [case testOverlappingOverloadCounting] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def f(x: int) -> None: pass @overload def f(x: B) -> str: pass # E: Overloaded function signatures 2 and 3 overlap with incompatible return types @overload def f(x: A) -> int: pass [case testOverloadWithTupleMatchingTypeVar] from foo import * [file foo.pyi] from typing import TypeVar, Generic, Tuple, overload T = TypeVar('T') class A(Generic[T]): @overload def f(self, arg: T) -> None: pass @overload def f(self, arg: T, default: int) -> None: pass b = A() # type: A[Tuple[int, int]] b.f((0, 0)) b.f((0, '')) # E: Argument 1 to "f" of "A" has incompatible type "Tuple[int, str]"; expected "Tuple[int, int]" [case testSingleOverloadStub] from foo import * [file foo.pyi] from typing import overload @overload def f(a: int) -> None: pass def f(a: int) -> None: pass [out] tmp/foo.pyi:2: error: Single overload definition, multiple required tmp/foo.pyi:4: error: An implementation for an overloaded function is not allowed in a stub file [case testSingleOverload2] from foo import * [file foo.pyi] from typing import overload def f(a: int) -> None: pass @overload def f(a: str) -> None: pass [out] tmp/foo.pyi:3: error: Name 'f' already defined on line 2 tmp/foo.pyi:3: error: Single overload definition, multiple required [case 
testNonconsecutiveOverloads] from foo import * [file foo.pyi] from typing import overload @overload def f(a: int) -> None: pass 1 @overload def f(a: str) -> None: pass [out] tmp/foo.pyi:2: error: Single overload definition, multiple required tmp/foo.pyi:5: error: Name 'f' already defined on line 2 tmp/foo.pyi:5: error: Single overload definition, multiple required [case testNonconsecutiveOverloadsMissingFirstOverload] from foo import * [file foo.pyi] from typing import overload def f(a: int) -> None: pass 1 @overload def f(a: str) -> None: pass [out] tmp/foo.pyi:4: error: Name 'f' already defined on line 2 tmp/foo.pyi:4: error: Single overload definition, multiple required [case testNonconsecutiveOverloadsMissingLaterOverload] from foo import * [file foo.pyi] from typing import overload @overload def f(a: int) -> None: pass 1 def f(a: str) -> None: pass [out] tmp/foo.pyi:2: error: Single overload definition, multiple required tmp/foo.pyi:5: error: Name 'f' already defined on line 2 [case testOverloadTuple] from foo import * [file foo.pyi] from typing import overload, Tuple @overload def f(x: int, y: Tuple[str, ...]) -> None: pass @overload def f(x: int, y: str) -> None: pass f(1, ('2', '3')) f(1, (2, '3')) # E: Argument 2 to "f" has incompatible type "Tuple[int, str]"; expected "Tuple[str, ...]" f(1, ('2',)) f(1, '2') f(1, (2, 3)) # E: Argument 2 to "f" has incompatible type "Tuple[int, int]"; expected "Tuple[str, ...]" x = ('2', '3') # type: Tuple[str, ...] f(1, x) y = (2, 3) # type: Tuple[int, ...] 
f(1, y) # E: Argument 2 to "f" has incompatible type "Tuple[int, ...]"; expected "Tuple[str, ...]" [builtins fixtures/tuple.pyi] [case testCallableSpecificOverload] from foo import * [file foo.pyi] from typing import overload, Callable @overload def f(a: Callable[[], int]) -> None: pass @overload def f(a: str) -> None: pass f(0) # E: No overload variant of "f" matches argument type "int" \ # N: Possible overload variants: \ # N: def f(a: Callable[[], int]) -> None \ # N: def f(a: str) -> None [case testCustomRedefinitionDecorator] from typing import Any, Callable, Type class Chain(object): def chain(self, function: Callable[[Any], int]) -> 'Chain': return self class Test(object): do_chain = Chain() @do_chain.chain # E: Name 'do_chain' already defined on line 9 def do_chain(self) -> int: return 2 @do_chain.chain # E: Name 'do_chain' already defined on line 11 def do_chain(self) -> int: return 3 t = Test() reveal_type(t.do_chain) # N: Revealed type is '__main__.Chain' [case testOverloadWithOverlappingItemsAndAnyArgument1] from typing import overload, Any @overload def f(x: int) -> int: ... @overload def f(x: object) -> object: ... def f(x): pass a: Any reveal_type(f(a)) # N: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument2] from typing import overload, Any @overload def f(x: int) -> int: ... @overload def f(x: float) -> float: ... def f(x): pass a: Any reveal_type(f(a)) # N: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument3] from typing import overload, Any @overload def f(x: int) -> int: ... @overload def f(x: str) -> str: ... def f(x): pass a: Any reveal_type(f(a)) # N: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument4] from typing import overload, Any @overload def f(x: int, y: int, z: str) -> int: ... @overload def f(x: object, y: int, z: str) -> object: ... 
def f(x): pass a: Any # Any causes ambiguity reveal_type(f(a, 1, '')) # N: Revealed type is 'Any' # Any causes no ambiguity reveal_type(f(1, a, a)) # N: Revealed type is 'builtins.int' reveal_type(f('', a, a)) # N: Revealed type is 'builtins.object' # Like above, but use keyword arguments. reveal_type(f(y=1, z='', x=a)) # N: Revealed type is 'Any' reveal_type(f(y=a, z='', x=1)) # N: Revealed type is 'builtins.int' reveal_type(f(z='', x=1, y=a)) # N: Revealed type is 'builtins.int' reveal_type(f(z='', x=a, y=1)) # N: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument5] from typing import overload, Any, Union class A: pass class B(A): pass @overload def f(x: B) -> B: ... @overload def f(x: Union[A, B]) -> A: ... def f(x): pass # Note: overloads ignore promotions so we treat 'int' and 'float' as distinct types @overload def g(x: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g(x: Union[int, float]) -> float: ... def g(x): pass a: Any reveal_type(f(a)) # N: Revealed type is 'Any' reveal_type(g(a)) # N: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument6] from typing import overload, Any @overload def f(x: int, y: int) -> int: ... @overload def f(x: float, y: int, z: str) -> float: ... @overload def f(x: object, y: int, z: str, a: None) -> str: ... def f(x): pass a: Any # Any causes ambiguity reveal_type(f(*a)) # N: Revealed type is 'Any' reveal_type(f(a, *a)) # N: Revealed type is 'Any' reveal_type(f(1, *a)) # N: Revealed type is 'Any' reveal_type(f(1.1, *a)) # N: Revealed type is 'Any' reveal_type(f('', *a)) # N: Revealed type is 'builtins.str' [case testOverloadWithOverlappingItemsAndAnyArgument7] from typing import overload, Any @overload def f(x: int, y: int, z: int) -> int: ... @overload def f(x: object, y: int, z: int) -> object: ... def f(x): pass @overload def g(x: int, y: int, z: int) -> int: ... 
@overload def g(x: object, y: int, z: str) -> object: ... def g(x): pass a: Any reveal_type(f(1, *a)) # N: Revealed type is 'builtins.int' reveal_type(g(1, *a)) # N: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument8] from typing import overload, Any @overload def f(x: int, y: int, z: int) -> str: ... @overload def f(x: object, y: int, z: int) -> str: ... def f(x): pass a: Any # The return type is not ambiguous so Any arguments cause no ambiguity. reveal_type(f(a, 1, 1)) # N: Revealed type is 'builtins.str' reveal_type(f(1, *a)) # N: Revealed type is 'builtins.str' [case testOverloadWithOverlappingItemsAndAnyArgument9] from typing import overload, Any, List @overload def f(x: List[int]) -> List[int]: ... @overload def f(x: List[Any]) -> List[Any]: ... def f(x): pass a: Any b: List[Any] c: List[str] d: List[int] reveal_type(f(a)) # N: Revealed type is 'builtins.list[Any]' reveal_type(f(b)) # N: Revealed type is 'builtins.list[Any]' reveal_type(f(c)) # N: Revealed type is 'builtins.list[Any]' reveal_type(f(d)) # N: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testOverloadWithOverlappingItemsAndAnyArgument10] from typing import overload, Any @overload def f(*, x: int = 3, y: int = 3) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(**kwargs: str) -> str: ... def f(*args, **kwargs): pass # Checking an overload flagged as unsafe is a bit weird, but this is the # cleanest way to make sure 'Any' ambiguity checks work correctly with # keyword arguments. a: Any i: int reveal_type(f(x=a, y=i)) # N: Revealed type is 'builtins.int' reveal_type(f(y=a)) # N: Revealed type is 'Any' reveal_type(f(x=a, y=a)) # N: Revealed type is 'Any' [builtins fixtures/dict.pyi] [case testOverloadWithOverlappingItemsAndAnyArgument11] from typing import overload, Any, Dict @overload def f(x: int = 3, **kwargs: int) -> int: ... @overload def f(**kwargs: str) -> str: ... 
def f(*args, **kwargs): pass a: Dict[str, Any] i: int reveal_type(f(x=i, **a)) # N: Revealed type is 'builtins.int' reveal_type(f(**a)) # N: Revealed type is 'Any' [builtins fixtures/dict.pyi] [case testOverloadWithOverlappingItemsAndAnyArgument12] from typing import overload, Any @overload def f(x: int) -> Any: ... @overload def f(x: str) -> str: ... def f(x): pass a: Any reveal_type(f(a)) # N: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument13] from typing import Any, overload, TypeVar, Generic class slice: pass T = TypeVar('T') class A(Generic[T]): @overload def f(self, x: int) -> T: ... @overload def f(self, x: slice) -> A[T]: ... def f(self, x): ... i: Any a: A[Any] reveal_type(a.f(i)) # N: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument14] from typing import Any, overload, TypeVar, Generic T = TypeVar('T') class Wrapper(Generic[T]): pass class slice: pass class A(Generic[T]): @overload def f(self, x: int) -> Wrapper[T]: ... @overload def f(self, x: slice) -> Wrapper[A[T]]: ... def f(self, x): ... i: Any a: A[Any] reveal_type(a.f(i)) # N: Revealed type is '__main__.Wrapper[Any]' [case testOverloadWithOverlappingItemsAndAnyArgument15] from typing import overload, Any, Union @overload def f(x: int) -> str: ... @overload def f(x: str) -> str: ... def f(x): pass @overload def g(x: int) -> Union[str, int]: ... @overload def g(x: str) -> Union[int, str]: ... def g(x): pass a: Any reveal_type(f(a)) # N: Revealed type is 'builtins.str' reveal_type(g(a)) # N: Revealed type is 'Union[builtins.str, builtins.int]' [case testOverloadWithOverlappingItemsAndAnyArgument16] from typing import overload, Any, Union, Callable @overload def f(x: int) -> Callable[[int, int], int]: ... @overload def f(x: str) -> Callable[[str], str]: ... 
def f(x): pass a: Any reveal_type(f(a)) # N: Revealed type is 'def (*Any, **Any) -> Any' reveal_type(f(a)(a)) # N: Revealed type is 'Any' [case testOverloadOnOverloadWithType] from typing import Any, Type, TypeVar, overload from mod import MyInt T = TypeVar('T') @overload def make(cls: Type[T]) -> T: pass @overload def make() -> Any: pass def make(*args): pass c = make(MyInt) reveal_type(c) # N: Revealed type is 'mod.MyInt*' [file mod.pyi] from typing import overload class MyInt: @overload def __init__(self, x: str) -> None: pass @overload def __init__(self, x: str, y: int) -> None: pass [out] [case testOverloadTupleInstance] from typing import overload, Tuple, Any class A: ... class A1(A): ... class B: ... class C: ... class D: ... @overload def f(x: A) -> A: ... @overload def f(x: Tuple[C]) -> B: ... @overload def f(x: Tuple[A1, int]) -> C: ... # E: Overloaded function signatures 3 and 5 overlap with incompatible return types @overload def f(x: Tuple[A, str]) -> D: ... @overload def f(x: Tuple[A, int]) -> D: ... @overload def f(x: Tuple[()]) -> D: ... def f(x: Any) -> Any:... [case testOverloadTupleEllipsisNumargs] from typing import overload, Tuple, Any class A: ... class B: ... @overload def r1(x: Tuple[()]) -> B: ... # E: Overloaded function signatures 1 and 4 overlap with incompatible return types @overload def r1(x: Tuple[A]) -> B: ... # E: Overloaded function signatures 2 and 4 overlap with incompatible return types @overload def r1(x: Tuple[A, A]) -> B: ... # E: Overloaded function signatures 3 and 4 overlap with incompatible return types @overload def r1(x: Tuple[A, ...]) -> A: ... def r1(x: Any) -> Any: ... @overload def r2(x: Tuple[A, ...]) -> A: ... @overload def r2(x: Tuple[A, A]) -> B: ... # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader @overload def r2(x: Tuple[A]) -> B: ... 
# E: Overloaded function signature 3 will never be matched: signature 1's parameter type(s) are the same or broader @overload def r2(x: Tuple[()]) -> B: ... # E: Overloaded function signature 4 will never be matched: signature 1's parameter type(s) are the same or broader def r2(x: Any) -> Any: ... [builtins fixtures/tuple.pyi] [case testOverloadTupleEllipsisVariance] from typing import overload, Tuple, Any class A: ... class A1(A): ... class B: ... class C: ... class D: ... @overload def r(x: Tuple[A1, ...]) -> A: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def r(x: Tuple[A, ...]) -> B: ... @overload def r(x: Tuple[B, ...]) -> C: ... def r(x: Any) -> Any:... @overload def g(x: A) -> A: ... @overload def g(x: Tuple[A1, ...]) -> B: ... # E: Overloaded function signatures 2 and 3 overlap with incompatible return types @overload def g(x: Tuple[A, A]) -> C: ... @overload def g(x: Tuple[A, B]) -> D: ... def g(x: Any) -> Any:... [builtins fixtures/tuple.pyi] [case testOverloadWithMethodOverrideAndImplementation] from typing import overload, Union, Any class Parent: @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> str: ... def f(self, arg: Any) -> Any: ... class Child1(Parent): @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> str: ... def f(self, arg: Union[int, str]) -> Union[int, str]: ... class Child2(Parent): @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> str: ... def f(self, arg: Union[int, str]) -> int: ... # E: Overloaded function implementation cannot produce return type of signature 2 class Child3(Parent): @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> str: ... def f(self, arg: Any) -> Any: ... class Child4(Parent): @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> str: ... 
def f(self, arg: Union[int, str]) -> Union[int, str]: return True # E: Incompatible return value type (got "bool", expected "Union[int, str]") [builtins fixtures/tuple.pyi] [case testOverloadWithIncompatibleMethodOverrideAndImplementation] from typing import overload, Union, Any class StrSub: pass class ParentWithTypedImpl: @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> str: ... def f(self, arg: Union[int, str]) -> Union[int, str]: ... class Child1(ParentWithTypedImpl): @overload # E: Signature of "f" incompatible with supertype "ParentWithTypedImpl" def f(self, arg: int) -> int: ... @overload def f(self, arg: StrSub) -> str: ... def f(self, arg: Union[int, StrSub]) -> Union[int, str]: ... class Child2(ParentWithTypedImpl): @overload # E: Signature of "f" incompatible with supertype "ParentWithTypedImpl" def f(self, arg: int) -> int: ... @overload def f(self, arg: StrSub) -> str: ... def f(self, arg: Any) -> Any: ... class ParentWithDynamicImpl: @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> str: ... def f(self, arg: Any) -> Any: ... class Child3(ParentWithDynamicImpl): @overload # E: Signature of "f" incompatible with supertype "ParentWithDynamicImpl" def f(self, arg: int) -> int: ... @overload def f(self, arg: StrSub) -> str: ... def f(self, arg: Union[int, StrSub]) -> Union[int, str]: ... class Child4(ParentWithDynamicImpl): @overload # E: Signature of "f" incompatible with supertype "ParentWithDynamicImpl" def f(self, arg: int) -> int: ... @overload def f(self, arg: StrSub) -> str: ... def f(self, arg: Any) -> Any: ... [builtins fixtures/tuple.pyi] [case testOverloadAnyIsConsideredValidReturnSubtype] from typing import Any, overload, Optional @overload def foo(x: None) -> Any: ... @overload def foo(x: Optional[str]) -> str: ... def foo(x): pass @overload def bar(x: None) -> object: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def bar(x: Optional[str]) -> str: ... def bar(x): pass [case testOverloadWithNonPositionalArgs] from typing import overload class A: ... class B: ... class C: ... @overload def foo(*, p1: A, p2: B = B()) -> A: ... @overload def foo(*, p2: B = B()) -> B: ... def foo(p1, p2=None): ... reveal_type(foo()) # N: Revealed type is '__main__.B' reveal_type(foo(p2=B())) # N: Revealed type is '__main__.B' reveal_type(foo(p1=A())) # N: Revealed type is '__main__.A' [case testOverloadWithNonPositionalArgsIgnoresOrder] from typing import overload class A: ... class B(A): ... class X: ... class Y: ... @overload def f(*, p1: X, p2: A) -> X: ... @overload def f(*, p2: B, p1: X) -> Y: ... # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader def f(*, p1, p2): ... @overload def g(*, p1: X, p2: B) -> X: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g(*, p2: A, p1: X) -> Y: ... def g(*, p1, p2): ... [case testOverloadWithVariableArgsAreOverlapping] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(*x: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo1(x: int, y: int, z: int) -> str: ... @overload def foo2(*x: int) -> int: ... @overload def foo2(x: int, y: str, z: int) -> str: ... @overload def bar1(x: int, y: int, z: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def bar1(*x: int) -> int: ... @overload def bar2(x: int, y: str, z: int) -> str: ... @overload def bar2(*x: int) -> int: ... [case testOverloadDetectsPossibleMatchesWithGenerics] from typing import overload, TypeVar, Generic T = TypeVar('T') @overload def foo(x: None, y: None) -> str: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo(x: T, y: T) -> int: ... def foo(x): ... # What if 'T' is 'object'? @overload def bar(x: None, y: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def bar(x: T, y: T) -> int: ... def bar(x, y): ... class Wrapper(Generic[T]): @overload def foo(self, x: None, y: None) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo(self, x: T, y: None) -> int: ... def foo(self, x): ... @overload def bar(self, x: None, y: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def bar(self, x: T, y: T) -> int: ... def bar(self, x, y): ... [case testOverloadFlagsPossibleMatches] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(x: str) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo1(x: str, y: str = ...) -> int: ... @overload def foo2(x: str, y: str = ...) -> int: ... @overload def foo2(x: str) -> str: ... # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader @overload def foo3(x: str) -> str: ... @overload def foo3(x: str, y: str) -> int: ... [case testOverloadPossibleOverlapWithArgsAndKwargs] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(*args: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo1(**kwargs: int) -> str: ... @overload def foo2(**kwargs: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo2(*args: int) -> int: ... 
[builtins fixtures/dict.pyi] [case testOverloadPossibleOverlapWithVarargs] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(*args: int) -> int: ... @overload def foo1(*args2: int) -> str: ... # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader @overload def foo2(*args: int) -> str: ... @overload def foo2(*args2: str) -> int: ... @overload def foo3(*args: int) -> str: ... @overload def foo3(*args: str) -> int: ... [case testOverloadPossibleOverlapWithVarargs2] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(*args: str) -> int: ... @overload def foo1(x: int, *args2: int) -> str: ... @overload def foo2(x: int, *args: int) -> str: ... @overload def foo2(*args2: str) -> int: ... @overload def foo3(*args: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo3(x: int, *args2: int) -> str: ... @overload def foo4(x: int, *args: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo4(*args2: int) -> int: ... [case testOverloadPossibleOverlapWithVarargs3] from wrapper import * [file wrapper.pyi] from typing import overload class Other: ... @overload def foo1(x: Other, *args: int) -> str: ... @overload def foo1(*args: str) -> int: ... @overload def foo2(*args: int) -> str: ... @overload def foo2(x: Other, *args: str) -> int: ... @overload def foo3(x: Other = ..., *args: int) -> str: ... @overload def foo3(*args: str) -> int: ... @overload def foo4(*args: int) -> str: ... @overload def foo4(x: Other = ..., *args: str) -> int: ... [case testOverloadPossibleOverlapWithVarargs4] from typing import overload @overload def foo1(x: int = 0, y: int = 0) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo1(*xs: int) -> str: ... 
def foo1(*args): pass @overload def foo2(*xs: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo2(x: int = 0, y: int = 0) -> int: ... def foo2(*args): pass [case testOverloadPossibleOverlapWithKwargs] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(**kwargs: int) -> int: ... @overload def foo1(**kwargs2: int) -> str: ... # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader @overload def foo2(**kwargs: int) -> str: ... @overload def foo2(**kwargs2: str) -> int: ... @overload def foo(**kwargs: int) -> str: ... @overload def foo(**kwargs: str) -> int: ... [builtins fixtures/dict.pyi] [case testOverloadPossibleOverlapMixingNamedArgsWithVarargs] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(x: str, *, y: str) -> str: ... @overload def foo1(*x: str) -> int: ... @overload def foo2(*x: str) -> int: ... @overload def foo2(x: str, *, y: str) -> str: ... [case testOverloadPossibleOverlapMixingOptionalArgsWithVarargs] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(x: str, y: str = ..., z: str = ...) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo1(*x: str) -> int: ... @overload def foo2(*x: str) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo2(x: str, y: str = ..., z: str = ...) -> str: ... @overload def foo3(x: int, y: str = ..., z: str = ...) -> str: ... @overload def foo3(*x: str) -> int: ... [case testOverloadPossibleOverlapMixingOptionalArgsWithVarargs2] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(x: str, y: str = ..., z: int = ...) -> str: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo1(*x: str) -> int: ... @overload def foo2(x: str, y: str = ..., z: int = ...) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo2(*x: str) -> int: ... [case testOverloadPossibleOverlapMixingNamedArgsWithKwargs] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(*, x: str, y: str, z: str) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo1(**x: str) -> int: ... @overload def foo2(**x: str) -> int: ... @overload def foo2(*, x: str, y: str, z: str) -> str: ... # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader @overload def foo3(*, x: int, y: str, z: str) -> str: ... @overload def foo3(*x: str) -> int: ... [builtins fixtures/dict.pyi] [case testOverloadPossibleOverlapMixingNamedArgsWithKwargs2] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(*, x: str, y: str, z: int) -> str: ... @overload def foo1(**x: str) -> int: ... @overload def foo2(**x: str) -> int: ... @overload def foo2(*, x: str, y: str, z: int) -> str: ... @overload def foo3(*, x: str, y: str, z: int = ...) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo3(**x: str) -> int: ... @overload def foo4(**x: str) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo4(*, x: str, y: str, z: int = ...) -> str: ... [builtins fixtures/dict.pyi] [case testOverloadPossibleOverlapMixingNamedArgsWithKwargs3] from wrapper import * [file wrapper.pyi] from typing import overload @overload def foo1(x: str, *, y: str, z: str) -> str: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo1(**x: str) -> int: ... @overload def foo2(**x: str) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo2(x: str, *, y: str, z: str) -> str: ... [builtins fixtures/dict.pyi] [case testOverloadVarargInputAndVarargDefinition] from typing import overload, List class A: ... class B: ... class C: ... @overload def foo(x: int) -> A: ... @overload def foo(x: int, y: int) -> B: ... @overload def foo(x: int, y: int, z: int, *args: int) -> C: ... def foo(*args): pass reveal_type(foo(1)) # N: Revealed type is '__main__.A' reveal_type(foo(1, 2)) # N: Revealed type is '__main__.B' reveal_type(foo(1, 2, 3)) # N: Revealed type is '__main__.C' reveal_type(foo(*[1])) # N: Revealed type is '__main__.C' reveal_type(foo(*[1, 2])) # N: Revealed type is '__main__.C' reveal_type(foo(*[1, 2, 3])) # N: Revealed type is '__main__.C' x: List[int] reveal_type(foo(*x)) # N: Revealed type is '__main__.C' y: List[str] foo(*y) # E: No overload variant of "foo" matches argument type "List[str]" \ # N: Possible overload variants: \ # N: def foo(x: int, y: int, z: int, *args: int) -> C \ # N: def foo(x: int) -> A \ # N: def foo(x: int, y: int) -> B [builtins fixtures/list.pyi] [case testOverloadMultipleVarargDefinition] from typing import overload, List, Any class A: ... class B: ... class C: ... class D: ... @overload def foo(x: int) -> A: ... @overload def foo(x: int, y: int) -> B: ... @overload def foo(x: int, y: int, z: int, *args: int) -> C: ... @overload def foo(*x: str) -> D: ... 
def foo(*args): pass reveal_type(foo(*[1, 2])) # N: Revealed type is '__main__.C' reveal_type(foo(*["a", "b"])) # N: Revealed type is '__main__.D' x: List[Any] reveal_type(foo(*x)) # N: Revealed type is 'Any' [builtins fixtures/list.pyi] [case testOverloadMultipleVarargDefinitionComplex] from typing import TypeVar, overload, Any, Callable T1 = TypeVar('T1') T2 = TypeVar('T2') T3 = TypeVar('T3') @overload def chain_call(input_value: T1, f1: Callable[[T1], T2]) -> T2: ... @overload def chain_call(input_value: T1, f1: Callable[[T1], T2], f2: Callable[[T2], T3]) -> T3: ... @overload def chain_call(input_value: T1, *f_rest: Callable[[T1], T1]) -> T1: ... @overload def chain_call(input_value: T1, f1: Callable[[T1], T2], f2: Callable[[T2], T3], f3: Callable[[T3], Any], *f_rest: Callable[[Any], Any]) -> Any: ... def chain_call(input_value, *f_rest): for function in f_rest: input_value = function(input_value) return input_value class A: ... class B: ... class C: ... class D: ... def f(x: A) -> A: ... def f1(x: A) -> B: ... def f2(x: B) -> C: ... def f3(x: C) -> D: ... reveal_type(chain_call(A(), f1, f2)) # N: Revealed type is '__main__.C*' reveal_type(chain_call(A(), f1, f2, f3)) # N: Revealed type is 'Any' reveal_type(chain_call(A(), f, f, f, f)) # N: Revealed type is '__main__.A' [builtins fixtures/list.pyi] [case testOverloadVarargsSelection] from typing import overload, Tuple @overload def f(x: int) -> Tuple[int]: ... @overload def f(x: int, y: int) -> Tuple[int, int]: ... @overload def f(*xs: int) -> Tuple[int, ...]: ... 
def f(*args): pass i: int reveal_type(f(i)) # N: Revealed type is 'Tuple[builtins.int]' reveal_type(f(i, i)) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(f(i, i, i)) # N: Revealed type is 'builtins.tuple[builtins.int]' reveal_type(f(*[])) # N: Revealed type is 'builtins.tuple[builtins.int]' reveal_type(f(*[i])) # N: Revealed type is 'builtins.tuple[builtins.int]' reveal_type(f(*[i, i])) # N: Revealed type is 'builtins.tuple[builtins.int]' reveal_type(f(*[i, i, i])) # N: Revealed type is 'builtins.tuple[builtins.int]' [builtins fixtures/list.pyi] [case testOverloadVarargsSelectionWithTuples] from typing import overload, Tuple @overload def f(x: int) -> Tuple[int]: ... @overload def f(x: int, y: int) -> Tuple[int, int]: ... @overload def f(*xs: int) -> Tuple[int, ...]: ... def f(*args): pass i: int reveal_type(f(*())) # N: Revealed type is 'builtins.tuple[builtins.int]' reveal_type(f(*(i,))) # N: Revealed type is 'Tuple[builtins.int]' reveal_type(f(*(i, i))) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(f(*(i, i, i))) # N: Revealed type is 'builtins.tuple[builtins.int]' [builtins fixtures/tuple.pyi] [case testOverloadVarargsSelectionWithNamedTuples] from typing import overload, Tuple, NamedTuple @overload def f(x: int, y: int) -> Tuple[int, int]: ... @overload def f(*xs: int) -> Tuple[int, ...]: ... def f(*args): pass A = NamedTuple('A', [('x', int), ('y', int)]) B = NamedTuple('B', [('a', int), ('b', int)]) C = NamedTuple('C', [('a', int), ('b', int), ('c', int)]) a: A b: B c: C reveal_type(f(*a)) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(f(*b)) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(f(*c)) # N: Revealed type is 'builtins.tuple[builtins.int]' [builtins fixtures/tuple.pyi] [case testOverloadKwargsSelectionWithDict] from typing import overload, Tuple, Dict @overload def f(*, x: int) -> Tuple[int]: ... @overload def f(*, x: int, y: int) -> Tuple[int, int]: ... 
@overload def f(**xs: int) -> Tuple[int, ...]: ... def f(**kwargs): pass empty: Dict[str, int] reveal_type(f(**empty)) # N: Revealed type is 'builtins.tuple[builtins.int]' reveal_type(f(**{'x': 4})) # N: Revealed type is 'builtins.tuple[builtins.int]' reveal_type(f(**{'x': 4, 'y': 4})) # N: Revealed type is 'builtins.tuple[builtins.int]' reveal_type(f(**{'a': 4, 'b': 4, 'c': 4})) # N: Revealed type is 'builtins.tuple[builtins.int]' [builtins fixtures/dict.pyi] [case testOverloadKwargsSelectionWithTypedDict] from typing import overload, Tuple from typing_extensions import TypedDict @overload def f(*, x: int) -> Tuple[int]: ... @overload def f(*, x: int, y: int) -> Tuple[int, int]: ... @overload def f(**xs: int) -> Tuple[int, ...]: ... def f(**args): pass A = TypedDict('A', {'x': int}) B = TypedDict('B', {'x': int, 'y': int}) C = TypedDict('C', {'x': int, 'y': int, 'z': int}) a: A b: B c: C reveal_type(f(**a)) # N: Revealed type is 'Tuple[builtins.int]' reveal_type(f(**b)) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(f(**c)) # N: Revealed type is 'builtins.tuple[builtins.int]' [builtins fixtures/dict.pyi] [case testOverloadVarargsAndKwargsSelection] from typing import overload, Any, Tuple, Dict class A: pass class B(A): pass @overload def f(x: int, y: int) -> B: pass @overload def f(x: int, y: int, **kwargs: int) -> A: pass @overload def f(*args: int, **kwargs: int) -> Any: pass def f(*args, **kwargs): pass a: Tuple[int, int] b: Tuple[int, ...] c: Dict[str, int] reveal_type(f(*a, **c)) # N: Revealed type is '__main__.A' reveal_type(f(*b, **c)) # N: Revealed type is '__main__.A' reveal_type(f(*a)) # N: Revealed type is '__main__.B' reveal_type(f(*b)) # N: Revealed type is 'Any' # TODO: Should this be 'Any' instead? # The first matching overload with a kwarg is f(int, int, **int) -> A, # but f(*int, **int) -> Any feels like a better fit. 
reveal_type(f(**c)) # N: Revealed type is '__main__.A' [builtins fixtures/args.pyi] [case testOverloadWithPartiallyOverlappingUnions] from typing import overload, Union class A: ... class B: ... class C: ... class D: ... @overload def f(x: Union[A, B]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: Union[B, C]) -> str: ... def f(x): ... @overload def g(x: Union[A, B]) -> int: ... @overload def g(x: Union[B, C]) -> int: ... def g(x): ... @overload def h(x: Union[A, B]) -> int: ... @overload def h(x: Union[C, D]) -> str: ... def h(x): ... @overload def i(x: Union[A, B]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def i(x: Union[A, B, C]) -> str: ... def i(x): ... [case testOverloadWithPartiallyOverlappingUnionsNested] from typing import overload, Union, List class A: ... class B: ... class C: ... class D: ... @overload def f(x: List[Union[A, B]]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: List[Union[B, C]]) -> str: ... def f(x): ... @overload def g(x: List[Union[A, B]]) -> int: ... @overload def g(x: List[Union[B, C]]) -> int: ... def g(x): ... @overload def h(x: List[Union[A, B]]) -> int: ... @overload def h(x: List[Union[C, D]]) -> str: ... def h(x): ... @overload def i(x: List[Union[A, B]]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def i(x: List[Union[A, B, C]]) -> str: ... def i(x): ... [builtins fixtures/list.pyi] [case testOverloadPartialOverlapWithUnrestrictedTypeVar] from typing import TypeVar, overload T = TypeVar('T') @overload def f(x: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: T) -> T: ... def f(x): ... @overload def g(x: int) -> int: ... @overload def g(x: T) -> T: ... def g(x): ... 
[case testOverloadPartialOverlapWithUnrestrictedTypeVarNested] from typing import TypeVar, overload, List T = TypeVar('T') @overload def f1(x: List[int]) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f1(x: List[T]) -> T: ... def f1(x): ... @overload def f2(x: List[int]) -> List[str]: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f2(x: List[T]) -> List[T]: ... def f2(x): ... @overload def g1(x: List[int]) -> int: ... @overload def g1(x: List[T]) -> T: ... def g1(x): ... @overload def g2(x: List[int]) -> List[int]: ... @overload def g2(x: List[T]) -> List[T]: ... def g2(x): ... [builtins fixtures/list.pyi] [case testOverloadPartialOverlapWithUnrestrictedTypeVarInClass] from typing import TypeVar, overload, Generic T = TypeVar('T') class Wrapper(Generic[T]): @overload def f(self, x: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(self, x: T) -> T: ... def f(self, x): ... # TODO: This shouldn't trigger an error message? # Related to testTypeCheckOverloadImplementationTypeVarDifferingUsage2? # See https://github.com/python/mypy/issues/5510 @overload def g(self, x: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g(self, x: T) -> T: ... def g(self, x): ... [case testOverloadPartialOverlapWithUnrestrictedTypeVarInClassNested] from typing import TypeVar, overload, Generic, List T = TypeVar('T') class Wrapper(Generic[T]): @overload def f1(self, x: List[int]) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f1(self, x: List[T]) -> T: ... def f1(self, x): ... @overload def f2(self, x: List[int]) -> List[str]: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f2(self, x: List[T]) -> List[T]: ... def f2(self, x): ... 
# TODO: This shouldn't trigger an error message? # See https://github.com/python/mypy/issues/5510 @overload def g1(self, x: List[int]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g1(self, x: List[T]) -> T: ... def g1(self, x): ... @overload def g2(self, x: List[int]) -> List[int]: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g2(self, x: List[T]) -> List[T]: ... def g2(self, x): ... [builtins fixtures/list.pyi] [case testOverloadTypedDictDifferentRequiredKeysMeansDictsAreDisjoint] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': int}) B = TypedDict('B', {'x': int, 'y': str}) @overload def f(x: A) -> int: ... @overload def f(x: B) -> str: ... def f(x): pass [builtins fixtures/dict.pyi] [case testOverloadedTypedDictPartiallyOverlappingRequiredKeys] from typing import overload, Union from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': Union[int, str]}) B = TypedDict('B', {'x': int, 'y': Union[str, float]}) @overload def f(x: A) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: B) -> str: ... def f(x): pass @overload def g(x: A) -> int: ... @overload def g(x: B) -> object: ... def g(x): pass [builtins fixtures/dict.pyi] [case testOverloadedTypedDictFullyNonTotalDictsAreAlwaysPartiallyOverlapping] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}, total=False) B = TypedDict('B', {'a': bool}, total=False) C = TypedDict('C', {'x': str, 'y': int}, total=False) @overload def f(x: A) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: B) -> str: ... def f(x): pass @overload def g(x: A) -> int: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g(x: C) -> str: ... def g(x): pass [builtins fixtures/dict.pyi] [case testOverloadedTotalAndNonTotalTypedDictsCanPartiallyOverlap] from typing import overload, Union from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}) B = TypedDict('B', {'x': Union[int, str], 'y': str, 'z': int}, total=False) @overload def f1(x: A) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f1(x: B) -> str: ... def f1(x): pass @overload def f2(x: B) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f2(x: A) -> str: ... def f2(x): pass [builtins fixtures/dict.pyi] [case testOverloadedTypedDictsWithSomeOptionalKeysArePartiallyOverlapping] from typing import overload, Union from mypy_extensions import TypedDict class A(TypedDict): x: int y: int class B(TypedDict, total=False): z: str class C(TypedDict, total=False): z: int @overload def f(x: B) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: C) -> str: ... def f(x): pass [builtins fixtures/dict.pyi] [case testOverloadedPartiallyOverlappingInheritedTypes1] from typing import overload, List, Union, TypeVar, Generic class A: pass class B: pass class C: pass T = TypeVar('T') class ListSubclass(List[T]): pass class Unrelated(Generic[T]): pass @overload def f(x: List[Union[A, B]]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: ListSubclass[Union[B, C]]) -> str: ... def f(x): pass @overload def g(x: List[Union[A, B]]) -> int: ... @overload def g(x: Unrelated[Union[B, C]]) -> str: ... 
def g(x): pass [builtins fixtures/list.pyi] [case testOverloadedPartiallyOverlappingInheritedTypes2] from typing import overload, List, Union class A: pass class B: pass class C: pass class ListSubclass(List[Union[B, C]]): pass @overload def f(x: List[Union[A, B]]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: ListSubclass) -> str: ... def f(x): pass [builtins fixtures/list.pyi] [case testOverloadedPartiallyOverlappingInheritedTypes3] from typing import overload, Union, Dict, TypeVar class A: pass class B: pass class C: pass S = TypeVar('S') class DictSubclass(Dict[str, S]): pass @overload def f(x: Dict[str, Union[A, B]]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: DictSubclass[Union[B, C]]) -> str: ... def f(x): pass [builtins fixtures/dict.pyi] [case testOverloadedPartiallyOverlappingTypeVarsAndUnion] from typing import overload, TypeVar, Union class A: pass class B: pass class C: pass S = TypeVar('S', A, B) @overload def f(x: S) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: Union[B, C]) -> str: ... def f(x): pass @overload def g(x: Union[B, C]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g(x: S) -> str: ... def g(x): pass [case testOverloadPartiallyOverlappingTypeVarsIdentical] from typing import overload, TypeVar, Union T = TypeVar('T') class A: pass class B: pass class C: pass @overload def f(x: T, y: T, z: Union[A, B]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: T, y: T, z: Union[B, C]) -> str: ... def f(x, y, z): pass [case testOverloadedPartiallyOverlappingCallables] from typing import overload, Union, Callable class A: pass class B: pass class C: pass @overload def f(x: Callable[[Union[A, B]], int]) -> int: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: Callable[[Union[B, C]], int]) -> str: ... def f(x): pass [case testOverloadNotConfusedForProperty] from typing import overload class PropertyClass: @property def foo(self) -> str: return "..." @foo.setter def foo(self, value: str) -> None: pass @foo.deleter def foo(self) -> None: pass class OverloadClass: @overload def foo(self) -> str: pass @overload def foo(self, value: str) -> None: pass @overload def foo(self) -> None: pass # E: Overloaded function signature 3 will never be matched: signature 1's parameter type(s) are the same or broader def foo(self, *args): pass [builtins fixtures/property.pyi] [case testOverloadInferUnionReturnBasic] from typing import overload, Union class A: ... class B: ... class C: ... class D: ... @overload def f1(x: A) -> B: ... @overload def f1(x: C) -> D: ... def f1(x): ... arg1: Union[A, C] reveal_type(f1(arg1)) # N: Revealed type is 'Union[__main__.B, __main__.D]' arg2: Union[A, B] f1(arg2) # E: Argument 1 to "f1" has incompatible type "Union[A, B]"; expected "A" @overload def f2(x: A) -> B: ... @overload def f2(x: C) -> B: ... def f2(x): ... reveal_type(f2(arg1)) # N: Revealed type is '__main__.B' [case testOverloadInferUnionReturnMultipleArguments] from typing import overload, Union class A: ... class B: ... class C: ... class D: ... @overload def f1(x: A, y: C) -> B: ... @overload def f1(x: C, y: A) -> D: ... def f1(x, y): ... arg1: Union[A, C] reveal_type(f1(arg1, arg1)) @overload def f2(x: A, y: C) -> B: ... @overload def f2(x: C, y: C) -> D: ... def f2(x, y): ... 
reveal_type(f2(arg1, arg1)) reveal_type(f2(arg1, C())) [out] main:15: note: Revealed type is '__main__.B' main:15: error: Argument 1 to "f1" has incompatible type "Union[A, C]"; expected "A" main:15: error: Argument 2 to "f1" has incompatible type "Union[A, C]"; expected "C" main:23: note: Revealed type is '__main__.B' main:23: error: Argument 1 to "f2" has incompatible type "Union[A, C]"; expected "A" main:23: error: Argument 2 to "f2" has incompatible type "Union[A, C]"; expected "C" main:24: note: Revealed type is 'Union[__main__.B, __main__.D]' [case testOverloadInferUnionRespectsVariance] from typing import overload, TypeVar, Union, Generic class A: pass class B(A): pass class C(B): pass T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) class WrapperCo(Generic[T_co]): pass class WrapperContra(Generic[T_contra]): pass @overload def foo(x: WrapperCo[B]) -> int: ... @overload def foo(x: WrapperContra[B]) -> str: ... def foo(x): pass compat: Union[WrapperCo[C], WrapperContra[A]] reveal_type(foo(compat)) # N: Revealed type is 'Union[builtins.int, builtins.str]' not_compat: Union[WrapperCo[A], WrapperContra[C]] foo(not_compat) # E: Argument 1 to "foo" has incompatible type "Union[WrapperCo[A], WrapperContra[C]]"; expected "WrapperCo[B]" [case testOverloadInferUnionIfParameterNamesAreDifferent] from typing import overload, Union class A: ... class B: ... class C: ... @overload def f(x: A) -> B: ... @overload def f(y: B) -> C: ... def f(x): ... x: Union[A, B] reveal_type(f(A())) # N: Revealed type is '__main__.B' reveal_type(f(B())) # N: Revealed type is '__main__.C' reveal_type(f(x)) # N: Revealed type is 'Union[__main__.B, __main__.C]' [case testOverloadInferUnionReturnFunctionsWithKwargs] from typing import overload, Union, Optional class A: ... class B: ... class C: ... class D(B, C): ... @overload def f(x: A) -> D: ... @overload def f(x: A, y: Optional[B] = None) -> C: ... 
@overload def f(x: A, z: Optional[C] = None) -> B: ... def f(x, y=None, z=None): ... reveal_type(f(A(), B())) # N: Revealed type is '__main__.C' reveal_type(f(A(), C())) # N: Revealed type is '__main__.B' arg: Union[B, C] reveal_type(f(A(), arg)) # N: Revealed type is 'Union[__main__.C, __main__.B]' reveal_type(f(A())) # N: Revealed type is '__main__.D' [builtins fixtures/tuple.pyi] [case testOverloadInferUnionWithDifferingLengths] from typing import overload, Union class Parent: ... class Child(Parent): ... class A: ... class B: ... @overload def f(x: A) -> Child: ... @overload def f(x: B, y: B = B()) -> Parent: ... def f(*args): ... x: Union[A, B] reveal_type(f(x)) # N: Revealed type is '__main__.Parent' f(x, B()) # E: Argument 1 to "f" has incompatible type "Union[A, B]"; expected "B" [case testOverloadInferUnionWithMixOfPositionalAndOptionalArgs] # flags: --strict-optional from typing import overload, Union, Optional class A: ... class B: ... @overload def f(x: A) -> int: ... @overload def f(x: Optional[B] = None) -> str: ... def f(*args): ... x: Union[A, B] y: Optional[A] z: Union[A, Optional[B]] reveal_type(f(x)) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(f(y)) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(f(z)) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(f()) # N: Revealed type is 'builtins.str' [case testOverloadingInferUnionReturnWithTypevarWithValueRestriction] from typing import overload, Union, TypeVar, Generic class A: pass class B: pass class C: pass T = TypeVar('T', B, C) class Wrapper(Generic[T]): @overload def f(self, x: T) -> B: ... @overload def f(self, x: A) -> C: ... def f(self, x): ... 
obj: Wrapper[B] = Wrapper() x: Union[A, B] reveal_type(obj.f(A())) # N: Revealed type is '__main__.C' reveal_type(obj.f(B())) # N: Revealed type is '__main__.B' reveal_type(obj.f(x)) # N: Revealed type is 'Union[__main__.C, __main__.B]' [case testOverloadingInferUnionReturnWithFunctionTypevarReturn] from typing import overload, Union, TypeVar, Generic T = TypeVar('T') class W1(Generic[T]): pass class W2(Generic[T]): pass class A: pass class B: pass @overload def foo(x: W1[T]) -> T: ... @overload def foo(x: W2[T]) -> T: ... def foo(x): ... def bar(x: Union[W1[T], W2[T]]) -> T: ... def wrapper() -> None: obj1: Union[W1[A], W2[A]] a1: A = foo(obj1) a2 = foo(obj1) reveal_type(a1) # N: Revealed type is '__main__.A' reveal_type(a2) # N: Revealed type is '__main__.A*' obj2: Union[W1[A], W2[B]] reveal_type(foo(obj2)) # N: Revealed type is 'Union[__main__.A*, __main__.B*]' bar(obj2) # E: Cannot infer type argument 1 of "bar" b1_overload: A = foo(obj2) # E: Incompatible types in assignment (expression has type "Union[A, B]", variable has type "A") b1_union: A = bar(obj2) # E: Cannot infer type argument 1 of "bar" [case testOverloadingInferUnionReturnWithObjectTypevarReturn] from typing import overload, Union, TypeVar, Generic T = TypeVar('T') class W1(Generic[T]): pass class W2(Generic[T]): pass class A: pass class B: pass class SomeType(Generic[T]): @overload def foo(self, x: W1[T]) -> T: ... @overload def foo(self, x: W2[T]) -> T: ... def foo(self, x): ... def bar(self, x: Union[W1[T], W2[T]]) -> T: ... def wrapper() -> None: obj1: Union[W1[A], W2[A]] a1 = SomeType[A]().foo(obj1) reveal_type(a1) # N: Revealed type is '__main__.A*' # Note: These should be fine, but mypy has an unrelated bug # that makes them error out? 
a2_overload: A = SomeType().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[]" a2_union: A = SomeType().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[], W2[]]" SomeType().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[]" SomeType().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[], W2[]]" [case testOverloadingInferUnionReturnWithBadObjectTypevarReturn] from typing import overload, Union, TypeVar, Generic T = TypeVar('T') class W1(Generic[T]): pass class W2(Generic[T]): pass class A: pass class B: pass class SomeType(Generic[T]): @overload def foo(self, x: W1[T]) -> T: ... @overload def foo(self, x: W2[T]) -> T: ... def foo(self, x): ... def bar(self, x: Union[W1[T], W2[T]]) -> T: ... def wrapper(mysterious: T) -> T: obj1: Union[W1[A], W2[B]] SomeType().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "W1[]" SomeType().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "Union[W1[], W2[]]" SomeType[A]().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "W1[A]" SomeType[A]().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "Union[W1[A], W2[A]]" SomeType[T]().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "W1[T]" SomeType[T]().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "Union[W1[T], W2[T]]" return mysterious [case testOverloadingInferUnionReturnWithMixedTypevars] from typing import overload, Generic, TypeVar, List, Tuple, Union class A: pass class B(A): pass class C(A): pass T = TypeVar('T', bound=A) 
S = TypeVar('S') class Dummy(Generic[T]): @overload def foo(self, x: List[Tuple[T, S]], y: S) -> T: ... @overload def foo(self, x: List[S], y: S) -> S: ... def foo(self, x: Union[List[Tuple[T, S]], List[S]], y: S) -> Union[T, S]: ... T1 = TypeVar('T1', bound=A) def t_is_same_bound(arg1: T1, arg2: S) -> Tuple[T1, S]: x1: Union[List[S], List[Tuple[T1, S]]] y1: S reveal_type(Dummy[T1]().foo(x1, y1)) # N: Revealed type is 'Union[S`-2, T1`-1]' x2: Union[List[T1], List[Tuple[T1, T1]]] y2: T1 reveal_type(Dummy[T1]().foo(x2, y2)) # N: Revealed type is 'T1`-1' return arg1, arg2 [builtins fixtures/list.pyi] [case testOverloadingInferUnionReturnWithMixedTypevarsInnerMismatch] from typing import overload, Generic, TypeVar, List, Tuple, Union class A: pass class B(A): pass class C(A): pass T = TypeVar('T', bound=A) S = TypeVar('S') class Dummy(Generic[T]): @overload def foo(self, x: List[Tuple[T, S]], y: S) -> T: ... @overload def foo(self, x: List[S], y: S) -> S: ... def foo(self, x: Union[List[Tuple[T, S]], List[S]], y: S) -> Union[T, S]: ... 
T1 = TypeVar('T1', bound=A) def t_is_same_bound(arg1: T1, arg2: S) -> Tuple[T1, S]: # The arguments in the tuple are swapped x3: Union[List[S], List[Tuple[S, T1]]] y3: S Dummy[T1]().foo(x3, y3) # E: Cannot infer type argument 1 of "foo" of "Dummy" \ # E: Argument 1 to "foo" of "Dummy" has incompatible type "Union[List[S], List[Tuple[S, T1]]]"; expected "List[Tuple[T1, Any]]" x4: Union[List[int], List[Tuple[C, int]]] y4: int reveal_type(Dummy[C]().foo(x4, y4)) # N: Revealed type is 'Union[builtins.int*, __main__.C]' Dummy[A]().foo(x4, y4) # E: Argument 1 to "foo" of "Dummy" has incompatible type "Union[List[int], List[Tuple[C, int]]]"; expected "List[Tuple[A, int]]" return arg1, arg2 [builtins fixtures/list.pyi] [case testOverloadingInferUnionReturnWithMixedTypevarsTighterBound] from typing import overload, Generic, TypeVar, List, Tuple, Union class A: pass class B(A): pass class C(A): pass T = TypeVar('T', bound=A) S = TypeVar('S') class Dummy(Generic[T]): @overload def foo(self, x: List[Tuple[T, S]], y: S) -> T: ... @overload def foo(self, x: List[S], y: S) -> S: ... def foo(self, x: Union[List[Tuple[T, S]], List[S]], y: S) -> Union[T, S]: ... T1 = TypeVar('T1', bound=B) def t_is_tighter_bound(arg1: T1, arg2: S) -> Tuple[T1, S]: x1: Union[List[S], List[Tuple[T1, S]]] y1: S reveal_type(Dummy[T1]().foo(x1, y1)) # N: Revealed type is 'Union[S`-2, T1`-1]' x2: Union[List[T1], List[Tuple[T1, T1]]] y2: T1 reveal_type(Dummy[T1]().foo(x2, y2)) # N: Revealed type is 'T1`-1' return arg1, arg2 [builtins fixtures/list.pyi] [case testOverloadingInferUnionReturnWithTypevarsAndValueRestrictions] from typing import overload, Generic, TypeVar, List, Tuple, Union class A: pass class B(A): pass class C(A): pass T = TypeVar('T', bound=A) S = TypeVar('S') class Dummy(Generic[T]): @overload def foo(self, x: List[Tuple[T, S]], y: S) -> T: ... @overload def foo(self, x: List[S], y: S) -> S: ... def foo(self, x: Union[List[Tuple[T, S]], List[S]], y: S) -> Union[T, S]: ... 
T3 = TypeVar('T3', B, C) def t_is_compatible_bound(arg1: T3, arg2: S) -> Tuple[T3, S]: x1: Union[List[S], List[Tuple[T3, S]]] y1: S reveal_type(Dummy[T3]().foo(x1, y1)) x2: Union[List[T3], List[Tuple[T3, T3]]] y2: T3 reveal_type(Dummy[T3]().foo(x2, y2)) return arg1, arg2 [builtins fixtures/list.pyi] [out] main:22: note: Revealed type is 'Union[S`-2, __main__.B]' main:22: note: Revealed type is 'Union[S`-2, __main__.C]' main:26: note: Revealed type is '__main__.B*' main:26: note: Revealed type is '__main__.C*' [case testOverloadInferUnionReturnWithInconsistentTypevarNames] from typing import overload, TypeVar, Union T = TypeVar('T') S = TypeVar('S') @overload def consistent(x: T, y: str) -> T: ... @overload def consistent(x: T, y: int) -> T: ... def consistent(x: T, y: Union[str, int]) -> T: return x @overload def inconsistent(x: T, y: str) -> T: ... @overload def inconsistent(x: S, y: int) -> S: ... def inconsistent(x: T, y: Union[str, int]) -> T: return x def test(x: T) -> T: y: Union[str, int] reveal_type(consistent(x, y)) # N: Revealed type is 'T`-1' # On one hand, this overload is defined in a weird way; on the other, there's technically nothing wrong with it. inconsistent(x, y) return x [case testOverloadsAndNoneWithoutStrictOptional] # flags: --no-strict-optional from typing import overload, Optional @overload def f(x: None) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: object) -> str: ... def f(x): ... # We pretend strict-optional is enabled for overload definitions, # even in non-strict optional mode @overload def g(x: None) -> int: ... @overload def g(x: int) -> str: ... def g(x): ... 
# Calls are still checked normally though a: None b: int c: Optional[int] reveal_type(g(a)) # N: Revealed type is 'builtins.int' reveal_type(g(b)) # N: Revealed type is 'builtins.str' reveal_type(g(c)) # N: Revealed type is 'builtins.str' [case testOverloadsAndNoneWithStrictOptional] # flags: --strict-optional from typing import overload, Optional @overload def f(x: None) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: object) -> str: ... def f(x): ... @overload def g(x: None) -> int: ... @overload def g(x: int) -> str: ... def g(x): ... a: None b: int c: Optional[int] reveal_type(g(a)) # N: Revealed type is 'builtins.int' reveal_type(g(b)) # N: Revealed type is 'builtins.str' reveal_type(g(c)) # N: Revealed type is 'Union[builtins.str, builtins.int]' [case testOverloadsNoneAndTypeVarsWithNoStrictOptional] # flags: --no-strict-optional from typing import Callable, Iterable, TypeVar, overload, Optional T = TypeVar('T') S = TypeVar('S') @overload def mymap(func: None, seq: Iterable[T]) -> Iterable[T]: ... @overload def mymap(func: Callable[[T], S], seq: Iterable[T]) -> Iterable[S]: ... def mymap(*args): ... seq = [1, 2, 3] f1: Callable[[int], str] f2: None f3: Optional[Callable[[int], str]] reveal_type(mymap(f1, seq)) # N: Revealed type is 'typing.Iterable[builtins.str*]' reveal_type(mymap(f2, seq)) # N: Revealed type is 'typing.Iterable[builtins.int*]' reveal_type(mymap(f3, seq)) # N: Revealed type is 'typing.Iterable[builtins.str*]' [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsNoneAndTypeVarsWithStrictOptional] # flags: --strict-optional from typing import Callable, Iterable, TypeVar, overload, Optional T = TypeVar('T') S = TypeVar('S') @overload def mymap(func: None, seq: Iterable[T]) -> Iterable[T]: ... @overload def mymap(func: Callable[[T], S], seq: Iterable[T]) -> Iterable[S]: ... def mymap(*args): ... 
seq = [1, 2, 3] f1: Callable[[int], str] f2: None f3: Optional[Callable[[int], str]] reveal_type(mymap(f1, seq)) # N: Revealed type is 'typing.Iterable[builtins.str*]' reveal_type(mymap(f2, seq)) # N: Revealed type is 'typing.Iterable[builtins.int*]' reveal_type(mymap(f3, seq)) # N: Revealed type is 'Union[typing.Iterable[builtins.str*], typing.Iterable[builtins.int*]]' [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsAndNoReturnNarrowTypeNoStrictOptional1] # flags: --no-strict-optional from typing import overload, Union, NoReturn @overload def narrow_int(x: str) -> NoReturn: ... @overload def narrow_int(x: int) -> int: ... def narrow_int(x: Union[int, str]) -> Union[int, NoReturn]: assert isinstance(x, int) return x def test_narrow_int() -> None: a: Union[int, str] if int(): a = narrow_int(a) reveal_type(a) # N: Revealed type is 'builtins.int' b: int if int(): b = narrow_int(b) reveal_type(b) # N: Revealed type is 'builtins.int' c: str if int(): c = narrow_int(c) reveal_type(c) # Note: branch is now dead, so no type is revealed # TODO: maybe we should make mypy report a warning instead? [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional1] # flags: --strict-optional from typing import overload, Union, NoReturn @overload def narrow_int(x: str) -> NoReturn: ... @overload def narrow_int(x: int) -> int: ... def narrow_int(x: Union[int, str]) -> Union[int, NoReturn]: assert isinstance(x, int) return x def test_narrow_int() -> None: a: Union[int, str] if int(): a = narrow_int(a) reveal_type(a) # N: Revealed type is 'builtins.int' b: int if int(): b = narrow_int(b) reveal_type(b) # N: Revealed type is 'builtins.int' c: str if int(): c = narrow_int(c) reveal_type(c) # Note: branch is now dead, so no type is revealed # TODO: maybe we should make mypy report a warning instead? 
[builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsAndNoReturnNarrowTypeNoStrictOptional2] # flags: --no-strict-optional from typing import overload, Union, TypeVar, NoReturn, Optional T = TypeVar('T') @overload def narrow_none(x: None) -> NoReturn: ... @overload def narrow_none(x: T) -> T: ... def narrow_none(x: Optional[T]) -> Union[NoReturn, T]: assert x is not None return x def test_narrow_none() -> None: a: Optional[int] if int(): a = narrow_none(a) reveal_type(a) # N: Revealed type is 'builtins.int' b: int if int(): b = narrow_none(b) reveal_type(b) # N: Revealed type is 'builtins.int' c: None if int(): c = narrow_none(c) reveal_type(c) # Note: branch is now dead, so no type is revealed [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional2] # flags: --strict-optional from typing import overload, Union, TypeVar, NoReturn, Optional T = TypeVar('T') @overload def narrow_none(x: None) -> NoReturn: ... @overload def narrow_none(x: T) -> T: ... def narrow_none(x: Optional[T]) -> Union[NoReturn, T]: assert x is not None return x def test_narrow_none() -> None: a: Optional[int] if int(): a = narrow_none(a) reveal_type(a) # N: Revealed type is 'builtins.int' b: int if int(): b = narrow_none(b) reveal_type(b) # N: Revealed type is 'builtins.int' c: None if int(): c = narrow_none(c) reveal_type(c) # Branch is now dead [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsAndNoReturnNarrowTypeNoStrictOptional3] # flags: --no-strict-optional from typing import overload, TypeVar, NoReturn, Optional @overload def narrow_none_v2(x: None) -> NoReturn: ... @overload def narrow_none_v2(x: T) -> T: ... 
def narrow_none_v2(x: Optional[T]) -> T: assert x is not None return x def test_narrow_none_v2() -> None: a: Optional[int] if int(): a = narrow_none_v2(a) reveal_type(a) # N: Revealed type is 'builtins.int' b: int if int(): b = narrow_none_v2(b) reveal_type(b) # N: Revealed type is 'builtins.int' c: None if int(): c = narrow_none_v2(c) reveal_type(c) # Note: branch is now dead, so no type is revealed [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional3] # flags: --strict-optional from typing import overload, TypeVar, NoReturn, Optional @overload def narrow_none_v2(x: None) -> NoReturn: ... @overload def narrow_none_v2(x: T) -> T: ... def narrow_none_v2(x: Optional[T]) -> T: assert x is not None return x def test_narrow_none_v2() -> None: a: Optional[int] if int(): a = narrow_none_v2(a) reveal_type(a) # N: Revealed type is 'builtins.int' b: int if int(): b = narrow_none_v2(b) reveal_type(b) # N: Revealed type is 'builtins.int' c: None if int(): c = narrow_none_v2(c) reveal_type(c) # Note: branch is now dead, so no type is revealed [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsAndNoReturnNarrowWhenBlacklistingSubtype] from typing import TypeVar, NoReturn, Union, overload class Parent: ... class A(Parent): ... class B(Parent): ... T = TypeVar('T', bound=Parent) @overload def narrow_to_not_a(x: A) -> NoReturn: ... @overload def narrow_to_not_a(x: T) -> T: ... def narrow_to_not_a(x: T) -> Union[NoReturn, T]: assert not isinstance(x, A) return x def test() -> None: val: Union[A, B] if int(): val = narrow_to_not_a(val) reveal_type(val) # N: Revealed type is '__main__.B' val2: A if int(): val2 = narrow_to_not_a(val2) reveal_type(val2) # Branch now dead [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsAndNoReturnNarrowWhenBlacklistingSubtype2] from typing import TypeVar, NoReturn, Union, overload class Parent: ... 
class A(Parent): ... class B(Parent): ... T = TypeVar('T', bound=Parent) @overload def narrow_to_not_a_v2(x: A) -> NoReturn: ... @overload def narrow_to_not_a_v2(x: T) -> T: ... def narrow_to_not_a_v2(x: T) -> T: assert not isinstance(x, A) return x def test_v2(val: Union[A, B], val2: A) -> None: if int(): val = narrow_to_not_a_v2(val) reveal_type(val) # N: Revealed type is '__main__.B' if int(): val2 = narrow_to_not_a_v2(val2) reveal_type(val2) # Branch now dead [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadWithNonGenericDescriptor] from typing import overload, Any, Optional, Union class NumberAttribute: @overload def __get__(self, instance: None, owner: Any) -> 'NumberAttribute': ... @overload def __get__(self, instance: object, owner: Any) -> int: ... def __get__(self, instance: Optional[object], owner: Any) -> Union['NumberAttribute', int]: if instance is None: return self else: return 3 def foo(self) -> str: ... class MyModel: my_number = NumberAttribute() reveal_type(MyModel().my_number) # N: Revealed type is 'builtins.int' MyModel().my_number.foo() # E: "int" has no attribute "foo" reveal_type(MyModel.my_number) # N: Revealed type is '__main__.NumberAttribute' reveal_type(MyModel.my_number.foo()) # N: Revealed type is 'builtins.str' [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadWithNonGenericDescriptorLookalike] from typing import overload, Any, Optional, Union class FakeAttribute: @overload def dummy(self, instance: None, owner: Any) -> 'FakeAttribute': ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def dummy(self, instance: object, owner: Any) -> int: ... def dummy(self, instance: Optional[object], owner: Any) -> Union['FakeAttribute', int]: ... 
[case testOverloadWithGenericDescriptor] from typing import overload, Any, Optional, TypeVar, Type, Union, Generic T = TypeVar('T') class NumberAttribute(Generic[T]): @overload def __get__(self, instance: None, owner: Type[T]) -> 'NumberAttribute[T]': ... @overload def __get__(self, instance: T, owner: Type[T]) -> int: ... def __get__(self, instance: Optional[T], owner: Type[T]) -> Union['NumberAttribute[T]', int]: if instance is None: return self else: return 3 def foo(self) -> str: ... class MyModel: my_number = NumberAttribute[MyModel]() reveal_type(MyModel().my_number) # N: Revealed type is 'builtins.int' MyModel().my_number.foo() # E: "int" has no attribute "foo" reveal_type(MyModel.my_number) # N: Revealed type is '__main__.NumberAttribute[__main__.MyModel*]' reveal_type(MyModel.my_number.foo()) # N: Revealed type is 'builtins.str' reveal_type(NumberAttribute[MyModel]().__get__(None, MyModel)) # N: Revealed type is '__main__.NumberAttribute[__main__.MyModel*]' reveal_type(NumberAttribute[str]().__get__(None, str)) # N: Revealed type is '__main__.NumberAttribute[builtins.str*]' [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testOverloadWithGenericDescriptorLookalike] from typing import overload, Any, Optional, TypeVar, Type, Union, Generic T = TypeVar('T') class FakeAttribute(Generic[T]): @overload def dummy(self, instance: None, owner: Type[T]) -> 'FakeAttribute[T]': ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def dummy(self, instance: T, owner: Type[T]) -> int: ... def dummy(self, instance: Optional[T], owner: Type[T]) -> Union['FakeAttribute[T]', int]: ... 
[case testOverloadLambdaUnpackingInference] # flags: --py2 from typing import Callable, TypeVar, overload T = TypeVar('T') S = TypeVar('S') @overload def foo(func, item): # type: (Callable[[T], S], T) -> S pass @overload def foo(): # type: () -> None pass def foo(*args): pass def add_proxy(x, y): # type: (int, str) -> str pass # The lambda definition is a syntax error in Python 3 tup = (1, '2') reveal_type(foo(lambda (x, y): add_proxy(x, y), tup)) # N: Revealed type is 'builtins.str*' [builtins fixtures/primitives.pyi] [case testOverloadWithClassMethods] from typing import overload class Wrapper: @overload @classmethod def foo(cls, x: int) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x): pass reveal_type(Wrapper.foo(3)) # N: Revealed type is 'builtins.int' reveal_type(Wrapper.foo("foo")) # N: Revealed type is 'builtins.str' [builtins fixtures/classmethod.pyi] [case testOverloadWithInconsistentClassMethods] from typing import overload class Wrapper1: @overload # E: Overload does not consistently use the "@classmethod" decorator on all function signatures. @classmethod def foo(cls, x: int) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... def foo(cls, x): pass class Wrapper2: @overload # E: Overload does not consistently use the "@classmethod" decorator on all function signatures. @classmethod def foo(cls, x: int) -> int: ... @overload def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x): pass class Wrapper3: @overload # E: Overload does not consistently use the "@classmethod" decorator on all function signatures. def foo(cls, x: int) -> int: ... @overload def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x): pass [builtins fixtures/classmethod.pyi] [case testOverloadWithSwappedDecorators] from typing import overload class Wrapper1: @classmethod @overload def foo(cls, x: int) -> int: ... @classmethod @overload def foo(cls, x: str) -> str: ... 
@classmethod def foo(cls, x): pass class Wrapper2: @classmethod @overload def foo(cls, x: int) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x): pass class Wrapper3: @classmethod # E: Overload does not consistently use the "@classmethod" decorator on all function signatures. @overload def foo(cls, x: int) -> int: ... @overload def foo(cls, x: str) -> str: ... def foo(cls, x): pass reveal_type(Wrapper1.foo(3)) # N: Revealed type is 'builtins.int' reveal_type(Wrapper2.foo(3)) # N: Revealed type is 'builtins.int' [builtins fixtures/classmethod.pyi] [case testOverloadFaultyClassMethodInheritance] from typing import overload class A: pass class B(A): pass class C(B): pass class Parent: @overload @classmethod def foo(cls, x: B) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x): pass class BadChild(Parent): @overload # E: Signature of "foo" incompatible with supertype "Parent" @classmethod def foo(cls, x: C) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x): pass class GoodChild(Parent): @overload @classmethod def foo(cls, x: A) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x): pass [builtins fixtures/classmethod.pyi] [case testOverloadClassMethodMixingInheritance] from typing import overload class BadParent: @overload @classmethod def foo(cls, x: int) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x): pass class BadChild(BadParent): @overload # E: Signature of "foo" incompatible with supertype "BadParent" def foo(cls, x: int) -> int: ... @overload def foo(cls, x: str) -> str: ... def foo(cls, x): pass class GoodParent: @overload def foo(cls, x: int) -> int: ... @overload def foo(cls, x: str) -> str: ... def foo(cls, x): pass class GoodChild(GoodParent): @overload @classmethod def foo(cls, x: int) -> int: ... 
@overload @classmethod def foo(cls, x: str) -> str: ... @classmethod def foo(cls, x): pass [builtins fixtures/classmethod.pyi] [case testOverloadClassMethodImplementation] from typing import overload, Union class Wrapper: @classmethod def other(cls) -> str: return "..." @overload @classmethod def foo(cls, x: int) -> int: ... @overload @classmethod def foo(cls, x: str) -> str: ... @classmethod # E: Overloaded function implementation cannot produce return type of signature 1 def foo(cls, x: Union[int, str]) -> str: reveal_type(cls) # N: Revealed type is 'Type[__main__.Wrapper]' reveal_type(cls.other()) # N: Revealed type is 'builtins.str' return "..." [builtins fixtures/classmethod.pyi] [case testOverloadWithStaticMethods] from typing import overload class Wrapper: @overload @staticmethod def foo(x: int) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... @staticmethod def foo(x): pass reveal_type(Wrapper.foo(3)) # N: Revealed type is 'builtins.int' reveal_type(Wrapper.foo("foo")) # N: Revealed type is 'builtins.str' [builtins fixtures/staticmethod.pyi] [case testOverloadWithInconsistentStaticMethods] from typing import overload, Union class Wrapper1: @overload # E: Overload does not consistently use the "@staticmethod" decorator on all function signatures. @staticmethod def foo(x: int) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... def foo(x): pass class Wrapper2: @overload # E: Overload does not consistently use the "@staticmethod" decorator on all function signatures. @staticmethod def foo(x: int) -> int: ... @overload def foo(x: str) -> str: ... # E: Self argument missing for a non-static method (or an invalid type for self) @staticmethod def foo(x): pass class Wrapper3: @overload # E: Overload does not consistently use the "@staticmethod" decorator on all function signatures. @staticmethod def foo(x: int) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... 
def foo(x: Union[int, str]): pass # E: Self argument missing for a non-static method (or an invalid type for self) [builtins fixtures/staticmethod.pyi] [case testOverloadWithSwappedDecorators] from typing import overload class Wrapper1: @staticmethod @overload def foo(x: int) -> int: ... @staticmethod @overload def foo(x: str) -> str: ... @staticmethod def foo(x): pass class Wrapper2: @staticmethod @overload def foo(x: int) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... @staticmethod def foo(x): pass class Wrapper3: @staticmethod # E: Overload does not consistently use the "@staticmethod" decorator on all function signatures. @overload def foo(x: int) -> int: ... @overload def foo(x: str) -> str: ... # E: Self argument missing for a non-static method (or an invalid type for self) @staticmethod def foo(x): pass reveal_type(Wrapper1.foo(3)) # N: Revealed type is 'builtins.int' reveal_type(Wrapper2.foo(3)) # N: Revealed type is 'builtins.int' [builtins fixtures/staticmethod.pyi] [case testOverloadFaultyStaticMethodInheritance] from typing import overload class A: pass class B(A): pass class C(B): pass class Parent: @overload @staticmethod def foo(x: B) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... @staticmethod def foo(x): pass class BadChild(Parent): @overload # E: Signature of "foo" incompatible with supertype "Parent" @staticmethod def foo(x: C) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... @staticmethod def foo(x): pass class GoodChild(Parent): @overload @staticmethod def foo(x: A) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... @staticmethod def foo(x): pass [builtins fixtures/staticmethod.pyi] [case testOverloadStaticMethodMixingInheritance] from typing import overload class BadParent: @overload @staticmethod def foo(x: int) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... 
@staticmethod def foo(x): pass class BadChild(BadParent): @overload # E: Signature of "foo" incompatible with supertype "BadParent" def foo(self, x: int) -> int: ... @overload def foo(self, x: str) -> str: ... def foo(self, x): pass class GoodParent: @overload def foo(self, x: int) -> int: ... @overload def foo(self, x: str) -> str: ... def foo(self, x): pass class GoodChild(GoodParent): @overload @staticmethod def foo(x: int) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... @staticmethod def foo(x): pass [builtins fixtures/staticmethod.pyi] [case testOverloadStaticMethodImplementation] from typing import overload, Union class Wrapper: @staticmethod def other() -> str: return "..." @overload @staticmethod def foo(x: int) -> int: ... @overload @staticmethod def foo(x: str) -> str: ... @staticmethod # E: Overloaded function implementation cannot produce return type of signature 1 def foo(x: Union[int, str]) -> str: return 3 # E: Incompatible return value type (got "int", expected "str") [builtins fixtures/staticmethod.pyi] [case testUnionMathOverloadingReturnsBestType] from typing import Union, overload @overload def f(x: Union[int, str]) -> int: ... @overload def f(x: object) -> object: ... 
def f(x): pass x: Union[int, str] reveal_type(f(x)) # N: Revealed type is 'builtins.int' [out] [case testOverloadAndSelfTypes] from typing import overload, Union, TypeVar, Type T = TypeVar('T', bound='Parent') class Parent: @overload def foo(self: T, x: int) -> T: pass @overload def foo(self, x: str) -> str: pass def foo(self: T, x: Union[int, str]) -> Union[T, str]: reveal_type(self.bar()) # N: Revealed type is 'builtins.str' return self def bar(self) -> str: pass class Child(Parent): def child_only(self) -> int: pass x: Union[int, str] reveal_type(Parent().foo(3)) # N: Revealed type is '__main__.Parent*' reveal_type(Child().foo(3)) # N: Revealed type is '__main__.Child*' reveal_type(Child().foo("...")) # N: Revealed type is 'builtins.str' reveal_type(Child().foo(x)) # N: Revealed type is 'Union[__main__.Child*, builtins.str]' reveal_type(Child().foo(3).child_only()) # N: Revealed type is 'builtins.int' [case testOverloadAndClassTypes] from typing import overload, Union, TypeVar, Type T = TypeVar('T', bound='Parent') class Parent: @overload @classmethod def foo(cls: Type[T], x: int) -> Type[T]: pass @overload @classmethod def foo(cls, x: str) -> str: pass @classmethod def foo(cls: Type[T], x: Union[int, str]) -> Union[Type[T], str]: reveal_type(cls.bar()) # N: Revealed type is 'builtins.str' return cls @classmethod def bar(cls) -> str: pass class Child(Parent): def child_only(self) -> int: pass x: Union[int, str] reveal_type(Parent.foo(3)) # N: Revealed type is 'Type[__main__.Parent*]' reveal_type(Child.foo(3)) # N: Revealed type is 'Type[__main__.Child*]' reveal_type(Child.foo("...")) # N: Revealed type is 'builtins.str' reveal_type(Child.foo(x)) # N: Revealed type is 'Union[Type[__main__.Child*], builtins.str]' reveal_type(Child.foo(3)().child_only()) # N: Revealed type is 'builtins.int' [builtins fixtures/classmethod.pyi] [case testOptionalIsNotAUnionIfNoStrictOverload] # flags: --no-strict-optional from typing import Optional, overload class B: pass class 
C(B): pass @overload def rp(x: C) -> C: ... @overload def rp(x: B) -> B: ... def rp(x): pass x: Optional[C] reveal_type(rp(x)) # N: Revealed type is '__main__.C' [out] [case testOptionalIsNotAUnionIfNoStrictOverloadStr] # flags: -2 --no-strict-optional from typing import Optional from m import relpath a = '' # type: Optional[str] reveal_type(relpath(a)) # N: Revealed type is 'builtins.str' [file m.pyi] from typing import overload @overload def relpath(path: str) -> str: ... @overload def relpath(path: unicode) -> unicode: ... [out] [case testUnionMathTrickyOverload1] from typing import Union, overload @overload def f(x: int, y: int) -> int: ... @overload def f(x: object, y: str) -> str: ... def f(x): pass x: Union[int, str] y: Union[int, str] f(x, y) [out] main:12: error: Argument 1 to "f" has incompatible type "Union[int, str]"; expected "int" main:12: error: Argument 2 to "f" has incompatible type "Union[int, str]"; expected "int" [case testUnionMathTrickyOverload2] from typing import overload, Union, Any class C: def f(self, other: C) -> C: ... class D(C): @overload def f(self, other: D) -> D: ... @overload def f(self, other: C) -> C: ... def f(self, other): ... x: D y: Union[D, Any] reveal_type(x.f(y)) # N: Revealed type is 'Union[__main__.D, Any]' [out] [case testManyUnionsInOverload] from typing import overload, TypeVar, Union T = TypeVar('T') @overload def f(x: int, y: object, z: object, t: object, u: object, w: object, v: object, s: object) -> int: ... @overload def f(x: str, y: object, z: object, t: object, u: object, w: object, v: object, s: object) -> str: ... @overload def f(x: T, y: object, z: object, t: object, u: object, w: object, v: object, s: object) -> T: ... 
def f(*args, **kwargs): pass class A: pass class B: pass x: Union[int, str, A, B] y = f(x, x, x, x, x, x, x, x) # 8 args reveal_type(y) # N: Revealed type is 'Union[builtins.int, builtins.str, __main__.A, __main__.B]' [builtins fixtures/dict.pyi] [out] [case testOverloadsWithNoneComingSecondAreAlwaysFlaggedInNoStrictOptional] # flags: --no-strict-optional from typing import overload @overload def none_first(x: None) -> None: ... @overload def none_first(x: int) -> int: ... def none_first(x: int) -> int: return x @overload def none_second(x: int) -> int: ... @overload def none_second(x: None) -> None: ... # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader def none_second(x: int) -> int: return x [case testOverloadsWithNoneComingSecondIsOkInStrictOptional] # flags: --strict-optional from typing import overload, Optional @overload def none_first(x: None) -> None: ... @overload def none_first(x: int) -> int: ... def none_first(x: Optional[int]) -> Optional[int]: return x @overload def none_second(x: int) -> int: ... @overload def none_second(x: None) -> None: ... def none_second(x: Optional[int]) -> Optional[int]: return x @overload def none_loose_impl(x: None) -> None: ... @overload def none_loose_impl(x: int) -> int: ... def none_loose_impl(x: int) -> int: return x [out] main:22: error: Overloaded function implementation does not accept all possible arguments of signature 1 main:22: error: Overloaded function implementation cannot produce return type of signature 1 [case testTooManyUnionsException] from typing import overload, Union @overload def f(*args: int) -> int: ... @overload def f(*args: str) -> str: ... 
def f(*args): pass x: Union[int, str] f(x, x, x, x, x, x, x, x) [out] main:11: error: Not all union combinations were tried because there are too many unions main:11: error: Argument 1 to "f" has incompatible type "Union[int, str]"; expected "int" main:11: error: Argument 2 to "f" has incompatible type "Union[int, str]"; expected "int" main:11: error: Argument 3 to "f" has incompatible type "Union[int, str]"; expected "int" main:11: error: Argument 4 to "f" has incompatible type "Union[int, str]"; expected "int" main:11: error: Argument 5 to "f" has incompatible type "Union[int, str]"; expected "int" main:11: error: Argument 6 to "f" has incompatible type "Union[int, str]"; expected "int" main:11: error: Argument 7 to "f" has incompatible type "Union[int, str]"; expected "int" main:11: error: Argument 8 to "f" has incompatible type "Union[int, str]"; expected "int" [case testSafeDunderOverlapInSubclass] from typing import overload class A: def __add__(self, x : 'A') -> 'A': ... class B(A): @overload def __add__(self, x : 'B') -> 'B': ... @overload def __add__(self, x : 'A') -> 'A' : ... def __add__(self, x): pass [out] [case testUnsafeDunderOverlapInSubclass] from typing import overload class A: def __add__(self, x : 'A') -> 'A': if isinstance(x, A): return A() else: return NotImplemented # This is unsafe override because of the problem below class B(A): @overload # E: Signature of "__add__" incompatible with supertype "A" \ # N: Overloaded operator methods can't have wider argument types in overrides def __add__(self, x : 'Other') -> 'B' : ... @overload def __add__(self, x : 'A') -> 'A': ... 
def __add__(self, x): if isinstance(x, Other): return B() elif isinstance(x, A): return A() else: return NotImplemented class Other: def __radd__(self, x: 'A') -> 'Other': if isinstance(x, A): return Other() else: return NotImplemented actually_b: A = B() reveal_type(actually_b + Other()) # N: Revealed type is '__main__.Other' # Runtime type is B, this is why we report the error on overriding. [builtins fixtures/isinstance.pyi] [out] [case testOverloadErrorMessageManyMatches] from typing import overload class A: pass class B: pass class C: pass class D: pass @overload def f(x: A) -> None: ... @overload def f(x: B) -> None: ... @overload def f(x: C) -> None: ... @overload def f(x: D) -> None: ... @overload def f(x: int, y: int) -> None: ... def f(*args): pass f(3) # E: No overload variant of "f" matches argument type "int" \ # N: Possible overload variants: \ # N: def f(x: A) -> None \ # N: def f(x: B) -> None \ # N: <2 more similar overloads not shown, out of 5 total overloads> @overload def g(x: A) -> None: ... @overload def g(x: B) -> None: ... @overload def g(x: C) -> None: ... def g(*args): pass g(3) # E: No overload variant of "g" matches argument type "int" \ # N: Possible overload variants: \ # N: def g(x: A) -> None \ # N: def g(x: B) -> None \ # N: def g(x: C) -> None [case testOverloadedInIter] from lib import f, g for fun in [f, g]: reveal_type(fun) # N: Revealed type is 'Overload(def (x: builtins.int) -> builtins.str, def (x: builtins.str) -> builtins.int)' [file lib.pyi] from typing import overload @overload def f(x: int) -> str: ... @overload def f(x: str) -> int: ... @overload def g(x: int) -> str: ... @overload def g(x: str) -> int: ... [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] [out] [case testNestedOverloadsNoCrash] from typing import overload def f() -> None: @overload def g(x: str) -> str: ... @overload def g(x: int) -> int: ... 
def g(x): pass g(str()) [out] [case testNestedOverloadsTypeVar] from typing import overload, TypeVar T = TypeVar('T') def f() -> None: @overload def g(x: str) -> str: ... @overload def g(x: T, y: int) -> T: ... def g(x): pass g(str(), str()) # E: No overload variant of "g" matches argument types "str", "str" \ # N: Possible overload variant: \ # N: def [T] g(x: T, y: int) -> T \ # N: <1 more non-matching overload not shown> reveal_type(g(str(), int())) # N: Revealed type is 'builtins.str*' [out] [case testNestedOverloadsTypeVarOverlap] from typing import overload, TypeVar T = TypeVar('T') def f() -> None: @overload def g(x: str) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g(x: T) -> T: ... def g(x): pass [out] [case testNestedOverloadsMutuallyRecursive] from typing import overload, TypeVar, Dict, Any class C: ... T = TypeVar('T') def f() -> None: @overload def g() -> None: ... @overload def g(x: T) -> Dict[int, T]: ... def g(*args, **kwargs) -> Any: reveal_type(h(C())) # N: Revealed type is 'builtins.dict[builtins.str, __main__.C*]' @overload def h() -> None: ... @overload def h(x: T) -> Dict[str, T]: ... def h(*args, **kwargs) -> Any: reveal_type(g(C())) # N: Revealed type is 'builtins.dict[builtins.int, __main__.C*]' [builtins fixtures/dict.pyi] [out] [case testOverloadConstrainedTypevarNotShadowingAny] from lib import attr from typing import Any reveal_type(attr(1)) # N: Revealed type is 'builtins.int*' reveal_type(attr("hi")) # N: Revealed type is 'builtins.int' x: Any reveal_type(attr(x)) # N: Revealed type is 'Any' attr("hi", 1) # E: No overload variant of "attr" matches argument types "str", "int" \ # N: Possible overload variant: \ # N: def [T in (int, float)] attr(default: T = ..., blah: int = ...) -> T \ # N: <1 more non-matching overload not shown> [file lib.pyi] from typing import overload, Any, TypeVar T = TypeVar('T', int, float) @overload def attr(default: T = ..., blah: int = ...) 
-> T: ... @overload def attr(default: Any = ...) -> int: ... [out] [case testOverloadBoundedTypevarNotShadowingAny] from lib import attr from typing import Any reveal_type(attr(1)) # N: Revealed type is 'builtins.int*' reveal_type(attr("hi")) # N: Revealed type is 'builtins.int' x: Any reveal_type(attr(x)) # N: Revealed type is 'Any' attr("hi", 1) # E: No overload variant of "attr" matches argument types "str", "int" \ # N: Possible overload variant: \ # N: def [T <: int] attr(default: T = ..., blah: int = ...) -> T \ # N: <1 more non-matching overload not shown> [file lib.pyi] from typing import overload, TypeVar, Any T = TypeVar('T', bound=int) @overload def attr(default: T = ..., blah: int = ...) -> T: ... @overload def attr(default: Any = ...) -> int: ... [out] [case testAnyIsOKAsFallbackInOverloads] import stub [file stub.pyi] from typing import TypeVar, Any, overload T = TypeVar('T') @overload def foo(x: T) -> T: ... @overload def foo(x: Any) -> Any: ... @overload def bar(x: T) -> T: ... @overload def bar(x: Any) -> int: ... 
[out] [case testOverloadsIgnorePromotions] from typing import overload, List, Union, _promote class Parent: pass class Child(Parent): pass children: List[Child] parents: List[Parent] @overload def f(x: Child) -> List[Child]: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: Parent) -> List[Parent]: pass def f(x: Union[Child, Parent]) -> Union[List[Child], List[Parent]]: if isinstance(x, Child): reveal_type(x) # N: Revealed type is '__main__.Child' return children else: reveal_type(x) # N: Revealed type is '__main__.Parent' return parents ints: List[int] floats: List[float] @overload def g(x: int) -> List[int]: pass @overload def g(x: float) -> List[float]: pass def g(x: Union[int, float]) -> Union[List[int], List[float]]: if isinstance(x, int): reveal_type(x) # N: Revealed type is 'builtins.int' return ints else: reveal_type(x) # N: Revealed type is 'builtins.float' return floats [builtins fixtures/isinstancelist.pyi] [typing fixtures/typing-full.pyi] [case testOverloadsTypesAndUnions] from typing import overload, Type, Union class A: pass class B: pass @overload def f(x: Type[A]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: Union[Type[A], Type[B]]) -> str: ... def f(x: Union[Type[A], Type[B]]) -> Union[int, str]: return 1 [case testBadOverloadProbableMatch] from typing import overload, List, Type class Other: pass @overload def multiple_plausible(x: int) -> int: ... @overload def multiple_plausible(x: str) -> str: ... def multiple_plausible(x): pass @overload def single_plausible(x: Type[int]) -> int: ... @overload def single_plausible(x: List[str]) -> str: ... 
def single_plausible(x): pass a = multiple_plausible(Other()) # E: No overload variant of "multiple_plausible" matches argument type "Other" \ # N: Possible overload variants: \ # N: def multiple_plausible(x: int) -> int \ # N: def multiple_plausible(x: str) -> str reveal_type(a) # N: Revealed type is 'Any' b = single_plausible(Other) # E: Argument 1 to "single_plausible" has incompatible type "Type[Other]"; expected "Type[int]" reveal_type(b) # N: Revealed type is 'builtins.int' c = single_plausible([Other()]) # E: List item 0 has incompatible type "Other"; expected "str" reveal_type(c) # N: Revealed type is 'builtins.str' [builtins fixtures/list.pyi] [case testDisallowUntypedDecoratorsOverload] # flags: --disallow-untyped-decorators from typing import Any, Callable, overload, TypeVar F = TypeVar('F', bound=Callable[..., Any]) @overload def dec(x: F) -> F: ... @overload def dec(x: str) -> Callable[[F], F]: ... def dec(x) -> Any: pass @dec def f(name: str) -> int: return 0 @dec('abc') def g(name: str) -> int: return 0 reveal_type(f) # N: Revealed type is 'def (name: builtins.str) -> builtins.int' reveal_type(g) # N: Revealed type is 'def (name: builtins.str) -> builtins.int' [case testOverloadBadArgumentsInferredToAny1] from typing import Union, Any, overload def bar(x: int) -> Union[int, Any]: ... @overload def foo(x: str) -> None: ... @overload def foo(x: int) -> None: ... def foo(x) -> None: pass foo(bar('lol')) # E: Argument 1 to "bar" has incompatible type "str"; expected "int" [case testOverloadBadArgumentsInferredToAny2] from typing import Union, Iterable, Tuple, TypeVar, Generic, overload, Any class A: def foo(self) -> Iterable[int]: pass def bar(x: int) -> Union[A, int]: ... _T = TypeVar('_T') @overload def foo() -> None: ... @overload def foo(iterable: Iterable[_T]) -> None: ... 
def foo(iterable = None) -> None: pass foo(bar('lol').foo()) # E: Item "int" of "Union[A, int]" has no attribute "foo" \ # E: Argument 1 to "bar" has incompatible type "str"; expected "int" [case testOverloadInferringArgumentsUsingContext1] from typing import Optional, List, overload, TypeVar T = TypeVar('T') def g(x: Optional[T] = None) -> List[T]: ... @overload def f(x: int) -> int: ... @overload def f(x: List[int]) -> List[int]: ... def f(x): pass reveal_type(f(g())) # N: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testOverloadInferringArgumentsUsingContext2-skip] # TODO: Overloads only use outer context to infer type variables in a given overload variant, # but never use outer context to _choose_ a better overload in ambiguous situations # like empty containers or multiple inheritance, instead just always choosing the first one. from typing import Optional, List, overload, TypeVar T = TypeVar('T') @overload def g(x: List[str]) -> List[str]: ... @overload def g(x: List[int]) -> List[int]: ... def g(x): pass @overload def f(x: int) -> int: ... @overload def f(x: List[int]) -> List[int]: ... def f(x): pass reveal_type(f(g([]))) # N: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testOverloadDeferredNode] from typing import Callable, TypeVar, Generic, Any, overload _S = TypeVar('_S') _T = TypeVar('_T') _R = TypeVar('_R') @overload def partial(__func: Callable[[_T], _S], __arg: _T) -> Callable[[], _S]: ... @overload def partial(__func: Callable[[_T, _S], _S], __arg: _T) -> Callable[[_S], _R]: ... def partial(*args: Any) -> Any: pass def f(f: Callable[[int], int]) -> None: pass def dec(f: Callable[[_S, _T], _R]) -> Callable[[_S, _T], _R]: pass def asdf() -> None: f(partial(lol, 0)) @dec def lol(x: int, y: int) -> int: pass [case testVeryBrokenOverload] import lib reveal_type(lib.func) [file lib.pyi] @overload def func(x: int) -> int: ... 
def func(x): return x [out] tmp/lib.pyi:1: error: Name 'overload' is not defined tmp/lib.pyi:4: error: Name 'func' already defined on line 1 main:2: note: Revealed type is 'Any' -- Order of errors is different [case testVeryBrokenOverload2] import lib reveal_type(lib.func) [file lib.pyi] @overload def func(x: int) -> int: ... @overload def func(x: str) -> str: ... [out] tmp/lib.pyi:1: error: Name 'overload' is not defined tmp/lib.pyi:3: error: Name 'func' already defined on line 1 tmp/lib.pyi:3: error: Name 'overload' is not defined main:3: note: Revealed type is 'Any' [case testLiteralSubtypeOverlap] from typing import overload from typing_extensions import Literal class MyInt(int): ... # Strictly speaking we can't prove this is unsafe (this depends on the implementation), # but such APIs seem like an anti-pattern anyways. @overload def foo(x: Literal[0]) -> None: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo(x: MyInt) -> int: ... def foo(x): ... 
mypy-0.761/test-data/unit/check-protocols.test0000644€tŠÔÚ€2›s®0000017342013576752246025574 0ustar jukkaDROPBOX\Domain Users00000000000000-- Simple protocol types -- --------------------- [case testCannotInstantiateProtocol] from typing import Protocol class P(Protocol): def meth(self) -> None: pass P() # E: Cannot instantiate protocol class "P" [case testSimpleProtocolOneMethod] from typing import Protocol class P(Protocol): def meth(self) -> None: pass class B: pass class C: def meth(self) -> None: pass x: P def fun(x: P) -> None: x.meth() x.meth(x) # E: Too many arguments for "meth" of "P" x.bad # E: "P" has no attribute "bad" x = C() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P") fun(C()) fun(B()) # E: Argument 1 to "fun" has incompatible type "B"; expected "P" def fun2() -> P: return C() def fun3() -> P: return B() # E: Incompatible return value type (got "B", expected "P") [case testSimpleProtocolOneAbstractMethod] from typing import Protocol from abc import abstractmethod class P(Protocol): @abstractmethod def meth(self) -> None: pass class B: pass class C: def meth(self) -> None: pass class D(B): def meth(self) -> None: pass x: P def fun(x: P) -> None: x.meth() x.meth(x) # E: Too many arguments for "meth" of "P" x.bad # E: "P" has no attribute "bad" x = C() x = D() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P") fun(C()) fun(D()) fun(B()) # E: Argument 1 to "fun" has incompatible type "B"; expected "P" fun(x) [case testProtocolMethodBodies] from typing import Protocol, List class P(Protocol): def meth(self) -> int: return 'no way' # E: Incompatible return value type (got "str", expected "int") # explicit ellipsis is OK in protocol methods class P2(Protocol): def meth2(self) -> List[int]: ... 
[builtins fixtures/list.pyi] [case testSimpleProtocolOneMethodOverride] from typing import Protocol, Union class P(Protocol): def meth(self) -> Union[int, str]: pass class SubP(P, Protocol): def meth(self) -> int: pass class B: pass class C: def meth(self) -> int: pass z: P x: SubP def fun(x: SubP) -> str: x.bad # E: "SubP" has no attribute "bad" return x.meth() # E: Incompatible return value type (got "int", expected "str") z = x x = C() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "SubP") reveal_type(fun(C())) # N: Revealed type is 'builtins.str' fun(B()) # E: Argument 1 to "fun" has incompatible type "B"; expected "SubP" [case testSimpleProtocolTwoMethodsMerge] from typing import Protocol class P1(Protocol): def meth1(self) -> int: pass class P2(Protocol): def meth2(self) -> str: pass class P(P1, P2, Protocol): pass class B: pass class C1: def meth1(self) -> int: pass class C2(C1): def meth2(self) -> str: pass class C: def meth1(self) -> int: pass def meth2(self) -> str: pass class AnotherP(Protocol): def meth1(self) -> int: pass def meth2(self) -> str: pass x: P reveal_type(x.meth1()) # N: Revealed type is 'builtins.int' reveal_type(x.meth2()) # N: Revealed type is 'builtins.str' c: C c1: C1 c2: C2 y: AnotherP if int(): x = c if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P") if int(): x = c1 # E: Incompatible types in assignment (expression has type "C1", variable has type "P") \ # N: 'C1' is missing following 'P' protocol member: \ # N: meth2 if int(): x = c2 if int(): x = y if int(): y = x [case testSimpleProtocolTwoMethodsExtend] from typing import Protocol class P1(Protocol): def meth1(self) -> int: pass class P2(P1, Protocol): def meth2(self) -> str: pass class Cbad: def meth1(self) -> int: pass class C: def meth1(self) -> int: pass def meth2(self) -> str: pass x: P2 reveal_type(x.meth1()) # N: Revealed type is 'builtins.int' reveal_type(x.meth2()) # N: 
Revealed type is 'builtins.str' if int(): x = C() # OK if int(): x = Cbad() # E: Incompatible types in assignment (expression has type "Cbad", variable has type "P2") \ # N: 'Cbad' is missing following 'P2' protocol member: \ # N: meth2 [case testProtocolMethodVsAttributeErrors] from typing import Protocol class P(Protocol): def meth(self) -> int: pass class C: meth: int x: P = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P") \ # N: Following member(s) of "C" have conflicts: \ # N: meth: expected "Callable[[], int]", got "int" [case testProtocolMethodVsAttributeErrors2] from typing import Protocol class P(Protocol): @property def meth(self) -> int: pass class C: def meth(self) -> int: pass x: P = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P") \ # N: Following member(s) of "C" have conflicts: \ # N: meth: expected "int", got "Callable[[], int]" [builtins fixtures/property.pyi] [case testCannotAssignNormalToProtocol] from typing import Protocol class P(Protocol): def meth(self) -> int: pass class C: def meth(self) -> int: pass x: C y: P x = y # E: Incompatible types in assignment (expression has type "P", variable has type "C") [case testIndependentProtocolSubtyping] from typing import Protocol class P1(Protocol): def meth(self) -> int: pass class P2(Protocol): def meth(self) -> int: pass x1: P1 x2: P2 x1 = x2 x2 = x1 def f1(x: P1) -> None: pass def f2(x: P2) -> None: pass f1(x2) f2(x1) [case testNoneDisablesProtocolImplementation] from typing import Protocol class MyHashable(Protocol): def __my_hash__(self) -> int: return 0 class C: __my_hash__ = None var: MyHashable = C() # E: Incompatible types in assignment (expression has type "C", variable has type "MyHashable") [case testNoneDisablesProtocolSubclassingWithStrictOptional] # flags: --strict-optional from typing import Protocol class MyHashable(Protocol): def __my_hash__(self) -> int: return 0 class C(MyHashable): __my_hash__ = 
None # E: Incompatible types in assignment \ (expression has type "None", base class "MyHashable" defined the type as "Callable[[MyHashable], int]") [case testProtocolsWithNoneAndStrictOptional] # flags: --strict-optional from typing import Protocol class P(Protocol): x = 0 # type: int class C: x = None x: P = C() # Error! def f(x: P) -> None: pass f(C()) # Error! [out] main:9: error: Incompatible types in assignment (expression has type "C", variable has type "P") main:9: note: Following member(s) of "C" have conflicts: main:9: note: x: expected "int", got "None" main:11: error: Argument 1 to "f" has incompatible type "C"; expected "P" main:11: note: Following member(s) of "C" have conflicts: main:11: note: x: expected "int", got "None" -- Semanal errors in protocol types -- -------------------------------- [case testBasicSemanalErrorsInProtocols] from typing import Protocol, Generic, TypeVar, Iterable T = TypeVar('T', covariant=True) S = TypeVar('S', covariant=True) class P1(Protocol[T, T]): # E: Duplicate type variables in Generic[...] or Protocol[...] def meth(self) -> T: pass class P2(Protocol[T], Protocol[S]): # E: Only single Generic[...] or Protocol[...] can be in bases def meth(self) -> T: pass class P3(Protocol[T], Generic[S]): # E: Only single Generic[...] or Protocol[...] can be in bases def meth(self) -> T: pass class P4(Protocol[T]): attr: Iterable[S] # E: Type variable "__main__.S" is unbound \ # N: (Hint: Use "Generic[S]" or "Protocol[S]" base class to bind "S" inside a class) \ # N: (Hint: Use "S" in function signature to bind "S" inside a function) class P5(Iterable[S], Protocol[T]): # E: If Generic[...] or Protocol[...] 
is present it should list all type variables def meth(self) -> T: pass [case testProhibitSelfDefinitionInProtocols] from typing import Protocol class P(Protocol): def __init__(self, a: int) -> None: self.a = a # E: Protocol members cannot be defined via assignment to self \ # E: "P" has no attribute "a" class B: pass class C: def __init__(self, a: int) -> None: pass x: P x = B() # The above has an incompatible __init__, but mypy ignores this for nominal subtypes? x = C(1) class P2(Protocol): a: int def __init__(self) -> None: self.a = 1 class B2(P2): a: int x2: P2 = B2() # OK [case testProtocolAndRuntimeAreDefinedAlsoInTypingExtensions] from typing_extensions import Protocol, runtime_checkable @runtime_checkable class P(Protocol): def meth(self) -> int: pass x: object if isinstance(x, P): reveal_type(x) # N: Revealed type is '__main__.P' reveal_type(x.meth()) # N: Revealed type is 'builtins.int' class C: def meth(self) -> int: pass z: P = C() [builtins fixtures/dict.pyi] [case testProtocolsCannotInheritFromNormal] from typing import Protocol class C: pass class D: pass class P(C, Protocol): # E: All bases of a protocol must be protocols attr: int class P2(P, D, Protocol): # E: All bases of a protocol must be protocols pass P2() # E: Cannot instantiate abstract class 'P2' with abstract attribute 'attr' p: P2 reveal_type(p.attr) # N: Revealed type is 'builtins.int' -- Generic protocol types -- ---------------------- [case testGenericMethodWithProtocol] from typing import Protocol, TypeVar T = TypeVar('T') class P(Protocol): def meth(self, x: int) -> int: return x class C: def meth(self, x: T) -> T: return x x: P = C() [case testGenericMethodWithProtocol2] from typing import Protocol, TypeVar T = TypeVar('T') class P(Protocol): def meth(self, x: T) -> T: return x class C: def meth(self, x: int) -> int: return x x: P = C() [out] main:11: error: Incompatible types in assignment (expression has type "C", variable has type "P") main:11: note: Following member(s) of "C" 
have conflicts: main:11: note: Expected: main:11: note: def [T] meth(self, x: T) -> T main:11: note: Got: main:11: note: def meth(self, x: int) -> int [case testAutomaticProtocolVariance] from typing import TypeVar, Protocol T = TypeVar('T') # In case of these errors we proceed with declared variance. class Pco(Protocol[T]): # E: Invariant type variable 'T' used in protocol where covariant one is expected def meth(self) -> T: pass class Pcontra(Protocol[T]): # E: Invariant type variable 'T' used in protocol where contravariant one is expected def meth(self, x: T) -> None: pass class Pinv(Protocol[T]): attr: T class A: pass class B(A): pass x1: Pco[B] y1: Pco[A] if int(): x1 = y1 # E: Incompatible types in assignment (expression has type "Pco[A]", variable has type "Pco[B]") if int(): y1 = x1 # E: Incompatible types in assignment (expression has type "Pco[B]", variable has type "Pco[A]") x2: Pcontra[B] y2: Pcontra[A] if int(): y2 = x2 # E: Incompatible types in assignment (expression has type "Pcontra[B]", variable has type "Pcontra[A]") if int(): x2 = y2 # E: Incompatible types in assignment (expression has type "Pcontra[A]", variable has type "Pcontra[B]") x3: Pinv[B] y3: Pinv[A] if int(): y3 = x3 # E: Incompatible types in assignment (expression has type "Pinv[B]", variable has type "Pinv[A]") if int(): x3 = y3 # E: Incompatible types in assignment (expression has type "Pinv[A]", variable has type "Pinv[B]") [case testProtocolVarianceWithCallableAndList] from typing import Protocol, TypeVar, Callable, List T = TypeVar('T') S = TypeVar('S') T_co = TypeVar('T_co', covariant=True) class P(Protocol[T, S]): # E: Invariant type variable 'T' used in protocol where covariant one is expected \ # E: Invariant type variable 'S' used in protocol where contravariant one is expected def fun(self, callback: Callable[[T], S]) -> None: pass class P2(Protocol[T_co]): # E: Covariant type variable 'T_co' used in protocol where invariant one is expected lst: List[T_co] [builtins 
fixtures/list.pyi] [case testProtocolVarianceWithUnusedVariable] from typing import Protocol, TypeVar T = TypeVar('T') class P(Protocol[T]): # E: Invariant type variable 'T' used in protocol where covariant one is expected attr: int [case testGenericProtocolsInference1] from typing import Protocol, Sequence, TypeVar T = TypeVar('T', covariant=True) class Closeable(Protocol[T]): def close(self) -> T: pass class F: def close(self) -> int: return 0 def close(arg: Closeable[T]) -> T: return arg.close() def close_all(args: Sequence[Closeable[T]]) -> T: for arg in args: arg.close() return args[0].close() arg: Closeable[int] reveal_type(close(F())) # N: Revealed type is 'builtins.int*' reveal_type(close(arg)) # N: Revealed type is 'builtins.int*' reveal_type(close_all([F()])) # N: Revealed type is 'builtins.int*' reveal_type(close_all([arg])) # N: Revealed type is 'builtins.int*' [builtins fixtures/isinstancelist.pyi] [typing fixtures/typing-full.pyi] [case testProtocolGenericInference2] from typing import Generic, TypeVar, Protocol T = TypeVar('T') S = TypeVar('S') class P(Protocol[T, S]): x: T y: S class C: x: int y: int def fun3(x: P[T, T]) -> T: pass reveal_type(fun3(C())) # N: Revealed type is 'builtins.int*' [case testProtocolGenericInferenceCovariant] from typing import Generic, TypeVar, Protocol T = TypeVar('T', covariant=True) S = TypeVar('S', covariant=True) U = TypeVar('U') class P(Protocol[T, S]): def x(self) -> T: pass def y(self) -> S: pass class C: def x(self) -> int: pass def y(self) -> int: pass def fun4(x: U, y: P[U, U]) -> U: pass reveal_type(fun4('a', C())) # N: Revealed type is 'builtins.object*' [case testUnrealtedGenericProtolsEquivalent] from typing import TypeVar, Protocol T = TypeVar('T') class PA(Protocol[T]): attr: int def meth(self) -> T: pass def other(self, arg: T) -> None: pass class PB(Protocol[T]): # exactly the same as above attr: int def meth(self) -> T: pass def other(self, arg: T) -> None: pass def fun(x: PA[T]) -> PA[T]: y: PB[T] = x 
z: PB[T] return z x: PA y: PB x = y y = x xi: PA[int] yi: PB[int] xi = yi yi = xi [case testGenericSubProtocols] from typing import TypeVar, Protocol, Tuple, Generic T = TypeVar('T') S = TypeVar('S') class P1(Protocol[T]): attr1: T class P2(P1[T], Protocol[T, S]): attr2: Tuple[T, S] class C: def __init__(self, a1: int, a2: Tuple[int, int]) -> None: self.attr1 = a1 self.attr2 = a2 c: C var: P2[int, int] = c var2: P2[int, str] = c # E: Incompatible types in assignment (expression has type "C", variable has type "P2[int, str]") \ # N: Following member(s) of "C" have conflicts: \ # N: attr2: expected "Tuple[int, str]", got "Tuple[int, int]" class D(Generic[T]): attr1: T class E(D[T]): attr2: Tuple[T, T] def f(x: T) -> T: z: P2[T, T] = E[T]() y: P2[T, T] = D[T]() # E: Incompatible types in assignment (expression has type "D[T]", variable has type "P2[T, T]") \ # N: 'D' is missing following 'P2' protocol member: \ # N: attr2 return x [builtins fixtures/isinstancelist.pyi] [case testGenericSubProtocolsExtensionInvariant] from typing import TypeVar, Protocol, Union T = TypeVar('T') S = TypeVar('S') class P1(Protocol[T]): attr1: T class P2(Protocol[T]): attr2: T class P(P1[T], P2[S], Protocol): pass class C: attr1: int attr2: str class A: attr1: A class B: attr2: B class D(A, B): pass x: P = D() # Same as P[Any, Any] var: P[Union[int, P], Union[P, str]] = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P[Union[int, P[Any, Any]], Union[P[Any, Any], str]]") \ # N: Following member(s) of "C" have conflicts: \ # N: attr1: expected "Union[int, P[Any, Any]]", got "int" \ # N: attr2: expected "Union[P[Any, Any], str]", got "str" [case testGenericSubProtocolsExtensionCovariant] from typing import TypeVar, Protocol, Union T = TypeVar('T', covariant=True) S = TypeVar('S', covariant=True) class P1(Protocol[T]): def attr1(self) -> T: pass class P2(Protocol[T]): def attr2(self) -> T: pass class P(P1[T], P2[S], Protocol): pass class C: def 
attr1(self) -> int: pass def attr2(self) -> str: pass var: P[Union[int, P], Union[P, str]] = C() # OK for covariant var2: P[Union[str, P], Union[P, int]] = C() [out] main:18: error: Incompatible types in assignment (expression has type "C", variable has type "P[Union[str, P[Any, Any]], Union[P[Any, Any], int]]") main:18: note: Following member(s) of "C" have conflicts: main:18: note: Expected: main:18: note: def attr1(self) -> Union[str, P[Any, Any]] main:18: note: Got: main:18: note: def attr1(self) -> int main:18: note: Expected: main:18: note: def attr2(self) -> Union[P[Any, Any], int] main:18: note: Got: main:18: note: def attr2(self) -> str [case testSelfTypesWithProtocolsBehaveAsWithNominal] from typing import Protocol, TypeVar T = TypeVar('T', bound=Shape) class Shape(Protocol): def combine(self: T, other: T) -> T: pass class NonProtoShape: def combine(self: T, other: T) -> T: pass class Circle: def combine(self: T, other: Shape) -> T: pass class Triangle: def combine(self, other: Shape) -> Shape: pass class Bad: def combine(self, other: int) -> str: pass def f(s: Shape) -> None: pass f(NonProtoShape()) f(Circle()) s: Shape if int(): s = Triangle() s = Bad() n2: NonProtoShape = s [out] main:26: error: Incompatible types in assignment (expression has type "Triangle", variable has type "Shape") main:26: note: Following member(s) of "Triangle" have conflicts: main:26: note: Expected: main:26: note: def combine(self, other: Triangle) -> Triangle main:26: note: Got: main:26: note: def combine(self, other: Shape) -> Shape main:27: error: Incompatible types in assignment (expression has type "Bad", variable has type "Shape") main:27: note: Following member(s) of "Bad" have conflicts: main:27: note: Expected: main:27: note: def combine(self, other: Bad) -> Bad main:27: note: Got: main:27: note: def combine(self, other: int) -> str main:29: error: Incompatible types in assignment (expression has type "Shape", variable has type "NonProtoShape") [case 
testBadVarianceInProtocols] from typing import Protocol, TypeVar T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) class Proto(Protocol[T_co, T_contra]): # type: ignore def one(self, x: T_co) -> None: # E: Cannot use a covariant type variable as a parameter pass def other(self) -> T_contra: # E: Cannot use a contravariant type variable as return type pass # Check that we respect user overrides of variance after the errors are reported x: Proto[int, float] y: Proto[float, int] y = x # OK [builtins fixtures/list.pyi] [case testSubtleBadVarianceInProtocols] from typing import Protocol, TypeVar, Iterable, Sequence T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) class Proto(Protocol[T_co, T_contra]): # E: Covariant type variable 'T_co' used in protocol where contravariant one is expected \ # E: Contravariant type variable 'T_contra' used in protocol where covariant one is expected def one(self, x: Iterable[T_co]) -> None: pass def other(self) -> Sequence[T_contra]: pass # Check that we respect user overrides of variance after the errors are reported x: Proto[int, float] y: Proto[float, int] y = x # OK [builtins fixtures/list.pyi] -- Recursive protocol types -- ------------------------ [case testRecursiveProtocols1] from typing import Protocol, Sequence, List, Generic, TypeVar T = TypeVar('T') class Traversable(Protocol): @property def leaves(self) -> Sequence[Traversable]: pass class C: pass class D(Generic[T]): leaves: List[D[T]] t: Traversable t = D[int]() # OK if int(): t = C() # E: Incompatible types in assignment (expression has type "C", variable has type "Traversable") [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] [case testRecursiveProtocols2] from typing import Protocol, TypeVar T = TypeVar('T') class Linked(Protocol[T]): val: T def next(self) -> Linked[T]: pass class L: val: int def next(self) -> L: pass def last(seq: Linked[T]) -> T: pass reveal_type(last(L())) 
# N: Revealed type is 'builtins.int*' [builtins fixtures/list.pyi] [case testRecursiveProtocolSubtleMismatch] from typing import Protocol, TypeVar T = TypeVar('T') class Linked(Protocol[T]): val: T def next(self) -> Linked[T]: pass class L: val: int def next(self) -> int: pass def last(seq: Linked[T]) -> T: pass last(L()) # E: Argument 1 to "last" has incompatible type "L"; expected "Linked[]" [case testMutuallyRecursiveProtocols] from typing import Protocol, Sequence, List class P1(Protocol): @property def attr1(self) -> Sequence[P2]: pass class P2(Protocol): @property def attr2(self) -> Sequence[P1]: pass class C: pass class A: attr1: List[B] class B: attr2: List[A] t: P1 t = A() # OK if int(): t = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P1") t = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P1") [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] [case testMutuallyRecursiveProtocolsTypesWithSubteMismatch] from typing import Protocol, Sequence, List class P1(Protocol): @property def attr1(self) -> Sequence[P2]: pass class P2(Protocol): @property def attr2(self) -> Sequence[P1]: pass class C: pass class A: attr1: List[B] class B: attr2: List[C] t: P1 t = A() # E: Incompatible types in assignment (expression has type "A", variable has type "P1") \ # N: Following member(s) of "A" have conflicts: \ # N: attr1: expected "Sequence[P2]", got "List[B]" [builtins fixtures/list.pyi] [case testMutuallyRecursiveProtocolsTypesWithSubteMismatchWriteable] from typing import Protocol class P1(Protocol): @property def attr1(self) -> P2: pass class P2(Protocol): attr2: P1 class A: attr1: B class B: attr2: A x: P1 = A() # E: Incompatible types in assignment (expression has type "A", variable has type "P1") \ # N: Following member(s) of "A" have conflicts: \ # N: attr1: expected "P2", got "B" [builtins fixtures/property.pyi] -- FIXME: things like this should work [case 
testWeirdRecursiveInferenceForProtocols-skip] from typing import Protocol, TypeVar, Generic T_co = TypeVar('T_co', covariant=True) T = TypeVar('T') class P(Protocol[T_co]): def meth(self) -> P[T_co]: pass class C(Generic[T]): def meth(self) -> C[T]: pass x: C[int] def f(arg: P[T]) -> T: pass reveal_type(f(x)) #E: Revealed type is 'builtins.int*' -- @property, @classmethod and @staticmethod in protocol types -- ----------------------------------------------------------- [case testCannotInstantiateAbstractMethodExplicitProtocolSubtypes] from typing import Protocol from abc import abstractmethod class P(Protocol): @abstractmethod def meth(self) -> int: pass class A(P): pass A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'meth' class C(A): def meth(self) -> int: pass class C2(P): def meth(self) -> int: pass C() C2() [case testCannotInstantiateAbstractVariableExplicitProtocolSubtypes] from typing import Protocol class P(Protocol): attr: int class A(P): pass A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'attr' class C(A): attr: int class C2(P): def __init__(self) -> None: self.attr = 1 C() C2() class P2(Protocol): attr: int = 1 class B(P2): pass B() # OK, attr is not abstract [case testClassVarsInProtocols] from typing import Protocol, ClassVar class PInst(Protocol): v: int class PClass(Protocol): v: ClassVar[int] class CInst: v: int class CClass: v: ClassVar[int] x: PInst y: PClass x = CInst() if int(): x = CClass() # E: Incompatible types in assignment (expression has type "CClass", variable has type "PInst") \ # N: Protocol member PInst.v expected instance variable, got class variable y = CClass() if int(): y = CInst() # E: Incompatible types in assignment (expression has type "CInst", variable has type "PClass") \ # N: Protocol member PClass.v expected class variable, got instance variable [case testPropertyInProtocols] from typing import Protocol class PP(Protocol): @property def attr(self) -> int: pass class 
P(Protocol): attr: int x: P y: PP y = x x2: P y2: PP x2 = y2 # E: Incompatible types in assignment (expression has type "PP", variable has type "P") \ # N: Protocol member P.attr expected settable variable, got read-only attribute [builtins fixtures/property.pyi] [case testSettablePropertyInProtocols] from typing import Protocol class PPS(Protocol): @property def attr(self) -> int: pass @attr.setter def attr(self, x: int) -> None: pass class PP(Protocol): @property def attr(self) -> int: pass class P(Protocol): attr: int x: P z: PPS z = x x2: P z2: PPS x2 = z2 y3: PP z3: PPS y3 = z3 y4: PP z4: PPS z4 = y4 # E: Incompatible types in assignment (expression has type "PP", variable has type "PPS") \ # N: Protocol member PPS.attr expected settable variable, got read-only attribute [builtins fixtures/property.pyi] [case testStaticAndClassMethodsInProtocols] from typing import Protocol, Type, TypeVar class P(Protocol): def meth(self, x: int) -> str: pass class PC(Protocol): @classmethod def meth(cls, x: int) -> str: pass class B: @staticmethod def meth(x: int) -> str: pass class C: def meth(self, x: int) -> str: pass x: P x = C() if int(): x = B() y: PC y = B() if int(): y = C() \ # E: Incompatible types in assignment (expression has type "C", variable has type "PC") \ # N: Protocol member PC.meth expected class or static method [builtins fixtures/classmethod.pyi] [case testOverloadedMethodsInProtocols] from typing import overload, Protocol, Union class P(Protocol): @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass class C: def f(self, x: Union[int, str]) -> None: pass class D: def f(self, x: int) -> None: pass x: P = C() if int(): x = D() [out] main:18: error: Incompatible types in assignment (expression has type "D", variable has type "P") main:18: note: Following member(s) of "D" have conflicts: main:18: note: Expected: main:18: note: @overload main:18: note: def f(self, x: int) -> int main:18: note: @overload main:18: note: def 
f(self, x: str) -> str main:18: note: Got: main:18: note: def f(self, x: int) -> None [case testCannotInstantiateProtocolWithOverloadedUnimplementedMethod] from typing import overload, Protocol class P(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bytes: pass class C(P): pass C() # E: Cannot instantiate abstract class 'C' with abstract attribute 'meth' [case testCanUseOverloadedImplementationsInProtocols] from typing import overload, Protocol, Union class P(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bool: pass def meth(self, x: Union[int, str]): if isinstance(x, int): return x return True class C(P): pass x = C() reveal_type(x.meth('hi')) # N: Revealed type is 'builtins.bool' [builtins fixtures/isinstance.pyi] [case testProtocolsWithIdenticalOverloads] from typing import overload, Protocol class PA(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bytes: pass class PB(Protocol): # identical to above @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bytes: pass x: PA y: PB x = y def fun(arg: PB) -> None: pass fun(x) [case testProtocolsWithIncompatibleOverloads] from typing import overload, Protocol class PA(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bytes: pass class PB(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: bytes) -> str: pass x: PA y: PB x = y [out] main:16: error: Incompatible types in assignment (expression has type "PB", variable has type "PA") main:16: note: Following member(s) of "PB" have conflicts: main:16: note: Expected: main:16: note: @overload main:16: note: def meth(self, x: int) -> int main:16: note: @overload main:16: note: def meth(self, x: str) -> bytes main:16: note: Got: main:16: note: @overload main:16: note: def meth(self, x: int) -> int main:16: note: @overload main:16: note: def 
meth(self, x: bytes) -> str -- Join and meet with protocol types -- --------------------------------- [case testJoinProtocolWithProtocol] from typing import Protocol class P(Protocol): attr: int class P2(Protocol): attr: int attr2: str x: P y: P2 l0 = [x, x] l1 = [y, y] l = [x, y] reveal_type(l0) # N: Revealed type is 'builtins.list[__main__.P*]' reveal_type(l1) # N: Revealed type is 'builtins.list[__main__.P2*]' reveal_type(l) # N: Revealed type is 'builtins.list[__main__.P*]' [builtins fixtures/list.pyi] [case testJoinOfIncompatibleProtocols] from typing import Protocol class P(Protocol): attr: int class P2(Protocol): attr2: str x: P y: P2 reveal_type([x, y]) # N: Revealed type is 'builtins.list[builtins.object*]' [builtins fixtures/list.pyi] [case testJoinProtocolWithNormal] from typing import Protocol class P(Protocol): attr: int class C: attr: int x: P y: C l = [x, y] reveal_type(l) # N: Revealed type is 'builtins.list[__main__.P*]' [builtins fixtures/list.pyi] [case testMeetProtocolWithProtocol] from typing import Protocol, Callable, TypeVar class P(Protocol): attr: int class P2(Protocol): attr: int attr2: str T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: P, y: P2) -> None: pass reveal_type(f(g)) # N: Revealed type is '__main__.P2*' [case testMeetOfIncompatibleProtocols] from typing import Protocol, Callable, TypeVar class P(Protocol): attr: int class P2(Protocol): attr2: str T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: P, y: P2) -> None: pass x = f(g) reveal_type(x) # N: Revealed type is 'None' [case testMeetProtocolWithNormal] from typing import Protocol, Callable, TypeVar class P(Protocol): attr: int class C: attr: int T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: P, y: C) -> None: pass reveal_type(f(g)) # N: Revealed type is '__main__.C*' [case testInferProtocolFromProtocol] from typing import Protocol, Sequence, TypeVar, Generic T = TypeVar('T') class Box(Protocol[T]): content: T 
class Linked(Protocol[T]): val: T def next(self) -> Linked[T]: pass class L(Generic[T]): val: Box[T] def next(self) -> L[T]: pass def last(seq: Linked[T]) -> T: pass reveal_type(last(L[int]())) # N: Revealed type is '__main__.Box*[builtins.int*]' reveal_type(last(L[str]()).content) # N: Revealed type is 'builtins.str*' [case testOverloadOnProtocol] from typing import overload, Protocol, runtime_checkable @runtime_checkable class P1(Protocol): attr1: int class P2(Protocol): attr2: str class C1: attr1: int class C2: attr2: str class C: pass @overload def f(x: P1) -> int: ... @overload def f(x: P2) -> str: ... def f(x): if isinstance(x, P1): return P1.attr1 if isinstance(x, P2): # E: Only @runtime_checkable protocols can be used with instance and class checks return P1.attr2 reveal_type(f(C1())) # N: Revealed type is 'builtins.int' reveal_type(f(C2())) # N: Revealed type is 'builtins.str' class D(C1, C2): pass # Compatible with both P1 and P2 # TODO: Should this return a union instead? reveal_type(f(D())) # N: Revealed type is 'builtins.int' f(C()) # E: No overload variant of "f" matches argument type "C" \ # N: Possible overload variants: \ # N: def f(x: P1) -> int \ # N: def f(x: P2) -> str [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] -- Unions of protocol types -- ------------------------ [case testBasicUnionsOfProtocols] from typing import Union, Protocol class P1(Protocol): attr1: int class P2(Protocol): attr2: int class C1: attr1: int class C2: attr2: int class C(C1, C2): pass class B: ... 
x: Union[P1, P2] x = C1() if int(): x = C2() x = C() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "Union[P1, P2]") [case testUnionsOfNormalClassesWithProtocols] from typing import Protocol, Union class P1(Protocol): attr1: int class P2(Protocol): attr2: int class C1: attr1: int class C2: attr2: int class C(C1, C2): pass class D1: attr1: int def f1(x: P1) -> None: pass def f2(x: P2) -> None: pass x: Union[C1, C2] y: Union[C1, D1] z: Union[C, D1] f1(x) # E: Argument 1 to "f1" has incompatible type "Union[C1, C2]"; expected "P1" f1(y) f1(z) f2(x) # E: Argument 1 to "f2" has incompatible type "Union[C1, C2]"; expected "P2" f2(z) # E: Argument 1 to "f2" has incompatible type "Union[C, D1]"; expected "P2" -- Type[] with protocol types -- -------------------------- [case testInstantiationProtocolInTypeForFunctions] from typing import Type, Protocol class P(Protocol): def m(self) -> None: pass class P1(Protocol): def m(self) -> None: pass class Pbad(Protocol): def mbad(self) -> int: pass class B(P): pass class C: def m(self) -> None: pass def f(cls: Type[P]) -> P: return cls() # OK def g() -> P: return P() # E: Cannot instantiate protocol class "P" f(P) # E: Only concrete class can be given where "Type[P]" is expected f(B) # OK f(C) # OK x: Type[P1] xbad: Type[Pbad] f(x) # OK f(xbad) # E: Argument 1 to "f" has incompatible type "Type[Pbad]"; expected "Type[P]" [case testInstantiationProtocolInTypeForAliases] from typing import Type, Protocol class P(Protocol): def m(self) -> None: pass class C: def m(self) -> None: pass def f(cls: Type[P]) -> P: return cls() # OK Alias = P GoodAlias = C Alias() # E: Cannot instantiate protocol class "P" GoodAlias() f(Alias) # E: Only concrete class can be given where "Type[P]" is expected f(GoodAlias) [case testInstantiationProtocolInTypeForVariables] from typing import Type, Protocol class P(Protocol): def m(self) -> None: pass class B(P): pass class C: def m(self) -> None: pass var: Type[P] 
var() if int(): var = P # E: Can only assign concrete classes to a variable of type "Type[P]" var = B # OK var = C # OK var_old = None # type: Type[P] # Old syntax for variable annotations var_old() if int(): var_old = P # E: Can only assign concrete classes to a variable of type "Type[P]" var_old = B # OK var_old = C # OK [case testInstantiationProtocolInTypeForClassMethods] from typing import Type, Protocol class Logger: @staticmethod def log(a: Type[C]): pass class C(Protocol): @classmethod def action(cls) -> None: cls() #OK for classmethods Logger.log(cls) #OK for classmethods [builtins fixtures/classmethod.pyi] -- isinstance() with @runtime_checkable protocols -- ---------------------------------------------- [case testSimpleRuntimeProtocolCheck] from typing import Protocol, runtime_checkable @runtime_checkable class C: # E: @runtime_checkable can only be used with protocol classes pass class P(Protocol): def meth(self) -> None: pass @runtime_checkable class R(Protocol): def meth(self) -> int: pass x: object if isinstance(x, P): # E: Only @runtime_checkable protocols can be used with instance and class checks reveal_type(x) # N: Revealed type is '__main__.P' if isinstance(x, R): reveal_type(x) # N: Revealed type is '__main__.R' reveal_type(x.meth()) # N: Revealed type is 'builtins.int' [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [case testRuntimeIterableProtocolCheck] from typing import Iterable, List, Union x: Union[int, List[str]] if isinstance(x, Iterable): reveal_type(x) # N: Revealed type is 'builtins.list[builtins.str]' [builtins fixtures/isinstancelist.pyi] [typing fixtures/typing-full.pyi] [case testConcreteClassesInProtocolsIsInstance] from typing import Protocol, runtime_checkable, TypeVar, Generic T = TypeVar('T') @runtime_checkable class P1(Protocol): def meth1(self) -> int: pass @runtime_checkable class P2(Protocol): def meth2(self) -> int: pass @runtime_checkable class P(P1, P2, Protocol): pass class C1(Generic[T]): def 
meth1(self) -> T: pass class C2: def meth2(self) -> int: pass class C(C1[int], C2): pass c = C() if isinstance(c, P1): reveal_type(c) # N: Revealed type is '__main__.C' else: reveal_type(c) # Unreachable if isinstance(c, P): reveal_type(c) # N: Revealed type is '__main__.C' else: reveal_type(c) # Unreachable c1i: C1[int] if isinstance(c1i, P1): reveal_type(c1i) # N: Revealed type is '__main__.C1[builtins.int]' else: reveal_type(c1i) # Unreachable if isinstance(c1i, P): reveal_type(c1i) # Unreachable else: reveal_type(c1i) # N: Revealed type is '__main__.C1[builtins.int]' c1s: C1[str] if isinstance(c1s, P1): reveal_type(c1s) # Unreachable else: reveal_type(c1s) # N: Revealed type is '__main__.C1[builtins.str]' c2: C2 if isinstance(c2, P): reveal_type(c2) # Unreachable else: reveal_type(c2) # N: Revealed type is '__main__.C2' [builtins fixtures/isinstancelist.pyi] [typing fixtures/typing-full.pyi] [case testConcreteClassesUnionInProtocolsIsInstance] from typing import Protocol, runtime_checkable, TypeVar, Generic, Union T = TypeVar('T') @runtime_checkable class P1(Protocol): def meth1(self) -> int: pass @runtime_checkable class P2(Protocol): def meth2(self) -> int: pass class C1(Generic[T]): def meth1(self) -> T: pass class C2: def meth2(self) -> int: pass x: Union[C1[int], C2] if isinstance(x, P1): reveal_type(x) # N: Revealed type is '__main__.C1[builtins.int]' else: reveal_type(x) # N: Revealed type is '__main__.C2' if isinstance(x, P2): reveal_type(x) # N: Revealed type is '__main__.C2' else: reveal_type(x) # N: Revealed type is '__main__.C1[builtins.int]' [builtins fixtures/isinstancelist.pyi] [typing fixtures/typing-full.pyi] -- Non-Instances and protocol types (Callable vs __call__ etc.) 
-- ------------------------------------------------------------ [case testBasicTupleStructuralSubtyping] from typing import Tuple, TypeVar, Protocol T = TypeVar('T', covariant=True) class MyProto(Protocol[T]): def __len__(self) -> T: pass t: Tuple[int, str] def f(x: MyProto[int]) -> None: pass f(t) # OK y: MyProto[str] y = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "MyProto[str]") [builtins fixtures/isinstancelist.pyi] [case testBasicNamedTupleStructuralSubtyping] from typing import NamedTuple, TypeVar, Protocol T = TypeVar('T', covariant=True) S = TypeVar('S', covariant=True) class P(Protocol[T, S]): @property def x(self) -> T: pass @property def y(self) -> S: pass class N(NamedTuple): x: int y: str class N2(NamedTuple): x: int class N3(NamedTuple): x: int y: int z: N z3: N3 def fun(x: P[int, str]) -> None: pass def fun2(x: P[int, int]) -> None: pass def fun3(x: P[T, T]) -> T: return x.x fun(z) fun2(z) # E: Argument 1 to "fun2" has incompatible type "N"; expected "P[int, int]" \ # N: Following member(s) of "N" have conflicts: \ # N: y: expected "int", got "str" fun(N2(1)) # E: Argument 1 to "fun" has incompatible type "N2"; expected "P[int, str]" \ # N: 'N2' is missing following 'P' protocol member: \ # N: y reveal_type(fun3(z)) # N: Revealed type is 'builtins.object*' reveal_type(fun3(z3)) # N: Revealed type is 'builtins.int*' [builtins fixtures/list.pyi] [case testBasicCallableStructuralSubtyping] from typing import Callable, Generic, TypeVar def apply(f: Callable[[int], int], x: int) -> int: return f(x) class Add5: def __call__(self, x: int) -> int: return x + 5 apply(Add5(), 5) T = TypeVar('T') def apply_gen(f: Callable[[T], T]) -> T: pass reveal_type(apply_gen(Add5())) # N: Revealed type is 'builtins.int*' def apply_str(f: Callable[[str], int], x: str) -> int: return f(x) apply_str(Add5(), 'a') # E: Argument 1 to "apply_str" has incompatible type "Add5"; expected "Callable[[str], int]" \ # N: 
"Add5.__call__" has type "Callable[[Arg(int, 'x')], int]" [builtins fixtures/isinstancelist.pyi] [case testMoreComplexCallableStructuralSubtyping] from mypy_extensions import Arg, VarArg from typing import Protocol, Callable def call_soon(cb: Callable[[Arg(int, 'x'), VarArg(str)], int]): pass class Good: def __call__(self, x: int, *rest: str) -> int: pass class Bad1: def __call__(self, x: int, *rest: int) -> int: pass class Bad2: def __call__(self, y: int, *rest: str) -> int: pass call_soon(Good()) call_soon(Bad1()) # E: Argument 1 to "call_soon" has incompatible type "Bad1"; expected "Callable[[int, VarArg(str)], int]" \ # N: "Bad1.__call__" has type "Callable[[Arg(int, 'x'), VarArg(int)], int]" call_soon(Bad2()) # E: Argument 1 to "call_soon" has incompatible type "Bad2"; expected "Callable[[int, VarArg(str)], int]" \ # N: "Bad2.__call__" has type "Callable[[Arg(int, 'y'), VarArg(str)], int]" [builtins fixtures/isinstancelist.pyi] [case testStructuralSupportForPartial] from typing import Callable, TypeVar, Generic, Any T = TypeVar('T') class partial(Generic[T]): def __init__(self, func: Callable[..., T], *args: Any) -> None: ... def __call__(self, *args: Any) -> T: ... def inc(a: int, temp: str) -> int: pass def foo(f: Callable[[int], T]) -> T: return f(1) reveal_type(foo(partial(inc, 'temp'))) # N: Revealed type is 'builtins.int*' [builtins fixtures/list.pyi] [case testStructuralInferenceForCallable] from typing import Callable, TypeVar, Tuple T = TypeVar('T') S = TypeVar('S') class Actual: def __call__(self, arg: int) -> str: pass def fun(cb: Callable[[T], S]) -> Tuple[T, S]: pass reveal_type(fun(Actual())) # N: Revealed type is 'Tuple[builtins.int*, builtins.str*]' [builtins fixtures/tuple.pyi] -- Standard protocol types (SupportsInt, Sized, etc.) 
-- -------------------------------------------------- -- More tests could be added for types from typing converted to protocols [case testBasicSizedProtocol] from typing import Sized class Foo: def __len__(self) -> int: return 42 def bar(a: Sized) -> int: return a.__len__() bar(Foo()) bar((1, 2)) bar(1) # E: Argument 1 to "bar" has incompatible type "int"; expected "Sized" [builtins fixtures/isinstancelist.pyi] [typing fixtures/typing-full.pyi] [case testBasicSupportsIntProtocol] from typing import SupportsInt class Bar: def __int__(self): return 1 def foo(a: SupportsInt): pass foo(Bar()) foo('no way') # E: Argument 1 to "foo" has incompatible type "str"; expected "SupportsInt" [builtins fixtures/isinstancelist.pyi] [typing fixtures/typing-full.pyi] -- Additional tests and corner cases for protocols -- ---------------------------------------------- [case testAnyWithProtocols] from typing import Protocol, Any, TypeVar T = TypeVar('T') class P1(Protocol): attr1: int class P2(Protocol[T]): attr2: T class P3(Protocol): attr: P3 def f1(x: P1) -> None: pass def f2(x: P2[str]) -> None: pass def f3(x: P3) -> None: pass class C1: attr1: Any class C2: attr2: Any class C3: attr: Any f1(C1()) f2(C2()) f3(C3()) f2(C3()) # E: Argument 1 to "f2" has incompatible type "C3"; expected "P2[str]" a: Any f1(a) f2(a) f3(a) [case testErrorsForProtocolsInDifferentPlaces] from typing import Protocol class P(Protocol): attr1: int attr2: str attr3: int class C: attr1: str @property def attr2(self) -> int: pass x: P = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P") \ # N: 'C' is missing following 'P' protocol member: \ # N: attr3 \ # N: Following member(s) of "C" have conflicts: \ # N: attr1: expected "int", got "str" \ # N: attr2: expected "str", got "int" \ # N: Protocol member P.attr2 expected settable variable, got read-only attribute def f(x: P) -> P: return C() # E: Incompatible return value type (got "C", expected "P") \ # N: 'C' is missing 
following 'P' protocol member: \ # N: attr3 \ # N: Following member(s) of "C" have conflicts: \ # N: attr1: expected "int", got "str" \ # N: attr2: expected "str", got "int" \ # N: Protocol member P.attr2 expected settable variable, got read-only attribute f(C()) # E: Argument 1 to "f" has incompatible type "C"; expected "P" \ # N: 'C' is missing following 'P' protocol member: \ # N: attr3 \ # N: Following member(s) of "C" have conflicts: \ # N: attr1: expected "int", got "str" \ # N: attr2: expected "str", got "int" \ # N: Protocol member P.attr2 expected settable variable, got read-only attribute [builtins fixtures/list.pyi] [case testIterableProtocolOnClass] from typing import TypeVar, Iterator T = TypeVar('T', bound='A') class A: def __iter__(self: T) -> Iterator[T]: pass class B(A): pass reveal_type(list(b for b in B())) # N: Revealed type is 'builtins.list[__main__.B*]' reveal_type(list(B())) # N: Revealed type is 'builtins.list[__main__.B*]' [builtins fixtures/list.pyi] [case testIterableProtocolOnMetaclass] from typing import TypeVar, Iterator, Type T = TypeVar('T') class EMeta(type): def __iter__(self: Type[T]) -> Iterator[T]: pass class E(metaclass=EMeta): pass class C(E): pass reveal_type(list(c for c in C)) # N: Revealed type is 'builtins.list[__main__.C*]' reveal_type(list(C)) # N: Revealed type is 'builtins.list[__main__.C*]' [builtins fixtures/list.pyi] [case testClassesGetattrWithProtocols] from typing import Protocol class P(Protocol): attr: int class PP(Protocol): @property def attr(self) -> int: pass class C: def __getattr__(self, attr: str) -> int: pass class C2(C): def __setattr__(self, attr: str, val: int) -> None: pass class D: def __getattr__(self, attr: str) -> str: pass def fun(x: P) -> None: reveal_type(P.attr) # N: Revealed type is 'builtins.int' def fun_p(x: PP) -> None: reveal_type(P.attr) # N: Revealed type is 'builtins.int' fun(C()) # E: Argument 1 to "fun" has incompatible type "C"; expected "P" \ # N: Protocol member P.attr 
expected settable variable, got read-only attribute fun(C2()) fun_p(D()) # E: Argument 1 to "fun_p" has incompatible type "D"; expected "PP" \ # N: Following member(s) of "D" have conflicts: \ # N: attr: expected "int", got "str" fun_p(C()) # OK [builtins fixtures/list.pyi] [case testImplicitTypesInProtocols] from typing import Protocol class P(Protocol): x = 1 # E: All protocol members must have explicitly declared types class C: x: int class D: x: str x: P x = D() # E: Incompatible types in assignment (expression has type "D", variable has type "P") \ # N: Following member(s) of "D" have conflicts: \ # N: x: expected "int", got "str" x = C() # OK [builtins fixtures/list.pyi] [case testProtocolIncompatibilityWithGenericMethod] from typing import Protocol, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Protocol): def f(self, x: T) -> None: pass class B: def f(self, x: S, y: T) -> None: pass x: A = B() [out] main:11: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:11: note: Following member(s) of "B" have conflicts: main:11: note: Expected: main:11: note: def [T] f(self, x: T) -> None main:11: note: Got: main:11: note: def [S, T] f(self, x: S, y: T) -> None [case testProtocolIncompatibilityWithGenericMethodBounded] from typing import Protocol, TypeVar T = TypeVar('T') S = TypeVar('S', bound=int) class A(Protocol): def f(self, x: T) -> None: pass class B: def f(self, x: S, y: T) -> None: pass x: A = B() [out] main:11: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:11: note: Following member(s) of "B" have conflicts: main:11: note: Expected: main:11: note: def [T] f(self, x: T) -> None main:11: note: Got: main:11: note: def [S <: int, T] f(self, x: S, y: T) -> None [case testProtocolIncompatibilityWithGenericRestricted] from typing import Protocol, TypeVar T = TypeVar('T') S = TypeVar('S', int, str) class A(Protocol): def f(self, x: T) -> None: pass class B: def f(self, x: 
S, y: T) -> None: pass x: A = B() [out] main:11: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:11: note: Following member(s) of "B" have conflicts: main:11: note: Expected: main:11: note: def [T] f(self, x: T) -> None main:11: note: Got: main:11: note: def [S in (int, str), T] f(self, x: S, y: T) -> None [case testProtocolIncompatibilityWithManyOverloads] from typing import Protocol, overload class C1: pass class C2: pass class A(Protocol): @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass @overload def f(self, x: C1) -> C2: pass @overload def f(self, x: C2) -> C1: pass class B: def f(self) -> None: pass x: A = B() [out] main:18: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:18: note: Following member(s) of "B" have conflicts: main:18: note: Expected: main:18: note: @overload main:18: note: def f(self, x: int) -> int main:18: note: @overload main:18: note: def f(self, x: str) -> str main:18: note: <2 more overloads not shown> main:18: note: Got: main:18: note: def f(self) -> None [case testProtocolIncompatibilityWithManyConflicts] from typing import Protocol class A(Protocol): def f(self, x: int) -> None: pass def g(self, x: int) -> None: pass def h(self, x: int) -> None: pass def i(self, x: int) -> None: pass class B: def f(self, x: str) -> None: pass def g(self, x: str) -> None: pass def h(self, x: str) -> None: pass def i(self, x: str) -> None: pass x: A = B() [out] main:14: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:14: note: Following member(s) of "B" have conflicts: main:14: note: Expected: main:14: note: def f(self, x: int) -> None main:14: note: Got: main:14: note: def f(self, x: str) -> None main:14: note: Expected: main:14: note: def g(self, x: int) -> None main:14: note: Got: main:14: note: def g(self, x: str) -> None main:14: note: <2 more conflict(s) not shown> [case 
testDontShowNotesForTupleAndIterableProtocol] from typing import Iterable, Sequence, Protocol, NamedTuple class N(NamedTuple): x: int def f1(x: Iterable[str]) -> None: pass def f2(x: Sequence[str]) -> None: pass # The errors below should be short f1(N(1)) # E: Argument 1 to "f1" has incompatible type "N"; expected "Iterable[str]" f2(N(2)) # E: Argument 1 to "f2" has incompatible type "N"; expected "Sequence[str]" [builtins fixtures/tuple.pyi] [case testNotManyFlagConflitsShownInProtocols] from typing import Protocol class AllSettable(Protocol): a: int b: int c: int d: int class AllReadOnly: @property def a(self) -> int: pass @property def b(self) -> int: pass @property def c(self) -> int: pass @property def d(self) -> int: pass x: AllSettable = AllReadOnly() [builtins fixtures/property.pyi] [out] main:19: error: Incompatible types in assignment (expression has type "AllReadOnly", variable has type "AllSettable") main:19: note: Protocol member AllSettable.a expected settable variable, got read-only attribute main:19: note: Protocol member AllSettable.b expected settable variable, got read-only attribute main:19: note: <2 more conflict(s) not shown> [case testProtocolsMoreConflictsNotShown] from typing_extensions import Protocol from typing import Generic, TypeVar T = TypeVar('T') class MockMapping(Protocol[T]): def a(self, x: T) -> int: pass def b(self, x: T) -> int: pass def c(self, x: T) -> int: pass d: T e: T f: T class MockDict(MockMapping[T]): more: int def f(x: MockMapping[int]) -> None: pass x: MockDict[str] f(x) # E: Argument 1 to "f" has incompatible type "MockDict[str]"; expected "MockMapping[int]" [case testProtocolNotesForComplexSignatures] from typing import Protocol, Optional class P(Protocol): def meth(self, x: int, *args: str) -> None: pass def other(self, *args, hint: Optional[str] = None, **kwargs: str) -> None: pass class C: def meth(self) -> int: pass def other(self) -> int: pass x: P = C() [builtins fixtures/dict.pyi] [out] main:10: error: 
Incompatible types in assignment (expression has type "C", variable has type "P") main:10: note: Following member(s) of "C" have conflicts: main:10: note: Expected: main:10: note: def meth(self, x: int, *args: str) -> None main:10: note: Got: main:10: note: def meth(self) -> int main:10: note: Expected: main:10: note: def other(self, *args: Any, hint: Optional[str] = ..., **kwargs: str) -> None main:10: note: Got: main:10: note: def other(self) -> int [case testObjectAllowedInProtocolBases] from typing import Protocol class P(Protocol, object): pass [out] [case testNoneSubtypeOfEmptyProtocol] from typing import Protocol class P(Protocol): pass x: P = None [out] [case testNoneSubtypeOfAllProtocolsWithoutStrictOptional] from typing import Protocol class P(Protocol): attr: int def meth(self, arg: str) -> str: pass x: P = None [out] [case testNoneSubtypeOfEmptyProtocolStrict] # flags: --strict-optional from typing import Protocol class P(Protocol): pass x: P = None class PBad(Protocol): x: int y: PBad = None # E: Incompatible types in assignment (expression has type "None", variable has type "PBad") [out] [case testOnlyMethodProtocolUsableWithIsSubclass] from typing import Protocol, runtime_checkable, Union, Type @runtime_checkable class P(Protocol): def meth(self) -> int: pass @runtime_checkable class PBad(Protocol): x: str class C: x: str def meth(self) -> int: pass class E: pass cls: Type[Union[C, E]] issubclass(cls, PBad) # E: Only protocols that don't have non-method members can be used with issubclass() \ # N: Protocol "PBad" has non-method member(s): x if issubclass(cls, P): reveal_type(cls) # N: Revealed type is 'Type[__main__.C]' else: reveal_type(cls) # N: Revealed type is 'Type[__main__.E]' [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] [out] [case testCallableImplementsProtocol] from typing import Protocol class Caller(Protocol): def __call__(self, x: str, *args: int) -> None: ... 
def call(x: str, *args: int) -> None: pass def bad(x: int, *args: str) -> None: pass def func(caller: Caller) -> None: pass func(call) func(bad) # E: Argument 1 to "func" has incompatible type "Callable[[int, VarArg(str)], None]"; expected "Caller" [out] [case testCallableImplementsProtocolGeneric] from typing import Protocol, TypeVar, Tuple T = TypeVar('T') S = TypeVar('S') class Caller(Protocol[T, S]): def __call__(self, x: T, y: S) -> Tuple[T, S]: ... def call(x: int, y: str) -> Tuple[int, str]: ... def func(caller: Caller[T, S]) -> Tuple[T, S]: pass reveal_type(func(call)) # N: Revealed type is 'Tuple[builtins.int*, builtins.str*]' [builtins fixtures/tuple.pyi] [out] [case testCallableImplementsProtocolGenericTight] from typing import Protocol, TypeVar T = TypeVar('T') class Caller(Protocol): def __call__(self, x: T) -> T: ... def call(x: T) -> T: ... def bad(x: int) -> int: ... def func(caller: Caller) -> None: pass func(call) func(bad) # E: Argument 1 to "func" has incompatible type "Callable[[int], int]"; expected "Caller" [builtins fixtures/tuple.pyi] [out] [case testCallableImplementsProtocolGenericNotGeneric] from typing import Protocol, TypeVar, Tuple T = TypeVar('T') class Caller(Protocol): def __call__(self, x: int) -> int: ... def call(x: T) -> T: ... def bad(x: T) -> Tuple[T, T]: ... def func(caller: Caller) -> None: pass func(call) func(bad) # E: Argument 1 to "func" has incompatible type "Callable[[T], Tuple[T, T]]"; expected "Caller" [builtins fixtures/tuple.pyi] [out] [case testCallableImplementsProtocolOverload] from typing import Protocol, overload, Union class Caller(Protocol): @overload def __call__(self, x: int) -> int: ... @overload def __call__(self, x: str) -> str: ... @overload def call(x: int) -> int: ... @overload def call(x: str) -> str: ... 
def call(x: Union[int, str]) -> Union[int, str]: pass def bad(x: Union[int, str]) -> Union[int, str]: pass def func(caller: Caller) -> None: pass func(call) func(bad) # E: Argument 1 to "func" has incompatible type "Callable[[Union[int, str]], Union[int, str]]"; expected "Caller" [out] [case testCallableImplementsProtocolExtraNote] from typing import Protocol class Caller(Protocol): def __call__(self, x: str, *args: int) -> None: ... def bad(x: int, *args: str) -> None: pass cb: Caller = bad # E: Incompatible types in assignment (expression has type "Callable[[int, VarArg(str)], None]", variable has type "Caller") \ # N: "Caller.__call__" has type "Callable[[Arg(str, 'x'), VarArg(int)], None]" [out] [case testCallableImplementsProtocolArgName] from typing import Protocol class Caller(Protocol): def __call__(self, x: str) -> None: ... class CallerAnon(Protocol): def __call__(self, __x: str) -> None: ... def call(x: str) -> None: pass def bad(y: str) -> None: pass def func(caller: Caller) -> None: pass def anon(caller: CallerAnon) -> None: pass func(call) func(bad) # E: Argument 1 to "func" has incompatible type "Callable[[str], None]"; expected "Caller" anon(bad) [out] [case testCallableProtocolVsProtocol] from typing import Protocol class One(Protocol): def __call__(self, x: str) -> None: ... class Other(Protocol): def __call__(self, x: str) -> None: ... class Bad(Protocol): def __call__(self, zzz: str) -> None: ... def func(caller: One) -> None: pass a: Other b: Bad func(a) func(b) # E: Argument 1 to "func" has incompatible type "Bad"; expected "One" [out] [case testJoinProtocolCallback] from typing import Protocol, Callable class A: ... class B(A): ... class C(B): ... class D(B): ... class Call(Protocol): def __call__(self, x: B) -> C: ... 
Normal = Callable[[A], D] a: Call b: Normal reveal_type([a, b]) # N: Revealed type is 'builtins.list[def (__main__.B) -> __main__.B]' reveal_type([b, a]) # N: Revealed type is 'builtins.list[def (__main__.B) -> __main__.B]' [builtins fixtures/list.pyi] [out] [case testMeetProtocolCallback] from typing import Protocol, Callable class A: ... class B(A): ... class C(B): ... class D(B): ... class Call(Protocol): def __call__(self, __x: C) -> B: ... Normal = Callable[[D], A] def a(x: Call) -> None: ... def b(x: Normal) -> None: ... reveal_type([a, b]) # N: Revealed type is 'builtins.list[def (x: def (__main__.B) -> __main__.B)]' reveal_type([b, a]) # N: Revealed type is 'builtins.list[def (x: def (__main__.B) -> __main__.B)]' [builtins fixtures/list.pyi] [out] [case testProtocolsAlwaysABCs] from typing import Protocol class P(Protocol): ... class C(P): ... reveal_type(C.register(int)) # N: Revealed type is 'def () -> builtins.int' [typing fixtures/typing-full.pyi] [out] [case testProtocolVarianceAfterDecorators] # The test case is simplified, in reality this caused problems with @abstractmethod # in stubs and test fixtures. from typing import Protocol, TypeVar T = TypeVar('T') def dec(x: T) -> T: ... alias = dec class P(Protocol[T]): @alias def meth(self, arg: T) -> T: ... [out] [case testNamedTupleWithNoArgsCallableField] from typing import Callable, NamedTuple, Protocol class N(NamedTuple): func: Callable[[], str] class P(Protocol): @property def func(self) -> Callable[[], str]: ... p: P = N(lambda: 'foo') [builtins fixtures/property.pyi] [case testNamedTupleWithManyArgsCallableField] from typing import Callable, NamedTuple, Protocol class N(NamedTuple): func: Callable[[str, str, str], str] class P(Protocol): @property def func(self) -> Callable[[str, str, str], str]: ... 
p: P = N(lambda a, b, c: 'foo') [builtins fixtures/property.pyi] [case testLiteralsAgainstProtocols] from typing import SupportsInt, SupportsAbs, TypeVar from typing_extensions import Literal, Final T = TypeVar('T') def abs(x: SupportsAbs[T]) -> T: ... def foo(x: SupportsInt) -> None: ... ONE: Final = 1 TWO: Literal[2] ALL: Literal[1, 2, 3] foo(ONE) foo(TWO) foo(3) reveal_type(abs(ONE)) # N: Revealed type is 'builtins.int*' reveal_type(abs(TWO)) # N: Revealed type is 'builtins.int*' reveal_type(abs(3)) # N: Revealed type is 'builtins.int*' reveal_type(abs(ALL)) # N: Revealed type is 'builtins.int*' [builtins fixtures/float.pyi] [typing fixtures/typing-full.pyi] mypy-0.761/test-data/unit/check-python2.test0000644€tŠÔÚ€2›s®0000002250513576752246025150 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checker test cases for Python 2.x mode. [case testUnicode] u = u'foo' if int(): u = unicode() if int(): s = '' if int(): s = u'foo' # E: Incompatible types in assignment (expression has type "unicode", variable has type "str") if int(): s = b'foo' [builtins_py2 fixtures/python2.pyi] [case testTypeVariableUnicode] from typing import TypeVar T = TypeVar(u'T') [case testPrintStatement] print ''() # E: "str" not callable print 1, 1() # E: "int" not callable [case testPrintStatementWithTarget] class A: def write(self, s): # type: (str) -> None pass print >>A(), '' print >>None, '' print >>1, '' # E: "int" has no attribute "write" print >>(None + ''), None # E: Unsupported left operand type for + ("None") [case testDivision] class A: def __div__(self, x): # type: (int) -> str pass s = A() / 1 if int(): s = '' if int(): s = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testStrUnicodeCompatibility] import typing def f(x): # type: (unicode) -> None pass f('') f(u'') f(b'') [builtins_py2 fixtures/python2.pyi] [case testStaticMethodWithCommentSignature] class A: @staticmethod def f(x): # type: (int) -> str return '' A.f(1) 
A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [builtins_py2 fixtures/staticmethod.pyi] [case testRaiseTuple] import typing raise BaseException, "a" raise BaseException, "a", None [builtins_py2 fixtures/exception.pyi] [case testRaiseTupleTypeFail] import typing x = None # type: typing.Type[typing.Tuple[typing.Any, typing.Any, typing.Any]] raise x # E: Exception must be derived from BaseException [builtins_py2 fixtures/exception.pyi] [case testTryExceptWithTuple] try: None except BaseException, e: e() # E: "BaseException" not callable [builtins_py2 fixtures/exception.pyi] [case testTryExceptUnsupported] try: pass except BaseException, (e, f): # E: Sorry, `except , ` is not supported pass try: pass except BaseException, [e, f, g]: # E: Sorry, `except , ` is not supported pass try: pass except BaseException, e[0]: # E: Sorry, `except , ` is not supported pass [builtins_py2 fixtures/exception.pyi] [case testAlternateNameSuggestions] class Foo(object): def say_hello(self): pass def say_hell(self): pass def say_hullo(self): pass def say_goodbye(self): pass def go_away(self): pass def go_around(self): pass def append(self): pass def extend(self): pass def _add(self): pass f = Foo() f.say_hallo() # E: "Foo" has no attribute "say_hallo"; maybe "say_hullo", "say_hello", or "say_hell"? f.go_array() # E: "Foo" has no attribute "go_array"; maybe "go_away"? f.add() # E: "Foo" has no attribute "add"; maybe "append", "extend", or "_add"? 
[case testTupleArgListDynamicallyTyped] def f(x, (y, z)): x = y + z f(1, 1) f(1, (1, 2)) [case testTupleArgListAnnotated] from typing import Tuple def f(x, (y, z)): # type: (object, Tuple[int, str]) -> None x() # E y() # E z() # E f(object(), (1, '')) f(1, 1) # E [builtins_py2 fixtures/tuple.pyi] [out] main:3: error: "object" not callable main:4: error: "int" not callable main:5: error: "str" not callable main:7: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, str]" [case testNestedTupleArgListAnnotated] from typing import Tuple def f(x, (y, (a, b))): # type: (object, Tuple[int, Tuple[str, int]]) -> None x() # E y() # E a() # E b() # E f(object(), (1, ('', 2))) f(1, 1) # E [builtins fixtures/tuple.pyi] [out] main:3: error: "object" not callable main:4: error: "int" not callable main:5: error: "str" not callable main:6: error: "int" not callable main:8: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, Tuple[str, int]]" [case testBackquoteExpr] `1`.x # E: "str" has no attribute "x" [case testPython2OnlyStdLibModuleWithoutStub] import asyncio import Bastion [out] main:1: error: Cannot find implementation or library stub for module named 'asyncio' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: No library stub file for standard library module 'Bastion' main:2: note: (Stub files are from https://github.com/python/typeshed) [case testImportFromPython2Builtin] from __builtin__ import int as i x = 1 # type: i y = '' # type: i # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testImportPython2Builtin] import __builtin__ x = 1 # type: __builtin__.int y = '' # type: __builtin__.int # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testImportAsPython2Builtin] import __builtin__ as bi x = 1 # type: bi.int y = '' # type: bi.int # E: Incompatible types in assignment (expression 
has type "str", variable has type "int") [case testImportFromPython2BuiltinOverridingDefault] from __builtin__ import int x = 1 # type: int y = '' # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int") -- Copied from check-functions.test [case testEllipsisWithArbitraryArgsOnBareFunctionInPython2] def f(x, y, z): # type: (...) -> None pass -- Copied from check-functions.test [case testEllipsisWithSomethingAfterItFailsInPython2] def f(x, y, z): # type: (..., int) -> None pass [out] main:1: error: Ellipses cannot accompany other argument types in function type signature [case testLambdaTupleArgInPython2] f = lambda (x, y): x + y f((0, 0)) def g(): # type: () -> None pass reveal_type(lambda (x,): g()) # N: Revealed type is 'def (Any)' [out] [case testLambdaTupleArgInferenceInPython2] from typing import Callable, Tuple def f(c): # type: (Callable[[Tuple[int, int]], int]) -> None pass def g(c): # type: (Callable[[Tuple[int, int]], str]) -> None pass f(lambda (x, y): y) f(lambda (x, y): x()) # E: "int" not callable g(lambda (x, y): y) # E: Argument 1 to "g" has incompatible type "Callable[[Tuple[int, int]], int]"; expected "Callable[[Tuple[int, int]], str]" \ # E: Incompatible return value type (got "int", expected "str") [out] [case testLambdaSingletonTupleArgInPython2] f = lambda (x,): x + 1 f((0,)) [out] [case testLambdaNoTupleArgInPython2] f = lambda (x): x + 1 f(0) [out] [case testDefTupleEdgeCasesPython2] def f((x,)): return x def g((x)): return x f(0) + g(0) [out] [case testLambdaAsSortKeyForTuplePython2] from typing import Any, Tuple, Callable def bar(key): # type: (Callable[[Tuple[int, int]], int]) -> int pass def foo(): # type: () -> int return bar(key=lambda (a, b): a) [out] [case testImportBuiltins] import __builtin__ __builtin__.str [case testUnicodeAlias] from typing import List Alias = List[u'Foo'] class Foo: pass [builtins_py2 fixtures/python2.pyi] [case testExec] exec('print 1 + 1') [case 
testUnicodeDocStrings] # flags: --python-version=2.7 __doc__ = u"unicode" class A: u"unicode" def f(): # type: () -> None u"unicode" [case testMetaclassBasics] class M(type): x = 0 # type: int def test(cls): # type: () -> str return "test" class A(object): __metaclass__ = M reveal_type(A.x) # N: Revealed type is 'builtins.int' reveal_type(A.test()) # N: Revealed type is 'builtins.str' [case testImportedMetaclass] import m class A(object): __metaclass__ = m.M reveal_type(A.x) # N: Revealed type is 'builtins.int' reveal_type(A.test()) # N: Revealed type is 'builtins.str' [file m.py] class M(type): x = 0 def test(cls): # type: () -> str return "test" [case testDynamicMetaclass] class C(object): __metaclass__ = int() # E: Dynamic metaclass not supported for 'C' [case testMetaclassDefinedAsClass] class C(object): class __metaclass__: pass # E: Metaclasses defined as inner classes are not supported [case testErrorInMetaclass] x = 0 class A(object): __metaclass__ = m.M # E: Name 'm' is not defined class B(object): __metaclass__ = M # E: Name 'M' is not defined [case testMetaclassAndSkippedImportInPython2] # flags: --ignore-missing-imports from missing import M class A(object): __metaclass__ = M y = 0 reveal_type(A.y) # N: Revealed type is 'builtins.int' A.x # E: "Type[A]" has no attribute "x" [case testAnyAsBaseOfMetaclass] from typing import Any, Type M = None # type: Any class MM(M): pass class A(object): __metaclass__ = MM [case testSelfTypeNotSelfType2] class A: def g(self): # type: (None) -> None pass [out] main:2: error: Invalid type for self, or extra argument type in function annotation main:2: note: (Hint: typically annotations omit the type for self) [case testSuper] class A: def f(self): # type: () -> None pass class B(A): def g(self): # type: () -> None super(B, self).f() super().f() # E: Too few arguments for "super" [case testPartialTypeComments_python2] def foo( a, # type: str b, args=None, ): # type: (...) 
-> None pass [case testNoneHasNoBoolInPython2] none = None b = none.__bool__() # E: "None" has no attribute "__bool__" [case testDictWithoutTypeCommentInPython2] # flags: --py2 d = dict() # E: Need type comment for 'd' (hint: "d = ... \# type: Dict[, ]") [builtins_py2 fixtures/floatdict_python2.pyi] mypy-0.761/test-data/unit/check-python38.test0000644€tŠÔÚ€2›s®0000002230413576752246025236 0ustar jukkaDROPBOX\Domain Users00000000000000[case testDecoratedClassLine] def d(c): ... @d class C: ... class C: ... # E: Name 'C' already defined on line 4 [case testDecoratedFunctionLine] # flags: --disallow-untyped-defs def d(f): ... # type: ignore @d def f(): ... # E: Function is missing a return type annotation \ # N: Use "-> None" if function does not return a value [case testIgnoreDecoratedFunction1] # flags: --disallow-untyped-defs --warn-unused-ignores def d(f): ... # type: ignore @d # type: ignore def f(): ... # type: ignore # E: unused 'type: ignore' comment [case testIgnoreDecoratedFunction2] # flags: --disallow-untyped-defs def d(f): ... # type: ignore @d def f(): ... # type: ignore [case testIgnoreScopeIssue1032] def f(a: int): ... f( 1, 2, ) # type: ignore [case testIgnoreScopeNested1] def g(x: str) -> str: ... def f(a: int) -> int: ... f( f( g( "IGNORE" ) # type: ignore ) ) [case testIgnoreScopeNested2] [ "IGNORE" # type: ignore & "IGNORE", ] [builtins fixtures/list.pyi] [case testIgnoreScopeNested3] { "IGNORE" | # type: ignore "IGNORE", } [builtins fixtures/set.pyi] [case testIgnoreScopeNested4] { None: "IGNORE" ^ "IGNORE", # type: ignore } [builtins fixtures/dict.pyi] [case testIgnoreScopeNestedNonOverlapping] def f(x: int): ... def g(x: int): ... ( f("ERROR"), # E: Argument 1 to "f" has incompatible type "str"; expected "int" g("IGNORE"), # type: ignore f("ERROR"), # E: Argument 1 to "f" has incompatible type "str"; expected "int" ) [case testIgnoreScopeNestedOverlapping] def f(x: int): ... def g(x: int): ... 
( f("ERROR"), g( # E: Argument 1 to "f" has incompatible type "str"; expected "int" "IGNORE" # type: ignore ), f("ERROR"), # E: Argument 1 to "f" has incompatible type "str"; expected "int" ) [case testIgnoreScopeUnused1] # flags: --warn-unused-ignores ( # type: ignore # E: unused 'type: ignore' comment "IGNORE" # type: ignore # E: unused 'type: ignore' comment + # type: ignore # E: unused 'type: ignore' comment 0 # type: ignore ) # type: ignore # E: unused 'type: ignore' comment [case testIgnoreScopeUnused2] # flags: --warn-unused-ignores ( # type: ignore # E: unused 'type: ignore' comment "IGNORE" - # type: ignore 0 # type: ignore # E: unused 'type: ignore' comment ) # type: ignore # E: unused 'type: ignore' comment [case testIgnoreScopeUnused3] # flags: --warn-unused-ignores ( # type: ignore # E: unused 'type: ignore' comment "IGNORE" / 0 # type: ignore ) # type: ignore # E: unused 'type: ignore' comment [case testPEP570ArgTypesMissing] # flags: --disallow-untyped-defs def f(arg, /) -> None: ... # E: Function is missing a type annotation for one or more arguments [case testPEP570ArgTypesBadDefault] def f(arg: int = "ERROR", /) -> None: ... # E: Incompatible default for argument "arg" (default has type "str", argument has type "int") [case testPEP570ArgTypesDefault] def f(arg: int = 0, /) -> None: reveal_type(arg) # N: Revealed type is 'builtins.int' [case testPEP570ArgTypesRequired] def f(arg: int, /) -> None: reveal_type(arg) # N: Revealed type is 'builtins.int' [case testPEP570Required] def f(arg: int, /) -> None: ... # N: "f" defined here f(1) f("ERROR") # E: Argument 1 to "f" has incompatible type "str"; expected "int" f(arg=1) # E: Unexpected keyword argument "arg" for "f" f(arg="ERROR") # E: Unexpected keyword argument "arg" for "f" [case testPEP570Default] def f(arg: int = 0, /) -> None: ... 
# N: "f" defined here f() f(1) f("ERROR") # E: Argument 1 to "f" has incompatible type "str"; expected "int" f(arg=1) # E: Unexpected keyword argument "arg" for "f" f(arg="ERROR") # E: Unexpected keyword argument "arg" for "f" [case testPEP570Calls] def f(p, /, p_or_kw, *, kw) -> None: ... # N: "f" defined here f(0, 0, 0) # E: Too many positional arguments for "f" f(0, 0, kw=0) f(0, p_or_kw=0, kw=0) f(p=0, p_or_kw=0, kw=0) # E: Unexpected keyword argument "p" for "f" [case testPEP570Signatures1] def f(p1: bytes, p2: float, /, p_or_kw: int, *, kw: str) -> None: reveal_type(p1) # N: Revealed type is 'builtins.bytes' reveal_type(p2) # N: Revealed type is 'builtins.float' reveal_type(p_or_kw) # N: Revealed type is 'builtins.int' reveal_type(kw) # N: Revealed type is 'builtins.str' [case testPEP570Signatures2] def f(p1: bytes, p2: float = 0.0, /, p_or_kw: int = 0, *, kw: str) -> None: reveal_type(p1) # N: Revealed type is 'builtins.bytes' reveal_type(p2) # N: Revealed type is 'builtins.float' reveal_type(p_or_kw) # N: Revealed type is 'builtins.int' reveal_type(kw) # N: Revealed type is 'builtins.str' [case testPEP570Signatures3] def f(p1: bytes, p2: float = 0.0, /, *, kw: int) -> None: reveal_type(p1) # N: Revealed type is 'builtins.bytes' reveal_type(p2) # N: Revealed type is 'builtins.float' reveal_type(kw) # N: Revealed type is 'builtins.int' [case testPEP570Signatures4] def f(p1: bytes, p2: int = 0, /) -> None: reveal_type(p1) # N: Revealed type is 'builtins.bytes' reveal_type(p2) # N: Revealed type is 'builtins.int' [case testPEP570Signatures5] def f(p1: bytes, p2: float, /, p_or_kw: int) -> None: reveal_type(p1) # N: Revealed type is 'builtins.bytes' reveal_type(p2) # N: Revealed type is 'builtins.float' reveal_type(p_or_kw) # N: Revealed type is 'builtins.int' [case testPEP570Signatures6] def f(p1: bytes, p2: float, /) -> None: reveal_type(p1) # N: Revealed type is 'builtins.bytes' reveal_type(p2) # N: Revealed type is 'builtins.float' [case testWalrus] # flags: 
--strict-optional from typing import NamedTuple, Optional from typing_extensions import Final if a := 2: reveal_type(a) # N: Revealed type is 'builtins.int' while b := "x": reveal_type(b) # N: Revealed type is 'builtins.str' def f(x: int = (c := 4)) -> int: if a := 2: reveal_type(a) # N: Revealed type is 'builtins.int' while b := "x": reveal_type(b) # N: Revealed type is 'builtins.str' x = (y := 1) + (z := 2) reveal_type(x) # N: Revealed type is 'builtins.int' reveal_type(y) # N: Revealed type is 'builtins.int' reveal_type(z) # N: Revealed type is 'builtins.int' l = [y2 := 1, y2 + 2, y2 + 3] reveal_type(y2) # N: Revealed type is 'builtins.int' reveal_type(l) # N: Revealed type is 'builtins.list[builtins.int*]' filtered_data = [y3 for x in l if (y3 := a) is not None] reveal_type(filtered_data) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(y3) # N: Revealed type is 'builtins.int' # https://www.python.org/dev/peps/pep-0572/#exceptional-cases (y4 := 3) reveal_type(y4) # N: Revealed type is 'builtins.int' y5 = (y6 := 3) reveal_type(y5) # N: Revealed type is 'builtins.int' reveal_type(y6) # N: Revealed type is 'builtins.int' f(x=(y7 := 3)) reveal_type(y7) # N: Revealed type is 'builtins.int' reveal_type((lambda: (y8 := 3) and y8)()) # N: Revealed type is 'Literal[3]?' y8 # E: Name 'y8' is not defined y7 = 1.0 # E: Incompatible types in assignment (expression has type "float", variable has type "int") if y7 := "x": # E: Incompatible types in assignment (expression has type "str", variable has type "int") pass # Just make sure we don't crash on this sort of thing. if NT := NamedTuple("NT", [("x", int)]): # E: "int" not callable z2: NT # E: Variable "NT" is not valid as a type if Alias := int: z3: Alias # E: Variable "Alias" is not valid as a type if (reveal_type(y9 := 3) and # N: Revealed type is 'Literal[3]?' 
reveal_type(y9)): # N: Revealed type is 'builtins.int' reveal_type(y9) # N: Revealed type is 'builtins.int' return (y10 := 3) + y10 reveal_type(c) # N: Revealed type is 'builtins.int' def check_final() -> None: x: Final = 3 if x := 4: # E: Cannot assign to final name "x" pass def check_binder(x: Optional[int], y: Optional[int], z: Optional[int], a: Optional[int], b: Optional[int]) -> None: reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' (x := 1) reveal_type(x) # N: Revealed type is 'builtins.int' if x or (y := 1): reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' if x and (y := 1): # TODO should just be int # This is because in check_boolean_op in checkexpr.py we accept the right conditional # within a binder frame context, so the types assigned in it are lost later. Perhaps # we need to make find_isinstance_check() walrus-aware. reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' if (a := 1) and x: reveal_type(a) # N: Revealed type is 'builtins.int' if (b := 1) or x: reveal_type(b) # N: Revealed type is 'builtins.int' if z := 1: reveal_type(z) # N: Revealed type is 'builtins.int' def check_partial() -> None: x = None if bool() and (x := 2): pass reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' def check_partial_list() -> None: if (x := []): x.append(3) reveal_type(x) # N: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/f_string.pyi] mypy-0.761/test-data/unit/check-redefine.test0000644€tŠÔÚ€2›s®0000003126613576752246025332 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for the redefinition of variable with a different type. 
-- Redefine local variable -- ----------------------- [case testRedefineLocalWithDifferentType] # flags: --allow-redefinition def f() -> None: x = 0 reveal_type(x) # N: Revealed type is 'builtins.int' x = '' reveal_type(x) # N: Revealed type is 'builtins.str' [case testCannotConditionallyRedefineLocalWithDifferentType] # flags: --allow-redefinition def f() -> None: y = 0 reveal_type(y) # N: Revealed type is 'builtins.int' if int(): y = '' \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") reveal_type(y) # N: Revealed type is 'builtins.int' reveal_type(y) # N: Revealed type is 'builtins.int' [case testRedefineFunctionArg] # flags: --allow-redefinition def f(x: int) -> None: g(x) x = '' reveal_type(x) # N: Revealed type is 'builtins.str' def g(x: int) -> None: if int(): x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") reveal_type(x) # N: Revealed type is 'builtins.int' [case testRedefineAnnotationOnly] # flags: --allow-redefinition def f() -> None: x: int x = '' \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") reveal_type(x) # N: Revealed type is 'builtins.int' def g() -> None: x: int x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' x = '' reveal_type(x) # N: Revealed type is 'builtins.str' [case testRedefineLocalUsingOldValue] # flags: --allow-redefinition from typing import TypeVar, Union T = TypeVar('T') def f(x: int) -> None: x = g(x) reveal_type(x) # N: Revealed type is 'Union[builtins.int*, builtins.str]' y = 1 y = g(y) reveal_type(y) # N: Revealed type is 'Union[builtins.int*, builtins.str]' def g(x: T) -> Union[T, str]: pass [case testRedefineLocalForLoopIndexVariable] # flags: --allow-redefinition from typing import Iterable def f(a: Iterable[int], b: Iterable[str]) -> None: for x in a: x = '' \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") reveal_type(x) # N: Revealed 
type is 'builtins.int*' for x in b: x = 1 \ # E: Incompatible types in assignment (expression has type "int", variable has type "str") reveal_type(x) # N: Revealed type is 'builtins.str*' def g(a: Iterable[int]) -> None: for x in a: pass x = '' def h(a: Iterable[int]) -> None: x = '' reveal_type(x) # N: Revealed type is 'builtins.str' for x in a: pass [case testCannotRedefineLocalWithinTry] # flags: --allow-redefinition def f() -> None: try: x = 0 x g() # Might raise an exception x = '' \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") except: pass reveal_type(x) # N: Revealed type is 'builtins.int' y = 0 y y = '' def g(): pass [case testRedefineLocalWithinWith] # flags: --allow-redefinition def f() -> None: with g(): x = 0 x g() # Might raise an exception, but we ignore this x = '' reveal_type(x) # N: Revealed type is 'builtins.str' y = 0 y y = '' def g(): pass [case testCannotRedefineAcrossNestedFunction] # flags: --allow-redefinition def f() -> None: x = 0 x def g() -> None: x g() x = '' \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") g() y = 0 y y = '' [case testCannotRedefineAcrossNestedDecoratedFunction] # flags: --allow-redefinition def dec(f): return f def f() -> None: x = 0 x @dec def g() -> None: x g() x = '' \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") g() y = 0 y y = '' [case testCannotRedefineAcrossNestedOverloadedFunction] # flags: --allow-redefinition from typing import overload def f() -> None: x = 0 x @overload def g() -> None: pass @overload def g(x: int) -> None: pass def g(x=0): pass g() x = '' \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") g() y = 0 y y = '' [case testRedefineLocalInMultipleAssignment] # flags: --allow-redefinition def f() -> None: x, x = 1, '' reveal_type(x) # N: Revealed type is 'builtins.str' x = object() reveal_type(x) # N: Revealed type is 
'builtins.object' def g() -> None: x = 1 if 1: x, x = '', 1 \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testRedefineUnderscore] # flags: --allow-redefinition def f() -> None: _, _ = 1, '' if 1: _, _ = '', 1 reveal_type(_) # N: Revealed type is 'Any' [case testRedefineWithBreakAndContinue] # flags: --allow-redefinition def f() -> None: y = 0 y while int(): z = 0 z z = '' x = 0 if int(): break x = '' \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") reveal_type(x) # N: Revealed type is 'builtins.int' y = '' def g() -> None: y = 0 y for a in h(): z = 0 z z = '' x = 0 if int(): continue x = '' \ # E: Incompatible types in assignment (expression has type "str", variable has type "int") reveal_type(x) # N: Revealed type is 'builtins.int' y = '' def h(): pass [case testRedefineLocalAndNestedLoops] # flags: --allow-redefinition def f() -> None: z = 0 z while int(): x = 0 x while int(): if 1: y = 1 y if int(): break y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") x = '' z = '' [case testCannotRedefineVarAsFunction] # flags: --allow-redefinition def f() -> None: def x(): pass x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") reveal_type(x) # N: Revealed type is 'def () -> Any' y = 1 def y(): pass # E: Name 'y' already defined on line 6 [case testCannotRedefineVarAsClass] # flags: --allow-redefinition def f() -> None: class x: pass x = 1 # E: Cannot assign to a type \ # E: Incompatible types in assignment (expression has type "int", variable has type "Type[x]") y = 1 class y: pass # E: Name 'y' already defined on line 5 [case testRedefineVarAsTypeVar] # flags: --allow-redefinition from typing import TypeVar def f() -> None: x = TypeVar('x') x = 1 # E: Invalid assignment target reveal_type(x) # N: Revealed type is 'builtins.int' y = 1 # NOTE: '"int" not callable' is due 
to test stubs y = TypeVar('y') # E: Cannot redefine 'y' as a type variable \ # E: "int" not callable def h(a: y) -> y: return a # E: Variable "y" is not valid as a type [case testCannotRedefineVarAsModule] # flags: --allow-redefinition def f() -> None: import typing as m m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Module) n = 1 import typing as n # E: Name 'n' already defined on line 5 [builtins fixtures/module.pyi] [case testRedefineLocalWithTypeAnnotation] # flags: --allow-redefinition def f() -> None: x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' x = '' # type: object reveal_type(x) # N: Revealed type is 'builtins.object' def g() -> None: x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' x: object = '' reveal_type(x) # N: Revealed type is 'builtins.object' def h() -> None: x: int x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' x: object x: object = '' # E: Name 'x' already defined on line 16 def farg(x: int) -> None: x: str = '' # E: Name 'x' already defined on line 18 def farg2(x: int) -> None: x: str = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testRedefineLocalWithTypeAnnotationSpecialCases] # flags: --allow-redefinition def f() -> None: x: object x = 1 if int(): x = '' reveal_type(x) # N: Revealed type is 'builtins.object' x = '' reveal_type(x) # N: Revealed type is 'builtins.str' if int(): x = 2 \ # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testCannotRedefineSelf] # flags: --allow-redefinition class A: x = 0 def f(self) -> None: reveal_type(self.x) # N: Revealed type is 'builtins.int' self = f() self.y: str = '' reveal_type(self.y) # N: Revealed type is 'builtins.str' def f() -> A: return A() -- Redefine global variable -- ------------------------ [case testRedefineGlobalWithDifferentType] # flags: --allow-redefinition import m reveal_type(m.x) [file m.py] x = 0 
reveal_type(x) x = object() reveal_type(x) x = '' reveal_type(x) [out] tmp/m.py:2: note: Revealed type is 'builtins.int' tmp/m.py:4: note: Revealed type is 'builtins.object' tmp/m.py:6: note: Revealed type is 'builtins.str' main:3: note: Revealed type is 'builtins.str' [case testRedefineGlobalForIndex] # flags: --allow-redefinition import m reveal_type(m.x) [file m.py] from typing import Iterable def f(): pass it1: Iterable[int] = f() it2: Iterable[str] = f() for x in it1: reveal_type(x) for x in it2: reveal_type(x) reveal_type(x) [out] tmp/m.py:6: note: Revealed type is 'builtins.int*' tmp/m.py:8: note: Revealed type is 'builtins.str*' tmp/m.py:9: note: Revealed type is 'builtins.str*' main:3: note: Revealed type is 'builtins.str*' [case testRedefineGlobalBasedOnPreviousValues] # flags: --allow-redefinition from typing import TypeVar, Iterable T = TypeVar('T') def f(x: T) -> Iterable[T]: pass a = 0 a = f(a) reveal_type(a) # N: Revealed type is 'typing.Iterable[builtins.int*]' [case testRedefineGlobalWithSeparateDeclaration] # flags: --allow-redefinition x = '' reveal_type(x) # N: Revealed type is 'builtins.str' x: int x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") reveal_type(x) # N: Revealed type is 'builtins.int' x: object x = 1 reveal_type(x) # N: Revealed type is 'builtins.int' if int(): x = object() [case testRedefineGlobalUsingForLoop] # flags: --allow-redefinition from typing import Iterable, TypeVar, Union T = TypeVar('T') def f(x: T) -> Iterable[Union[T, str]]: pass x = 0 reveal_type(x) # N: Revealed type is 'builtins.int' for x in f(x): pass reveal_type(x) # N: Revealed type is 'Union[builtins.int*, builtins.str]' [case testNoRedefinitionIfOnlyInitialized] # flags: --allow-redefinition --no-strict-optional x = None # type: int x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") x = object() # E: Incompatible types in assignment (expression has type "object", 
variable has type "int") x # Reference to variable x = '' y = 0 y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNoRedefinitionIfNoValueAssigned] # flags: --allow-redefinition x: int x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") reveal_type(x) # N: Revealed type is 'builtins.int' x: object [case testNoRedefinitionIfExplicitlyDisallowed] # flags: --disallow-redefinition x = 0 x = 2 x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") def f() -> None: y = 0 y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") class C: y = 0 y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") def g() -> None: # _ is a special case _ = 0 _ = '' x, _ = 0, C() [builtins fixtures/tuple.pyi] [case testRedefineAsException] # flags: --allow-redefinition e = 1 reveal_type(e) # N: Revealed type is 'builtins.int' try: pass except Exception as e: reveal_type(e) # N: Revealed type is 'builtins.Exception' e = '' reveal_type(e) # N: Revealed type is 'builtins.str' [builtins fixtures/exception.pyi] [case testRedefineUsingWithStatement] # flags: --allow-redefinition class A: def __enter__(self) -> int: ... def __exit__(self, x, y, z) -> None: ... class B: def __enter__(self) -> str: ... def __exit__(self, x, y, z) -> None: ... 
with A() as x: reveal_type(x) # N: Revealed type is 'builtins.int' with B() as x: x = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "str") mypy-0.761/test-data/unit/check-reports.test0000644€tŠÔÚ€2›s®0000002335113576752246025243 0ustar jukkaDROPBOX\Domain Users00000000000000[case testReportBasic] # flags: --xml-report out def f(): pass def g() -> None: pass [outfile out/index.xml] [case testLinePrecisionBasic] # flags: --lineprecision-report out def f(): pass def g() -> None: a = 1 [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 5 2 0 1 2 0 [case testLinePrecisionImpreciseType] # flags: --lineprecision-report out def f(x: list) -> None: pass [builtins fixtures/list.pyi] [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 2 0 1 0 1 0 [case testLinePrecisionUnanalyzed] # flags: --lineprecision-report out import sys MYPY = False if not MYPY: a = 1 def f(x: int) -> None: if isinstance(x, str): b = 1 c = 1 [builtins fixtures/isinstance.pyi] [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 10 5 0 0 2 3 [case testLinePrecisionEmptyLines] # flags: --lineprecision-report out def f() -> None: """docstring long """ x = 0 # comment y = 0 # comment (non-empty) [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 10 3 0 0 7 0 [case testLinePrecisionImportFrom] # flags: --lineprecision-report out --ignore-missing-imports from m import f from m import g from bad import foo from bad import ( # treated as a single line foo2, foo3, ) [file m.py] def f(): pass def g() -> None: pass [outfile out/lineprecision.txt] Name Lines Precise Imprecise 
Any Empty Unanalyzed ------------------------------------------------------------- __main__ 8 2 0 2 4 0 m 2 1 0 1 0 0 [case testLinePrecisionImport] # flags: --lineprecision-report out --ignore-missing-imports import m import bad import m, bad [file m.py] [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 4 1 0 2 1 0 m 0 0 0 0 0 0 [case testLinePrecisionStarImport] # flags: --lineprecision-report out --ignore-missing-imports from m import * from bad import * [file m.py] def f(): pass def g() -> None: pass [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 3 1 0 1 1 0 m 2 1 0 1 0 0 [case testLinePrecisionRelativeImport] # flags: --lineprecision-report out --ignore-missing-imports import a [file a/__init__.py] from .m import f from .bad import g [file a/m.py] def f(): pass [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 2 1 0 0 1 0 a 2 1 0 1 0 0 a.m 1 0 0 1 0 0 [case testLinePrecisionPassStatement] # flags: --lineprecision-report out def f() -> None: pass def g(): pass class C: pass [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 7 4 0 2 1 0 [case testLinePrecisionBreakAndContinueStatement] # flags: --lineprecision-report out import a import b [file a.py] def f() -> int: while f(): break return f() def g(): while g(): break [file b.py] def f() -> int: while f(): continue return f() def g(): while g(): continue [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 3 2 0 0 1 0 a 7 4 0 3 0 0 b 7 4 0 3 0 0 [case testLinePrecisionLiterals] # flags: 
--lineprecision-report out import str_lit import bytes_lit import int_lit import float_lit import true_lit import false_lit import none_lit import complex_lit import dots_lit [file str_lit.py] def f() -> object: return '' def g(): return '' [file bytes_lit.py] def f() -> object: return b'' def g(): return b'' [file int_lit.py] def f() -> object: return 1 def g(): return 1 [file float_lit.py] def f() -> object: return 1.1 def g(): return 1.1 [file true_lit.py] def f() -> object: return True def g(): return True [file false_lit.py] def f() -> object: return False def g(): return False [file none_lit.py] def f() -> object: return None def g(): return None [file complex_lit.py] def f() -> object: return None def g(): return None [file dots_lit.py] def f() -> object: return ... def g(): return ... [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ---------------------------------------------------------------- __main__ 10 9 0 0 1 0 bytes_lit 4 2 0 2 0 0 complex_lit 4 2 0 2 0 0 dots_lit 4 2 0 2 0 0 false_lit 4 2 0 2 0 0 float_lit 4 2 0 2 0 0 int_lit 4 2 0 2 0 0 none_lit 4 2 0 2 0 0 str_lit 4 2 0 2 0 0 true_lit 4 2 0 2 0 0 [case testLinePrecisionUnicodeLiterals_python2] # flags: --lineprecision-report out def f(): # type: () -> object return u'' def g(): return u'' [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 5 2 0 2 1 0 [case testLinePrecisionIfStatement] # flags: --lineprecision-report out if int(): x = 1 else: # This is treated as empty x = 2 [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 5 3 0 0 2 0 [case testLinePrecisionCallAnyArg] # flags: --lineprecision-report out from m import f def g() -> None: f(1) # Precise f(1, 2) # Any [file m.py] from typing import Any def f(x: int, y: Any = 0) -> None: pass [outfile 
out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 5 3 0 1 1 0 m 3 2 0 1 0 0 [case testLinePrecisionCallImpreciseArg] # flags: --lineprecision-report out from m import f def g() -> None: f(1) # Precise f(1, [2]) # Imprecise [file m.py] from typing import List, Any def f(x: int, y: List[Any] = []) -> None: pass [builtins fixtures/list.pyi] [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 5 3 1 0 1 0 m 3 2 1 0 0 0 [case testLinePrecisionCallAnyArgWithKeywords] # flags: --lineprecision-report out from m import f def g() -> None: f(x=1) # Precise f(x=1, z=1) # Precise f(z=1, x=1) # Precise f(y=1) # Any f(y=1, x=1) # Any [file m.py] from typing import Any def f(x: int = 0, y: Any = 0, z: int = 0) -> None: pass [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 8 5 0 2 1 0 m 3 2 0 1 0 0 [case testLinePrecisionCallAnyMethodArg] # flags: --lineprecision-report out from m import C def g(c: C) -> None: c.f(1) # Precise c.f(1, 2) # Any [file m.py] from typing import Any class C: def f(self, x: int, y: Any = 0) -> None: pass [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 5 3 0 1 1 0 m 4 3 0 1 0 0 [case testLinePrecisionCallAnyConstructorArg] # flags: --lineprecision-report out from m import C def g() -> None: C(1) # Precise C(1, 2) # Any [file m.py] from typing import Any class C: def __init__(self, x: int, y: Any = 0) -> None: pass [outfile out/lineprecision.txt] Name Lines Precise Imprecise Any Empty Unanalyzed ------------------------------------------------------------- __main__ 5 3 0 1 1 0 m 4 3 0 1 0 0 
mypy-0.761/test-data/unit/check-selftype.test0000644€tŠÔÚ€2›s®0000007774513576752246025420 0ustar jukkaDROPBOX\Domain Users00000000000000[case testSelfTypeInstance] from typing import TypeVar T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: pass class B(A): pass reveal_type(A().copy) # N: Revealed type is 'def () -> __main__.A*' reveal_type(B().copy) # N: Revealed type is 'def () -> __main__.B*' reveal_type(A().copy()) # N: Revealed type is '__main__.A*' reveal_type(B().copy()) # N: Revealed type is '__main__.B*' [builtins fixtures/bool.pyi] [case testSelfTypeStaticAccess] from typing import TypeVar T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: pass class B(A): pass # Erased instances appear on reveal_type; unrelated to self type def f(a: A) -> None: pass f(A.copy(A())) f(A.copy(B())) f(B.copy(B())) # TODO: make it an error # f(B.copy(A())) def g(a: B) -> None: pass g(A.copy(A())) # E: Argument 1 to "g" has incompatible type "A"; expected "B" g(A.copy(B())) g(B.copy(B())) [builtins fixtures/bool.pyi] [case testSelfTypeReturn] from typing import TypeVar, Type R = TypeVar('R') def _type(self: R) -> Type[R]: pass T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: if B(): return A() # E: Incompatible return value type (got "A", expected "T") elif A(): return B() # E: Incompatible return value type (got "B", expected "T") reveal_type(_type(self)) # N: Revealed type is 'Type[T`-1]' return reveal_type(_type(self)()) # N: Revealed type is 'T`-1' class B(A): pass Q = TypeVar('Q', bound='C', covariant=True) class C: def __init__(self, a: int) -> None: pass def copy(self: Q) -> Q: if self: return reveal_type(_type(self)(1)) # N: Revealed type is 'Q`-1' else: return _type(self)() # E: Too few arguments for "C" [builtins fixtures/bool.pyi] [case testSelfTypeClass] from typing import TypeVar, Type T = TypeVar('T', bound='A') class A: @classmethod def new(cls: Type[T]) -> T: return 
reveal_type(cls()) # N: Revealed type is 'T`-1' class B(A): pass Q = TypeVar('Q', bound='C', covariant=True) class C: def __init__(self, a: int) -> None: pass @classmethod def new(cls: Type[Q]) -> Q: if cls: return cls(1) else: return cls() # E: Too few arguments for "C" reveal_type(A.new) # N: Revealed type is 'def () -> __main__.A*' reveal_type(B.new) # N: Revealed type is 'def () -> __main__.B*' reveal_type(A.new()) # N: Revealed type is '__main__.A*' reveal_type(B.new()) # N: Revealed type is '__main__.B*' [builtins fixtures/classmethod.pyi] [case testSelfTypeOverride] from typing import TypeVar, cast T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: pass class B(A): pass Q = TypeVar('Q', bound='C', covariant=True) class C(A): def copy(self: Q) -> Q: pass reveal_type(C().copy) # N: Revealed type is 'def () -> __main__.C*' reveal_type(C().copy()) # N: Revealed type is '__main__.C*' reveal_type(cast(A, C()).copy) # N: Revealed type is 'def () -> __main__.A*' reveal_type(cast(A, C()).copy()) # N: Revealed type is '__main__.A*' [builtins fixtures/bool.pyi] [case testSelfTypeSuper] from typing import TypeVar, cast T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: pass Q = TypeVar('Q', bound='B', covariant=True) class B(A): def copy(self: Q) -> Q: reveal_type(self) # N: Revealed type is 'Q`-1' reveal_type(super().copy) # N: Revealed type is 'def () -> Q`-1' return super().copy() [builtins fixtures/bool.pyi] [case testSelfTypeRecursiveBinding] from typing import TypeVar, Callable, Type T = TypeVar('T', bound='A', covariant=True) class A: # TODO: This is potentially unsafe, as we use T in an argument type def copy(self: T, factory: Callable[[T], T]) -> T: return factory(self) @classmethod def new(cls: Type[T], factory: Callable[[T], T]) -> T: reveal_type(cls) # N: Revealed type is 'Type[T`-1]' reveal_type(cls()) # N: Revealed type is 'T`-1' cls(2) # E: Too many arguments for "A" return cls() class B(A): pass 
reveal_type(A().copy) # N: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*' reveal_type(B().copy) # N: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*' reveal_type(A.new) # N: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*' reveal_type(B.new) # N: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*' [builtins fixtures/classmethod.pyi] [case testSelfTypeBound] from typing import TypeVar, Callable, cast TA = TypeVar('TA', bound='A', covariant=True) class A: def copy(self: TA) -> TA: pass class C(A): def copy(self: C) -> C: pass class D(A): def copy(self: A) -> A: # E: Return type "A" of "copy" incompatible with return type "D" in supertype "A" pass TB = TypeVar('TB', bound='B', covariant=True) class B(A): x = 1 def copy(self: TB) -> TB: reveal_type(self.x) # N: Revealed type is 'builtins.int' return cast(TB, None) [builtins fixtures/bool.pyi] -- # TODO: fail for this -- [case testSelfTypeBare] -- from typing import TypeVar, Type -- -- T = TypeVar('T', bound='E') -- -- class E: -- def copy(self: T, other: T) -> T: pass [case testSelfTypeClone] from typing import TypeVar, Type T = TypeVar('T', bound='C') class C: def copy(self: T) -> T: return self @classmethod def new(cls: Type[T]) -> T: return cls() class D(C): pass reveal_type(D.new) # N: Revealed type is 'def () -> __main__.D*' reveal_type(D().new) # N: Revealed type is 'def () -> __main__.D*' reveal_type(D.new()) # N: Revealed type is '__main__.D*' reveal_type(D().new()) # N: Revealed type is '__main__.D*' Q = TypeVar('Q', bound=C) def clone(arg: Q) -> Q: reveal_type(arg.copy) # N: Revealed type is 'def () -> Q`-1' reveal_type(arg.copy()) # N: Revealed type is 'Q`-1' reveal_type(arg.new) # N: Revealed type is 'def () -> Q`-1' reveal_type(arg.new()) # N: Revealed type is 'Q`-1' return arg.copy() def make(cls: Type[Q]) -> Q: reveal_type(cls.new) # N: Revealed type is 'def () -> Q`-1' 
reveal_type(cls().new) # N: Revealed type is 'def () -> Q`-1' reveal_type(cls().new()) # N: Revealed type is 'Q`-1' return cls.new() [builtins fixtures/classmethod.pyi] [case testSelfTypeGeneric] from typing import TypeVar T = TypeVar('T', int, str) class A: pass class B(A): def __init__(self, arg: T) -> None: super(B, self).__init__() [case testSelfTypeNonsensical] from typing import TypeVar, Type T = TypeVar('T', bound=str) class A: def foo(self: T) -> T: # E: The erased type of self "builtins.str" is not a supertype of its class "__main__.A" return self @classmethod def cfoo(cls: Type[T]) -> T: # E: The erased type of self "Type[builtins.str]" is not a supertype of its class "Type[__main__.A]" return cls() Q = TypeVar('Q', bound='B') class B: def foo(self: Q) -> Q: return self @classmethod def cfoo(cls: Type[Q]) -> Q: return cls() class C: def foo(self: C) -> C: return self @classmethod def cfoo(cls: Type[C]) -> C: return cls() class D: def foo(self: Q) -> Q: # E: The erased type of self "__main__.B" is not a supertype of its class "__main__.D" return self @staticmethod def bar(self: str) -> str: return self @classmethod def cfoo(cls: Type[Q]) -> Q: # E: The erased type of self "Type[__main__.B]" is not a supertype of its class "Type[__main__.D]" return cls() [builtins fixtures/classmethod.pyi] [case testSelfTypeLambdaDefault] from typing import Callable class C: @classmethod def foo(cls, arg: Callable[[int], str] = lambda a: '' ) -> None: pass def bar(self, arg: Callable[[int], str] = lambda a: '' ) -> None: pass [builtins fixtures/classmethod.pyi] [case testSelfTypeNew] from typing import TypeVar, Type T = TypeVar('T', bound=A) class A: def __new__(cls: Type[T]) -> T: return cls() def __init_subclass__(cls: Type[T]) -> None: pass class B: def __new__(cls: Type[T]) -> T: # E: The erased type of self "Type[__main__.A]" is not a supertype of its class "Type[__main__.B]" return cls() def __init_subclass__(cls: Type[T]) -> None: # E: The erased type of self 
"Type[__main__.A]" is not a supertype of its class "Type[__main__.B]" pass class C: def __new__(cls: Type[C]) -> C: return cls() def __init_subclass__(cls: Type[C]) -> None: pass class D: def __new__(cls: D) -> D: # E: The erased type of self "__main__.D" is not a supertype of its class "Type[__main__.D]" return cls def __init_subclass__(cls: D) -> None: # E: The erased type of self "__main__.D" is not a supertype of its class "Type[__main__.D]" pass class E: def __new__(cls) -> E: reveal_type(cls) # N: Revealed type is 'Type[__main__.E]' return cls() def __init_subclass__(cls) -> None: reveal_type(cls) # N: Revealed type is 'Type[__main__.E]' [case testSelfTypePropertyUnion] from typing import Union class A: @property def f(self: A) -> int: pass class B: @property def f(self: B) -> int: pass x: Union[A, B] reveal_type(x.f) # N: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttribute] from typing import Callable, TypeVar class K: pass T = TypeVar('T', bound=K) class A(K): @property def g(self: K) -> int: return 0 @property def gt(self: T) -> T: return self f: Callable[[object], int] ft: Callable[[T], T] class B(A): pass reveal_type(A().g) # N: Revealed type is 'builtins.int' reveal_type(A().gt) # N: Revealed type is '__main__.A*' reveal_type(A().f()) # N: Revealed type is 'builtins.int' reveal_type(A().ft()) # N: Revealed type is '__main__.A*' reveal_type(B().g) # N: Revealed type is 'builtins.int' reveal_type(B().gt) # N: Revealed type is '__main__.B*' reveal_type(B().f()) # N: Revealed type is 'builtins.int' reveal_type(B().ft()) # N: Revealed type is '__main__.B*' [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttributeTuple] from typing import Callable, TypeVar, Tuple T = TypeVar('T') class A(Tuple[int, int]): @property def g(self: object) -> int: return 0 @property def gt(self: T) -> T: return self f: Callable[[object], int] ft: Callable[[T], T] class B(A): pass reveal_type(A().g) # N: 
Revealed type is 'builtins.int' reveal_type(A().gt) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.A]' reveal_type(A().f()) # N: Revealed type is 'builtins.int' reveal_type(A().ft()) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.A]' reveal_type(B().g) # N: Revealed type is 'builtins.int' reveal_type(B().gt) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.B]' reveal_type(B().f()) # N: Revealed type is 'builtins.int' reveal_type(B().ft()) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.B]' [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttributeMeta] from typing import Callable, TypeVar, Type T = TypeVar('T') class A(type): @property def g(cls: object) -> int: return 0 @property def gt(cls: T) -> T: return cls f: Callable[[object], int] ft: Callable[[T], T] class B(A): pass class X(metaclass=B): def __init__(self, x: int) -> None: pass class Y(X): pass X1: Type[X] reveal_type(X.g) # N: Revealed type is 'builtins.int' reveal_type(X.gt) # N: Revealed type is 'def (x: builtins.int) -> __main__.X' reveal_type(X.f()) # N: Revealed type is 'builtins.int' reveal_type(X.ft()) # N: Revealed type is 'def (x: builtins.int) -> __main__.X' reveal_type(Y.g) # N: Revealed type is 'builtins.int' reveal_type(Y.gt) # N: Revealed type is 'def (x: builtins.int) -> __main__.Y' reveal_type(Y.f()) # N: Revealed type is 'builtins.int' reveal_type(Y.ft()) # N: Revealed type is 'def (x: builtins.int) -> __main__.Y' reveal_type(X1.g) # N: Revealed type is 'builtins.int' reveal_type(X1.gt) # N: Revealed type is 'Type[__main__.X]' reveal_type(X1.f()) # N: Revealed type is 'builtins.int' reveal_type(X1.ft()) # N: Revealed type is 'Type[__main__.X]' [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttributeGeneric] from typing import Callable, TypeVar, Generic Q = TypeVar('Q', covariant=True) class K(Generic[Q]): q: Q T = TypeVar('T') class A(K[Q]): 
@property def g(self: K[object]) -> int: return 0 @property def gt(self: K[T]) -> T: return self.q f: Callable[[object], int] ft: Callable[[T], T] class B(A[Q]): pass a: A[int] b: B[str] reveal_type(a.g) # N: Revealed type is 'builtins.int' reveal_type(a.gt) # N: Revealed type is 'builtins.int' reveal_type(a.f()) # N: Revealed type is 'builtins.int' reveal_type(a.ft()) # N: Revealed type is '__main__.A[builtins.int]' reveal_type(b.g) # N: Revealed type is 'builtins.int' reveal_type(b.gt) # N: Revealed type is 'builtins.str' reveal_type(b.f()) # N: Revealed type is 'builtins.int' reveal_type(b.ft()) # N: Revealed type is '__main__.B[builtins.str]' [builtins fixtures/property.pyi] [case testSelfTypeRestrictedMethod] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): def from_item(self: C[str]) -> None: ... i: C[int] s: C[str] i.from_item() # E: Invalid self argument "C[int]" to attribute function "from_item" with type "Callable[[C[str]], None]" s.from_item() [case testSelfTypeRestrictedClassMethod] from typing import TypeVar, Generic, Type T = TypeVar('T') class C(Generic[T]): @classmethod def from_item(cls: Type[C[str]]) -> None: ... class DI(C[int]): ... class DS(C[str]): ... DI().from_item() # E: Invalid self argument "Type[DI]" to class attribute function "from_item" with type "Callable[[Type[C[str]]], None]" DS().from_item() DI.from_item() # E: Invalid self argument "Type[DI]" to attribute function "from_item" with type "Callable[[Type[C[str]]], None]" DS.from_item() [builtins fixtures/classmethod.pyi] [case testSelfTypeRestrictedMethodOverload] from typing import TypeVar, Generic, overload, Tuple T = TypeVar('T') class C(Generic[T]): @overload def from_item(self: C[str], item: str) -> None: ... @overload def from_item(self: C[int], item: Tuple[int]) -> None: ... def from_item(self, item): ... 
ci: C[int] cs: C[str] reveal_type(ci.from_item) # N: Revealed type is 'def (item: Tuple[builtins.int])' reveal_type(cs.from_item) # N: Revealed type is 'def (item: builtins.str)' [case testSelfTypeRestrictedMethodOverloadFallback] from typing import TypeVar, Generic, overload, Callable T = TypeVar('T') class C(Generic[T]): @overload def from_item(self: C[str]) -> str: ... @overload def from_item(self, converter: Callable[[T], str]) -> str: ... def from_item(self, converter): ... ci: C[int] cs: C[str] reveal_type(cs.from_item()) # N: Revealed type is 'builtins.str' ci.from_item() # E: Too few arguments for "from_item" of "C" def conv(x: int) -> str: ... def bad(x: str) -> str: ... reveal_type(ci.from_item(conv)) # N: Revealed type is 'builtins.str' ci.from_item(bad) # E: Argument 1 to "from_item" of "C" has incompatible type "Callable[[str], str]"; expected "Callable[[int], str]" [case testSelfTypeRestrictedMethodOverloadInit] from typing import TypeVar from lib import P, C reveal_type(P) # N: Revealed type is 'Overload(def [T] (use_str: Literal[True]) -> lib.P[builtins.str], def [T] (use_str: Literal[False]) -> lib.P[builtins.int])' reveal_type(P(use_str=True)) # N: Revealed type is 'lib.P[builtins.str]' reveal_type(P(use_str=False)) # N: Revealed type is 'lib.P[builtins.int]' reveal_type(C) # N: Revealed type is 'Overload(def [T] (item: T`1, use_tuple: Literal[False]) -> lib.C[T`1], def [T] (item: T`1, use_tuple: Literal[True]) -> lib.C[builtins.tuple[T`1]])' reveal_type(C(0, use_tuple=False)) # N: Revealed type is 'lib.C[builtins.int*]' reveal_type(C(0, use_tuple=True)) # N: Revealed type is 'lib.C[builtins.tuple[builtins.int*]]' T = TypeVar('T') class SubP(P[T]): pass SubP('no') # E: No overload variant of "SubP" matches argument type "str" \ # N: Possible overload variants: \ # N: def [T] __init__(self, use_str: Literal[True]) -> SubP[T] \ # N: def [T] __init__(self, use_str: Literal[False]) -> SubP[T] # This is a bit unfortunate: we don't have a way to map the 
overloaded __init__ to subtype. x = SubP(use_str=True) # E: Need type annotation for 'x' reveal_type(x) # N: Revealed type is '__main__.SubP[Any]' y: SubP[str] = SubP(use_str=True) [file lib.pyi] from typing import TypeVar, Generic, overload, Tuple from typing_extensions import Literal T = TypeVar('T') class P(Generic[T]): @overload def __init__(self: P[str], use_str: Literal[True]) -> None: ... @overload def __init__(self: P[int], use_str: Literal[False]) -> None: ... class C(Generic[T]): @overload def __init__(self: C[T], item: T, use_tuple: Literal[False]) -> None: ... @overload def __init__(self: C[Tuple[T, ...]], item: T, use_tuple: Literal[True]) -> None: ... [builtins fixtures/bool.pyi] [case testSelfTypeRestrictedMethodOverloadInitFallBacks] from lib import PFallBack, PFallBackAny t: bool xx = PFallBack(t) # E: Need type annotation for 'xx' yy = PFallBackAny(t) # OK [file lib.pyi] from typing import TypeVar, Generic, overload, Tuple, Any from typing_extensions import Literal class PFallBack(Generic[T]): @overload def __init__(self: PFallBack[str], use_str: Literal[True]) -> None: ... @overload def __init__(self: PFallBack[int], use_str: Literal[False]) -> None: ... @overload def __init__(self, use_str: bool) -> None: ... class PFallBackAny(Generic[T]): @overload def __init__(self: PFallBackAny[str], use_str: Literal[True]) -> None: ... @overload def __init__(self: PFallBackAny[int], use_str: Literal[False]) -> None: ... @overload def __init__(self: PFallBackAny[Any], use_str: bool) -> None: ... [builtins fixtures/bool.pyi] [case testSelfTypeRestrictedMethodOverloadInitBadTypeNoCrash] from lib import P P(0) [file lib.pyi] from typing import overload class P: @overload def __init__(self: Bad, x: int) -> None: ... # E: Name 'Bad' is not defined @overload def __init__(self) -> None: ... [case testSelfTypeNarrowBinding] from typing import TypeVar, List, Generic T = TypeVar('T') S = TypeVar('S') class Base(Generic[T]): def get_item(self: Base[List[S]]) -> S: ... 
class Sub(Base[List[int]]): ... class BadSub(Base[int]): ... reveal_type(Sub().get_item()) # N: Revealed type is 'builtins.int' BadSub().get_item() # E: Invalid self argument "BadSub" to attribute function "get_item" with type "Callable[[Base[List[S]]], S]" [builtins fixtures/list.pyi] [case testMixinAllowedWithProtocol] from typing import TypeVar from typing_extensions import Protocol class Resource(Protocol): def close(self) -> int: ... class AtomicClose: def atomic_close(self: Resource) -> int: return self.close() T = TypeVar('T', bound=Resource) class Copyable: def copy(self: T) -> T: ... class File(AtomicClose, Copyable): def close(self) -> int: ... class Bad(AtomicClose, Copyable): ... f: File b: Bad f.atomic_close() # OK b.atomic_close() # E: Invalid self argument "Bad" to attribute function "atomic_close" with type "Callable[[Resource], int]" reveal_type(f.copy()) # N: Revealed type is '__main__.File*' b.copy() # E: Invalid self argument "Bad" to attribute function "copy" with type "Callable[[T], T]" [case testBadClassLevelDecoratorHack] from typing_extensions import Protocol from typing import TypeVar, Any class FuncLike(Protocol): __call__: Any F = TypeVar('F', bound=FuncLike) class Test: def _deco(func: F) -> F: ... @_deco def meth(self, x: str) -> int: ... reveal_type(Test().meth) # N: Revealed type is 'def (x: builtins.str) -> builtins.int' Test()._deco # E: Invalid self argument "Test" to attribute function "_deco" with type "Callable[[F], F]" [case testSelfTypeTrickyExample] from typing import * In = TypeVar('In') Out = TypeVar('Out') Mid = TypeVar('Mid') NewOut = TypeVar('NewOut') class Lnk(Generic[In, Out]): def test(self: Lnk[In, Mid], other: Lnk[Mid, NewOut]) -> Lnk[In, NewOut]: ... 
class X: pass class Y: pass class Z: pass a: Lnk[X, Y] = Lnk() b: Lnk[Y, Z] = Lnk() a.test(b) b.test(a) # E: Argument 1 to "test" of "Lnk" has incompatible type "Lnk[X, Y]"; expected "Lnk[Z, Y]" [case testSelfTypeReallyTrickyExample] from typing import * In = TypeVar('In') Out = TypeVar('Out') Other = TypeVar('Other') _1 = TypeVar('_1') _2 = TypeVar('_2') __1 = TypeVar('__1') __2 = TypeVar('__2') class Lnk(Generic[In, Out]): @overload def __rshift__(self, other: Lnk[Out, Other]) -> Lnk[In,Other]: ... @overload def __rshift__(self: Lnk[In, Tuple[_1, _2]], other: Tuple[Lnk[_1, __1], Lnk[_2, __2]]) -> Lnk[In, Tuple[__1, __2]]: ... def __rshift__(self: Any, other: Any) -> Any: ... a: Lnk[str, Tuple[str, int]] = Lnk() b: Lnk[str, int] = Lnk() c: Lnk[int, float] = Lnk() d: Lnk[str, float] = b >> c # OK e: Lnk[str, Tuple[int, float]] = a >> (b, c) # OK f: Lnk[str, Tuple[float, int]] = a >> (c, b) # E: Unsupported operand types for >> ("Lnk[str, Tuple[str, int]]" and "Tuple[Lnk[int, float], Lnk[str, int]]") [case testSelfTypeMutuallyExclusiveRestrictions] from typing import Generic, TypeVar T = TypeVar('T') class Foo(Generic[T]): def f1(self: Foo[str]) -> None: self.f2() # E: Invalid self argument "Foo[str]" to attribute function "f2" with type "Callable[[Foo[int]], None]" def f2(self: Foo[int]) -> None: self.f1() # E: Invalid self argument "Foo[int]" to attribute function "f1" with type "Callable[[Foo[str]], None]" [case testSelfTypeStructureMetaclassMatch] from typing import TypeVar, Type, Generic, cast Cls = TypeVar('Cls') T = TypeVar('T') class Manager(Generic[Cls]): def create(self: Manager[Type[T]]) -> T: ... class ModelMeta(type): @property def objects(cls: T) -> Manager[T]: ... class Model(metaclass=ModelMeta): pass class Dog(Model): ... class Cat(Model): ... 
c: Cat = Dog.objects.create() # E: Incompatible types in assignment (expression has type "Dog", variable has type "Cat") d: Dog = Dog.objects.create() [builtins fixtures/property.pyi] [case testSelfTypeProtocolMetaclassMatch] from typing import Type, TypeVar, Protocol class HasX(Protocol): x: int T = TypeVar('T', bound=HasX) class Meta(type): def do_x(cls: Type[T]) -> T: cls.x return cls() class Good(metaclass=Meta): x: int class Bad(metaclass=Meta): pass Good.do_x() Bad.do_x() # E: Invalid self argument "Type[Bad]" to attribute function "do_x" with type "Callable[[Type[T]], T]" [case testSelfTypeNotSelfType] # Friendlier error messages for common mistakes. See #2950 class A: def f(x: int) -> None: ... # def g(self: None) -> None: ... see in check-python2.test [out] main:3: error: Self argument missing for a non-static method (or an invalid type for self) [case testUnionPropertyField] from typing import Union class A: x: int class B: @property def x(self) -> int: return 1 class C: @property def x(self) -> int: return 1 ab: Union[A, B, C] reveal_type(ab.x) # N: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] [case testSelfTypeNoTypeVars] from typing import Generic, List, Optional, TypeVar, Any Q = TypeVar("Q") T = TypeVar("T", bound=Super[Any]) class Super(Generic[Q]): @classmethod def meth(cls, arg: List[T]) -> List[T]: pass class Sub(Super[int]): ... def test(x: List[Sub]) -> None: reveal_type(Sub.meth(x)) # N: Revealed type is 'builtins.list[__main__.Sub*]' [builtins fixtures/isinstancelist.pyi] [case testSelfTypeNoTypeVarsRestrict] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class C(Generic[T]): def limited(self: C[str], arg: S) -> S: ... reveal_type(C[str]().limited(0)) # N: Revealed type is 'builtins.int*' [case testSelfTypeMultipleTypeVars] from typing import Generic, TypeVar, Tuple T = TypeVar('T') S = TypeVar('S') U = TypeVar('U') class C(Generic[T]): def magic(self: C[Tuple[S, U]]) -> Tuple[T, S, U]: ... 
reveal_type(C[Tuple[int, str]]().magic()) # N: Revealed type is 'Tuple[Tuple[builtins.int, builtins.str], builtins.int, builtins.str]' [case testSelfTypeOnUnion] from typing import TypeVar, Union T = TypeVar('T') class A: same: int class C: def same(self: T) -> T: ... x: Union[A, C] reveal_type(x.same) # N: Revealed type is 'Union[builtins.int, def () -> __main__.C*]' [case testSelfTypeOnUnionClassMethod] from typing import TypeVar, Union, Type T = TypeVar('T') class A: same: int class C: @classmethod def same(cls: Type[T]) -> T: ... x: Union[A, C] reveal_type(x.same) # N: Revealed type is 'Union[builtins.int, def () -> __main__.C*]' [builtins fixtures/classmethod.pyi] [case SelfTypeOverloadedClassMethod] from lib import Base from typing import overload, Tuple class Sub(Base): @overload @classmethod def make(cls) -> Sub: ... @overload @classmethod def make(cls, num: int) -> Tuple[Sub, ...]: ... @classmethod def make(cls, num=1): ... class Other(Base): ... class Double(Sub): ... reveal_type(Other.make()) # N: Revealed type is '__main__.Other*' reveal_type(Other.make(3)) # N: Revealed type is 'builtins.tuple[__main__.Other*]' reveal_type(Double.make()) # N: Revealed type is '__main__.Sub' reveal_type(Double.make(3)) # N: Revealed type is 'builtins.tuple[__main__.Sub]' [file lib.pyi] from typing import overload, TypeVar, Type, Tuple T = TypeVar('T', bound=Base) class Base: @overload @classmethod def make(cls: Type[T]) -> T: ... @overload @classmethod def make(cls: Type[T], num: int) -> Tuple[T, ...]: ... [builtins fixtures/classmethod.pyi] [case testSelfTypeClassMethodOnUnion] from typing import Type, Union, TypeVar T = TypeVar('T') class A: @classmethod def meth(cls: Type[T]) -> T: ... class B(A): ... class C(A): ... 
t: Type[Union[B, C]] reveal_type(t.meth) # N: Revealed type is 'Union[def () -> __main__.B*, def () -> __main__.C*]' x = t.meth() reveal_type(x) # N: Revealed type is 'Union[__main__.B*, __main__.C*]' [builtins fixtures/classmethod.pyi] [case testSelfTypeClassMethodOnUnionGeneric] from typing import Type, Union, TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T]): @classmethod def meth(cls: Type[S]) -> S: ... t: Type[Union[A[int], A[str]]] x = t.meth() reveal_type(x) # N: Revealed type is 'Union[__main__.A[builtins.int], __main__.A[builtins.str]]' [builtins fixtures/classmethod.pyi] [case testSelfTypeClassMethodOnUnionList] from typing import Type, Union, TypeVar, List T = TypeVar('T') class A: @classmethod def meth(cls: Type[T]) -> List[T]: ... class B(A): ... class C(A): ... t: Type[Union[B, C]] x = t.meth()[0] reveal_type(x) # N: Revealed type is 'Union[__main__.B*, __main__.C*]' [builtins fixtures/isinstancelist.pyi] [case testSelfTypeClassMethodOverloadedOnInstance] from typing import Optional, Type, TypeVar, overload, Union Id = int A = TypeVar("A", bound=AClass) class AClass: @overload @classmethod def delete(cls: Type[A], id: Id, id2: Id) -> Optional[int]: ... @overload @classmethod def delete(cls: Type[A], id: A, id2: None = None) -> Optional[int]: ... @classmethod def delete(cls: Type[A], id: Union[A, Id], id2: Optional[Id] = None) -> Optional[int]: ... 
def foo(x: Type[AClass]) -> None: reveal_type(x.delete) # N: Revealed type is 'Overload(def (id: builtins.int, id2: builtins.int) -> builtins.int, def (id: __main__.AClass*, id2: None =) -> builtins.int)' y = x() reveal_type(y.delete) # N: Revealed type is 'Overload(def (id: builtins.int, id2: builtins.int) -> builtins.int, def (id: __main__.AClass*, id2: None =) -> builtins.int)' y.delete(10, 20) y.delete(y) def bar(x: AClass) -> None: reveal_type(x.delete) # N: Revealed type is 'Overload(def (id: builtins.int, id2: builtins.int) -> builtins.int, def (id: __main__.AClass*, id2: None =) -> builtins.int)' x.delete(10, 20) [builtins fixtures/classmethod.pyi] [case testSelfTypeBadTypeIgnoredInConstructor] class Base: ... class Sub(Base): def __init__(self: Base) -> None: ... reveal_type(Sub()) # N: Revealed type is '__main__.Sub' [case testSelfTypeBadTypeIgnoredInConstructorGeneric] from typing import Generic, TypeVar T = TypeVar('T') class Base(Generic[T]): ... class Sub(Base[T]): def __init__(self: Base[T], item: T) -> None: ... reveal_type(Sub(42)) # N: Revealed type is '__main__.Sub[builtins.int*]' [case testSelfTypeBadTypeIgnoredInConstructorOverload] from typing import overload class Base: ... class Sub(Base): @overload def __init__(self: Sub, item: int) -> None: ... @overload def __init__(self: Base) -> None: ... def __init__(self, item=None): ... reveal_type(Sub) # N: Revealed type is 'Overload(def (item: builtins.int) -> __main__.Sub, def () -> __main__.Sub)' [case testSelfTypeBadTypeIgnoredInConstructorAbstract] from abc import abstractmethod from typing import Protocol class Blah(Protocol): @abstractmethod def something(self) -> None: ... class Concrete(Blah): def __init__(self: Blah) -> None: ... def something(self) -> None: ... Concrete() # OK [case testSelfTypeGenericClassNoClashInstanceMethod] from typing import TypeVar, Generic M = TypeVar("M") T = TypeVar("T") S = TypeVar("S") class Descriptor(Generic[M]): ... 
class BaseWrapper(Generic[M]): def create_wrapper(self: T, metric_descriptor: Descriptor[M]) -> T: ... class SubWrapper(BaseWrapper[M]): ... def build_wrapper(descriptor: Descriptor[M]) -> BaseWrapper[M]: wrapper: BaseWrapper[M] return wrapper.create_wrapper(descriptor) def build_sub_wrapper(descriptor: Descriptor[S]) -> SubWrapper[S]: wrapper: SubWrapper[S] x = wrapper.create_wrapper(descriptor) reveal_type(x) # N: Revealed type is '__main__.SubWrapper[S`-1]' return x [case testSelfTypeGenericClassNoClashClassMethod] from typing import TypeVar, Generic, Type M = TypeVar("M") T = TypeVar("T") S = TypeVar("S") class Descriptor(Generic[M]): ... class BaseWrapper(Generic[M]): @classmethod def create_wrapper(cls: Type[T], metric_descriptor: Descriptor[M]) -> T: ... class SubWrapper(BaseWrapper[M]): ... def build_wrapper(descriptor: Descriptor[M]) -> BaseWrapper[M]: wrapper_cls: Type[BaseWrapper[M]] return wrapper_cls.create_wrapper(descriptor) def build_sub_wrapper(descriptor: Descriptor[S]) -> SubWrapper[S]: wrapper_cls: Type[SubWrapper[S]] x = wrapper_cls.create_wrapper(descriptor) reveal_type(x) # N: Revealed type is '__main__.SubWrapper[S`-1]' return x [builtins fixtures/classmethod.pyi] [case testSelfTypeGenericClassNoClashClassMethodClassObject] from typing import TypeVar, Generic, Type M = TypeVar("M") T = TypeVar("T") class Descriptor(Generic[M]): ... class BaseWrapper(Generic[M]): @classmethod def create_wrapper(cls: Type[T], metric_descriptor: Descriptor[M]) -> T: ... class SubWrapper(BaseWrapper[M]): ... 
def build_wrapper(descriptor: Descriptor[M]) -> BaseWrapper[M]: return BaseWrapper.create_wrapper(descriptor) def build_sub_wrapper(descriptor: Descriptor[M]) -> SubWrapper[M]: x = SubWrapper.create_wrapper(descriptor) reveal_type(x) # N: Revealed type is '__main__.SubWrapper[M`-1]' return x def build_wrapper_non_gen(descriptor: Descriptor[int]) -> BaseWrapper[str]: return BaseWrapper.create_wrapper(descriptor) # E: Argument 1 to "create_wrapper" of "BaseWrapper" has incompatible type "Descriptor[int]"; expected "Descriptor[str]" def build_sub_wrapper_non_gen(descriptor: Descriptor[int]) -> SubWrapper[str]: return SubWrapper.create_wrapper(descriptor) # E: Argument 1 to "create_wrapper" of "BaseWrapper" has incompatible type "Descriptor[int]"; expected "Descriptor[str]" [builtins fixtures/classmethod.pyi] mypy-0.761/test-data/unit/check-semanal-error.test0000644€tŠÔÚ€2›s®0000000577213576752246026323 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checking after an error during semantic analysis -- ----------------------------------------------------- -- -- This tests both the semantic analyzer (that it does not generate -- corrupt state on error) and the type checker (that it can deal with -- whatever state the semantic analyzer sets up). 
-- TODO: -- - invalid type in annotation -- - invalid function comment type annotation -- - invalid multiple assignment type annotation -- - using a type variable as a value -- - using special names defined in typing as values [case testMissingModuleImport1] import m # E m.foo() m.x = m.y 1() # E [out] main:1: error: Cannot find implementation or library stub for module named 'm' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:4: error: "int" not callable [case testMissingModuleImport2] from m import x # E x.foo() x.a = x.b 1() # E [out] main:1: error: Cannot find implementation or library stub for module named 'm' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:4: error: "int" not callable [case testMissingModuleImport3] from m import * # E x # E 1() # E [out] main:1: error: Cannot find implementation or library stub for module named 'm' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Name 'x' is not defined main:3: error: "int" not callable [case testInvalidBaseClass1] class A(X): # E: Name 'X' is not defined x = 1 A().foo(1) A().x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testInvalidBaseClass2] X = 1 class A(X): # E x = 1 A().foo(1) A().x = '' # E [out] main:3: error: Variable "__main__.X" is not valid as a type main:3: error: Invalid base class "X" main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testInvalidNumberOfTypeArgs] from typing import TypeVar T = TypeVar('T') class C: # Forgot to add type params here def __init__(self, t: T) -> None: pass c = C(t=3) # type: C[int] # E: "C" expects no type arguments, but 1 given [case testBreakOutsideLoop] break # E: 'break' outside loop [case testContinueOutsideLoop] continue # E: 'continue' outside loop [case testYieldOutsideFunction] yield # E: 'yield' 
outside function [case testYieldFromOutsideFunction] x = 1 yield from x # E: 'yield from' outside function [case testImportFuncDup] import m def m() -> None: ... # E: Name 'm' already defined (by an import) [file m.py] [out] [case testIgnoredImportDup] import m # type: ignore from m import f # type: ignore def m() -> None: ... # E: Name 'm' already defined (possibly by an import) def f() -> None: ... # E: Name 'f' already defined (possibly by an import) [out] [case testRuntimeProtoTwoBases] from typing_extensions import Protocol, runtime_checkable from typing import TypeVar, Generic T = TypeVar('T') @runtime_checkable class P(Protocol, Generic[T]): attr: T class C: attr: int x: P[int] = C() [out] mypy-0.761/test-data/unit/check-serialize.test0000644€tŠÔÚ€2›s®0000007151513576752246025541 0ustar jukkaDROPBOX\Domain Users00000000000000-- Serialization test cases (incremental type checking) -- -- These test that modules deserialized from cache files behave -- identically to modules that have undergone full type checking. -- -- These tests are written using the same syntax as test cases in -- check-incremental.test. Look at the comment at that the top of -- that file for the details of how these tests work. -- -- There is probably some overlap with check-incremental.test, but it -- is perhaps not worth trying to simplify these, since a few redundant -- test cases are cheap but accidentally losing test coverage is bad. -- -- These are intended to be straightforward, and do not test import -- cycles and other tricky business. Add test cases for complex things -- to check-incremental.test. -- -- Basic things -- [case testSerializeModuleAttribute] import a [file a.py] import b [file a.py.2] import b y = b.x # type: int [file b.py] x = '' -- We only do the following two sections once here to avoid repetition. -- Most other test cases are similar. 
[rechecked a] [stale] [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") -- -- Functions -- [case testSerializeAnnotatedFunction] import a [file a.py] import b [file a.py.2] import b b.f(1) x = b.f('') # type: str [file b.py] def f(x: str) -> int: pass [out2] tmp/a.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" tmp/a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testSerializeUnannotatedFunction] import a [file a.py] import b [file a.py.2] import b b.f(x=1) b.f() [file b.py] def f(x): pass [out2] tmp/a.py:3: error: Too few arguments for "f" [case testSerializeGenericFunction] import a [file a.py] import b [file a.py.2] from b import f reveal_type(f(1)) reveal_type(f(x='')) [file b.py] from typing import TypeVar T = TypeVar('T') def f(x: T) -> T: return x [out2] tmp/a.py:2: note: Revealed type is 'builtins.int*' tmp/a.py:3: note: Revealed type is 'builtins.str*' [case testSerializeFunctionReturningGenericFunction] import a [file a.py] import b [file a.py.2] import b reveal_type(b.f) reveal_type(b.f()('')) [file b.py] from typing import TypeVar, Callable T = TypeVar('T') def f() -> Callable[[T], T]: pass [out2] tmp/a.py:2: note: Revealed type is 'def () -> def [T] (T`-1) -> T`-1' tmp/a.py:3: note: Revealed type is 'builtins.str*' [case testSerializeArgumentKinds] import a [file a.py] import b [file a.py.2] from b import f f(1, z=1) f(1, '', z=1) f(1, y='', z=1) f(1, '', 2, 3, z=1) f(1, '', zz=1, z=1) f(1, '', foo='', z=1) [file b.py] def f(x: int, y: str = '', *args: int, z: int, zz: int = 1, **kw: str) -> None: pass [builtins fixtures/dict.pyi] [out2] [case testSerializeCallableWithBoundTypeArguments] import a [file a.py] import b [file a.py.2] import b x = b.f [file b.py] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): def f(self, x: T) -> None: pass c: C[int] f = c.f [out] [out2] [case 
testSerializePositionalOnlyArgument] import a [file a.py] import b [file a.py.2] import b b.f(1) b.f('') b.f(__x=1) [file b.py] def f(__x: int) -> None: pass [out2] tmp/a.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" tmp/a.py:4: error: Unexpected keyword argument "__x" for "f" [case testSerializeArgumentKindsErrors] import a [file a.py] import b [file a.py.2] from b import f f('', z=1) # Line 2 f(1, 2, z=1) # 3 f(1, y=1, z=1) # 4 f(1, '', 2, '', z=1) # 5 f(1, '', z='') # 6 f(1, '', zz='', z=1) # 7 f(1, '', z=1, foo=1) # 8 [file b.py] def f(x: int, y: str = '', *args: int, z: int, zz: int = 1, **kw: str) -> None: pass [builtins fixtures/dict.pyi] [out2] tmp/a.py:2: error: Argument 1 to "f" has incompatible type "str"; expected "int" tmp/a.py:3: error: Argument 2 to "f" has incompatible type "int"; expected "str" tmp/a.py:4: error: Argument "y" to "f" has incompatible type "int"; expected "str" tmp/a.py:5: error: Argument 4 to "f" has incompatible type "str"; expected "int" tmp/a.py:6: error: Argument "z" to "f" has incompatible type "str"; expected "int" tmp/a.py:7: error: Argument "zz" to "f" has incompatible type "str"; expected "int" tmp/a.py:8: error: Argument "foo" to "f" has incompatible type "int"; expected "str" [case testSerializeOverloadedFunction] import a [file a.py] import b [file a.py.2] import b reveal_type(b.f(1)) reveal_type(b.f('')) [file b.pyi] from typing import overload @overload def f(x: int) -> int: pass @overload def f(x: str) -> str: pass [out2] tmp/a.py:2: note: Revealed type is 'builtins.int' tmp/a.py:3: note: Revealed type is 'builtins.str' [case testSerializeDecoratedFunction] import a [file a.py] import b [file a.py.2] import b reveal_type(b.f('')) b.f(x=1) [file b.py] from typing import Callable def dec(f: Callable[[int], int]) -> Callable[[str], str]: pass @dec def f(x: int) -> int: pass [out2] tmp/a.py:2: note: Revealed type is 'builtins.str' tmp/a.py:3: error: Unexpected keyword argument "x" for "f" -- 
-- Classes -- [case testSerializeClassAttribute] import a [file a.py] import b [file a.py.2] import b b.A().x = '' [file b.py] class A: x = 1 [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testSerializeMethod] import a [file a.py] import b [file a.py.2] import b b.A().f('') [file b.py] class A: def f(self, x: int) -> None: pass [out2] tmp/a.py:2: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testSerialize__init__] import a [file a.py] import b [file a.py.2] from b import A A('') class B(A): def f(self) -> None: super().__init__('') [file b.py] class A: def __init__(self, x: int) -> None: pass [out2] tmp/a.py:2: error: Argument 1 to "A" has incompatible type "str"; expected "int" tmp/a.py:5: error: Argument 1 to "__init__" of "A" has incompatible type "str"; expected "int" [case testSerializeOverloaded__init__] import a [file a.py] import b [file a.py.2] from b import A A(object()) # E A(x='') A(0) class B(A): def f(self) -> None: super().__init__(object()) # E super().__init__('') super().__init__(0) [file b.pyi] from typing import overload class A: @overload def __init__(self, x: int) -> None: pass @overload def __init__(self, x: str) -> None: pass [out2] tmp/a.py:2: error: No overload variant of "A" matches argument type "object" tmp/a.py:2: note: Possible overload variants: tmp/a.py:2: note: def A(self, x: int) -> A tmp/a.py:2: note: def A(self, x: str) -> A tmp/a.py:7: error: No overload variant of "__init__" of "A" matches argument type "object" tmp/a.py:7: note: Possible overload variants: tmp/a.py:7: note: def __init__(self, x: int) -> None tmp/a.py:7: note: def __init__(self, x: str) -> None [case testSerialize__new__] import a [file a.py] import b [file a.py.2] from b import A A('') [file b.py] class A: def __new__(cls, x: int) -> 'A': pass [out2] tmp/a.py:2: error: Argument 1 to "A" has incompatible type "str"; expected "int" [case 
testSerializeClassVar] import a [file a.py] import b [file a.py.2] from b import A A.x = '' A().x = 1 [file b.py] from typing import ClassVar class A: x: ClassVar[int] [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/a.py:3: error: Cannot assign to class variable "x" via instance [case testSerializeGenericClass] import a [file a.py] import b [file a.py.2] from b import A a1: A[int, str] = A(1) a2: A[int, str] = A('') reveal_type(a1.y) reveal_type(a1.f()) [file b.py] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): x: T y: S def __init__(self, x: T) -> None: self.x = x def f(self) -> T: return self.x [out2] tmp/a.py:3: error: Argument 1 to "A" has incompatible type "str"; expected "int" tmp/a.py:4: note: Revealed type is 'builtins.str*' tmp/a.py:5: note: Revealed type is 'builtins.int*' [case testSerializeAbstractClass] import a [file a.py] import b [file a.py.2] from b import A A() class B(A): def f(self) -> None: pass x: int B() a: A a.f() a.x = 1 [file b.py] from abc import ABCMeta, abstractmethod, abstractproperty class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass @abstractproperty def x(self) -> int: return 0 [typing fixtures/typing-full.pyi] [out2] tmp/a.py:2: error: Cannot instantiate abstract class 'A' with abstract attributes 'f' and 'x' tmp/a.py:9: error: Property "x" defined in "A" is read-only [case testSerializeStaticMethod] import a [file a.py] import b [file a.py.2] from b import A A.f(1) A.f() A().f() [file b.py] class A: @staticmethod def f() -> None: pass [builtins fixtures/staticmethod.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "A" [case testSerializeClassMethod] import a [file a.py] import b [file a.py.2] from b import A A.f(1) A.f() A().f() [file b.py] class A: @classmethod def f(cls) -> None: pass [builtins fixtures/classmethod.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "A" [case 
testSerializeReadOnlyProperty] import a [file a.py] import b [file a.py.2] from b import A reveal_type(A().x) A().x = 0 [file b.py] class A: @property def x(self) -> int: return 0 [builtins fixtures/property.pyi] [out2] tmp/a.py:2: note: Revealed type is 'builtins.int' tmp/a.py:3: error: Property "x" defined in "A" is read-only [case testSerializeReadWriteProperty] import a [file a.py] import b [file a.py.2] from b import A reveal_type(A().x) A().x = '' A().x = 0 [file b.py] class A: @property def x(self) -> int: return 0 @x.setter def x(self, v: int) -> None: pass [builtins fixtures/property.pyi] [out2] tmp/a.py:2: note: Revealed type is 'builtins.int' tmp/a.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testSerializeSelfType] import a [file a.py] import b [file a.py.2] from b import A reveal_type(A().f()) class B(A): pass reveal_type(B().f()) [file b.py] from typing import TypeVar T = TypeVar('T', bound='A') class A: def f(self: T) -> T: return self [out2] tmp/a.py:2: note: Revealed type is 'b.A*' tmp/a.py:4: note: Revealed type is 'a.B*' [case testSerializeInheritance] import a [file a.py] import b [file a.py.2] from b import A, B, C C().f(1) # E C().g(1) # E reveal_type(C().h()) a: A = C() b: B = C() i: int = C() # E [file b.py] class A: def f(self) -> int: pass class B: def g(self) -> str: pass def h(self) -> object: pass class C(A, B): def h(self) -> int: pass [out2] tmp/a.py:2: error: Too many arguments for "f" of "A" tmp/a.py:3: error: Too many arguments for "g" of "B" tmp/a.py:4: note: Revealed type is 'builtins.int' tmp/a.py:7: error: Incompatible types in assignment (expression has type "C", variable has type "int") [case testSerializeGenericInheritance] import a [file a.py] import b [file a.py.2] from b import B b: B[int] reveal_type(b.f()) [file b.py] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> T: pass class B(A[A[T]]): pass [out2] tmp/a.py:3: note: 
Revealed type is 'b.A*[builtins.int*]' [case testSerializeFixedLengthTupleBaseClass] import a [file a.py] import b [file a.py.2] from b import A a: A a.f(1) reveal_type((a[0], a[1])) [file b.py] from typing import Tuple class A(Tuple[int, str]): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:3: error: Too many arguments for "f" of "A" tmp/a.py:4: note: Revealed type is 'Tuple[builtins.int, builtins.str]' [case testSerializeVariableLengthTupleBaseClass] import a [file a.py] import b [file a.py.2] from b import A a: A a.f(1) reveal_type((a[0], a[1])) [file b.py] from typing import Tuple class A(Tuple[int, ...]): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:3: error: Too many arguments for "f" of "A" tmp/a.py:4: note: Revealed type is 'Tuple[builtins.int*, builtins.int*]' [case testSerializePlainTupleBaseClass] import a [file a.py] import b [file a.py.2] from b import A a: A a.f(1) reveal_type((a[0], a[1])) [file b.py] from typing import Tuple class A(tuple): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:3: error: Too many arguments for "f" of "A" tmp/a.py:4: note: Revealed type is 'Tuple[Any, Any]' [case testSerializeNamedTupleBaseClass] import a [file a.py] import b [file a.py.2] from b import A a: A a.f(1) reveal_type((a[0], a[1])) reveal_type((a.x, a.y)) [file b.py] from typing import NamedTuple class A(NamedTuple('N', [('x', int), ('y', str)])): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:3: error: Too many arguments for "f" of "A" tmp/a.py:4: note: Revealed type is 'Tuple[builtins.int, builtins.str]' tmp/a.py:5: note: Revealed type is 'Tuple[builtins.int, builtins.str]' [case testSerializeAnyBaseClass] import a [file a.py] import b [file a.py.2] from b import B B().f(1) reveal_type(B().xyz) [file b.py] from typing import Any A: Any class B(A): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "B" 
tmp/a.py:3: note: Revealed type is 'Any' [case testSerializeIndirectAnyBaseClass] import a [file a.py] import b [file a.py.2] from b import C C().f(1) C().g(1) reveal_type(C().xyz) [file b.py] from typing import Any A: Any class B(A): def f(self) -> None: pass class C(B): def g(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "B" tmp/a.py:3: error: Too many arguments for "g" of "C" tmp/a.py:4: note: Revealed type is 'Any' [case testSerializeNestedClass] import a [file a.py] import b [file a.py.2] import b b.A.B().f(1) b.A.B.C().g(1) b.b.f(1) b.c.g(1) [file b.py] class A: class B: def f(self) -> None: pass class C: def g(self) -> None: pass b: A.B c: A.B.C [builtins fixtures/tuple.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "B" tmp/a.py:3: error: Too many arguments for "g" of "C" tmp/a.py:4: error: Too many arguments for "f" of "B" tmp/a.py:5: error: Too many arguments for "g" of "C" [case testSerializeCallableVsTypeObjectDistinction] import a [file a.py] import b [file a.py.2] import b t: type t = b.A if int(): t = b.f # E [file b.py] class A: pass def f() -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:5: error: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "type") [case testSerializeOverloadedVsTypeObjectDistinction] import a [file a.py] import b [file a.py.2] import b t: type t = b.A if int(): t = b.f # E [file b.pyi] from typing import overload class A: @overload def __init__(self) -> None: pass @overload def __init__(self, x: int) -> None: pass @overload def f() -> None: pass @overload def f(x: int) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:5: error: Incompatible types in assignment (expression has type overloaded function, variable has type "type") [case testSerializeNamedTupleInMethod4] from ntcrash import C reveal_type(C().a) reveal_type(C().b) reveal_type(C().c) [file ntcrash.py] from typing import 
NamedTuple class C: def __init__(self) -> None: A = NamedTuple('A', [('x', int)]) self.a = A(0) self.b = A(0) # type: A self.c = A [out1] main:2: note: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]' main:3: note: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]' main:4: note: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A@4]' [out2] main:2: note: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]' main:3: note: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]' main:4: note: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A@4]' -- -- Strict optional -- [case testSerializeOptionalType] # flags: --strict-optional import a [file a.py] import b [file a.py.2] import b reveal_type(b.x) b.f(b.x) [file b.py] from typing import Optional x: Optional[int] def f(x: int) -> None: pass [out2] tmp/a.py:2: note: Revealed type is 'Union[builtins.int, None]' tmp/a.py:3: error: Argument 1 to "f" has incompatible type "Optional[int]"; expected "int" -- -- # type: ignore -- [case testSerializeIgnoredUndefinedType] import b reveal_type(b.x) [file b.py] x: NonExistent # type: ignore [out1] main:2: note: Revealed type is 'Any' [out2] main:2: note: Revealed type is 'Any' [case testSerializeIgnoredInvalidType] import b reveal_type(b.x) [file b.py] A = 0 x: A # type: ignore [out1] main:2: note: Revealed type is 'A?' [out2] main:2: note: Revealed type is 'A?' 
[case testSerializeIgnoredMissingBaseClass] import b reveal_type(b.B()) reveal_type(b.B().x) [file b.py] class B(A): pass # type: ignore [out1] main:2: note: Revealed type is 'b.B' main:3: note: Revealed type is 'Any' [out2] main:2: note: Revealed type is 'b.B' main:3: note: Revealed type is 'Any' [case testSerializeIgnoredInvalidBaseClass] import b reveal_type(b.B()) reveal_type(b.B().x) [file b.py] A = 0 class B(A): pass # type: ignore [out1] main:2: note: Revealed type is 'b.B' main:3: note: Revealed type is 'Any' [out2] main:2: note: Revealed type is 'b.B' main:3: note: Revealed type is 'Any' [case testSerializeIgnoredImport] import a [file a.py] import b [file a.py.2] import b reveal_type(b.m) reveal_type(b.x) [file b.py] import m # type: ignore from m import x # type: ignore [out2] tmp/a.py:2: note: Revealed type is 'Any' tmp/a.py:3: note: Revealed type is 'Any' -- -- TypeVar -- [case testSerializeSimpleTypeVar] import a [file a.py] import b [file a.py.2] import b def f(x: b.T) -> b.T: return x reveal_type(f) [file b.py] from typing import TypeVar T = TypeVar('T') [out2] tmp/a.py:3: note: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1' [case testSerializeBoundedTypeVar] import a [file a.py] import b [file a.py.2] import b def f(x: b.T) -> b.T: return x reveal_type(f) reveal_type(b.g) [file b.py] from typing import TypeVar T = TypeVar('T', bound=int) def g(x: T) -> T: return x [out2] tmp/a.py:3: note: Revealed type is 'def [b.T <: builtins.int] (x: b.T`-1) -> b.T`-1' tmp/a.py:4: note: Revealed type is 'def [T <: builtins.int] (x: T`-1) -> T`-1' [case testSerializeTypeVarWithValues] import a [file a.py] import b [file a.py.2] import b def f(x: b.T) -> b.T: return x reveal_type(f) reveal_type(b.g) [file b.py] from typing import TypeVar T = TypeVar('T', int, str) def g(x: T) -> T: return x [out2] tmp/a.py:3: note: Revealed type is 'def [b.T in (builtins.int, builtins.str)] (x: b.T`-1) -> b.T`-1' tmp/a.py:4: note: Revealed type is 'def [T in (builtins.int, 
builtins.str)] (x: T`-1) -> T`-1' [case testSerializeTypeVarInClassBody] import a [file a.py] import b [file a.py.2] from b import A def f(x: A.T) -> A.T: return x reveal_type(f) [file b.py] from typing import TypeVar class A: T = TypeVar('T', int, str) [out2] tmp/a.py:3: note: Revealed type is 'def [A.T in (builtins.int, builtins.str)] (x: A.T`-1) -> A.T`-1' -- -- NewType -- [case testSerializeNewType] import a [file a.py] import b [file a.py.2] import b y: b.N y = 1 i = y b.x = 1 b.x = y y = b.N(1) y = b.N('') [file b.py] from typing import NewType N = NewType('N', int) x: N [out2] tmp/a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "N") tmp/a.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "N") tmp/a.py:8: error: Argument 1 to "N" has incompatible type "str"; expected "int" -- -- Named tuples -- [case testSerializeNamedTuple] import a [file a.py] import b [file a.py.2] import b from typing import Tuple y: b.N t: Tuple[int] y = t b.x = t t = y b.x = t reveal_type(b.N(x=1)) reveal_type(y[0]) b.N(x='') [file b.py] from typing import NamedTuple N = NamedTuple('N', [('x', int)]) x: N [out2] tmp/a.py:5: error: Incompatible types in assignment (expression has type "Tuple[int]", variable has type "N") tmp/a.py:6: error: Incompatible types in assignment (expression has type "Tuple[int]", variable has type "N") tmp/a.py:9: note: Revealed type is 'Tuple[builtins.int, fallback=b.N]' tmp/a.py:10: note: Revealed type is 'builtins.int' tmp/a.py:11: error: Argument "x" to "N" has incompatible type "str"; expected "int" -- -- Types and type aliases -- [case testSerializeTypeAliases] import a [file a.py] import b [file a.py.2] import b d: b.D a: b.A u: b.U l: b.L t: b.T c: b.C ty: b.Ty reveal_type(d) reveal_type(a) reveal_type(u) reveal_type(l) reveal_type(t) reveal_type(c) reveal_type(ty) c2: b.C2 reveal_type(c2) ty2: b.Ty2 reveal_type(ty2) [file b.py] from typing import Any, Union, List, 
Tuple, Callable, Type class DD: pass D = DD A = Any U = Union[int, str] L = List[int] T = Tuple[int, str] C = Callable[[int], str] C2 = Callable[..., str] Ty = Type[int] Ty2 = type [builtins fixtures/list.pyi] [out2] tmp/a.py:9: note: Revealed type is 'b.DD' tmp/a.py:10: note: Revealed type is 'Any' tmp/a.py:11: note: Revealed type is 'Union[builtins.int, builtins.str]' tmp/a.py:12: note: Revealed type is 'builtins.list[builtins.int]' tmp/a.py:13: note: Revealed type is 'Tuple[builtins.int, builtins.str]' tmp/a.py:14: note: Revealed type is 'def (builtins.int) -> builtins.str' tmp/a.py:15: note: Revealed type is 'Type[builtins.int]' tmp/a.py:17: note: Revealed type is 'def (*Any, **Any) -> builtins.str' tmp/a.py:19: note: Revealed type is 'builtins.type' [case testSerializeGenericTypeAlias] import b from b import X # Work around https://github.com/python/mypy/issues/2887 t: b.Y[int] reveal_type(t) [file b.py] from typing import TypeVar, Tuple X = TypeVar('X') Y = Tuple[X, str] [builtins fixtures/tuple.pyi] [out1] main:4: note: Revealed type is 'Tuple[builtins.int, builtins.str]' [out2] main:4: note: Revealed type is 'Tuple[builtins.int, builtins.str]' [case testSerializeTuple] # Don't repreat types tested by testSerializeTypeAliases here. import a [file a.py] import b [file a.py.2] import b reveal_type(b.x) reveal_type(b.y) [file b.py] from typing import Tuple x: Tuple[int, ...] 
y: tuple [builtins fixtures/tuple.pyi] [out2] tmp/a.py:2: note: Revealed type is 'builtins.tuple[builtins.int]' tmp/a.py:3: note: Revealed type is 'builtins.tuple[Any]' [case testSerializeNone] import a [file a.py] import b [file a.py.2] import b reveal_type(b.x) [file b.py] x: None [out2] tmp/a.py:2: note: Revealed type is 'None' -- -- TypedDict -- [case testSerializeTypedDictInMethod] from ntcrash import C reveal_type(C().a) reveal_type(C().b) reveal_type(C().c) [file ntcrash.py] from mypy_extensions import TypedDict class C: def __init__(self) -> None: A = TypedDict('A', {'x': int}) self.a = A(x=0) self.b = A(x=0) # type: A self.c = A [builtins fixtures/dict.pyi] [out1] main:2: note: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})' main:3: note: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})' main:4: note: Revealed type is 'def () -> ntcrash.C.A@4' [out2] main:2: note: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})' main:3: note: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})' main:4: note: Revealed type is 'def () -> ntcrash.C.A@4' [case testSerializeNonTotalTypedDict] from m import d reveal_type(d) [file m.py] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=False) d: D [builtins fixtures/dict.pyi] [out1] main:2: note: Revealed type is 'TypedDict('m.D', {'x'?: builtins.int, 'y'?: builtins.str})' [out2] main:2: note: Revealed type is 'TypedDict('m.D', {'x'?: builtins.int, 'y'?: builtins.str})' -- -- Modules -- [case testSerializeImport] import b b.c.f() b.c.g() [file b.py] import c [file c.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerializeImportAs] import b b.d.f() b.d.g() [file b.py] import c as d [file c.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few 
arguments for "g" [case testSerializeFromImportedClass] import b b.A(1) reveal_type(b.A()) [file b.py] from c import A [file c.py] class A: pass [out1] main:2: error: Too many arguments for "A" main:3: note: Revealed type is 'c.A' [out2] main:2: error: Too many arguments for "A" main:3: note: Revealed type is 'c.A' [case testSerializeFromImportedClassAs] import b b.B(1) reveal_type(b.B()) [file b.py] from c import A as B [file c.py] class A: pass [out1] main:2: error: Too many arguments for "A" main:3: note: Revealed type is 'c.A' [out2] main:2: error: Too many arguments for "A" main:3: note: Revealed type is 'c.A' [case testSerializeFromImportedModule] import b b.d.f() b.d.g() [file b.py] from c import d [file c/__init__.py] [file c/d.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerializeQualifiedImport] import b b.c.d.f() b.c.d.g() [file b.py] import c.d [file c/__init__.py] [file c/d.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerializeQualifiedImportAs] import b b.e.f() b.e.g() [file b.py] import c.d as e [file c/__init__.py] [file c/d.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerialize__init__ModuleImport] import b b.c.f() b.c.g() a: b.c.d.A reveal_type(a) [file b.py] import c [file c/__init__.py] import d def f() -> None: pass def g(x: int) -> None: pass [file d.py] class A: pass [out1] main:3: error: Too few arguments for "g" main:5: note: Revealed type is 'd.A' [out2] main:3: error: Too few arguments for "g" main:5: note: Revealed type is 'd.A' [case testSerializeImportInClassBody] import b b.A.c.f() b.A.c.g() [file b.py] class A: import c [file c.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: 
error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerializeImportedTypeAlias] import b x: b.B reveal_type(x) [file b.py] from c import B [file c.py] from typing import Any class A: pass B = A [out1] main:3: note: Revealed type is 'c.A' [out2] main:3: note: Revealed type is 'c.A' [case testSerializeStarImport] import a [file a.py] import b [file a.py.2] import b b.f(1) x: b.A reveal_type(x) [file b.py] from c import * [file c.py] def f() -> None: pass class A: pass [out2] tmp/a.py:2: error: Too many arguments for "f" tmp/a.py:4: note: Revealed type is 'c.A' [case testSerializeRelativeImport] import b.c b.c.f(1) [file b/__init__.py] [file b/c.py] from .d import f [file b/d.py] def f() -> None: pass [out1] main:2: error: Too many arguments for "f" [out2] main:2: error: Too many arguments for "f" [case testSerializeDummyType] import a [file a.py] import b reveal_type(b.Test(None).foo) [file a.py.2] import b reveal_type(b.Test(b.Foo()).foo) [file b.py] class Foo(object): pass class Test: def __init__(self, o: Foo) -> None: self.foo = None if callable(o): self.foo = o [builtins fixtures/callable.pyi] [out1] tmp/a.py:2: note: Revealed type is 'b.' [out2] tmp/a.py:2: note: Revealed type is 'b.' 
[case testSerializeForwardReferenceToAliasInProperty] import a [file a.py] import b [file a.py.2] import b reveal_type(b.A().p) [file b.py] class A: @property def p(self) -> C: pass @p.setter def p(self, c: C) -> None: pass @p.deleter def p(self) -> None: pass C = str [builtins fixtures/property.pyi] [out2] tmp/a.py:2: note: Revealed type is 'builtins.str' [case testSerializeForwardReferenceToImportedAliasInProperty] import a [file a.py] import b [file a.py.2] import b reveal_type(b.A().p) [file b.py] class A: @property def p(self) -> C: pass @p.setter def p(self, c: C) -> None: pass @p.deleter def p(self) -> None: pass from m import C [file m.py] C = str [builtins fixtures/property.pyi] [out2] tmp/a.py:2: note: Revealed type is 'builtins.str' [case testSerializeNestedClassStuff] # flags: --verbose import a [file a.py] import b [file a.py.2] import b # [file b.py] def foo() -> None: class Foo: class Bar: pass [case testSerializeGenericClassMethod] import a [file a.py] import b from typing import TypeVar T = TypeVar('T') class C: @classmethod def f(cls, x: T) -> T: ... x = C.f [file b.py] x = 1 [file b.py.2] x = 'yes' [builtins fixtures/classmethod.pyi] [case testSerializeGenericAbstractMethod] import a [file a.py] import b from typing import TypeVar from abc import abstractmethod T = TypeVar('T') class C: @abstractmethod def f(self, x: T) -> T: ... c: C x = c.f [file b.py] x = 1 [file b.py.2] x = 'yes' [case testSerializeGenericNormalMethod] import a [file a.py] import b from typing import TypeVar from abc import abstractmethod T = TypeVar('T') class C: def f(self, x: T) -> T: ... 
c: C x = c.f [file b.py] x = 1 [file b.py.2] x = 'yes' mypy-0.761/test-data/unit/check-statements.test0000644€tŠÔÚ€2›s®0000013135613576752246025741 0ustar jukkaDROPBOX\Domain Users00000000000000-- Return statement -- ---------------- [case testReturnValue] import typing def f() -> 'A': return A() def g() -> 'B': return A() class A: pass class B: pass [out] main:5: error: Incompatible return value type (got "A", expected "B") [case testReturnSubtype] import typing def f() -> 'B': return A() def g() -> 'A': return B() class A: pass class B(A): pass [out] main:3: error: Incompatible return value type (got "A", expected "B") [case testReturnWithoutAValue] import typing def f() -> 'A': return def g() -> None: return class A: pass [out] main:3: error: Return value expected [case testReturnNoneInFunctionReturningNone] import typing def f() -> None: return None def g() -> None: return f() [out] [case testReturnInGenerator] from typing import Generator def f() -> Generator[int, None, str]: yield 1 return "foo" [out] [case testEmptyReturnInGenerator] from typing import Generator def f() -> Generator[int, None, str]: yield 1 return # E: Return value expected [out] [case testNoReturnInGenerator] from typing import Generator def f() -> Generator[int, None, str]: # E: Missing return statement yield 1 [out] [case testEmptyReturnInNoneTypedGenerator] from typing import Generator def f() -> Generator[int, None, None]: yield 1 return [out] [case testNonEmptyReturnInNoneTypedGenerator] from typing import Generator def f() -> Generator[int, None, None]: yield 1 return 42 # E: No return value expected [out] [case testReturnInIterator] from typing import Iterator def f() -> Iterator[int]: yield 1 return "foo" [out] -- If statement -- ------------ [case testIfStatement] a = None # type: A a2 = None # type: A a3 = None # type: A b = None # type: bool if a: a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") elif a2: a = b # E: Incompatible types 
in assignment (expression has type "bool", variable has type "A") elif a3: a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") else: a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if b: pass elif b: pass if b: pass class A: pass [builtins fixtures/bool.pyi] -- Loops -- ----- [case testWhileStatement] a = None # type: A b = None # type: bool while a: a = b # Fail else: a = b # Fail while b: b = b class A: pass [builtins fixtures/bool.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "bool", variable has type "A") main:7: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testForStatement] a = None # type: A b = None # type: object for a in [A()]: a = b # Fail else: a = b # Fail class A: pass [builtins fixtures/list.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A") main:7: error: Incompatible types in assignment (expression has type "object", variable has type "A") [case testBreakStatement] import typing while None: break [builtins fixtures/bool.pyi] [out] [case testContinueStatement] import typing while None: continue [builtins fixtures/bool.pyi] [out] [case testForStatementTypeComments] from typing import List, Union x = [] # type: List[int] for y in x: # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass for z in x: # type: int pass for w in x: # type: Union[int, str] reveal_type(w) # N: Revealed type is 'Union[builtins.int, builtins.str]' for v in x: # type: int, int # E: Syntax error in type annotation # N: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) pass [builtins fixtures/list.pyi] [case testForStatementMultipleTypeComments] from typing import List, Tuple x = [] # type: List[Tuple[int, int]] for y in x: # type: int, int # E: Syntax error in type annotation # N: 
Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) pass for z in x: # type: Tuple[int, int] pass for w,v in x: # type: int, str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass for a, b in x: # type: int, int, int # E: Incompatible number of tuple items pass [builtins fixtures/list.pyi] -- Operator assignment -- ------------------- [case testPlusAssign] a, b, c = None, None, None # type: (A, B, C) a += b # Fail b += a # Fail c += a # Fail a += c class A: def __add__(self, x: 'C') -> 'A': pass class B: def __add__(self, x: A) -> 'C': pass class C: pass [out] main:3: error: Unsupported operand types for + ("A" and "B") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B") main:5: error: Unsupported left operand type for + ("C") [case testMinusAssign] a, b, c = None, None, None # type: (A, B, C) a -= b # Fail b -= a # Fail c -= a # Fail a -= c class A: def __sub__(self, x: 'C') -> 'A': pass class B: def __sub__(self, x: A) -> 'C': pass class C: pass [out] main:3: error: Unsupported operand types for - ("A" and "B") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B") main:5: error: Unsupported left operand type for - ("C") [case testMulAssign] a, c = None, None # type: (A, C) a *= a # Fail c *= a # Fail a *= c class A: def __mul__(self, x: 'C') -> 'A': pass class C: pass [out] main:3: error: Unsupported operand types for * ("A" and "A") main:4: error: Unsupported left operand type for * ("C") [case testMatMulAssign] a, c = None, None # type: (A, C) a @= a # E: Unsupported operand types for @ ("A" and "A") c @= a # E: Unsupported left operand type for @ ("C") a @= c class A: def __matmul__(self, x: 'C') -> 'A': pass class C: pass [case testDivAssign] a, c = None, None # type: (A, C) a /= a # Fail c /= a # Fail a /= c class A: def __truediv__(self, x: 'C') -> 'A': pass class C: pass [out] main:3: error: Unsupported operand types 
for / ("A" and "A") main:4: error: Unsupported left operand type for / ("C") [case testPowAssign] a, c = None, None # type: (A, C) a **= a # Fail c **= a # Fail a **= c class A: def __pow__(self, x: 'C') -> 'A': pass class C: pass [out] main:3: error: Unsupported operand types for ** ("A" and "A") main:4: error: Unsupported left operand type for ** ("C") [case testSubtypesInOperatorAssignment] a, b = None, None # type: (A, B) b += b b += a a += b class A: def __add__(self, x: 'A') -> 'B': pass class B(A): pass [out] [case testAdditionalOperatorsInOpAssign] a, c = None, None # type: (A, C) a &= a # Fail a >>= a # Fail a //= a # Fail a &= c a >>= c a //= c class A: def __and__(self, x: 'C') -> 'A': pass def __rshift__(self, x: 'C') -> 'A': pass def __floordiv__(self, x: 'C') -> 'A': pass class C: pass [out] main:3: error: Unsupported operand types for & ("A" and "A") main:4: error: Unsupported operand types for >> ("A" and "A") main:5: error: Unsupported operand types for // ("A" and "A") [case testInplaceOperatorMethods] import typing class A: def __iadd__(self, x: int) -> 'A': pass def __imul__(self, x: str) -> 'A': pass def __imatmul__(self, x: str) -> 'A': pass a = A() a += 1 a *= '' a @= '' a += '' # E: Argument 1 to "__iadd__" of "A" has incompatible type "str"; expected "int" a *= 1 # E: Argument 1 to "__imul__" of "A" has incompatible type "int"; expected "str" a @= 1 # E: Argument 1 to "__imatmul__" of "A" has incompatible type "int"; expected "str" [case testInplaceSetitem] class A(object): def __init__(self) -> None: self.a = [1] def __iadd__(self, a): # type: (int) -> A self.a += [2] return self a = A() b = [a] b[0] += 1 [builtins fixtures/list.pyi] [out] -- Assert statement -- ---------------- [case testAssert] import typing assert None + None # Fail assert None [out] main:2: error: Unsupported left operand type for + ("None") -- Exception handling -- ------------------ [case testRaiseStatement] e = None # type: BaseException f = None # type: MyError a = 
None # type: A raise a # Fail raise e raise f class A: pass class MyError(BaseException): pass [builtins fixtures/exception.pyi] [out] main:5: error: Exception must be derived from BaseException [case testRaiseClassobject] import typing class A: pass class MyError(BaseException): pass def f(): pass raise BaseException raise MyError raise A # E: Exception must be derived from BaseException raise object # E: Exception must be derived from BaseException raise f # E: Exception must be derived from BaseException [builtins fixtures/exception.pyi] [case testRaiseExceptionType] import typing x = None # type: typing.Type[BaseException] raise x [builtins fixtures/exception.pyi] [case testRaiseNonExceptionTypeFails] import typing x = int # type: typing.Type[int] raise x # E: Exception must be derived from BaseException [builtins fixtures/exception.pyi] [case testRaiseUnion] import typing x = None # type: typing.Union[BaseException, typing.Type[BaseException]] raise x [builtins fixtures/exception.pyi] [case testRaiseNonExceptionUnionFails] import typing x = None # type: typing.Union[BaseException, int] raise x # E: Exception must be derived from BaseException [builtins fixtures/exception.pyi] [case testRaiseFromStatement] e = None # type: BaseException f = None # type: MyError a = None # type: A raise e from a # E: Exception must be derived from BaseException raise e from e raise e from f class A: pass class MyError(BaseException): pass [builtins fixtures/exception.pyi] [case testRaiseFromClassobject] import typing class A: pass class MyError(BaseException): pass def f(): pass raise BaseException from BaseException raise BaseException from MyError raise BaseException from A # E: Exception must be derived from BaseException raise BaseException from object # E: Exception must be derived from BaseException raise BaseException from f # E: Exception must be derived from BaseException [builtins fixtures/exception.pyi] [case testTryFinallyStatement] import typing try: b = object() # 
type: A # Fail finally: c = object() # type: A # Fail class A: pass [out] main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A") [case testSimpleTryExcept] try: pass except BaseException as e: a, o = None, None # type: (BaseException, object) e = a e = o # Fail class A: pass class B: pass [builtins fixtures/exception.pyi] [out] main:7: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") [case testTypeErrorInBlock] while object: x = None # type: A if int(): x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [case testTypeErrorInvolvingBaseException] x, a = None, None # type: (BaseException, A) if int(): a = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "A") if int(): a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "BaseException") if int(): x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "BaseException") if int(): x = BaseException() class A: pass [builtins fixtures/exception.pyi] [case testSimpleTryExcept2] import typing try: pass except BaseException as e: e = object() # Fail e = BaseException() [builtins fixtures/exception.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") [case testBaseClassAsExceptionTypeInExcept] import typing try: pass except Err as e: e = BaseException() # Fail e = Err() class Err(BaseException): pass [builtins 
fixtures/exception.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") [case testMultipleExceptHandlers] import typing try: pass except BaseException as e: pass except Err as f: f = BaseException() # Fail f = Err() class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] main:7: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") [case testTryExceptStatement] import typing try: a = B() # type: A # Fail except BaseException as e: e = A() # Fail e = Err() except Err as f: f = BaseException() # Fail f = Err() class A: pass class B: pass class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:5: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException") main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") [case testTryExceptWithinFunction] import typing def f() -> None: try: pass except BaseException as e: e = object() # Fail e = BaseException() except Err as f: f = BaseException() # Fail f = Err() class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") [case testTryExceptFlow] def f() -> None: x = 1 try: pass except: raise x + 'a' # E: Unsupported left operand type for + ("int") [builtins fixtures/exception.pyi] [out] [case testTryWithElse] import typing try: pass except BaseException: pass else: object(None) # E: Too many arguments for "object" [builtins fixtures/exception.pyi] [case testRedefinedFunctionInTryWithElse] def f() -> None: pass try: pass except BaseException: f2 = f else: 
def f2() -> str: pass try: pass except BaseException: f3 = f else: def f3() -> None: pass [builtins fixtures/exception.pyi] [out] main:7: error: Incompatible redefinition (redefinition with type "Callable[[], str]", original type "Callable[[], None]") [case testExceptWithoutType] import typing try: -None # E: Unsupported operand type for unary - ("None") except: ~None # E: Unsupported operand type for ~ ("None") [builtins fixtures/exception.pyi] [case testRaiseWithoutArgument] import typing try: None except: raise [builtins fixtures/exception.pyi] [case testExceptWithMultipleTypes] import typing class E1(BaseException): pass class E2(E1): pass try: pass except (E1, E2): pass except (E1, object): pass # E: Exception type must be derived from BaseException except (object, E2): pass # E: Exception type must be derived from BaseException except (E1, (E2,)): pass # E: Exception type must be derived from BaseException except (E1, E2): pass except ((E1, E2)): pass except (((E1, E2))): pass [builtins fixtures/exception.pyi] [case testExceptWithTypeType] import typing E = BaseException # type: typing.Type[BaseException] try: pass except E: pass [builtins fixtures/exception.pyi] [case testExceptWithMultipleTypes2] import typing class E1(BaseException): pass class E2(E1): pass try: pass except (E1, E2) as e1: x = e1 # type: E1 y = e1 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2") except (E2, E1) as e2: a = e2 # type: E1 b = e2 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2") except (E1, E2, int) as e3: # E: Exception type must be derived from BaseException pass [builtins fixtures/exception.pyi] [case testExceptWithMultipleTypes3] import typing class E1(BaseException): pass class E1_1(E1): pass class E1_2(E1): pass try: pass except (E1, E1_1, E1_2) as e1: x = e1 # type: E1 y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type 
"E1_1") z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2") except (E1_1, E1_2) as e2: a = e2 # type: E1 b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1") c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2") [builtins fixtures/exception.pyi] [case testExceptWithMultipleTypes4] from typing import Tuple, Type, Union class E1(BaseException): pass class E2(BaseException): pass class E3(BaseException): pass def variadic(exc: Tuple[Type[E1], ...]) -> None: try: pass except exc as e: reveal_type(e) # N: Revealed type is '__main__.E1' def union(exc: Union[Type[E1], Type[E2]]) -> None: try: pass except exc as e: reveal_type(e) # N: Revealed type is 'Union[__main__.E1, __main__.E2]' def tuple_in_union(exc: Union[Type[E1], Tuple[Type[E2], Type[E3]]]) -> None: try: pass except exc as e: reveal_type(e) # N: Revealed type is 'Union[__main__.E1, __main__.E2, __main__.E3]' def variadic_in_union(exc: Union[Type[E1], Tuple[Type[E2], ...]]) -> None: try: pass except exc as e: reveal_type(e) # N: Revealed type is 'Union[__main__.E1, __main__.E2]' def nested_union(exc: Union[Type[E1], Union[Type[E2], Type[E3]]]) -> None: try: pass except exc as e: reveal_type(e) # N: Revealed type is 'Union[__main__.E1, __main__.E2, __main__.E3]' def error_in_union(exc: Union[Type[E1], int]) -> None: try: pass except exc as e: # E: Exception type must be derived from BaseException pass def error_in_variadic(exc: Tuple[int, ...]) -> None: try: pass except exc as e: # E: Exception type must be derived from BaseException pass [builtins fixtures/tuple.pyi] [case testExceptWithAnyTypes] from typing import Any E1 = None # type: Any class E2(BaseException): pass class NotBaseDerived: pass try: pass except BaseException as e1: reveal_type(e1) # N: Revealed type is 'builtins.BaseException' except (E1, 
BaseException) as e2: reveal_type(e2) # N: Revealed type is 'Union[Any, builtins.BaseException]' except (E1, E2) as e3: reveal_type(e3) # N: Revealed type is 'Union[Any, __main__.E2]' except (E1, E2, BaseException) as e4: reveal_type(e4) # N: Revealed type is 'Union[Any, builtins.BaseException]' try: pass except E1 as e1: reveal_type(e1) # N: Revealed type is 'Any' except E2 as e2: reveal_type(e2) # N: Revealed type is '__main__.E2' except NotBaseDerived as e3: # E: Exception type must be derived from BaseException pass except (NotBaseDerived, E1) as e4: # E: Exception type must be derived from BaseException pass except (NotBaseDerived, E2) as e5: # E: Exception type must be derived from BaseException pass except (NotBaseDerived, E1, E2) as e6: # E: Exception type must be derived from BaseException pass except (E1, E2, NotBaseDerived) as e6: # E: Exception type must be derived from BaseException pass [builtins fixtures/exception.pyi] [case testReuseTryExceptionVariable] import typing class E1(BaseException): pass class E2(BaseException): pass try: pass except E1 as e: pass try: pass except E1 as e: pass try: pass except E2 as e: pass e + 1 # E: Trying to read deleted variable 'e' e = E1() # E: Assignment to variable 'e' outside except: block [builtins fixtures/exception.pyi] [case testReuseDefinedTryExceptionVariable] import typing class E1(BaseException): pass class E2(BaseException): pass e = 1 def f(): e # Prevent redefinition e = 1 try: pass except E1 as e: pass e = 1 # E: Assignment to variable 'e' outside except: block e = E1() # E: Assignment to variable 'e' outside except: block [builtins fixtures/exception.pyi] [case testExceptionVariableReuseInDeferredNode1] def f(*a: BaseException) -> int: x try: pass except BaseException as err: pass try: pass except BaseException as err: f(err) return 0 x = f() [builtins fixtures/exception.pyi] [case testExceptionVariableReuseInDeferredNode2] def f(*a: BaseException) -> int: try: pass except BaseException as err: pass 
x try: pass except BaseException as err: f(err) return 0 x = f() [builtins fixtures/exception.pyi] [case testExceptionVariableReuseInDeferredNode3] def f(*a: BaseException) -> int: try: pass except BaseException as err: pass try: pass except BaseException as err: f(err) x return 0 x = f() [builtins fixtures/exception.pyi] [case testExceptionVariableReuseInDeferredNode4] class EA(BaseException): a = None # type: int class EB(BaseException): b = None # type: str def f(*arg: BaseException) -> int: x try: pass except EA as err: f(err) a = err.a reveal_type(a) try: pass except EB as err: f(err) b = err.b reveal_type(b) return 0 x = f() [builtins fixtures/exception.pyi] [out] main:11: note: Revealed type is 'builtins.int' main:16: note: Revealed type is 'builtins.str' [case testExceptionVariableReuseInDeferredNode5] class EA(BaseException): a = None # type: int class EB(BaseException): b = None # type: str def f(*arg: BaseException) -> int: try: pass except EA as err: f(err) a = err.a reveal_type(a) x try: pass except EB as err: f(err) b = err.b reveal_type(b) return 0 x = f() [builtins fixtures/exception.pyi] [out] main:10: note: Revealed type is 'builtins.int' main:16: note: Revealed type is 'builtins.str' [case testExceptionVariableReuseInDeferredNode6] class EA(BaseException): a = None # type: int class EB(BaseException): b = None # type: str def f(*arg: BaseException) -> int: try: pass except EA as err: f(err) a = err.a reveal_type(a) try: pass except EB as err: f(err) b = err.b reveal_type(b) x return 0 x = f() [builtins fixtures/exception.pyi] [out] main:10: note: Revealed type is 'builtins.int' main:15: note: Revealed type is 'builtins.str' [case testArbitraryExpressionAsExceptionType] import typing a = BaseException try: pass except a as b: b = BaseException() b = object() # E: Incompatible types in assignment (expression has type "object", variable has type "BaseException") [builtins fixtures/exception.pyi] [case testInvalidExceptionCallable] import typing def 
exc() -> BaseException: pass try: pass except exc as e: pass # E: Exception type must be derived from BaseException except BaseException() as b: pass # E: Exception type must be derived from BaseException [builtins fixtures/exception.pyi] [case testTupleValueAsExceptionType] import typing def exc() -> BaseException: pass class E1(BaseException): pass class E1_1(E1): pass class E1_2(E1): pass exs1 = (E1, E1_1, E1_2) try: pass except exs1 as e1: x = e1 # type: E1 y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_1") z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2") exs2 = (E1_1, E1_2) try: pass except exs2 as e2: a = e2 # type: E1 b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1") c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2") exs3 = (E1, (E1_1, (E1_2,))) try: pass except exs3 as e3: pass # E: Exception type must be derived from BaseException [builtins fixtures/exception.pyi] [case testInvalidTupleValueAsExceptionType] import typing def exc() -> BaseException: pass class E1(BaseException): pass class E2(E1): pass exs1 = (E1, E2, int) try: pass except exs1 as e: pass # E: Exception type must be derived from BaseException [builtins fixtures/exception.pyi] [case testOverloadedExceptionType] from foo import * [file foo.pyi] from typing import overload class E(BaseException): @overload def __init__(self) -> None: pass @overload def __init__(self, x) -> None: pass try: pass except E as e: e = E() e = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "E") [builtins fixtures/exception.pyi] [case testExceptionWithAnyBaseClass] from typing import Any E = None # type: Any class EE(E): pass raise EE() raise EE [builtins fixtures/exception.pyi] [case 
testExceptionIsType] from typing import Type class B(BaseException): pass def f(e: Type[B]): try: pass except e: pass def g(e: Type[BaseException]): try: pass except e as err: reveal_type(err) def h(e: Type[int]): try: pass except e: pass [builtins fixtures/exception.pyi] [out] main:9: note: Revealed type is 'builtins.BaseException' main:12: error: Exception type must be derived from BaseException -- Del statement -- ------------- [case testDelStmtWithIndex] a, b = None, None # type: (A, B) del b[a] del b[b] # E: Argument 1 to "__delitem__" of "B" has incompatible type "B"; expected "A" del a[a] # E: "A" has no attribute "__delitem__" del a[b] # E: "A" has no attribute "__delitem__" class B: def __delitem__(self, index: 'A'): pass class A: pass [case testDelStmtWithAttribute] class A: def f(self): pass x = 0 a = A() del a.f del a.x del a.z # E: "A" has no attribute "z" [case testDelStatementWithTuple] class A: x = 0 a = A() del a.x, a.y # E: "A" has no attribute "y" [case testDelStatementWithAssignmentSimple] a = 1 a + 1 del a a + 1 # E: Trying to read deleted variable 'a' [builtins fixtures/ops.pyi] [case testDelStatementWithAssignmentTuple] a = 1 b = 1 del (a, b) b + 1 # E: Trying to read deleted variable 'b' [builtins fixtures/ops.pyi] [case testDelStatementWithAssignmentList] a = 1 b = 1 del [a, b] b + 1 # E: Trying to read deleted variable 'b' [builtins fixtures/list.pyi] [case testDelStatementWithAssignmentClass] class C: a = 1 c = C() c.a = 1 c.a + 1 del c.a c.a + 1 [builtins fixtures/ops.pyi] [case testDelStatementWithConditions] x = 5 del x if x: ... # E: Trying to read deleted variable 'x' def f(x): return x if 0: ... elif f(x): ... # E: Trying to read deleted variable 'x' while x == 5: ... 
# E: Trying to read deleted variable 'x' -- Yield statement -- --------------- [case testSimpleYield] from typing import Iterator def f() -> Iterator[int]: yield 1 yield '' # E: Incompatible types in "yield" (actual type "str", expected type "int") [builtins fixtures/for.pyi] [out] [case testYieldInFunctionReturningGenerator] from typing import Generator def f() -> Generator[int, None, None]: yield 1 [builtins fixtures/for.pyi] [out] [case testYieldInFunctionReturningIterable] from typing import Iterable def f() -> Iterable[int]: yield 1 [builtins fixtures/for.pyi] [out] [case testYieldInFunctionReturningObject] def f() -> object: yield 1 [builtins fixtures/for.pyi] [out] [case testYieldInFunctionReturningAny] from typing import Any def f() -> Any: yield object() [out] [case testYieldInFunctionReturningFunction] from typing import Callable def f() -> Callable[[], None]: # E: The return type of a generator function should be "Generator" or one of its supertypes yield object() [out] [case testYieldInDynamicallyTypedFunction] import typing def f(): yield f [case testWithInvalidInstanceReturnType] import typing def f() -> int: # E: The return type of a generator function should be "Generator" or one of its supertypes yield 1 [builtins fixtures/for.pyi] [out] [case testTypeInferenceContextAndYield] from typing import List, Iterator def f() -> 'Iterator[List[int]]': yield [] yield [object()] # E: List item 0 has incompatible type "object"; expected "int" [builtins fixtures/for.pyi] [out] [case testYieldAndReturnWithoutValue] from typing import Iterator def f() -> Iterator[int]: yield 1 return [builtins fixtures/for.pyi] [case testYieldWithNoValue] from typing import Iterator def f() -> Iterator[None]: yield [builtins fixtures/for.pyi] [case testYieldWithNoValueWhenValueRequired] from typing import Iterator def f() -> Iterator[int]: yield # E: Yield value expected [builtins fixtures/for.pyi] [out] [case testYieldWithExplicitNone] from typing import Iterator def f() -> 
Iterator[None]: yield None [builtins fixtures/for.pyi] [out] -- Yield from statement -- -------------------- -- -- (It's not really a statement, but don't want to move the tests.) [case testSimpleYieldFromWithIterator] from typing import Iterator def g() -> Iterator[str]: yield '42' def h() -> Iterator[int]: yield 42 def f() -> Iterator[str]: yield from g() yield from h() # E: Incompatible types in "yield from" (actual type "int", expected type "str") [out] [case testYieldFromAppliedToAny] from typing import Any def g() -> Any: yield object() def f() -> Any: yield from g() [out] [case testYieldFromInFunctionReturningFunction] from typing import Iterator, Callable def g() -> Iterator[int]: yield 42 def f() -> Callable[[], None]: # E: The return type of a generator function should be "Generator" or one of its supertypes yield from g() [out] [case testYieldFromNotIterableReturnType] from typing import Iterator def g() -> Iterator[int]: yield 42 def f() -> int: # E: The return type of a generator function should be "Generator" or one of its supertypes yield from g() [out] [case testYieldFromNotAppliedIterator] from typing import Iterator def g() -> int: return 42 def f() -> Iterator[int]: yield from g() # E: "yield from" can't be applied to "int" [out] [case testYieldFromCheckIncompatibleTypesTwoIterables] from typing import List, Iterator def g() -> Iterator[List[int]]: yield [2, 3, 4] def f() -> Iterator[List[int]]: yield from g() yield from [1, 2, 3] # E: Incompatible types in "yield from" (actual type "int", expected type "List[int]") [builtins fixtures/for.pyi] [out] [case testYieldFromNotAppliedToNothing] def h(): yield from # E: invalid syntax [out] [case testYieldFromAndYieldTogether] from typing import Iterator def f() -> Iterator[str]: yield "g1 ham" yield from g() yield "g1 eggs" def g() -> Iterator[str]: yield "g2 spam" yield "g2 more spam" [out] [case testYieldFromAny] from typing import Iterator def f(a): b = yield from a return b [out] [case 
testYieldFromGenericCall] from typing import Generator, TypeVar T = TypeVar('T') def f(a: T) -> Generator[int, str, T]: pass def g() -> Generator[int, str, float]: r = yield from f('') reveal_type(r) # N: Revealed type is 'builtins.str*' return 3.14 [case testYieldFromTupleStatement] from typing import Generator def g() -> Generator[int, None, None]: yield from () yield from (0, 1, 2) yield from (0, "ERROR") # E: Incompatible types in "yield from" (actual type "object", expected type "int") yield from ("ERROR",) # E: Incompatible types in "yield from" (actual type "str", expected type "int") [builtins fixtures/tuple.pyi] -- With statement -- -------------- [case testSimpleWith] import typing class A: def __enter__(self) -> None: pass def __exit__(self, x, y, z) -> None: pass with A(): object(A) # E: Too many arguments for "object" [case testWithStmtAndInvalidExit] import typing class A: def __enter__(self) -> None: pass def __exit__(self, x, y) -> None: pass with A(): # E: Too many arguments for "__exit__" of "A" pass [case testWithStmtAndMissingExit] import typing class A: def __enter__(self) -> None: pass with A(): # E: "A" has no attribute "__exit__" pass [case testWithStmtAndInvalidEnter] import typing class A: def __enter__(self, x) -> None: pass def __exit__(self, x, y, z) -> None: pass with A(): # E: Too few arguments for "__enter__" of "A" pass [case testWithStmtAndMissingEnter] import typing class A: def __exit__(self, x, y, z) -> None: pass with A(): # E: "A" has no attribute "__enter__" pass [case testWithStmtAndMultipleExprs] import typing class A: def __enter__(self) -> None: pass def __exit__(self, x, y, z) -> None: pass class B: def __enter__(self) -> None: pass with A(), B(): # E: "B" has no attribute "__exit__" pass with B(), A(): # E: "B" has no attribute "__exit__" pass [case testWithStmtAndResult] import typing class B: pass class A: def __enter__(self) -> B: pass def __exit__(self, x, y, z): pass with A() as b: b = B() b = A() # E: Incompatible 
types in assignment (expression has type "A", variable has type "B") [case testWithStmtAndMultipleResults] from typing import TypeVar, Generic t = TypeVar('t') class B: pass class C: pass class A(Generic[t]): def __enter__(self) -> t: pass def __exit__(self, x, y, z): pass a_b = A() # type: A[B] a_c = A() # type: A[C] with a_b as b, a_c as c: b = B() c = C() b = c # E: Incompatible types in assignment (expression has type "C", variable has type "B") c = b # E: Incompatible types in assignment (expression has type "B", variable has type "C") [case testWithStmtAndComplexTarget] from typing import Tuple class A: def __enter__(self) -> Tuple[int, str]: pass def __exit__(self, x, y, z): pass with A() as (a, b): a = 1 b = '' a = b # E: Incompatible types in assignment (expression has type "str", variable has type "int") [builtins fixtures/tuple.pyi] [case testWithStmtTypeComment] from typing import Union class A: def __enter__(self) -> int: pass def __exit__(self, x, y, z): pass with A(): # type: int # E: Invalid type comment: "with" statement has no targets pass with A() as a: # type: int pass with A() as b: # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass with A() as c: # type: int, int # E: Syntax error in type annotation # N: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) pass with A() as d: # type: Union[int, str] reveal_type(d) # N: Revealed type is 'Union[builtins.int, builtins.str]' [case testWithStmtTupleTypeComment] from typing import Tuple class A: def __enter__(self) -> Tuple[int, int]: pass def __exit__(self, x, y, z): pass with A(): pass with A() as a: # type: Tuple[int, int] pass with A() as b: # type: Tuple[int, str] # E: Incompatible types in assignment (expression has type "Tuple[int, int]", variable has type "Tuple[int, str]") pass with A() as (c, d): # type: int, int pass with A() as (e, f): # type: Tuple[int, int] pass with A() as (g, h): # type: int # E: Tuple type expected for 
multiple variables pass with A() as (i, j): # type: int, int, str # E: Incompatible number of tuple items pass with A() as (k, l): # type: int, str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass [builtins fixtures/tuple.pyi] [case testWithStmtComplexTypeComment] from typing import Tuple class A: def __enter__(self) -> Tuple[int, int]: pass def __exit__(self, x, y, z): pass class B: def __enter__(self) -> str: pass def __exit__(self, x, y, z): pass with A() as a, A() as (b, c), B() as d: # type: Tuple[int, int], (int, int), str pass with A() as e, A() as (f, g), B() as h: # type: Tuple[int, int], Tuple[int, int], str pass with A() as i, A() as (j, k), B() as l: # type: (int, int), (int, int), str # E: Syntax error in type annotation # N: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) pass with A(), A(), B() as m, A() as n, B(), B() as o: # type: int, Tuple[int, int] # E: Incompatible number of types for "with" targets pass with A(), B(), B() as p, A(), A(): # type: str pass [builtins fixtures/tuple.pyi] [case testWithStmtBoolExitReturnWithResultFalse] from typing import Optional class InvalidReturn1: def __exit__(self, x, y, z) -> bool: # E: "bool" is invalid as return type for "__exit__" that always returns False \ # N: Use "typing_extensions.Literal[False]" as the return type or change it to "None" \ # N: If return type of "__exit__" implies that it may return True, the context manager may swallow exceptions return False class InvalidReturn2: def __exit__(self, x, y, z) -> Optional[bool]: # E: "bool" is invalid as return type for "__exit__" that always returns False \ # N: Use "typing_extensions.Literal[False]" as the return type or change it to "None" \ # N: If return type of "__exit__" implies that it may return True, the context manager may swallow exceptions if int(): return False else: return False class InvalidReturn3: def __exit__(self, x, y, z) -> bool: # E: "bool" is invalid as return type for 
"__exit__" that always returns False \ # N: Use "typing_extensions.Literal[False]" as the return type or change it to "None" \ # N: If return type of "__exit__" implies that it may return True, the context manager may swallow exceptions def nested() -> bool: return True return False [builtins fixtures/bool.pyi] [case testWithStmtBoolExitReturnOkay] from typing_extensions import Literal class GoodReturn1: def __exit__(self, x, y, z) -> bool: if int(): return True else: return False class GoodReturn2: def __exit__(self, x, y, z) -> bool: if int(): return False else: return True class GoodReturn3: def __exit__(self, x, y, z) -> bool: return bool() class GoodReturn4: def __exit__(self, x, y, z) -> None: return class GoodReturn5: def __exit__(self, x, y, z) -> None: return None class GoodReturn6: def exit(self, x, y, z) -> bool: return False class GoodReturn7: def exit(self, x, y, z) -> bool: pass class MissingReturn: def exit(self, x, y, z) -> bool: # E: Missing return statement x = 0 class LiteralReturn: def __exit__(self, x, y, z) -> Literal[False]: return False [builtins fixtures/bool.pyi] [case testWithStmtBoolExitReturnInStub] import stub [file stub.pyi] from typing import Optional class C1: def __exit__(self, x, y, z) -> bool: ... 
class C2: def __exit__(self, x, y, z) -> bool: pass class C3: def __exit__(self, x, y, z) -> Optional[bool]: pass [builtins fixtures/bool.pyi] -- Chained assignment -- ------------------ [case testChainedAssignment] import typing class A: pass class B: pass x = y = A() if int(): x = A() if int(): y = A() if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): y = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testChainedAssignment2] import typing def f() -> None: x = 1 y = 'x' if int(): x = y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int") x = y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/primitives.pyi] [out] [case testChainedAssignmentWithType] x = y = None # type: int if int(): x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): x = 1 if int(): y = 1 -- Star assignment -- --------------- [case testAssignListToStarExpr] from typing import List bs, cs = None, None # type: List[A], List[B] if int(): *bs, b = bs if int(): *bs, c = cs # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]") if int(): *ns, c = cs if int(): nc = cs class A: pass class B: pass [builtins fixtures/list.pyi] -- Type aliases -- ------------ [case testSimpleTypeAlias] import typing foo = int def f(x: foo) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testTypeAliasDefinedInAModule] import typing import m def f(x: m.foo) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [file m.py] import typing foo = int [case testTypeAliasDefinedInAModule2] import typing from m import foo 
def f(x: foo) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [file m.py] import typing foo = int -- nonlocal and global -- ------------------- [case testTypeOfGlobalUsed] import typing g = A() def f() -> None: global g g = B() class A(): pass class B(): pass [out] main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeOfNonlocalUsed] import typing def f() -> None: a = A() def g() -> None: nonlocal a a = B() class A(): pass class B(): pass [out] main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeOfOuterMostNonlocalUsed] import typing def f() -> None: a = A() def g() -> None: a = B() def h() -> None: nonlocal a a = A() a = B() class A(): pass class B(): pass [out] main:8: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testAugmentedAssignmentIntFloat] weight0 = 65.5 reveal_type(weight0) # N: Revealed type is 'builtins.float' if int(): weight0 = 65 reveal_type(weight0) # N: Revealed type is 'builtins.int' weight0 *= 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "float") weight0 *= 0.5 reveal_type(weight0) # N: Revealed type is 'builtins.float' weight0 *= object() # E: Unsupported operand types for * ("float" and "object") reveal_type(weight0) # N: Revealed type is 'builtins.float' [builtins fixtures/float.pyi] [case testAugmentedAssignmentIntFloatMember] class A: def __init__(self) -> None: self.weight0 = 65.5 reveal_type(self.weight0) # N: Revealed type is 'builtins.float' self.weight0 = 65 reveal_type(self.weight0) # N: Revealed type is 'builtins.int' self.weight0 *= 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "float") self.weight0 *= 0.5 reveal_type(self.weight0) # N: Revealed type is 'builtins.float' self.weight0 *= object() # E: Unsupported operand types for * ("float" and 
"object") reveal_type(self.weight0) # N: Revealed type is 'builtins.float' [builtins fixtures/float.pyi] [case testAugmentedAssignmentIntFloatDict] from typing import Dict d = {'weight0': 65.5} reveal_type(d['weight0']) # N: Revealed type is 'builtins.float*' d['weight0'] = 65 reveal_type(d['weight0']) # N: Revealed type is 'builtins.float*' d['weight0'] *= 'a' # E: Unsupported operand types for * ("float" and "str") d['weight0'] *= 0.5 reveal_type(d['weight0']) # N: Revealed type is 'builtins.float*' d['weight0'] *= object() # E: Unsupported operand types for * ("float" and "object") reveal_type(d['weight0']) # N: Revealed type is 'builtins.float*' [builtins fixtures/floatdict.pyi] [case testForwardRefsInForStatementImplicit] from typing import List, NamedTuple lst: List[N] for i in lst: reveal_type(i.x) # N: Revealed type is 'builtins.int' a: str = i[0] # E: Incompatible types in assignment (expression has type "int", variable has type "str") N = NamedTuple('N', [('x', int)]) [builtins fixtures/list.pyi] [out] [case testForwardRefsInForStatement] from typing import List, NamedTuple lst: List[M] for i in lst: # type: N reveal_type(i.x) # N: Revealed type is 'builtins.int' a: str = i[0] # E: Incompatible types in assignment (expression has type "int", variable has type "str") N = NamedTuple('N', [('x', int)]) class M(N): pass [builtins fixtures/list.pyi] [out] [case testForwardRefsInWithStatementImplicit] from typing import ContextManager, Any from mypy_extensions import TypedDict cm: ContextManager[N] with cm as g: a: int = g['x'] N = TypedDict('N', {'x': int}) [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] [case testForwardRefsInWithStatement] from typing import ContextManager, Any from mypy_extensions import TypedDict cm: ContextManager[Any] with cm as g: # type: N a: str = g['x'] # E: Incompatible types in assignment (expression has type "int", variable has type "str") N = TypedDict('N', {'x': int}) [builtins fixtures/dict.pyi] [typing 
fixtures/typing-full.pyi] [out] [case testGlobalWithoutInitialization] from typing import List def foo() -> None: global bar # TODO: Confusing error message bar = [] # type: List[str] # E: Name 'bar' already defined (possibly by an import) bar # E: Name 'bar' is not defined def foo2(): global bar2 bar2 = [] # type: List[str] bar2 [builtins fixtures/list.pyi] mypy-0.761/test-data/unit/check-super.test0000644€tŠÔÚ€2›s®0000002206613576752246024705 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type checker related to super(). -- Supertype member reference -- -------------------------- [case testAccessingSupertypeMethod] class B: def f(self) -> 'B': pass class A(B): def f(self) -> 'A': a, b = None, None # type: (A, B) if int(): a = super().f() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = super().g() # E: "g" undefined in superclass b = super().f() return a [out] [case testAccessingSuperTypeMethodWithArgs] from typing import Any class B: def f(self, y: 'A') -> None: pass class A(B): def f(self, y: Any) -> None: a, b = None, None # type: (A, B) super().f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A" super().f(a) self.f(b) self.f(a) [out] [case testAccessingSuperInit] import typing class B: def __init__(self, x: A) -> None: pass class A(B): def __init__(self) -> None: super().__init__(B(None)) # E: Argument 1 to "__init__" of "B" has incompatible type "B"; expected "A" super().__init__() # E: Too few arguments for "__init__" of "B" super().__init__(A()) [out] [case testAccessingSuperMemberWithDeepHierarchy] import typing class C: def f(self) -> None: pass class B(C): pass class A(B): def f(self) -> None: super().g() # E: "g" undefined in superclass super().f() [out] [case testAssignToBaseClassMethod] import typing class A: def f(self) -> None: pass class B(A): def g(self) -> None: super().f = None [out] main:6: error: Invalid assignment target [case testSuperWithMultipleInheritance] 
import typing class A: def f(self) -> None: pass class B: def g(self, x: int) -> None: pass class C(A, B): def f(self) -> None: super().f() super().g(1) super().f(1) # E: Too many arguments for "f" of "A" super().g() # E: Too few arguments for "g" of "B" super().not_there() # E: "not_there" undefined in superclass [out] [case testSuperWithNew] class A: def __new__(cls, x: int) -> 'A': return object.__new__(cls) class B(A): def __new__(cls, x: int, y: str = '') -> 'B': super().__new__(cls, 1) super().__new__(cls, 1, '') # E: Too many arguments for "__new__" of "A" return None B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int" B(1) B(1, 'x') [builtins fixtures/__new__.pyi] reveal_type(C.a) # N: Revealed type is 'Any' [out] [case testSuperWithUnknownBase] from typing import Any B = None # type: Any class C(B): def __init__(self, arg=0): super(C, self).__init__(arg, arg=arg) [out] [case testSuperSilentInDynamicFunction] class A: pass class B(A): def foo(self): super(B, self).foo() # Not an error [out] [case testSuperWithAny] class B: def f(self) -> None: pass class C(B): def h(self, x) -> None: reveal_type(super(x, x).f) # N: Revealed type is 'def ()' reveal_type(super(C, x).f) # N: Revealed type is 'def ()' reveal_type(super(C, type(x)).f) # N: Revealed type is 'def (self: __main__.B)' [case testSuperInUnannotatedMethod] class C: def h(self): super(C, self).xyz [case testSuperWithTypeObjects] from typing import Type class A: def f(self) -> object: pass class B(A): def f(self) -> int: pass @classmethod def g(cls, x) -> None: reveal_type(super(cls, x).f) # N: Revealed type is 'def () -> builtins.object' def h(self, t: Type[B]) -> None: reveal_type(super(t, self).f) # N: Revealed type is 'def () -> builtins.object' [builtins fixtures/classmethod.pyi] [case testSuperWithTypeTypeAsSecondArgument] class B: def f(self) -> None: pass class C(B): def __new__(cls) -> 'C': super(C, cls).f return C() [case testSuperWithGenericSelf] from typing import TypeVar 
T = TypeVar('T', bound='C') class B: def f(self) -> float: pass class C(B): def f(self) -> int: pass def g(self: T) -> T: reveal_type(super(C, self).f) # N: Revealed type is 'def () -> builtins.float' return self [case testSuperWithTypeVarValues1] from typing import TypeVar T = TypeVar('T', 'C', 'D') S = TypeVar('S', 'B', 'C') class B: def f(self) -> None: pass class C(B): def f(self) -> None: pass def g(self, x: T, y: S) -> None: super(C, x).f super(C, y).f # E: Argument 2 for "super" not an instance of argument 1 class D(C): pass [case testSuperWithTypeVarValues2] from typing import TypeVar, Generic T = TypeVar('T', 'C', 'D') S = TypeVar('S', 'B', 'C') class B: def f(self) -> None: pass class C(B, Generic[T, S]): def f(self) -> None: pass def g(self, x: T, y: S) -> None: super(C, x).f super(C, y).f # E: Argument 2 for "super" not an instance of argument 1 class D(C): pass [case testSuperInClassMethod] from typing import Union class A: def f(self, i: int) -> None: pass class B(A): def f(self, i: Union[int, str]) -> None: pass @classmethod def g(cls, i: int) -> None: super().f(B(), i) super(B, cls).f(cls(), i) super(B, B()).f(i) super().f(B(), '') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int" super(B, cls).f(cls(), '') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int" super(B, B()).f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [builtins fixtures/classmethod.pyi] [case testSuperWithUnrelatedTypes] from typing import Union class A: def f(self, s: str) -> None: pass class B(A): def f(self, i: Union[int, str]) -> None: pass class C: def g(self, b: B) -> None: super(B, b).f('42') super(B, b).f(42) # E: Argument 1 to "f" of "A" has incompatible type "int"; expected "str" [case testSuperOutsideClass] from typing import Union class A: def f(self, s: str) -> None: pass class B(A): def f(self, i: Union[int, str]) -> None: pass def g(b: B) -> None: super(B, b).f('42') super(B, b).f(42) # E: 
Argument 1 to "f" of "A" has incompatible type "int"; expected "str" [case testSuperInInitSubclass] class A: def __init_subclass__(cls) -> None: super().__init_subclass__() [builtins fixtures/__init_subclass__.pyi] [case testSuperClassGetItem] from typing import TypeVar, Type, Any T = TypeVar("T", bound="B") class A: def __class_getitem__(cls, item) -> None: pass class B(A): def __class_getitem__(cls: Type[T], item: Any) -> None: super(B, cls).__class_getitem__(item) -- Invalid uses of super() -- ----------------------- [case testSuperOutsideMethodNoCrash] class C: a = super().whatever # E: super() outside of a method is not supported [case testSuperWithSingleArgument] class B: def f(self) -> None: pass class C(B): def __init__(self) -> None: super(C).f() # E: "super" with a single argument not supported [case testSuperWithThreeArguments] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(C, self, 1).f() # E: Too many arguments for "super" [case testSuperWithNonPositionalArguments] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(C, x=self).f() # E: "super" only accepts positional arguments super(**{}).f() # E: "super" only accepts positional arguments [case testSuperWithVarArgs] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(*(C, self)).f() # E: Varargs not supported with "super" [case testInvalidSuperArg] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(x, y).f # E: Name 'x' is not defined # E: Name 'y' is not defined [case testTypeErrorInSuperArg] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(1(), self).f # E: "int" not callable super(C, ''()).f # E: "str" not callable [case testFlippedSuperArgs] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(self, C).f # E: Argument 1 for "super" must be a type object; got a non-type instance [case testInvalidFirstSuperArg] class B: def f(self) -> None: pass class C(B): def 
h(self) -> None: super(None, C).f # E: Argument 1 for "super" must be a type object; got "None" [case testInvalidSecondArgumentToSuper] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(C, 1).f # E: Argument 2 for "super" not an instance of argument 1 super(C, None).f # E: Unsupported argument 2 for "super" [case testSuperInMethodWithNoArguments] class A: def f(self) -> None: pass class B(A): def g() -> None: # E: Method must have at least one argument super().f() # E: super() requires one or more positional arguments in enclosing function def h(self) -> None: def a() -> None: super().f() # E: super() requires one or more positional arguments in enclosing function [case testSuperWithUnsupportedTypeObject] from typing import Type class A: def f(self) -> int: pass class B(A): def h(self, t: Type[None]) -> None: super(t, self).f # E: Unsupported argument 1 for "super" mypy-0.761/test-data/unit/check-tuples.test0000644€tŠÔÚ€2›s®0000011400513576752246025056 0ustar jukkaDROPBOX\Domain Users00000000000000-- Normal assignment and subtyping -- ------------------------------- [case testTupleAssignmentWithTupleTypes] from typing import Tuple t1 = None # type: Tuple[A] t2 = None # type: Tuple[B] t3 = None # type: Tuple[A, A] t4 = None # type: Tuple[A, B] t5 = None # type: Tuple[B, A] if int(): t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[B]", variable has type "Tuple[A]") if int(): t1 = t3 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]") if int(): t3 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A]", variable has type "Tuple[A, A]") if int(): t3 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[A, A]") if int(): t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, A]") # Ok if int(): t1 = t1 if int(): t2 = t2 if int(): t3 = t3 if int(): 
t4 = t4 if int(): t5 = t5 class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testTupleSubtyping] from typing import Tuple t1 = None # type: Tuple[A, A] t2 = None # type: Tuple[A, B] t3 = None # type: Tuple[B, A] if int(): t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]") t2 = t3 # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, B]") t3 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[B, A]") t3 = t2 # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[B, A]") t1 = t2 t1 = t3 class A: pass class B(A): pass [builtins fixtures/tuple.pyi] [case testTupleCompatibilityWithOtherTypes] from typing import Tuple a, o = None, None # type: (A, object) t = None # type: Tuple[A, A] if int(): a = t # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "A") if int(): t = o # E: Incompatible types in assignment (expression has type "object", variable has type "Tuple[A, A]") if int(): t = a # E: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, A]") # TODO: callable types + tuples # Ok if int(): o = t if int(): t = None class A: pass [builtins fixtures/tuple.pyi] [case testNestedTupleTypes] from typing import Tuple t1 = None # type: Tuple[A, Tuple[A, A]] t2 = None # type: Tuple[B, Tuple[B, B]] if int(): t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, B]]") if int(): t1 = t2 class A: pass class B(A): pass [builtins fixtures/tuple.pyi] [case testNestedTupleTypes2] from typing import Tuple t1 = None # type: Tuple[A, Tuple[A, A]] t2 = None # type: Tuple[B, Tuple[B, B]] if int(): t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, 
B]]") if int(): t1 = t2 class A: pass class B(A): pass [builtins fixtures/tuple.pyi] [case testSubtypingWithNamedTupleType] from typing import Tuple t1 = None # type: Tuple[A, A] t2 = None # type: tuple if int(): t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[Any, ...]", variable has type "Tuple[A, A]") if int(): t2 = t1 class A: pass [builtins fixtures/tuple.pyi] [case testTupleInitializationWithNone] from typing import Tuple t = None # type: Tuple[A, A] t = None class A: pass [builtins fixtures/tuple.pyi] -- Tuple expressions -- ----------------- [case testTupleExpressions] from typing import Tuple t1 = None # type: tuple t2 = None # type: Tuple[A] t3 = None # type: Tuple[A, B] a, b, c = None, None, None # type: (A, B, C) if int(): t2 = () # E: Incompatible types in assignment (expression has type "Tuple[]", variable has type "Tuple[A]") if int(): t2 = (a, a) # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]") if int(): t3 = (a, a) # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]") if int(): t3 = (b, b) # E: Incompatible types in assignment (expression has type "Tuple[B, B]", variable has type "Tuple[A, B]") if int(): t3 = (a, b, a) # E: Incompatible types in assignment (expression has type "Tuple[A, B, A]", variable has type "Tuple[A, B]") t1 = () t1 = (a,) t2 = (a,) t3 = (a, b) t3 = (a, c) t3 = (None, None) class A: pass class B: pass class C(B): pass [builtins fixtures/tuple.pyi] [case testVoidValueInTuple] import typing (None, f()) # E: "f" does not return a value (f(), None) # E: "f" does not return a value def f() -> None: pass [builtins fixtures/tuple.pyi] -- Indexing -- -------- [case testIndexingTuples] from typing import Tuple t1 = None # type: Tuple[A, B] t2 = None # type: Tuple[A] t3 = None # type: Tuple[A, B, C, D, E] a, b = None, None # type: (A, B) x = None # type: Tuple[A, B, C] y = None # type: Tuple[A, C, E] n = 
0 if int(): a = t1[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = t1[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B") t1[2] # E: Tuple index out of range t1[3] # E: Tuple index out of range t2[1] # E: Tuple index out of range reveal_type(t1[n]) # N: Revealed type is 'Union[__main__.A, __main__.B]' reveal_type(t3[n:]) # N: Revealed type is 'builtins.tuple[Union[__main__.A, __main__.B, __main__.C, __main__.D, __main__.E]]' if int(): b = t1[(0)] # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = t1[0] if int(): b = t1[1] if int(): b = t1[-1] if int(): a = t1[(0)] if int(): x = t3[0:3] # type (A, B, C) if int(): y = t3[0:5:2] # type (A, C, E) if int(): x = t3[:-2] # type (A, B, C) class A: pass class B: pass class C: pass class D: pass class E: pass [builtins fixtures/tuple.pyi] [case testIndexingTuplesWithNegativeIntegers] from typing import Tuple t1 = None # type: Tuple[A, B] t2 = None # type: Tuple[A] a, b = None, None # type: A, B if int(): a = t1[-1] # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = t1[-2] # E: Incompatible types in assignment (expression has type "A", variable has type "B") t1[-3] # E: Tuple index out of range t1[-4] # E: Tuple index out of range if int(): b = t2[(-1)] # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = t1[-2] if int(): b = t1[-1] if int(): a = t2[(-1)] class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testAssigningToTupleItems] from typing import Tuple t = None # type: Tuple[A, B] n = 0 t[0] = A() # E: Unsupported target for indexed assignment t[2] = A() # E: Unsupported target for indexed assignment t[n] = A() # E: Unsupported target for indexed assignment class A: pass class B: pass [builtins fixtures/tuple.pyi] -- Multiple assignment -- ------------------- [case 
testMultipleAssignmentWithTuples] from typing import Tuple t1 = None # type: Tuple[A, B] t2 = None # type: Tuple[A, B, A] a, b = None, None # type: (A, B) (a1, b1) = None, None # type: Tuple[A, B] reveal_type(a1) # N: Revealed type is '__main__.A' reveal_type(b1) # N: Revealed type is '__main__.B' if int(): a, a = t1 # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b, b = t1 # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a, b, b = t2 # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a, b = t1 if int(): a, b, a1 = t2 class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithSquareBracketTuples] from typing import Tuple def avoid_confusing_test_parser() -> None: t1 = None # type: Tuple[A, B] t2 = None # type: Tuple[A, B, A] [a, b] = None, None # type: (A, B) [a1, b1] = None, None # type: Tuple[A, B] reveal_type(a) # N: Revealed type is '__main__.A' reveal_type(b) # N: Revealed type is '__main__.B' reveal_type(a1) # N: Revealed type is '__main__.A' reveal_type(b1) # N: Revealed type is '__main__.B' if int(): [a, a] = t1 # E: Incompatible types in assignment (expression has type "B", variable has type "A") [b, b] = t1 # E: Incompatible types in assignment (expression has type "A", variable has type "B") [a, b, b] = t2 # E: Incompatible types in assignment (expression has type "A", variable has type "B") [a, b] = t1 [a, b, a1] = t2 [a2, b2] = t1 reveal_type(a2) # N: Revealed type is '__main__.A' reveal_type(b2) # N: Revealed type is '__main__.B' class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithSquareBracketTuplesPython2] # flags: --python-version 2.7 --no-strict-optional from typing import Tuple def avoid_confusing_test_parser(): # type: () -> None t1 = None # type: Tuple[A, B] t2 = None # type: Tuple[A, B, A] [a, b] = None, None # type: Tuple[A, B] [a1, 
b1] = None, None # type: Tuple[A, B] reveal_type(a1) # N: Revealed type is '__main__.A' reveal_type(b1) # N: Revealed type is '__main__.B' if int(): [a, a] = t1 # E: Incompatible types in assignment (expression has type "B", variable has type "A") [b, b] = t1 # E: Incompatible types in assignment (expression has type "A", variable has type "B") [a, b, b] = t2 # E: Incompatible types in assignment (expression has type "A", variable has type "B") [a, b] = t1 [a, b, a1] = t2 [a2, b2] = t1 reveal_type(a2) # N: Revealed type is '__main__.A' reveal_type(b2) # N: Revealed type is '__main__.B' class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithInvalidNumberOfValues] from typing import Tuple t1 = None # type: Tuple[A, A, A] a = None # type: A a, a = t1 # E: Too many values to unpack (2 expected, 3 provided) a, a, a, a = t1 # E: Need more than 3 values to unpack (4 expected) a, a, a = t1 class A: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithTupleExpressionRvalue] a, b = None, None # type: (A, B) if int(): a, b = a, a # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a, b = b, a \ # E: Incompatible types in assignment (expression has type "B", variable has type "A") \ # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a, b = a, b if int(): a, a = a, a class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testSubtypingInMultipleAssignment] a, b = None, None # type: (A, B) if int(): b, b = a, b # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b, b = b, a # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a, b = b, b if int(): b, a = b, b class A: pass class B(A): pass [builtins fixtures/tuple.pyi] [case testInitializationWithMultipleValues] a, b = None, None # type: (A, B) a1, b1 = a, a # type: (A, B) # E: Incompatible types 
in assignment (expression has type "A", variable has type "B") a2, b2 = b, b # type: (A, B) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a3, b3 = a # type: (A, B) # E: '__main__.A' object is not iterable a4, b4 = None # type: (A, B) # E: 'None' object is not iterable a5, b5 = a, b, a # type: (A, B) # E: Too many values to unpack (2 expected, 3 provided) ax, bx = a, b # type: (A, B) class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithNonTupleRvalue] a, b = None, None # type: (A, B) def f(): pass a, b = None # E: 'None' object is not iterable a, b = a # E: '__main__.A' object is not iterable a, b = f # E: 'def () -> Any' object is not iterable class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithIndexedLvalues] a, b = None, None # type: (A, B) aa, bb = None, None # type: (AA, BB) a[a], b[b] = a, bb # E: Incompatible types in assignment (expression has type "A", target has type "AA") a[a], b[b] = aa, b # E: Incompatible types in assignment (expression has type "B", target has type "BB") a[aa], b[b] = aa, bb # E: Invalid index type "AA" for "A"; expected type "A" a[a], b[bb] = aa, bb # E: Invalid index type "BB" for "B"; expected type "B" a[a], b[b] = aa, bb class A: def __setitem__(self, x: 'A', y: 'AA') -> None: pass class B: def __setitem__(self, x: 'B', y: 'BB') -> None: pass class AA: pass class BB: pass [builtins fixtures/tuple.pyi] [case testMultipleDeclarationWithParentheses] (a, b) = (None, None) # type: int, str if int(): a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): a = 1 b = '' [case testMultipleAssignmentWithExtraParentheses] a, b = None, None # type: (A, B) if int(): (a, b) = (a, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): (a, b) 
= (b, b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): ((a), (b)) = ((a), (a)) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): ((a), (b)) = ((b), (b)) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): [a, b] = a, a # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): [a, b] = b, b # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): (a, b) = (a, b) if int(): ((a), (b)) = ((a), (b)) if int(): [a, b] = a, b class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentUsingSingleTupleType] from typing import Tuple a, b = None, None # type: Tuple[int, str] if int(): a = 1 if int(): b = '' if int(): a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testMultipleAssignmentWithMixedVariables] a = b, c = 1, 1 x, y = p, q = 1, 1 u, v, w = r, s = 1, 1 # E: Need more than 2 values to unpack (3 expected) d, e = f, g, h = 1, 1 # E: Need more than 2 values to unpack (3 expected) -- Assignment to starred expressions -- --------------------------------- [case testAssignmentToStarMissingAnnotation] from typing import List t = 1, 2 a, b, *c = 1, 2 # E: Need type annotation for 'c' (hint: "c: List[] = ...") aa, bb, *cc = t # E: Need type annotation for 'cc' (hint: "cc: List[] = ...") [builtins fixtures/list.pyi] [case testAssignmentToStarAnnotation] from typing import List li, lo = None, None # type: List[int], List[object] a, b, *c = 1, 2 # type: int, int, List[int] if int(): c = lo # E: Incompatible types in assignment (expression has type "List[object]", variable has type "List[int]") if int(): c = li [builtins fixtures/list.pyi] [case testAssignmentToStarCount1] from 
typing import List ca = None # type: List[int] c = [1] if int(): a, b, *c = 1, # E: Need more than 1 value to unpack (2 expected) if int(): a, b, *c = 1, 2 if int(): a, b, *c = 1, 2, 3 if int(): a, b, *c = 1, 2, 3, 4 [builtins fixtures/list.pyi] [case testAssignmentToStarCount2] from typing import List ca = None # type: List[int] t1 = 1, t2 = 1, 2 t3 = 1, 2, 3 t4 = 1, 2, 3, 4 c = [1] if int(): a, b, *c = t1 # E: Need more than 1 value to unpack (2 expected) if int(): a, b, *c = t2 if int(): a, b, *c = t3 if int(): a, b, *c = t4 [builtins fixtures/list.pyi] [case testAssignmentToStarFromAny] from typing import Any, cast a, c = cast(Any, 1), C() p, *q = a c = a c = q class C: pass [case testAssignmentToComplexStar] from typing import List li = None # type: List[int] if int(): a, *(li) = 1, a, *(b, c) = 1, 2 # E: Need more than 1 value to unpack (2 expected) if int(): a, *(b, c) = 1, 2, 3 if int(): a, *(b, c) = 1, 2, 3, 4 # E: Too many values to unpack (2 expected, 3 provided) [builtins fixtures/list.pyi] [case testAssignmentToStarFromTupleType] from typing import List, Tuple li = None # type: List[int] la = None # type: List[A] ta = None # type: Tuple[A, A, A] if int(): a, *la = ta if int(): a, *li = ta # E: List item 0 has incompatible type "A"; expected "int" \ # E: List item 1 has incompatible type "A"; expected "int" if int(): a, *na = ta if int(): na = la na = a # E: Incompatible types in assignment (expression has type "A", variable has type "List[A]") class A: pass [builtins fixtures/list.pyi] [case testAssignmentToStarFromTupleInference] from typing import List li = None # type: List[int] la = None # type: List[A] a, *l = A(), A() if int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la class A: pass [builtins fixtures/list.pyi] [out] [case testAssignmentToStarFromListInference] from typing import List li = None # type: List[int] la = None # type: List[A] a, *l = [A(), A()] if 
int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la class A: pass [builtins fixtures/list.pyi] [out] [case testAssignmentToStarFromTupleTypeInference] from typing import List, Tuple li = None # type: List[int] la = None # type: List[A] ta = None # type: Tuple[A, A, A] a, *l = ta if int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la class A: pass [builtins fixtures/list.pyi] [out] [case testAssignmentToStarFromListTypeInference] from typing import List li = None # type: List[int] la = None # type: List[A] a, *l = la if int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la class A: pass [builtins fixtures/list.pyi] [out] [case testAssignmentToStarFromIterable] from typing import List, Tuple, Iterable class CustomIterable(Iterable[int]): pass a: List[int] b: Tuple[int, ...] 
c: Tuple[int, int, int] d: Iterable[int] e: CustomIterable a1, *a2 = a b1, *b2 = b c1, *c2 = c d1, *d2 = d e1, *e2 = e reveal_type(a2) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(b2) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(c2) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(d2) # N: Revealed type is 'builtins.list[builtins.int]' reveal_type(e2) # N: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/tuple.pyi] -- Nested tuple assignment -- ---------------------------- [case testNestedTupleAssignment1] a1, b1, c1 = None, None, None # type: (A, B, C) a2, b2, c2 = None, None, None # type: (A, B, C) if int(): a1, (b1, c1) = a2, (b2, c2) if int(): a1, (a1, (b1, c1)) = a2, (a2, (b2, c2)) if int(): a1, (a1, (a1, b1)) = a1, (a1, (a1, c1)) # E: Incompatible types in assignment (expression has type "C", variable has type "B") class A: pass class B: pass class C: pass [case testNestedTupleAssignment2] a1, b1, c1 = None, None, None # type: (A, B, C) a2, b2, c2 = None, None, None # type: (A, B, C) t = a1, b1 if int(): a2, b2 = t if int(): (a2, b2), c2 = t, c1 if int(): (a2, c2), c2 = t, c1 # E: Incompatible types in assignment (expression has type "B", variable has type "C") if int(): t, c2 = (a2, b2), c2 if int(): t, c2 = (a2, a2), c2 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]") if int(): t = a1, a1, a1 # E: Incompatible types in assignment (expression has type "Tuple[A, A, A]", variable has type "Tuple[A, B]") if int(): t = a1 # E: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, B]") if int(): a2, a2, a2 = t # E: Need more than 2 values to unpack (3 expected) if int(): a2, = t # E: Too many values to unpack (1 expected, 2 provided) if int(): a2 = t # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "A") class A: pass class B: pass class C: pass -- Error 
messages -- -------------- [case testTupleErrorMessages] a = None # type: A (a, a) + a # E: Unsupported operand types for + ("Tuple[A, A]" and "A") a + (a, a) # E: Unsupported operand types for + ("A" and "Tuple[A, A]") f((a, a)) # E: Argument 1 to "f" has incompatible type "Tuple[A, A]"; expected "A" (a, a).foo # E: "Tuple[A, A]" has no attribute "foo" def f(x: 'A') -> None: pass class A: def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/tuple.pyi] [case testLargeTuplesInErrorMessages] a = None # type: LongTypeName a + (a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a) # Fail class LongTypeName: def __add__(self, x: 'LongTypeName') -> 'LongTypeName': pass [builtins fixtures/tuple.pyi] [out] main:3: error: Unsupported operand types for + ("LongTypeName" and ) -- Tuple methods -- ------------- [case testTupleMethods] from typing import Tuple t = None # type: Tuple[int, str] i = 0 s = '' b = bool() if int(): s = t.__len__() # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): i = t.__str__() # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): i = s in t # E: Incompatible types in assignment (expression has type "bool", variable has type "int") t.foo # E: "Tuple[int, str]" has no attribute "foo" if int(): i = t.__len__() if int(): s = t.__str__() if int(): b = s in t [file builtins.py] from typing import TypeVar, Generic _T = TypeVar('_T') class object: def __init__(self) -> None: pass class tuple(Generic[_T]): def __len__(self) -> int: pass def __str__(self) -> str: pass def __contains__(self, o: object) -> bool: pass class int: pass class str: pass class bool: pass class type: pass class function: pass -- For loop over tuple -- ------------------- [case testForLoopOverTuple] import typing t = 1, 2 for x in t: x = 1 x = '' # E: Incompatible types in assignment 
(expression has type "str", variable has type "int") [builtins fixtures/for.pyi] [case testForLoopOverEmptyTuple] import typing t = () for x in t: pass # E: Need type annotation for 'x' [builtins fixtures/for.pyi] [case testForLoopOverNoneValuedTuple] import typing for x in None, None: pass [builtins fixtures/for.pyi] [case testForLoopOverTupleAndSubtyping] import typing class A: pass class B(A): pass for x in B(), A(): x = A() x = B() x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A") [builtins fixtures/for.pyi] [case testTupleIterable] y = 'a' x = sum((1,2)) if int(): y = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/tuple.pyi] -- Tuple as a base type -- -------------------- [case testTupleBaseClass] import m [file m.pyi] from typing import Tuple class A(Tuple[int, str]): def f(self, x: int) -> None: a, b = 1, '' if int(): a, b = self b, a = self # Error self.f('') # Error [builtins fixtures/tuple.pyi] [out] tmp/m.pyi:7: error: Incompatible types in assignment (expression has type "int", variable has type "str") tmp/m.pyi:7: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/m.pyi:8: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testValidTupleBaseClass2] from typing import Tuple class A(Tuple[int, str]): pass x, y = A() reveal_type(x) # N: Revealed type is 'builtins.int' reveal_type(y) # N: Revealed type is 'builtins.str' x1 = A()[0] # type: int x2 = A()[1] # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int") A()[2] # E: Tuple index out of range class B(Tuple[int, ...]): pass z1 = B()[0] # type: int z2 = B()[1] # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str") B()[100] [builtins fixtures/tuple.pyi] [out] [case testValidTupleBaseClass] from typing import Tuple class 
A(tuple): pass [out] [case testTupleBaseClass2] import m [file m.pyi] from typing import Tuple a = None # type: A class A(Tuple[int, str]): pass x, y = a x() # E: "int" not callable y() # E: "str" not callable [builtins fixtures/tuple.pyi] [out] [case testGenericClassWithTupleBaseClass] from typing import TypeVar, Generic, Tuple T = TypeVar('T') class Test(Generic[T], Tuple[T]): pass x = Test() # type: Test[int] [builtins fixtures/tuple.pyi] [out] main:4: error: Generic tuple types not supported -- Variable-length tuples (Tuple[t, ...] with literal '...') -- --------------------------------------------------------- [case testIndexingVariableLengthTuple] from typing import Tuple x = () # type: Tuple[str, ...] n = 5 x[n]() # E: "str" not callable x[3]() # E: "str" not callable [builtins fixtures/tuple.pyi] [case testSubtypingVariableLengthTuple] from typing import Tuple class A: pass class B(A): pass def fa(t: Tuple[A, ...]) -> None: pass def fb(t: Tuple[B, ...]) -> None: pass ta = () # type: Tuple[A, ...] tb = () # type: Tuple[B, ...] 
fa(ta) fa(tb) fb(tb) fb(ta) # E: Argument 1 to "fb" has incompatible type "Tuple[A, ...]"; expected "Tuple[B, ...]" [builtins fixtures/tuple.pyi] [case testSubtypingFixedAndVariableLengthTuples] from typing import Tuple class A: pass class B(A): pass def fa(t: Tuple[A, ...]) -> None: pass def fb(t: Tuple[B, ...]) -> None: pass aa = (A(), A()) ab = (A(), B()) bb = (B(), B()) fa(aa) fa(ab) fa(bb) fb(bb) fb(ab) # E: Argument 1 to "fb" has incompatible type "Tuple[A, B]"; expected "Tuple[B, ...]" fb(aa) # E: Argument 1 to "fb" has incompatible type "Tuple[A, A]"; expected "Tuple[B, ...]" [builtins fixtures/tuple.pyi] [case testSubtypingTupleIsContainer] from typing import Container a = None # type: Container[str] a = () [typing fixtures/typing-full.pyi] [case testSubtypingTupleIsSized] from typing import Sized a = None # type: Sized a = () [typing fixtures/typing-full.pyi] [case testTupleWithStarExpr1] a = (1, 2) b = (*a, '') reveal_type(b) # N: Revealed type is 'Tuple[builtins.int, builtins.int, builtins.str]' [case testTupleWithStarExpr2] a = [1] b = (0, *a) reveal_type(b) # N: Revealed type is 'builtins.tuple[builtins.int*]' [builtins fixtures/tuple.pyi] [case testTupleWithStarExpr3] a = [''] b = (0, *a) reveal_type(b) # N: Revealed type is 'builtins.tuple[builtins.object*]' c = (*a, '') reveal_type(c) # N: Revealed type is 'builtins.tuple[builtins.str*]' [builtins fixtures/tuple.pyi] [case testTupleWithStarExpr4] a = (1, 1, 'x', 'x') b = (1, 'x') a = (0, *b, '') [builtins fixtures/tuple.pyi] [case testTupleMeetTupleAny] from typing import Union, Tuple class A: pass class B: pass def f(x: Union[B, Tuple[A, A]]) -> None: if isinstance(x, tuple): reveal_type(x) # N: Revealed type is 'Tuple[__main__.A, __main__.A]' else: reveal_type(x) # N: Revealed type is '__main__.B' def g(x: Union[str, Tuple[str, str]]) -> None: if isinstance(x, tuple): reveal_type(x) # N: Revealed type is 'Tuple[builtins.str, builtins.str]' else: reveal_type(x) # N: Revealed type is 'builtins.str' 
[builtins fixtures/tuple.pyi] [out] [case testTupleMeetTupleAnyComplex] from typing import Tuple, Union Pair = Tuple[int, int] Variant = Union[int, Pair] def tuplify(v: Variant) -> None: reveal_type(v) # N: Revealed type is 'Union[builtins.int, Tuple[builtins.int, builtins.int]]' if not isinstance(v, tuple): reveal_type(v) # N: Revealed type is 'builtins.int' v = (v, v) reveal_type(v) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(v) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(v[0]) # N: Revealed type is 'builtins.int' Pair2 = Tuple[int, str] Variant2 = Union[int, Pair2] def tuplify2(v: Variant2) -> None: if isinstance(v, tuple): reveal_type(v) # N: Revealed type is 'Tuple[builtins.int, builtins.str]' else: reveal_type(v) # N: Revealed type is 'builtins.int' [builtins fixtures/tuple.pyi] [out] [case testTupleMeetTupleAnyAfter] from typing import Tuple, Union def good(blah: Union[Tuple[int, int], int]) -> None: reveal_type(blah) # N: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]' if isinstance(blah, tuple): reveal_type(blah) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(blah) # N: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]' [builtins fixtures/tuple.pyi] [out] [case testTupleMeetTupleVariable] from typing import Tuple, TypeVar, Generic, Union T = TypeVar('T') class A: pass class B1(A): pass class B2(A): pass class C: pass x = None # type: Tuple[A, ...] 
y = None # type: Tuple[Union[B1, C], Union[B2, C]] def g(x: T) -> Tuple[T, T]: return (x, x) z = 1 x, y = g(z) # E: Argument 1 to "g" has incompatible type "int"; expected "Tuple[B1, B2]" [builtins fixtures/tuple.pyi] [out] [case testTupleWithUndersizedContext] a = ([1], 'x') if int(): a = ([], 'x', 1) # E: Incompatible types in assignment (expression has type "Tuple[List[int], str, int]", variable has type "Tuple[List[int], str]") [builtins fixtures/tuple.pyi] [case testTupleWithOversizedContext] a = (1, [1], 'x') if int(): a = (1, []) # E: Incompatible types in assignment (expression has type "Tuple[int, List[int]]", variable has type "Tuple[int, List[int], str]") [builtins fixtures/tuple.pyi] [case testTupleWithoutContext] a = (1, []) # E: Need type annotation for 'a' [builtins fixtures/tuple.pyi] [case testTupleWithUnionContext] from typing import List, Union, Tuple def f() -> Union[int, Tuple[List[str]]]: return ([],) [builtins fixtures/tuple.pyi] [case testTupleWithVariableSizedTupleContext] from typing import List, Tuple def f() -> Tuple[List[str], ...]: return ([],) [builtins fixtures/tuple.pyi] [case testTupleWithoutArgs] from typing import Tuple def f(a: Tuple) -> None: pass f(()) f((1,)) f(('', '')) f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[Any, ...]" [builtins fixtures/tuple.pyi] [case testTupleSingleton] from typing import Tuple def f(a: Tuple[()]) -> None: pass f(()) f((1,)) # E: Argument 1 to "f" has incompatible type "Tuple[int]"; expected "Tuple[]" f(('', '')) # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[]" f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[]" [builtins fixtures/tuple.pyi] [case testNonliteralTupleIndex] t = (0, "") x = 0 y = "" reveal_type(t[x]) # N: Revealed type is 'Union[builtins.int, builtins.str]' t[y] # E: Invalid tuple index type (actual type "str", expected type "Union[int, slice]") [builtins fixtures/tuple.pyi] [case 
testNonliteralTupleSlice] t = (0, "") x = 0 y = "" reveal_type(t[x:]) # N: Revealed type is 'builtins.tuple[Union[builtins.int, builtins.str]]' t[y:] # E: Slice index must be an integer or None [builtins fixtures/tuple.pyi] [case testInferTupleTypeFallbackAgainstInstance] from typing import TypeVar, Generic, Tuple T = TypeVar('T') class Base(Generic[T]): pass def f(x: Base[T]) -> T: pass class DT(Tuple[str, str], Base[int]): pass reveal_type(f(DT())) # N: Revealed type is 'builtins.int*' [builtins fixtures/tuple.pyi] [out] [case testTypeTupleClassmethod] from typing import Tuple, Type class C(Tuple[int, str]): @classmethod def f(cls) -> None: pass t: Type[C] t.g() # E: "Type[C]" has no attribute "g" t.f() [builtins fixtures/classmethod.pyi] [case testTypeTupleCall] from typing import Tuple def foo(o: CallableTuple) -> int: reveal_type(o) # N: Revealed type is 'Tuple[builtins.str, builtins.int, fallback=__main__.CallableTuple]' return o(1, 2) class CallableTuple(Tuple[str, int]): def __call__(self, n: int, m: int) -> int: return n [builtins fixtures/tuple.pyi] [case testTupleCompatibleWithSequence] from typing import Sequence s: Sequence[str] s = tuple() reveal_type(s) # N: Revealed type is 'builtins.tuple[builtins.str]' [builtins fixtures/tuple.pyi] [case testTupleInstanceCompatibleWithIterable] from typing import Iterable, Tuple x: Iterable[int] = () y: Tuple[int, ...] 
= (1, 2, 3) x = y reveal_type(x) # N: Revealed type is 'builtins.tuple[builtins.int]' [builtins fixtures/tuple.pyi] [case testTupleTypeCompatibleWithIterable] from typing import Iterable, Tuple x: Iterable[int] = () y: Tuple[int, int] = (1, 2) x = y reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' [case testTupleOverlapDifferentTuples] from typing import Optional, Tuple class A: pass class B: pass possibles: Tuple[int, Tuple[A]] x: Optional[Tuple[B]] if x in possibles: reveal_type(x) # N: Revealed type is 'Tuple[__main__.B]' else: reveal_type(x) # N: Revealed type is 'Union[Tuple[__main__.B], None]' [builtins fixtures/tuple.pyi] [case testUnionOfTupleIndex] from typing import Union, Tuple tup: Union[Tuple[int, str], Tuple[int, int, str]] reveal_type(tup[0]) # N: Revealed type is 'builtins.int' reveal_type(tup[1]) # N: Revealed type is 'Union[builtins.str, builtins.int]' reveal_type(tup[2]) # E: Tuple index out of range \ # N: Revealed type is 'Union[Any, builtins.str]' reveal_type(tup[:]) # N: Revealed type is 'Union[Tuple[builtins.int, builtins.str], Tuple[builtins.int, builtins.int, builtins.str]]' [builtins fixtures/tuple.pyi] [case testUnionOfTupleIndexMixed] from typing import Union, Tuple, List tup: Union[Tuple[int, str], List[int]] reveal_type(tup[0]) # N: Revealed type is 'builtins.int' reveal_type(tup[1]) # N: Revealed type is 'Union[builtins.str, builtins.int*]' reveal_type(tup[2]) # E: Tuple index out of range \ # N: Revealed type is 'Union[Any, builtins.int*]' reveal_type(tup[:]) # N: Revealed type is 'Union[Tuple[builtins.int, builtins.str], builtins.list[builtins.int*]]' [builtins fixtures/tuple.pyi] [case testFixedLengthTupleConcatenation] a = (1, "foo", 3) b = ("bar", 7) reveal_type(a + b) # N: Revealed type is 'Tuple[builtins.int, builtins.str, builtins.int, builtins.str, builtins.int]' [builtins fixtures/tuple.pyi] [case testAssigningWithLongTupleInitializer] from typing import Tuple # long initializer assignment with few 
mismatches t: Tuple[int, ...] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", 11) \ # E: Incompatible types in assignment (3 tuple items are incompatible) \ # N: Expression tuple item 8 has type "str"; "int" expected; \ # N: Expression tuple item 9 has type "str"; "int" expected; \ # N: Expression tuple item 10 has type "str"; "int" expected; # long initializer assignment with more mismatches t1: Tuple[int, ...] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str") \ # E: Incompatible types in assignment (4 tuple items are incompatible; 1 items are omitted) \ # N: Expression tuple item 8 has type "str"; "int" expected; \ # N: Expression tuple item 9 has type "str"; "int" expected; \ # N: Expression tuple item 10 has type "str"; "int" expected; # short tuple initializer assignment t2: Tuple[int, ...] = (1, 2, "s", 4) \ # E: Incompatible types in assignment (expression has type "Tuple[int, int, str, int]", variable has type "Tuple[int, ...]") # long initializer assignment with few mismatches, no ellipsis t3: Tuple[int, int, int, int, int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, "str", "str") \ # E: Incompatible types in assignment (2 tuple items are incompatible) \ # N: Expression tuple item 10 has type "str"; "int" expected; \ # N: Expression tuple item 11 has type "str"; "int" expected; # long initializer assignment with more mismatches, no ellipsis t4: Tuple[int, int, int, int, int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str") \ # E: Incompatible types in assignment (4 tuple items are incompatible; 1 items are omitted) \ # N: Expression tuple item 8 has type "str"; "int" expected; \ # N: Expression tuple item 9 has type "str"; "int" expected; \ # N: Expression tuple item 10 has type "str"; "int" expected; # short tuple initializer assignment, no ellipsis t5: Tuple[int, int] = (1, 2, "s", 4) # E: Incompatible types in assignment (expression has type "Tuple[int, int, str, int]", variable has 
type "Tuple[int, int]") # long initializer assignment with mismatched pairs t6: Tuple[int, int, int, int, int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str", 1, 1, 1, 1, 1) \ # E: Incompatible types in assignment (expression has type Tuple[int, int, ... <15 more items>], variable has type Tuple[int, int, ... <10 more items>]) [builtins fixtures/tuple.pyi] mypy-0.761/test-data/unit/check-type-aliases.test0000644€tŠÔÚ€2›s®0000003547613576752246026160 0ustar jukkaDROPBOX\Domain Users00000000000000[case testSimpleTypeAlias] import typing i = int def f(x: i) -> None: pass f(1) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [targets __main__, __main__.f] [case testUnionTypeAlias] from typing import Union U = Union[int, str] def f(x: U) -> None: pass f(1) f('') f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]" [targets __main__, __main__.f] [case testTupleTypeAlias] from typing import Tuple T = Tuple[int, str] def f(x: T) -> None: pass f((1, 'x')) f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[int, str]" [targets __main__, __main__.f] [case testCallableTypeAlias] from typing import Callable A = Callable[[int], None] f = None # type: A f(1) f('') # E: Argument 1 has incompatible type "str"; expected "int" [targets __main__] [case testListTypeAlias] from typing import List A = List[int] def f(x: A) -> None: pass f([1]) f(['x']) # E: List item 0 has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [targets __main__, __main__.f] [case testAnyTypeAlias] from typing import Any A = Any def f(x: A) -> None: x.foo() f(1) f('x') [case testImportUnionAlias] import typing from _m import U def f(x: U) -> None: pass f(1) f('x') f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]" [file _m.py] from typing import Union U = Union[int, str] [builtins fixtures/tuple.pyi] [case 
testProhibitReassigningAliases] A = float if int(): A = int # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation [out] [case testProhibitReassigningSubscriptedAliases] from typing import Callable A = Callable[[], float] if int(): A = Callable[[], int] \ # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation \ # E: Value of type "int" is not indexable # the second error is because of `Callable = 0` in lib-stub/typing.pyi [builtins fixtures/list.pyi] [out] [case testProhibitReassigningGenericAliases] from typing import TypeVar, Union, Tuple T = TypeVar('T') A = Tuple[T, T] if int(): A = Union[T, int] # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation \ # E: Value of type "int" is not indexable # the second error is because of `Union = 0` in lib-stub/typing.pyi [out] [case testProhibitUsingVariablesAsTypesAndAllowAliasesAsTypes] from typing import TypeVar, Sequence, Type T = TypeVar('T') A: Type[float] = int if int(): A = float # OK x: A # E: Variable "__main__.A" is not valid as a type def bad(tp: A) -> None: # E: Variable "__main__.A" is not valid as a type pass Alias = int GenAlias = Sequence[T] def fun(x: Alias) -> GenAlias[int]: pass [out] [case testCorrectQualifiedAliasesAlsoInFunctions] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class X(Generic[T]): A = X[S] def f(self) -> X[T]: pass a: X[T] b: A = a c: A[T] = a d: A[int] = a # E: Incompatible types in assignment (expression has type "X[T]", variable has type "X[int]") def g(self) -> None: a: X[T] b: X.A = a c: X.A[T] = a d: X.A[int] = a # E: Incompatible types in assignment (expression has type "X[T]", variable has type "X[int]") def g(arg: X[int]) -> None: p: X[int] = arg.f() q: X.A = arg.f() r: X.A[str] = arg.f() # E: Incompatible types in assignment (expression has type "X[int]", variable has type "X[str]") [out] [case testProhibitBoundTypeVariableReuseForAliases] from 
typing import TypeVar, Generic, List T = TypeVar('T') class C(Generic[T]): A = List[T] # E: Can't use bound type variable "T" to define generic alias x: C.A reveal_type(x) # N: Revealed type is 'builtins.list[Any]' def f(x: T) -> T: A = List[T] # E: Can't use bound type variable "T" to define generic alias return x [builtins fixtures/list.pyi] [out] [case testTypeAliasInBuiltins] def f(x: bytes): pass bytes f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str" [builtins fixtures/alias.pyi] [case testEmptyTupleTypeAlias] from typing import Tuple, Callable EmptyTuple = Tuple[()] x = None # type: EmptyTuple reveal_type(x) # N: Revealed type is 'Tuple[]' EmptyTupleCallable = Callable[[Tuple[()]], None] f = None # type: EmptyTupleCallable reveal_type(f) # N: Revealed type is 'def (Tuple[])' [builtins fixtures/list.pyi] [case testForwardTypeAlias] def f(p: 'Alias') -> None: pass reveal_type(f) # N: Revealed type is 'def (p: builtins.int)' Alias = int [out] [case testForwardTypeAliasGeneric] from typing import TypeVar, Tuple def f(p: 'Alias[str]') -> None: pass reveal_type(f) # N: Revealed type is 'def (p: Tuple[builtins.int, builtins.str])' T = TypeVar('T') Alias = Tuple[int, T] [out] [case testRecursiveAliasesErrors1] # Recursive aliases are not supported yet. from typing import Type, Callable, Union A = Union[A, int] # E: Cannot resolve name "A" (possible cyclic definition) B = Callable[[B], int] # E: Cannot resolve name "B" (possible cyclic definition) C = Type[C] # E: Cannot resolve name "C" (possible cyclic definition) [case testRecursiveAliasesErrors2] # Recursive aliases are not supported yet. 
from typing import Type, Callable, Union A = Union[B, int] B = Callable[[C], int] C = Type[A] x: A reveal_type(x) [out] main:5: error: Cannot resolve name "A" (possible cyclic definition) main:5: error: Cannot resolve name "B" (possible cyclic definition) main:6: error: Cannot resolve name "B" (possible cyclic definition) main:6: error: Cannot resolve name "C" (possible cyclic definition) main:7: error: Cannot resolve name "C" (possible cyclic definition) main:9: note: Revealed type is 'Union[Any, builtins.int]' [case testDoubleForwardAlias] from typing import List x: A A = List[B] B = List[int] reveal_type(x) # N: Revealed type is 'builtins.list[builtins.list[builtins.int]]' [builtins fixtures/list.pyi] [out] [case testDoubleForwardAliasWithNamedTuple] from typing import List, NamedTuple x: A A = List[B] class B(NamedTuple): x: str reveal_type(x[0].x) # N: Revealed type is 'builtins.str' [builtins fixtures/list.pyi] [out] [case testJSONAliasApproximation] # Recursive aliases are not supported yet. 
from typing import List, Union, Dict x: JSON # E: Cannot resolve name "JSON" (possible cyclic definition) JSON = Union[int, str, List[JSON], Dict[str, JSON]] # E: Cannot resolve name "JSON" (possible cyclic definition) reveal_type(x) # N: Revealed type is 'Any' if isinstance(x, list): reveal_type(x) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/isinstancelist.pyi] [out] [case testForwardRefToTypeVar] from typing import TypeVar, List reveal_type(a) # N: Revealed type is 'builtins.list[builtins.int]' a: A[int] A = List[T] T = TypeVar('T') [builtins fixtures/list.pyi] [out] [case testFunctionForwardRefAlias] from typing import List, TypeVar T = TypeVar('T') def f(x: T) -> List[T]: y: A[T] reveal_type(y) # N: Revealed type is 'builtins.list[T`-1]' return [x] + y A = List[T] [builtins fixtures/list.pyi] [out] [case testFunctionForwardRefAlias2] from typing import List, TypeVar def f() -> None: X = List[int] x: A[X] reveal_type(x) # N: Revealed type is 'builtins.list[builtins.list[builtins.int]]' T = TypeVar('T') A = List[T] [builtins fixtures/list.pyi] [out] [case testNoneAlias] from typing import Union void = type(None) x: void reveal_type(x) # N: Revealed type is 'None' y: Union[int, void] reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' [builtins fixtures/bool.pyi] [case testNoneAliasStrict] # flags: --strict-optional from typing import Optional, Union void = type(None) x: int y: Union[int, void] z: Optional[int] x = y # E: Incompatible types in assignment (expression has type "Optional[int]", variable has type "int") y = z [builtins fixtures/bool.pyi] [case testAliasToTupleAndCallable] from typing import Callable, Tuple C = Callable T = Tuple c: C t: T reveal_type(c) # N: Revealed type is 'def (*Any, **Any) -> Any' reveal_type(t) # N: Revealed type is 'builtins.tuple[Any]' bad: C[int] # E: Bad number of arguments for type alias, expected: 0, given: 1 also_bad: T[int] # E: Bad number of arguments for type alias, expected: 0, given: 1 
[builtins fixtures/tuple.pyi] [out] [case testAliasRefOnClass] from typing import Generic, TypeVar, Type T = TypeVar('T') class C(Generic[T]): pass class N: A = C[T] B = C[int] x: N.A[C] reveal_type(x) # N: Revealed type is '__main__.C[__main__.C[Any]]' xx = N.A[C]() reveal_type(xx) # N: Revealed type is '__main__.C[__main__.C*[Any]]' y = N.A() reveal_type(y) # N: Revealed type is '__main__.C[Any]' M = N b = M.A[int]() reveal_type(b) # N: Revealed type is '__main__.C[builtins.int*]' n: Type[N] w = n.B() reveal_type(w) # N: Revealed type is '__main__.C[builtins.int]' [out] [case testTypeAliasesToNamedTuple] from nt import C, D, E A1 = C A2 = D A3 = E class Cls: A1 = C A2 = D A3 = E A1('no') # E: Argument 1 to "C" has incompatible type "str"; expected "int" a1 = A1(1) reveal_type(a1) # N: Revealed type is 'Tuple[builtins.int, fallback=nt.C]' A2(0) # E: Argument 1 to "D" has incompatible type "int"; expected "str" a2 = A2('yes') reveal_type(a2) # N: Revealed type is 'Tuple[builtins.str, fallback=nt.D]' a3 = A3() reveal_type(a3) # N: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=nt.E]' Cls.A1('no') # E: Argument 1 has incompatible type "str"; expected "int" ca1 = Cls.A1(1) reveal_type(ca1) # N: Revealed type is 'Tuple[builtins.int, fallback=nt.C]' Cls.A2(0) # E: Argument 1 has incompatible type "int"; expected "str" ca2 = Cls.A2('yes') reveal_type(ca2) # N: Revealed type is 'Tuple[builtins.str, fallback=nt.D]' ca3 = Cls.A3() reveal_type(ca3) # N: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=nt.E]' [file nt.pyi] from typing import NamedTuple, Tuple class C(NamedTuple): x: int D = NamedTuple('D', [('y', str)]) class E(Tuple[int, str]): pass [builtins fixtures/tuple.pyi] [out] [case testTypeAliasesToAny] from typing import Any A = Any A # this should not fail [out] [case testDoubleImportsOfAnAlias] from b import * from c import * [file b.py] from a import A [file c.py] from a import A [file a.py] A = int [out] [case 
testDoubleImportsOfAnAlias2] from b import A from c import A [file b.py] from a import A [file c.py] from a import A [file a.py] A = int [out] [case testDoubleImportsOfAnAlias3] from b import * from c import * [file b.py] from a import A [file c.py] from a import A [file a.py] from typing import Union A = Union[None] [out] [case testAliasToClassMethod] from typing import TypeVar, Generic, Union, Type T = TypeVar('T', bound=C) MYPY = False if MYPY: test = classmethod class C: @test def meth(cls: Type[T], arg: int) -> Union[T, int]: ... class D(C): ... reveal_type(D.meth(1)) # N: Revealed type is 'Union[__main__.D*, builtins.int]' reveal_type(D().meth(1)) # N: Revealed type is 'Union[__main__.D*, builtins.int]' [builtins fixtures/classmethod.pyi] [out] [case testAliasInImportCycle] # cmd: mypy -m t t2 [file t.py] MYPY = False if MYPY: from t2 import A x: A [file t2.py] import t from typing import Callable A = Callable[[], None] [builtins fixtures/bool.pyi] [out] [case testAliasInImportCycle2] import a [file a.pyi] from b import Parameter class _ParamType: p: Parameter _ConvertibleType = _ParamType def convert_type(ty: _ConvertibleType): ... 
[file b.pyi] from a import _ConvertibleType class Parameter: type: _ConvertibleType [out] [case testAliasInImportCycle3] # cmd: mypy -m t t2 [file t.py] MYPY = False if MYPY: from t2 import A x: A reveal_type(x) # N: Revealed type is 't2.D' reveal_type(A) # N: Revealed type is 'def () -> t2.D' A() [file t2.py] import t class D: pass A = D [builtins fixtures/bool.pyi] [out] [case testFlexibleAlias1] from typing import TypeVar, List, Tuple from mypy_extensions import FlexibleAlias T = TypeVar('T') U = TypeVar('U') AnInt = FlexibleAlias[T, int] x: AnInt[str] reveal_type(x) # N: Revealed type is 'builtins.int' TwoArgs = FlexibleAlias[Tuple[T, U], bool] TwoArgs2 = FlexibleAlias[Tuple[T, U], List[U]] def welp(x: TwoArgs[str, int]) -> None: reveal_type(x) # N: Revealed type is 'builtins.bool' def welp2(x: TwoArgs2[str, int]) -> None: reveal_type(x) # N: Revealed type is 'builtins.list[builtins.int]' Id = FlexibleAlias[T, T] def take_id(x: Id[int]) -> None: reveal_type(x) # N: Revealed type is 'builtins.int' def id(x: Id[T]) -> T: return x # TODO: This doesn't work and maybe it should? 
# Indirection = AnInt[T] # y: Indirection[str] # reveal_type(y) # E : Revealed type is 'builtins.int' # But this does Indirection2 = FlexibleAlias[T, AnInt[T]] z: Indirection2[str] reveal_type(z) # N: Revealed type is 'builtins.int' Indirection3 = FlexibleAlias[Tuple[T, U], AnInt[T]] w: Indirection3[str, int] reveal_type(w) # N: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [case testFlexibleAlias2] # flags: --always-true=BOGUS from typing import TypeVar, Any from mypy_extensions import FlexibleAlias T = TypeVar('T') BOGUS = True if BOGUS: Bogus = FlexibleAlias[T, Any] else: Bogus = FlexibleAlias[T, T] class A: x: Bogus[str] reveal_type(A().x) # N: Revealed type is 'Any' def foo(x: Bogus[int]) -> None: reveal_type(x) # N: Revealed type is 'Any' [builtins fixtures/dict.pyi] [case testFlexibleAlias3] # flags: --always-false=BOGUS from typing import TypeVar, Any from mypy_extensions import FlexibleAlias T = TypeVar('T') BOGUS = True if BOGUS: Bogus = FlexibleAlias[T, Any] else: Bogus = FlexibleAlias[T, T] class A: x: Bogus[str] reveal_type(A().x) # N: Revealed type is 'builtins.str' def foo(x: Bogus[int]) -> None: reveal_type(x) # N: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [case testOverrideByIdemAliasCorrectType] C = C class C: # type: ignore pass x: C reveal_type(x) # N: Revealed type is '__main__.C' [out] [case testOverrideByIdemAliasCorrectTypeReversed] class C: pass C = C # type: ignore x: C reveal_type(x) # N: Revealed type is '__main__.C' [out] [case testOverrideByIdemAliasCorrectTypeImported] from other import C as B C = B x: C reveal_type(x) # N: Revealed type is 'other.C' [file other.py] class C: pass [out] [case testConditionalExceptionAlias] try: E = E except BaseException: class E(BaseException): pass # type: ignore try: pass except E as e: reveal_type(e) # N: Revealed type is '__main__.E' [builtins fixtures/exception.pyi] [out] [case testNestedClassOnAliasAsType] class Out: class In: class Inner: pass O = Out I = 
Out.In OI = O.In A = Out B = A w: O.In x: I.Inner y: OI.Inner z: B.In reveal_type(w) # N: Revealed type is '__main__.Out.In' reveal_type(x) # N: Revealed type is '__main__.Out.In.Inner' reveal_type(y) # N: Revealed type is '__main__.Out.In.Inner' reveal_type(z) # N: Revealed type is '__main__.Out.In' mypy-0.761/test-data/unit/check-type-checks.test0000644€tŠÔÚ€2›s®0000001030413576752246025756 0ustar jukkaDROPBOX\Domain Users00000000000000-- Conditional type checks. [case testSimpleIsinstance] x = None # type: object n = None # type: int s = None # type: str if int(): n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") [builtins fixtures/isinstance.pyi] [case testSimpleIsinstance2] import typing def f(x: object, n: int, s: str) -> None: if int(): n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testSimpleIsinstance3] class A: x = None # type: object n = None # type: int s = None # type: str if int(): n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") else: n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testMultipleIsinstanceTests] import typing class A: pass class B(A): pass def f(x: object, a: A, b: 
B, c: int) -> None: if isinstance(x, A): if isinstance(x, B): b = x x = a a = x c = x # E: Incompatible types in assignment (expression has type "A", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testMultipleIsinstanceTests2] import typing class A: pass class B(A): pass def f(x: object, y: object, n: int, s: str) -> None: if isinstance(x, int): if isinstance(y, str): n = x s = y s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") n = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") s = y # E: Incompatible types in assignment (expression has type "object", variable has type "str") n = y # E: Incompatible types in assignment (expression has type "object", variable has type "int") n = x [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndElif] import typing def f(x: object, n: int, s: str) -> None: if int(): n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") elif isinstance(x, str): s = x n = x # E: Incompatible types in assignment (expression has type "str", variable has type "int") else: n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") s = x # E: Incompatible types in assignment (expression has type "object", variable has type "str") n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndAnyType] from typing import Any def f(x: Any, n: int, s: str) -> None: if int(): s = x if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") s = x [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndGenericType] from typing import TypeVar, Generic 
T = TypeVar('T') class C(Generic[T]): def f(self, x: T) -> None: pass def f(x: object) -> None: if isinstance(x, C): x.f(1) x.f('') x.g() # E: "C[Any]" has no attribute "g" x.g() # E: "object" has no attribute "g" [builtins fixtures/isinstance.pyi] [out] mypy-0.761/test-data/unit/check-type-promotion.test0000644€tŠÔÚ€2›s®0000000251113576752246026545 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type promotion (e.g. int -> float). [case testPromoteIntToFloat] def f(x: float) -> None: pass f(1) [builtins fixtures/primitives.pyi] [case testCantPromoteFloatToInt] def f(x: int) -> None: pass f(1.1) # E: Argument 1 to "f" has incompatible type "float"; expected "int" [builtins fixtures/primitives.pyi] [case testPromoteFloatToComplex] def f(x: complex) -> None: pass f(1) [builtins fixtures/primitives.pyi] [case testPromoteIntToComplex] def f(x: complex) -> None: pass f(1) [builtins fixtures/primitives.pyi] [case testPromoteBytearrayToByte] def f(x: bytes) -> None: pass f(bytearray(b'')) [builtins fixtures/primitives.pyi] [case testPromoteMemoryviewToBytes] def f(x: bytes) -> None: pass f(memoryview(b'')) [builtins fixtures/primitives.pyi] [case testNarrowingDownFromPromoteTargetType] y = 0.0 y = 1 y() # E: "int" not callable [builtins fixtures/primitives.pyi] [case testNarrowingDownFromPromoteTargetType2] y = 0.0 y = 1 y.x # E: "int" has no attribute "x" [builtins fixtures/primitives.pyi] [case testTypePromotionsDontInterfereWithProtocols] from typing import TypeVar, Union, Protocol class SupportsFloat(Protocol): def __float__(self) -> float: pass T = TypeVar('T') def f(x: Union[SupportsFloat, T]) -> Union[SupportsFloat, T]: pass f(0) # should not crash [builtins fixtures/primitives.pyi] [out] mypy-0.761/test-data/unit/check-typeddict.test0000644€tŠÔÚ€2›s®0000020065113576752246025536 0ustar jukkaDROPBOX\Domain Users00000000000000-- Create Instance [case testCanCreateTypedDictInstanceWithKeywordArguments] from mypy_extensions import TypedDict Point = 
TypedDict('Point', {'x': int, 'y': int}) p = Point(x=42, y=1337) reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' # Use values() to check fallback value type. reveal_type(p.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [targets sys, __main__] [case testCanCreateTypedDictInstanceWithDictCall] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(dict(x=42, y=1337)) reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' # Use values() to check fallback value type. reveal_type(p.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testCanCreateTypedDictInstanceWithDictLiteral] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point({'x': 42, 'y': 1337}) reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' # Use values() to check fallback value type. reveal_type(p.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testCanCreateTypedDictInstanceWithNoArguments] from typing import TypeVar, Union from mypy_extensions import TypedDict EmptyDict = TypedDict('EmptyDict', {}) p = EmptyDict() reveal_type(p) # N: Revealed type is 'TypedDict('__main__.EmptyDict', {})' reveal_type(p.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- Create Instance (Errors) [case testCannotCreateTypedDictInstanceWithUnknownArgumentPattern] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(42, 1337) # E: Expected keyword arguments, {...}, or dict(...) 
in TypedDict constructor [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictInstanceNonLiteralItemName] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) x = 'x' p = Point({x: 42, 'y': 1337}) # E: Expected TypedDict key to be string literal [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictInstanceWithExtraItems] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(x=42, y=1337, z=666) # E: Extra key 'z' for TypedDict "Point" [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictInstanceWithMissingItems] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(x=42) # E: Key 'y' missing for TypedDict "Point" [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictInstanceWithIncompatibleItemType] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(x='meaning_of_life', y=1337) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [builtins fixtures/dict.pyi] -- Define TypedDict (Class syntax) [case testCanCreateTypedDictWithClass] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point(TypedDict): x: int y: int p = Point(x=42, y=1337) reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithSubclass] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1D(TypedDict): x: int class Point2D(Point1D): y: int r: Point1D p: Point2D reveal_type(r) # N: Revealed type is 'TypedDict('__main__.Point1D', {'x': builtins.int})' reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point2D', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithSubclass2] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1D(TypedDict): 
x: int class Point2D(TypedDict, Point1D): # We also allow to include TypedDict in bases, it is simply ignored at runtime y: int p: Point2D reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point2D', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictClassEmpty] # flags: --python-version 3.6 from mypy_extensions import TypedDict class EmptyDict(TypedDict): pass p = EmptyDict() reveal_type(p) # N: Revealed type is 'TypedDict('__main__.EmptyDict', {})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithClassOldVersion] # flags: --python-version 3.5 # Test that we can use class-syntax to merge TypedDicts even in # versions without type annotations from mypy_extensions import TypedDict MovieBase1 = TypedDict( 'MovieBase1', {'name': str, 'year': int}) MovieBase2 = TypedDict( 'MovieBase2', {'based_on': str}, total=False) class Movie(MovieBase1, MovieBase2): pass def foo(x): # type: (Movie) -> None pass foo({}) # E: Keys ('name', 'year') missing for TypedDict "Movie" foo({'name': 'lol', 'year': 2009, 'based_on': 0}) # E: Incompatible types (expression has type "int", TypedDict item "based_on" has type "str") [builtins fixtures/dict.pyi] -- Define TypedDict (Class syntax errors) [case testCannotCreateTypedDictWithClassOtherBases] # flags: --python-version 3.6 from mypy_extensions import TypedDict class A: pass class Point1D(TypedDict, A): # E: All bases of a new TypedDict must be TypedDict types x: int class Point2D(Point1D, A): # E: All bases of a new TypedDict must be TypedDict types y: int p: Point2D reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point2D', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithClassWithOtherStuff] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point(TypedDict): x: int y: int = 1 # E: Right hand side values are not supported in TypedDict def f(): pass # E: Invalid statement in TypedDict 
definition; expected "field_name: field_type" z = int # E: Invalid statement in TypedDict definition; expected "field_name: field_type" p = Point(x=42, y=1337, z='whatever') reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int, 'z': Any})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictTypeWithUnderscoreItemName] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int, '_fallback': object}) [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithClassUnderscores] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point(TypedDict): x: int _y: int p: Point reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, '_y': builtins.int})' [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithClassOverwriting] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Bad(TypedDict): x: int x: str # E: Duplicate TypedDict field "x" b: Bad reveal_type(b) # N: Revealed type is 'TypedDict('__main__.Bad', {'x': builtins.int})' [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithClassOverwriting2] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1(TypedDict): x: int class Point2(TypedDict): x: float class Bad(Point1, Point2): # E: Cannot overwrite TypedDict field "x" while merging pass b: Bad reveal_type(b) # N: Revealed type is 'TypedDict('__main__.Bad', {'x': builtins.int})' [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithClassOverwriting2] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1(TypedDict): x: int class Point2(Point1): x: float # E: Cannot overwrite TypedDict field "x" while extending p2: Point2 reveal_type(p2) # N: Revealed type is 'TypedDict('__main__.Point2', {'x': builtins.int})' [builtins fixtures/dict.pyi] -- Subtyping [case testCanConvertTypedDictToItself] from mypy_extensions import TypedDict Point = 
TypedDict('Point', {'x': int, 'y': int}) def identity(p: Point) -> Point: return p [builtins fixtures/dict.pyi] [case testCanConvertTypedDictToEquivalentTypedDict] from mypy_extensions import TypedDict PointA = TypedDict('PointA', {'x': int, 'y': int}) PointB = TypedDict('PointB', {'x': int, 'y': int}) def identity(p: PointA) -> PointB: return p [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToSimilarTypedDictWithNarrowerItemTypes] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object}) def convert(op: ObjectPoint) -> Point: return op # E: Incompatible return value type (got "ObjectPoint", expected "Point") [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToSimilarTypedDictWithWiderItemTypes] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object}) def convert(p: Point) -> ObjectPoint: return p # E: Incompatible return value type (got "Point", expected "ObjectPoint") [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToSimilarTypedDictWithIncompatibleItemTypes] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) Chameleon = TypedDict('Chameleon', {'x': str, 'y': str}) def convert(p: Point) -> Chameleon: return p # E: Incompatible return value type (got "Point", expected "Chameleon") [builtins fixtures/dict.pyi] [case testCanConvertTypedDictToNarrowerTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) Point1D = TypedDict('Point1D', {'x': int}) def narrow(p: Point) -> Point1D: return p [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToWiderTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int}) def widen(p: Point) -> Point3D: return p # E: 
Incompatible return value type (got "Point", expected "Point3D") [builtins fixtures/dict.pyi] [case testCanConvertTypedDictToCompatibleMapping] from mypy_extensions import TypedDict from typing import Mapping Point = TypedDict('Point', {'x': int, 'y': int}) def as_mapping(p: Point) -> Mapping[str, object]: return p [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToIncompatibleMapping] from mypy_extensions import TypedDict from typing import Mapping Point = TypedDict('Point', {'x': int, 'y': int}) def as_mapping(p: Point) -> Mapping[str, int]: return p # E: Incompatible return value type (got "Point", expected "Mapping[str, int]") [builtins fixtures/dict.pyi] [case testTypedDictAcceptsIntForFloatDuckTypes] from mypy_extensions import TypedDict from typing import Any, Mapping Point = TypedDict('Point', {'x': float, 'y': float}) def create_point() -> Point: return Point(x=1, y=2) reveal_type(Point(x=1, y=2)) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.float, 'y': builtins.float})' [builtins fixtures/dict.pyi] [case testTypedDictDoesNotAcceptsFloatForInt] from mypy_extensions import TypedDict from typing import Any, Mapping Point = TypedDict('Point', {'x': int, 'y': int}) def create_point() -> Point: return Point(x=1.2, y=2.5) [out] main:5: error: Incompatible types (expression has type "float", TypedDict item "x" has type "int") main:5: error: Incompatible types (expression has type "float", TypedDict item "y" has type "int") [builtins fixtures/dict.pyi] [case testTypedDictAcceptsAnyType] from mypy_extensions import TypedDict from typing import Any, Mapping Point = TypedDict('Point', {'x': float, 'y': float}) def create_point(something: Any) -> Point: return Point({ 'x': something.x, 'y': something.y }) [builtins fixtures/dict.pyi] [case testTypedDictValueTypeContext] from mypy_extensions import TypedDict from typing import List D = TypedDict('D', {'x': List[int]}) reveal_type(D(x=[])) # N: Revealed type is 'TypedDict('__main__.D', 
{'x': builtins.list[builtins.int]})' [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToDictOrMutableMapping] from mypy_extensions import TypedDict from typing import Dict, MutableMapping Point = TypedDict('Point', {'x': int, 'y': int}) def as_dict(p: Point) -> Dict[str, int]: return p # E: Incompatible return value type (got "Point", expected "Dict[str, int]") def as_mutable_mapping(p: Point) -> MutableMapping[str, object]: return p # E: Incompatible return value type (got "Point", expected "MutableMapping[str, object]") [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testCanConvertTypedDictToAny] from mypy_extensions import TypedDict from typing import Any Point = TypedDict('Point', {'x': int, 'y': int}) def unprotect(p: Point) -> Any: return p [builtins fixtures/dict.pyi] [case testAnonymousTypedDictInErrorMessages] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}) B = TypedDict('B', {'x': int, 'z': str, 'a': int}) C = TypedDict('C', {'x': int, 'z': str, 'a': str}) a: A b: B c: C def f(a: A) -> None: pass l = [a, b] # Join generates an anonymous TypedDict f(l) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x': int})]"; expected "A" ll = [b, c] f(ll) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x': int, 'z': str})]"; expected "A" [builtins fixtures/dict.pyi] [case testTypedDictWithSimpleProtocol] from typing_extensions import Protocol from mypy_extensions import TypedDict class StrObjectMap(Protocol): def __getitem__(self, key: str) -> object: ... class StrIntMap(Protocol): def __getitem__(self, key: str) -> int: ... A = TypedDict('A', {'x': int, 'y': int}) B = TypedDict('B', {'x': int, 'y': str}) def fun(arg: StrObjectMap) -> None: ... def fun2(arg: StrIntMap) -> None: ... 
a: A b: B fun(a) fun(b) fun2(a) # Error [builtins fixtures/dict.pyi] [out] main:18: error: Argument 1 to "fun2" has incompatible type "A"; expected "StrIntMap" main:18: note: Following member(s) of "A" have conflicts: main:18: note: Expected: main:18: note: def __getitem__(self, str) -> int main:18: note: Got: main:18: note: def __getitem__(self, str) -> object [case testTypedDictWithSimpleProtocolInference] from typing_extensions import Protocol from mypy_extensions import TypedDict from typing import TypeVar T_co = TypeVar('T_co', covariant=True) T = TypeVar('T') class StrMap(Protocol[T_co]): def __getitem__(self, key: str) -> T_co: ... A = TypedDict('A', {'x': int, 'y': int}) B = TypedDict('B', {'x': int, 'y': str}) def fun(arg: StrMap[T]) -> T: return arg['whatever'] a: A b: B reveal_type(fun(a)) # N: Revealed type is 'builtins.object*' reveal_type(fun(b)) # N: Revealed type is 'builtins.object*' [builtins fixtures/dict.pyi] [out] -- Join [case testJoinOfTypedDictHasOnlyCommonKeysAndNewFallback] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int}) p1 = TaggedPoint(type='2d', x=0, y=0) p2 = Point3D(x=1, y=1, z=1) joined_points = [p1, p2][0] reveal_type(p1.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' reveal_type(p2.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' reveal_type(joined_points) # N: Revealed type is 'TypedDict({'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testJoinOfTypedDictRemovesNonequivalentKeys] from mypy_extensions import TypedDict CellWithInt = TypedDict('CellWithInt', {'value': object, 'meta': int}) CellWithObject = TypedDict('CellWithObject', {'value': object, 'meta': object}) c1 = CellWithInt(value=1, meta=42) c2 = CellWithObject(value=2, meta='turtle doves') joined_cells = [c1, c2] reveal_type(c1) # N: Revealed type 
is 'TypedDict('__main__.CellWithInt', {'value': builtins.object, 'meta': builtins.int})' reveal_type(c2) # N: Revealed type is 'TypedDict('__main__.CellWithObject', {'value': builtins.object, 'meta': builtins.object})' reveal_type(joined_cells) # N: Revealed type is 'builtins.list[TypedDict({'value': builtins.object})]' [builtins fixtures/dict.pyi] [case testJoinOfDisjointTypedDictsIsEmptyTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) Cell = TypedDict('Cell', {'value': object}) d1 = Point(x=0, y=0) d2 = Cell(value='pear tree') joined_dicts = [d1, d2] reveal_type(d1) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' reveal_type(d2) # N: Revealed type is 'TypedDict('__main__.Cell', {'value': builtins.object})' reveal_type(joined_dicts) # N: Revealed type is 'builtins.list[TypedDict({})]' [builtins fixtures/dict.pyi] [case testJoinOfTypedDictWithCompatibleMappingIsMapping] from mypy_extensions import TypedDict from typing import Mapping Cell = TypedDict('Cell', {'value': int}) left = Cell(value=42) right = {'score': 999} # type: Mapping[str, int] joined1 = [left, right] joined2 = [right, left] reveal_type(joined1) # N: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.object]]' reveal_type(joined2) # N: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.object]]' [builtins fixtures/dict.pyi] [case testJoinOfTypedDictWithCompatibleMappingSupertypeIsSupertype] from mypy_extensions import TypedDict from typing import Sized Cell = TypedDict('Cell', {'value': int}) left = Cell(value=42) right = {'score': 999} # type: Sized joined1 = [left, right] joined2 = [right, left] reveal_type(joined1) # N: Revealed type is 'builtins.list[typing.Sized*]' reveal_type(joined2) # N: Revealed type is 'builtins.list[typing.Sized*]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testJoinOfTypedDictWithIncompatibleTypeIsObject] from 
mypy_extensions import TypedDict from typing import Mapping Cell = TypedDict('Cell', {'value': int}) left = Cell(value=42) right = 42 joined1 = [left, right] joined2 = [right, left] reveal_type(joined1) # N: Revealed type is 'builtins.list[builtins.object*]' reveal_type(joined2) # N: Revealed type is 'builtins.list[builtins.object*]' [builtins fixtures/dict.pyi] -- Meet [case testMeetOfTypedDictsWithCompatibleCommonKeysHasAllKeysAndNewFallback] from mypy_extensions import TypedDict from typing import TypeVar, Callable XY = TypedDict('XY', {'x': int, 'y': int}) YZ = TypedDict('YZ', {'y': int, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XY, y: YZ) -> None: pass reveal_type(f(g)) # N: Revealed type is 'TypedDict({'x': builtins.int, 'y': builtins.int, 'z': builtins.int})' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithIncompatibleCommonKeysIsUninhabited] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable XYa = TypedDict('XYa', {'x': int, 'y': int}) YbZ = TypedDict('YbZ', {'y': object, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XYa, y: YbZ) -> None: pass reveal_type(f(g)) # N: Revealed type is '' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithNoCommonKeysHasAllKeysAndNewFallback] from mypy_extensions import TypedDict from typing import TypeVar, Callable X = TypedDict('X', {'x': int}) Z = TypedDict('Z', {'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: Z) -> None: pass reveal_type(f(g)) # N: Revealed type is 'TypedDict({'x': builtins.int, 'z': builtins.int})' [builtins fixtures/dict.pyi] # TODO: It would be more accurate for the meet to be TypedDict instead. 
[case testMeetOfTypedDictWithCompatibleMappingIsUninhabitedForNow] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable, Mapping X = TypedDict('X', {'x': int}) M = Mapping[str, int] T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: M) -> None: pass reveal_type(f(g)) # N: Revealed type is '' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictWithIncompatibleMappingIsUninhabited] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable, Mapping X = TypedDict('X', {'x': int}) M = Mapping[str, str] T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: M) -> None: pass reveal_type(f(g)) # N: Revealed type is '' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictWithCompatibleMappingSuperclassIsUninhabitedForNow] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable, Iterable X = TypedDict('X', {'x': int}) I = Iterable[str] T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: I) -> None: pass reveal_type(f(g)) # N: Revealed type is 'TypedDict('__main__.X', {'x': builtins.int})' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithNonTotal] from mypy_extensions import TypedDict from typing import TypeVar, Callable XY = TypedDict('XY', {'x': int, 'y': int}, total=False) YZ = TypedDict('YZ', {'y': int, 'z': int}, total=False) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XY, y: YZ) -> None: pass reveal_type(f(g)) # N: Revealed type is 'TypedDict({'x'?: builtins.int, 'y'?: builtins.int, 'z'?: builtins.int})' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithNonTotalAndTotal] from mypy_extensions import TypedDict from typing import TypeVar, Callable XY = TypedDict('XY', {'x': int}, total=False) YZ = TypedDict('YZ', {'y': int, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XY, y: YZ) -> 
None: pass reveal_type(f(g)) # N: Revealed type is 'TypedDict({'x'?: builtins.int, 'y': builtins.int, 'z': builtins.int})' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithIncompatibleNonTotalAndTotal] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable XY = TypedDict('XY', {'x': int, 'y': int}, total=False) YZ = TypedDict('YZ', {'y': int, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XY, y: YZ) -> None: pass reveal_type(f(g)) # N: Revealed type is '' [builtins fixtures/dict.pyi] -- Constraint Solver [case testTypedDictConstraintsAgainstIterable] from typing import TypeVar, Iterable from mypy_extensions import TypedDict T = TypeVar('T') def f(x: Iterable[T]) -> T: pass A = TypedDict('A', {'x': int}) a: A reveal_type(f(a)) # N: Revealed type is 'builtins.str*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- TODO: Figure out some way to trigger the ConstraintBuilderVisitor.visit_typeddict_type() path. 
-- Special Method: __getitem__ [case testCanGetItemOfTypedDictWithValidStringLiteralKey] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) reveal_type(p['type']) # N: Revealed type is 'builtins.str' reveal_type(p['x']) # N: Revealed type is 'builtins.int' reveal_type(p['y']) # N: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [case testCanGetItemOfTypedDictWithValidBytesOrUnicodeLiteralKey] # flags: --python-version 2.7 from mypy_extensions import TypedDict Cell = TypedDict('Cell', {'value': int}) c = Cell(value=42) reveal_type(c['value']) # N: Revealed type is 'builtins.int' reveal_type(c[u'value']) # N: Revealed type is 'builtins.int' [builtins_py2 fixtures/dict.pyi] [case testCannotGetItemOfTypedDictWithInvalidStringLiteralKey] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p: TaggedPoint p['z'] # E: TypedDict "TaggedPoint" has no key 'z' [builtins fixtures/dict.pyi] [case testTypedDictWithUnicodeName] # flags: --python-version 2.7 from mypy_extensions import TypedDict TaggedPoint = TypedDict(u'TaggedPoint', {'type': str, 'x': int, 'y': int}) [builtins fixtures/dict.pyi] [case testCannotGetItemOfAnonymousTypedDictWithInvalidStringLiteralKey] from typing import TypeVar from mypy_extensions import TypedDict A = TypedDict('A', {'x': str, 'y': int, 'z': str}) B = TypedDict('B', {'x': str, 'z': int}) C = TypedDict('C', {'x': str, 'y': int, 'z': int}) T = TypeVar('T') def join(x: T, y: T) -> T: return x ab = join(A(x='', y=1, z=''), B(x='', z=1)) ac = join(A(x='', y=1, z=''), C(x='', y=0, z=1)) ab['y'] # E: 'y' is not a valid TypedDict key; expected one of ('x') ac['a'] # E: 'a' is not a valid TypedDict key; expected one of ('x', 'y') [builtins fixtures/dict.pyi] [case testCannotGetItemOfTypedDictWithNonLiteralKey] from mypy_extensions import TypedDict from typing import Union TaggedPoint 
= TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) def get_coordinate(p: TaggedPoint, key: str) -> Union[str, int]: return p[key] # E: TypedDict key must be a string literal; expected one of ('type', 'x', 'y') [builtins fixtures/dict.pyi] -- Special Method: __setitem__ [case testCanSetItemOfTypedDictWithValidStringLiteralKeyAndCompatibleValueType] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) p['type'] = 'two_d' p['x'] = 1 [builtins fixtures/dict.pyi] [case testCannotSetItemOfTypedDictWithIncompatibleValueType] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) p['x'] = 'y' # E: Argument 2 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCannotSetItemOfTypedDictWithInvalidStringLiteralKey] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) p['z'] = 1 # E: TypedDict "TaggedPoint" has no key 'z' [builtins fixtures/dict.pyi] [case testCannotSetItemOfTypedDictWithNonLiteralKey] from mypy_extensions import TypedDict from typing import Union TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) def set_coordinate(p: TaggedPoint, key: str, value: int) -> None: p[key] = value # E: TypedDict key must be a string literal; expected one of ('type', 'x', 'y') [builtins fixtures/dict.pyi] -- isinstance [case testTypedDictWithIsInstanceAndIsSubclass] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int}) d: object if isinstance(d, D): # E: Cannot use isinstance() with TypedDict type reveal_type(d) # N: Revealed type is '__main__.D' issubclass(object, D) # E: Cannot use issubclass() with TypedDict type [builtins 
fixtures/isinstancelist.pyi] -- Scoping [case testTypedDictInClassNamespace] # https://github.com/python/mypy/pull/2553#issuecomment-266474341 from mypy_extensions import TypedDict class C: def f(self): A = TypedDict('A', {'x': int}) def g(self): A = TypedDict('A', {'y': int}) C.A # E: "Type[C]" has no attribute "A" [builtins fixtures/dict.pyi] [case testTypedDictInFunction] from mypy_extensions import TypedDict def f() -> None: A = TypedDict('A', {'x': int}) A # E: Name 'A' is not defined [builtins fixtures/dict.pyi] -- Union simplification / proper subtype checks [case testTypedDictUnionSimplification] from typing import TypeVar, Union, Any, cast from mypy_extensions import TypedDict T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass C = TypedDict('C', {'a': int}) D = TypedDict('D', {'a': int, 'b': int}) E = TypedDict('E', {'a': str}) F = TypedDict('F', {'x': int}) G = TypedDict('G', {'a': Any}) c = C(a=1) d = D(a=1, b=1) e = E(a='') f = F(x=1) g = G(a=cast(Any, 1)) # Work around #2610 reveal_type(u(d, d)) # N: Revealed type is 'TypedDict('__main__.D', {'a': builtins.int, 'b': builtins.int})' reveal_type(u(c, d)) # N: Revealed type is 'TypedDict('__main__.C', {'a': builtins.int})' reveal_type(u(d, c)) # N: Revealed type is 'TypedDict('__main__.C', {'a': builtins.int})' reveal_type(u(c, e)) # N: Revealed type is 'Union[TypedDict('__main__.E', {'a': builtins.str}), TypedDict('__main__.C', {'a': builtins.int})]' reveal_type(u(e, c)) # N: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), TypedDict('__main__.E', {'a': builtins.str})]' reveal_type(u(c, f)) # N: Revealed type is 'Union[TypedDict('__main__.F', {'x': builtins.int}), TypedDict('__main__.C', {'a': builtins.int})]' reveal_type(u(f, c)) # N: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), TypedDict('__main__.F', {'x': builtins.int})]' reveal_type(u(c, g)) # N: Revealed type is 'Union[TypedDict('__main__.G', {'a': Any}), TypedDict('__main__.C', 
{'a': builtins.int})]' reveal_type(u(g, c)) # N: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), TypedDict('__main__.G', {'a': Any})]' [builtins fixtures/dict.pyi] [case testTypedDictUnionSimplification2] from typing import TypeVar, Union, Mapping, Any from mypy_extensions import TypedDict T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass C = TypedDict('C', {'a': int, 'b': int}) c = C(a=1, b=1) m_s_o: Mapping[str, object] m_s_s: Mapping[str, str] m_i_i: Mapping[int, int] m_s_a: Mapping[str, Any] reveal_type(u(c, m_s_o)) # N: Revealed type is 'typing.Mapping*[builtins.str, builtins.object]' reveal_type(u(m_s_o, c)) # N: Revealed type is 'typing.Mapping*[builtins.str, builtins.object]' reveal_type(u(c, m_s_s)) # N: Revealed type is 'Union[typing.Mapping*[builtins.str, builtins.str], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]' reveal_type(u(c, m_i_i)) # N: Revealed type is 'Union[typing.Mapping*[builtins.int, builtins.int], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]' reveal_type(u(c, m_s_a)) # N: Revealed type is 'Union[typing.Mapping*[builtins.str, Any], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]' [builtins fixtures/dict.pyi] -- Use dict literals [case testTypedDictDictLiterals] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) def f(p: Point) -> None: if int(): p = {'x': 2, 'y': 3} p = {'x': 2} # E: Key 'y' missing for TypedDict "Point" p = dict(x=2, y=3) f({'x': 1, 'y': 3}) f({'x': 1, 'y': 'z'}) # E: Incompatible types (expression has type "str", TypedDict item "y" has type "int") f(dict(x=1, y=3)) f(dict(x=1, y=3, z=4)) # E: Extra key 'z' for TypedDict "Point" f(dict(x=1, y=3, z=4, a=5)) # E: Extra keys ('z', 'a') for TypedDict "Point" [builtins fixtures/dict.pyi] [case testTypedDictExplicitTypes] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p1a: Point = {'x': 'hi'} # E: Key 
'y' missing for TypedDict "Point" p1b: Point = {} # E: Keys ('x', 'y') missing for TypedDict "Point" p2: Point p2 = dict(x='bye') # E: Key 'y' missing for TypedDict "Point" p3 = Point(x=1, y=2) if int(): p3 = {'x': 'hi'} # E: Key 'y' missing for TypedDict "Point" p4: Point = {'x': 1, 'y': 2} [builtins fixtures/dict.pyi] [case testCannotCreateAnonymousTypedDictInstanceUsingDictLiteralWithExtraItems] from mypy_extensions import TypedDict from typing import TypeVar A = TypedDict('A', {'x': int, 'y': int}) B = TypedDict('B', {'x': int, 'y': str}) T = TypeVar('T') def join(x: T, y: T) -> T: return x ab = join(A(x=1, y=1), B(x=1, y='')) if int(): ab = {'x': 1, 'z': 1} # E: Expected TypedDict key 'x' but found keys ('x', 'z') [builtins fixtures/dict.pyi] [case testCannotCreateAnonymousTypedDictInstanceUsingDictLiteralWithMissingItems] from mypy_extensions import TypedDict from typing import TypeVar A = TypedDict('A', {'x': int, 'y': int, 'z': int}) B = TypedDict('B', {'x': int, 'y': int, 'z': str}) T = TypeVar('T') def join(x: T, y: T) -> T: return x ab = join(A(x=1, y=1, z=1), B(x=1, y=1, z='')) if int(): ab = {} # E: Expected TypedDict keys ('x', 'y') but found no keys [builtins fixtures/dict.pyi] -- Other TypedDict methods [case testTypedDictGetMethod] # flags: --strict-optional from mypy_extensions import TypedDict class A: pass D = TypedDict('D', {'x': int, 'y': str}) d: D reveal_type(d.get('x')) # N: Revealed type is 'Union[builtins.int, None]' reveal_type(d.get('y')) # N: Revealed type is 'Union[builtins.str, None]' reveal_type(d.get('x', A())) # N: Revealed type is 'Union[builtins.int, __main__.A]' reveal_type(d.get('x', 1)) # N: Revealed type is 'builtins.int' reveal_type(d.get('y', None)) # N: Revealed type is 'Union[builtins.str, None]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictGetMethodTypeContext] # flags: --strict-optional from typing import List from mypy_extensions import TypedDict class A: pass D = TypedDict('D', 
{'x': List[int], 'y': int}) d: D reveal_type(d.get('x', [])) # N: Revealed type is 'builtins.list[builtins.int]' d.get('x', ['x']) # E: List item 0 has incompatible type "str"; expected "int" a = [''] reveal_type(d.get('x', a)) # N: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str*]]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictGetMethodInvalidArgs] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}) d: D d.get() # E: All overload variants of "get" of "Mapping" require at least one argument \ # N: Possible overload variants: \ # N: def get(self, k: str) -> object \ # N: def [V] get(self, k: str, default: object) -> object d.get('x', 1, 2) # E: No overload variant of "get" of "Mapping" matches argument types "str", "int", "int" \ # N: Possible overload variants: \ # N: def get(self, k: str) -> object \ # N: def [V] get(self, k: str, default: Union[int, V]) -> object x = d.get('z') # E: TypedDict "D" has no key 'z' reveal_type(x) # N: Revealed type is 'Any' s = '' y = d.get(s) reveal_type(y) # N: Revealed type is 'builtins.object*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictMissingMethod] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}) d: D d.bad(1) # E: "D" has no attribute "bad" [builtins fixtures/dict.pyi] [case testTypedDictChainedGetMethodWithDictFallback] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}) E = TypedDict('E', {'d': D}) p = E(d=D(x=0, y='')) reveal_type(p.get('d', {'x': 1, 'y': ''})) # N: Revealed type is 'TypedDict('__main__.D', {'x': builtins.int, 'y': builtins.str})' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictGetDefaultParameterStillTypeChecked] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) p.get('x', 1 + 'y') 
# E: Unsupported operand types for + ("int" and "str") [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictChainedGetWithEmptyDictDefault] # flags: --strict-optional from mypy_extensions import TypedDict C = TypedDict('C', {'a': int}) D = TypedDict('D', {'x': C, 'y': str}) d: D reveal_type(d.get('x', {})) \ # N: Revealed type is 'TypedDict('__main__.C', {'a'?: builtins.int})' reveal_type(d.get('x', None)) \ # N: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), None]' reveal_type(d.get('x', {}).get('a')) # N: Revealed type is 'Union[builtins.int, None]' reveal_type(d.get('x', {})['a']) # N: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- Totality (the "total" keyword argument) [case testTypedDictWithTotalTrue] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=True) d: D reveal_type(d) \ # N: Revealed type is 'TypedDict('__main__.D', {'x': builtins.int, 'y': builtins.str})' [builtins fixtures/dict.pyi] [case testTypedDictWithInvalidTotalArgument] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}, total=0) # E: TypedDict() "total" argument must be True or False B = TypedDict('B', {'x': int}, total=bool) # E: TypedDict() "total" argument must be True or False C = TypedDict('C', {'x': int}, x=False) # E: Unexpected keyword argument "x" for "TypedDict" D = TypedDict('D', {'x': int}, False) # E: Unexpected arguments to TypedDict() [builtins fixtures/dict.pyi] [case testTypedDictWithTotalFalse] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=False) def f(d: D) -> None: reveal_type(d) # N: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})' f({}) f({'x': 1}) f({'y': ''}) f({'x': 1, 'y': ''}) f({'x': 1, 'z': ''}) # E: Extra key 'z' for TypedDict "D" f({'x': ''}) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") 
[builtins fixtures/dict.pyi] [case testTypedDictConstructorWithTotalFalse] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=False) def f(d: D) -> None: pass reveal_type(D()) # N: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})' reveal_type(D(x=1)) # N: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})' f(D(y='')) f(D(x=1, y='')) f(D(x=1, z='')) # E: Extra key 'z' for TypedDict "D" f(D(x='')) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [builtins fixtures/dict.pyi] [case testTypedDictIndexingWithNonRequiredKey] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=False) d: D reveal_type(d['x']) # N: Revealed type is 'builtins.int' reveal_type(d['y']) # N: Revealed type is 'builtins.str' reveal_type(d.get('x')) # N: Revealed type is 'builtins.int' reveal_type(d.get('y')) # N: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictSubtypingWithTotalFalse] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'x': int}, total=False) C = TypedDict('C', {'x': int, 'y': str}, total=False) def fa(a: A) -> None: pass def fb(b: B) -> None: pass def fc(c: C) -> None: pass a: A b: B c: C fb(b) fc(c) fb(c) fb(a) # E: Argument 1 to "fb" has incompatible type "A"; expected "B" fa(b) # E: Argument 1 to "fa" has incompatible type "B"; expected "A" fc(b) # E: Argument 1 to "fc" has incompatible type "B"; expected "C" [builtins fixtures/dict.pyi] [case testTypedDictJoinWithTotalFalse] from typing import TypeVar from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'x': int}, total=False) C = TypedDict('C', {'x': int, 'y': str}, total=False) T = TypeVar('T') def j(x: T, y: T) -> T: return x a: A b: B c: C reveal_type(j(a, b)) \ # N: Revealed type is 'TypedDict({})' reveal_type(j(b, 
b)) \ # N: Revealed type is 'TypedDict({'x'?: builtins.int})' reveal_type(j(c, c)) \ # N: Revealed type is 'TypedDict({'x'?: builtins.int, 'y'?: builtins.str})' reveal_type(j(b, c)) \ # N: Revealed type is 'TypedDict({'x'?: builtins.int})' reveal_type(j(c, b)) \ # N: Revealed type is 'TypedDict({'x'?: builtins.int})' [builtins fixtures/dict.pyi] [case testTypedDictClassWithTotalArgument] from mypy_extensions import TypedDict class D(TypedDict, total=False): x: int y: str d: D reveal_type(d) # N: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})' [builtins fixtures/dict.pyi] [case testTypedDictClassWithInvalidTotalArgument] from mypy_extensions import TypedDict class D(TypedDict, total=1): # E: Value of "total" must be True or False x: int class E(TypedDict, total=bool): # E: Value of "total" must be True or False x: int class F(TypedDict, total=xyz): # E: Value of "total" must be True or False \ # E: Name 'xyz' is not defined x: int [builtins fixtures/dict.pyi] [case testTypedDictClassInheritanceWithTotalArgument] from mypy_extensions import TypedDict class A(TypedDict): x: int class B(TypedDict, A, total=False): y: int class C(TypedDict, B, total=True): z: str c: C reveal_type(c) # N: Revealed type is 'TypedDict('__main__.C', {'x': builtins.int, 'y'?: builtins.int, 'z': builtins.str})' [builtins fixtures/dict.pyi] [case testNonTotalTypedDictInErrorMessages] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}, total=False) B = TypedDict('B', {'x': int, 'z': str, 'a': int}, total=False) C = TypedDict('C', {'x': int, 'z': str, 'a': str}, total=False) a: A b: B c: C def f(a: A) -> None: pass l = [a, b] # Join generates an anonymous TypedDict f(l) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x'?: int})]"; expected "A" ll = [b, c] f(ll) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x'?: int, 'z'?: str})]"; expected "A" [builtins fixtures/dict.pyi] [case 
testNonTotalTypedDictCanBeEmpty] # flags: --warn-unreachable from mypy_extensions import TypedDict class A(TypedDict): ... class B(TypedDict, total=False): x: int a: A = {} b: B = {} if not a: reveal_type(a) # N: Revealed type is 'TypedDict('__main__.A', {})' if not b: reveal_type(b) # N: Revealed type is 'TypedDict('__main__.B', {'x'?: builtins.int})' [builtins fixtures/dict.pyi] -- Create Type (Errors) [case testCannotCreateTypedDictTypeWithTooFewArguments] from mypy_extensions import TypedDict Point = TypedDict('Point') # E: Too few arguments for TypedDict() [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithTooManyArguments] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}, dict) # E: Unexpected arguments to TypedDict() [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidName] from mypy_extensions import TypedDict Point = TypedDict(dict, {'x': int, 'y': int}) # E: TypedDict() expects a string literal as the first argument [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidItems] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x'}) # E: TypedDict() expects a dictionary literal as the second argument [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithKwargs] from mypy_extensions import TypedDict d = {'x': int, 'y': int} Point = TypedDict('Point', {**d}) # E: Invalid TypedDict() field name [builtins fixtures/dict.pyi] -- NOTE: The following code works at runtime but is not yet supported by mypy. -- Keyword arguments may potentially be supported in the future. 
[case testCannotCreateTypedDictTypeWithNonpositionalArgs] from mypy_extensions import TypedDict Point = TypedDict(typename='Point', fields={'x': int, 'y': int}) # E: Unexpected arguments to TypedDict() [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidItemName] from mypy_extensions import TypedDict Point = TypedDict('Point', {int: int, int: int}) # E: Invalid TypedDict() field name [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidItemType] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': 1, 'y': 1}) # E: Invalid type: try using Literal[1] instead? [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidName] from mypy_extensions import TypedDict X = TypedDict('Y', {'x': int}) # E: First argument 'Y' to TypedDict() does not match variable name 'X' [builtins fixtures/dict.pyi] -- Overloading [case testTypedDictOverloading] from typing import overload, Iterable from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) @overload def f(x: Iterable[str]) -> str: ... @overload def f(x: int) -> int: ... def f(x): pass a: A reveal_type(f(a)) # N: Revealed type is 'builtins.str' reveal_type(f(1)) # N: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverloading2] from typing import overload, Iterable from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) @overload def f(x: Iterable[int]) -> None: ... @overload def f(x: int) -> None: ... 
def f(x): pass a: A f(a) [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] main:13: error: Argument 1 to "f" has incompatible type "A"; expected "Iterable[int]" main:13: note: Following member(s) of "A" have conflicts: main:13: note: Expected: main:13: note: def __iter__(self) -> Iterator[int] main:13: note: Got: main:13: note: def __iter__(self) -> Iterator[str] [case testTypedDictOverloading3] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) @overload def f(x: str) -> None: ... @overload def f(x: int) -> None: ... def f(x): pass a: A f(a) # E: No overload variant of "f" matches argument type "A" \ # N: Possible overload variants: \ # N: def f(x: str) -> None \ # N: def f(x: int) -> None [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverloading4] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'x': str}) @overload def f(x: A) -> int: ... @overload def f(x: int) -> str: ... def f(x): pass a: A b: B reveal_type(f(a)) # N: Revealed type is 'builtins.int' reveal_type(f(1)) # N: Revealed type is 'builtins.str' f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverloading5] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'y': str}) C = TypedDict('C', {'y': int}) @overload def f(x: A) -> None: ... @overload def f(x: B) -> None: ... def f(x): pass a: A b: B c: C f(a) f(b) f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "A" [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverloading6] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'y': str}) @overload def f(x: A) -> int: ... @overload def f(x: B) -> str: ... 
def f(x): pass a: A b: B reveal_type(f(a)) # N: Revealed type is 'builtins.int' reveal_type(f(b)) # N: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- Special cases [case testForwardReferenceInTypedDict] from typing import Mapping from mypy_extensions import TypedDict X = TypedDict('X', {'b': 'B', 'c': 'C'}) class B: pass class C(B): pass x: X reveal_type(x) # N: Revealed type is 'TypedDict('__main__.X', {'b': __main__.B, 'c': __main__.C})' m1: Mapping[str, object] = x m2: Mapping[str, B] = x # E: Incompatible types in assignment (expression has type "X", variable has type "Mapping[str, B]") [builtins fixtures/dict.pyi] [case testForwardReferenceInClassTypedDict] from typing import Mapping from mypy_extensions import TypedDict class X(TypedDict): b: 'B' c: 'C' class B: pass class C(B): pass x: X reveal_type(x) # N: Revealed type is 'TypedDict('__main__.X', {'b': __main__.B, 'c': __main__.C})' m1: Mapping[str, object] = x m2: Mapping[str, B] = x # E: Incompatible types in assignment (expression has type "X", variable has type "Mapping[str, B]") [builtins fixtures/dict.pyi] [case testForwardReferenceToTypedDictInTypedDict] from typing import Mapping from mypy_extensions import TypedDict X = TypedDict('X', {'a': 'A'}) A = TypedDict('A', {'b': int}) x: X reveal_type(x) # N: Revealed type is 'TypedDict('__main__.X', {'a': TypedDict('__main__.A', {'b': builtins.int})})' reveal_type(x['a']['b']) # N: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [case testSelfRecursiveTypedDictInheriting] from mypy_extensions import TypedDict class MovieBase(TypedDict): name: str year: int class Movie(MovieBase): director: 'Movie' # E: Cannot resolve name "Movie" (possible cyclic definition) m: Movie reveal_type(m['director']['name']) # N: Revealed type is 'Any' [builtins fixtures/dict.pyi] [out] [case testSubclassOfRecursiveTypedDict] from typing import List from mypy_extensions import TypedDict class Command(TypedDict): 
subcommands: List['Command'] # E: Cannot resolve name "Command" (possible cyclic definition) class HelpCommand(Command): pass hc = HelpCommand(subcommands=[]) reveal_type(hc) # N: Revealed type is 'TypedDict('__main__.HelpCommand', {'subcommands': builtins.list[Any]})' [builtins fixtures/list.pyi] [out] [case testTypedDictForwardAsUpperBound] from typing import TypeVar, Generic from mypy_extensions import TypedDict T = TypeVar('T', bound='M') class G(Generic[T]): x: T yb: G[int] # E: Type argument "builtins.int" of "G" must be a subtype of "TypedDict('__main__.M', {'x': builtins.int})" yg: G[M] z: int = G[M]().x['x'] class M(TypedDict): x: int [builtins fixtures/dict.pyi] [out] [case testTypedDictWithImportCycleForward] import a [file a.py] from mypy_extensions import TypedDict from b import f N = TypedDict('N', {'a': str}) [file b.py] import a def f(x: a.N) -> None: reveal_type(x) reveal_type(x['a']) [builtins fixtures/dict.pyi] [out] tmp/b.py:4: note: Revealed type is 'TypedDict('a.N', {'a': builtins.str})' tmp/b.py:5: note: Revealed type is 'builtins.str' [case testTypedDictImportCycle] import b [file a.py] class C: pass from b import tp x: tp reveal_type(x['x']) # N: Revealed type is 'builtins.int' reveal_type(tp) # N: Revealed type is 'def () -> b.tp' tp(x='no') # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [file b.py] from a import C from mypy_extensions import TypedDict tp = TypedDict('tp', {'x': int}) [builtins fixtures/dict.pyi] [out] [case testTypedDictAsStarStarArg] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}) class B: pass def f1(x: int, y: str) -> None: ... def f2(x: int, y: int) -> None: ... def f3(x: B, y: str) -> None: ... 
def f4(x: int) -> None: pass def f5(x: int, y: str, z: int) -> None: pass def f6(x: int, z: str) -> None: pass a: A f1(**a) f2(**a) # E: Argument "y" to "f2" has incompatible type "str"; expected "int" f3(**a) # E: Argument "x" to "f3" has incompatible type "int"; expected "B" f4(**a) # E: Extra argument "y" from **args for "f4" f5(**a) # E: Too few arguments for "f5" f6(**a) # E: Extra argument "y" from **args for "f6" f1(1, **a) # E: "f1" gets multiple values for keyword argument "x" [case testTypedDictAsStarStarArgConstraints] from typing import TypeVar, Union from mypy_extensions import TypedDict T = TypeVar('T') S = TypeVar('S') def f1(x: T, y: S) -> Union[T, S]: ... A = TypedDict('A', {'y': int, 'x': str}) a: A reveal_type(f1(**a)) # N: Revealed type is 'Union[builtins.str*, builtins.int*]' [case testTypedDictAsStarStarArgCalleeKwargs] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}) B = TypedDict('B', {'x': str, 'y': str}) def f(**kwargs: str) -> None: ... def g(x: int, **kwargs: str) -> None: ... a: A b: B f(**a) # E: Argument 1 to "f" has incompatible type "**A"; expected "str" f(**b) g(**a) g(**b) # E: Argument "x" to "g" has incompatible type "str"; expected "int" g(1, **a) # E: "g" gets multiple values for keyword argument "x" g(1, **b) # E: "g" gets multiple values for keyword argument "x" \ # E: Argument "x" to "g" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testTypedDictAsStarStarTwice] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}) B = TypedDict('B', {'z': bytes}) C = TypedDict('C', {'x': str, 'z': bytes}) def f1(x: int, y: str, z: bytes) -> None: ... def f2(x: int, y: float, z: bytes) -> None: ... def f3(x: int, y: str, z: float) -> None: ... 
a: A b: B c: C f1(**a, **b) f1(**b, **a) f2(**a, **b) # E: Argument "y" to "f2" has incompatible type "str"; expected "float" f3(**a, **b) # E: Argument "z" to "f3" has incompatible type "bytes"; expected "float" f3(**b, **a) # E: Argument "z" to "f3" has incompatible type "bytes"; expected "float" f1(**a, **c) # E: "f1" gets multiple values for keyword argument "x" \ # E: Argument "x" to "f1" has incompatible type "str"; expected "int" f1(**c, **a) # E: "f1" gets multiple values for keyword argument "x" \ # E: Argument "x" to "f1" has incompatible type "str"; expected "int" [case testTypedDictNonMappingMethods] from typing import List from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': List[int]}) a: A reveal_type(a.copy()) # N: Revealed type is 'TypedDict('__main__.A', {'x': builtins.int, 'y': builtins.list[builtins.int]})' a.has_key('x') # E: "A" has no attribute "has_key" # TODO: Better error message a.clear() # E: "A" has no attribute "clear" a.setdefault('invalid', 1) # E: TypedDict "A" has no key 'invalid' reveal_type(a.setdefault('x', 1)) # N: Revealed type is 'builtins.int' reveal_type(a.setdefault('y', [])) # N: Revealed type is 'builtins.list[builtins.int]' a.setdefault('y', '') # E: Argument 2 to "setdefault" of "TypedDict" has incompatible type "str"; expected "List[int]" x = '' a.setdefault(x, 1) # E: Expected TypedDict key to be string literal alias = a.setdefault alias(x, 1) # E: Argument 1 has incompatible type "str"; expected "NoReturn" a.update({}) a.update({'x': 1}) a.update({'x': ''}) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") a.update({'x': 1, 'y': []}) a.update({'x': 1, 'y': [1]}) a.update({'z': 1}) # E: Unexpected TypedDict key 'z' a.update({'z': 1, 'zz': 1}) # E: Unexpected TypedDict keys ('z', 'zz') a.update({'z': 1, 'x': 1}) # E: Expected TypedDict key 'x' but found keys ('z', 'x') d = {'x': 1} a.update(d) # E: Argument 1 to "update" of "TypedDict" has incompatible type 
"Dict[str, int]"; expected "TypedDict({'x'?: int, 'y'?: List[int]})" [builtins fixtures/dict.pyi] [case testTypedDictNonMappingMethods_python2] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) a = A(x=1) reveal_type(a.copy()) # N: Revealed type is 'TypedDict('__main__.A', {'x': builtins.int})' reveal_type(a.has_key('y')) # N: Revealed type is 'builtins.bool' a.clear() # E: "A" has no attribute "clear" [builtins_py2 fixtures/dict.pyi] [case testTypedDictPopMethod] from typing import List from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': List[int]}, total=False) B = TypedDict('B', {'x': int}) a: A b: B reveal_type(a.pop('x')) # N: Revealed type is 'builtins.int' reveal_type(a.pop('y', [])) # N: Revealed type is 'builtins.list[builtins.int]' reveal_type(a.pop('x', '')) # N: Revealed type is 'Union[builtins.int, Literal['']?]' reveal_type(a.pop('x', (1, 2))) # N: Revealed type is 'Union[builtins.int, Tuple[Literal[1]?, Literal[2]?]]' a.pop('invalid', '') # E: TypedDict "A" has no key 'invalid' b.pop('x') # E: Key 'x' of TypedDict "B" cannot be deleted x = '' b.pop(x) # E: Expected TypedDict key to be string literal pop = b.pop pop('x') # E: Argument 1 has incompatible type "str"; expected "NoReturn" pop('invalid') # E: Argument 1 has incompatible type "str"; expected "NoReturn" [builtins fixtures/dict.pyi] [case testTypedDictDel] from typing import List from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': List[int]}, total=False) B = TypedDict('B', {'x': int}) a: A b: B del a['x'] del a['invalid'] # E: TypedDict "A" has no key 'invalid' del b['x'] # E: Key 'x' of TypedDict "B" cannot be deleted s = '' del a[s] # E: Expected TypedDict key to be string literal del b[s] # E: Expected TypedDict key to be string literal alias = b.__delitem__ alias('x') # E: Argument 1 has incompatible type "str"; expected "NoReturn" alias(s) # E: Argument 1 has incompatible type "str"; expected "NoReturn" [builtins 
fixtures/dict.pyi] [case testPluginUnionsOfTypedDicts] from typing import Union from mypy_extensions import TypedDict class TDA(TypedDict): a: int b: str class TDB(TypedDict): a: int b: int c: int td: Union[TDA, TDB] reveal_type(td.get('a')) # N: Revealed type is 'builtins.int' reveal_type(td.get('b')) # N: Revealed type is 'Union[builtins.str, builtins.int]' reveal_type(td.get('c')) # E: TypedDict "TDA" has no key 'c' \ # N: Revealed type is 'Union[Any, builtins.int]' reveal_type(td['a']) # N: Revealed type is 'builtins.int' reveal_type(td['b']) # N: Revealed type is 'Union[builtins.str, builtins.int]' reveal_type(td['c']) # N: Revealed type is 'Union[Any, builtins.int]' \ # E: TypedDict "TDA" has no key 'c' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testPluginUnionsOfTypedDictsNonTotal] from typing import Union from mypy_extensions import TypedDict class TDA(TypedDict, total=False): a: int b: str class TDB(TypedDict, total=False): a: int b: int c: int td: Union[TDA, TDB] reveal_type(td.pop('a')) # N: Revealed type is 'builtins.int' reveal_type(td.pop('b')) # N: Revealed type is 'Union[builtins.str, builtins.int]' reveal_type(td.pop('c')) # E: TypedDict "TDA" has no key 'c' \ # N: Revealed type is 'Union[Any, builtins.int]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testCanCreateTypedDictWithTypingExtensions] # flags: --python-version 3.6 from typing_extensions import TypedDict class Point(TypedDict): x: int y: int p = Point(x=42, y=1337) reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithTypingProper] # flags: --python-version 3.8 from typing import TypedDict class Point(TypedDict): x: int y: int p = Point(x=42, y=1337) reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case 
testTypedDictOptionalUpdate] from typing import Union from mypy_extensions import TypedDict class A(TypedDict): x: int d: Union[A, None] d.update({'x': 1}) [builtins fixtures/dict.pyi] [case testTypedDictOverlapWithDict] # mypy: strict-equality from typing import TypedDict, Dict class Config(TypedDict): a: str b: str x: Dict[str, str] y: Config x == y [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictNonOverlapping] # mypy: strict-equality from typing import TypedDict, Dict class Config(TypedDict): a: str b: int x: Dict[str, str] y: Config x == y # E: Non-overlapping equality check (left operand type: "Dict[str, str]", right operand type: "Config") [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictNonTotal] # mypy: strict-equality from typing import TypedDict, Dict class Config(TypedDict, total=False): a: str b: int x: Dict[str, str] y: Config x == y [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictNonTotalNonOverlapping] # mypy: strict-equality from typing import TypedDict, Dict class Config(TypedDict, total=False): a: int b: int x: Dict[str, str] y: Config x == y # E: Non-overlapping equality check (left operand type: "Dict[str, str]", right operand type: "Config") [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictEmpty] # mypy: strict-equality from typing import TypedDict class Config(TypedDict): a: str b: str x: Config x == {} # E: Non-overlapping equality check (left operand type: "Config", right operand type: "Dict[, ]") [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictNonTotalEmpty] # mypy: strict-equality from typing import TypedDict class Config(TypedDict, total=False): a: str b: str x: Config x == {} [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictNonStrKey] # mypy: 
strict-equality from typing import TypedDict, Dict, Union class Config(TypedDict): a: str b: str x: Config y: Dict[Union[str, int], str] x == y [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictOverload] from typing import overload, TypedDict, Dict class Map(TypedDict): x: int y: str @overload def func(x: Map) -> int: ... @overload def func(x: Dict[str, str]) -> str: ... def func(x): pass [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictOverloadBad] from typing import overload, TypedDict, Dict class Map(TypedDict, total=False): x: int y: str @overload def func(x: Map) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def func(x: Dict[str, str]) -> str: ... def func(x): pass [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictOverloadMappingBad] from typing import overload, TypedDict, Mapping class Map(TypedDict, total=False): x: int y: str @overload def func(x: Map) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def func(x: Mapping[str, str]) -> str: ... def func(x): pass [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverlapWithDictOverloadNonStrKey] from typing import overload, TypedDict, Dict class Map(TypedDict): x: str y: str @overload def func(x: Map) -> int: ... @overload def func(x: Dict[int, str]) -> str: ... 
def func(x): pass [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictIsInstance] from typing import TypedDict, Union class User(TypedDict): id: int name: str u: Union[str, User] u2: User if isinstance(u, dict): reveal_type(u) # N: Revealed type is 'TypedDict('__main__.User', {'id': builtins.int, 'name': builtins.str})' else: reveal_type(u) # N: Revealed type is 'builtins.str' assert isinstance(u2, dict) reveal_type(u2) # N: Revealed type is 'TypedDict('__main__.User', {'id': builtins.int, 'name': builtins.str})' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictIsInstanceABCs] from typing import TypedDict, Union, Mapping, Iterable class User(TypedDict): id: int name: str u: Union[int, User] u2: User if isinstance(u, Iterable): reveal_type(u) # N: Revealed type is 'TypedDict('__main__.User', {'id': builtins.int, 'name': builtins.str})' else: reveal_type(u) # N: Revealed type is 'builtins.int' assert isinstance(u2, Mapping) reveal_type(u2) # N: Revealed type is 'TypedDict('__main__.User', {'id': builtins.int, 'name': builtins.str})' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictLiteralTypeKeyInCreation] from typing import TypedDict, Final, Literal class Value(TypedDict): num: int num: Final = 'num' v: Value = {num: 5} v = {num: ''} # E: Incompatible types (expression has type "str", TypedDict item "num" has type "int") bad: Final = 2 v = {bad: 3} # E: Expected TypedDict key to be string literal union: Literal['num', 'foo'] v = {union: 2} # E: Expected TypedDict key to be string literal num2: Literal['num'] v = {num2: 2} bad2: Literal['bad'] v = {bad2: 2} # E: Extra key 'bad' for TypedDict "Value" [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testCannotUseFinalDecoratorWithTypedDict] from typing import TypedDict from typing_extensions import final @final # E: @final cannot be used with TypedDict class DummyTypedDict(TypedDict): int_val: int 
float_val: float str_val: str [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictDoubleForwardClass] from mypy_extensions import TypedDict from typing import Any, List class Foo(TypedDict): bar: Bar baz: Bar Bar = List[Any] foo: Foo reveal_type(foo['bar']) # N: Revealed type is 'builtins.list[Any]' reveal_type(foo['baz']) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictDoubleForwardFunc] from mypy_extensions import TypedDict from typing import Any, List Foo = TypedDict('Foo', {'bar': Bar, 'baz': Bar}) Bar = List[Any] foo: Foo reveal_type(foo['bar']) # N: Revealed type is 'builtins.list[Any]' reveal_type(foo['baz']) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictDoubleForwardMixed] from mypy_extensions import TypedDict from typing import Any, List Bar = List[Any] class Foo(TypedDict): foo: Toto bar: Bar baz: Bar Toto = int foo: Foo reveal_type(foo['foo']) # N: Revealed type is 'builtins.int' reveal_type(foo['bar']) # N: Revealed type is 'builtins.list[Any]' reveal_type(foo['baz']) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAssignTypedDictAsAttribute] from typing import TypedDict class A: def __init__(self) -> None: self.b = TypedDict('b', {'x': int, 'y': str}) # E: TypedDict type as attribute is not supported reveal_type(A().b) # N: Revealed type is 'Any' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] mypy-0.761/test-data/unit/check-typevar-values.test0000644€tŠÔÚ€2›s®0000004136013576752246026534 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type variables with values restriction. 
[case testCallGenericFunctionWithTypeVarValueRestriction] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') f(object()) # E: Value of type variable "T" of "f" cannot be "object" [case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext] from typing import TypeVar, List T = TypeVar('T', int, str) def f(x: T) -> List[T]: pass i = [1] s = ['x'] o = [object()] if int(): i = f(1) s = f('') o = f(1) \ # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[object]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] [case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs] from typing import TypeVar, Any, cast T = TypeVar('T', int, str) def f(x: T) -> None: pass f(cast(Any, object())) [out] [case testCallGenericFunctionWithTypeVarValueRestrictionInDynamicFunc] from typing import TypeVar, Any T = TypeVar('T', int, str) def f(x: T) -> None: pass def g(): f(object()) [out] [case testCallGenericFunctionWithTypeVarValueRestrictionUsingSubtype] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> None: pass class S(str): pass f(S()) [out] [case testCheckGenericFunctionBodyWithTypeVarValues] from typing import TypeVar class A: def f(self, x: int) -> A: return self class B: def f(self, x: int) -> B: return self AB = TypeVar('AB', A, B) def f(x: AB) -> AB: x = x.f(1) return x.f(1) [case testCheckGenericFunctionBodyWithTypeVarValues2] from typing import TypeVar class A: def f(self) -> A: return A() def g(self) -> B: return B() class B: def f(self) -> A: return A() def g(self) -> B: return B() AB = TypeVar('AB', A, B) def f(x: AB) -> AB: return x.f() # Error def g(x: AB) -> AB: return x.g() # Error [out] main:10: error: Incompatible return value type (got "A", expected "B") main:12: error: Incompatible return value type (got "B", 
expected "A") [case testTypeInferenceAndTypeVarValues] from typing import TypeVar class A: def f(self) -> A: return self def g(self) -> B: return B() class B: def f(self) -> B: return self def g(self) -> B: return B() AB = TypeVar('AB', A, B) def f(x: AB) -> AB: y = x if y: return y.f() else: return y.g() # E: Incompatible return value type (got "B", expected "A") [out] [case testTypeDeclaredBasedOnTypeVarWithValues] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: a = None # type: T b = None # type: T if 1: a = x b = x a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") return x [out] [case testIsinstanceAndTypeVarValues] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): return 2 return x def g(x: T) -> T: if isinstance(x, str): return '' return x def h(x: T) -> T: if isinstance(x, int): return '' # E: Incompatible return value type (got "str", expected "int") return x [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndTypeVarValues2] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): return 2 else: return '' def g(x: T) -> T: if isinstance(x, int): return '' # E: Incompatible return value type (got "str", expected "int") else: return 2 # E: Incompatible return value type (got "int", expected "str") return x [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndTypeVarValues3] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): y = 1 else: y = '' return y [builtins fixtures/isinstance.pyi] [case testIsinstanceAndTypeVarValues4] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): y = 1 else: y = object() return y # E: Incompatible return value type (got "object", expected "str") [builtins 
fixtures/isinstance.pyi] [out] [case testIsinstanceAndTypeVarValues5] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): y = object() else: y = '' return y # E: Incompatible return value type (got "object", expected "int") [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceWithUserDefinedTypeAndTypeVarValues] from typing import TypeVar class A: pass class B: pass T = TypeVar('T', A, B) def f(x: T) -> None: y = x if isinstance(x, A): # This is only checked when x is A, since A and B are not considered overlapping. x = y x = A() else: x = B() x = y x.foo() # E: "B" has no attribute "foo" S = TypeVar('S', int, str) def g(x: S) -> None: y = x if isinstance(x, int): x = y [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceWithUserDefinedTypeAndTypeVarValues2] from typing import TypeVar class S(str): pass T = TypeVar('T', S, int) def f(x: T) -> None: y = x if isinstance(x, S): # This is checked only when type of x is str. x = y x = S() x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "S") else: x = y x = 1 x = S() # E: Incompatible types in assignment (expression has type "S", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testTypeVarValuesAndNestedCalls] from typing import TypeVar T = TypeVar('T', int, str) def f(m: T) -> int: pass def h(x: int) -> int: pass def g(a: T) -> None: h(f(a)) [out] [case testGenericTypeWithTypevarValues] from typing import TypeVar, Generic, Any X = TypeVar('X', int, str) class A(Generic[X]): pass a = None # type: A[int] b = None # type: A[str] d = None # type: A[object] # E: Value of type variable "X" of "A" cannot be "object" c = None # type: A[Any] [case testConstructGenericTypeWithTypevarValuesAndTypeInference] from typing import TypeVar, Generic, Any, cast X = TypeVar('X', int, str) class A(Generic[X]): def __init__(self, x: X) -> None: pass A(1) A('x') A(cast(Any, object())) A(object()) # E: Value of type 
variable "X" of "A" cannot be "object" [case testGenericTypeWithTypevarValuesAndTypevarArgument] from typing import TypeVar, Generic class C: pass X = TypeVar('X', int, str) Y = TypeVar('Y', int, C) Z = TypeVar('Z') class D(Generic[X]): def __init__(self, x: X) -> None: pass def f(x: X) -> None: a = None # type: D[X] def g(x: Y) -> None: a = None # type: D[Y] def h(x: Z) -> None: a = None # type: D[Z] [out] main:11: error: Invalid type argument value for "D" main:13: error: Type variable "Z" not valid as type argument value for "D" [case testGenericTypeWithTypevarValuesAndSubtypePromotion] from typing import TypeVar, Generic X = TypeVar('X', int, str) class S(str): pass class C(Generic[X]): def __init__(self, x: X) -> None: pass x = None # type: C[str] y = C(S()) if int(): x = y y = x c_int = C(1) # type: C[int] if int(): y = c_int # E: Incompatible types in assignment (expression has type "C[int]", variable has type "C[str]") [case testGenericTypeBodyWithTypevarValues] from typing import TypeVar, Generic class A: def f(self, x: int) -> None: pass def g(self, x: int) -> None: pass def h(self, x: str) -> None: pass class B: def f(self, x: int) -> None: pass def g(self, x: str) -> None: pass def h(self, x: int) -> None: pass X = TypeVar('X', A, B) class C(Generic[X]): def f(self, x: X) -> None: x.f(1) x.g(1) # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str" x.h(1) # E: Argument 1 to "h" of "A" has incompatible type "int"; expected "str" [out] [case testAttributeInGenericTypeWithTypevarValues1] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): x = None # type: X def f(self, x: X) -> None: self.x = x self.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [out] [case testAttributeInGenericTypeWithTypevarValues2] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): x = None # type: X cn = C() # type: C[int] cn.x = 1 cn.x = '' # E: 
Incompatible types in assignment (expression has type "str", variable has type "int") cs = C() # type: C[str] cs.x = '' cs.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAttributeInGenericTypeWithTypevarValues3] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): def f(self, x: X) -> None: self.x = x # type: X ci: C[int] cs: C[str] reveal_type(ci.x) # N: Revealed type is 'builtins.int*' reveal_type(cs.x) # N: Revealed type is 'builtins.str*' [case testAttributeInGenericTypeWithTypevarValuesUsingInference1] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): def f(self, x: X) -> None: self.x = x # E: Need type annotation for 'x' ci: C[int] cs: C[str] reveal_type(ci.x) # N: Revealed type is 'Any' reveal_type(cs.x) # N: Revealed type is 'Any' [case testAttributeInGenericTypeWithTypevarValuesUsingInference2] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): def f(self, x: X) -> None: self.x = 1 reveal_type(self.x) # N: Revealed type is 'builtins.int' ci: C[int] cs: C[str] reveal_type(ci.x) # N: Revealed type is 'builtins.int' reveal_type(cs.x) # N: Revealed type is 'builtins.int' [case testAttributeInGenericTypeWithTypevarValuesUsingInference3] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): x: X def f(self) -> None: self.y = self.x # E: Need type annotation for 'y' ci: C[int] cs: C[str] reveal_type(ci.y) # N: Revealed type is 'Any' reveal_type(cs.y) # N: Revealed type is 'Any' [case testInferredAttributeInGenericClassBodyWithTypevarValues] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): x = 1 C.x = 1 C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testMultipleClassTypevarsWithValues1] from typing import TypeVar, Generic class A: def f(self, x: int) -> None: pass class B: def f(self, x: 
str) -> None: pass X = TypeVar('X', A, B) Y = TypeVar('Y', int, str) class C(Generic[X, Y]): def f(self, x: X, y: Y) -> None: x.f(y) [out] main:10: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" main:10: error: Argument 1 to "f" of "B" has incompatible type "int"; expected "str" [case testMultipleClassTypevarsWithValues2] from typing import TypeVar, Generic class A: pass class B: pass X = TypeVar('X', A, B) Y = TypeVar('Y', int, str) class C(Generic[X, Y]): pass a = None # type: C[A, int] b = None # type: C[B, str] c = None # type: C[int, int] # E: Value of type variable "X" of "C" cannot be "int" d = None # type: C[A, A] # E: Value of type variable "Y" of "C" cannot be "A" [case testCallGenericFunctionUsingMultipleTypevarsWithValues] from typing import TypeVar class A: pass class B: pass X = TypeVar('X', A, B) Y = TypeVar('Y', int, str) def f(x: X, y: Y) -> None: pass f(A(), '') f(B(), 1) f(A(), A()) # E: Value of type variable "Y" of "f" cannot be "A" f(1, 1) # E: Value of type variable "X" of "f" cannot be "int" [case testGenericFunctionWithNormalAndRestrictedTypevar] from typing import TypeVar, Generic X = TypeVar('X') Y = TypeVar('Y', int, str) class C(Generic[Y]): def __init__(self, y: Y) -> None: pass def f(x: X, y: Y, z: int) -> None: C(y) C(x) # Error if int(): z = x # Error z = y # Error y.foo # Error [out] main:8: error: Value of type variable "Y" of "C" cannot be "X" main:10: error: Incompatible types in assignment (expression has type "X", variable has type "int") main:11: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:12: error: "int" has no attribute "foo" main:12: error: "str" has no attribute "foo" [case testTypeVarWithValueInferredFromObjectReturnTypeContext] from typing import TypeVar T = TypeVar('T', int, str) def c1(x: object) -> None: pass def c2(x: int) -> None: pass def c3(x: str) -> None: pass def g(x: T) -> T: pass c1(g('')) c2(g(1)) c3(g('')) c2(g('')) # E: 
Argument 1 to "c2" has incompatible type "str"; expected "int" c3(g(1)) # E: Argument 1 to "c3" has incompatible type "int"; expected "str" [case testTypeVarWithValueInferredFromObjectReturnTypeContext2] from typing import TypeVar T = TypeVar('T', int, str) class ss(str): pass def c(x: ss) -> None: pass def g(x: T) -> T: pass c(g('')) c(g(1)) [out] main:6: error: Argument 1 to "c" has incompatible type "str"; expected "ss" main:7: error: Argument 1 to "c" has incompatible type "int"; expected "ss" [case testDefineAttributeInGenericMethodUsingTypeVarWithValues] from typing import TypeVar T = TypeVar('T', int, str) class A: def f(self, x: T) -> None: self.x = x # E: Need type annotation for 'x' self.y = [x] # E: Need type annotation for 'y' self.z = 1 reveal_type(A().x) # N: Revealed type is 'Any' reveal_type(A().y) # N: Revealed type is 'Any' reveal_type(A().z) # N: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] -- Special cases -- ------------- [case testTypevarValuesSpecialCase1] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T', int, str) class A(Generic[T]): @abstractmethod def f(self) -> 'A[T]': pass class B(A[str]): @abstractmethod def f(self) -> 'B': pass class C(A[str]): @abstractmethod def f(self) -> int: # E: Return type "int" of "f" incompatible with return type "A[str]" in supertype "A" pass [out] [case testDefaultArgumentValueInGenericClassWithTypevarValues] from typing import TypeVar, Generic T = TypeVar('T', int, str) class C(Generic[T]): def f(self, x: int = None) -> None: pass [case testTypevarValuesWithOverloadedFunctionSpecialCase] from foo import * [file foo.pyi] from typing import TypeVar, overload, Callable T = TypeVar('T', int, str) def f(x: T) -> None: y = m(g, x) if int(): x = y y = object() # Error A = TypeVar('A') R = TypeVar('R') def m(f: Callable[[A], R], it: A) -> A: pass @overload def g(x: int) -> int: return x @overload def g(x: str) -> str: return x [out] tmp/foo.pyi:8: error: 
Incompatible types in assignment (expression has type "object", variable has type "int") tmp/foo.pyi:8: error: Incompatible types in assignment (expression has type "object", variable has type "str") [case testGenericFunctionSubtypingWithTypevarValues] from typing import TypeVar class A: pass T = TypeVar('T', int, str) U = TypeVar('U', str, A, int) def f(x: T) -> T: pass def g(x: U) -> U: pass a = f if int(): a = f if int(): a = g b = g if int(): b = g if int(): b = f # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[U], U]") [case testInnerFunctionWithTypevarValues] from typing import TypeVar T = TypeVar('T', int, str) U = TypeVar('U', int, str) def outer(x: T) -> T: def inner(y: T) -> T: return x def inner2(y: U) -> U: return y inner(x) inner(3) # E: Argument 1 to "inner" has incompatible type "int"; expected "str" inner2(x) inner2(3) outer(3) return x [out] [case testInnerFunctionMutualRecursionWithTypevarValues] from typing import TypeVar T = TypeVar('T', int, str) def outer(x: T) -> T: def inner1(y: T) -> T: return inner2(y) def inner2(y: T) -> T: return inner1('a') # E: Argument 1 to "inner1" has incompatible type "str"; expected "int" return inner1(x) [out] [case testClassMemberTypeVarInFunctionBody] from typing import TypeVar, List S = TypeVar('S') class C: T = TypeVar('T', bound=int) def f(self, x: T) -> T: L = List[S] y: L[C.T] = [x] C.T # E: Type variable "C.T" cannot be used as an expression A = C.T # E: Type variable "C.T" cannot be used as an expression return y[0] [builtins fixtures/list.pyi] [case testParameterLessGenericAsRestriction] from typing import Sequence, Iterable, TypeVar S = TypeVar('S', Sequence, Iterable) def my_len(s: S) -> None: pass def crash() -> None: my_len((0,)) [case testReferenceToDecoratedFunctionAndTypeVarValues] from typing import TypeVar, Callable T = TypeVar('T') S = TypeVar('S', int, str) def dec(f: Callable[..., T]) -> Callable[..., T]: ... 
@dec def g(s: S) -> Callable[[S], None]: ... def f(x: S) -> None: h = g(x) h(x) mypy-0.761/test-data/unit/check-underscores.test0000644€tŠÔÚ€2›s®0000000057713576752246026106 0ustar jukkaDROPBOX\Domain Users00000000000000[case testUnderscoresRequire36] # flags: --python-version 3.5 x = 1000_000 # E: Underscores in numeric literals are only supported in Python 3.6 and greater [out] [case testUnderscoresBasics] # flags: --python-version 3.6 x: int x = 1000_000 x = 0x_FF_FF_FF_FF y: str = 1000_000.000_001 # E: Incompatible types in assignment (expression has type "float", variable has type "str") mypy-0.761/test-data/unit/check-unions.test0000644€tŠÔÚ€2›s®0000007743113576752246025070 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checking of union types [case testUnion1] from typing import Union def f(x: Union[int, str]) -> None: if isinstance(x, int): y = 1 y = x elif isinstance(x, str): z = 'a' z = x [builtins fixtures/isinstance.pyi] [case testUnion2] from typing import Union def f(x: Union[int, str]) -> None: if isinstance(x, int): y = 1 y = x else: z = 'a' z = x [builtins fixtures/isinstance.pyi] [case testUnion3] from typing import Union def f(x: Union[int, str]) -> None: if isinstance(x, int): y = 1 if int(): y = x else: z = 2 if int(): z = x # E: Incompatible types in assignment (expression has type "str", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testUnionAnyIsInstance] from typing import Any, Union def func(v: Union[int, Any]) -> None: if isinstance(v, int): reveal_type(v) # N: Revealed type is 'builtins.int' else: reveal_type(v) # N: Revealed type is 'Any' [builtins fixtures/isinstance.pyi] [out] [case testUnionAttributeAccess] from typing import Union class A: y = 1 class B: y = 2 class C: pass class D: pass u = None # type: Union[A, C, D] v = None # type: Union[C, D] w = None # type: Union[A, B] x = None # type: Union[A, C] y = None # type: int z = None # type: str if int(): y = w.y v.y # E: Item "C" of "Union[C, D]" 
has no attribute "y" \ # E: Item "D" of "Union[C, D]" has no attribute "y" u.y # E: Item "C" of "Union[A, C, D]" has no attribute "y" \ # E: Item "D" of "Union[A, C, D]" has no attribute "y" if int(): z = w.y # E: Incompatible types in assignment (expression has type "int", variable has type "str") w.y = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): y = x.y # E: Item "C" of "Union[A, C]" has no attribute "y" zz = x.y # E: Item "C" of "Union[A, C]" has no attribute "y" if int(): z = zz # E: Incompatible types in assignment (expression has type "Union[int, Any]", variable has type "str") [builtins fixtures/isinstance.pyi] [case testUnionMethodCalls] from typing import Union class A: def foo(self) -> int: pass class B: def foo(self) -> int: pass class C: def foo(self) -> str: pass x = None # type: Union[A, B] y = None # type: Union[A, C] i = None # type: int x.foo() y.foo() i = x.foo() if int(): i = y.foo() # E: Incompatible types in assignment (expression has type "Union[int, str]", variable has type "int") [builtins fixtures/isinstance.pyi] [case testUnionIndexing] from typing import Union, List x = None # type: Union[List[int], str] x[2] x[2] + 1 # E: Unsupported operand types for + ("str" and "int") \ # N: Left operand is of type "Union[int, str]" [builtins fixtures/isinstancelist.pyi] [case testUnionAsOverloadArg] from foo import * x = 0 if int(): x = f(1) if int(): x = f('') s = '' if int(): s = f(int) if int(): s = f(1) # E: Incompatible types in assignment (expression has type "int", variable has type "str") if int(): x = f(int) # E: Incompatible types in assignment (expression has type "str", variable has type "int") [file foo.pyi] from typing import Union, overload @overload def f(x: Union[int, str]) -> int: pass @overload def f(x: type) -> str: pass [case testUnionWithNoneItem] from typing import Union def f() -> Union[int, None]: pass x = 1 x = f() [case testOptional] from typing import Optional 
def f(x: Optional[int]) -> None: pass f(1) f(None) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "Optional[int]" [case testUnionSimplificationGenericFunction] from typing import TypeVar, Union, List T = TypeVar('T') def f(x: List[T]) -> Union[T, int]: pass def g(y: str) -> None: pass a = f([1]) g(a) # E: Argument 1 to "g" has incompatible type "int"; expected "str" [builtins fixtures/list.pyi] [case testUnionSimplificationGenericClass] from typing import TypeVar, Union, Generic T = TypeVar('T') U = TypeVar('U') class C(Generic[T, U]): def f(self, x: str) -> Union[T, U]: pass a = C() # type: C[int, int] b = a.f('a') a.f(b) # E: Argument 1 to "f" of "C" has incompatible type "int"; expected "str" [case testUnionOrderEquivalence] from typing import Union def foo(): pass S = str T = int if foo(): def f(x: Union[int, str]) -> None: pass elif foo(): def f(x: Union[str, int]) -> None: pass elif foo(): def f(x: Union[int, str, int, int, str]) -> None: pass elif foo(): def f(x: Union[int, str, float]) -> None: pass # E: All conditional function variants must have identical signatures elif foo(): def f(x: Union[S, T]) -> None: pass elif foo(): def f(x: Union[str]) -> None: pass # E: All conditional function variants must have identical signatures else: def f(x: Union[Union[int, T], Union[S, T], str]) -> None: pass # Checks bidirectionality of testing. The first definition of g is consistent with # the second, but not vice-versa. 
if foo(): def g(x: Union[int, str, bytes]) -> None: pass else: def g(x: Union[int, str]) -> None: pass # E: All conditional function variants must have identical signatures [case testUnionSimplificationSpecialCases] from typing import Any, TypeVar, Union class C(Any): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass a = None # type: Any reveal_type(u(C(), None)) # N: Revealed type is '__main__.C*' reveal_type(u(None, C())) # N: Revealed type is '__main__.C*' reveal_type(u(C(), a)) # N: Revealed type is 'Union[Any, __main__.C*]' reveal_type(u(a, C())) # N: Revealed type is 'Union[__main__.C*, Any]' reveal_type(u(C(), C())) # N: Revealed type is '__main__.C*' reveal_type(u(a, a)) # N: Revealed type is 'Any' [case testUnionSimplificationSpecialCase2] from typing import Any, TypeVar, Union class C(Any): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass def f(x: T) -> None: reveal_type(u(C(), x)) # N: Revealed type is 'Union[T`-1, __main__.C*]' reveal_type(u(x, C())) # N: Revealed type is 'Union[__main__.C*, T`-1]' [case testUnionSimplificationSpecialCase3] from typing import Any, TypeVar, Generic, Union class C(Any): pass V = TypeVar('V') T = TypeVar('T') class M(Generic[V]): def get(self, default: T) -> Union[V, T]: ... 
def f(x: M[C]) -> None: y = x.get(None) reveal_type(y) # N: Revealed type is '__main__.C' [case testUnionSimplificationSpecialCases] from typing import Any, TypeVar, Union class C(Any): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass a = None # type: Any # Base-class-Any and None, simplify reveal_type(u(C(), None)) # N: Revealed type is '__main__.C*' reveal_type(u(None, C())) # N: Revealed type is '__main__.C*' # Normal instance type and None, simplify reveal_type(u(1, None)) # N: Revealed type is 'builtins.int*' reveal_type(u(None, 1)) # N: Revealed type is 'builtins.int*' # Normal instance type and base-class-Any, no simplification reveal_type(u(C(), 1)) # N: Revealed type is 'Union[builtins.int*, __main__.C*]' reveal_type(u(1, C())) # N: Revealed type is 'Union[__main__.C*, builtins.int*]' # Normal instance type and Any, no simplification reveal_type(u(1, a)) # N: Revealed type is 'Union[Any, builtins.int*]' reveal_type(u(a, 1)) # N: Revealed type is 'Union[builtins.int*, Any]' # Any and base-class-Any, no simplificaiton reveal_type(u(C(), a)) # N: Revealed type is 'Union[Any, __main__.C*]' reveal_type(u(a, C())) # N: Revealed type is 'Union[__main__.C*, Any]' # Two normal instance types, simplify reveal_type(u(1, object())) # N: Revealed type is 'builtins.object*' reveal_type(u(object(), 1)) # N: Revealed type is 'builtins.object*' # Two normal instance types, no simplification reveal_type(u(1, '')) # N: Revealed type is 'Union[builtins.str*, builtins.int*]' reveal_type(u('', 1)) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' [case testUnionSimplificationWithDuplicateItems] from typing import Any, TypeVar, Union class C(Any): pass T = TypeVar('T') S = TypeVar('S') R = TypeVar('R') def u(x: T, y: S, z: R) -> Union[R, S, T]: pass a = None # type: Any reveal_type(u(1, 1, 1)) # N: Revealed type is 'builtins.int*' reveal_type(u(C(), C(), None)) # N: Revealed type is '__main__.C*' reveal_type(u(a, a, 1)) # N: Revealed type 
is 'Union[builtins.int*, Any]' reveal_type(u(a, C(), a)) # N: Revealed type is 'Union[Any, __main__.C*]' reveal_type(u('', 1, 1)) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' [case testUnionAndBinaryOperation] from typing import Union class A: pass def f(x: Union[int, str, A]): x + object() # E: Unsupported left operand type for + ("A") \ # E: Unsupported operand types for + ("int" and "object") \ # E: Unsupported operand types for + ("str" and "object") \ # N: Left operand is of type "Union[int, str, A]" [case testNarrowingDownNamedTupleUnion] from typing import NamedTuple, Union A = NamedTuple('A', [('y', int)]) B = NamedTuple('B', [('x', int)]) C = NamedTuple('C', [('x', int)]) def foo(a: Union[A, B, C]): if isinstance(a, (B, C)): reveal_type(a) # N: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.B], Tuple[builtins.int, fallback=__main__.C]]' a.x a.y # E: Item "B" of "Union[B, C]" has no attribute "y" \ # E: Item "C" of "Union[B, C]" has no attribute "y" b = a # type: Union[B, C] [builtins fixtures/isinstance.pyi] [case testSimplifyingUnionAndTypePromotions] from typing import TypeVar, Union T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass reveal_type(u(1, 2.3)) # N: Revealed type is 'builtins.float*' reveal_type(u(2.3, 1)) # N: Revealed type is 'builtins.float*' reveal_type(u(False, 2.2)) # N: Revealed type is 'builtins.float*' reveal_type(u(2.2, False)) # N: Revealed type is 'builtins.float*' [builtins fixtures/primitives.pyi] [case testSimplifyingUnionWithTypeTypes1] from typing import TypeVar, Union, Type, Any T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass t_o = None # type: Type[object] t_s = None # type: Type[str] t_a = None # type: Type[Any] # Two identical items reveal_type(u(t_o, t_o)) # N: Revealed type is 'Type[builtins.object]' reveal_type(u(t_s, t_s)) # N: Revealed type is 'Type[builtins.str]' reveal_type(u(t_a, t_a)) # N: Revealed type is 'Type[Any]' reveal_type(u(type, 
type)) # N: Revealed type is 'def (x: builtins.object) -> builtins.type' # One type, other non-type reveal_type(u(t_s, 1)) # N: Revealed type is 'Union[builtins.int*, Type[builtins.str]]' reveal_type(u(1, t_s)) # N: Revealed type is 'Union[Type[builtins.str], builtins.int*]' reveal_type(u(type, 1)) # N: Revealed type is 'Union[builtins.int*, def (x: builtins.object) -> builtins.type]' reveal_type(u(1, type)) # N: Revealed type is 'Union[def (x: builtins.object) -> builtins.type, builtins.int*]' reveal_type(u(t_a, 1)) # N: Revealed type is 'Union[builtins.int*, Type[Any]]' reveal_type(u(1, t_a)) # N: Revealed type is 'Union[Type[Any], builtins.int*]' reveal_type(u(t_o, 1)) # N: Revealed type is 'Union[builtins.int*, Type[builtins.object]]' reveal_type(u(1, t_o)) # N: Revealed type is 'Union[Type[builtins.object], builtins.int*]' [case testSimplifyingUnionWithTypeTypes2] from typing import TypeVar, Union, Type, Any T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass t_o = None # type: Type[object] t_s = None # type: Type[str] t_a = None # type: Type[Any] t = None # type: type # Union with object reveal_type(u(t_o, object())) # N: Revealed type is 'builtins.object*' reveal_type(u(object(), t_o)) # N: Revealed type is 'builtins.object*' reveal_type(u(t_s, object())) # N: Revealed type is 'builtins.object*' reveal_type(u(object(), t_s)) # N: Revealed type is 'builtins.object*' reveal_type(u(t_a, object())) # N: Revealed type is 'builtins.object*' reveal_type(u(object(), t_a)) # N: Revealed type is 'builtins.object*' # Union between type objects reveal_type(u(t_o, t_a)) # N: Revealed type is 'Union[Type[Any], Type[builtins.object]]' reveal_type(u(t_a, t_o)) # N: Revealed type is 'Union[Type[builtins.object], Type[Any]]' reveal_type(u(t_s, t_o)) # N: Revealed type is 'Type[builtins.object]' reveal_type(u(t_o, t_s)) # N: Revealed type is 'Type[builtins.object]' reveal_type(u(t_o, type)) # N: Revealed type is 'Type[builtins.object]' reveal_type(u(type, 
t_o)) # N: Revealed type is 'Type[builtins.object]' reveal_type(u(t_a, t)) # N: Revealed type is 'builtins.type*' reveal_type(u(t, t_a)) # N: Revealed type is 'builtins.type*' # The following should arguably not be simplified, but it's unclear how to fix then # without causing regressions elsewhere. reveal_type(u(t_o, t)) # N: Revealed type is 'builtins.type*' reveal_type(u(t, t_o)) # N: Revealed type is 'builtins.type*' [case testNotSimplifyingUnionWithMetaclass] from typing import TypeVar, Union, Type, Any class M(type): pass class M2(M): pass class A(metaclass=M): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass a: Any t_a: Type[A] reveal_type(u(M(*a), t_a)) # N: Revealed type is '__main__.M*' reveal_type(u(t_a, M(*a))) # N: Revealed type is '__main__.M*' reveal_type(u(M2(*a), t_a)) # N: Revealed type is 'Union[Type[__main__.A], __main__.M2*]' reveal_type(u(t_a, M2(*a))) # N: Revealed type is 'Union[__main__.M2*, Type[__main__.A]]' [case testSimplifyUnionWithCallable] from typing import TypeVar, Union, Any, Callable T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass class C: pass class D(C): pass D_C: Callable[[D], C] A_C: Callable[[Any], C] D_A: Callable[[D], Any] C_C: Callable[[C], C] D_D: Callable[[D], D] i_C: Callable[[int], C] # TODO: Test argument names and kinds once we have flexible callable types. 
reveal_type(u(D_C, D_C)) # N: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(A_C, D_C)) # N: Revealed type is 'Union[def (__main__.D) -> __main__.C, def (Any) -> __main__.C]' reveal_type(u(D_C, A_C)) # N: Revealed type is 'Union[def (Any) -> __main__.C, def (__main__.D) -> __main__.C]' reveal_type(u(D_A, D_C)) # N: Revealed type is 'Union[def (__main__.D) -> __main__.C, def (__main__.D) -> Any]' reveal_type(u(D_C, D_A)) # N: Revealed type is 'Union[def (__main__.D) -> Any, def (__main__.D) -> __main__.C]' reveal_type(u(D_C, C_C)) # N: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(C_C, D_C)) # N: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(D_C, D_D)) # N: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(D_D, D_C)) # N: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(D_C, i_C)) # N: Revealed type is 'Union[def (builtins.int) -> __main__.C, def (__main__.D) -> __main__.C]' [case testUnionOperatorMethodSpecialCase] from typing import Union class C: def __le__(self, x: 'C') -> int: ... class D: def __le__(self, other) -> int: ... class E: def __ge__(self, other: Union[C, D]) -> int: ... 
[case testUnionSimplificationWithBoolIntAndFloat] from typing import List, Union l = reveal_type([]) # type: List[Union[bool, int, float]] \ # N: Revealed type is 'builtins.list[builtins.float]' reveal_type(l) \ # N: Revealed type is 'builtins.list[Union[builtins.bool, builtins.int, builtins.float]]' [builtins fixtures/list.pyi] [case testUnionSimplificationWithBoolIntAndFloat2] from typing import List, Union l = reveal_type([]) # type: List[Union[bool, int, float, str]] \ # N: Revealed type is 'builtins.list[Union[builtins.float, builtins.str]]' reveal_type(l) \ # N: Revealed type is 'builtins.list[Union[builtins.bool, builtins.int, builtins.float, builtins.str]]' [builtins fixtures/list.pyi] [case testNestedUnionsProcessedCorrectly] from typing import Union class A: pass class B: pass class C: pass def foo(bar: Union[Union[A, B], C]) -> None: if isinstance(bar, A): reveal_type(bar) # N: Revealed type is '__main__.A' else: reveal_type(bar) # N: Revealed type is 'Union[__main__.B, __main__.C]' [builtins fixtures/isinstance.pyi] [out] [case testAssignAnyToUnion] from typing import Union, Any x: Union[int, str] a: Any if bool(): x = a # TODO: Maybe we should infer Any as the type instead. 
reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/bool.pyi] [case testAssignAnyToUnionWithAny] from typing import Union, Any x: Union[int, Any] a: Any if bool(): x = a reveal_type(x) # N: Revealed type is 'Any' reveal_type(x) # N: Revealed type is 'Union[builtins.int, Any]' [builtins fixtures/bool.pyi] [case testUnionMultiassignSingle] from typing import Union, Tuple, Any a: Union[Tuple[int], Tuple[float]] (a1,) = a reveal_type(a1) # N: Revealed type is 'builtins.float' b: Union[Tuple[int], Tuple[str]] (b1,) = b reveal_type(b1) # N: Revealed type is 'Union[builtins.int, builtins.str]' [case testUnionMultiassignDouble] from typing import Union, Tuple c: Union[Tuple[int, int], Tuple[int, float]] (c1, c2) = c reveal_type(c1) # N: Revealed type is 'builtins.int' reveal_type(c2) # N: Revealed type is 'builtins.float' [case testUnionMultiassignGeneric] from typing import Union, Tuple, TypeVar T = TypeVar('T') S = TypeVar('S') def pack_two(x: T, y: S) -> Union[Tuple[T, T], Tuple[S, S]]: pass (x, y) = pack_two(1, 'a') reveal_type(x) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(y) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' [case testUnionMultiassignAny] from typing import Union, Tuple, Any d: Union[Any, Tuple[float, float]] (d1, d2) = d reveal_type(d1) # N: Revealed type is 'Union[Any, builtins.float]' reveal_type(d2) # N: Revealed type is 'Union[Any, builtins.float]' e: Union[Any, Tuple[float, float], int] (e1, e2) = e # E: 'builtins.int' object is not iterable [case testUnionMultiassignNotJoin] from typing import Union, List class A: pass class B(A): pass class C(A): pass a: Union[List[B], List[C]] x, y = a reveal_type(x) # N: Revealed type is 'Union[__main__.B*, __main__.C*]' [builtins fixtures/list.pyi] [case testUnionMultiassignRebind] from typing import Union, List class A: pass class B(A): pass class C(A): pass 
obj: object a: Union[List[B], List[C]] obj, new = a reveal_type(obj) # N: Revealed type is 'Union[__main__.B*, __main__.C*]' reveal_type(new) # N: Revealed type is 'Union[__main__.B*, __main__.C*]' obj = 1 reveal_type(obj) # N: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] [case testUnionMultiassignAlreadyDeclared] from typing import Union, Tuple a: Union[Tuple[int, int], Tuple[int, float]] a1: object a2: int (a1, a2) = a # E: Incompatible types in assignment (expression has type "float", variable has type "int") b: Union[Tuple[float, int], Tuple[int, int]] b1: object b2: int (b1, b2) = b reveal_type(b1) # N: Revealed type is 'builtins.float' reveal_type(b2) # N: Revealed type is 'builtins.int' c: Union[Tuple[int, int], Tuple[int, int]] c1: object c2: int (c1, c2) = c reveal_type(c1) # N: Revealed type is 'builtins.int' reveal_type(c2) # N: Revealed type is 'builtins.int' d: Union[Tuple[int, int], Tuple[int, float]] d1: object (d1, d2) = d reveal_type(d1) # N: Revealed type is 'builtins.int' reveal_type(d2) # N: Revealed type is 'builtins.float' [case testUnionMultiassignIndexed] from typing import Union, Tuple, List class B: x: object x: List[int] b: B a: Union[Tuple[int, int], Tuple[int, object]] (x[0], b.x) = a reveal_type(x[0]) # N: Revealed type is 'builtins.int*' reveal_type(b.x) # N: Revealed type is 'builtins.object' [builtins fixtures/list.pyi] [case testUnionMultiassignIndexedWithError] from typing import Union, Tuple, List class A: pass class B: x: int x: List[A] b: B a: Union[Tuple[int, int], Tuple[int, object]] (x[0], b.x) = a # E: Incompatible types in assignment (expression has type "int", target has type "A") \ # E: Incompatible types in assignment (expression has type "object", variable has type "int") reveal_type(x[0]) # N: Revealed type is '__main__.A*' reveal_type(b.x) # N: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] [case testUnionMultiassignPacked] from typing import Union, Tuple, List a: Union[Tuple[int, int, 
int], Tuple[int, int, str]] a1: int a2: object (a1, *xs, a2) = a reveal_type(a1) # N: Revealed type is 'builtins.int' reveal_type(xs) # N: Revealed type is 'builtins.list[builtins.int*]' reveal_type(a2) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/list.pyi] [case testUnpackingUnionOfListsInFunction] from typing import Union, List def f(x: bool) -> Union[List[int], List[str]]: if x: return [1, 1] else: return ['a', 'a'] def g(x: bool) -> None: a, b = f(x) reveal_type(a) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(b) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' [builtins fixtures/list.pyi] [case testUnionOfVariableLengthTupleUnpacking] from typing import Tuple, Union VarTuple = Union[Tuple[int, int], Tuple[int, int, int]] def make_tuple() -> VarTuple: pass x = make_tuple() a, b = x # E: Too many values to unpack (2 expected, 3 provided) a, b, c = x # E: Need more than 2 values to unpack (3 expected) c, *d = x reveal_type(c) # N: Revealed type is 'builtins.int' reveal_type(d) # N: Revealed type is 'builtins.list[builtins.int*]' [builtins fixtures/tuple.pyi] [case testUnionOfNonIterableUnpacking] from typing import Union bad: Union[int, str] x, y = bad # E: 'builtins.int' object is not iterable \ # E: 'builtins.str' object is not iterable reveal_type(x) # N: Revealed type is 'Any' reveal_type(y) # N: Revealed type is 'Any' [out] [case testUnionAlwaysTooMany] from typing import Union, Tuple bad: Union[Tuple[int, int, int], Tuple[str, str, str]] x, y = bad # E: Too many values to unpack (2 expected, 3 provided) reveal_type(x) # N: Revealed type is 'Any' reveal_type(y) # N: Revealed type is 'Any' [builtins fixtures/tuple.pyi] [out] [case testUnionAlwaysTooFew] from typing import Union, Tuple bad: Union[Tuple[int, int, int], Tuple[str, str, str]] x, y, z, w = bad # E: Need more than 3 values to unpack (4 expected) reveal_type(x) # N: Revealed type is 'Any' reveal_type(y) # N: Revealed type is 'Any' 
reveal_type(z) # N: Revealed type is 'Any' reveal_type(w) # N: Revealed type is 'Any' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingChainedTuple] from typing import Union, Tuple good: Union[Tuple[int, int], Tuple[str, str]] x, y = t = good reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(t) # N: Revealed type is 'Union[Tuple[builtins.int, builtins.int], Tuple[builtins.str, builtins.str]]' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingChainedTuple2] from typing import Union, Tuple good: Union[Tuple[int, int], Tuple[str, str]] t = x, y = good reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(t) # N: Revealed type is 'Union[Tuple[builtins.int, builtins.int], Tuple[builtins.str, builtins.str]]' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingChainedTuple3] from typing import Union, Tuple good: Union[Tuple[int, int], Tuple[str, str]] x, y = a, b = good reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(a) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(b) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingChainedList] from typing import Union, List good: Union[List[int], List[str]] lst = x, y = good reveal_type(x) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(y) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(lst) # N: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]' [builtins fixtures/list.pyi] [out] [case testUnionUnpackingChainedList2] from typing import Union, List good: Union[List[int], List[str]] x, *y, z = lst = good reveal_type(x) # N: 
Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(y) # N: Revealed type is 'Union[builtins.list[builtins.int*], builtins.list[builtins.str*]]' reveal_type(z) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(lst) # N: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]' [builtins fixtures/list.pyi] [out] [case testUnionUnpackingInForTuple] from typing import Union, Tuple, NamedTuple class NTInt(NamedTuple): x: int y: int class NTStr(NamedTuple): x: str y: str t1: NTInt reveal_type(t1.__iter__) # N: Revealed type is 'def () -> typing.Iterator[builtins.int*]' nt: Union[NTInt, NTStr] reveal_type(nt.__iter__) # N: Revealed type is 'Union[def () -> typing.Iterator[builtins.int*], def () -> typing.Iterator[builtins.str*]]' for nx in nt: reveal_type(nx) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' t: Union[Tuple[int, int], Tuple[str, str]] for x in t: reveal_type(x) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' [builtins fixtures/for.pyi] [out] [case testUnionUnpackingInForList] from typing import Union, List, Tuple t: Union[List[Tuple[int, int]], List[Tuple[str, str]]] for x, y in t: reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # N: Revealed type is 'Union[builtins.int, builtins.str]' t2: List[Union[Tuple[int, int], Tuple[str, str]]] for x2, y2 in t2: reveal_type(x2) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y2) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/for.pyi] [out] [case testUnionUnpackingDoubleBinder] from typing import Union, Tuple x: object y: object class A: pass class B: pass t1: Union[Tuple[A, A], Tuple[B, B]] t2: Union[Tuple[int, int], Tuple[str, str]] x, y = t1 reveal_type(x) # N: Revealed type is 'Union[__main__.A, __main__.B]' reveal_type(y) # N: Revealed type is 'Union[__main__.A, __main__.B]' x, y = t2 reveal_type(x) # N: Revealed type is 
'Union[builtins.int, builtins.str]' reveal_type(y) # N: Revealed type is 'Union[builtins.int, builtins.str]' x, y = object(), object() reveal_type(x) # N: Revealed type is 'builtins.object' reveal_type(y) # N: Revealed type is 'builtins.object' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingFromNestedTuples] from typing import Union, Tuple t: Union[Tuple[int, Tuple[int, int]], Tuple[str, Tuple[str, str]]] x, (y, z) = t reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(z) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/tuple.pyi] [out] [case testNestedUnionUnpackingFromNestedTuples] from typing import Union, Tuple class A: pass class B: pass t: Union[Tuple[int, Union[Tuple[int, int], Tuple[A, A]]], Tuple[str, Union[Tuple[str, str], Tuple[B, B]]]] x, (y, z) = t reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # N: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]' reveal_type(z) # N: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]' [builtins fixtures/tuple.pyi] [out] [case testNestedUnionUnpackingFromNestedTuplesBinder] from typing import Union, Tuple class A: pass class B: pass x: object y: object z: object t: Union[Tuple[int, Union[Tuple[int, int], Tuple[A, A]]], Tuple[str, Union[Tuple[str, str], Tuple[B, B]]]] x, (y, z) = t reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # N: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]' reveal_type(z) # N: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]' [builtins fixtures/tuple.pyi] [out] [case testUnpackUnionNoCrashOnPartialNone] # flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any d: Dict[str, Tuple[List[Tuple[str, str]], str]] x, _ = d.get(a, (None, None)) for y in x: 
pass # E: Item "None" of "Optional[List[Tuple[str, str]]]" has no attribute "__iter__" (not iterable) if x: for s, t in x: reveal_type(s) # N: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [out] [case testUnpackUnionNoCrashOnPartialNone2] # flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any x = None d: Dict[str, Tuple[List[Tuple[str, str]], str]] x, _ = d.get(a, (None, None)) for y in x: pass # E: Item "None" of "Optional[List[Tuple[str, str]]]" has no attribute "__iter__" (not iterable) if x: for s, t in x: reveal_type(s) # N: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [out] [case testUnpackUnionNoCrashOnPartialNoneBinder] # flags: --strict-optional from typing import Dict, Tuple, List, Any x: object a: Any d: Dict[str, Tuple[List[Tuple[str, str]], str]] x, _ = d.get(a, (None, None)) reveal_type(x) # N: Revealed type is 'Union[builtins.list[Tuple[builtins.str, builtins.str]], None]' if x: for y in x: pass [builtins fixtures/dict.pyi] [out] [case testUnpackUnionNoCrashOnPartialNoneList] # flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any d: Dict[str, Tuple[List[Tuple[str, str]], str]] x, _ = d.get(a, ([], [])) reveal_type(x) # N: Revealed type is 'Union[builtins.list[Tuple[builtins.str, builtins.str]], builtins.list[]]' for y in x: pass [builtins fixtures/dict.pyi] [out] [case testLongUnionFormatting] from typing import Any, Generic, TypeVar, Union T = TypeVar('T') class ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes(Generic[T]): pass x: Union[ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[int], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[object], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[float], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[str], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[Any], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[bytes]] def takes_int(arg: int) -> 
None: pass takes_int(x) # E: Argument 1 to "takes_int" has incompatible type ; expected "int" [case testRecursiveForwardReferenceInUnion] from typing import List, Union MYTYPE = List[Union[str, "MYTYPE"]] # E: Cannot resolve name "MYTYPE" (possible cyclic definition) [builtins fixtures/list.pyi] [case testNonStrictOptional] from typing import Optional, List def union_test1(x): # type: (Optional[List[int]]) -> Optional[int] if x is None: return x else: return x[0] def union_test2(x): # type: (Optional[List[int]]) -> Optional[int] if isinstance(x, type(None)): return x else: return x[0] def f(): return 0 def union_test3(): # type: () -> int x = f() assert x is None x = f() return x + 1 [builtins fixtures/isinstancelist.pyi] [case testInvariance] from typing import List, Union from enum import Enum class Boop(Enum): FOO = 1 def do_thing_with_enums(enums: Union[List[Enum], Enum]) -> None: ... boop: List[Boop] = [] do_thing_with_enums(boop) # E: Argument 1 to "do_thing_with_enums" has incompatible type "List[Boop]"; expected "Union[List[Enum], Enum]" \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/isinstancelist.pyi] [case testAssignUnionWithTenaryExprWithEmptyCollection] from typing import Dict, List, Union x: Union[int, List[int]] = 1 if bool() else [] y: Union[int, Dict[int, int]] = 1 if bool() else {} u: Union[int, List[int]] = [] if bool() else 1 v: Union[int, Dict[int, int]] = {} if bool() else 1 [builtins fixtures/isinstancelist.pyi] mypy-0.761/test-data/unit/check-unreachable-code.test0000644€tŠÔÚ€2›s®0000010054713576752246026731 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checker test cases for conditional checks that result in some -- blocks classified as unreachable (they are not type checked or semantically -- analyzed). -- -- For example, we skip blocks that will not be executed on the active -- Python version. 
[case testConditionalTypeAliasPY3] import typing def f(): pass PY3 = f() if PY3: t = int x = object() + 'x' # E: Unsupported left operand type for + ("object") else: t = str y = 'x' / 1 x z = 1 # type: t [case testConditionalTypeAliasPY3_python2] import typing def f(): pass PY3 = f() if PY3: t = int x = object() + 'x' else: t = str y = 'x' / 1 # E: "str" has no attribute "__div__" y z = '' # type: t [case testConditionalAssignmentPY2] import typing def f(): pass PY2 = f() if PY2: x = object() + 'x' else: y = 'x' / 1 # E: Unsupported left operand type for / ("str") y [case testConditionalAssignmentPY2_python2] import typing def f(): pass PY2 = f() if PY2: x = object() + 'x' # E: Unsupported left operand type for + ("object") else: y = 'x' / 1 x [case testConditionalImport] import typing def f(): pass PY2 = f() if PY2: import fuzzybar from barbar import * from pawwaw import a, bc else: import m [file m.py] import typing x = 1 if int(): x = 'a' [out] tmp/m.py:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNegatedMypyConditional] import typing MYPY = 0 if not MYPY: import xyz753 else: import pow123 # E [builtins fixtures/bool.pyi] [out] main:6: error: Cannot find implementation or library stub for module named 'pow123' main:6: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testMypyConditional] import typing MYPY = 0 if MYPY: None + 1 # E: Unsupported left operand type for + ("None") else: None + '' [builtins fixtures/bool.pyi] [case testTypeCheckingConditional] import typing if typing.TYPE_CHECKING: import pow123 # E else: import xyz753 [out] main:3: error: Cannot find implementation or library stub for module named 'pow123' main:3: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testTypeCheckingConditionalFromImport] from typing import TYPE_CHECKING if TYPE_CHECKING: import pow123 # E else: import xyz753 [out] main:3: error: 
Cannot find implementation or library stub for module named 'pow123' main:3: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testNegatedTypeCheckingConditional] import typing if not typing.TYPE_CHECKING: import pow123 # E else: import xyz753 [builtins fixtures/bool.pyi] [out] main:5: error: Cannot find implementation or library stub for module named 'xyz753' main:5: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testUndefinedTypeCheckingConditional] if not TYPE_CHECKING: # E import pow123 else: import xyz753 [builtins fixtures/bool.pyi] [out] main:1: error: Name 'TYPE_CHECKING' is not defined main:4: error: Cannot find implementation or library stub for module named 'xyz753' main:4: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testConditionalClassDefPY3] def f(): pass PY3 = f() if PY3: pass else: class X(object): pass [case testUnreachabilityAndElifPY3] def f(): pass PY3 = f() if PY3: pass elif bool(): import nonexistent 1 + '' else: import bad_name 1 + '' [builtins fixtures/bool.pyi] [out] [case testSysVersionInfo_python2] import sys if sys.version_info[0] >= 3: def foo(): # type: () -> int return 0 else: def foo(): # type: () -> str return '' reveal_type(foo()) # N: Revealed type is 'builtins.str' [builtins_py2 fixtures/ops.pyi] [out] [case testSysVersionInfo] import sys if sys.version_info[0] >= 3: def foo() -> int: return 0 else: def foo() -> str: return '' reveal_type(foo()) # N: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoNegated_python2] import sys if not (sys.version_info[0] < 3): def foo(): # type: () -> int return 0 else: def foo(): # type: () -> str return '' reveal_type(foo()) # N: Revealed type is 'builtins.str' [builtins_py2 fixtures/ops.pyi] [out] [case testSysVersionInfoNegated] import sys if not (sys.version_info[0] < 3): def foo() -> int: return 0 else: def foo() -> str: return '' 
reveal_type(foo()) # N: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced1] import sys if sys.version_info[:1] >= (3,): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced2] import sys if sys.version_info[:2] >= (3, 0): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced3] import sys if sys.version_info[:] >= (3, 0): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced4] import sys if sys.version_info[0:2] >= (3, 0): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced5] import sys if sys.version_info[0:] >= (3,): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced6] import sys if sys.version_info[1:] >= (5,): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced7] import sys if sys.version_info >= (3, 5): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced8] # Our pyversion only has (major, minor), # so testing for (major, minor, bugfix) is unsupported. import sys if sys.version_info >= (3, 5, 0): def foo() -> int: return 0 else: def foo() -> str: return '' # E: All conditional function variants must have identical signatures [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced9] # Our pyversion only has (major, minor), # so testing for (minor, bugfix) is unsupported (also it's silly :-). 
import sys if sys.version_info[1:] >= (5, 0): def foo() -> int: return 0 else: def foo() -> str: return '' # E: All conditional function variants must have identical signatures [builtins fixtures/ops.pyi] [out] [case testSysPlatform1] import sys if sys.platform == 'fictional': def foo() -> int: return 0 else: def foo() -> str: return '' foo() + '' [builtins fixtures/ops.pyi] [out] [case testSysPlatform2] import sys if sys.platform != 'fictional': def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysPlatformNegated] import sys if not (sys.platform == 'fictional'): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoClass] import sys if sys.version_info < (3, 5): class C: pass else: class C: def foo(self) -> int: return 0 C().foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoImport] import sys if sys.version_info >= (3, 5): import collections else: collections = None Pt = collections.namedtuple('Pt', 'x y z') [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoVariable] import sys if sys.version_info >= (3, 5): x = '' else: x = 0 x + '' [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoInClass] import sys class C: if sys.version_info >= (3, 5): def foo(self) -> int: return 0 else: def foo(self) -> str: return '' reveal_type(C().foo()) # N: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoInFunction] import sys def foo() -> None: if sys.version_info >= (3, 5): x = '' else: x = 0 reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testSysPlatformInMethod] import sys class C: def foo(self) -> None: if sys.platform != 'fictional': x = '' else: x = 0 reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testSysPlatformInFunctionImport1] import sys def foo() -> None: if sys.platform != 
'fictional': import a else: import b as a a.x [file a.py] x = 1 [builtins fixtures/ops.pyi] [out] [case testSysPlatformInFunctionImport2] import sys def foo() -> None: if sys.platform == 'fictional': import b as a else: import a a.x [file a.py] x = 1 [builtins fixtures/ops.pyi] [out] [case testSysPlatformInFunctionImport3] from typing import Callable import sys def idf(x: Callable[[], None]) -> Callable[[], None]: return x @idf def foo() -> None: if sys.platform == 'fictional': import b as a else: import a a.x [file a.py] x = 1 [builtins fixtures/ops.pyi] [out] [case testSysPlatformInMethodImport2] import sys class A: def foo(self) -> None: if sys.platform == 'fictional': import b as a else: import a a.x [file a.py] x = 1 [builtins fixtures/ops.pyi] [out] [case testCustomSysVersionInfo] # flags: --python-version 3.5 import sys if sys.version_info == (3, 5): x = "foo" else: x = 3 reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testCustomSysVersionInfo2] # flags: --python-version 3.5 import sys if sys.version_info == (3, 6): x = "foo" else: x = 3 reveal_type(x) # N: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case testCustomSysPlatform] # flags: --platform linux import sys if sys.platform == 'linux': x = "foo" else: x = 3 reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testCustomSysPlatform2] # flags: --platform win32 import sys if sys.platform == 'linux': x = "foo" else: x = 3 reveal_type(x) # N: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case testCustomSysPlatformStartsWith] # flags: --platform win32 import sys if sys.platform.startswith('win'): x = "foo" else: x = 3 reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testShortCircuitInExpression] import typing def make() -> bool: pass PY2 = PY3 = make() a = PY2 and 's' b = PY3 and 's' c = PY2 or 's' d = PY3 or 's' e = (PY2 or PY3) and 's' f = 
(PY3 or PY2) and 's' g = (PY2 or PY3) or 's' h = (PY3 or PY2) or 's' reveal_type(a) # N: Revealed type is 'builtins.bool' reveal_type(b) # N: Revealed type is 'builtins.str' reveal_type(c) # N: Revealed type is 'builtins.str' reveal_type(d) # N: Revealed type is 'builtins.bool' reveal_type(e) # N: Revealed type is 'builtins.str' reveal_type(f) # N: Revealed type is 'builtins.str' reveal_type(g) # N: Revealed type is 'builtins.bool' reveal_type(h) # N: Revealed type is 'builtins.bool' [builtins fixtures/ops.pyi] [out] [case testShortCircuitAndWithConditionalAssignment] # flags: --platform linux import sys def f(): pass PY2 = f() if PY2 and sys.platform == 'linux': x = 'foo' else: x = 3 reveal_type(x) # N: Revealed type is 'builtins.int' if sys.platform == 'linux' and PY2: y = 'foo' else: y = 3 reveal_type(y) # N: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [case testShortCircuitOrWithConditionalAssignment] # flags: --platform linux import sys def f(): pass PY2 = f() if PY2 or sys.platform == 'linux': x = 'foo' else: x = 3 reveal_type(x) # N: Revealed type is 'builtins.str' if sys.platform == 'linux' or PY2: y = 'foo' else: y = 3 reveal_type(y) # N: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [case testShortCircuitNoEvaluation] # flags: --platform linux --always-false COMPILE_TIME_FALSE import sys if sys.platform == 'darwin': mac_only = 'junk' # `mac_only` should not be evaluated if sys.platform == 'darwin' and mac_only: pass if sys.platform == 'linux' or mac_only: pass COMPILE_TIME_FALSE = 'junk' if COMPILE_TIME_FALSE: compile_time_false_only = 'junk' # `compile_time_false_only` should not be evaluated if COMPILE_TIME_FALSE and compile_time_false_only: pass if not COMPILE_TIME_FALSE or compile_time_false_only: pass MYPY = False if not MYPY: mypy_only = 'junk' # `mypy_only` should not be evaluated if not MYPY and mypy_only: pass if MYPY or mypy_only: pass [builtins fixtures/ops.pyi] [case testConditionalAssertWithoutElse] import typing 
class A: pass class B(A): pass x = A() reveal_type(x) # N: Revealed type is '__main__.A' if typing.TYPE_CHECKING: assert isinstance(x, B) reveal_type(x) # N: Revealed type is '__main__.B' reveal_type(x) # N: Revealed type is '__main__.B' [builtins fixtures/isinstancelist.pyi] [case testUnreachableWhenSuperclassIsAny] # flags: --strict-optional from typing import Any # This can happen if we're importing a class from a missing module Parent: Any class Child(Parent): def foo(self) -> int: reveal_type(self) # N: Revealed type is '__main__.Child' if self is None: reveal_type(self) return None reveal_type(self) # N: Revealed type is '__main__.Child' return 3 def bar(self) -> int: if 1: self = super(Child, self).something() reveal_type(self) # N: Revealed type is '__main__.Child' if self is None: reveal_type(self) return None reveal_type(self) # N: Revealed type is '__main__.Child' return 3 [builtins fixtures/isinstance.pyi] [case testUnreachableWhenSuperclassIsAnyNoStrictOptional] # flags: --no-strict-optional from typing import Any Parent: Any class Child(Parent): def foo(self) -> int: reveal_type(self) # N: Revealed type is '__main__.Child' if self is None: reveal_type(self) # N: Revealed type is 'None' return None reveal_type(self) # N: Revealed type is '__main__.Child' return 3 [builtins fixtures/isinstance.pyi] [case testUnreachableAfterToplevelAssert] import sys reveal_type(0) # N: Revealed type is 'Literal[0]?' assert sys.platform == 'lol' reveal_type('') # No error here :-) [builtins fixtures/ops.pyi] [case testUnreachableAfterToplevelAssert2] import sys reveal_type(0) # N: Revealed type is 'Literal[0]?' assert sys.version_info[0] == 1 reveal_type('') # No error here :-) [builtins fixtures/ops.pyi] [case testUnreachableAfterToplevelAssert3] reveal_type(0) # N: Revealed type is 'Literal[0]?' 
MYPY = False assert not MYPY reveal_type('') # No error here :-) [builtins fixtures/ops.pyi] [case testUnreachableAfterToplevelAssert4] # flags: --always-false NOPE reveal_type(0) # N: Revealed type is 'Literal[0]?' NOPE = False assert NOPE reveal_type('') # No error here :-) [builtins fixtures/ops.pyi] [case testUnreachableAfterToplevelAssertImport] import foo foo.bar() # E: "object" has no attribute "bar" [file foo.py] import sys assert sys.platform == 'lol' def bar() -> None: pass [builtins fixtures/ops.pyi] [case testUnreachableAfterToplevelAssertImport2] # flags: --platform lol import foo foo.bar() # No error :-) [file foo.py] import sys assert sys.platform == 'lol' def bar() -> None: pass [builtins fixtures/ops.pyi] [case testUnreachableAfterToplevelAssertNotInsideIf] import sys if sys.version_info[0] >= 2: assert sys.platform == 'lol' reveal_type('') # N: Revealed type is 'Literal['']?' reveal_type('') # N: Revealed type is 'Literal['']?' [builtins fixtures/ops.pyi] [case testUnreachableFlagWithBadControlFlow] # flags: --warn-unreachable a: int if isinstance(a, int): reveal_type(a) # N: Revealed type is 'builtins.int' else: reveal_type(a) # E: Statement is unreachable b: int while isinstance(b, int): reveal_type(b) # N: Revealed type is 'builtins.int' else: reveal_type(b) # E: Statement is unreachable def foo(c: int) -> None: reveal_type(c) # N: Revealed type is 'builtins.int' assert not isinstance(c, int) reveal_type(c) # E: Statement is unreachable d: int if False: reveal_type(d) # E: Statement is unreachable e: int if True: reveal_type(e) # N: Revealed type is 'builtins.int' else: reveal_type(e) # E: Statement is unreachable [builtins fixtures/isinstancelist.pyi] [case testUnreachableFlagStatementAfterReturn] # flags: --warn-unreachable def foo(x: int) -> None: reveal_type(x) # N: Revealed type is 'builtins.int' return reveal_type(x) # E: Statement is unreachable [case testUnreachableFlagTryBlocks] # flags: --warn-unreachable def foo(x: int) -> int: try: 
reveal_type(x) # N: Revealed type is 'builtins.int' return x reveal_type(x) # E: Statement is unreachable finally: reveal_type(x) # N: Revealed type is 'builtins.int' if True: reveal_type(x) # N: Revealed type is 'builtins.int' else: reveal_type(x) # E: Statement is unreachable def bar(x: int) -> int: try: if True: raise Exception() reveal_type(x) # E: Statement is unreachable except: reveal_type(x) # N: Revealed type is 'builtins.int' return x else: reveal_type(x) # E: Statement is unreachable def baz(x: int) -> int: try: reveal_type(x) # N: Revealed type is 'builtins.int' except: # Mypy assumes all lines could throw an exception reveal_type(x) # N: Revealed type is 'builtins.int' return x else: reveal_type(x) # N: Revealed type is 'builtins.int' return x [builtins fixtures/exception.pyi] [case testUnreachableFlagIgnoresSemanticAnalysisUnreachable] # flags: --warn-unreachable --python-version 3.7 --platform win32 --always-false FOOBAR import sys from typing import TYPE_CHECKING x: int if TYPE_CHECKING: reveal_type(x) # N: Revealed type is 'builtins.int' else: reveal_type(x) if not TYPE_CHECKING: reveal_type(x) else: reveal_type(x) # N: Revealed type is 'builtins.int' if sys.platform == 'darwin': reveal_type(x) else: reveal_type(x) # N: Revealed type is 'builtins.int' if sys.platform == 'win32': reveal_type(x) # N: Revealed type is 'builtins.int' else: reveal_type(x) if sys.version_info == (2, 7): reveal_type(x) else: reveal_type(x) # N: Revealed type is 'builtins.int' if sys.version_info == (3, 7): reveal_type(x) # N: Revealed type is 'builtins.int' else: reveal_type(x) FOOBAR = "" if FOOBAR: reveal_type(x) else: reveal_type(x) # N: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [case testUnreachableFlagIgnoresSemanticAnalysisExprUnreachable] # flags: --warn-unreachable --always-false FOOBAR import sys from typing import TYPE_CHECKING FOOBAR = "" def foo() -> bool: ... 
lst = [1, 2, 3] a = FOOBAR and foo() b = (not FOOBAR) or foo() c = 1 if FOOBAR else 2 d = [x for x in lst if FOOBAR] [builtins fixtures/list.pyi] [case testUnreachableFlagOkWithDeadStatements] # flags: --warn-unreachable from typing import NoReturn def assert_never(x: NoReturn) -> NoReturn: assert False def nonthrowing_assert_never(x: NoReturn) -> None: ... def expect_str(x: str) -> str: pass x: int if False: assert False reveal_type(x) if False: raise Exception() reveal_type(x) if False: assert_never(x) reveal_type(x) if False: nonthrowing_assert_never(x) # E: Statement is unreachable reveal_type(x) if False: # Ignore obvious type errors assert_never(expect_str(x)) reveal_type(x) [builtins fixtures/exception.pyi] [case testUnreachableFlagExpressions] # flags: --warn-unreachable def foo() -> bool: ... lst = [1, 2, 3, 4] a = True or foo() # E: Right operand of 'or' is never evaluated b = False or foo() # E: Left operand of 'or' is always false c = True and foo() # E: Left operand of 'and' is always true d = False and foo() # E: Right operand of 'and' is never evaluated e = True or (True or (True or foo())) # E: Right operand of 'or' is never evaluated f = (True or foo()) or (True or foo()) # E: Right operand of 'or' is never evaluated g = 3 if True else 4 # E: If condition is always true h = 3 if False else 4 # E: If condition is always false i = [x for x in lst if True] # E: If condition in comprehension is always true j = [x for x in lst if False] # E: If condition in comprehension is always false k = [x for x in lst if isinstance(x, int) or foo()] # E: If condition in comprehension is always true \ # E: Right operand of 'or' is never evaluated [builtins fixtures/isinstancelist.pyi] [case testUnreachableFlagMiscTestCaseMissingMethod] # flags: --warn-unreachable class Case1: def test1(self) -> bool: return False and self.missing() # E: Right operand of 'and' is never evaluated def test2(self) -> bool: return not self.property_decorator_missing and self.missing() # 
E: Right operand of 'and' is never evaluated def property_decorator_missing(self) -> bool: return True [builtins fixtures/bool.pyi] [case testUnreachableFlagWithGenerics] # flags: --warn-unreachable from typing import TypeVar, Generic T1 = TypeVar('T1', bound=int) T2 = TypeVar('T2', int, str) def test1(x: T1) -> T1: if isinstance(x, int): reveal_type(x) # N: Revealed type is 'T1`-1' else: reveal_type(x) # E: Statement is unreachable return x def test2(x: T2) -> T2: if isinstance(x, int): reveal_type(x) # N: Revealed type is 'builtins.int*' else: reveal_type(x) # N: Revealed type is 'builtins.str*' if False: # This is unreachable, but we don't report an error, unfortunately. # The presence of the TypeVar with values unfortunately currently shuts # down type-checking for this entire function. # TODO: Find a way of removing this limitation reveal_type(x) return x class Test3(Generic[T2]): x: T2 def func(self) -> None: if isinstance(self.x, int): reveal_type(self.x) # N: Revealed type is 'builtins.int*' else: reveal_type(self.x) # N: Revealed type is 'builtins.str*' if False: # Same issue as above reveal_type(self.x) [builtins fixtures/isinstancelist.pyi] [case testUnreachableFlagContextManagersNoSuppress] # flags: --warn-unreachable from contextlib import contextmanager from typing import Optional, Iterator, Any from typing_extensions import Literal class DoesNotSuppress1: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> Optional[bool]: ... class DoesNotSuppress2: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> Literal[False]: ... class DoesNotSuppress3: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> Any: ... class DoesNotSuppress4: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> None: ... 
@contextmanager def simple() -> Iterator[int]: yield 3 def cond() -> bool: ... def noop() -> None: ... def f_no_suppress_1a() -> int: with DoesNotSuppress1(): return 3 noop() # E: Statement is unreachable def f_no_suppress_1b() -> int: with DoesNotSuppress1(): if cond(): return 3 else: return 3 noop() # E: Statement is unreachable def f_no_suppress_2() -> int: with DoesNotSuppress2(): return 3 noop() # E: Statement is unreachable def f_no_suppress_3() -> int: with DoesNotSuppress3(): return 3 noop() # E: Statement is unreachable def f_no_suppress_4() -> int: with DoesNotSuppress4(): return 3 noop() # E: Statement is unreachable def f_no_suppress_5() -> int: with simple(): return 3 noop() # E: Statement is unreachable [typing fixtures/typing-full.pyi] [case testUnreachableFlagContextManagersSuppressed] # flags: --warn-unreachable from contextlib import contextmanager from typing import Optional, Iterator, Any from typing_extensions import Literal class DoesNotSuppress: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> Optional[bool]: ... class Suppresses1: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> bool: ... class Suppresses2: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> Literal[True]: ... def cond() -> bool: ... def noop() -> None: ... 
def f_suppress_1a() -> int: # E: Missing return statement with Suppresses1(): return 3 noop() def f_suppress_1b() -> int: # E: Missing return statement with Suppresses1(): if cond(): return 3 else: return 3 noop() def f_suppress_2() -> int: # E: Missing return statement with Suppresses2(): return 3 noop() def f_mix() -> int: # E: Missing return statement with DoesNotSuppress(), Suppresses1(), DoesNotSuppress(): return 3 noop() [typing fixtures/typing-full.pyi] [case testUnreachableFlagContextManagersSuppressedNoStrictOptional] # flags: --warn-unreachable --no-strict-optional from contextlib import contextmanager from typing import Optional, Iterator, Any from typing_extensions import Literal class DoesNotSuppress1: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> Optional[bool]: ... # Normally, this should suppress. But when strict-optional mode is disabled, we can't # necessarily distinguish between bool and Optional[bool]. So we default to assuming # no suppression, since that's what most context managers will do. class DoesNotSuppress2: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> bool: ... # But if we see Literal[True], it's pretty unlikely the return type is actually meant to # be 'Optional[Literal[True]]'. So, we optimistically assume this is meant to be suppressing. class Suppresses: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> Literal[True]: ... def noop() -> None: ... 
def f_no_suppress_1() -> int: with DoesNotSuppress1(): return 3 noop() # E: Statement is unreachable def f_no_suppress_2() -> int: with DoesNotSuppress1(): return 3 noop() # E: Statement is unreachable def f_suppress() -> int: # E: Missing return statement with Suppresses(): return 3 noop() [typing fixtures/typing-full.pyi] [case testUnreachableFlagContextAsyncManagersNoSuppress] # flags: --warn-unreachable --python-version 3.7 from contextlib import asynccontextmanager from typing import Optional, AsyncIterator, Any from typing_extensions import Literal class DoesNotSuppress1: async def __aenter__(self) -> int: ... async def __aexit__(self, exctype: object, excvalue: object, traceback: object) -> Optional[bool]: ... class DoesNotSuppress2: async def __aenter__(self) -> int: ... async def __aexit__(self, exctype: object, excvalue: object, traceback: object) -> Literal[False]: ... class DoesNotSuppress3: async def __aenter__(self) -> int: ... async def __aexit__(self, exctype: object, excvalue: object, traceback: object) -> Any: ... class DoesNotSuppress4: async def __aenter__(self) -> int: ... async def __aexit__(self, exctype: object, excvalue: object, traceback: object) -> None: ... @asynccontextmanager async def simple() -> AsyncIterator[int]: yield 3 def cond() -> bool: ... def noop() -> None: ... 
async def f_no_suppress_1a() -> int: async with DoesNotSuppress1(): return 3 noop() # E: Statement is unreachable async def f_no_suppress_1b() -> int: async with DoesNotSuppress1(): if cond(): return 3 else: return 3 noop() # E: Statement is unreachable async def f_no_suppress_2() -> int: async with DoesNotSuppress2(): return 3 noop() # E: Statement is unreachable async def f_no_suppress_3() -> int: async with DoesNotSuppress3(): return 3 noop() # E: Statement is unreachable async def f_no_suppress_4() -> int: async with DoesNotSuppress4(): return 3 noop() # E: Statement is unreachable async def f_no_suppress_5() -> int: async with simple(): return 3 noop() # E: Statement is unreachable [typing fixtures/typing-full.pyi] [case testUnreachableFlagContextAsyncManagersSuppressed] # flags: --warn-unreachable --python-version 3.7 from contextlib import asynccontextmanager from typing import Optional, AsyncIterator, Any from typing_extensions import Literal class DoesNotSuppress: async def __aenter__(self) -> int: ... async def __aexit__(self, exctype: object, excvalue: object, traceback: object) -> Optional[bool]: ... class Suppresses1: async def __aenter__(self) -> int: ... async def __aexit__(self, exctype: object, excvalue: object, traceback: object) -> bool: ... class Suppresses2: async def __aenter__(self) -> int: ... async def __aexit__(self, exctype: object, excvalue: object, traceback: object) -> Literal[True]: ... def cond() -> bool: ... def noop() -> None: ... 
async def f_suppress_1() -> int: # E: Missing return statement async with Suppresses1(): return 3 noop() async def f_suppress_2() -> int: # E: Missing return statement async with Suppresses1(): if cond(): return 3 else: return 3 noop() async def f_suppress_3() -> int: # E: Missing return statement async with Suppresses2(): return 3 noop() async def f_mix() -> int: # E: Missing return statement async with DoesNotSuppress(), Suppresses1(), DoesNotSuppress(): return 3 noop() [typing fixtures/typing-full.pyi] [case testUnreachableFlagContextAsyncManagersAbnormal] # flags: --warn-unreachable --python-version 3.7 from contextlib import asynccontextmanager from typing import Optional, AsyncIterator, Any from typing_extensions import Literal class RegularManager: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> bool: ... class AsyncManager: async def __aenter__(self) -> int: ... async def __aexit__(self, exctype: object, excvalue: object, traceback: object) -> bool: ... def noop() -> None: ... async def f_bad_1() -> int: async with RegularManager(): # E: "RegularManager" has no attribute "__aenter__"; maybe "__enter__"? \ # E: "RegularManager" has no attribute "__aexit__"; maybe "__exit__"? return 3 noop() # E: Statement is unreachable def f_bad_2() -> int: with AsyncManager(): # E: "AsyncManager" has no attribute "__enter__"; maybe "__aenter__"? \ # E: "AsyncManager" has no attribute "__exit__"; maybe "__aexit__"? return 3 noop() # E: Statement is unreachable # TODO: We should consider reporting an error when the user tries using # context manager with malformed signatures instead of silently continuing. class RegularManagerMalformedSignature: def __enter__(self) -> int: ... def __exit__(self, exctype: object, excvalue: object, traceback: object) -> object: ... class AsyncManagerMalformedSignature: async def __aenter__(self) -> int: ... 
async def __aexit__(self, exctype: object, excvalue: object, traceback: object) -> object: ... def f_malformed_1() -> int: with RegularManagerMalformedSignature(): return 3 noop() # E: Statement is unreachable async def f_malformed_2() -> int: async with AsyncManagerMalformedSignature(): return 3 noop() # E: Statement is unreachable [typing fixtures/typing-full.pyi] mypy-0.761/test-data/unit/check-unsupported.test0000644€tŠÔÚ€2›s®0000000056213576752246026134 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for unsupported features [case testDecorateOverloadedFunction] from foo import * [file foo.pyi] # The error messages are not the most informative ever. def d(x): pass @d def f(): pass def f(x): pass # E def g(): pass @d # E def g(x): pass [out] tmp/foo.pyi:5: error: Name 'f' already defined on line 3 tmp/foo.pyi:7: error: Name 'g' already defined on line 6 mypy-0.761/test-data/unit/check-varargs.test0000644€tŠÔÚ€2›s®0000005117113576752246025213 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for the type checker related to varargs. -- Varargs within body -- ------------------- [case testVarArgsWithinFunction] from typing import Tuple def f( *b: 'B') -> None: ab = None # type: Tuple[B, ...] ac = None # type: Tuple[C, ...] 
if int(): b = ac # E: Incompatible types in assignment (expression has type "Tuple[C, ...]", variable has type "Tuple[B, ...]") ac = b # E: Incompatible types in assignment (expression has type "Tuple[B, ...]", variable has type "Tuple[C, ...]") b = ab ab = b class B: pass class C: pass [builtins fixtures/tuple.pyi] [out] [case testVarArgsAreTuple] from typing import Tuple, Sequence def want_tuple(types: Tuple[type, ...]): pass def want_sequence(types: Sequence[type]): pass def test(*t: type) -> None: want_tuple(t) want_sequence(t) [builtins fixtures/tuple.pyi] [out] -- Calling varargs function -- ------------------------ [case testCallingVarArgsFunction] a = None # type: A b = None # type: B c = None # type: C f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "A" f(a, b, c) # E: Argument 3 to "f" has incompatible type "C"; expected "A" f(g()) # E: "g" does not return a value f(a, g()) # E: "g" does not return a value f() f(a) f(b) f(a, b, a, b) def f( *a: 'A') -> None: pass def g() -> None: pass class A: pass class B(A): pass class C: pass [builtins fixtures/list.pyi] [case testCallingVarArgsFunctionWithAlsoNormalArgs] a = None # type: A b = None # type: B c = None # type: C f(a) # E: Argument 1 to "f" has incompatible type "A"; expected "C" f(c, c) # E: Argument 2 to "f" has incompatible type "C"; expected "A" f(c, a, b, c) # E: Argument 4 to "f" has incompatible type "C"; expected "A" f(c) f(c, a) f(c, b, b, a, b) def f(a: 'C', *b: 'A') -> None: pass class A: pass class B(A): pass class C: pass [builtins fixtures/list.pyi] [case testCallingVarArgsFunctionWithDefaultArgs] a = None # type: A b = None # type: B c = None # type: C f(a) # E: Argument 1 to "f" has incompatible type "A"; expected "Optional[C]" f(c, c) # E: Argument 2 to "f" has incompatible type "C"; expected "A" f(c, a, b, c) # E: Argument 4 to "f" has incompatible type "C"; expected "A" f() f(c) f(c, a) f(c, b, b, a, b) def f(a: 'C' = None, *b: 'A') -> None: pass class A: pass class 
B(A): pass class C: pass [builtins fixtures/list.pyi] [case testCallVarargsFunctionWithIterable] from typing import Iterable it1 = None # type: Iterable[int] it2 = None # type: Iterable[str] def f(*x: int) -> None: pass f(*it1) f(*it2) # E: Argument 1 to "f" has incompatible type "*Iterable[str]"; expected "int" [builtins fixtures/for.pyi] [case testCallVarargsFunctionWithTwoTupleStarArgs] from typing import TypeVar, Tuple T1 = TypeVar('T1') T2 = TypeVar('T2') T3 = TypeVar('T3') T4 = TypeVar('T4') def f(a: T1, b: T2, c: T3, d: T4) -> Tuple[T1, T2, T3, T4]: ... x: Tuple[int, str] y: Tuple[float, bool] reveal_type(f(*x, *y)) # N: Revealed type is 'Tuple[builtins.int*, builtins.str*, builtins.float*, builtins.bool*]' [builtins fixtures/list.pyi] [case testCallVarargsFunctionWithIterableAndPositional] from typing import Iterable it1 = None # type: Iterable[int] def f(*x: int) -> None: pass f(*it1, 1, 2) f(*it1, 1, *it1, 2) f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [builtins fixtures/for.pyi] [case testCallVarargsFunctionWithTupleAndPositional] def f(*x: int) -> None: pass it1 = (1, 2) it2 = ('',) f(*it1, 1, 2) f(*it1, 1, *it1, 2) f(*it1, 1, *it2, 2) # E: Argument 3 to "f" has incompatible type "*Tuple[str]"; expected "int" f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [builtins fixtures/for.pyi] -- Calling varargs function + type inference -- ----------------------------------------- [case testTypeInferenceWithCalleeVarArgs] from typing import TypeVar T = TypeVar('T') a = None # type: A b = None # type: B c = None # type: C o = None # type: object if int(): a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): o = f() if int(): a 
= f(a) if int(): a = f(b) if int(): a = f(a, b, a) if int(): o = f(a, b, o) if int(): c = f(c) def f( *a: T) -> T: pass class A: pass class B(A): pass class C: pass [builtins fixtures/list.pyi] [case testTypeInferenceWithCalleeVarArgsAndDefaultArgs] from typing import TypeVar T = TypeVar('T') a = None # type: A o = None # type: object if int(): a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): a = f(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): a = f(a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): a = f(a, a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): a = f(a) if int(): a = f(a, a) if int(): a = f(a, a, a) def f(a: T, b: T = None, *c: T) -> T: pass class A: pass [builtins fixtures/list.pyi] -- Calling normal function with varargs -- ------------------------------------ [case testCallingWithListVarArgs] from typing import List, Any, cast aa = None # type: List[A] ab = None # type: List[B] a = None # type: A b = None # type: B f(*aa) # Fail f(a, *ab) # Ok f(a, b) (cast(Any, f))(*aa) # IDEA: Move to check-dynamic? (cast(Any, f))(a, *ab) # IDEA: Move to check-dynamic? 
def f(a: 'A', b: 'B') -> None: pass class A: pass class B: pass [builtins fixtures/list.pyi] [out] main:7: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" [case testCallingWithTupleVarArgs] a = None # type: A b = None # type: B c = None # type: C cc = None # type: CC f(*(a, b, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, B, B]"; expected "C" f(*(b, b, c)) # E: Argument 1 to "f" has incompatible type "*Tuple[B, B, C]"; expected "A" f(a, *(b, b)) # E: Argument 2 to "f" has incompatible type "*Tuple[B, B]"; expected "C" f(b, *(b, c)) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(*(a, b)) # E: Too few arguments for "f" f(*(a, b, c, c)) # E: Too many arguments for "f" f(a, *(b, c, c)) # E: Too many arguments for "f" f(*(a, b, c)) f(a, *(b, c)) f(a, b, *(c,)) f(a, *(b, cc)) def f(a: 'A', b: 'B', c: 'C') -> None: pass class A: pass class B: pass class C: pass class CC(C): pass [builtins fixtures/tuple.pyi] [case testInvalidVarArg] a = None # type: A f(*None) f(*a) # E: List or tuple expected as variable arguments f(*(a,)) def f(a: 'A') -> None: pass class A: pass [builtins fixtures/tuple.pyi] -- Calling varargs function with varargs -- ------------------------------------- [case testCallingVarArgsFunctionWithListVarArgs] from typing import List aa, ab, a, b = None, None, None, None # type: (List[A], List[B], A, B) f(*aa) # Fail f(a, *aa) # Fail f(b, *ab) # Fail f(a, a, *ab) # Fail f(a, b, *aa) # Fail f(b, b, *ab) # Fail g(*ab) # Fail f(a, *ab) f(a, b, *ab) f(a, b, b, *ab) g(*aa) def f(a: 'A', *b: 'B') -> None: pass def g(a: 'A', *b: 'A') -> None: pass class A: pass class B: pass [builtins fixtures/list.pyi] [out] main:3: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" main:5: error: Argument 1 to "f" has incompatible type "B"; expected "A" main:6: error: Argument 2 to "f" has incompatible type "A"; expected "B" 
main:7: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A" main:9: error: Argument 1 to "g" has incompatible type "*List[B]"; expected "A" [case testCallingVarArgsFunctionWithTupleVarArgs] a, b, c, cc = None, None, None, None # type: (A, B, C, CC) f(*(b, b, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[B, B, B]"; expected "A" f(*(a, a, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, A, B]"; expected "B" f(*(a, b, a)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, B, A]"; expected "B" f(a, *(a, b)) # E: Argument 2 to "f" has incompatible type "*Tuple[A, B]"; expected "B" f(b, *(b, b)) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(b, b, *(b,)) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(a, a, *(b,)) # E: Argument 2 to "f" has incompatible type "A"; expected "B" f(a, b, *(a,)) # E: Argument 3 to "f" has incompatible type "*Tuple[A]"; expected "B" f(*()) # E: Too few arguments for "f" f(*(a, b, b)) f(a, *(b, b)) f(a, b, *(b,)) def f(a: 'A', *b: 'B') -> None: pass class A: pass class B: pass class C: pass class CC(C): pass [builtins fixtures/list.pyi] -- Varargs special cases -- --------------------- [case testDynamicVarArg] from typing import Any d, a = None, None # type: (Any, A) f(a, a, *d) # Fail f(a, *d) # Ok f(*d) # Ok g(*d) g(a, *d) g(a, a, *d) def f(a: 'A') -> None: pass def g(a: 'A', *b: 'A') -> None: pass class A: pass [builtins fixtures/list.pyi] [out] main:3: error: Too many arguments for "f" [case testListVarArgsAndSubtyping] from typing import List aa = None # type: List[A] ab = None # type: List[B] g(*aa) # E: Argument 1 to "g" has incompatible type "*List[A]"; expected "B" f(*aa) f(*ab) g(*ab) def f( *a: 'A') -> None: pass def g( *a: 'B') -> None: pass class A: pass class B(A): pass [builtins fixtures/list.pyi] [case testCallerVarArgsAndDefaultArgs] a, b = None, None # type: (A, B) f(*()) # 
Fail f(a, *[a]) # Fail f(a, b, *[a]) # Fail f(*(a, a, b)) # Fail f(*(a,)) f(*(a, b)) f(*(a, b, b, b)) f(a, *[]) f(a, *[b]) f(a, *[b, b]) def f(a: 'A', b: 'B' = None, *c: 'B') -> None: pass class A: pass class B: pass [builtins fixtures/list.pyi] [out] main:3: error: Too few arguments for "f" main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "Optional[B]" main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" main:5: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" main:6: error: Argument 1 to "f" has incompatible type "*Tuple[A, A, B]"; expected "Optional[B]" [case testVarArgsAfterKeywordArgInCall1-skip] # see: mypy issue #2729 def f(x: int, y: str) -> None: pass f(x=1, *[2]) [builtins fixtures/list.pyi] [out] main:2: error: "f" gets multiple values for keyword argument "x" main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str" [case testVarArgsAfterKeywordArgInCall2-skip] # see: mypy issue #2729 def f(x: int, y: str) -> None: pass f(y='x', *[1]) [builtins fixtures/list.pyi] [out] main:2: error: "f" gets multiple values for keyword argument "y" main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str" [case testVarArgsAfterKeywordArgInCall3] def f(x: int, y: str) -> None: pass f(y='x', *(1,)) [builtins fixtures/list.pyi] [case testVarArgsAfterKeywordArgInCall4] def f(x: int, *, y: str) -> None: pass f(y='x', *[1]) [builtins fixtures/list.pyi] [case testVarArgsAfterKeywordArgInCall5] def f(x: int, *, y: str) -> None: pass f(y='x', *(1,)) [builtins fixtures/list.pyi] [case testVarArgsEmptyList] from typing import List def foo() -> None: pass lst: List[int] = [] foo(*lst) [builtins fixtures/list.pyi] [case testVarArgsEmptyTuple] def foo() -> None: pass foo(*()) -- Overloads + varargs -- ------------------- [case testIntersectionTypesAndVarArgs] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) if 
int(): b = f() # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = f(b, b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): b = f(a, *[b]) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b = f(*()) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b = f(*(a,)) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): b = f(*(a, b)) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): a = f(*(b,)) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = f(*(b, b)) # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): a = f(*[b]) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f() a = f(a) a = f(a, b) b = f(b) b = f(b, b) a = f(a, *[b]) a = f(*()) a = f(*(a,)) a = f(*(a, b)) b = f(*(b,)) b = f(*(b, b)) b = f(*[b]) class A: pass class B: pass @overload def f(a: A = None, *b: B) -> A: pass @overload def f(a: B, *b: B) -> B: pass [builtins fixtures/list.pyi] -- Caller varargs + type inference -- ------------------------------- [case testCallerVarArgsListWithTypeInference] from typing import List, TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') a, b, aa = None, None, None # type: (A, B, List[A]) if int(): a, b = f(*aa) # E: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" if int(): b, b = f(*aa) # E: Argument 1 to "f" has incompatible type 
"*List[A]"; expected "B" if int(): a, a = f(b, *aa) # E: Argument 1 to "f" has incompatible type "B"; expected "A" if int(): b, b = f(b, *aa) # E: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" if int(): b, b = f(b, b, *aa) # E: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" if int(): a, b = f(a, *a) # E: List or tuple expected as variable arguments if int(): a, b = f(*a) # E: List or tuple expected as variable arguments if int(): a, a = f(*aa) if int(): b, a = f(b, *aa) if int(): b, a = f(b, a, *aa) def f(a: S, *b: T) -> Tuple[S, T]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [case testCallerVarArgsTupleWithTypeInference] from typing import TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') a, b = None, None # type: (A, B) if int(): a, a = f(*(a, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, B]"; expected "A" if int(): b, b = f(a, *(b,)) # E: Argument 1 to "f" has incompatible type "A"; expected "B" if int(): a, a = f(*(a, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, B]"; expected "A" if int(): b, b = f(a, *(b,)) # E: Argument 1 to "f" has incompatible type "A"; expected "B" if int(): a, b = f(*(a, b, b)) # E: Too many arguments for "f" if int(): a, b = f(*(a, b)) if int(): a, b = f(a, *(b,)) def f(a: S, b: T) -> Tuple[S, T]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [case testCallerVarargsAndComplexTypeInference] from typing import List, TypeVar, Generic, Tuple T = TypeVar('T') S = TypeVar('S') a, b = None, None # type: (A, B) ao = None # type: List[object] aa = None # type: List[A] ab = None # type: List[B] if int(): a, aa = G().f(*[a]) \ # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A") \ # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" 
instead, which is covariant if int(): aa, a = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[]", variable has type "A") if int(): ab, aa = G().f(*[a]) \ # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant \ # E: Argument 1 to "f" of "G" has incompatible type "*List[A]"; expected "B" if int(): ao, ao = G().f(*[a]) \ # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[object]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant if int(): aa, aa = G().f(*[a]) \ # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant class G(Generic[T]): def f(self, *a: S) -> Tuple[List[S], List[T]]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [case testCallerTupleVarArgsAndGenericCalleeVarArg] # flags: --strict-optional from typing import TypeVar T = TypeVar('T') def f(*args: T) -> T: ... 
reveal_type(f(*(1, None))) # N: Revealed type is 'Union[Literal[1]?, None]' reveal_type(f(1, *(None, 1))) # N: Revealed type is 'Union[Literal[1]?, None]' reveal_type(f(1, *(1, None))) # N: Revealed type is 'Union[builtins.int, None]' [builtins fixtures/tuple.pyi] -- Comment signatures -- ------------------ [case testVarArgsAndCommentSignature] import typing def f(*x): # type: (*int) -> None pass f(1) f(1, 2) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" f(1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] -- Subtyping -- --------- [case testVarArgsFunctionSubtyping] from typing import Callable x = None # type: Callable[[int], None] def f(*x: int) -> None: pass def g(*x: str) -> None: pass x = f x = g # E: Incompatible types in assignment (expression has type "Callable[[VarArg(str)], None]", variable has type "Callable[[int], None]") [builtins fixtures/list.pyi] [out] -- Decorated method where self is implied by *args -- ----------------------------------------------- [case testVarArgsCallableSelf] from typing import Callable def cm(func) -> Callable[..., None]: pass class C: @cm def foo(self) -> None: pass C().foo() C().foo(1) # The decorator's return type says this should be okay [case testInvariantDictArgNote] from typing import Dict, Sequence def f(x: Dict[str, Sequence[int]]) -> None: pass def g(x: Dict[str, float]) -> None: pass def h(x: Dict[str, int]) -> None: pass a = {'a': [1, 2]} b = {'b': ['c', 'd']} c = {'c': 1.0} d = {'d': 1} f(a) # E: Argument 1 to "f" has incompatible type "Dict[str, List[int]]"; expected "Dict[str, Sequence[int]]" \ # N: "Dict" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Mapping" instead, which is covariant in the value type f(b) # E: Argument 1 to "f" has incompatible type "Dict[str, List[str]]"; expected "Dict[str, Sequence[int]]" g(c) g(d) # E: Argument 1 to "g" has incompatible type 
"Dict[str, int]"; expected "Dict[str, float]" \ # N: "Dict" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Mapping" instead, which is covariant in the value type h(c) # E: Argument 1 to "h" has incompatible type "Dict[str, float]"; expected "Dict[str, int]" h(d) [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testInvariantListArgNote] from typing import List, Union def f(numbers: List[Union[int, float]]) -> None: pass a = [1, 2] f(a) # E: Argument 1 to "f" has incompatible type "List[int]"; expected "List[Union[int, float]]" \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant x = [1] y = ['a'] if int(): x = y # E: Incompatible types in assignment (expression has type "List[str]", variable has type "List[int]") [builtins fixtures/list.pyi] [case testInvariantTypeConfusingNames] from typing import TypeVar class Listener: pass class DictReader: pass def f(x: Listener) -> None: pass def g(y: DictReader) -> None: pass a = [1, 2] b = {'b': 1} f(a) # E: Argument 1 to "f" has incompatible type "List[int]"; expected "Listener" g(b) # E: Argument 1 to "g" has incompatible type "Dict[str, int]"; expected "DictReader" [builtins fixtures/dict.pyi] mypy-0.761/test-data/unit/check-warnings.test0000644€tŠÔÚ€2›s®0000001221413576752246025371 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for warning generation. 
-- Redundant casts -- --------------- [case testRedundantCast] # flags: --warn-redundant-casts from typing import cast a = 1 b = cast(str, a) c = cast(int, a) [out] main:5: error: Redundant cast to "int" [case testRedundantCastWithIsinstance] # flags: --warn-redundant-casts from typing import cast, Union x = 1 # type: Union[int, str] if isinstance(x, str): cast(str, x) [builtins fixtures/isinstance.pyi] [out] main:5: error: Redundant cast to "str" [case testCastToSuperclassNotRedundant] # flags: --warn-redundant-casts from typing import cast, TypeVar, List T = TypeVar('T') def add(xs: List[T], ys: List[T]) -> List[T]: pass class A: pass class B(A): pass a = A() b = B() # Without the cast, the following line would fail to type check. c = add([cast(A, b)], [a]) [builtins fixtures/list.pyi] -- Unused 'type: ignore' comments -- ------------------------------ [case testUnusedTypeIgnore] # flags: --warn-unused-ignores a = 1 if int(): a = 'a' # type: ignore if int(): a = 2 # type: ignore # E: unused 'type: ignore' comment if int(): a = 'b' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testUnusedTypeIgnoreImport] # flags: --warn-unused-ignores import banana # type: ignore import m # type: ignore from m import * # type: ignore [file m.py] pass [out] main:3: error: unused 'type: ignore' comment main:4: error: unused 'type: ignore' comment -- No return -- --------- [case testNoReturn] # flags: --warn-no-return def f() -> int: pass def g() -> int: if bool(): return 1 [builtins fixtures/list.pyi] [out] main:5: error: Missing return statement [case testNoReturnWhile] # flags: --warn-no-return def h() -> int: while True: if bool(): return 1 def i() -> int: while 1: if bool(): return 1 if bool(): break def j() -> int: while 1: if bool(): return 1 if bool(): continue [builtins fixtures/list.pyi] [out] main:7: error: Missing return statement [case testNoReturnExcept] # flags: --warn-no-return def f() -> int: try: return 1 except: 
pass def g() -> int: try: pass except: return 1 else: return 1 def h() -> int: try: pass except: pass else: pass finally: return 1 [builtins fixtures/exception.pyi] [out] main:2: error: Missing return statement [case testNoReturnEmptyBodyWithDocstring] def f() -> int: """Return the number of peppers.""" # This might be an @abstractmethod, for example pass [out] -- Returning Any -- ------------- [case testReturnAnyFromTypedFunction] # flags: --warn-return-any from typing import Any def g() -> Any: pass def f() -> int: return g() [out] main:4: error: Returning Any from function declared to return "int" [case testReturnAnyForNotImplementedInBinaryMagicMethods] # flags: --warn-return-any class A: def __eq__(self, other: object) -> bool: return NotImplemented [builtins fixtures/notimplemented.pyi] [out] [case testReturnAnyForNotImplementedInNormalMethods] # flags: --warn-return-any class A: def some(self) -> bool: return NotImplemented [builtins fixtures/notimplemented.pyi] [out] main:3: error: Returning Any from function declared to return "bool" [case testReturnAnyFromTypedFunctionWithSpecificFormatting] # flags: --warn-return-any from typing import Any, Tuple typ = Tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int] def g() -> Any: pass def f() -> typ: return g() [out] main:11: error: Returning Any from function declared to return [case testReturnAnySilencedFromTypedFunction] # flags: --warn-return-any from typing import Any def g() -> Any: pass def f() -> int: result = g() # type: int return result [out] [case testReturnAnyFromUntypedFunction] # flags: 
--warn-return-any from typing import Any def g() -> Any: pass def f(): return g() [out] [case testReturnAnyFromAnyTypedFunction] # flags: --warn-return-any from typing import Any def g() -> Any: pass def f() -> Any: return g() [out] [case testOKReturnAnyIfProperSubtype] # flags: --warn-return-any --strict-optional from typing import Any, Optional class Test(object): def __init__(self) -> None: self.attr = "foo" # type: Any def foo(self, do_it: bool) -> Optional[Any]: if do_it: return self.attr # Should not warn here else: return None [builtins fixtures/list.pyi] [out] [case testReturnAnyDeferred] # flags: --warn-return-any def foo(a1: A) -> int: if a1._x: return 1 n = 1 return n class A: def __init__(self, x: int) -> None: self._x = x mypy-0.761/test-data/unit/cmdline.test0000644€tŠÔÚ€2›s®0000010314713576752246024107 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for command line parsing -- ------------------------------ -- -- The initial line specifies the command line, in the format -- -- # cmd: mypy -- -- Note that # flags: --some-flag IS NOT SUPPORTED. -- Use # cmd: mypy --some-flag ... -- -- '== Return code: ' is added to the output when the process return code -- is "nonobvious" -- that is, when it is something other than 0 if there are no -- messages and 1 if there are. 
-- Directories/packages on the command line -- ---------------------------------------- [case testCmdlinePackage] # cmd: mypy pkg [file pkg/__init__.py] [file pkg/a.py] undef [file pkg/subpkg/__init__.py] [file pkg/subpkg/a.py] undef import pkg.subpkg.a [out] pkg/a.py:1: error: Name 'undef' is not defined pkg/subpkg/a.py:1: error: Name 'undef' is not defined [case testCmdlinePackageSlash] # cmd: mypy pkg/ [file pkg/__init__.py] [file pkg/a.py] undef [file pkg/subpkg/__init__.py] [file pkg/subpkg/a.py] undef import pkg.subpkg.a [out] pkg/a.py:1: error: Name 'undef' is not defined pkg/subpkg/a.py:1: error: Name 'undef' is not defined [case testCmdlineNonPackage] # cmd: mypy dir [file dir/a.py] undef [file dir/subdir/a.py] undef [out] dir/a.py:1: error: Name 'undef' is not defined [case testCmdlineNonPackageSlash] # cmd: mypy dir/ [file dir/a.py] undef [file dir/subdir/a.py] undef [out] dir/a.py:1: error: Name 'undef' is not defined [case testCmdlinePackageContainingSubdir] # cmd: mypy pkg [file pkg/__init__.py] [file pkg/a.py] undef [file pkg/subdir/a.py] undef [out] pkg/a.py:1: error: Name 'undef' is not defined [case testCmdlineNonPackageContainingPackage] # cmd: mypy dir [file dir/a.py] undef import subpkg.a [file dir/subpkg/__init__.py] [file dir/subpkg/a.py] undef [out] dir/subpkg/a.py:1: error: Name 'undef' is not defined dir/a.py:1: error: Name 'undef' is not defined [case testCmdlineInvalidPackageName] # cmd: mypy dir/sub.pkg/a.py [file dir/sub.pkg/__init__.py] [file dir/sub.pkg/a.py] undef [out] sub.pkg is not a valid Python package name == Return code: 2 [case testBadFileEncoding] # cmd: mypy a.py [file a.py] # coding: uft-8 [out] mypy: can't decode file 'a.py': unknown encoding: uft-8 == Return code: 2 -- ' [case testCannotIgnoreDuplicateModule] # cmd: mypy one/mod/__init__.py two/mod/__init__.py [file one/mod/__init__.py] # type: ignore [file two/mod/__init__.py] # type: ignore [out] two/mod/__init__.py: error: Duplicate module named 'mod' (also at 
'one/mod/__init__.py') == Return code: 2 [case promptsForgotInit] # cmd: mypy a.py one/mod/a.py [file one/__init__.py] # type: ignore [file a.py] # type: ignore [file one/mod/a.py] #type: ignore [out] one/mod/a.py: error: Duplicate module named 'a' (also at 'a.py') one/mod/a.py: error: Are you missing an __init__.py? == Return code: 2 [case testFlagsFile] # cmd: mypy @flagsfile [file flagsfile] -2 main.py [file main.py] def f(): try: 1/0 except ZeroDivisionError, err: print err [case testConfigFile] # cmd: mypy main.py [file mypy.ini] \[mypy] python_version = 2.7 [file main.py] def f(): try: 1/0 except ZeroDivisionError, err: print err [case testErrorContextConfig] # cmd: mypy main.py [file mypy.ini] \[mypy] show_error_context=True [file main.py] def f() -> None: 0 + "" [out] main.py: note: In function "f": main.py:2: error: Unsupported operand types for + ("int" and "str") [case testAltConfigFile] # cmd: mypy --config-file config.ini main.py [file config.ini] \[mypy] python_version = 2.7 [file main.py] def f(): try: 1/0 except ZeroDivisionError, err: print err [case testNoConfigFile] # cmd: mypy main.py --config-file= [file mypy.ini] \[mypy] warn_unused_ignores = True [file main.py] # type: ignore [case testPerFileConfigSection] # cmd: mypy x.py y.py z.py [file mypy.ini] \[mypy] disallow_untyped_defs = True \[mypy-y] disallow_untyped_defs = False \[mypy-z] disallow_untyped_calls = True [file x.py] def f(a): pass def g(a: int) -> int: return f(a) [file y.py] def f(a): pass def g(a: int) -> int: return f(a) [file z.py] def f(a): pass def g(a: int) -> int: return f(a) [out] z.py:1: error: Function is missing a type annotation z.py:4: error: Call to untyped function "f" in typed context x.py:1: error: Function is missing a type annotation [case testPerFileConfigSectionMultipleMatchesDisallowed] # cmd: mypy xx.py xy.py yx.py yy.py [file mypy.ini] \[mypy] \[mypy-*x*] disallow_untyped_defs = True \[mypy-*y*] disallow_untyped_calls = True [file xx.py] def f(a): pass def 
g(a: int) -> int: return f(a) [file xy.py] def f(a): pass def g(a: int) -> int: return f(a) [file yx.py] def f(a): pass def g(a: int) -> int: return f(a) [file yy.py] def f(a): pass def g(a: int) -> int: return f(a) [out] mypy.ini: [mypy-*x*]: Patterns must be fully-qualified module names, optionally with '*' in some components (e.g spam.*.eggs.*) mypy.ini: [mypy-*y*]: Patterns must be fully-qualified module names, optionally with '*' in some components (e.g spam.*.eggs.*) == Return code: 0 [case testMultipleGlobConfigSection] # cmd: mypy x.py y.py z.py [file mypy.ini] \[mypy] \[mypy-x.*,z.*] disallow_untyped_defs = True [file x.py] def f(a): pass [file y.py] def f(a): pass [file z.py] def f(a): pass [out] z.py:1: error: Function is missing a type annotation x.py:1: error: Function is missing a type annotation [case testConfigErrorNoSection] # cmd: mypy -c pass [file mypy.ini] [out] mypy.ini: No [mypy] section in config file == Return code: 0 [case testConfigErrorUnknownFlag] # cmd: mypy -c pass [file mypy.ini] \[mypy] bad = 0 [out] mypy.ini: [mypy]: Unrecognized option: bad = 0 == Return code: 0 [case testConfigErrorBadFlag] # cmd: mypy a.py [file mypy.ini] \[mypy] disallow-untyped-defs = True [file a.py] def f(): pass [out] mypy.ini: [mypy]: Unrecognized option: disallow-untyped-defs = True == Return code: 0 [case testConfigErrorBadBoolean] # cmd: mypy -c pass [file mypy.ini] \[mypy] ignore_missing_imports = nah [out] mypy.ini: [mypy]: ignore_missing_imports: Not a boolean: nah == Return code: 0 [case testConfigErrorNotPerFile] # cmd: mypy -c pass [file mypy.ini] \[mypy] \[mypy-*] python_version = 3.4 [out] mypy.ini: [mypy-*]: Per-module sections should only specify per-module flags (python_version) == Return code: 0 [case testConfigMypyPath] # cmd: mypy file.py [file mypy.ini] \[mypy] mypy_path = foo:bar , baz [file foo/foo.pyi] def foo(x: int) -> str: ... [file bar/bar.pyi] def bar(x: str) -> list: ... [file baz/baz.pyi] def baz(x: list) -> dict: ... 
[file file.py] import no_stubs from foo import foo from bar import bar from baz import baz baz(bar(foo(42))) baz(bar(foo('oof'))) [out] file.py:1: error: Cannot find implementation or library stub for module named 'no_stubs' file.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports file.py:6: error: Argument 1 to "foo" has incompatible type "str"; expected "int" [case testIgnoreErrorsConfig] # cmd: mypy x.py y.py [file mypy.ini] \[mypy] \[mypy-x] ignore_errors = True [file x.py] "" + 0 [file y.py] "" + 0 [out] y.py:1: error: Unsupported operand types for + ("str" and "int") [case testConfigFollowImportsNormal] # cmd: mypy main.py [file main.py] from a import x x + 0 x + '' # E import a a.x + 0 a.x + '' # E a.y # E a + 0 # E [file mypy.ini] \[mypy] follow_imports = normal [file a.py] x = 0 x += '' # Error reported here [out] a.py:2: error: Unsupported operand types for + ("int" and "str") main.py:3: error: Unsupported operand types for + ("int" and "str") main.py:6: error: Unsupported operand types for + ("int" and "str") main.py:7: error: Module has no attribute "y" main.py:8: error: Unsupported operand types for + (Module and "int") [case testConfigFollowImportsSilent] # cmd: mypy main.py [file main.py] from a import x x + '' import a a.x + '' a.y a + 0 [file mypy.ini] \[mypy] follow_imports = silent [file a.py] x = 0 x += '' # No error reported [out] main.py:2: error: Unsupported operand types for + ("int" and "str") main.py:4: error: Unsupported operand types for + ("int" and "str") main.py:5: error: Module has no attribute "y" main.py:6: error: Unsupported operand types for + (Module and "int") [case testConfigFollowImportsSkip] # cmd: mypy main.py [file main.py] from a import x reveal_type(x) # Expect Any import a reveal_type(a.x) # Expect Any [file mypy.ini] \[mypy] follow_imports = skip [file a.py] / # No error reported [out] main.py:2: note: Revealed type is 'Any' main.py:4: note: Revealed type is 'Any' [case 
testConfigFollowImportsError] # cmd: mypy main.py [file main.py] from a import x reveal_type(x) # Expect Any import a # Error reported here reveal_type(a.x) # Expect Any [file mypy.ini] \[mypy] follow_imports = error [file a.py] / # No error reported [out] main.py:1: error: Import of 'a' ignored main.py:1: note: (Using --follow-imports=error, module not passed on command line) main.py:2: note: Revealed type is 'Any' main.py:4: note: Revealed type is 'Any' [case testConfigFollowImportsSelective] # cmd: mypy main.py [file mypy.ini] \[mypy] \[mypy-normal] follow_imports = normal \[mypy-silent] follow_imports = silent \[mypy-skip] follow_imports = skip \[mypy-error] follow_imports = error [file main.py] import normal import silent import skip import error reveal_type(normal.x) reveal_type(silent.x) reveal_type(skip) reveal_type(error) [file normal.py] x = 0 x += '' [file silent.py] x = 0 x += '' [file skip.py] bla bla [file error.py] bla bla [out] normal.py:2: error: Unsupported operand types for + ("int" and "str") main.py:4: error: Import of 'error' ignored main.py:4: note: (Using --follow-imports=error, module not passed on command line) main.py:5: note: Revealed type is 'builtins.int' main.py:6: note: Revealed type is 'builtins.int' main.py:7: note: Revealed type is 'Any' main.py:8: note: Revealed type is 'Any' [case testConfigSilentMissingImportsOff] # cmd: mypy main.py [file main.py] import missing # Expect error here reveal_type(missing.x) # Expect Any [file mypy.ini] \[mypy] ignore_missing_imports = False [out] main.py:1: error: Cannot find implementation or library stub for module named 'missing' main.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main.py:2: note: Revealed type is 'Any' [case testConfigSilentMissingImportsOn] # cmd: mypy main.py [file main.py] import missing # No error here reveal_type(missing.x) # Expect Any [file mypy.ini] \[mypy] ignore_missing_imports = True [out] main.py:2: note: Revealed type is 
'Any' [case testConfigNoErrorForUnknownXFlagInSubsection] # cmd: mypy -c pass [file mypy.ini] \[mypy] \[mypy-foo] x_bad = 0 [out] [case testDotInFilenameOKScript] # cmd: mypy a.b.py c.d.pyi [file a.b.py] undef [file c.d.pyi] whatever [out] c.d.pyi:1: error: Name 'whatever' is not defined a.b.py:1: error: Name 'undef' is not defined [case testDotInFilenameOKFolder] # cmd: mypy my.folder [file my.folder/tst.py] undef [out] my.folder/tst.py:1: error: Name 'undef' is not defined [case testDotInFilenameNoImport] # cmd: mypy main.py [file main.py] import a.b [file a.b.py] whatever [out] main.py:1: error: Cannot find implementation or library stub for module named 'a.b' main.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main.py:1: error: Cannot find implementation or library stub for module named 'a' [case testPythonVersionTooOld10] # cmd: mypy -c pass [file mypy.ini] \[mypy] python_version = 1.0 [out] mypy.ini: [mypy]: python_version: Python major version '1' out of range (must be 2 or 3) == Return code: 0 [case testPythonVersionTooOld26] # cmd: mypy -c pass [file mypy.ini] \[mypy] python_version = 2.6 [out] mypy.ini: [mypy]: python_version: Python 2.6 is not supported (must be 2.7) == Return code: 0 [case testPythonVersionTooOld33] # cmd: mypy -c pass [file mypy.ini] \[mypy] python_version = 3.3 [out] mypy.ini: [mypy]: python_version: Python 3.3 is not supported (must be 3.4 or higher) == Return code: 0 [case testPythonVersionTooNew28] # cmd: mypy -c pass [file mypy.ini] \[mypy] python_version = 2.8 [out] mypy.ini: [mypy]: python_version: Python 2.8 is not supported (must be 2.7) == Return code: 0 [case testPythonVersionTooNew40] # cmd: mypy -c pass [file mypy.ini] \[mypy] python_version = 4.0 [out] mypy.ini: [mypy]: python_version: Python major version '4' out of range (must be 2 or 3) == Return code: 0 [case testPythonVersionAccepted27] # cmd: mypy -c pass [file mypy.ini] \[mypy] python_version = 2.7 [out] [case 
testPythonVersionAccepted34] # cmd: mypy -c pass [file mypy.ini] \[mypy] python_version = 3.4 [out] [case testPythonVersionAccepted36] # cmd: mypy -c pass [file mypy.ini] \[mypy] python_version = 3.6 [out] -- This should be a dumping ground for tests of plugins that are sensitive to -- typeshed changes. [case testTypeshedSensitivePlugins] # cmd: mypy int_pow.py [file int_pow.py] a = 1 b = a + 2 reveal_type(a**0) # N: Revealed type is 'builtins.int' reveal_type(a**1) # N: Revealed type is 'builtins.int' reveal_type(a**2) # N: Revealed type is 'builtins.int' reveal_type(a**-0) # N: Revealed type is 'builtins.int' reveal_type(a**-1) # N: Revealed type is 'builtins.float' reveal_type(a**(-2)) # N: Revealed type is 'builtins.float' reveal_type(a**b) # N: Revealed type is 'Any' reveal_type(a.__pow__(2)) # N: Revealed type is 'builtins.int' reveal_type(a.__pow__(a)) # N: Revealed type is 'Any' a.__pow__() # E: Too few arguments for "__pow__" of "int" [case testDisallowAnyUnimported] # cmd: mypy main.py [file mypy.ini] \[mypy] disallow_any_unimported = True ignore_missing_imports = True [file main.py] from unreal import F def f(x: F) -> None: pass [out] main.py:3: error: Argument 1 to "f" becomes "Any" due to an unfollowed import [case testDisallowAnyExplicitDefSignature] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List def f(x: Any) -> None: pass def g() -> Any: pass def h() -> List[Any]: pass [out] m.py:3: error: Explicit "Any" is not allowed m.py:6: error: Explicit "Any" is not allowed m.py:9: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitVarDeclaration] # cmd: mypy --python-version=3.6 m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List v: Any = '' w = '' # type: Any class X: y = '' # type: Any [out] m.py:2: error: Explicit "Any" is not allowed m.py:3: error: Explicit "Any" is not allowed m.py:5: error: Explicit "Any" is 
not allowed [case testDisallowAnyExplicitGenericVarDeclaration] # cmd: mypy --python-version=3.6 m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List v: List[Any] = [] [out] m.py:2: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitInheritance] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List class C(Any): pass class D(List[Any]): pass [out] m.py:3: error: Explicit "Any" is not allowed m.py:6: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitAlias] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List X = Any Y = List[Any] def foo(x: X) -> Y: # no error x.nonexistent() # no error return x [out] m.py:3: error: Explicit "Any" is not allowed m.py:4: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitGenericAlias] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List, TypeVar, Tuple T = TypeVar('T') TupleAny = Tuple[Any, T] # error def foo(x: TupleAny[str]) -> None: # no error pass def goo(x: TupleAny[Any]) -> None: # error pass [out] m.py:5: error: Explicit "Any" is not allowed m.py:10: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitCast] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List, cast x = 1 y = cast(Any, x) z = cast(List[Any], x) [out] m.py:4: error: Explicit "Any" is not allowed m.py:5: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitNamedTuple] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List, NamedTuple Point = NamedTuple('Point', [('x', List[Any]), ('y', Any)]) [out] m.py:3: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitTypeVarConstraint] # cmd: mypy m.py 
[file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List, TypeVar T = TypeVar('T', Any, List[Any]) [out] m.py:3: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitNewType] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from typing import Any, List, NewType Baz = NewType('Baz', Any) # this error does not come from `--disallow-any-explicit` flag Bar = NewType('Bar', List[Any]) [out] m.py:3: error: Argument 2 to NewType(...) must be subclassable (got "Any") m.py:4: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitTypedDictSimple] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from mypy_extensions import TypedDict from typing import Any M = TypedDict('M', {'x': str, 'y': Any}) # error M(x='x', y=2) # no error def f(m: M) -> None: pass # no error [out] m.py:4: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitTypedDictGeneric] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_explicit = True [file m.py] from mypy_extensions import TypedDict from typing import Any, List M = TypedDict('M', {'x': str, 'y': List[Any]}) # error N = TypedDict('N', {'x': str, 'y': List}) # no error [out] m.py:4: error: Explicit "Any" is not allowed [case testDisallowAnyGenericsTupleNoTypeParams] # cmd: mypy --python-version=3.6 m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_generics = True [file m.py] from typing import Tuple def f(s: Tuple) -> None: pass # error def g(s) -> Tuple: # error return 'a', 'b' def h(s) -> Tuple[str, str]: # no error return 'a', 'b' x: Tuple = () # error [out] m.py:3: error: Missing type parameters for generic type "Tuple" m.py:4: error: Missing type parameters for generic type "Tuple" m.py:8: error: Missing type parameters for generic type "Tuple" [case testDisallowAnyGenericsTupleWithNoTypeParamsGeneric] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] 
disallow_any_generics = True [file m.py] from typing import Tuple, List def f(s: List[Tuple]) -> None: pass # error def g(s: List[Tuple[str, str]]) -> None: pass # no error [out] m.py:3: error: Missing type parameters for generic type "Tuple" [case testDisallowAnyGenericsTypeType] # cmd: mypy --python-version=3.6 m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_generics = True [file m.py] from typing import Type, Any def f(s: Type[Any]) -> None: pass # no error def g(s) -> Type: # error return s def h(s) -> Type[str]: # no error return s x: Type = g(0) # error [out] m.py:4: error: Missing type parameters for generic type "Type" m.py:8: error: Missing type parameters for generic type "Type" [case testDisallowAnyGenericsAliasGenericType] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_generics = True [file m.py] from typing import List L = List # no error def f(l: L) -> None: pass # error def g(l: L[str]) -> None: pass # no error [out] m.py:5: error: Missing type parameters for generic type "L" [case testDisallowAnyGenericsGenericAlias] # cmd: mypy --python-version=3.6 m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_generics = True [file m.py] from typing import List, TypeVar, Tuple T = TypeVar('T') A = Tuple[T, str, T] def f(s: A) -> None: pass # error def g(s) -> A: # error return 'a', 'b', 1 def h(s) -> A[str]: # no error return 'a', 'b', 'c' x: A = ('a', 'b', 1) # error [out] m.py:6: error: Missing type parameters for generic type "A" m.py:7: error: Missing type parameters for generic type "A" m.py:11: error: Missing type parameters for generic type "A" [case testDisallowAnyGenericsPlainList] # cmd: mypy --python-version=3.6 m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_generics = True [file m.py] from typing import List def f(l: List) -> None: pass # error def g(l: List[str]) -> None: pass # no error def h(l: List[List]) -> None: pass # error def i(l: List[List[List[List]]]) -> None: pass # error x = [] # error: need type annotation 
y: List = [] # error [out] m.py:3: error: Missing type parameters for generic type "List" m.py:5: error: Missing type parameters for generic type "List" m.py:6: error: Missing type parameters for generic type "List" m.py:8: error: Need type annotation for 'x' (hint: "x: List[] = ...") m.py:9: error: Missing type parameters for generic type "List" [case testDisallowAnyGenericsCustomGenericClass] # cmd: mypy --python-version=3.6 m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_generics = True [file m.py] from typing import Generic, TypeVar, Any T = TypeVar('T') class G(Generic[T]): pass def f() -> G: # error return G() x: G[Any] = G() # no error y: G = x # error [out] m.py:6: error: Missing type parameters for generic type "G" m.py:10: error: Missing type parameters for generic type "G" [case testDisallowAnyGenericsBuiltinCollections] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_generics = True [file m.py] s = tuple([1, 2, 3]) # no error def f(t: tuple) -> None: pass def g() -> list: pass def h(s: dict) -> None: pass def i(s: set) -> None: pass def j(s: frozenset) -> None: pass [out] m.py:3: error: Implicit generic "Any". Use "typing.Tuple" and specify generic parameters m.py:4: error: Implicit generic "Any". Use "typing.List" and specify generic parameters m.py:5: error: Implicit generic "Any". Use "typing.Dict" and specify generic parameters m.py:6: error: Implicit generic "Any". Use "typing.Set" and specify generic parameters m.py:7: error: Implicit generic "Any". 
Use "typing.FrozenSet" and specify generic parameters [case testDisallowAnyGenericsTypingCollections] # cmd: mypy m.py [file mypy.ini] \[mypy] \[mypy-m] disallow_any_generics = True [file m.py] from typing import Tuple, List, Dict, Set, FrozenSet def f(t: Tuple) -> None: pass def g() -> List: pass def h(s: Dict) -> None: pass def i(s: Set) -> None: pass def j(s: FrozenSet) -> None: pass [out] m.py:3: error: Missing type parameters for generic type "Tuple" m.py:4: error: Missing type parameters for generic type "List" m.py:5: error: Missing type parameters for generic type "Dict" m.py:6: error: Missing type parameters for generic type "Set" m.py:7: error: Missing type parameters for generic type "FrozenSet" [case testDisallowSubclassingAny] # cmd: mypy m.py y.py [file mypy.ini] \[mypy] disallow_subclassing_any = True \[mypy-m] disallow_subclassing_any = False [file m.py] from typing import Any x = None # type: Any class ShouldBeFine(x): ... [file y.py] from typing import Any x = None # type: Any class ShouldNotBeFine(x): ... 
[out] y.py:5: error: Class cannot subclass 'x' (has type 'Any') [case testSectionInheritance] # cmd: mypy a [file a/__init__.py] 0() [file a/foo.py] 0() [file a/b/__init__.py] [file a/b/c/__init__.py] 0() [file a/b/c/d/__init__.py] [file a/b/c/d/e/__init__.py] from typing import List def g(x: List) -> None: pass g(None) [file mypy.ini] \[mypy] allow_any_generics = True \[mypy-a.*] ignore_errors = True \[mypy-a.b.*] disallow_any_generics = True ignore_errors = True \[mypy-a.b.c.*] ignore_errors = True \[mypy-a.b.c.d.*] ignore_errors = True \[mypy-a.b.c.d.e.*] ignore_errors = True strict_optional = True \[mypy-a.b.c.d.e] ignore_errors = False [out] a/b/c/d/e/__init__.py:2: error: Missing type parameters for generic type "List" a/b/c/d/e/__init__.py:3: error: Argument 1 to "g" has incompatible type "None"; expected "List[Any]" [case testDisallowUntypedDefsAndGenerics] # cmd: mypy a.py [file mypy.ini] \[mypy] disallow_untyped_defs = True disallow_any_generics = True [file a.py] def get_tasks(self): return 'whatever' [out] a.py:1: error: Function is missing a return type annotation [case testMissingFile] # cmd: mypy nope.py [out] mypy: can't read file 'nope.py': No such file or directory == Return code: 2 --' [case testParseError] # cmd: mypy a.py [file a.py] def foo( [out] a.py:1: error: unexpected EOF while parsing == Return code: 2 [case testParseErrorAnnots] # cmd: mypy a.py [file a.py] def foo(x): # type: (str, int) -> None return [out] a.py:1: error: Type signature has too many arguments [case testModulesAndPackages] # cmd: mypy --package p.a --package p.b --module c [file p/__init__.py] [file p/a.py] def foo(x): # type: (int) -> str return "x" foo("wrong") [file p/b/__init__.py] from ..a import foo def bar(a): # type: (int) -> str return foo(a) bar("wrong") [file c.py] import p.b p.b.bar("wrong") [out] p/a.py:4: error: Argument 1 to "foo" has incompatible type "str"; expected "int" p/b/__init__.py:5: error: Argument 1 to "bar" has incompatible type "str"; 
expected "int" c.py:2: error: Argument 1 to "bar" has incompatible type "str"; expected "int" [case testSrcPEP420Packages] # cmd: mypy -p anamespace --namespace-packages [file mypy.ini] \[mypy]] mypy_path = src [file src/setup.cfg] [file src/anamespace/foo/__init__.py] [file src/anamespace/foo/bar.py] def bar(a: int, b: int) -> str: return a + b [out] src/anamespace/foo/bar.py:2: error: Incompatible return value type (got "int", expected "str") [case testFollowImportStubs1] # cmd: mypy main.py [file mypy.ini] \[mypy] \[mypy-math.*] follow_imports = error follow_imports_for_stubs = True [file main.py] import math math.frobnicate() [out] main.py:1: error: Import of 'math' ignored main.py:1: note: (Using --follow-imports=error, module not passed on command line) [case testFollowImportStubs2] # cmd: mypy main.py [file mypy.ini] \[mypy] \[mypy-math.*] follow_imports = skip follow_imports_for_stubs = True [file main.py] import math math.frobnicate() [case testShadowFile1] # cmd: mypy --shadow-file source.py shadow.py source.py [file source.py] def foo() -> str: return "bar" [file shadow.py] def bar() -> str: return 14 [out] source.py:2: error: Incompatible return value type (got "int", expected "str") [case testShadowFile2] # cmd: mypy --shadow-file s1.py shad1.py --shadow-file s2.py shad2.py --shadow-file s3.py shad3.py s1.py s2.py s3.py s4.py [file s1.py] def foo() -> str: return "bar" [file shad1.py] def bar() -> str: return 14 [file s2.py] def baz() -> str: return 14 [file shad2.py] def baz() -> int: return 14 [file s3.py] def qux() -> str: return "bar" [file shad3.py] def foo() -> int: return [42] [file s4.py] def foo() -> str: return 9 [out] s4.py:2: error: Incompatible return value type (got "int", expected "str") s3.py:2: error: Incompatible return value type (got "List[int]", expected "int") s1.py:2: error: Incompatible return value type (got "int", expected "str") [case testConfigWarnUnusedSection1] # cmd: mypy foo.py quux.py spam/eggs.py [file mypy.ini] 
\[mypy] warn_unused_configs = True incremental = False \[mypy-bar] \[mypy-foo] \[mypy-baz.*] \[mypy-quux.*] \[mypy-spam.*] \[mypy-spam.eggs] \[mypy-emarg.*] \[mypy-emarg.hatch] -- Currently we don't treat an unstructured pattern like a.*.b as unused -- if it matches another section (like a.x.b). This would be reasonable -- to change. ' \[mypy-a.*.b] \[mypy-a.*.c] \[mypy-a.x.b] [file foo.py] [file quux.py] [file spam/__init__.py] [file spam/eggs.py] [out] Warning: unused section(s) in mypy.ini: [mypy-bar], [mypy-baz.*], [mypy-emarg.*], [mypy-emarg.hatch], [mypy-a.*.c], [mypy-a.x.b] == Return code: 0 [case testConfigUnstructuredGlob] # cmd: mypy emarg foo [file mypy.ini] \[mypy] ignore_errors = true \[mypy-*.lol] ignore_errors = false \[mypy-emarg.*] ignore_errors = false \[mypy-emarg.*.villip.*] ignore_errors = true \[mypy-emarg.hatch.villip.mankangulisk] ignore_errors = false [file emarg/__init__.py] [file emarg/foo.py] fail [file emarg/villip.py] fail [file emarg/hatch/__init__.py] [file emarg/hatch/villip/__init__.py] [file emarg/hatch/villip/nus.py] fail [file emarg/hatch/villip/mankangulisk.py] fail [file foo/__init__.py] [file foo/lol.py] fail [out] foo/lol.py:1: error: Name 'fail' is not defined emarg/foo.py:1: error: Name 'fail' is not defined emarg/hatch/villip/mankangulisk.py:1: error: Name 'fail' is not defined [case testPackageRootEmpty] # cmd: mypy --package-root= a/b/c.py main.py [file a/b/c.py] [file main.py] import a.b.c [case testPackageRootNonEmpty] # cmd: mypy --package-root=a/ a/b/c.py main.py [file a/b/c.py] [file main.py] import b.c [case testPackageRootMultiple1] # cmd: mypy --package-root=. --package-root=a a/b/c.py d.py main.py [file a/b/c.py] [file d.py] [file main.py] import b.c import d [case testPackageRootMultiple2] # cmd: mypy --package-root=a/ --package-root=./ a/b/c.py d.py main.py [file a/b/c.py] [file d.py] [file main.py] import b.c import d [case testCacheMap] -- This just checks that a valid --cache-map triple is accepted. 
-- (Errors are too verbose to check.) # cmd: mypy a.py --no-sqlite-cache --cache-map a.py a.meta.json a.data.json [file a.py] [out] [case testIniFiles] # cmd: mypy [file mypy.ini] \[mypy] files = a.py, b.py [file a.py] fail [file b.py] fail [out] b.py:1: error: Name 'fail' is not defined a.py:1: error: Name 'fail' is not defined [case testIniFilesGlobbing] # cmd: mypy [file mypy.ini] \[mypy] files = **/*.py [file a/b.py] fail [file c.py] fail [out] a/b.py:1: error: Name 'fail' is not defined c.py:1: error: Name 'fail' is not defined [case testIniFilesCmdlineOverridesConfig] # cmd: mypy override.py [file mypy.ini] \[mypy] files = config.py [out] mypy: can't read file 'override.py': No such file or directory == Return code: 2 [case testErrorSummaryOnSuccess] # cmd: mypy --error-summary good.py [file good.py] x = 2 + 2 [out] Success: no issues found in 1 source file == Return code: 0 [case testErrorSummaryOnFail] # cmd: mypy --error-summary bad.py [file bad.py] 42 + 'no' [out] bad.py:1: error: Unsupported operand types for + ("int" and "str") Found 1 error in 1 file (checked 1 source file) [case testErrorSummaryOnFailNotes] # cmd: mypy --error-summary bad.py [file bad.py] from typing import List x = [] # type: List[float] y = [] # type: List[int] x = y [out] bad.py:4: error: Incompatible types in assignment (expression has type "List[int]", variable has type "List[float]") bad.py:4: note: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance bad.py:4: note: Consider using "Sequence" instead, which is covariant Found 1 error in 1 file (checked 1 source file) [case testErrorSummaryOnFailTwoErrors] # cmd: mypy --error-summary bad.py foo.py [file bad.py] 42 + 'no' 42 + 'no' [file foo.py] [out] bad.py:1: error: Unsupported operand types for + ("int" and "str") bad.py:2: error: Unsupported operand types for + ("int" and "str") Found 2 errors in 1 file (checked 2 source files) [case testErrorSummaryOnFailTwoFiles] # cmd: mypy 
--error-summary bad.py bad2.py [file bad.py] 42 + 'no' [file bad2.py] 42 + 'no' [out] bad2.py:1: error: Unsupported operand types for + ("int" and "str") bad.py:1: error: Unsupported operand types for + ("int" and "str") Found 2 errors in 2 files (checked 2 source files) [case testErrorSummaryOnBadUsage] # cmd: mypy --error-summary missing.py [out] mypy: can't read file 'missing.py': No such file or directory == Return code: 2 [case testShowSourceCodeSnippetsWrappedFormatting] # cmd: mypy --pretty --python-version=3.6 some_file.py [file some_file.py] from typing import Union 42 + 'no way' class OneCustomClassName: def some_interesting_method(self, arg: AnotherCustomClassDefinedBelow) -> AnotherCustomClassDefinedBelow: ... class AnotherCustomClassDefinedBelow: def another_even_more_interesting_method(self, arg: Union[int, str, float]) -> None: self.very_important_attribute_with_long_name: OneCustomClassName = OneCustomClassName().some_interesting_method(arg) [out] some_file.py:3: error: Unsupported operand types for + ("int" and "str") 42 + 'no way' ^ some_file.py:11: error: Incompatible types in assignment (expression has type "AnotherCustomClassDefinedBelow", variable has type "OneCustomClassName") ...t_attribute_with_long_name: OneCustomClassName = OneCustomClassName().... ^ some_file.py:11: error: Argument 1 to "some_interesting_method" of "OneCustomClassName" has incompatible type "Union[int, str, float]"; expected "AnotherCustomClassDefinedBelow" ...OneCustomClassName = OneCustomClassName().some_interesting_method(arg) ^ [case testShowSourceCodeSnippetsBlockingError] # cmd: mypy --pretty --show-error-codes some_file.py [file some_file.py] it_looks_like_we_started_typing_something_but_then. = did_not_notice(an_extra_dot) [out] some_file.py:1: error: invalid syntax [syntax] ...ooks_like_we_started_typing_something_but_then. = did_not_notice(an_ex... 
^ == Return code: 2 [case testSpecialTypeshedGenericNote] # cmd: mypy --disallow-any-generics --python-version=3.6 test.py [file test.py] from os import PathLike from queue import Queue p: PathLike q: Queue [out] test.py:4: error: Missing type parameters for generic type "_PathLike" test.py:4: note: Subscripting classes that are not generic at runtime may require escaping, see https://mypy.readthedocs.io/en/latest/common_issues.html#not-generic-runtime test.py:5: error: Missing type parameters for generic type "Queue" test.py:5: note: Subscripting classes that are not generic at runtime may require escaping, see https://mypy.readthedocs.io/en/latest/common_issues.html#not-generic-runtime [case testErrorMessageWhenOpenPydFile] # cmd: mypy a.pyd [file a.pyd] # coding: uft-8 [out] mypy: stubgen does not support .pyd files: 'a.pyd' == Return code: 2 mypy-0.761/test-data/unit/daemon.test0000644€tŠÔÚ€2›s®0000002120113576752246023725 0ustar jukkaDROPBOX\Domain Users00000000000000-- End-to-end test cases for the daemon (dmypy). -- These are special because they run multiple shell commands. 
[case testDaemonStartStop] $ dmypy start -- --follow-imports=error Daemon started $ dmypy stop Daemon stopped [case testDaemonBasic] $ dmypy start -- --follow-imports=error Daemon started $ dmypy check -- foo.py Success: no issues found in 1 source file $ dmypy recheck Success: no issues found in 1 source file $ dmypy stop Daemon stopped [file foo.py] def f(): pass [case testDaemonRun] $ dmypy run -- foo.py --follow-imports=error Daemon started Success: no issues found in 1 source file $ dmypy stop Daemon stopped [file foo.py] def f(): pass [case testDaemonRunRestart] $ dmypy run -- foo.py --follow-imports=error Daemon started Success: no issues found in 1 source file $ dmypy run -- foo.py --follow-imports=error Success: no issues found in 1 source file $ {python} -c "print('[mypy]')" >mypy.ini $ {python} -c "print('disallow_untyped_defs = True')" >>mypy.ini $ dmypy run -- foo.py --follow-imports=error Restarting: configuration changed Daemon stopped Daemon started foo.py:1: error: Function is missing a return type annotation foo.py:1: note: Use "-> None" if function does not return a value Found 1 error in 1 file (checked 1 source file) == Return code: 1 $ {python} -c "print('def f() -> None: pass')" >foo.py $ dmypy run -- foo.py --follow-imports=error Success: no issues found in 1 source file $ dmypy stop Daemon stopped [file foo.py] def f(): pass [case testDaemonRunRestartPretty] $ dmypy run -- foo.py --follow-imports=error --pretty Daemon started Success: no issues found in 1 source file $ dmypy run -- foo.py --follow-imports=error --pretty Success: no issues found in 1 source file $ {python} -c "print('[mypy]')" >mypy.ini $ {python} -c "print('disallow_untyped_defs = True')" >>mypy.ini $ dmypy run -- foo.py --follow-imports=error --pretty Restarting: configuration changed Daemon stopped Daemon started foo.py:1: error: Function is missing a return type annotation def f(): pass ^ foo.py:1: note: Use "-> None" if function does not return a value Found 1 error in 
1 file (checked 1 source file) == Return code: 1 $ {python} -c "print('def f() -> None: pass')" >foo.py $ dmypy run -- foo.py --follow-imports=error --pretty Success: no issues found in 1 source file $ dmypy stop Daemon stopped [file foo.py] def f(): pass [case testDaemonRunRestartPluginVersion] $ dmypy run -- foo.py --no-error-summary Daemon started $ {python} -c "print(' ')" >> plug.py $ dmypy run -- foo.py --no-error-summary Restarting: plugins changed Daemon stopped Daemon started $ dmypy stop Daemon stopped [file mypy.ini] \[mypy] follow_imports = error plugins = plug.py [file foo.py] pass [file plug.py] from mypy.plugin import Plugin class Dummy(Plugin): pass def plugin(version): return Dummy [case testDaemonRunRestartGlobs] -- Ensure dmypy is not restarted if the configuration doesn't change and it contains globs -- Note: Backslash path separator in output is replaced with forward slash so the same test succeeds on Windows as well $ dmypy run -- foo --follow-imports=error --python-version=3.6 Daemon started foo/lol.py:1: error: Name 'fail' is not defined Found 1 error in 1 file (checked 3 source files) == Return code: 1 $ dmypy run -- foo --follow-imports=error --python-version=3.6 foo/lol.py:1: error: Name 'fail' is not defined Found 1 error in 1 file (checked 3 source files) == Return code: 1 $ {python} -c "print('[mypy]')" >mypy.ini $ {python} -c "print('ignore_errors=True')" >>mypy.ini $ dmypy run -- foo --follow-imports=error --python-version=3.6 Restarting: configuration changed Daemon stopped Daemon started Success: no issues found in 3 source files $ dmypy stop Daemon stopped [file mypy.ini] \[mypy] ignore_errors = True \[mypy-*.lol] ignore_errors = False [file foo/__init__.py] [file foo/lol.py] fail [file foo/ok.py] a: int = 1 [case testDaemonStatusKillRestartRecheck] $ dmypy status No status file found == Return code: 2 $ dmypy stop No status file found == Return code: 2 $ dmypy kill No status file found == Return code: 2 $ dmypy recheck No status 
file found == Return code: 2 $ dmypy start -- --follow-imports=error --no-error-summary Daemon started $ dmypy status Daemon is up and running $ dmypy start Daemon is still alive == Return code: 2 $ dmypy restart -- --follow-imports=error --no-error-summary Daemon stopped Daemon started $ dmypy stop Daemon stopped $ dmypy status No status file found == Return code: 2 $ dmypy restart -- --follow-imports=error --no-error-summary Daemon started $ dmypy recheck Command 'recheck' is only valid after a 'check' command == Return code: 2 $ dmypy kill Daemon killed $ dmypy status Daemon has died == Return code: 2 [case testDaemonRecheck] $ dmypy start -- --follow-imports=error --no-error-summary Daemon started $ dmypy check foo.py bar.py $ dmypy recheck $ dmypy recheck --update foo.py --remove bar.py sir_not_appearing_in_this_film.py foo.py:1: error: Import of 'bar' ignored foo.py:1: note: (Using --follow-imports=error, module not passed on command line) == Return code: 1 $ dmypy recheck --update bar.py $ dmypy recheck --update sir_not_appearing_in_this_film.py $ dmypy recheck --update --remove $ dmypy stop Daemon stopped [file foo.py] import bar [file bar.py] pass [case testDaemonTimeout] $ dmypy start --timeout 1 -- --follow-imports=error Daemon started $ {python} -c "import time;time.sleep(1)" $ dmypy status No status file found == Return code: 2 [case testDaemonRunNoTarget] $ dmypy run -- --follow-imports=error Daemon started mypy-daemon: error: Missing target module, package, files, or command. == Return code: 2 $ dmypy stop Daemon stopped -- this is carefully constructed to be able to break if the quickstart system lets -- something through incorrectly. 
in particular, the files need to have the same size [case testDaemonQuickstart] $ {python} -c "print('x=1')" >foo.py $ {python} -c "print('x=1')" >bar.py $ mypy --local-partial-types --cache-fine-grained --follow-imports=error --no-sqlite-cache --python-version=3.6 -- foo.py bar.py Success: no issues found in 2 source files $ {python} -c "import shutil; shutil.copy('.mypy_cache/3.6/bar.meta.json', 'asdf.json')" -- update bar's timestamp but don't change the file $ {python} -c "import time;time.sleep(1)" $ {python} -c "print('x=1')" >bar.py $ dmypy run -- foo.py bar.py --follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.6 Daemon started Success: no issues found in 2 source files $ dmypy status --fswatcher-dump-file test.json Daemon is up and running $ dmypy stop Daemon stopped -- copy the original bar cache file back so that the mtime mismatches $ {python} -c "import shutil; shutil.copy('asdf.json', '.mypy_cache/3.6/bar.meta.json')" -- sleep guarantees timestamp changes $ {python} -c "import time;time.sleep(1)" $ {python} -c "print('lol')" >foo.py $ dmypy run --log-file=log -- foo.py bar.py --follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.6 --quickstart-file test.json Daemon started foo.py:1: error: Name 'lol' is not defined Found 1 error in 1 file (checked 2 source files) == Return code: 1 -- make sure no errors made it to the log file $ {python} -c "import sys; sys.stdout.write(open('log').read())" -- make sure the meta file didn't get updated. we use this as an imperfect proxy for -- whether the source file got rehashed, which we don't want it to have been. 
$ {python} -c "x = open('.mypy_cache/3.6/bar.meta.json').read(); y = open('asdf.json').read(); assert x == y" [case testDaemonSuggest] $ dmypy start --log-file log.txt -- --follow-imports=error --no-error-summary Daemon started $ dmypy suggest foo:foo Command 'suggest' is only valid after a 'check' command (that produces no parse errors) == Return code: 2 $ dmypy check foo.py bar.py $ dmypy suggest foo.bar Unknown function foo.bar == Return code: 2 $ dmypy suggest foo.var Object foo.var is not a function == Return code: 2 $ dmypy suggest foo.Foo.var Unknown class foo.Foo == Return code: 2 $ dmypy suggest foo.Bar.baz Unknown method foo.Bar.baz == Return code: 2 $ dmypy suggest foo.foo.baz Object foo.foo is not a class == Return code: 2 $ dmypy suggest --callsites foo.foo bar.py:3: (str) bar.py:4: (arg=str) $ dmypy suggest foo.foo (str) -> int $ {python} -c "import shutil; shutil.copy('foo.py.2', 'foo.py')" $ dmypy check foo.py bar.py bar.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") == Return code: 1 [file foo.py] def foo(arg): return 12 class Bar: def bar(self): pass var = 0 [file foo.py.2] def foo(arg: str) -> int: return 12 class Bar: def bar(self) -> None: pass var = 0 [file bar.py] from foo import foo def bar() -> None: x = foo('abc') # type: str foo(arg='xyz') mypy-0.761/test-data/unit/deps-classes.test0000644€tŠÔÚ€2›s®0000001063713576752246025063 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies for classes. -- -- The dependencies are used for fined-grained incremental checking. -- -- See the comment at the top of deps.test for more documentation. 
-- TODO: Move class related test cases from deps.test to here [case testNamedTuple] from typing import NamedTuple, Any from a import A N = NamedTuple('N', [('a', 'A')]) def f(a: Any) -> None: n = N(a) n.a [file a.py] class A: pass [out] -> m.f -> m.f -> m.f -> m.f -> , , m -> m [case testNamedTuple2] from typing import NamedTuple, Any, Tuple from a import A, B N = NamedTuple('N', [('a', 'Tuple[A, B]')]) def f(a: Any) -> None: n = N(a) n.a [file a.py] class A: pass class B: pass [out] -> m.f -> m.f -> m.f -> m.f -> , , m -> , , m -> m [case testNamedTuple3] from typing import NamedTuple N = NamedTuple('N', [('x', int)]) x = N(1) M = NamedTuple('M', [('z', 'N')]) y = M(x) [out] -> m -> m -> , m -> m -> m -> , , , , m -> m -> m [case testNamedTuple4] from typing import NamedTuple, Any from a import A class N(NamedTuple): a: A def f(a: Any) -> None: n = N(a) n.a [file a.py] class A: pass [out] -> m.f -> m.f -> m.f -> m.N, m.f -> , , m, m.N -> m [case testIfFalseInClassBody] class A: if False: x = None # type: str x.foo() [builtins fixtures/bool.pyi] [out] -> m.A [case testAlwaysFalseIsinstanceInClassBody] class A: x: int if isinstance(x, str): y: str = None y.foo() [builtins fixtures/isinstance.pyi] [out] -> m.A [case testDoubleAttributeInitializationToNone] class C: def __init__(self) -> None: self.x = None self.x = None [out] -> m.C.__init__ -> m.C [case testClassNestedWithinFunction] class C: pass def f() -> None: class S1(C): pass class D: def g(self) -> None: class S2(C): pass [out] -- TODO: Is it okay to have targets like m.S1@4.__init__? 
-> , , m.D.g, m.f -> , -> , -> m.C, m.D.g, m.f -> m.D.g -> m.D -> m.f [case testClassSuper] class C: def __init__(self, x: int) -> None: pass def foo(self) -> None: pass class D(C): def __init__(self, x: int) -> None: super().__init__(x) super().foo() [out] -> , m -> , m.D.__init__ -> -> , m.D.__init__ -> m, m.C, m.D -> m.D [case testClassMissingInit] class C: def __init__(self, x: int) -> None: pass class D(C): pass def foo() -> None: D(6) [out] -> , m -> -> -> m, m.C, m.D -> m.foo -> m.foo -> m.D, m.foo [case testClassBasedEnum] from enum import Enum from m import B class A(Enum): X = B() def f(a: A) -> None: pass def g() -> None: A.X [file m.py] class B: pass [out] -> m.g -> , m.A, m.f, m.g -> m -> m -- The dependecy target is superfluous but benign -> , m -> m [case testClassAttribute] class C: x = 0 def f() -> None: C.x def g() -> None: C.x = 1 [out] -> m.f, m.g -> m.C, m.f, m.g [case testStaticAndClassMethods] class C: @staticmethod def foo() -> None: h() @classmethod def bar(cls) -> None: h() def fstatic() -> None: C.foo() def fclass() -> None: C.bar() cc = C() def gstatic() -> None: cc.foo() def gclass() -> None: cc.bar() def h() -> None: pass [builtins fixtures/classmethod.pyi] [out] -> m -> m -> m, m.fclass, m.gclass -> m, m.fstatic, m.gstatic -> , m, m.C, m.fclass, m.fstatic -> m, m.gclass, m.gstatic -> m.C.bar, m.C.foo [case testClassAttributeWithMetaclass] class M(type): x = 1 class C(metaclass=M): pass def f() -> None: C.x [out] -> m.f -> m.C, m.f -> m.f -> , m.M mypy-0.761/test-data/unit/deps-expressions.test0000644€tŠÔÚ€2›s®0000002061113576752246026001 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies for expressions. -- -- The dependencies are used for fined-grained incremental checking. -- -- See the comment at the top of deps.test for more documentation. 
[case testListExpr] def f() -> int: pass def g() -> None: a = [f()] [builtins fixtures/list.pyi] [out] -> m.g [case testDictExpr] def f1() -> int: pass def f2() -> int: pass def g() -> None: a = {f1(): 1, 2: f2()} [builtins fixtures/dict.pyi] [out] -> m.g -> m.g [case testSetExpr] def f() -> int: pass def g() -> None: a = {f()} [builtins fixtures/set.pyi] [out] -> m.g [case testTupleExpr] def f1() -> int: pass def f2() -> int: pass def g() -> None: a = (f1(), f2()) [builtins fixtures/tuple.pyi] [out] -> m.g -> m.g [case testListComprehension] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f1() -> int: pass def f2() -> int: pass def g() -> None: a = [f1() for x in A() if f2()] [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.g -> m.A, m.g -> m.g -> m.g [case testSetComprehension] from typing import Set def f1() -> int: pass def f2() -> Set[int]: pass def f3() -> int: pass def g() -> None: a = {f1() for x in f2() if f3()} [builtins fixtures/set.pyi] [out] -> m.g -> m.g -> m.g [case testDictComprehension] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f1() -> int: pass def f2() -> int: pass def f3() -> int: pass def g() -> None: a = {f1(): f2() for x in A() if f3()} [builtins fixtures/dict.pyi] [out] -> m.g -> m.g -> m.g -> m.A, m.g -> m.g -> m.g -> m.g [case testGeneratorExpr] from typing import List def f1() -> int: pass def f2() -> List[int]: pass def f3() -> int: pass def g() -> None: a = (f1() for x in f2() if f3()) [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.g [case testConditionalExpr] def f1() -> int: pass def f2() -> int: pass def f3() -> int: pass def g() -> None: a = f1() if f2() else f3() [out] -> m.g -> m.g -> m.g [case testAwaitExpr] def f(): pass async def g() -> None: x = await f() [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] -> m.g [case testStarExpr] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def 
g() -> None: a = [*A()] [builtins fixtures/list.pyi] [out] -> m.g -> , m.g -> m.g -> m.A, m.g, typing.Iterable [case testCast] from typing import cast class A: pass def f() -> object: pass def g() -> None: x = cast(A, f()) [out] -> m.A, m.g -> m.g [case testTypeApplication] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): def __init__(self, x): pass class B: pass class C: pass def f() -> int: pass def g() -> None: x = A[B, C](f()) [out] -> m.g -> m.g -> m.A, m.g -> m.B, m.g -> m.C, m.g -> m.A -> m.A -> m.g [case testIndexExpr] class A: def __getitem__(self, x: int) -> int: pass def f1() -> A: pass def f2() -> int: pass def g(a: A) -> int: return f1()[f2()] [out] -> m.g -> , , m.A, m.f1, m.g -> m.g -> m.g [case testIndexExpr] class A: def __getitem__(self, x: int) -> int: pass def f1() -> A: pass def f2() -> int: pass def g(a: A) -> int: return f1()[f2()] [out] -> m.g -> , , m.A, m.f1, m.g -> m.g -> m.g [case testIndexExprLvalue] class A: def __setitem__(self, x: int, y: int) -> None: pass def f1() -> A: pass def f2() -> int: pass def f3() -> int: pass def g(a: A) -> None: f1()[f2()] = f3() [out] -- __getitem__ dependency is redundant but harmless -> m.g -> m.g -> , , m.A, m.f1, m.g -> m.g -> m.g -> m.g [case testUnaryExpr] class A: def __neg__(self) -> int: pass def __pos__(self) -> int: pass def __invert__(self) -> int: pass def f1() -> A: pass def f2() -> A: pass def f3() -> A: pass def g1() -> int: return +f1() def g2() -> int: return -f2() def g3() -> int: return ~f3() [out] -> m.g3 -> m.g2 -> m.g1 -> , , , m.A, m.f1, m.f2, m.f3 -> m.g1 -> m.g2 -> m.g3 [case testOpExpr] class A: def __add__(self, x: 'B') -> int: pass class B: pass def f() -> int: a: A b: B return a + b [out] -> m.f -> m.A, m.f -> m.f -> , m.A.__add__, m.B, m.f [case testComparisonExpr] class A: def __lt__(self, x: 'B') -> int: pass class B: pass def f() -> int: return A() < B() [out] -> m.f -> m.f -> m.f -> m.A, m.f -> m.f -> m.f -> m.f -> , 
m.A.__lt__, m.B, m.f [case testIsOp] class A: pass class B: pass def f() -> bool: return A() is B() [builtins fixtures/bool.pyi] [out] -> m.f -> m.f -> m.A, m.f -> m.f -> m.f -> m.B, m.f [case testInOp] class A: def __contains__(self, x: B) -> int: pass class B: pass def f() -> bool: return B() in A() [builtins fixtures/bool.pyi] [out] -> m.f -> m.f -> m.f -> m.A, m.f -> m.f -> m.f -> , m.A.__contains__, m.B, m.f [case testComparisonExprWithMultipleOperands] class A: def __lt__(self, x: 'B') -> int: pass class B: pass class C: def __ge__(self, x: 'B') -> int: pass def f() -> int: return A() < B() <= C() [out] -> m.f -> m.f -> m.f -> m.A, m.f -> m.f -> m.f -> m.f -> m.f -> , , m.A.__lt__, m.B, m.C.__ge__, m.f -> m.f -> m.f -> m.f -> m.C, m.f [case testOperatorWithTupleOperand] from typing import Tuple class C(Tuple[int, str]): def __and__(self, x: D) -> int: pass def __neg__(self) -> int: pass class D: pass def f() -> None: c: C d: D x = c & d y = -c [builtins fixtures/tuple.pyi] [out] -> m.f -> m.f -> m.C, m.f -> m.f -> , m.C.__and__, m.D, m.f [case testUnionTypeOperation] from typing import Union class A: def __add__(self, x: str) -> int: pass class B: def __add__(self, x: str) -> int: pass def f(a: Union[A, B]) -> int: return a + '' [out] -> m.f -> , m.A, m.f -> m.f -> , m.B, m.f [case testBackquoteExpr_python2] def g(): # type: () -> int pass def f(): # type: () -> str return `g()` [out] -> m.f [case testComparison_python2] class A: def __cmp__(self, other): # type: (B) -> int pass class B: pass def f(a, b): # type: (A, B) -> None x = a == b def g(a, b): # type: (A, B) -> None x = a < b [out] -> m.f, m.g -> m.f -> m.g -> , , m.A, m.f, m.g -> m.f, m.g -> m.f -> m.g -> , , , m.A.__cmp__, m.B, m.f, m.g [case testSliceExpr] class A: def __getitem__(self, x) -> None: pass def f1() -> int: pass def f2() -> int: pass def f3() -> int: pass def f4() -> int: pass def f5() -> int: pass def f() -> None: a: A a[f1():f2():f3()] a[f4():] a[::f5()] [builtins fixtures/slice.pyi] 
[out] -> m.f -> m.A, m.f -> m.f -> m.f -> m.f -> m.f -> m.f [case testRevealTypeExpr] def f1() -> int: pass def f() -> None: reveal_type(f1()) # type: ignore [out] -> m.f [case testLambdaExpr] from typing import Callable def f1(c: Callable[[int], str]) -> None: pass def f2() -> str: pass def g() -> None: f1(lambda x: f2()) [out] -> m.g -> m.g [case testLiteralDepsExpr] from typing_extensions import Literal Alias = Literal[1] a: Alias b = a def f(x: Alias) -> None: pass def g() -> Literal[1]: return b [out] -> , m, m.f -> m -> m, m.g mypy-0.761/test-data/unit/deps-generics.test0000644€tŠÔÚ€2›s®0000000717613576752246025231 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies involving generics. -- -- The dependencies are used for fined-grained incremental checking. -- -- See the comment at the top of deps.test for more documentation. [case testGenericFunction] from typing import TypeVar T = TypeVar('T') class A: pass def f(x: T) -> T: y: T z: A return x [out] -> m.A, m.f -> , m.f [case testGenericClass] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B: pass def f() -> None: a: A[B] [out] -> m.A, m.f -> m.B, m.f -> m.A [case testGenericClassWithMembers] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def g(self, a: T) -> None: self.x = a def f(self) -> T: return self.x [out] -> m.A.f, m.A.g -> m.A -> , , , m.A, m.A.f, m.A.g [case testGenericClassInit] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, a: T) -> None: self.x = a class B: pass def f() -> None: a = A(B()) [out] -> m.f -> m.f -> m.A.__init__ -> m.A, m.f -> m.f -> m.f -> m.B, m.f -> , , m.A, m.A.__init__ [case testGenericMethod] from typing import TypeVar T = TypeVar('T') class A: def f(self, x: T) -> T: return x [out] -> m.A -> , m.A.f [case testGenericBaseClass] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B(A[C]): 
pass class C: pass [out] -> , m -> -> -> m, m.A, m.B -> m.B -> m, m.B, m.C -> m.A [case testGenericBaseClass2] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B(A[T]): pass [out] -> , m -> -> -> m, m.A, m.B -> m.B -> m, m.A, m.B [case testTypeVarBound] from typing import TypeVar, Tuple class A: pass class B: pass T = TypeVar('T', bound=Tuple[A, B]) def f(x: T) -> T: return x [out] -> , , m, m.A, m.f -> , , m, m.B, m.f -> , m.f [case testTypeVarBoundOperations] from typing import TypeVar, Tuple class A: def f(self) -> None: pass def __add__(self, other: int) -> int: pass T = TypeVar('T', bound=A) def f(x: T) -> None: x.f() x + 1 [out] -> m.f -> m.f -> , , m, m.A, m.f -> , m.f [case testTypeVarValues] from typing import TypeVar class A: pass class B: pass class C: pass class D: pass T = TypeVar('T', A, B) S = TypeVar('S', C, D) def f(x: T) -> S: pass [out] -> , , m, m.A, m.f -> , , m, m.B, m.f -> , , m, m.C, m.f -> , , m, m.D, m.f -> , m.f -> , m.f [case testTypeVarValuesMethod] from typing import TypeVar, Generic class C: pass class D: pass S = TypeVar('S', C, D) class G(Generic[S]): def f(self) -> S: pass [out] -> , , m, m.C, m.G.f -> , , m, m.D, m.G.f -> m.G -> , m.G, m.G.f [case testTypeVarValuesMethodAttr] from typing import TypeVar, Generic class A: x: int class B: x: int T = TypeVar('T', A, B) class G(Generic[T]): def f(self, x: T) -> None: x.x [out] -> m.G.f -> , , m, m.A, m.G.f -> m.G.f -> , , m, m.B, m.G.f -> m.G -> , m.G, m.G.f mypy-0.761/test-data/unit/deps-statements.test0000644€tŠÔÚ€2›s®0000002724113576752246025614 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies for statements. -- -- The dependencies are used for fined-grained incremental checking. -- -- See the comment at the top of deps.test for more documentation. 
[case testIfStmt] def f1() -> int: pass def f2() -> None: pass def f3() -> int: pass def f4() -> None: pass def f5() -> None: pass def g() -> None: if f1(): f2() elif f3(): f4() else: f5() [out] -> m.g -> m.g -> m.g -> m.g -> m.g [case testWhileStmt] def f1() -> int: pass def f2() -> None: pass def f3() -> None: pass def g() -> None: while f1(): f2() else: f3() [out] -> m.g -> m.g -> m.g [case testAssertStmt] def f1() -> int: pass def f2() -> str: pass def f3() -> int: pass def g() -> None: assert f1(), f2() assert f3() [out] -> m.g -> m.g -> m.g [case testRaiseStmt] def f1() -> BaseException: pass def f2() -> BaseException: pass def g1() -> None: raise f1() def g2() -> None: raise f1() from f2() [builtins fixtures/exception.pyi] [out] -> m.g1, m.g2 -> m.g2 [case testTryFinallyStmt] def f1() -> None: pass def f2() -> None: pass def g() -> None: try: f1() finally: f2() [out] -> m.g -> m.g [case testPrintStmt_python2] def f1(): # type: () -> int pass def f2(): # type: () -> int pass def g1(): # type: () -> None print f1() def g2(): # type: () -> None print f1(), f2() [out] -> m.g1, m.g2 -> m.g2 [case testPrintStmtWithFile_python2] class A: def write(self, s): # type: (str) -> None pass def f1(): # type: () -> A pass def f2(): # type: () -> int pass def g(): # type: () -> None print >>f1(), f2() [out] -> m.g -> , m.A, m.f1 -> m.g [case testExecStmt_python2] def f1(): pass def f2(): pass def f3(): pass def g1(): # type: () -> None exec f1() def g2(): # type: () -> None exec f1() in f2() def g3(): # type: () -> None exec f1() in f2(), f3() [out] -> m.g1, m.g2, m.g3 -> m.g2, m.g3 -> m.g3 [case testForStmt] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f1() -> None: pass def f2() -> None: pass def g() -> None: a: A for x in a: f1() else: f2() [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.g [case testTryExceptStmt] class A(BaseException): pass class B(BaseException): def f(self) -> None: pass def f1() -> None: 
pass def f2() -> None: pass def f3() -> None: pass def g() -> None: try: f1() except A: f2() except B as e: e.f() else: f3() [builtins fixtures/exception.pyi] [out] -- The dependencies on the ctor are basically spurious but not a problem -> m.g -> m.g -> m.A, m.g -> m.g -> m.g -> m.g -> m.B, m.g -> m.g -> m.g -> m.g [case testTryExceptStmt2] class A(BaseException): pass class B(BaseException): def f(self) -> None: pass def f1() -> None: pass def f2() -> None: pass def g() -> None: try: f1() except (A, B): f2() [builtins fixtures/exception.pyi] [out] -- The dependencies on the ctor are basically spurious but not a problem -> m.g -> m.g -> m.A, m.g -> m.g -> m.g -> m.B, m.g -> m.g -> m.g [case testWithStmt] from typing import Any class A: def __enter__(self) -> 'B': pass def __exit__(self, a, b, c) -> None: pass class B: def f(self) -> None: pass def g() -> None: a: A with a as x: x.f() [out] -> m.g -> m.g -> m.A, m.g -> m.g -> , m.A.__enter__, m.B [case testWithStmt2] from typing import Any class A: def __enter__(self) -> 'C': pass def __exit__(self, a, b, c) -> None: pass class B: def __enter__(self) -> 'D': pass def __exit__(self, a, b, c) -> None: pass class C: pass class D: pass def g() -> None: a: A b: B with a as x, b as y: pass [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.g -> m.B, m.g -> , m.A.__enter__, m.C -> , m.B.__enter__, m.D [case testWithStmtAnnotation] from typing import Any class A: def __enter__(self) -> Any: pass def __exit__(self, a, b, c) -> None: pass class B: pass def f(b: B) -> None: pass def g() -> None: a: A with a as x: # type: B f(x) [out] -> m.g -> m.g -> m.A, m.g -> , m.B, m.f, m.g -> m.g [case testForStmtAnnotation] class A: def __iter__(self): pass class B: def f(self) -> None: pass def g() -> None: a: A for x in a: # type: B x.f() [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.B, m.g [case testMultipleAssignment] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f() -> None: 
a: A x, y = a [out] -> , m.f -> m.A, m.f, typing.Iterable [case testMultipleLvalues] class A: def f(self) -> None: self.x = 1 self.y = 1 def g() -> None: a: A a.x = a.y = 1 [out] -> m.A.f, m.g -> m.A.f, m.g -> m.A, m.g [case testNestedLvalues] class A: def f(self) -> None: self.x = 1 self.y = '' def g() -> None: a: A a.x, a.y = 1, '' [out] -> m.A.f, m.g -> m.A.f, m.g -> m.A, m.g [case testForAndSetItem] class A: def __setitem__(self, x: int, y: int) -> None: pass def f(): pass def g() -> None: a: A for a[0] in f(): pass [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.A, m.g -> m.g [case testMultipleAssignmentAndFor] from typing import Iterator, Iterable class A: def f(self) -> None: self.x = 1 self.y = 1 class B: def __iter__(self) -> Iterator[int]: pass def f() -> Iterable[B]: pass def g() -> None: a: A for a.x, a.y in f(): pass [builtins fixtures/list.pyi] [out] -> m.A.f, m.g -> m.A.f, m.g -> m.A, m.g -> m.g -> , m.g -> , m.B, m.f, typing.Iterable -> m.g [case testNestedSetItem] class A: def __setitem__(self, x: int, y: int) -> None: pass class B: def __setitem__(self, x: int, y: int) -> None: pass def f(): pass def g() -> None: a: A b: B a[0], b[0] = f() [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.g -> m.B, m.g -> m.g [case testOperatorAssignmentStmt] class A: def __add__(self, other: 'B') -> 'A': pass class B: pass def f() -> B: pass def g() -> None: a: A a += f() [out] -> m.g -> m.g -> , m.A, m.A.__add__, m.g -> , , m.A.__add__, m.B, m.f -> m.g [case testOperatorAssignmentStmtSetItem] class A: def __add__(self, other: 'B') -> 'A': pass class B: pass class C: def __getitem__(self, x: int) -> A: pass def __setitem__(self, x: int, y: A) -> None: pass def f() -> int: pass def g() -> None: b: B c: C c[f()] += b [out] -> m.g -> m.g -> , , , m.A, m.A.__add__, m.C.__getitem__, m.C.__setitem__ -> , m.A.__add__, m.B, m.g -> m.g -> m.g -> m.C, m.g -> m.g [case testYieldStmt] from typing import Iterator class A: pass def f1() -> A: pass def g() -> Iterator[A]: yield 
f1() [builtins fixtures/list.pyi] [out] -> , , m.A, m.f1, m.g -> m.g [case testDelStmt] class A: def f(self) -> None: self.x = 1 def f() -> A: pass def g() -> None: del f().x [out] -> m.A.f, m.g -> , m.A, m.f -> m.g [case testDelStmtWithIndexing] class A: def __delitem__(self, x: int) -> None: pass def f1() -> A: pass def f2() -> int: pass def g() -> None: del f1()[f2()] [out] -> m.g -- __getitem__ is redundant but harmless -> m.g -> , m.A, m.f1 -> m.g -> m.g [case testYieldFrom] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f() -> Iterator[int]: yield from A() [out] -> m.f -> , m.f -> m.f -> m.A, m.f, typing.Iterable [case testFunctionDecorator] from typing import Callable def dec(f: Callable[[int], int]) -> Callable[[str], str]: pass def f() -> int: pass @dec def g(x: int) -> int: return f() [out] -> m -> m.g -> m [case testMethodDecorator] from typing import Callable, Any def dec(f: Callable[[Any, int], int]) -> Callable[[Any, str], str]: pass def f() -> int: pass class A: @dec def g(self, x: int) -> int: return f() [out] -> m -> m.A -> m -> m.A.g [case testNestedFunction] class A: pass def h() -> None: pass def f() -> None: def g(x: A) -> None: h() [out] -> , m.A, m.f -> m.f [case testPlatformCheck] import a import sys def f() -> int: if sys.platform == 'nonexistent': return a.g() else: return 1 [file a.py] [builtins fixtures/ops.pyi] [out] -> m -> m.f -> m, m.f [case testOverload] from typing import overload class A: pass class B(A): pass @overload def f(x: B) -> B: ... @overload def f(x: A) -> A: ... def f(x: A) -> A: g() return x def g() -> None: pass [builtins fixtures/isinstancelist.pyi] [out] -> , m -> -> -> , m, m.A, m.B, m.f -> , m.B, m.f -> m.f [case testOverloadedMethod] from typing import overload class A: pass class B(A): pass class C: @overload def f(self, x: B) -> B: ... @overload def f(self, x: A) -> A: ... 
def f(self, x: A) -> A: self.g() return x def g(self) -> None: pass [builtins fixtures/isinstancelist.pyi] [out] -> , m -> -> -> , m, m.A, m.B, m.C.f -> , m.B, m.C.f -> m.C.f -> m.C [case testConditionalFunctionDefinition] import sys class A: pass class B: pass if sys.platform == 'nonexistent': def f(x: A) -> None: g() else: def f(x: B) -> None: h() def g() -> None: pass def h() -> None: pass [builtins fixtures/ops.pyi] [out] -> m.A -> , m.B, m.f -> m.f -> m -> m [case testConditionalMethodDefinition] import sys class A: pass class B: pass class C: if sys.platform == 'nonexistent': def f(self, x: A) -> None: self.g() else: def f(self, x: B) -> None: self.h() def g(self) -> None: pass def h(self) -> None: pass [builtins fixtures/ops.pyi] [out] -> m.A -> , m.B, m.C.f -> m.C.f -> m.C -> m -> m [case testNewType] from typing import NewType from m import C N = NewType('N', C) def f(n: N) -> None: pass [file m.py] class C: x: int [out] -> , m, m.f -> , m -> -> -> -> m, m.N -> m mypy-0.761/test-data/unit/deps-types.test0000644€tŠÔÚ€2›s®0000004330013576752246024563 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies between types. -- -- The dependencies are used for fined-grained incremental checking. -- -- See the comment at the top of deps.test for more documentation. 
[case testFilterOutBuiltInTypes] class A: pass def f(x: int, y: str, z: A) -> None: pass [out] -> , m.A, m.f [case testTupleType] from typing import Tuple class A: pass class B: pass def f(x: Tuple[A, B]) -> None: pass [out] -> , m.A, m.f -> , m.B, m.f [case testUnionType] from typing import Union class A: pass class B: pass def f() -> None: x: Union[int, A, B] [out] -> m.A, m.f -> m.B, m.f [case testCallableType] from typing import Callable class A: pass class B: pass def f() -> None: x: Callable[[int, A], None] y: Callable[[int, str], B] [out] -> m.A, m.f -> m.B, m.f [case testTypeType] from typing import Type class A: pass def f() -> None: x: Type[A] y: Type[int] [out] -> m.f -> m.f -> m.A, m.f [case testTypeTypeAttribute] from typing import Type class A: @staticmethod def f() -> None: pass def f(x: Type[A]) -> None: x.f() [builtins fixtures/staticmethod.pyi] [out] -> , m.f -> , m.f -> m, m.f -> , m.A, m.f [case testComplexNestedType] from typing import Union, Callable, Type class A: pass class B: pass class C: pass def f() -> None: x: Union[int, Callable[[Type[A]], B], C] [out] -> m.f -> m.f -> m.A, m.f -> m.B, m.f -> m.C, m.f [case testUnionTypeAttributeAccess] from typing import Union class A: def f(self) -> None: self.x = 0 class B: def f(self) -> None: self.x = '' def f(a: Union[A, B]) -> None: a.x a.f() [out] -> m.f -> m.A.f, m.f -> , m.A, m.f -> m.f -> m.B.f, m.f -> , m.B, m.f [case testTupleTypeAttributeAccess] from typing import Tuple class C(Tuple[int, str]): def f(self) -> None: pass def f(c: C) -> None: c.f() [builtins fixtures/tuple.pyi] [out] -> m.f -> , m.C, m.f [case testOverloaded] from typing import overload class A: pass class B: pass def g() -> None: pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass def f(x): g() ff = f def h() -> None: f(A()) ff(A()) [out] -> m.h -> m.h -> , , m.A, m.f, m.h -> , , m.B, m.f -> m, m.h -> m, m.h -> m.f [case testMetaclassAttributes] from mod import C from typing import Type def f(arg: 
Type[C]) -> None: arg.x [file mod.py] class M(type): x: int class C(metaclass=M): pass [out] -> , m.f -> , m.f -> m.f -> , m, m.f -> m.f -> m [case testMetaclassAttributesDirect] from mod import C def f() -> None: C.x [file mod.py] class M(type): x: int class C(metaclass=M): pass [out] -> m.f -> m, m.f -> m.f -> m [case testMetaclassOperators] from mod import C from typing import Type def f(arg: Type[C]) -> None: arg + arg [file mod.py] class M(type): def __add__(self, other: M) -> M: pass class C(metaclass=M): pass [out] -> , m.f -> , m.f -> , m, m.f -> m.f -> m.f -> m [case testMetaclassOperatorsDirect] from mod import C def f() -> None: C + C [file mod.py] class M(type): def __add__(self, other: M) -> M: pass class C(metaclass=M): pass [out] -> m.f -> m.f -> m, m.f -> m.f -> m.f -> m [case testMetaclassDepsDeclared] import mod class C(metaclass=mod.M): pass [file mod.py] class M(type): pass [out] -> m.C -> -> m [case testMetaclassDepsDeclared_python2] # flags: --py2 import mod class C: __metaclass__ = mod.M [file mod.py] class M(type): pass [out] -> m.C -> m -> m -> , , m -> m [case testMetaclassDepsDeclaredNested] import mod def func() -> None: class C(metaclass=mod.M): pass [file mod.py] class M(type): pass [out] -> m.func -> -> m [case testMetaclassAttributes_python2] # flags: --py2 from mod import C from typing import Type def f(arg): # type: (Type[C]) -> None arg.x [file mod.py] class M(type): x = None # type: int class C: __metaclass__ = M [out] -> , m.f -> , m.f -> m.f -> , m, m.f -> m.f -> m [case testMetaclassOperatorsDirect_python2] # flags: --py2 from mod import C def f(): # type: () -> None C + C [file mod.py] class M(type): def __add__(self, other): # type: (M) -> M pass class C: __metaclass__ = M [out] -> m.f -> m.f -> m, m.f -> m.f -> m.f -> m -- Type aliases [case testAliasDepsNormalMod] from mod import I A = I x: A [file mod.py] class I: pass [out] -> m -> m -> m -> m -> , m -> m [case testAliasDepsNormalModExtended] # __dump_all__ import a x: 
a.A [file a.py] from mod import I A = I [file mod.py] class I: pass [out] -> m -> m -> m -> a -> a -> , m, a, mod.I -> a [case testAliasDepsNormalFunc] from mod import I A = I def f(x: A) -> None: pass [file mod.py] class I: pass [out] -> m.f -> m -> m -> , m, m.f -> m [case testAliasDepsNormalFuncExtended] # __dump_all__ import a def f(x: a.A) -> None: pass [file a.py] from mod import I A = I [file mod.py] class I: pass [out] -> m.f -> m -> a -> a -> , m.f, a, mod.I -> a [case testAliasDepsNormalClass] from a import A class C: x: A [file a.py] from mod import I A = I [file mod.py] class I: pass [out] -> m.C -> m -> m -> , m [case testAliasDepsNormalClassBases] from a import A class C(A): pass [file a.py] from mod import I A = I [file mod.py] class I: pass [out] -> m.C -> m -> m -> , m -> -> -> m.C [case testAliasDepsGenericMod] from mod import I, S, D A = D[I, S] x: A [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> m -> m -> m -> m -> , m -> , m -> , m -> m [case testAliasDepsGenericFunc] from mod import I, S, D A = D[S, I] def f(x: A) -> None: pass [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> , m.f -> m -> m -> , m, m.f -> , m, m.f -> , m, m.f -> m [case testAliasDepsGenericFuncExtended] import a def f(x: a.A) -> None: pass [file a.py] from mod import I, S, D A = D[S, I] [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> , m.f -> m -> , m.f -> , m.f -> , m.f [case testAliasDepsGenericClass] from mod import I, D, S, T A = D[S, T] class C: x: A[I] [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> m -> m.C -> m -> m -> , m -> , m -> , m -> m -> m [case 
testAliasDepsForwardMod] from mod import I x: A A = I [file mod.py] from typing import TypeVar, Generic class I: pass [out] -> m -> m -> m -> m -> , m -> m [case testAliasDepsForwardFunc] from mod import I def f(x: A) -> None: pass A = I [file mod.py] class I: pass [out] -> m.f -> m -> m -> , m, m.f -> m [case testAliasDepsForwardClass] from mod import I class C: x: A A = I [file mod.py] class I: pass [out] -> m -> m.C -> m -> m -> , m -> m [case testAliasDepsChainedMod] from mod import I A = I B = A x: B [file mod.py] class I: pass [out] -> m -> m -> m -> m -> m -> , m -> m [case testAliasDepsChainedFunc] from mod import I A = I B = A def f(x: B) -> None: pass [file mod.py] class I: pass [out] -> m -> m.f -> m -> m -> , m, m.f -> m [case testAliasDepsChainedFuncExtended] import a B = a.A def f(x: B) -> None: pass [file a.py] from mod import I A = I [file mod.py] class I: pass [out] -> m.f -> m -> m -> m -> m -> , m, m.f [case testAliasDepsChainedClass] from mod import I A = I B = A class C(B): pass [file mod.py] class I: pass [out] -> m -> m -> m.C -> , m -> , m -> , m -> m, m.C -> m [case testAliasDepsNestedMod] from mod import I, S, D A = D[S, I] B = D[S, A] x: B [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> , m -> m -> m -> m -> m -> , m -> , m -> , m -> m [case testAliasDepsNestedModExtended] # __dump_all__ from mod import S, D import a B = D[S, a.A] x: B [file a.py] from mod import I, S, D A = D[S, I] [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> m -> m -> , m -> m -> m, a -> m, a -> , m, a, mod.D -> , m, a, mod.I -> , m, a, mod.S -> mod.D -> mod.D -> m, a [case testAliasDepsNestedFunc] from mod import I, S, D A = D[S, I] B = D[S, A] def f(x: B) -> None: pass [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class 
D(Generic[T, U]): pass class I: pass class S: pass [out] -> , m, m.f -> , m.f -> m -> m -> , m, m.f -> , m, m.f -> , m, m.f -> m [case testAliasDepsNestedFuncExtended] # __dump_all__ from mod import S, D import a B = D[S, a.A] def f(x: B) -> None: pass [file a.py] from mod import I, S, D A = D[S, I] [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> , m.f -> , m, m.f -> m -> m, a -> m, a -> , m, m.f, a, mod.D -> , m, m.f, a, mod.I -> , m, m.f, a, mod.S -> mod.D -> mod.D -> m, a [case testAliasDepsNestedFuncDirect] from mod import I, S, D A = D[S, I] E = D def f(x: E[S, A]) -> None: pass [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> , m.f -> m.f -> m -> m -> , m, m.f -> , m, m.f -> , m, m.f -> m -> m -> m [case testAliasDepsNestedClass] from mod import I, S, D A = D[S, I] B = D[S, A] class C: x: B [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> , m -> m -> m.C -> m -> m -> , m -> , m -> , m -> m [case testAliasDepsCast] from typing import cast from mod import I A = I def fun() -> None: x = cast(A, 42) [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> m.fun -> m -> m -> m, m.fun -> m [case testAliasDepsRuntime] from mod import I, S, D A = I x = D[S, A]() [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class D(Generic[T, U]): pass class I: pass class S: pass [out] -> m -> m -> m -> m -> , m -> m -> m -> , m -> , m -> m [case testAliasDepsRuntimeExtended] # __dump_all__ from mod import I, S, D import a x = D[S, a.A]() [file a.py] from mod import I A = I [file mod.py] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') 
class D(Generic[T, U]): pass class I: pass class S: pass [out] -> m -> m -> m -> m -> m -> , m, mod.D -> a -> a -> , m, a, mod.I -> , m, mod.S -> mod.D -> mod.D -> m, a [case testAliasDepsNamedTuple] from typing import NamedTuple from mod import I A = I class P(NamedTuple): x: A [file mod.py] class I: pass [out] -> m -> m.P -> m -> m -> , , m, m.P -> m [case testAliasDepsNamedTupleFunctional] # __dump_all__ from typing import NamedTuple import a P = NamedTuple('P', [('x', a.A)]) [file a.py] from mod import I A = I [file mod.py] class I: pass [out] -> m -> m -> a -> a -> , , m, a, mod.I -> a [case testAliasDepsTypedDict] from mypy_extensions import TypedDict from mod import I A = I class P(TypedDict): x: A [file mod.py] class I: pass [builtins fixtures/dict.pyi] [out] -> m -> m.P -> m -> m -> , m -> m [case testAliasDepsTypedDictFunctional] # __dump_all__ from mypy_extensions import TypedDict import a P = TypedDict('P', {'x': a.A}) [file a.py] from mod import I A = I [file mod.py] class I: pass [builtins fixtures/dict.pyi] [out] -> m -> m -> a -> a -> , a, mod.I -> a -> sys -> sys [case testAliasDepsClassInFunction] from mod import I A = I def f() -> None: class C: x: A [file mod.py] class I: pass [out] -> m.f -> m.f -> m -> m -> , m, m.f -> m [case testAliasDepsFromImportUnqualified] from a import C x: C def f() -> None: C() class A: def meth(self) -> None: def inner() -> C: pass [file a.py] from b import D C = D [file b.py] class D: pass [out] -> m.A -> m -> m, m.A.meth, m.f -> m -> m.f -> m.f -> , , m, m.A.meth [case testFuncBasedEnum] from enum import Enum from mod import B A = Enum('A', [('X', B())]) def f(a: A) -> None: pass def g() -> None: A.X [file mod.py] class B: pass [out] -> m.g -> , m.f, m.g -> m -> m [case testProtocolDepsWildcard] # __dump_all__ import mod x: mod.P [file mod.py] from typing import Protocol class P(Protocol): x: int [out] -> m -> , m, mod.P -> -> m [case testProtocolDepsWildcardSupertype] # __dump_all__ import mod x: mod.P [file 
mod.py] from typing import Protocol class PBase(Protocol): x: int class P(PBase, Protocol): y: int [out] -> m -> mod -> , m, mod.P -> , mod -> -> -> -> -> mod, mod.P, mod.PBase -> , -> -> m [case testProtocolDepsPositive] # __dump_all__ import mod class C: x: int x: mod.P = C() [file mod.py] from typing import Protocol class P(Protocol): x: int [out] -> m -> m -> -> m, m.C, mod.P -> m -> , m, mod.P -> -> m [case testProtocolDepsNegative] # __dump_all__ import mod from typing import overload class C: y: int @overload def func(x: C) -> int: ... @overload def func(x: mod.P) -> str: ... def func(x): pass func(C()) [file mod.py] from typing import Protocol class P(Protocol): x: int [out] -> m -> m -> -> , m, m.C, m.func, mod.P -> m -> , m.func, mod.P -> -> m [case testProtocolDepsConcreteSuperclass] # __dump_all__ import mod class B: x: int class C(B): pass x: mod.P = C() [file mod.py] from typing import Protocol class P(Protocol): x: int [out] -> , m -> -> -> , -> m, m.B, m.C -> m -> m -> -> m, m.C, mod.P -> m -> , m, mod.P -> -> m mypy-0.761/test-data/unit/deps.test0000644€tŠÔÚ€2›s®0000006205313576752246023427 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating dependencies between ASTs nodes. -- -- The dependencies are used for fined-grained incremental checking and -- the daemon mode. -- -- The output of each test case includes the dependency map for whitelisted -- modules (includes the main module and the modules 'pkg' and 'pkg.mod' at -- least). -- -- Dependencies are formatted as " -> affected locations". -- -- Look at the docstring of mypy.server.deps for an explanation of -- how fine-grained dependencies are represented. 
[case testCallFunction] def f() -> None: g() def g() -> None: pass [out] -> m.f [case testCallMethod] def f(a: A) -> None: a.g() class A: def g(self) -> None: pass [out] -> m.f -> , m.A, m.f [case testAccessAttribute] def f(a: A) -> None: a.x class A: def g(self) -> None: self.x = 1 [out] -> m.A.g, m.f -> , m.A, m.f [case testConstructInstance] def f() -> None: A() class A: pass [out] -> m.f -> m.f -> m.A, m.f [case testAccessModuleAttribute] class A: pass x = A() def f() -> None: x [out] -> m -> m -> , m, m.A -> m, m.f [case testAccessModuleAttribute2] import n def f() -> None: n.x [file n.py] x = 1 [out] -> m.f -> m, m.f [case testImport] import n [file n.py] x = 1 [out] -> m [case testCallImportedFunction] import n n.f() [file n.py] def f() -> None: pass [out] -> m -> m [case testImportModuleAs] import n as x x.f() [file n.py] def f() -> None: pass [out] -> m -> m [case testCallImportedFunctionInFunction] import n def g() -> None: n.f() [file n.py] def f() -> None: pass [out] -> m.g -> m, m.g [case testInheritanceSimple] class A: pass class B(A): pass [out] -> , m -> -> -> m, m.A, m.B -> m.B [case testInheritanceWithMethodAndAttribute] class A: pass class B(A): def f(self) -> None: self.x = 1 [out] -> , m -> -> -> m.B.f -> -> m, m.A, m.B -> m.B.f -> m.B [case testInheritanceWithMethodAndAttributeAndDeepHierarchy] class A: pass class B(A): pass class C(B): def f(self) -> None: self.x = 1 [out] -> , , m -> , -> , -> m.C.f -> -> m, m.A, m.B -> , m -> -> -> m.C.f -> -> m, m.B, m.C -> m.C.f -> m.C [case testInheritAttribute] import n class B(n.A): def f(self) -> None: a = 1 a = self.x [file n.py] class A: def g(self) -> None: self.x = 1 [out] -> m.B.f -> m.B -> , m -> -> -> m.B.f -> -> -> m, m.B -> m [case testInheritMethod] class A: def g(self) -> None: pass class B(A): def f(self) -> None: self.g() [out] -> , m -> -> -> m.B.f -> -> m, m.A, m.B -> m.B.f -> m.B [case testPackage] import a.b def f() -> None: a.b.g() [file a/__init__.py] [file a/b.py] def g() -> None: 
pass [out] -> m.f -> m, m.f -> m.f [case testClassInPackage] import a.b def f(x: a.b.A) -> None: x.g() x.y [file a/__init__.py] [file a/b.py] class A: def g(self) -> None: self.y = 1 [out] -> m.f -> m.f -> , m.f -> m [case testPackage__init__] import a def f() -> None: a.g() [file a/__init__.py] def g() -> None: pass [out] -> m.f -> m, m.f [case testClassInPackage__init__] import a def f(x: a.A) -> None: x.g() x.y [file a/__init__.py] class A: def g(self) -> None: self.y = 1 [out] -> m.f -> m.f -> , m.f -> m [case testConstructor] class A: def __init__(self, x: C) -> None: pass class C: pass def f() -> None: A(C()) [out] -> m.f -> m.f -> m.A, m.f -> m.f -> m.f -> , m.A.__init__, m.C, m.f [case testNonTrivialConstructor] class C: def __init__(self) -> None: self.x = 1 [out] -> m.C.__init__ -> m.C [case testImportFrom] from n import f def g() -> None: f() [file n.py] def f() -> None: pass [out] -> m, m.g -> m [case testImportFromAs] from n import f as ff def g() -> None: ff() [file n.py] def f() -> None: pass [out] -> m, m.g -> m [case testNestedClass] def f() -> None: b = A.B() b.f() class A: class B: def f(self) -> None: pass [out] -> m.f -> m.f -> m.f -> m.A.B, m.f -> m.A, m.f [case testNestedClassAttribute] def f() -> None: b = A.B() b.x class A: class B: def f(self) -> None: self.x = 1 [out] -> m.f -> m.f -> m.A.B.f, m.f -> m.A.B, m.f -> m.A, m.f [case testNestedClassInAnnotation] def f(x: A.B) -> None: pass class A: class B: pass [out] -> , m.A.B, m.f -> m.A [case testNestedClassInAnnotation2] def f(x: A.B) -> None: x.f() class A: class B: def f(self) -> None: pass [out] -> m.f -> , m.A.B, m.f -> m.A [case NestedFunctionType] from mod import A, B, C, D def outer() -> None: def inner(x: A, *args: B, **kwds: C) -> D: pass [file mod.py] class A: pass class B: pass class C: pass class D: pass [builtins fixtures/dict.pyi] [out] -> , m, m.outer -> , m, m.outer -> , m, m.outer -> , m, m.outer -> m [case NestedFunctionBody] from mod import A, B, C def outer() -> None: 
def inner() -> None: A() x: B y: C y.x [file mod.py] class A: pass class B: pass class C: x: int [builtins fixtures/dict.pyi] [out] -> m.outer -> m.outer -> m, m.outer -> m, m.outer -> m.outer -> m, m.outer -> m [case testDefaultArgValue] def f1(x: int) -> int: pass def f2() -> int: pass def g(x: int = f1(f2())) -> None: pass [out] -> m.g -> m.g [case testIsInstance] class A: def g(self) -> None: pass def f(x: object) -> None: if isinstance(x, A): x.g() def ff(x: object) -> None: if isinstance(x, A): pass [builtins fixtures/isinstancelist.pyi] [out] -- The dependencies on the ctor are basically spurious but not a problem -> m.f -> m.A, m.f, m.ff [case testUnreachableIsInstance] class A: x: int class B: y: int def f(x: A) -> None: if isinstance(x, B): x.y [builtins fixtures/isinstancelist.pyi] [out] -> , m.A, m.f -> m.B, m.f [case testAttributeWithClassType1] from n import A class B: def h(self, z: A) -> None: self.z = z [file n.py] class A: pass [out] -> m.B.h -> m.B -> , , m, m.B.h -> m [case testAttributeWithClassType2] from m import A class B: def f(self) -> None: self.x = A() [file m.py] class A: pass [out] -> m.B.f -> m.B -> m.B.f -> m.B.f -> , m, m.B.f -> m [case testAttributeWithClassType3] from n import A, x class B: def g(self) -> None: self.x = x [file n.py] class A: pass x = A() [out] -> m.B.g -> m.B -> , m -> m, m.B.g -> m [case testAttributeWithClassType4] from n import A class B: def g(self) -> None: self.x: A [file n.py] class A: pass [out] -> m.B.g -> m.B -> , m, m.B.g -> m [case testClassBody] def f() -> int: pass def g() -> int: pass def h() -> int: pass class A: h() if f(): g() [out] -> m.A -> m -> m -> m [case testVariableInitializedInClass] from n import A class B: x = None # type: A [file n.py] class A: pass [out] -> m.B -> , m -> m [case testVariableAnnotationInClass] from n import A class B: x: A def f(self) -> None: y = self.x [file n.py] class A: pass [out] -> m.B.f -> m.B -> , m -> m [case testGlobalVariableInitialized] from n import A x 
= A() [file n.py] class A: pass [out] -> m -> m -> m -> , m -> m [case testGlobalVariableAnnotation] from n import A x: A [file n.py] class A: pass [out] -> m -> , m -> m [case testProperty] class B: pass class A: @property def x(self) -> B: pass def f(a: A) -> None: b = a.x [builtins fixtures/property.pyi] [out] -> m, m.f -> , m.A, m.f -> , m.A.x, m.B [case testUnreachableAssignment] from typing import List, Tuple def f() -> None: pass class C: def __init__(self, x: int) -> None: if isinstance(x, int): self.y = 1 else: self.y = f() [builtins fixtures/isinstancelist.pyi] [out] -> m.C.__init__ -> m.C -> m.C.__init__ [case testPartialNoneTypeAttributeCrash1] class C: pass class A: x = None def f(self) -> None: self.x = C() [out] -> m.A.f -> m.A -> m.A.f -> m.A.f -> , m.A.f, m.C [case testPartialNoneTypeAttributeCrash2] # flags: --strict-optional class C: pass class A: x = None def f(self) -> None: self.x = C() [out] -> m.A.f -> m.A -> m.A.f -> m.A.f -> , m.A.f, m.C [case testRelativeImport] import pkg # Magic package name in test runner [file pkg/__init__.py] from . import mod from .a import x [file pkg/mod.py] from . 
import a [file pkg/a.py] x = 1 [out] -> pkg -> pkg, pkg.mod -> pkg -> m, pkg, pkg.mod [case testTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(dict(x=42, y=1337)) def foo(x: Point) -> int: return x['x'] + x['y'] [builtins fixtures/dict.pyi] [out] -> , , m, m.foo -> m [case testTypedDict2] from mypy_extensions import TypedDict class A: pass Point = TypedDict('Point', {'x': int, 'y': A}) p = Point(dict(x=42, y=A())) def foo(x: Point) -> int: return x['x'] [builtins fixtures/dict.pyi] [out] -> m -> m -> , , , m, m.A, m.foo -> , , m, m.foo -> m [case testTypedDict3] from mypy_extensions import TypedDict class A: pass class Point(TypedDict): x: int y: A p = Point(dict(x=42, y=A())) def foo(x: Point) -> int: return x['x'] [builtins fixtures/dict.pyi] [out] -> m -> m -> , , , m, m.A, m.foo -> , , m, m.Point, m.foo -> m [case testImportStar] from a import * [file a.py] x = 0 [out] -> m [case testDecoratorDepsMod] import mod @mod.deca @mod.decb(mod.C()) def func(x: int) -> int: pass [file mod.py] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[int], int]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass [out] -> m -> m -> m -> m -> m -> m -> m [case testDecoratorDepsFunc] import mod def outer() -> None: @mod.deca @mod.decb(mod.C()) def func(x: int) -> int: pass [file mod.py] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[int], int]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass [out] -> m.outer -> m.outer -> m.outer -> m.outer -> m.outer -> m, m.outer [case testDecoratorDepsDeeepNested] import mod def outer() -> None: def inner() -> None: @mod.dec def func(x: int) -> int: pass [file mod.py] from typing import Callable def dec(func: Callable[[int], int]) -> Callable[[str], str]: pass [out] -> m.outer -> m, m.outer [case 
testDecoratorDepsNestedClass] import mod class Outer: class Inner: c = mod.C() @c.dec def func(self, x: int) -> int: pass [file mod.py] from typing import Callable class C: def dec(self, func: Callable[..., int]) -> Callable[..., str]: pass [out] -> m -> m.Outer.Inner -> m.Outer -> m -> m -> m -> , m -> m [case testDecoratorDepsClassInFunction] import mod def outer() -> None: class Inner: c = mod.C() @c.dec def func(self, x: int) -> int: pass [file mod.py] from typing import Callable class C: def dec(self, func: Callable[..., int]) -> Callable[..., str]: pass [out] -> m.outer -> m.outer -> m.outer -> m.outer -> , m.outer -> m, m.outer [case DecoratorDepsMethod] import mod class D: @mod.deca @mod.decb(mod.C()) def func(self, x: int) -> int: pass [file mod.py] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[..., int]) -> Callable[..., str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass [out] -> m -> m.D -> m -> m -> m -> m -> m -> m [case testMissingModuleClass1] from b import A # type: ignore def f(x: A) -> None: x.foo() [out] -> , m.f -> m -> m [case testMissingModuleClass2] from p.b import A # type: ignore def f(x: A) -> None: x.foo() [out] -> , m.f -> m -> m [case testIgnoredMissingModuleAttribute] import a a.x # type: ignore import b.c b.c.x # type: ignore from b import c c.y # type: ignore [file a.py] [file b/__init__.py] [file b/c.py] [builtins fixtures/module.pyi] [out] -> m -> m -> m -> m -> m -> m [case testIgnoredMissingInstanceAttribute] from a import C C().x # type: ignore [file a.py] class C: pass [out] -> m -> m -> m -> m -> m [case testIgnoredMissingClassAttribute] from a import C C.x # type: ignore [file a.py] class C: pass [out] -> m -> m -> m [case testDepsFromOverloadMod] # __dump_all__ import mod x = mod.f [file mod.py] from typing import overload, Any import submod @overload def f(x: int) -> submod.A: pass @overload def f(x: str) -> submod.B: pass def f(x) -> Any: y: submod.C y.x 
[file submod.py] class A: pass class B: pass class C: x: D class D: pass [out] -> m -> m -> m -> , , mod.f, submod.A -> , , mod.f, submod.B -> mod.f -> mod.f, submod.C -> , submod, submod.D -> mod [case testDepsFromOverloadFunc] import mod def f() -> None: x = mod.f [file mod.py] from typing import overload @overload def f(x: int) -> None: pass @overload def f(x: str) -> str: pass def f(x): pass [out] -> m.f -> m, m.f [case testDepsToOverloadMod] from typing import overload, Any import mod @overload def f(x: int) -> None: pass @overload def f(x: str) -> str: pass def f(x: Any) -> Any: mod.g() [file mod.py] def g() -> int: pass [out] -> m.f -> m, m.f [case testDepsToOverloadFunc] from typing import overload, Any import mod def outer() -> None: @overload def f(x: int) -> mod.A: pass @overload def f(x: str) -> mod.B: pass def f(x: Any) -> Any: mod.g() [file mod.py] def g() -> int: pass class A: pass class B: pass [out] -> , m.outer -> , m.outer -> m.outer -> m, m.outer [case testDepsFromOverloadToOverload] from typing import overload, Any import mod def outer() -> None: @overload def f(x: int) -> None: pass @overload def f(x: str) -> str: pass def f(x: Any) -> Any: mod.g(str()) [file mod.py] from typing import overload @overload def g(x: int) -> None: pass @overload def g(x: str) -> str: pass def g(x): pass [out] -> m.outer -> m, m.outer [case testDepsFromOverloadToOverloadDefaultClass] from typing import overload, Any import mod class Outer: @overload def f(self, x: int) -> None: pass @overload def f(self, x: str, cb=mod.g) -> str: pass def f(self, *args: Any, **kwargs: Any) -> Any: pass [file mod.py] from typing import overload @overload def g(x: int) -> None: pass @overload def g(x: str) -> str: pass def g(x): pass [builtins fixtures/dict.pyi] [out] -> m.Outer -> m.Outer.f -> m, m.Outer.f [case testDepsFromOverloadToOverloadDefaultNested] from typing import overload, Any import mod def outer() -> None: @overload def f(x: int) -> None: pass @overload def f(x: str, 
cb=mod.g) -> str: pass def f(*args: Any, **kwargs: Any) -> Any: pass [file mod.py] from typing import overload @overload def g(x: int) -> None: pass @overload def g(x: str) -> str: pass def g(x): pass [builtins fixtures/dict.pyi] [out] -> m.outer -> m, m.outer [case testDepsToOverloadGeneric] # __dump_all__ import mod from typing import overload, Any @overload def f(x: mod.TA) -> mod.TA: pass @overload def f(x: mod.TB, y: int) -> mod.TB: pass def f(*args: Any, **kwargs: Any) -> Any: pass [file mod.py] from typing import TypeVar import submod TA = TypeVar('TA', submod.A, submod.B) TB = TypeVar('TB', bound=submod.C) [file submod.py] class A: pass class B: pass class C: pass [builtins fixtures/dict.pyi] [out] -> , m.f -> , m.f -> m -> , , m.f, mod, submod.A -> , , m.f, mod, submod.B -> , , m.f, mod, submod.C -> mod [case testDepsOverloadBothExternalAndImplementationType] import mod from typing import overload @overload def f(x: mod.A) -> mod.A: pass @overload def f(x: mod.B) -> mod.B: pass def f(x: mod.Base) -> mod.Base: pass [file mod.py] class Base: pass class A(Base): pass class B(Base): pass [out] -> , m.f -> , m.f -> , m.f -> m [case testCustomIterator] class A: def __iter__(self) -> B: pass class B: def __iter__(self) -> B: pass def __next__(self) -> C: pass class C: pass def f() -> None: for x in A(): pass [out] -> m.f -> m.f -> m.f -> m.f -> m.A, m.f -> m.f -> , , m.A.__iter__, m.B, m.B.__iter__ -> , m.B.__next__, m.C [case testCustomIterator_python2] class A: def __iter__(self): # type: () -> B pass class B: def __iter__(self): # type: () -> B pass def next(self): # type: () -> C pass class C: pass def f(): # type: () -> None for x in A(): pass [out] -> m.f -> m.f -> m.f -> m.f -> m.A, m.f -> m.f -> , , m.A.__iter__, m.B, m.B.__iter__ -> , m.B.next, m.C [case testDepsLiskovClass] from mod import A, C class D(C): x: A [file mod.py] class C: x: B class B: pass class A(B): pass [out] -> m -> m.D -> , m -> , m -> -> -> -> m, m.D -> m [case testDepsLiskovMethod] 
from mod import A, C class D(C): def __init__(self) -> None: self.x: A [file mod.py] class C: def __init__(self) -> None: self.x: B class B: pass class A(B): pass [out] -> m.D.__init__ -> m.D -> , m, m.D.__init__ -> , m -> , m.D.__init__ -> -> -> m, m.D -> m [case testIndexedStarLvalue] from typing import List, Tuple class B: def __setitem__(self, i: int, v: List[str]) -> None: pass def g() -> Tuple[int, str, str]: pass def f(b: B) -> None: a, *b[0] = g() [builtins fixtures/list.pyi] [out] -> m.f -> m.f -> , m.B, m.f -> m.f [case testLogicalDecorator] # flags: --logical-deps from mod import dec @dec def f() -> None: pass [file mod.py] from typing import Callable def dec(f: Callable[[], None]) -> Callable[[], None]: pass [out] -> , m [case testLogicalDecoratorWithArgs] # flags: --logical-deps from mod import dec @dec(str()) def f() -> None: pass [file mod.py] from typing import Callable def dec(arg: str) -> Callable[[Callable[[], None]], Callable[[], None]]: pass [out] -> , m [case testLogicalDecoratorMember] # flags: --logical-deps import mod @mod.dec def f() -> None: pass [file mod.py] from typing import Callable def dec(f: Callable[[], None]) -> Callable[[], None]: pass [out] -> , m -> m [case testLogicalDefinition] # flags: --logical-deps from mod import func b = func() [file mod.py] def func() -> int: pass [out] -> m -> , m [case testLogicalDefinitionIrrelevant] # flags: --logical-deps from mod import func def outer() -> None: a = func() b: int = func() c = int() c = func() [file mod.py] def func() -> int: pass [out] -> m -> m -> m, m.outer [case testLogicalDefinitionMember] # flags: --logical-deps import mod b = mod.func() [file mod.py] def func() -> int: pass [out] -> m -> , m -> m [case testLogicalDefinitionClass] # flags: --logical-deps from mod import Cls b = Cls() [file mod.py] class Base: def __init__(self) -> None: pass class Cls(Base): pass [out] -> m -> -> m -> m -> , m [case testLogicalBaseAttribute] # flags: --logical-deps from mod import C class 
D(C): x: int [file mod.py] class C: x: int y: int [out] -> m -> m.D -> -> m, m.D [case testLogicalIgnoredImport1] # flags: --logical-deps --ignore-missing-imports import foo def f() -> None: foo.x [out] -> m.f -> m, m.f [case testLogicalIgnoredImport2] # flags: --logical-deps --ignore-missing-imports import foo.bar import a.b.c.d def f() -> None: foo.bar.x foo.bar.x.y a.b.c.d.e [out] -> m.f -> m, m.f -> m.f -> m.f -> m.f -> m.f -> m.f -> m, m.f -> m.f [case testLogicalIgnoredFromImport] # flags: --logical-deps --ignore-missing-imports from foo.bar import f, C def g() -> None: f() C.ff() def gg(x: C) -> None: z = x.y z.zz [out] -> m.g -> m.gg -> , m.g, m.gg -> m.g [case testLogical__init__] # flags: --logical-deps class A: def __init__(self) -> None: pass class B(A): pass class C(B): pass class D(A): def __init__(self, x: int) -> None: pass def f() -> None: A() def g() -> None: C() def h() -> None: D(1) [out] -> m.f -> m.f -> m, m.A, m.B, m.D, m.f -> m, m.B, m.C -> m.g -> m.g -> m.C, m.g -> m.h -> m.h -> m.D, m.h [case testLogicalSuperPython2] # flags: --logical-deps --py2 class A: def __init__(self): pass def m(self): pass class B(A): def m(self): pass class C(B): pass class D(C): def __init__(self): # type: () -> None super(B, self).__init__() def mm(self): # type: () -> None super(B, self).m() [out] -> m.D.__init__ -> , m.B.m -> m.D.mm -> m, m.A, m.B -> m.D.__init__ -> m.D.mm -> m.D.mm -> m, m.B, m.C -> m.D.__init__ -> m.D.mm -> m.D.mm -> m, m.C, m.D -> m.D [case testDataclassDeps] # flags: --python-version 3.7 from dataclasses import dataclass Z = int @dataclass class A: x: Z @dataclass class B(A): y: int [builtins fixtures/list.pyi] [out] -> , m -> -> -> , m.B.__init__ -> -> -> -> -> m, m.A, m.B -> m -> m -> m.B -> m -> m -> m mypy-0.761/test-data/unit/diff.test0000644€tŠÔÚ€2›s®0000006476713576752246023422 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for taking a diff of two module ASTs/symbol tables. 
-- The diffs are used for fined-grained incremental checking. [case testChangeTypeOfModuleAttribute] x = 1 y = 1 [file next.py] x = '' y = 1 [out] __main__.x [case testChangeSignatureOfModuleFunction] def f(x: int) -> None: pass def g(y: str) -> None: pass [file next.py] def f(x: str) -> None: x = '' def g(y: str) -> None: y = '' [out] __main__.f [case testAddModuleAttribute] x = 1 [file next.py] x = 1 y = 1 [out] __main__.y [case testRemoveModuleAttribute] x = 1 y = 1 [file next.py] x = 1 [out] __main__.y -- -- Classes -- [case testChangeMethodSignature] class A: def f(self) -> None: pass def g(self) -> None: pass [file next.py] class A: def f(self, x: int) -> None: pass def g(self) -> None: pass [out] __main__.A.f [case testChangeAttributeType] class A: def f(self) -> None: self.x = 1 self.y = 1 [file next.py] class A: def f(self) -> None: self.x = 1 self.y = '' [out] __main__.A.y [case testAddAttribute] class A: pass [file next.py] class A: def f(self) -> None: self.x = 1 [out] __main__.A.f __main__.A.x [case testAddAttribute2] class A: def f(self) -> None: pass [file next.py] class A: def f(self) -> None: self.x = 1 [out] __main__.A.x [case testRemoveAttribute] class A: def f(self) -> None: self.x = 1 [file next.py] class A: pass [out] __main__.A.f __main__.A.x [case testAddMethod] class A: def f(self) -> None: pass [file next.py] class A: def f(self) -> None: pass def g(self) -> None: pass [out] __main__.A.g [case testRemoveMethod] class A: def f(self) -> None: pass def g(self) -> None: pass [file next.py] class A: def f(self) -> None: pass [out] __main__.A.g [case testAddImport] import nn [file next.py] import n import nn [file n.py] x = 1 [file nn.py] y = 1 [out] __main__.n [case testRemoveImport] import n [file next.py] [file n.py] x = 1 [out] __main__.n [case testChangeClassIntoFunction] class A: pass [file next.py] def A() -> None: pass [out] __main__.A [case testDeleteClass] class A: pass [file next.py] [out] __main__.A [case testAddBaseClass] class A: 
pass [file next.py] class B: pass class A(B): pass [out] __main__.A __main__.B [case testChangeBaseClass] class A: pass class B: pass class C(A): pass [file next.py] class A: pass class B: pass class C(B): pass [out] __main__.C [case testRemoveBaseClass] class A: pass class B(A): pass [file next.py] class A: pass class B: pass [out] __main__.B [case testRemoveClassFromMiddleOfMro] class A: pass class B(A): pass class C(B): pass [file next.py] class A: pass class B: pass class C(B): pass [out] __main__.B __main__.C [case testDifferenceInConstructor] class A: def __init__(self) -> None: pass [file next.py] class A: def __init__(self, x: int) -> None: pass [out] __main__.A.__init__ [case testChangeSignatureOfMethodInNestedClass] class A: class B: def f(self) -> int: pass [file next.py] class A: class B: def f(self) -> str: pass [out] __main__.A.B.f [case testChangeTypeOfAttributeInNestedClass] class A: class B: def f(self) -> None: self.x = 1 [file next.py] class A: class B: def f(self) -> None: self.x = '' [out] __main__.A.B.x [case testAddMethodToNestedClass] class A: class B: pass [file next.py] class A: class B: def f(self) -> str: pass [out] __main__.A.B.f [case testAddNestedClass] class A: pass [file next.py] class A: class B: def f(self) -> None: pass [out] __main__.A.B [case testRemoveNestedClass] class A: class B: def f(self) -> None: pass [file next.py] class A: pass [out] __main__.A.B [case testChangeNestedClassToMethod] class A: class B: pass [file next.py] class A: def B(self) -> None: pass [out] __main__.A.B [case testChangeNamedTupleAttribute] from typing import NamedTuple class A: x: str N = NamedTuple('N', [('x', int), ('y', str)]) M = NamedTuple('M', [('x', int), ('y', str)]) [file next.py] from typing import NamedTuple N = NamedTuple('N', [('x', int), ('y', int)]) M = NamedTuple('M', [('x', int), ('y', str)]) [out] __main__.A __main__.N __main__.N._NT __main__.N.__new__ __main__.N._asdict __main__.N._make __main__.N._replace __main__.N.y [case 
testSimpleDecoratedFunction] from a import dec @dec def f() -> None: pass @dec def g() -> None: pass [file next.py] from a import dec @dec def f(x: int) -> None: pass @dec def g() -> None: pass [file a.py] from typing import TypeVar T = TypeVar('T') def dec(f: T) -> T: return f [out] __main__.f [case testSimpleDecoratedMethod] from a import dec class A: @dec def f(self) -> None: self.g() @dec def g(self) -> None: pass [file next.py] from a import dec class A: @dec def f(self, x: int) -> None: self.g() @dec def g(self) -> None: pass [file a.py] from typing import TypeVar T = TypeVar('T') def dec(f: T) -> T: return f [out] __main__.A.f [case testTypeVarBound] from typing import TypeVar T = TypeVar('T') S = TypeVar('S') [file next.py] from typing import TypeVar T = TypeVar('T', bound=int) S = TypeVar('S') [out] __main__.T [case testTypeVarVariance] from typing import TypeVar A = TypeVar('A', covariant=True) B = TypeVar('B', covariant=True) C = TypeVar('C', covariant=True) [file next.py] from typing import TypeVar A = TypeVar('A', covariant=True) B = TypeVar('B', contravariant=True) C = TypeVar('C') [out] __main__.B __main__.C [case testTypeVarValues] from typing import TypeVar A = TypeVar('A', int, str) B = TypeVar('B', int, str) C = TypeVar('C', int, str) [file next.py] from typing import TypeVar A = TypeVar('A', int, str) B = TypeVar('B', int, str, object) C = TypeVar('C') [out] __main__.B __main__.C [case testGenericFunction] from typing import TypeVar T = TypeVar('T') S = TypeVar('S') def f(x: T) -> T: pass def g(x: S) -> S: pass [file next.py] from typing import TypeVar T = TypeVar('T', int, str) S = TypeVar('S') def f(x: T) -> T: pass def g(x: S) -> S: pass [out] __main__.T __main__.f [case testGenericTypes] from typing import List x: List[int] y: List[int] [file next.py] from typing import List x: List[int] y: List[str] [builtins fixtures/list.pyi] [out] __main__.y [case testTypeAliasOfList] from typing import List X = List[int] Y = List[int] [file next.py] 
from typing import List X = List[str] Y = List[int] [builtins fixtures/list.pyi] [out] __main__.X [case testTypeAliasOfCallable] from typing import Callable A = Callable[[int], str] B = Callable[[int], str] C = Callable[[int], str] [file next.py] from typing import Callable A = Callable[[int], str] B = Callable[[], str] C = Callable[[int], int] [out] __main__.B __main__.C [case testGenericTypeAlias] from typing import Callable, TypeVar T = TypeVar('T') A = Callable[[T], T] B = Callable[[T], T] [file next.py] from typing import Callable, TypeVar T = TypeVar('T') S = TypeVar('S') A = Callable[[T], T] B = Callable[[T], S] [out] __main__.B __main__.S [case testDifferentListTypes] from typing import List A = List B = list C = List [file next.py] from typing import List A = List B = list C = list [builtins fixtures/list.pyi] [out] __main__.C [case testDecoratorChangesSignature] from contextlib import contextmanager from typing import Iterator, List, Tuple @contextmanager def f(x: List[Tuple[int]]) -> Iterator[None]: yield @contextmanager def g(x: str) -> Iterator[None]: yield [file next.py] from contextlib import contextmanager from typing import Iterator, List, Tuple @contextmanager def f(x: List[Tuple[int]]) -> Iterator[None]: yield @contextmanager def g(x: object) -> Iterator[None]: yield [typing fixtures/typing-full.pyi] [builtins fixtures/list.pyi] [out] __main__.g [case testOverloadedMethod] from typing import overload class A: @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass def f(self, x): pass @overload def g(self, x: int) -> int: pass @overload def g(self, x: str) -> str: pass def g(self, x): pass [file next.py] from typing import overload class A: @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass def f(self, x): pass @overload def g(self, x: int) -> int: pass @overload def g(self, x: object) -> object: pass def g(self, x): pass [out] __main__.A.g [case testPropertyWithSetter] class A: 
@property def x(self) -> int: pass @x.setter def x(self, o: int) -> None: pass class B: @property def x(self) -> int: pass @x.setter def x(self, o: int) -> None: pass [file next.py] class A: @property def x(self) -> int: pass @x.setter def x(self, o: int) -> None: pass class B: @property def x(self) -> str: pass @x.setter def x(self, o: str) -> None: pass [builtins fixtures/property.pyi] [out] __main__.B.x [case testFunctionalEnum] from enum import Enum, IntEnum A = Enum('A', 'x') B = Enum('B', 'x') C = Enum('C', 'x') D = IntEnum('D', 'x') [file next.py] from enum import Enum, IntEnum A = Enum('A', 'x') B = Enum('B', 'y') C = IntEnum('C', 'x') D = IntEnum('D', 'x y') [out] __main__.B.x __main__.B.y __main__.C __main__.D.y [case testClassBasedEnum] from enum import Enum class A(Enum): X = 0 Y = 1 class B(Enum): X = 0 Y = 1 class C(Enum): X = 0 Y = 1 class D(Enum): X = 0 Y = 1 class E(Enum): X = 0 [file next.py] from enum import Enum class A(Enum): X = 0 Y = 1 class B(Enum): X = 0 Z = 1 class C(Enum): X = 0 Y = 1 Z = 2 class D(Enum): X = 'a' Y = 'b' class F(Enum): X = 0 [out] __main__.B.Y __main__.B.Z __main__.C.Z __main__.D.X __main__.D.Y __main__.E __main__.F [case testTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(dict(x=42, y=1337)) [file next.py] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': str}) p = Point(dict(x=42, y='lurr')) [builtins fixtures/dict.pyi] [out] __main__.Point __main__.p [case testTypedDict2] from mypy_extensions import TypedDict class Point(TypedDict): x: int y: int p = Point(dict(x=42, y=1337)) [file next.py] from mypy_extensions import TypedDict class Point(TypedDict): x: int y: str p = Point(dict(x=42, y='lurr')) [builtins fixtures/dict.pyi] [out] __main__.Point __main__.p [case testTypedDict3] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(dict(x=42, y=1337)) [file next.py] from mypy_extensions 
import TypedDict Point = TypedDict('Point', {'x': int}) p = Point(dict(x=42)) [builtins fixtures/dict.pyi] [out] __main__.Point __main__.p [case testTypedDict4] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(dict(x=42, y=1337)) [file next.py] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}, total=False) p = Point(dict(x=42, y=1337)) [builtins fixtures/dict.pyi] [out] __main__.Point __main__.p [case testTypeAliasSimple] A = int B = int [file next.py] A = str B = int [out] __main__.A [case testTypeAliasGeneric] from typing import List A = List[int] B = List[int] [file next.py] from typing import List A = List[str] B = List[int] [builtins fixtures/list.pyi] [out] __main__.A [case testTypeAliasGenToNonGen] from typing import List A = List[str] B = List [file next.py] from typing import List A = List B = List [builtins fixtures/list.pyi] [out] __main__.A [case testTypeAliasNonGenToGen] from typing import List A = List B = List [file next.py] from typing import List A = List[str] B = List [builtins fixtures/list.pyi] [out] __main__.A [case testTypeAliasGenericTypeVar] from typing import TypeVar, Dict T = TypeVar('T') S = TypeVar('S') A = Dict[str, T] B = Dict[str, S] [file next.py] from typing import TypeVar, Dict class T: pass S = TypeVar('S') A = Dict[str, T] B = Dict[str, S] [builtins fixtures/dict.pyi] [out] __main__.A __main__.T [case testNewType] from typing import NewType class C: pass class D: pass N1 = NewType('N1', C) N2 = NewType('N2', D) N3 = NewType('N3', C) class N4(C): pass [file next.py] from typing import NewType class C: pass class D(C): pass N1 = NewType('N1', C) N2 = NewType('N2', D) class N3(C): pass N4 = NewType('N4', C) [out] __main__.D __main__.N2 __main__.N3 __main__.N3.__init__ __main__.N4 __main__.N4.__init__ [case testChangeGenericBaseClassOnly] from typing import List class C(List[int]): pass [file next.py] from typing import List class C(List[str]): pass 
[builtins fixtures/list.pyi] [out] __main__.C [case testOverloads] from typing import overload class C: pass @overload def a(x: int) -> None: pass @overload def a(x: str) -> str: pass def a(x): pass @overload def b(x: int) -> None: pass @overload def b(x: str) -> str: pass def b(x): pass @overload def c(x: int) -> None: pass @overload def c(x: str) -> str: pass def c(x): pass @overload def d(x: int) -> None: pass @overload def d(x: str) -> str: pass def d(x): pass [file next.py] from typing import overload class C: pass @overload def a(x: int) -> None: pass @overload def a(x: str) -> str: pass def a(x): pass @overload def b(x: str) -> str: pass @overload def b(x: int) -> None: pass def b(x): pass @overload def c(x: int) -> None: pass @overload def c(x: str) -> str: pass @overload def c(x: C) -> C: pass def c(x): pass def d(x: int) -> None: pass @overload def e(x: int) -> None: pass @overload def e(x: str) -> str: pass def e(x): pass [out] __main__.b __main__.c __main__.d __main__.e [case testOverloadsExternalOnly] from typing import overload class Base: pass class A(Base): pass class B(Base): pass class C(Base): pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass def f(x: Base) -> Base: pass @overload def g(x: A) -> A: pass @overload def g(x: B) -> B: pass def g(x: Base) -> Base: pass [file next.py] from typing import overload class Base: pass class A(Base): pass class B(Base): pass class C(Base): pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass def f(x: object) -> object: pass @overload def g(x: A) -> A: pass @overload def g(x: C) -> C: pass def g(x: Base) -> Base: pass [out] __main__.g [case testNestedFunctionDoesntMatter] class A: pass class B: pass def outer() -> None: def inner(x: A) -> B: pass [file next.py] class A: pass class B: pass def outer() -> None: def inner(y: B) -> A: pass [out] [case testProtocolVsNominal] from typing import Protocol class A(Protocol): x: int class B(Protocol): x: int class C(Protocol): x: 
int class D(Protocol): x: int [file next.py] from typing import Protocol class A(Protocol): x: int class B(Protocol): x: str class C(Protocol): x: int y: int class D: x: int [out] __main__.B.x __main__.C.(abstract) __main__.C.y __main__.D __main__.D.(abstract) [case testProtocolNormalVsGeneric] from typing import Protocol, TypeVar T = TypeVar('T') class P(Protocol[T]): x: T class P2(Protocol[T]): x: T y: T [file next.py] from typing import Protocol, TypeVar T = TypeVar('T') class P(Protocol): x: int class P2(Protocol[T]): x: T [out] __main__.P __main__.P.x __main__.P2.(abstract) __main__.P2.y [case testAddAbstractMethod] from abc import abstractmethod class A: @abstractmethod def f(self) -> None: pass [file next.py] from abc import abstractmethod class A: @abstractmethod def f(self) -> None: pass @abstractmethod def g(self) -> None: pass [out] __main__.A.(abstract) __main__.A.g [case testFinalFlagsTriggerVar] from typing import Final v: Final = 1 w: Final = 1 x: Final = 1 y: Final[int] = 1 z: Final[int] = 1 same1: Final = 1 same2: Final[int] = 1 class C: v: Final = 1 w: Final = 1 x: Final = 1 y: Final[int] = 1 z: Final[int] = 1 same1: Final = 1 same2: Final[int] = 1 def __init__(self) -> None: self.vi: Final = 1 self.wi: Final = 1 self.xi: Final = 1 self.yi: Final[int] = 1 self.zi: Final[int] = 1 self.same1_instance: Final = 1 self.same2_instance: Final[int] = 1 [file next.py] from typing import Final v: Final = 0 w = 1 x: Final[int] = 1 y: int = 1 z: Final = 1 same1: Final = 1 same2: Final[int] = 0 class C: v: Final = 0 w = 1 x: Final[int] = 1 y: int = 1 z: Final = 1 same1: Final = 1 same2: Final[int] = 0 def __init__(self) -> None: self.vi: Final = 0 self.wi = 1 self.xi: Final[int] = 1 self.yi: int = 1 self.zi: Final = 1 self.same1_instance: Final = 1 self.same2_instance: Final[int] = 0 [out] __main__.C.v __main__.C.vi __main__.C.w __main__.C.wi __main__.C.x __main__.C.xi __main__.C.y __main__.C.yi __main__.C.z __main__.C.zi __main__.v __main__.w __main__.x 
__main__.y __main__.z [case testFinalFlagsTriggerMethod] from typing import final class C: def meth(self) -> int: pass @final def same(self) -> int: pass @classmethod def cmeth(cls) -> int: pass [file next.py] from typing import final class C: @final def meth(self) -> int: pass @final def same(self) -> int: pass @final @classmethod def cmeth(cls) -> int: pass [builtins fixtures/classmethod.pyi] [out] __main__.C.cmeth __main__.C.meth [case testFinalFlagsTriggerProperty] from typing import final class C: @final @property def p(self) -> int: pass @final @property def same(self) -> str: pass [file next.py] from typing import final class C: @property def p(self) -> int: pass @final @property def same(self) -> str: pass [builtins fixtures/property.pyi] [out] __main__.C.p [case testFinalFlagsTriggerMethodOverload] from typing import final, overload class C: @overload def m(self, x: int) -> int: ... @overload def m(self, x: str) -> str: ... @final def m(self, x): pass @overload def same(self, x: int) -> int: ... @overload def same(self, x: str) -> str: ... @final def same(self, x): pass [file next.py] from typing import final, overload class C: @overload def m(self, x: int) -> int: ... @overload def m(self, x: str) -> str: ... def m(self, x): pass @overload def same(self, x: int) -> int: ... @overload def same(self, x: str) -> str: ... @final def same(self, x): pass [out] __main__.C.m [case testDynamicBasePluginDiff] # flags: --config-file tmp/mypy.ini from mod import declarative_base, Column, Instr Base = declarative_base() class Model(Base): x: Column[int] class Other: x: Column[int] class Diff: x: Column[int] [file next.py] from mod import declarative_base, Column, Instr Base = declarative_base() class Model(Base): x: Column[int] class Other: x: Column[int] class Diff(Base): x: Column[int] [file mod.py] from typing import Generic, TypeVar def declarative_base(): ... T = TypeVar('T') class Column(Generic[T]): ... class Instr(Generic[T]): ... 
[file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/dyn_class.py [out] __main__.Diff __main__.Diff.x [case testLiteralTriggersVar] from typing_extensions import Literal x: Literal[1] = 1 y = 1 z: Literal[1] = 1 same: Literal[1] = 1 class C: x_class: Literal[1] = 1 y_class = 1 z_class: Literal[1] = 1 same_class: Literal[1] = 1 def __init__(self) -> None: self.x_instance: Literal[1] = 1 self.y_instance = 1 self.z_instance: Literal[1] = 1 self.same_instance: Literal[1] = 1 [file next.py] from typing_extensions import Literal x = 1 y: Literal[1] = 1 z: Literal[2] = 2 same: Literal[1] = 1 class C: x_class = 1 y_class: Literal[1] = 1 z_class: Literal[2] = 2 same_class: Literal[1] = 1 def __init__(self) -> None: self.x_instance = 1 self.y_instance: Literal[1] = 1 self.z_instance: Literal[2] = 2 self.same_instance: Literal[1] = 1 [out] __main__.C.x_class __main__.C.x_instance __main__.C.y_class __main__.C.y_instance __main__.C.z_class __main__.C.z_instance __main__.x __main__.y __main__.z [case testLiteralTriggersFunctions] from typing_extensions import Literal def function_1() -> int: pass def function_2() -> Literal[1]: pass def function_3() -> Literal[1]: pass def function_4(x: int) -> None: pass def function_5(x: Literal[1]) -> None: pass def function_6(x: Literal[1]) -> None: pass def function_same_1() -> Literal[1]: pass def function_same_2(x: Literal[1]) -> None: pass class C: def method_1(self) -> int: pass def method_2(self) -> Literal[1]: pass def method_3(self) -> Literal[1]: pass def method_4(self, x: int) -> None: pass def method_5(self, x: Literal[1]) -> None: pass def method_6(self, x: Literal[1]) -> None: pass def method_same_1(self) -> Literal[1]: pass def method_same_2(self, x: Literal[1]) -> None: pass @classmethod def classmethod_1(cls) -> int: pass @classmethod def classmethod_2(cls) -> Literal[1]: pass @classmethod def classmethod_3(cls) -> Literal[1]: pass @classmethod def classmethod_4(cls, x: int) -> None: pass @classmethod def 
classmethod_5(cls, x: Literal[1]) -> None: pass @classmethod def classmethod_6(cls, x: Literal[1]) -> None: pass @classmethod def classmethod_same_1(cls) -> Literal[1]: pass @classmethod def classmethod_same_2(cls, x: Literal[1]) -> None: pass @staticmethod def staticmethod_1() -> int: pass @staticmethod def staticmethod_2() -> Literal[1]: pass @staticmethod def staticmethod_3() -> Literal[1]: pass @staticmethod def staticmethod_4(x: int) -> None: pass @staticmethod def staticmethod_5(x: Literal[1]) -> None: pass @staticmethod def staticmethod_6(x: Literal[1]) -> None: pass @staticmethod def staticmethod_same_1() -> Literal[1]: pass @staticmethod def staticmethod_same_2(x: Literal[1]) -> None: pass [file next.py] from typing_extensions import Literal def function_1() -> Literal[1]: pass def function_2() -> int: pass def function_3() -> Literal[2]: pass def function_4(x: Literal[1]) -> None: pass def function_5(x: int) -> None: pass def function_6(x: Literal[2]) -> None: pass def function_same_1() -> Literal[1]: pass def function_same_2(x: Literal[1]) -> None: pass class C: def method_1(self) -> Literal[1]: pass def method_2(self) -> int: pass def method_3(self) -> Literal[2]: pass def method_4(self, x: Literal[1]) -> None: pass def method_5(self, x: int) -> None: pass def method_6(self, x: Literal[2]) -> None: pass def method_same_1(self) -> Literal[1]: pass def method_same_2(self, x: Literal[1]) -> None: pass @classmethod def classmethod_1(cls) -> Literal[1]: pass @classmethod def classmethod_2(cls) -> int: pass @classmethod def classmethod_3(cls) -> Literal[2]: pass @classmethod def classmethod_4(cls, x: Literal[1]) -> None: pass @classmethod def classmethod_5(cls, x: int) -> None: pass @classmethod def classmethod_6(cls, x: Literal[2]) -> None: pass @classmethod def classmethod_same_1(cls) -> Literal[1]: pass @classmethod def classmethod_same_2(cls, x: Literal[1]) -> None: pass @staticmethod def staticmethod_1() -> Literal[1]: pass @staticmethod def 
staticmethod_2() -> int: pass @staticmethod def staticmethod_3() -> Literal[2]: pass @staticmethod def staticmethod_4(x: Literal[1]) -> None: pass @staticmethod def staticmethod_5(x: int) -> None: pass @staticmethod def staticmethod_6(x: Literal[2]) -> None: pass @staticmethod def staticmethod_same_1() -> Literal[1]: pass @staticmethod def staticmethod_same_2(x: Literal[1]) -> None: pass [builtins fixtures/classmethod.pyi] [out] __main__.C.classmethod_1 __main__.C.classmethod_2 __main__.C.classmethod_3 __main__.C.classmethod_4 __main__.C.classmethod_5 __main__.C.classmethod_6 __main__.C.method_1 __main__.C.method_2 __main__.C.method_3 __main__.C.method_4 __main__.C.method_5 __main__.C.method_6 __main__.C.staticmethod_1 __main__.C.staticmethod_2 __main__.C.staticmethod_3 __main__.C.staticmethod_4 __main__.C.staticmethod_5 __main__.C.staticmethod_6 __main__.function_1 __main__.function_2 __main__.function_3 __main__.function_4 __main__.function_5 __main__.function_6 [case testLiteralTriggersProperty] from typing_extensions import Literal class C: @property def p1(self) -> Literal[1]: pass @property def p2(self) -> int: pass @property def same(self) -> Literal[1]: pass [file next.py] from typing_extensions import Literal class C: @property def p1(self) -> int: pass @property def p2(self) -> Literal[1]: pass @property def same(self) -> Literal[1]: pass [builtins fixtures/property.pyi] [out] __main__.C.p1 __main__.C.p2 [case testLiteralsTriggersOverload] from typing import overload from typing_extensions import Literal @overload def func(x: str) -> str: ... @overload def func(x: Literal[1]) -> int: ... def func(x): pass @overload def func_same(x: str) -> str: ... @overload def func_same(x: Literal[1]) -> int: ... def func_same(x): pass class C: @overload def method(self, x: str) -> str: ... @overload def method(self, x: Literal[1]) -> int: ... def method(self, x): pass @overload def method_same(self, x: str) -> str: ... 
@overload def method_same(self, x: Literal[1]) -> int: ... def method_same(self, x): pass [file next.py] from typing import overload from typing_extensions import Literal @overload def func(x: str) -> str: ... @overload def func(x: Literal[2]) -> int: ... def func(x): pass @overload def func_same(x: str) -> str: ... @overload def func_same(x: Literal[1]) -> int: ... def func_same(x): pass class C: @overload def method(self, x: str) -> str: ... @overload def method(self, x: Literal[2]) -> int: ... def method(self, x): pass @overload def method_same(self, x: str) -> str: ... @overload def method_same(self, x: Literal[1]) -> int: ... def method_same(self, x): pass [out] __main__.C.method __main__.func [case testUnionOfLiterals] from typing_extensions import Literal x: Literal[1, '2'] [file next.py] from typing_extensions import Literal x: Literal[1, 2] [out] __main__.x [case testUnionOfCallables] from typing import Callable, Union from mypy_extensions import Arg x: Union[Callable[[Arg(int, 'x')], None], Callable[[int], None]] [file next.py] from typing import Callable, Union from mypy_extensions import Arg x: Union[Callable[[Arg(int, 'y')], None], Callable[[int], None]] [out] __main__.x mypy-0.761/test-data/unit/errorstream.test0000644€tŠÔÚ€2›s®0000000220013576752246025025 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for incremental error streaming. -- Each time errors are reported, '==== Errors flushed ====' is printed. 
[case testErrorStream] import b [file a.py] 1 + '' [file b.py] import a '' / 2 [out] ==== Errors flushed ==== a.py:1: error: Unsupported operand types for + ("int" and "str") ==== Errors flushed ==== b.py:2: error: Unsupported operand types for / ("str" and "int") [case testBlockers] import b [file a.py] 1 + '' [file b.py] import a break 1 / '' # won't get reported, after a blocker [out] ==== Errors flushed ==== a.py:1: error: Unsupported operand types for + ("int" and "str") ==== Errors flushed ==== b.py:2: error: 'break' outside loop [case testCycles] import a [file a.py] import b 1 + '' def f() -> int: reveal_type(b.x) return b.x y = 0 + 0 [file b.py] import a def g() -> int: reveal_type(a.y) return a.y 1 / '' x = 1 + 1 [out] ==== Errors flushed ==== b.py:3: note: Revealed type is 'builtins.int' b.py:5: error: Unsupported operand types for / ("int" and "str") ==== Errors flushed ==== a.py:2: error: Unsupported operand types for + ("int" and "str") a.py:4: note: Revealed type is 'builtins.int' mypy-0.761/test-data/unit/fine-grained-blockers.test0000644€tŠÔÚ€2›s®0000002254213576752246026625 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for fine-grained incremental mode and blocking errors -- -- The comments in fine-grained.test explain how these tests work. 
-- TODO: -- - blocking error while other existing errors as well (that get preserved) -- - differences in other modules + blocking error [case testParseError] import a a.f() [file a.py] def f() -> None: pass [file a.py.2] def f(x: int) -> [file a.py.3] def f(x: int) -> None: pass [file a.py.4] def f() -> None: pass [out] == a.py:1: error: invalid syntax == main:2: error: Too few arguments for "f" == [case testParseErrorShowSource] # flags: --pretty --show-error-codes import a a.f() [file a.py] def f() -> None: pass [file a.py.2] def f(x: int) -> [file a.py.3] def f(x: int) -> None: pass [file a.py.4] def f() -> None: pass [out] == a.py:1: error: invalid syntax [syntax] def f(x: int) -> ^ == main:3: error: Too few arguments for "f" [call-arg] a.f() ^ == [case testParseErrorMultipleTimes] import a a.f() [file a.py] def f() -> None: pass [file a.py.2] def f(x: int) -> [file a.py.3] def f(x: int ) -> None [file a.py.4] def f(x: int) -> None: pass [out] == a.py:1: error: invalid syntax == a.py:2: error: invalid syntax == main:2: error: Too few arguments for "f" [case testSemanticAnalysisBlockingError] import a a.f() [file a.py] def f() -> None: pass [file a.py.2] def f() -> None: pass break [file a.py.3] def f(x: int) -> None: pass [out] == a.py:2: error: 'break' outside loop == main:2: error: Too few arguments for "f" [case testBlockingErrorWithPreviousError] import a import b a.f(1) def g() -> None: b.f(1) [file a.py] def f() -> None: pass [file b.py] def f() -> None: pass [file a.py.2] def f() -> None [file a.py.3] def f() -> None: pass [out] main:3: error: Too many arguments for "f" main:5: error: Too many arguments for "f" == a.py:1: error: invalid syntax == main:3: error: Too many arguments for "f" main:5: error: Too many arguments for "f" [case testUpdateClassReferenceAcrossBlockingError] import a c: a.C def f() -> None: c.f() [file a.py] class C: def f(self) -> None: pass [file a.py.2] error error [file a.py.3] class C: def f(self, x: int) -> None: pass [out] == 
a.py:1: error: invalid syntax == main:5: error: Too few arguments for "f" of "C" [case testAddFileWithBlockingError] import a a.f(1) [file a.py.2] x x [file a.py.3] def f() -> None: pass [out] main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == a.py:1: error: invalid syntax == main:2: error: Too many arguments for "f" [case testModifyTwoFilesOneWithBlockingError1] import a [file a.py] import b def f() -> None: pass b.g() [file b.py] import a a.f() def g() -> None: pass [file a.py.2] import b # Dummy edit def f() -> None: pass b.g() [file b.py.2] import a a # Syntax error a.f() def g() -> None: pass [file b.py.3] import a a.f() def g() -> None: pass [out] == b.py:1: error: invalid syntax == [case testModifyTwoFilesOneWithBlockingError2] import a [file a.py] import b def f() -> None: pass b.g() [file b.py] import a a.f() def g() -> None: pass [file a.py.2] import b b def f() -> None: pass b.g() [file b.py.2] import a # Dummy edit a.f() def g() -> None: pass [file a.py.3] import b def f() -> None: pass b.g() [out] == a.py:1: error: invalid syntax == [case testBlockingErrorRemainsUnfixed] import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [file a.py.2] x x [file b.py.3] def f(x: int) -> None: pass [file a.py.4] import b b.f() [out] == a.py:1: error: invalid syntax == a.py:1: error: invalid syntax == a.py:2: error: Too few arguments for "f" [case testModifyTwoFilesIntroduceTwoBlockingErrors] import a [file a.py] import b def f() -> None: pass b.g() [file b.py] import a a.f() def g() -> None: pass [file a.py.2] import b b def f() -> None: pass b.g() [file b.py.2] import a a a.f() def g() -> None: pass [file a.py.3] import b b def f() -> None: pass b.g() [file b.py.3] import a a a.f() def g() -> None: pass [file a.py.4] import b def f() -> None: pass b.g(1) [file b.py.4] import a def g() -> None: pass a.f(1) [out] == a.py:1: error: 
invalid syntax == a.py:1: error: invalid syntax == b.py:3: error: Too many arguments for "f" a.py:3: error: Too many arguments for "g" [case testDeleteFileWithBlockingError-only_when_nocache] -- Different cache/no-cache tests because: -- Error message ordering differs import a import b [file a.py] def f() -> None: pass [file b.py] import a a.f() [file a.py.2] x x [delete a.py.3] [out] == a.py:1: error: invalid syntax == main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports b.py:1: error: Cannot find implementation or library stub for module named 'a' [case testDeleteFileWithBlockingError-only_when_cache] -- Different cache/no-cache tests because: -- Error message ordering differs import a import b [file a.py] def f() -> None: pass [file b.py] import a a.f() [file a.py.2] x x [delete a.py.3] [out] == a.py:1: error: invalid syntax == b.py:1: error: Cannot find implementation or library stub for module named 'a' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'a' [case testModifyFileWhileBlockingErrorElsewhere] import a import b [file a.py] [file b.py] import a [file a.py.2] x x [file b.py.3] import a a.f() 1() [file a.py.4] [builtins fixtures/module.pyi] [out] == a.py:1: error: invalid syntax == a.py:1: error: invalid syntax == b.py:2: error: Module has no attribute "f" b.py:3: error: "int" not callable [case testImportBringsAnotherFileWithBlockingError1] import a [file a.py] [file a.py.2] import blocker 1() [file a.py.3] 1() def f() -> None: pass [out] == /test-data/unit/lib-stub/blocker.pyi:2: error: invalid syntax == a.py:1: error: "int" not callable [case testImportBringsAnotherFileWithSemanticAnalysisBlockingError] import a [file a.py] [file a.py.2] import blocker2 1() [file a.py.3] 1() [out] == 
/test-data/unit/lib-stub/blocker2.pyi:2: error: 'continue' outside loop == a.py:1: error: "int" not callable [case testFixingBlockingErrorTriggersDeletion1-only_when_nocache] -- Disabled in cache mdode: -- Cache mode fails to produce the error in the final step, but this is -- a manifestation of a bug that can occur in no-cache mode also. import a def g(x: a.A) -> None: x.f() [file a.py] class A: def f(self) -> None: pass [delete a.py.2] [file a.py.3] class A: pass [builtins fixtures/module.pyi] [out] == main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == main:4: error: "A" has no attribute "f" [case testFixingBlockingErrorTriggersDeletion2] from a import A def g(x: A) -> None: x.f() [file a.py] class A: def f(self) -> None: pass [delete a.py.2] [file a.py.3] [builtins fixtures/module.pyi] [out] == main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == main:1: error: Module 'a' has no attribute 'A' [case testFixingBlockingErrorBringsInAnotherModuleWithBlocker] import a [file a.py] [file a.py.2] x y [file a.py.3] import blocker 1() [file a.py.4] import sys 1() [out] == a.py:1: error: invalid syntax == /test-data/unit/lib-stub/blocker.pyi:2: error: invalid syntax == a.py:2: error: "int" not callable [case testInitialBlocker] # cmd: mypy a.py b.py [file a.py] 1 1 [file b.py] def f() -> int: return '' [file a.py.2] x = 1 [file b.py.3] def f() -> int: return 0 [out] a.py:1: error: invalid syntax == b.py:2: error: Incompatible return value type (got "str", expected "int") == [case testDecodeErrorBlocker-posix] import a a.f(1) [file a.py] def f(x: int) -> None: ... [file a.py.2] # coding: ascii ä = 1 [file a.py.3] def f(x: str) -> None: ... 
[out] == mypy: can't decode file 'tmp/a.py': 'ascii' codec can't decode byte 0xc3 in position 16: ordinal not in range(128) == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testDecodeErrorBlocker-windows] import a a.f(1) [file a.py] def f(x: int) -> None: ... [file a.py.2] # coding: ascii ä = 1 [file a.py.3] def f(x: str) -> None: ... [out] == mypy: can't decode file 'tmp/a.py': 'ascii' codec can't decode byte 0xc3 in position 17: ordinal not in range(128) == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testDecodeErrorBlocker_python2-only_when_nocache] # flags: --py2 import a a.f(1) [file a.py] def f(x): # type: (int) -> None pass [file a.py.2] ä = 1 [file a.py.3] def f(x): # type: (str) -> None pass [out] == mypy: can't decode file 'tmp/a.py': 'ascii' codec can't decode byte 0xc3 in position 0: ordinal not in range(128) == main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testDecodeErrorBlockerOnInitialRun-posix] # Note that there's no test variant for Windows, since the above Windows test case is good enough. import a a.f(1) [file a.py] # coding: ascii ä = 1 [file a.py.2] def f(x: str) -> None: ... [out] mypy: can't decode file 'tmp/a.py': 'ascii' codec can't decode byte 0xc3 in position 16: ordinal not in range(128) == main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str" mypy-0.761/test-data/unit/fine-grained-cache-incremental.test0000644€tŠÔÚ€2›s®0000001156413576752246030365 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for building caches for fine-grained mode using incremental -- builds. -- -- These tests are run only with the cache. -- -- '# num_build_steps: N' specifies how many regular build steps (without the daemon) -- to do before switching to running using the daemon. Default is 1. 
-- Add file -- -------- [case testIncrCacheBasic1] # num_build_steps: 2 import a [file a.py] from b import x def f() -> int: return 0 [file b.py] x = 1 [file a.py.2] from b import x def f() -> int: return 0 + x [file b.py.3] x = 'hi' [out] == == a.py:3: error: Unsupported operand types for + ("int" and "str") [case testIncrCacheBasic2] # num_build_steps: 2 import a [file a.py] from b import x def f() -> int: return 0+x [file b.py] x = 1 [file b.py.2] from c import x [file c.py.2] x = 1 [file c.py.3] x = 'hi' [out] == == a.py:3: error: Unsupported operand types for + ("int" and "str") [case testIncrCacheDoubleChange1] # num_build_steps: 2 import b import c [file a.py] def f(x: int) -> None: pass [file b.py] from a import f f(10) [file c.py] from a import f f(10) [file a.py.2] def f(x: int) -> None: pass # nothing changed [file a.py.3] def f(x: str) -> None: pass [out] == == c.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" b.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testIncrCacheProtocol1] # num_build_steps: 2 import a [file a.py] import b from typing import Protocol class P(Protocol): x: int def f() -> None: def g(x: P) -> None: pass g(b.C()) [file c.py] [file c.py.2] # yo [file b.py] class C: x: int [file b.py.3] class C: x: str -- If we did a *full* reload (because the proto cache failed to load), -- nothing would show up as stale [stale2 b] [rechecked2 a, b] [out] == == a.py:8: error: Argument 1 to "g" has incompatible type "C"; expected "P" a.py:8: note: Following member(s) of "C" have conflicts: a.py:8: note: x: expected "int", got "str" [case testIncrCacheProtocol2] # num_build_steps: 3 import a [file a.py] import b from typing import Protocol class P(Protocol): x: int class Q(Protocol): x: int def f() -> None: def g(x: P) -> None: pass g(b.C()) [file c.py] [file c.py.2] # uh [file c.py.3] from a import Q import b def f() -> None: def g(x: Q) -> None: pass g(b.C()) [file b.py] class C: x: int 
[file b.py.4] class C: x: str -- If we did a *full* reload (because the proto cache failed to load), -- nothing would show up as stale [stale3 b] [rechecked3 a, b, c] [out] == == == c.py:6: error: Argument 1 to "g" has incompatible type "C"; expected "Q" c.py:6: note: Following member(s) of "C" have conflicts: c.py:6: note: x: expected "int", got "str" a.py:10: error: Argument 1 to "g" has incompatible type "C"; expected "P" a.py:10: note: Following member(s) of "C" have conflicts: a.py:10: note: x: expected "int", got "str" [case testIncrCacheProtocol3] # num_build_steps: 2 import a [file a.py] import b from typing import Protocol class P(Protocol): x: int def f() -> None: def g(x: P) -> None: pass g(b.C()) [file c.py] [file c.py.2] # yo [file b.py] class C: x: int [file b.py.3] class C: x: int y: int [file b.py.4] class C: x: str y: int -- If we did a *full* reload (because the proto cache failed to load), -- nothing would show up as stale [stale2 b] [rechecked2 b] [stale3 b] [rechecked3 a, b] [out] == == == a.py:8: error: Argument 1 to "g" has incompatible type "C"; expected "P" a.py:8: note: Following member(s) of "C" have conflicts: a.py:8: note: x: expected "int", got "str" [case testIncrCacheBustedProtocol] # flags: --no-sqlite-cache [file a.py] [file b.py] -- This is a heinous hack, but we simulate having a invalid cache by clobbering -- the proto deps file with something with mtime mismatches. 
[file ../.mypy_cache/3.6/@deps.meta.json.2] {"snapshot": {"__main__": "a7c958b001a45bd6a2a320f4e53c4c16", "a": "d41d8cd98f00b204e9800998ecf8427e", "b": "d41d8cd98f00b204e9800998ecf8427e", "builtins": "c532c89da517a4b779bcf7a964478d67"}, "deps_meta": {"@root": {"path": "@root.deps.json", "mtime": 0}, "__main__": {"path": "__main__.deps.json", "mtime": 0}, "a": {"path": "a.deps.json", "mtime": 0}, "b": {"path": "b.deps.json", "mtime": 0}, "builtins": {"path": "builtins.deps.json", "mtime": 0}}} [file b.py.2] # uh -- A full reload shows up as nothing getting rechecked by fine-grained mode. -- If we did not do a full reload, b would be stale and checked in fine-grained mode [stale] [rechecked] [out] == [case testInvalidateCachePart] # flags: --no-sqlite-cache # cmd: mypy a1.py a2.py b.py p/__init__.py p/c.py [file a1.py] import p from b import x from p.c import C [file a2.py] import p from b import x from p.c import C [file b.py] x = 10 [file p/__init__.py] [file p/c.py] class C: pass [delete ../.mypy_cache/3.6/b.meta.json.2] [delete ../.mypy_cache/3.6/p/c.meta.json.2] [out] == mypy-0.761/test-data/unit/fine-grained-cycles.test0000644€tŠÔÚ€2›s®0000000605213576752246026301 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for fine-grained incremental checking and import cycles -- -- The comment at the top of fine-grained.test explains how these tests -- work. 
[case testFunctionSelfReferenceThroughImportCycle] import a [file a.py] from b import f [file b.py] import a def f() -> None: a.f() [file b.py.2] import a def f(x: int) -> None: a.f() [out] == b.py:4: error: Too few arguments for "f" [case testClassSelfReferenceThroughImportCycle] import a [file a.py] from b import A [file b.py] import a class A: def g(self) -> None: pass def f() -> None: a.A().g() [file b.py.2] import a class A: def g(self, x: int) -> None: pass def f() -> None: a.A().g() [out] == b.py:7: error: Too few arguments for "g" of "A" [case testAnnotationSelfReferenceThroughImportCycle] import a [file a.py] from b import A [file b.py] import a x: a.A class A: def g(self) -> None: pass def f() -> None: x.g() [file b.py.2] import a x: a.A class A: def g(self, x: int) -> None: pass def f() -> None: x.g() [out] == b.py:9: error: Too few arguments for "g" of "A" [case testModuleSelfReferenceThroughImportCycle] import a [file a.py] import b [file b.py] import a def f() -> None: a.b.f() [file b.py.2] import a def f(x: int) -> None: a.b.f() [out] == b.py:4: error: Too few arguments for "f" [case testVariableSelfReferenceThroughImportCycle] import a [file a.py] from b import x [file b.py] import a x: int def f() -> None: a.x = 1 [file b.py.2] import a x: str def f() -> None: a.x = 1 [out] == b.py:6: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testReferenceToTypeThroughCycle] import a [file a.py] from b import C def f() -> C: pass [file b.py] import a class C: def g(self) -> None: pass def h() -> None: c = a.f() c.g() [file b.py.2] import a class C: def g(self, x: int) -> None: pass def h() -> None: c = a.f() c.g() [out] == b.py:8: error: Too few arguments for "g" of "C" [case testReferenceToTypeThroughCycleAndDeleteType] import a [file a.py] from b import C def f() -> C: pass [file b.py] import a class C: def g(self) -> None: pass def h() -> None: c = a.f() c.g() [file b.py.2] import a def h() -> None: c = 
a.f() c.g() [out] == a.py:1: error: Module 'b' has no attribute 'C' [case testReferenceToTypeThroughCycleAndReplaceWithFunction] import a [file a.py] from b import C def f() -> C: pass [file b.py] import a class C: def g(self) -> None: pass def h() -> None: c = a.f() c.g() [file b.py.2] import a def C() -> int: pass def h() -> None: c = a.f() c.g() [out] == a.py:3: error: Function "b.C" is not valid as a type a.py:3: note: Perhaps you need "Callable[...]" or a callback protocol? b.py:7: error: C? has no attribute "g" -- TODO: More import cycle: -- -- * "from x import y" through cycle -- * "from x import *" through cycle -- * "Cls.module" though cycle -- * TypeVar -- * type alias -- * all kinds of reference deleted -- * all kinds of reference rebound to different kind -- -- etc. mypy-0.761/test-data/unit/fine-grained-modules.test0000644€tŠÔÚ€2›s®0000013017213576752246026470 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for fine-grained incremental mode related to modules -- -- Covers adding and deleting modules, changes to multiple modules, -- changes to import graph, and changes to module references. -- -- The comments in fine-grained.test explain how these tests work. 
-- Add file -- -------- [case testAddFile] import b [file b.py] [file a.py.2] def f() -> None: pass [file b.py.3] import a a.f(1) [out] == == b.py:2: error: Too many arguments for "f" [case testAddFileWithErrors] import b [file b.py] [file a.py.2] def f() -> str: return 1 [file b.py.3] import a a.f(1) [file a.py.4] def f(x: int) -> None: pass [out] == a.py:2: error: Incompatible return value type (got "int", expected "str") == b.py:2: error: Too many arguments for "f" a.py:2: error: Incompatible return value type (got "int", expected "str") == [case testAddFileFixesError] import b [file b.py] [file b.py.2] from a import f f() [file a.py.3] def f() -> None: pass [out] == b.py:1: error: Cannot find implementation or library stub for module named 'a' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == [case testAddFileFixesAndGeneratesError1] import b [file b.py] [file b.py.2] from a import f [file b.py.3] from a import f f(1) [file a.py.4] def f() -> None: pass [out] == b.py:1: error: Cannot find implementation or library stub for module named 'a' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == b.py:1: error: Cannot find implementation or library stub for module named 'a' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == b.py:2: error: Too many arguments for "f" [case testAddFileFixesAndGeneratesError2] import b [file b.py] [file b.py.2] from a import f f(1) [file c.py.3] x = 'whatever' [file a.py.4] def f() -> None: pass [out] == b.py:1: error: Cannot find implementation or library stub for module named 'a' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == b.py:1: error: Cannot find implementation or library stub for module named 'a' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == b.py:2: error: Too many arguments for "f" [case 
testAddFileGeneratesError1] # flags: --ignore-missing-imports import a [file a.py] from b import f f(1) [file b.py.2] def f() -> None: pass [out] == a.py:2: error: Too many arguments for "f" [case testAddFilePreservesError1] import b [file b.py] [file b.py.2] from a import f f(1) [file x.py.3] # unrelated change [out] == b.py:1: error: Cannot find implementation or library stub for module named 'a' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == b.py:1: error: Cannot find implementation or library stub for module named 'a' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testAddFilePreservesError2] import b [file b.py] f() [file a.py.2] [out] b.py:1: error: Name 'f' is not defined == b.py:1: error: Name 'f' is not defined [case testRemoveSubmoduleFromBuild1] # cmd1: mypy a.py b/__init__.py b/c.py # cmd2: mypy a.py b/__init__.py # flags: --follow-imports=skip --ignore-missing-imports [file a.py] from b import c x=1 [file a.py.2] from b import c x=2 [file a.py.3] from b import c x=3 [file b/__init__.py] [file b/c.py] [out] == == [case testImportLineNumber1] import b [file b.py] [file b.py.2] x = 1 import a [out] == b.py:2: error: Cannot find implementation or library stub for module named 'a' b.py:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testImportLineNumber2] import b [file b.py] [file b.py.2] x = 1 import a from c import f [file x.py.3] [out] == b.py:2: error: Cannot find implementation or library stub for module named 'a' b.py:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports b.py:3: error: Cannot find implementation or library stub for module named 'c' == b.py:2: error: Cannot find implementation or library stub for module named 'a' b.py:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports b.py:3: error: Cannot find implementation or library stub for module 
named 'c' [case testAddPackage1] import p.a p.a.f(1) [file p/__init__.py.2] [file p/a.py.2] def f(x: str) -> None: pass [out] main:1: error: Cannot find implementation or library stub for module named 'p.a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'p' == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddPackage2] import p p.f(1) [file p/__init__.py.2] from p.a import f [file p/a.py.2] def f(x: str) -> None: pass [out] main:1: error: Cannot find implementation or library stub for module named 'p' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddPackage3] import p.a p.a.f(1) [file p/__init__.py.2] [file p/a.py.3] def f(x: str) -> None: pass [out] main:1: error: Cannot find implementation or library stub for module named 'p.a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'p' == main:1: error: Cannot find implementation or library stub for module named 'p.a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [builtins fixtures/module.pyi] [case testAddPackage4] import p.a p.a.f(1) [file p/a.py.2] def f(x: str) -> None: pass [file p/__init__.py.3] [out] main:1: error: Cannot find implementation or library stub for module named 'p.a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'p' == main:1: error: Cannot find implementation or library stub for module named 'p.a' main:1: note: See 
https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'p' == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddNonPackageSubdir] # cmd: mypy x.py # cmd2: mypy x.py foo/a.py foo/b.py [file x.py] [file foo/a.py.2] import b b.foo(5) [file foo/b.py.2] def foo(x: str) -> None: pass [out] == foo/a.py:2: error: Argument 1 to "foo" has incompatible type "int"; expected "str" [case testAddPackage5] # cmd: mypy main p/a.py # cmd2: mypy main p/a.py # cmd3: mypy main p/a.py p/__init__.py import p.a p.a.f(1) [file p/a.py] [file p/a.py.2] def f(x: str) -> None: pass [file p/__init__.py.3] [out] main:4: error: Cannot find implementation or library stub for module named 'p.a' main:4: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:4: error: Cannot find implementation or library stub for module named 'p' == main:4: error: Cannot find implementation or library stub for module named 'p.a' main:4: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:4: error: Cannot find implementation or library stub for module named 'p' == main:5: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddPackage6] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy x.py # cmd2: mypy x.py p/a.py [file x.py] import p.a p.a.f(1) [file p/a.py.2] def f(x: str) -> None: pass [file p/__init__.py.2] [out] == -- It is a bug (#4797) that this isn't an error, but not a fine-grained specific one [case testAddPackage7] # flags: --follow-imports=skip # cmd: mypy x.py # cmd2: mypy x.py p/a.py [file x.py] from p.a import f f(1) [file p/a.py.2] def f(x: str) -> None: pass [file p/__init__.py.2] [out] x.py:1: error: Cannot find implementation or library stub for module named 'p.a' x.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == 
x.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAddPackage8] # cmd: mypy x.py p/a.py # cmd2: mypy x.py p/a.py p/__init__.py # cmd3: mypy x.py p/a.py p/__init__.py [file x.py] [file p/a.py] 1+'hi' [file p/__init__.py.2] [file p/a.py.3] '1'+'hi' [out] p/a.py:1: error: Unsupported operand types for + ("int" and "str") == p/a.py:1: error: Unsupported operand types for + ("int" and "str") == -- Delete file -- ----------- [case testDeleteBasic] import a [file a.py] import b [file b.py] def f() -> None: pass [file a.py.2] [delete b.py.3] [out] == == [case testDeleteDepOfDunderInit1] [file p/__init__.py] from .foo import Foo [file p/foo.py] class Foo: pass [file p/__init__.py.2] [delete p/foo.py.2] [out] == [case testDeleteDepOfDunderInit2] [file p/__init__.py] from p.foo import Foo [file p/foo.py] class Foo: pass [file p/__init__.py.2] [delete p/foo.py.2] [out] == [case testDeletionTriggersImportFrom] import a [file a.py] from b import f def g() -> None: f() [file b.py] def f() -> None: pass [delete b.py.2] [file b.py.3] def f(x: int) -> None: pass [out] == a.py:1: error: Cannot find implementation or library stub for module named 'b' a.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == a.py:4: error: Too few arguments for "f" [case testDeletionTriggersImport] import a [file a.py] def f() -> None: pass [delete a.py.2] [file a.py.3] def f() -> None: pass [out] == main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == [case testDeletionOfSubmoduleTriggersImportFrom1-only_when_nocache] -- Different cache/no-cache tests because: -- missing module error message mismatch from p import q [file p/__init__.py] [file p/q.py] [delete p/q.py.2] [file p/q.py.3] [out] == main:1: error: Module 'p' has no attribute 'q' -- TODO: The following messages are different compared to non-incremental 
mode main:1: error: Cannot find implementation or library stub for module named 'p.q' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == -- TODO: Fix this bug. It is a real bug that was been papered over -- by the test harness. [case testDeletionOfSubmoduleTriggersImportFrom1-only_when_cache-skip] -- Different cache/no-cache tests because: -- missing module error message mismatch from p import q [file p/__init__.py] [file p/q.py] [delete p/q.py.2] [file p/q.py.3] [out] == main:1: error: Cannot find implementation or library stub for module named 'p.q' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == [case testDeletionOfSubmoduleTriggersImportFrom2] from p.q import f f() [file p/__init__.py] [file p/q.py] def f() -> None: pass [delete p/q.py.2] [file p/q.py.3] def f(x: int) -> None: pass [out] == main:1: error: Cannot find implementation or library stub for module named 'p.q' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == main:2: error: Too few arguments for "f" [case testDeletionOfSubmoduleTriggersImport] import p.q [file p/__init__.py] [file p/q.py] def f() -> None: pass [delete p/q.py.2] [file p/q.py.3] def f(x: int) -> None: pass [out] == main:1: error: Cannot find implementation or library stub for module named 'p.q' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == [case testDeleteSubpackageWithNontrivialParent1] [file p/__init__.py] def g() -> None: pass [file p/b.py.2] def foo() -> None: pass foo() [delete p/b.py.3] [out] == == [case testDeleteModuleWithError] import a [file a.py] def f() -> int: return 1 [file a.py.2] def f() -> str: return 1 [delete a.py.3] def f() -> str: return 1 [out] == a.py:2: error: Incompatible return value type (got "int", expected "str") == main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See 
https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testDeleteModuleWithErrorInsidePackage] import a.b [file a/__init__.py] [file a/b.py] def f() -> int: return '' [delete a/b.py.2] def f() -> str: return 1 [out] a/b.py:2: error: Incompatible return value type (got "str", expected "int") == main:1: error: Cannot find implementation or library stub for module named 'a.b' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testModifyTwoFilesNoError1] import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [file a.py.2] import b b.f(1) [file b.py.2] def f(x: int) -> None: pass [out] == [case testDeleteSubpackageInit1] # cmd: mypy q/r/s.py # flags: --follow-imports=skip --ignore-missing-imports [file q/__init__.py] [file q/r/__init__.py] [file q/r/s.py] [delete q/__init__.py.2] [out] == [case testAddSubpackageInit2] # cmd: mypy q/r/s.py # flags: --follow-imports=skip --ignore-missing-imports [file q/r/__init__.py] [file q/r/s.py] 1 [file q/r/s.py.2] 2 [file q/__init__.py.2] [out] == [case testModifyTwoFilesNoError2] import a [file a.py] from b import g def f() -> None: pass [file b.py] import a def g() -> None: pass a.f() [file a.py.2] from b import g def f(x: int) -> None: pass [file b.py.2] import a def g() -> None: pass a.f(1) [out] == [case testModifyTwoFilesErrorsElsewhere] import a import b a.f() b.g(1) [file a.py] def f() -> None: pass [file b.py] def g(x: int) -> None: pass [file a.py.2] def f(x: int) -> None: pass [file b.py.2] def g() -> None: pass [out] == main:3: error: Too few arguments for "f" main:4: error: Too many arguments for "g" [case testModifyTwoFilesErrorsInBoth] import a [file a.py] import b def f() -> None: pass b.g(1) [file b.py] import a def g(x: int) -> None: pass a.f() [file a.py.2] import b def f(x: int) -> None: pass b.g(1) [file b.py.2] import a def g() -> None: pass a.f() [out] == b.py:3: error: Too few arguments for "f" a.py:3: error: Too many arguments 
for "g" [case testModifyTwoFilesFixErrorsInBoth] import a [file a.py] import b def f(x: int) -> None: pass b.g(1) [file b.py] import a def g() -> None: pass a.f() [file a.py.2] import b def f() -> None: pass b.g(1) [file b.py.2] import a def g(x: int) -> None: pass a.f() [out] b.py:3: error: Too few arguments for "f" a.py:3: error: Too many arguments for "g" == [case testAddTwoFilesNoError] import a [file a.py] import b import c b.f() c.g() [file b.py.2] import c def f() -> None: pass c.g() [file c.py.2] import b def g() -> None: pass b.f() [out] a.py:1: error: Cannot find implementation or library stub for module named 'b' a.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports a.py:2: error: Cannot find implementation or library stub for module named 'c' == [case testAddTwoFilesErrorsInBoth] import a [file a.py] import b import c b.f() c.g() [file b.py.2] import c def f() -> None: pass c.g(1) [file c.py.2] import b def g() -> None: pass b.f(1) [out] a.py:1: error: Cannot find implementation or library stub for module named 'b' a.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports a.py:2: error: Cannot find implementation or library stub for module named 'c' == c.py:3: error: Too many arguments for "f" b.py:3: error: Too many arguments for "g" [case testAddTwoFilesErrorsElsewhere] import a import b a.f(1) b.g(1) [file a.py.2] def f() -> None: pass [file b.py.2] def g() -> None: pass [out] main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Cannot find implementation or library stub for module named 'b' == main:3: error: Too many arguments for "f" main:4: error: Too many arguments for "g" [case testDeleteTwoFilesErrorsElsewhere] import a import b a.f() b.g() [file a.py] def f() -> None: pass [file b.py] def g() -> None: pass [delete a.py.2] [delete b.py.2] [out] == 
main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Cannot find implementation or library stub for module named 'b' [case testDeleteTwoFilesNoErrors] import a [file a.py] import b import c b.f() c.g() [file b.py] def f() -> None: pass [file c.py] def g() -> None: pass [file a.py.2] [delete b.py.3] [delete c.py.3] [out] == == [case testDeleteTwoFilesFixErrors] import a import b a.f() b.g() [file a.py] import b def f() -> None: pass b.g(1) [file b.py] import a def g() -> None: pass a.f(1) [delete a.py.2] [delete b.py.2] [out] b.py:3: error: Too many arguments for "f" a.py:3: error: Too many arguments for "g" == main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Cannot find implementation or library stub for module named 'b' [case testAddFileWhichImportsLibModule] import a a.x = 0 [file a.py.2] import sys x = sys.platform [out] main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == main:2: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAddFileWhichImportsLibModuleWithErrors] # flags: --no-silence-site-packages import a a.x = 0 [file a.py.2] import broken x = broken.x z [out] main:2: error: Cannot find implementation or library stub for module named 'a' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == a.py:3: error: Name 'z' is not defined /test-data/unit/lib-stub/broken.pyi:2: error: Name 'y' is not defined [case testRenameModule] import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [file a.py.2] import c c.f() [file c.py.2] def f() -> None: pass [file a.py.3] import c c.f(1) 
[out] == == a.py:2: error: Too many arguments for "f" [case testDeleteFileWSuperClass] # flags: --ignore-missing-imports [file a.py] from c import Bar from b import Foo z = (1, Foo()) [file b.py] from e import Quux from d import Baz class Foo(Baz, Quux): pass [file e.py] from c import Bar class Quux(Bar): pass [file c.py] class Bar: pass [file d.py] class Baz: pass [delete c.py.2] [out] == [case testDeleteFileWithinPackage] import a [file a.py] import m.x m.x.g(1) [file m/__init__.py] [file m/x.py] def g() -> None: pass [delete m/x.py.2] [builtins fixtures/module.pyi] [out] a.py:2: error: Too many arguments for "g" == a.py:1: error: Cannot find implementation or library stub for module named 'm.x' a.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports a.py:2: error: Module has no attribute "x" [case testDeletePackage1] import p.a p.a.f(1) [file p/__init__.py] [file p/a.py] def f(x: str) -> None: pass [delete p/__init__.py.2] [delete p/a.py.2] def f(x: str) -> None: pass [out] main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" == main:1: error: Cannot find implementation or library stub for module named 'p.a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'p' [case testDeletePackage2] import p p.f(1) [file p/__init__.py] from p.a import f [file p/a.py] def f(x: str) -> None: pass [delete p/__init__.py.2] [delete p/a.py.2] [out] main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" == main:1: error: Cannot find implementation or library stub for module named 'p' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testDeletePackage3] import p.a p.a.f(1) [file p/__init__.py] [file p/a.py] def f(x: str) -> None: pass [delete p/a.py.2] [delete p/__init__.py.3] [builtins fixtures/module.pyi] [out] main:3: error: Argument 1 to "f" 
has incompatible type "int"; expected "str" == main:2: error: Cannot find implementation or library stub for module named 'p.a' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:3: error: Module has no attribute "a" == main:2: error: Cannot find implementation or library stub for module named 'p.a' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Cannot find implementation or library stub for module named 'p' [case testDeletePackage4] import p.a p.a.f(1) [file p/a.py] def f(x: str) -> None: pass [file p/__init__.py] [delete p/__init__.py.2] [delete p/a.py.3] [out] main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" == main:1: error: Cannot find implementation or library stub for module named 'p.a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'p' == main:1: error: Cannot find implementation or library stub for module named 'p.a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named 'p' [case testDeletePackage5] # cmd1: mypy main p/a.py p/__init__.py # cmd2: mypy main p/a.py # cmd3: mypy main import p.a p.a.f(1) [file p/a.py] def f(x: str) -> None: pass [file p/__init__.py] [delete p/__init__.py.2] [delete p/a.py.3] [out] main:6: error: Argument 1 to "f" has incompatible type "int"; expected "str" == main:5: error: Cannot find implementation or library stub for module named 'p.a' main:5: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:5: error: Cannot find implementation or library stub for module named 'p' == main:5: error: Cannot find implementation or library stub for module named 'p.a' main:5: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports 
main:5: error: Cannot find implementation or library stub for module named 'p' [case testDeletePackage6] # cmd1: mypy p/a.py p/b.py p/__init__.py # cmd2: mypy p/a.py p/b.py # cmd3: mypy p/a.py p/b.py [file p/a.py] def f(x: str) -> None: pass [file p/b.py] from p.a import f f(12) [file p/__init__.py] [delete p/__init__.py.2] [file p/b.py.3] from a import f f(12) [out] p/b.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" == p/b.py:1: error: Cannot find implementation or library stub for module named 'p.a' p/b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == p/b.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" -- TODO: -- - add one file which imports another new file, blocking error in new file -- - arbitrary blocking errors -- - order of processing makes a difference -- - mix of modify, add and delete in one iteration -- Controlling imports using command line options -- ---------------------------------------------- [case testIgnoreMissingImports] # flags: --ignore-missing-imports import a [file a.py] import b import c [file c.py] [delete c.py.2] [file b.py.3] import d 1 + '' [out] == == b.py:2: error: Unsupported operand types for + ("int" and "str") [case testSkipImports] # cmd: mypy main a.py # flags: --follow-imports=skip --ignore-missing-imports import a [file a.py] import b [file b.py] 1 + '' class A: pass [file a.py.2] import b reveal_type(b) reveal_type(b.A) [file a.py.3] import b reveal_type(b) reveal_type(b.A) [file b.py.3] 1 + '' class A: pass [out] == a.py:2: note: Revealed type is 'Any' a.py:3: note: Revealed type is 'Any' == a.py:2: note: Revealed type is 'Any' a.py:3: note: Revealed type is 'Any' [case testSkipImportsWithinPackage] # cmd: mypy a/b.py # flags: --follow-imports=skip --ignore-missing-imports [file a/__init__.py] 1 + '' [file a/b.py] import a.c [file a/b.py.2] import a.c import x reveal_type(a.c) [file a/b.py.3] import a.c import x 1 + '' [out] == 
a/b.py:3: note: Revealed type is 'Any' == a/b.py:3: error: Unsupported operand types for + ("int" and "str") [case testDeleteModuleWithinPackageInitIgnored-only_when_nocache] -- Disabled in cache mode because incorrect behavior: -- Having deleted files specified on command line seems dodgy, though. # cmd: mypy x.py a/b.py # flags: --follow-imports=skip --ignore-missing-imports [file x.py] import a.b [file a/__init__.py] [file a/b.py] x = 1 [delete a/b.py.2] [out] == [case testAddImport] import what.b [file aaa/__init__.py] [file aaa/z.py] def foo(x: int) -> None: pass [file aaa/z.py.2] import config def foo() -> None: pass [file what/__init__.py] [file what/b.py] import config import aaa.z def main() -> None: aaa.z.foo(5) [file what/b.py.2] import aaa.z def main() -> None: aaa.z.foo() [file config.py] [out] == [case testAddImport2] import what.b [file aaa/__init__.py] [file aaa/z.py] def foo(x: int) -> None: pass [file aaa/z.py.2] def foo() -> None: pass [file what/__init__.py] [file what/b.py] import aaa.z def main() -> None: aaa.z.foo(5) [file what/b.py.2] import config import aaa.z def main() -> None: aaa.z.foo() [file config.py] [out] == -- Misc -- ---- [case testChangeModuleToVariable] from a import m m.x [file a.py] from b import m [file b.py] import m [file b.py.2] m = '' [file m.py] x = 1 [file m2.py] [out] == main:2: error: "str" has no attribute "x" [case testChangeVariableToModule] from a import m y: str = m [file a.py] from b import m [file b.py] m = '' [file b.py.2] import m [file m.py] x = 1 [file m2.py] [builtins fixtures/module.pyi] [out] == main:2: error: Incompatible types in assignment (expression has type Module, variable has type "str") [case testRefreshImportOfIgnoredModule1] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy c.py a/__init__.py b.py [file c.py] from a import a2 import b b.x [file a/__init__.py] [file b.py] x = 0 [file b.py.2] x = '' [file b.py.3] x = 0 [file a/a2.py] [out] == == [case 
testRefreshImportOfIgnoredModule2] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy c.py a/__init__.py b.py [file c.py] from a import a2 import b b.x [file a/__init__.py] [file b.py] x = 0 [file b.py.2] x = '' [file b.py.3] x = 0 [file a/a2/__init__.py] [out] == == [case testIncrementalWithIgnoresTwice] import a [file a.py] import b import foo # type: ignore [file b.py] x = 1 [file b.py.2] x = 'hi' [file b.py.3] x = 1 [out] == == [case testIgnoredImport2] import x [file y.py] import xyz # type: ignore B = 0 from x import A [file x.py] A = 0 from y import B [file x.py.2] A = 1 from y import B [file x.py.3] A = 2 from y import B [out] == == [case testDeleteIndirectDependency] import b b.x.foo() [file b.py] import c x = c.Foo() [file c.py] class Foo: def foo(self) -> None: pass [delete c.py.2] [file b.py.2] class Foo: def foo(self) -> None: pass x = Foo() [file b.py.3] class Foo: def foo(self, x: int) -> None: pass x = Foo() [out] == == main:2: error: Too few arguments for "foo" of "Foo" -- This series of tests is designed to test adding a new module that -- does not appear in the cache, for cache mode. They are run in -- cache mode only because stale and rechecked differ heavily between -- the modes. 
[case testAddModuleAfterCache1-only_when_cache] # cmd: mypy main a.py # cmd2: mypy main a.py b.py # cmd3: mypy main a.py b.py import a [file a.py] pass [file a.py.2] import b b.foo(0) [file b.py.2] def foo() -> None: pass [stale a, b] [rechecked a, b] [file b.py.3] def foo(x: int) -> None: pass [stale2 b] [rechecked2 b] [out] == a.py:2: error: Too many arguments for "foo" == [case testAddModuleAfterCache2-only_when_cache] # cmd: mypy main a.py # cmd2: mypy main a.py b.py # cmd3: mypy main a.py b.py # flags: --ignore-missing-imports --follow-imports=skip import a [file a.py] import b b.foo(0) [file b.py.2] def foo() -> None: pass [stale b] [rechecked a, b] [file b.py.3] def foo(x: int) -> None: pass [stale2 b] [out] == a.py:2: error: Too many arguments for "foo" == [case testAddModuleAfterCache3-only_when_cache] # cmd: mypy main a.py # cmd2: mypy main a.py b.py c.py d.py e.py f.py g.py h.py # cmd3: mypy main a.py b.py c.py d.py e.py f.py g.py h.py # flags: --ignore-missing-imports --follow-imports=skip import a [file a.py] import b, c, d, e, f, g, h b.foo(10) [file b.py.2] def foo() -> None: pass [file c.py.2] [file d.py.2] [file e.py.2] [file f.py.2] [file g.py.2] [file h.py.2] -- No files should be stale or reprocessed in the first step since the large number -- of missing files will force build to give up on cache loading. 
[stale] [file b.py.3] def foo(x: int) -> None: pass [stale2 b] [out] == a.py:2: error: Too many arguments for "foo" == [case testAddModuleAfterCache4-only_when_cache] # cmd: mypy main a.py # cmd2: mypy main a.py b.py # cmd3: mypy main a.py b.py # flags: --ignore-missing-imports --follow-imports=skip import a import b [file a.py] def foo() -> None: pass [file b.py.2] import a a.foo(10) [file a.py.3] def foo(x: int) -> None: pass [out] == b.py:2: error: Too many arguments for "foo" == [case testAddModuleAfterCache5-only_when_cache] # cmd: mypy main a.py # cmd2: mypy main a.py b.py # cmd3: mypy main a.py b.py # flags: --ignore-missing-imports --follow-imports=skip import a import b [file a.py] def foo(x: int) -> None: pass [file a.py.2] def foo() -> None: pass [file b.py.2] import a a.foo(10) [stale a, b] [file a.py.3] def foo(x: int) -> None: pass [stale2 a] [out] == b.py:2: error: Too many arguments for "foo" == [case testAddModuleAfterCache6-only_when_cache] # cmd: mypy main a.py # cmd2: mypy main a.py b.py # cmd3: mypy main a.py b.py # flags: --ignore-missing-imports --follow-imports=skip import a [file a.py] import b b.foo() [file a.py.2] import b b.foo(0) [file b.py.2] def foo() -> None: pass [stale a, b] [file b.py.3] def foo(x: int) -> None: pass [stale2 b] [out] == a.py:2: error: Too many arguments for "foo" == [case testRenameAndDeleteModuleAfterCache-only_when_cache] import a [file a.py] from b1 import f f() [file b1.py] def f() -> None: pass [file b2.py.2] def f() -> None: pass [delete b1.py.2] [file a.py.2] from b2 import f f() -- in cache mode, there is no way to know about b1 yet [stale a, b2] [out] == [case testDeleteModuleAfterCache-only_when_cache] import a [file a.py] from b import f f() [file b.py] def f() -> None: pass [delete b.py.2] -- in cache mode, there is no way to know about b yet, -- but a should get flagged as changed by the initial cache -- check, since one of its dependencies is missing. 
[stale a] [out] == a.py:1: error: Cannot find implementation or library stub for module named 'b' a.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testRefreshImportIfMypyElse1] import a [file a.py] from b import foo MYPY = False if MYPY: x = 0 else: from fictional import x x = 1 [file b/__init__.py] [file b/foo.py] [file b/__init__.py.2] # Dummy change [builtins fixtures/bool.pyi] [out] == [case testImportStarPropagateChange1] from b import f f() [file b.py] from c import * [file c.py] def f() -> None: pass [file c.py.2] def f(x: int) -> None: pass [out] == main:2: error: Too few arguments for "f" [case testImportStarPropagateChange2] from b import * f() [file b.py] def f() -> None: pass [file b.py.2] def f(x: int) -> None: pass [out] == main:2: error: Too few arguments for "f" [case testImportStarAddMissingDependency1] from b import f f() [file b.py] from c import * [file c.py] [file c.py.2] def f(x: int) -> None: pass [out] main:1: error: Module 'b' has no attribute 'f' == main:2: error: Too few arguments for "f" [case testImportStarAddMissingDependency2] from b import * f() [file b.py] [file b.py.2] def f(x: int) -> None: pass [out] main:2: error: Name 'f' is not defined == main:2: error: Too few arguments for "f" [case testImportStarAddMissingDependencyWithinClass] class A: from b import * f() x: C [file b.py] [file b.py.2] def f(x: int) -> None: pass [file b.py.3] def f(x: int) -> None: pass class C: pass [file b.py.4] def f() -> None: pass class C: pass [out] main:3: error: Name 'f' is not defined main:4: error: Name 'C' is not defined == main:3: error: Too few arguments for "f" main:4: error: Name 'C' is not defined == main:3: error: Too few arguments for "f" == [case testImportStarAddMissingDependencyInsidePackage1] from p.b import f f() [file p/__init__.py] [file p/b.py] from p.c import * [file p/c.py] [file p/c.py.2] def f(x: int) -> None: pass [out] main:1: error: Module 'p.b' has no attribute 'f' == main:2: 
error: Too few arguments for "f" [case testImportStarAddMissingDependencyInsidePackage2] import p.a [file p/__init__.py] [file p/a.py] from p.b import * f() [file p/b.py] [file p/b.py.2] def f(x: int) -> None: pass [out] p/a.py:2: error: Name 'f' is not defined == p/a.py:2: error: Too few arguments for "f" [case testImportStarRemoveDependency1] from b import f f() [file b.py] from c import * [file c.py] def f() -> None: pass [file c.py.2] [out] == main:1: error: Module 'b' has no attribute 'f' [case testImportStarRemoveDependency2] from b import * f() [file b.py] def f() -> None: pass [file b.py.2] [out] == main:2: error: Name 'f' is not defined [case testImportStarWithinFunction] def f() -> None: from m import * f() [file m.py] [file m.py.2] def f(x: int) -> None: pass [file m.py.3] def f() -> None: pass [out] == main:3: error: Too few arguments for "f" == [case testImportStarMutuallyRecursive-skip] # FIXME: busted with new analyzer? import a [file a.py] from b import * [file b.py] from a import * [file b.py.2] from a import * x = 0 [file b.py.3] from a import * x = '' [out] == == [case testImportStarSomethingMoved] import p [file p.py] from r2 import * [file r2.py] class A: pass [file p.py.2] from r1 import * from r2 import * [file r2.py.2] [file r1.py.2] class A: pass [out] == [case testImportStarOverlap] from b import * from c import * # type: ignore [file b.py] from d import T [file c.py] from d import T [file c.py.2] from d import T z = 10 [file d.py] from typing import TypeVar T = TypeVar('T') [out] == [case testImportStarOverlap2] from b import * import typing def foo(x: typing.List[int]) -> int: return x[0] [file b.py] import typing z = 10 [file b.py.2] import typing z = '10' [builtins fixtures/list.pyi] [out] == [case testImportStarOverlap3] from b import * from c import typing def foo(x: typing.List[int]) -> int: return x[0] [file b.py] import typing z = 10 [file b.py.2] import typing z = '10' [file c.py] import typing z = 10 [builtins fixtures/list.pyi] 
[out] == [case testImportPartialAssign] import a [file a.py] from c import * from b import A, x [file b.py] A = 10 x = 1 [file b.py.2] class A: pass x = 1 [file c.py] x = 10 [out] == [case testDeleteFileWithErrors] # cmd: mypy main a.py # cmd2: mypy main # flags: --follow-imports=skip --ignore-missing-imports import a [file a.py] def f() -> None: 1() ''() [file b.py.2] # unrelated change [out] a.py:2: error: "int" not callable a.py:3: error: "str" not callable == [case testAddAndUseClass1] [file a.py] [file a.py.2] from b import Foo def bar(f: Foo) -> None: f.foo(12) [file b.py.2] class Foo: def foo(self, s: str) -> None: pass [out] == a.py:3: error: Argument 1 to "foo" of "Foo" has incompatible type "int"; expected "str" [case testAddAndUseClass2] [file a.py] [file a.py.3] from b import Foo def bar(f: Foo) -> None: f.foo(12) [file b.py.2] class Foo: def foo(self, s: str) -> None: pass [out] == == a.py:3: error: Argument 1 to "foo" of "Foo" has incompatible type "int"; expected "str" [case testAddAndUseClass3] # flags: --ignore-missing-imports [file a.py] [file a.py.2] from b import Foo def bar(f: Foo) -> None: f.foo(12) [file b.py.3] class Foo: def foo(self, s: str) -> None: pass [out] == == a.py:3: error: Argument 1 to "foo" of "Foo" has incompatible type "int"; expected "str" [case testAddAndUseClass4] [file a.py] [file a.py.2] from b import * def bar(f: Foo) -> None: f.foo(12) [file b.py.2] class Foo: def foo(self, s: str) -> None: pass [out] == a.py:3: error: Argument 1 to "foo" of "Foo" has incompatible type "int"; expected "str" [case testAddAndUseClass4] [file a.py] [file a.py.2] from p.b import * def bar(f: Foo) -> None: f.foo(12) [file p/__init__.py] [file p/b.py.2] class Foo: def foo(self, s: str) -> None: pass [out] == a.py:3: error: Argument 1 to "foo" of "Foo" has incompatible type "int"; expected "str" [case testAddAndUseClass5] [file a.py] [file a.py.2] from b import * def bar(f: Foo) -> None: f.foo(12) [file b.py.2] class Foo: def foo(self, s: str) 
-> None: pass [out] == a.py:3: error: Argument 1 to "foo" of "Foo" has incompatible type "int"; expected "str" [case testSkipButDontIgnore1] # cmd: mypy a.py c.py # flags: --follow-imports=skip [file a.py] import b from c import x [file b.py] 1+'lol' [file c.py] x = 1 [file c.py.2] x = '2' [file b.py.3] [out] == == [case testSkipButDontIgnore2] # cmd: mypy a.py c.py # flags: --follow-imports=skip [file a.py] from c import x import b [file b.py] [file c.py] x = 1 [file b.py] 1+'x' [file c.py.2] x = '2' [file c.py.3] x = 2 [delete b.py.3] [out] == == a.py:2: error: Cannot find implementation or library stub for module named 'b' a.py:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testErrorButDontIgnore1] # cmd: mypy a.py c.py # flags: --follow-imports=error [file a.py] from c import x import b [file b.py] [file c.py] x = 1 [file c.py.2] x = '2' [out] a.py:2: error: Import of 'b' ignored a.py:2: note: (Using --follow-imports=error, module not passed on command line) == a.py:2: error: Import of 'b' ignored a.py:2: note: (Using --follow-imports=error, module not passed on command line) [case testErrorButDontIgnore2] # cmd1: mypy a.py c.py b.py # cmd2: mypy a.py c.py # flags: --follow-imports=error [file a.py] from c import x import b [file b.py] [file c.py] x = 1 [file c.py.2] x = '2' [out] == a.py:2: error: Import of 'b' ignored a.py:2: note: (Using --follow-imports=error, module not passed on command line) -- TODO: This test fails because p.b does not depend on p (#4847) [case testErrorButDontIgnore3-skip] # cmd1: mypy a.py c.py p/b.py p/__init__.py # cmd2: mypy a.py c.py p/b.py # flags: --follow-imports=error [file a.py] from c import x from p.b import y [file p/b.py] y = 12 [file p/__init__.py] [file c.py] x = 1 [file c.py.2] x = '2' [out] == p/b.py: error: Ancestor package 'p' ignored p/b.py: note: (Using --follow-imports=error, submodule passed on command line) [case testErrorButDontIgnore4] # cmd: mypy a.py z.py p/b.py 
p/__init__.py # cmd2: mypy a.py p/b.py # flags: --follow-imports=error [file a.py] from p.b import y [file p/b.py] from z import x y = 12 [file p/__init__.py] [file z.py] x = 1 [delete z.py.2] [out] == p/b.py: error: Ancestor package 'p' ignored p/b.py: note: (Using --follow-imports=error, submodule passed on command line) p/b.py:1: error: Cannot find implementation or library stub for module named 'z' p/b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testTurnPackageToModule] [file a.py] [file b.py] import p x = p.f() [file p/__init__.py] def f() -> int: pass [delete p/__init__.py.2] [file p.py.2] def f() -> str: pass [file a.py.3] import b reveal_type(b.x) [out] == == a.py:2: note: Revealed type is 'builtins.str' [case testModuleToPackage] [file a.py] [file b.py] import p x = p.f() [file p.py] def f() -> str: pass [delete p.py.2] [file p/__init__.py.2] def f() -> int: pass [file a.py.3] import b reveal_type(b.x) [out] == == a.py:2: note: Revealed type is 'builtins.int' [case testQualifiedSubpackage1] [file c/__init__.py] [file c/a.py] from lurr import x from c.d import f [file c/d.py] def f() -> None: pass def g(x: int) -> None: pass [file lurr.py] x = 10 [file lurr.py.2] x = '10' [out] == [case testImportedMissingSubpackage] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy a.py b/__init__.py [file a.py] from b.foo import bar x = 10 [file b/__init__.py] [file a.py.2] from b.foo import bar x = '10' [out] == [case testFineAddedMissingStubs] # flags: --ignore-missing-imports from missing import f f(int()) [file missing.pyi.2] def f(x: str) -> None: pass [out] == main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testFineAddedMissingStubsPackage] # flags: --ignore-missing-imports import package.missing package.missing.f(int()) [file package/__init__.pyi.2] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] == main:3: error: Argument 1 to "f" has incompatible 
type "int"; expected "str" [case testFineAddedMissingStubsPackageFrom] # flags: --ignore-missing-imports from package import missing missing.f(int()) [file package/__init__.pyi.2] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] == main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testFineAddedMissingStubsPackagePartial] # flags: --ignore-missing-imports import package.missing package.missing.f(int()) [file package/__init__.pyi] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] == main:3: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testFineAddedMissingStubsPackagePartialGetAttr] import package.missing package.missing.f(int()) [file package/__init__.pyi] from typing import Any def __getattr__(attr: str) -> Any: ... [file package/missing.pyi.2] def f(x: str) -> None: pass [out] == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testFineAddedMissingStubsIgnore] from missing import f # type: ignore f(int()) [file missing.pyi.2] def f(x: str) -> None: pass [out] == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testFineAddedMissingStubsIgnorePackage] import package.missing # type: ignore package.missing.f(int()) [file package/__init__.pyi.2] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testFineAddedMissingStubsIgnorePackageFrom] from package import missing # type: ignore missing.f(int()) [file package/__init__.pyi.2] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testFineAddedMissingStubsIgnorePackagePartial] import package.missing # type: ignore package.missing.f(int()) [file package/__init__.pyi] [file package/missing.pyi.2] def f(x: str) -> None: pass [out] == main:2: error: Argument 1 to "f" has 
incompatible type "int"; expected "str" [case testFineFollowImportSkipNotInvalidatedOnPresent] # flags: --follow-imports=skip # cmd: mypy main.py [file main.py] import other [file other.py] x = 1 [file other.py.2] x = 'hi' [stale] [rechecked] [out] == [case testFineFollowImportSkipNotInvalidatedOnPresentPackage] # flags: --follow-imports=skip # cmd: mypy main.py [file main.py] import other [file other/__init__.py] x = 1 [file other/__init__.py.2] x = 'hi' [stale] [rechecked] [out] == [case testFineFollowImportSkipNotInvalidatedOnAdded] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy main.py [file main.py] import other [file other.py.2] x = 1 [stale] [rechecked] [out] == -- TODO: Fix this: stubs should be followed normally even with follow-imports=skip [case testFineFollowImportSkipInvalidatedOnAddedStub-skip] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy main.py [file main.py] import other x: str = other.x [file other.pyi.2] x = 1 [stale main, other] [rechecked main, other] [out] == main:2: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testFineFollowImportSkipNotInvalidatedOnAddedStubOnFollowForStubs] # flags: --follow-imports=skip --ignore-missing-imports --config-file=tmp/mypy.ini # cmd: mypy main.py [file main.py] import other [file other.pyi.2] x = 1 [file mypy.ini] \[mypy] follow_imports_for_stubs = True [stale] [rechecked] [out] == [case testFineAddedSkippedStubsPackageFrom] # flags: --follow-imports=skip --ignore-missing-imports # cmd: mypy main.py # cmd2: mypy main.py package/__init__.py package/missing.py [file main.py] from package import missing missing.f(int()) [file package/__init__.py] [file package/missing.py] def f(x: str) -> None: pass [out] == main.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testIgnoredAttrReprocessedModule] import a [file a.py] import b x = b.x # type: ignore y: int = x [file b.py] import c [file 
b.py.2] import c x = c.x [file c.py] x: str [out] == a.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int") mypy-0.761/test-data/unit/fine-grained-suggest.test0000644€tŠÔÚ€2›s®0000004756013576752246026511 0ustar jukkaDROPBOX\Domain Users00000000000000[case testSuggestCallsites1] # suggest: --callsites foo.foo [file foo.py] def foo(arg): return 12 var = 0 [file bar.py] from foo import foo def bar() -> None: foo('abc') foo(arg='xyz') args = [''] foo(*args) kwds = {'arg': ''} foo(**kwds) [builtins fixtures/dict.pyi] [out] bar.py:3: (str) bar.py:4: (arg=str) bar.py:6: (*typing.List[str]) bar.py:8: (**typing.Dict[str, str]) == [case testSuggestCallsitesStep2] # suggest2: --callsites foo.foo [file foo.py] def foo(arg): return 12 var = 0 [file bar.py] from foo import foo def bar() -> None: foo('abc') foo(arg='xyz') args = [''] foo(*args) kwds = {'arg': ''} foo(**kwds) [builtins fixtures/dict.pyi] [out] == bar.py:3: (str) bar.py:4: (arg=str) bar.py:6: (*typing.List[str]) bar.py:8: (**typing.Dict[str, str]) [case testMaxGuesses] # suggest: foo.foo # suggest: --max-guesses=2 foo.foo [file foo.py] # The idea here is that we can only find the union type with more guesses. 
def foo(x, y): if not isinstance(x, int): x+'1' foo(1, 2) foo('3', '4') [builtins fixtures/isinstancelist.pyi] [out] (Union[int, str], object) -> None (object, object) -> None == [case testSuggestInferFunc1] # flags: --strict-optional # suggest: foo.foo [file foo.py] def foo(arg, lol=None): if isinstance(arg, int): arg+1 else: assert arg arg+'1' [file bar.py] from foo import foo def bar() -> None: foo('abc') foo(lol=10, arg=10) foo(None) def untyped(x) -> None: foo(x) [builtins fixtures/isinstancelist.pyi] [out] (Union[str, int, None], Optional[int]) -> None == [case testSuggestInferFunc2] # flags: --strict-optional # suggest: foo.foo [file foo.py] def foo(arg): return arg [file bar.py] from foo import foo def thing() -> str: return '' def bar() -> None: # We stick it in a list so that the argument type is marked as "inferred", # which we want to make sure doesn't show up. x = ['hello'] foo(x[0]) [builtins fixtures/isinstancelist.pyi] [out] (str) -> str == [case testSuggestInferFuncAny1] # suggest: foo.foo # suggest: foo.bar [file foo.py] def foo(arg): return arg.x def bar(arg): pass [file bar.py] from foo import bar bar(None) [out] (Any) -> Any (Optional[Any]) -> None == [case testSuggestInferFuncAny2] # suggest: --no-any foo.foo # suggest: --no-any foo.bar [file foo.py] def foo(arg): return arg.x def bar(arg): pass [file bar.py] from foo import bar bar(None) [out] No guesses that match criteria! No guesses that match criteria! == [case testSuggestInferTuple] # suggest: --no-any foo.foo [file foo.py] def foo(): return 1, "1" [out] () -> Tuple[int, str] == [case testSuggestInferNamedTuple] # suggest: foo.foo [file foo.py] from typing import NamedTuple N = NamedTuple('N', [('x', int)]) def foo(): return N(1) [out] () -> foo.N == [case testSuggestInferTypedDict] # suggest: foo.foo [file foo.py] from typing_extensions import TypedDict TD = TypedDict('TD', {'x': int}) def foo(): return bar() def bar() -> TD: ... 
[builtins fixtures/dict.pyi] [out] () -> foo.TD == [case testSuggestWithNested] # suggest: foo.foo [file foo.py] def foo(): def bar(): return 1 return 'lol' [out] () -> str == [case testSuggestReexportNaming] # suggest: foo.foo [file foo.py] from bar import A def foo(x): return A(), A.C() [file bar.py] from baz import A [file baz.py] class A: class C: ... class B: ... [file caller.py] from foo import foo from baz import B foo(B()) [out] (baz.B) -> Tuple[foo.A, foo:A.C] == [case testSuggestInferInit] # suggest: foo.Foo.__init__ [file foo.py] class Foo: def __init__(self, arg): self.arg = arg [file bar.py] from foo import Foo Foo('lol') [out] (str) -> None == [case testSuggestTryText] # flags: --py2 # suggest: --try-text foo.foo [file foo.py] def foo(s): return s [file bar.py] from foo import foo foo('lol') [out] (Text) -> Text == [case testSuggestInferMethod1] # flags: --strict-optional # suggest: --no-any foo.Foo.foo [file foo.py] class Foo: def __init__(self) -> None: self.y = '10' def foo(self, arg, lol=None): if isinstance(arg, int): return arg+1 else: assert arg return arg+self.y [file bar.py] from foo import Foo def bar() -> None: x = Foo() x.foo('abc') x.foo(lol=10, arg=10) x.foo(None) [builtins fixtures/isinstancelist.pyi] [out] (Union[str, int, None], Optional[int]) -> object == [case testSuggestInferMethod2] # flags: --strict-optional # suggest: foo.Foo.foo [file foo.py] class Foo: def i(self, x: int) -> int: return x def s(self, x: str) -> str: return x def foo(self, arg, lol=None): if isinstance(arg, int): return self.i(arg) else: assert arg return self.s(arg) [file bar.py] from typing import Union from foo import Foo def bar() -> None: x = Foo() x.foo('abc') x.foo(lol=10, arg=10) a: Union[str, int] = x.foo(None) [builtins fixtures/isinstancelist.pyi] [out] (Union[str, int, None], Optional[int]) -> Union[int, str] == [case testSuggestInferMethod3] # flags: --strict-optional # suggest2: foo.Foo.foo [file foo.py] class Foo: def foo(self, lol = None): pass 
def lol(self) -> None: self.foo('lol') [file bar.py] from foo import Foo def bar() -> None: x = Foo() x.foo('abc') [builtins fixtures/isinstancelist.pyi] [out] == (Optional[str]) -> None [case testSuggestBackflow] # suggest: foo.foo # suggest: foo.bar # suggest: foo.baz # suggest: foo.Cls.spam # suggest: foo.method # suggest: foo.meet # suggest: foo.has_nested [file foo.py] from typing import Any, List, Optional class A: pass class B(A): def test(self, x: A) -> None: pass def take_optional_a(x: Optional[A]) -> None: pass def take_a(x: A) -> None: pass def take_b(x: B) -> None: pass def take_any(x: Any) -> None: pass def take_kws(a: A, b: B) -> None: pass def take_star(*a: A) -> None: pass def foo(x): take_b(x) def bar(x): take_b(x) bar(A()) def baz(x, y): take_kws(a=y, b=x) class Cls: def spam(self, x, y): take_star(x, y) def method(x): b = B() b.test(x) def meet(x, y): take_a(x) take_b(x) take_a(y) take_optional_a(y) take_any(y) foo(y) # unannotated def has_nested(x): def nested(): take_b(x) [builtins fixtures/isinstancelist.pyi] [out] (foo.B) -> None (foo.A) -> None (foo.B, foo.A) -> None (foo.A, foo.A) -> None (foo.A) -> None (foo.B, foo.A) -> None (foo.B) -> None == [case testSuggestInferFunctionUnreachable] # flags: --strict-optional # suggest: foo.foo [file foo.py] import sys def foo(lol): if sys.platform == 'nothing': return lol else: return lol + lol [file bar.py] from foo import foo foo('test') [builtins fixtures/isinstancelist.pyi] [out] (str) -> str == [case testSuggestInferMethodStep2] # flags: --strict-optional # suggest2: foo.Foo.foo [file foo.py] class Foo: def i(self, x: int) -> int: return x def s(self, x: str) -> str: return x def foo(self, arg, lol=None): if isinstance(arg, int): return self.i(arg) else: assert arg return self.s(arg) [file bar.py] from typing import Union from foo import Foo def bar() -> None: x = Foo() x.foo('abc') x.foo(lol=10, arg=10) a: Union[str, int] = x.foo(None) [builtins fixtures/isinstancelist.pyi] [out] == (Union[str, 
int, None], Optional[int]) -> Union[int, str] [case testSuggestInferNestedMethod] # flags: --strict-optional # suggest: foo.Foo.Bar.baz [file foo.py] class Foo: class Bar: def baz(self, lol): return lol [file bar.py] from foo import Foo def bar() -> None: x = Foo.Bar() x.baz('abc') [builtins fixtures/isinstancelist.pyi] [out] (str) -> str == [case testSuggestCallable] # flags: --strict-optional # suggest: foo.foo # suggest: foo.bar # suggest: --flex-any=0.9 foo.bar # suggest: foo.baz # suggest: foo.quux [file foo.py] def foo(f): return f(0, "lol") def bar(f): return f(0, "lol", 100) def baz(f): return f(y=1) + f(x=10, y=1) def quux(f): return f(1) [file bar.py] from typing import Any from foo import foo, bar, baz, quux def whatever(x: int, y: str) -> int: return 0 def starargs(*args: Any) -> int: return 0 def named(*, x: int = 0, y: int) -> str: return '0' # we don't properly handle default really. we just assume it is # actually required. def default(x: int = 0) -> str: return '0' def test() -> None: foo(whatever) bar(starargs) baz(named) quux(default) [out] (Callable[[int, str], int]) -> int (Callable[..., int]) -> int No guesses that match criteria! 
(Callable[..., str]) -> str (Callable[[int], str]) -> str == [case testSuggestNewSemanal] # flags: --strict-optional # suggest: foo.Foo.foo # suggest: foo.foo [file foo.py] class Foo: def __init__(self) -> None: self.y = '10' def foo(self, arg, lol=None): if isinstance(arg, int): return arg+1 else: assert arg return arg+self.y def foo(arg, lol=None): if isinstance(arg, int): arg+1 else: assert arg arg+'1' [file bar.py] from foo import Foo, foo def bar() -> None: x = Foo() x.foo('abc') x.foo(lol=10, arg=10) x.foo(None) def baz() -> None: foo('abc') foo(lol=10, arg=10) foo(None) [builtins fixtures/isinstancelist.pyi] [out] (Union[str, int, None], Optional[int]) -> object (Union[str, int, None], Optional[int]) -> None == [case testSuggestInferFuncDecorator1] # flags: --strict-optional # suggest: foo.foo [file foo.py] from typing import TypeVar F = TypeVar('F') def dec(x: F) -> F: return x @dec def foo(arg): return arg [file bar.py] from foo import foo def bar() -> None: foo('abc') [builtins fixtures/isinstancelist.pyi] [out] (str) -> str == [case testSuggestInferFuncDecorator2] # flags: --strict-optional # suggest: foo.foo [file foo.py] from typing import TypeVar, Callable, Any F = TypeVar('F', bound=Callable[..., Any]) def dec(x: F) -> F: return x @dec def foo(arg): return arg [file bar.py] from foo import foo def bar() -> None: foo('abc') [builtins fixtures/isinstancelist.pyi] [out] (str) -> str == [case testSuggestInferFuncDecorator3] # flags: --strict-optional # suggest: foo.foo [file foo.py] from typing import TypeVar, Callable, Any F = TypeVar('F', bound=Callable[..., Any]) def dec(s: str) -> Callable[[F], F]: def f(x: F) -> F: return x return f @dec('lol') def foo(arg): return arg [file bar.py] from foo import foo def bar() -> None: foo('abc') [builtins fixtures/isinstancelist.pyi] [out] (str) -> str == [case testSuggestInferFuncDecorator4] # flags: --strict-optional # suggest: foo.foo [file dec.py] from typing import TypeVar, Callable, Any F = TypeVar('F', 
bound=Callable[..., Any]) def dec(s: str) -> Callable[[F], F]: def f(x: F) -> F: return x return f [file foo.py] import dec @dec.dec('lol') def foo(arg): return arg [file bar.py] from foo import foo def bar() -> None: foo('abc') [builtins fixtures/isinstancelist.pyi] [out] (str) -> str == [case testSuggestFlexAny1] # flags: --strict-optional # suggest: --flex-any=0.4 m.foo # suggest: --flex-any=0.7 m.foo # suggest: --flex-any=0.4 m.bar # suggest: --flex-any=0.6 m.bar # suggest2: --flex-any=0.4 m.foo # suggest2: --flex-any=0.7 m.foo [file m.py] from typing import Any any: Any def foo(arg): return 0 def bar(x, y): return any [file n.py] from typing import Any any: Any from m import foo, bar def wtvr() -> None: foo(any) bar(1, 2) [file n.py.2] from typing import Any any: Any from m import foo, bar def wtvr() -> None: foo([any]) [builtins fixtures/isinstancelist.pyi] [out] (Any) -> int No guesses that match criteria! (int, int) -> Any No guesses that match criteria! == (typing.List[Any]) -> int (typing.List[Any]) -> int [case testSuggestFlexAny2] # flags: --strict-optional # suggest: --flex-any=0.5 m.baz # suggest: --flex-any=0.0 m.baz # suggest: --flex-any=0.5 m.F.foo # suggest: --flex-any=0.7 m.F.foo # suggest: --flex-any=0.7 m.noargs [file m.py] # Test mostly corner cases # Test that a None return doesn't get counted def baz(x): pass class F: # Test that self doesn't get counted def foo(self, x): return 0 # Make sure we don't crash on noarg functions def noargs(): pass [builtins fixtures/isinstancelist.pyi] [out] No guesses that match criteria! (Any) -> None (Any) -> int No guesses that match criteria! 
() -> None == [case testSuggestClassMethod] # flags: --strict-optional # suggest: foo.F.bar # suggest: foo.F.baz # suggest: foo.F.eggs [file foo.py] class F: @classmethod def bar(cls, x, y): return x @staticmethod def baz(x, y): return x @classmethod def spam(cls): # type: () -> None cls.eggs(4) @classmethod def eggs(cls, x): pass [file bar.py] from foo import F def bar(iany) -> None: F.bar(0, iany) F().bar(0, 5) F.baz('lol', iany) F().baz('lol', 10) [builtins fixtures/classmethod.pyi] [out] (int, int) -> int (str, int) -> str (int) -> None == [case testSuggestColonBasic] # suggest: tmp/foo.py:1 # suggest: tmp/bar/baz.py:2 [file foo.py] def func(arg): return 0 func('test') from bar.baz import C C().method('test') [file bar/__init__.py] [file bar/baz.py] class C: def method(self, x): return 0 [out] (str) -> int (str) -> int == [case testSuggestColonAfter] # suggest: tmp/foo.py:6 # suggest: tmp/foo.py:15 # suggest: tmp/foo.py:16 # suggest: tmp/foo.py:18 [file foo.py] from typing import TypeVar F = TypeVar('F') def foo(): # hi return 1 def dec(x: F) -> F: return x class A: @dec def bar(self): return 1.0 @dec def baz(): return 'test' [out] () -> int () -> float () -> str () -> str == [case testSuggestParent] # suggest: foo.B.foo # suggest: foo.B.bar # suggest: foo.C.foo [file foo.py] from typing import TypeVar, Callable, Any F = TypeVar('F', bound=Callable[..., Any]) def deco(f: F) -> F: ... class A: def foo(self, x: int) -> float: return 0.0 @deco def bar(self, x: int) -> float: return 0.0 class B(A): def foo(self, x): return 0.0 @deco def bar(self, x): return 0.0 class C(B): def foo(self, x): return 0.0 [out] (int) -> float (int) -> float (int) -> float == [case testSuggestColonBadLocation] # suggest: tmp/foo.py:7:8:9 [file foo.py] [out] Malformed location for function: tmp/foo.py:7:8:9. Must be either package.module.Class.method or path/to/file.py:line == [case testSuggestColonBadLine] # suggest: tmp/foo.py:bad [file foo.py] [out] Line number must be a number. 
Got bad == [case testSuggestColonBadFile] # suggest: tmp/foo.txt:1 [file foo.txt] def f(): pass [out] Source file is not a Python file == [case testSuggestColonClass] # suggest: tmp/foo.py:1 [file foo.py] class C: pass [out] Cannot find a function at line 1 == [case testSuggestColonDecorator] # suggest: tmp/foo.py:6 [file foo.py] from typing import TypeVar, Callable, Any F = TypeVar('F', bound=Callable[..., Any]) def deco(f: F) -> F: ... @deco def func(arg): return 0 func('test') [out] (str) -> int == [case testSuggestColonMethod] # suggest: tmp/foo.py:3 [file foo.py] class Out: class In: def method(self, x): return Out() x: Out.In x.method(x) [out] (foo:Out.In) -> foo.Out == [case testSuggestColonMethodJSON] # suggest: --json tmp/foo.py:3 [file foo.py] class Out: class In: def method(self, x): return Out() x: Out.In x.method(x) [out] \[{"func_name": "Out.In.method", "line": 3, "path": "tmp/foo.py", "samples": 0, "signature": {"arg_types": ["foo:Out.In"], "return_type": "foo.Out"}}] == [case testSuggestColonNonPackageDir] # cmd: mypy foo/bar/baz.py # suggest: tmp/foo/bar/baz.py:1 [file foo/bar/baz.py] def func(arg): return 0 func('test') [out] (str) -> int == [case testSuggestUseFixmeBasic] # suggest: --use-fixme=UNKNOWN foo.foo # suggest: --use-fixme=UNKNOWN foo.bar [file foo.py] def foo(): return g() def bar(x): return None def g(): ... x = bar(g()) [out] () -> UNKNOWN (UNKNOWN) -> None == [case testSuggestUseFixmeNoNested] # suggest: --use-fixme=UNKNOWN foo.foo [file foo.py] from typing import List, Any def foo(x, y): return x, y def f() -> List[Any]: ... def g(): ... 
z = foo(f(), g()) [builtins fixtures/isinstancelist.pyi] [out] (foo.List[Any], UNKNOWN) -> Tuple[foo.List[Any], Any] == [case testSuggestBadImport] # suggest: foo.foo [file foo.py] from nothing import Foo # type: ignore def foo(x: Foo): return 10 [out] (foo.Foo) -> int == [case testSuggestDict] # suggest: foo.foo # suggest: foo.bar # suggest: foo.baz # suggest: foo.quux # suggest: foo.spam [file foo.py] from typing import List, Any def foo(): return {'x': 5} def bar(): return {} def baz() -> List[Any]: return [{'x': 5}] def quux() -> List[Any]: return [1] def spam(x): pass spam({'x': 5}) [builtins fixtures/dict.pyi] [out] () -> typing.Dict[str, int] () -> typing.Dict[Any, Any] () -> foo:List[typing.Dict[str, int]] () -> foo.List[int] (typing.Dict[str, int]) -> None == [case testSuggestWithErrors] # suggest: foo.foo [file foo.py] 1+'no' def foo(): return 10 [out] foo.py:1: error: Unsupported operand types for + ("int" and "str") () -> int == foo.py:1: error: Unsupported operand types for + ("int" and "str") [case testSuggestWithBlockingError] # suggest: foo.foo [file foo.py] def foo(): return 10 ( [out] foo.py:4: error: unexpected EOF while parsing Command 'suggest' is only valid after a 'check' command (that produces no parse errors) == foo.py:4: error: unexpected EOF while parsing -- ) [case testSuggestRefine] # suggest: foo.foo # suggest: foo.spam # suggest: foo.eggs # suggest: foo.take_l # suggest: foo.union # suggest: foo.callable1 # suggest: foo.callable2 # suggest: foo.optional1 # suggest: foo.optional2 # suggest: foo.optional3 # suggest: foo.optional4 # suggest: foo.optional5 # suggest: foo.optional_any # suggest: foo.dict1 # suggest: foo.tuple1 [file foo.py] from typing import Any, List, Union, Callable, Optional, Set, Dict, Tuple def bar(): return 10 def foo(x: int, y): return x + y foo(bar(), 10) def spam(x: int, y: Any) -> Any: return x + y spam(bar(), 20) def eggs(x: int) -> List[Any]: a = [x] return a def take_l(x: List[Any]) -> Any: return x[0] test = 
[10, 20] take_l(test) def union(x: Union[int, str]): pass union(10) def add1(x: float) -> int: pass def callable1(f: Callable[[int], Any]): return f(10) callable1(add1) def callable2(f: Callable[..., Any]): return f(10) callable2(add1) def optional1(x: Optional[Any]): pass optional1(10) def optional2(x: Union[None, int, Any]): if x is None: pass elif isinstance(x, str): pass else: add1(x) optional2(10) optional2('test') def optional3(x: Optional[List[Any]]): assert not x return x[0] optional3(test) set_test = {1, 2} def optional4(x: Union[Set[Any], List[Any]]): pass optional4(test) optional4(set_test) def optional5(x: Optional[Any]): pass optional5(10) optional5(None) def optional_any(x: Optional[Any] = None): pass def dict1(d: Dict[int, Any]): pass d: Dict[Any, int] dict1(d) def tuple1(d: Tuple[int, Any]): pass t: Tuple[Any, int] tuple1(t) [builtins fixtures/isinstancelist.pyi] [out] (int, int) -> int (int, int) -> int (int) -> foo.List[int] (foo.List[int]) -> int (Union[int, str]) -> None (Callable[[int], int]) -> int (Callable[[float], int]) -> int (Optional[int]) -> None (Union[None, int, str]) -> None (Optional[foo.List[int]]) -> int (Union[foo.Set[int], foo.List[int]]) -> None (Optional[int]) -> None (Optional[Any]) -> None (foo.Dict[int, int]) -> None (Tuple[int, int]) -> None == [case testSuggestRefine2] # suggest: foo.optional5 [file foo.py] from typing import Optional, Any def optional5(x: Optional[Any]): pass optional5(10) optional5(None) [builtins fixtures/isinstancelist.pyi] [out] (Optional[int]) -> None == mypy-0.761/test-data/unit/fine-grained.test0000644€tŠÔÚ€2›s®0000052352013576752246025025 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for fine-grained incremental checking -- -- Test cases may define multiple versions of a file -- (e.g. m.py, m.py.2). 
There is always an initial batch -- pass that processes all files present initially, followed -- by one or more fine-grained incremental passes that use -- alternative versions of files, if available. If a file -- just has a single .py version, it is used for all passes. -- TODO: what if version for some passes but not all -- Output is laid out like this: -- -- [out] -- -- == -- -- -- -- Modules that are expected to be detected as changed by dmypy_server -- can be checked with [stale ...] -- Generally this should mean added, deleted, or changed files, though there -- are important edge cases related to the cache: deleted files won't be detected -- as changed in the initial run with the cache while modules that depended on them -- should be. -- -- Modules that are require a full-module reprocessing by update can be checked with -- [rechecked ...]. This should include any files detected as having changed as well -- as any files that contain targets that need to be reprocessed but which haven't -- been loaded yet. If there is no [rechecked...] directive, it inherits the value of -- [stale ...]. -- -- Specifications for later runs can be given with [stale2 ...], [stale3 ...], etc. -- -- Test runner can parse options from mypy.ini file. Updating this file in between -- incremental runs is not yet supported. -- -- Each test case run without caching and with caching (if the initial run passes), -- unless it has one a -only_when_cache or -only_when_nocache arguments. We sometimes -- skip caching test cases to speed up tests, if the caching variant is not useful. -- The caching test case variants get an implicit _cached suffix. 
[case testReprocessFunction] import m def g() -> int: return m.f() [file m.py] def f() -> int: pass [file m.py.2] def f() -> str: pass [out] == main:3: error: Incompatible return value type (got "str", expected "int") [case testReprocessTopLevel] import m m.f(1) def g() -> None: pass [file m.py] def f(x: int) -> None: pass [file m.py.2] def f(x: str) -> None: pass [out] == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testReprocessMethod] import m class B: def f(self, a: m.A) -> None: a.g() # E [file m.py] class A: def g(self) -> None: pass [file m.py.2] class A: def g(self, a: A) -> None: pass [out] == main:4: error: Too few arguments for "g" of "A" [case testReprocessMethodShowSource] # flags: --pretty --show-error-codes import m class B: def f(self, a: m.A) -> None: a.g() # E [file m.py] class A: def g(self) -> None: pass [file m.py.2] class A: def g(self, a: A) -> None: pass [out] == main:5: error: Too few arguments for "g" of "A" [call-arg] a.g() # E ^ [case testFunctionMissingModuleAttribute] import m def h() -> None: m.f(1) [file m.py] def f(x: int) -> None: pass [file m.py.2] def g(x: str) -> None: pass [builtins fixtures/fine_grained.pyi] [out] == main:3: error: Module has no attribute "f" [case testTopLevelMissingModuleAttribute] import m m.f(1) def g() -> None: pass [file m.py] def f(x: int) -> None: pass [file m.py.2] def g(x: int) -> None: pass [builtins fixtures/fine_grained.pyi] [out] == main:2: error: Module has no attribute "f" [case testClassChangedIntoFunction] import m def f(a: m.A) -> None: pass [file m.py] class A: pass [file m.py.2] def A() -> None: pass [out] == main:3: error: Function "m.A" is not valid as a type main:3: note: Perhaps you need "Callable[...]" or a callback protocol? 
[case testClassChangedIntoFunction2] import m class B: def f(self, a: m.A) -> None: pass [file m.py] class A: pass [file m.py.2] def A() -> None: pass [file n.py.3] [out] == main:4: error: Function "m.A" is not valid as a type main:4: note: Perhaps you need "Callable[...]" or a callback protocol? == main:4: error: Function "m.A" is not valid as a type main:4: note: Perhaps you need "Callable[...]" or a callback protocol? [case testAttributeTypeChanged] import m def f(a: m.A) -> int: return a.x [file m.py] class A: def f(self) -> None: self.x = 1 [file m.py.2] class A: def f(self) -> None: self.x = 'x' [out] == main:3: error: Incompatible return value type (got "str", expected "int") [case testAttributeRemoved] import m def f(a: m.A) -> int: return a.x [file m.py] class A: def f(self) -> None: self.x = 1 [file m.py.2] class A: def f(self) -> None: pass [out] == main:3: error: "A" has no attribute "x" [case testVariableTypeBecomesInvalid] import m def f() -> None: a = None # type: m.A [file m.py] class A: pass [file m.py.2] [out] == main:3: error: Name 'm.A' is not defined [case testTwoIncrementalSteps] import m import n [file m.py] def f() -> None: pass [file n.py] import m def g() -> None: m.f() # E [file m.py.2] import n def f(x: int) -> None: n.g() # E [file n.py.3] import m def g(a: str) -> None: m.f('') # E [out] == n.py:3: error: Too few arguments for "f" == n.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" m.py:3: error: Too few arguments for "g" [case testTwoRounds] import m def h(a: m.A) -> int: return a.x [file m.py] import n class A: def g(self, b: n.B) -> None: self.x = b.f() [file n.py] class B: def f(self) -> int: pass [file n.py.2] class B: def f(self) -> str: pass [out] == main:3: error: Incompatible return value type (got "str", expected "int") [case testFixTypeError] import m def f(a: m.A) -> None: a.f(a) [file m.py] class A: def f(self, a: 'A') -> None: pass [file m.py.2] class A: def f(self) -> None: pass [file m.py.3] 
class A: def f(self, a: 'A') -> None: pass [out] == main:3: error: Too many arguments for "f" of "A" == [case testFixTypeError2] import m def f(a: m.A) -> None: a.f() [file m.py] class A: def f(self) -> None: pass [file m.py.2] class A: def g(self) -> None: pass [file m.py.3] class A: def f(self) -> None: pass [out] == main:3: error: "A" has no attribute "f" == [case testFixSemanticAnalysisError] import m def f() -> None: m.A() [file m.py] class A: pass [file m.py.2] class B: pass [file m.py.3] class A: pass [builtins fixtures/fine_grained.pyi] [out] == main:3: error: Module has no attribute "A" == [case testContinueToReportTypeCheckError] import m def f(a: m.A) -> None: a.f() def g(a: m.A) -> None: a.g() [file m.py] class A: def f(self) -> None: pass def g(self) -> None: pass [file m.py.2] class A: pass [file m.py.3] class A: def f(self) -> None: pass [out] == main:3: error: "A" has no attribute "f" main:5: error: "A" has no attribute "g" == main:5: error: "A" has no attribute "g" [case testContinueToReportSemanticAnalysisError] import m def f() -> None: m.A() def g() -> None: m.B() [file m.py] class A: pass class B: pass [file m.py.2] [file m.py.3] class A: pass [builtins fixtures/fine_grained.pyi] [out] == main:3: error: Module has no attribute "A" main:5: error: Module has no attribute "B" == main:5: error: Module has no attribute "B" [case testContinueToReportErrorAtTopLevel-only_when_nocache] -- Different cache/no-cache tests because: -- Error message ordering differs import n import m m.A().f() [file n.py] import m m.A().g() [file m.py] class A: def f(self) -> None: pass def g(self) -> None: pass [file m.py.2] class A: pass [file m.py.3] class A: def f(self) -> None: pass [out] == main:3: error: "A" has no attribute "f" n.py:2: error: "A" has no attribute "g" == n.py:2: error: "A" has no attribute "g" [case testContinueToReportErrorAtTopLevel-only_when_cache] -- Different cache/no-cache tests because: -- Error message ordering differs import n import m 
m.A().f() [file n.py] import m m.A().g() [file m.py] class A: def f(self) -> None: pass def g(self) -> None: pass [file m.py.2] class A: pass [file m.py.3] class A: def f(self) -> None: pass [out] == n.py:2: error: "A" has no attribute "g" main:3: error: "A" has no attribute "f" == n.py:2: error: "A" has no attribute "g" [case testContinueToReportErrorInMethod] import m class C: def f(self, a: m.A) -> None: a.f() def g(self, a: m.A) -> None: a.g() [file m.py] class A: def f(self) -> None: pass def g(self) -> None: pass [file m.py.2] class A: pass [file m.py.3] class A: def f(self) -> None: pass [out] == main:4: error: "A" has no attribute "f" main:6: error: "A" has no attribute "g" == main:6: error: "A" has no attribute "g" [case testInitialBatchGeneratedError] import m def g() -> None: m.f() def h() -> None: m.g() [file m.py] def f(x: object) -> None: pass [file m.py.2] def f() -> None: pass [file m.py.3] def f() -> None: pass def g() -> None: pass [builtins fixtures/fine_grained.pyi] [out] main:3: error: Too few arguments for "f" main:5: error: Module has no attribute "g" == main:5: error: Module has no attribute "g" == [case testKeepReportingErrorIfNoChanges] import m def h() -> None: m.g() [file m.py] [file m.py.2] [builtins fixtures/fine_grained.pyi] [out] main:3: error: Module has no attribute "g" == main:3: error: Module has no attribute "g" [case testFixErrorAndReintroduce] import m def h() -> None: m.g() [file m.py] [file m.py.2] def g() -> None: pass [file m.py.3] [builtins fixtures/fine_grained.pyi] [out] main:3: error: Module has no attribute "g" == == main:3: error: Module has no attribute "g" [case testIgnoreWorksAfterUpdate] import a [file a.py] import b int() + str() # type: ignore [file b.py] x = 1 [file b.py.2] x = 2 [file b.py.3] x = 3 [delete b.py.4] [out] == == == a.py:1: error: Cannot find implementation or library stub for module named 'b' a.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case 
testIgnoreWorksWithMissingImports] import a [file a.py] import b import xyz # type: ignore xyz.whatever [file b.py] x = 1 [file b.py.2] x = 2 [file b.py.3] x = 3 [file xyz.py.4] [out] == == == a.py:3: error: "object" has no attribute "whatever" [case testAddedIgnoreWithMissingImports] import a [file a.py] from b import x y: int = x [file b.py] from xyz import x [file b.py.2] from xyz import x # type: ignore [file xyz.py.3] x = str() [out] b.py:1: error: Cannot find implementation or library stub for module named 'xyz' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == == a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testRemovedIgnoreWithMissingImport] import a [file a.py] from b import x y: int = x [file b.py] from xyz import x # type: ignore [file b.py.2] from xyz import x [file xyz.py.3] x = str() [out] == b.py:1: error: Cannot find implementation or library stub for module named 'xyz' b.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testRemovedModuleUnderIgnore] import a [file a.py] import c from b import x # type: ignore y: int = x [file b.py] x = str() [file c.py] x = 1 [delete b.py.2] [file c.py.3] x = 3 [out] a.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int") == == [case AddedModuleUnderIgnore] import a [file a.py] import c from b import x # type: ignore y: int = x [file c.py] x = 1 [file c.py.2] x = 2 [file b.py.3] # empty [out] == == [case testIgnoreInBetween] import a [file a.py] import b x: int = b.x [file b.py] import c x = c.C.x # type: ignore [file c.py] class C: pass [file c.py.2] class C: x: int [file c.py.3] # empty [file c.py.4] class C: x: str [out] == == == a.py:2: error: Incompatible types in assignment (expression has type "str", variable has 
type "int") [case testIgnoredAttrReprocessedModule] import a [file a.py] import b x = b.x # type: ignore y: int = x [file b.py] import c [file b.py.2] import c x = c.x [file c.py] x: str [out] == a.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testIgnoredAttrReprocessedBase] import a [file a.py] import b def fun() -> None: x = b.C.x # type: ignore y: int = x [file b.py] import c class C: pass [file b.py.2] import c class C(c.B): pass [file c.py] class B: x: str [out] == a.py:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testIgnoredAttrReprocessedMeta] import a [file a.py] import b def fun() -> None: x = b.C.x # type: ignore y: int = x [file b.py] import c class C: pass [file b.py.2] import c class C(metaclass=c.M): pass [file c.py] class M(type): x: str [out] == a.py:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testDataclassUpdate1] # flags: --python-version 3.7 [file a.py] from dataclasses import dataclass @dataclass class A: x: int [file b.py] from dataclasses import dataclass from a import A @dataclass class B(A): y: int B(1, 2) [file a.py.2] from dataclasses import dataclass @dataclass class A: x: str [file a.py.3] from dataclasses import dataclass @dataclass class A: x: int [out] == b.py:8: error: Argument 1 to "B" has incompatible type "int"; expected "str" == [builtins fixtures/list.pyi] [case testDataclassUpdate2] # flags: --python-version 3.7 [file c.py] Foo = int [file c.py.2] Foo = str [file a.py] from dataclasses import dataclass from c import Foo @dataclass class A: x: Foo [file b.py] from dataclasses import dataclass from a import A @dataclass class B(A): y: int B(1, 2) [out] == b.py:8: error: Argument 1 to "B" has incompatible type "int"; expected "str" [builtins fixtures/list.pyi] [case testDataclassUpdate3] # flags: --python-version 3.7 from b import B B(1, 2) [file b.py] from a 
import A from dataclasses import dataclass @dataclass class B(A): b: int [file a.py] from dataclasses import dataclass @dataclass class A: a: int [file a.py.2] from dataclasses import dataclass @dataclass class A: a: int other: int [builtins fixtures/list.pyi] [out] == main:3: error: Too few arguments for "B" [case testDataclassUpdate4] # flags: --python-version 3.7 from b import B B(1, 2) [file b.py] from a import A from dataclasses import dataclass @dataclass(frozen=True) class B(A): b: int [file a.py] from dataclasses import dataclass @dataclass(frozen=True) class A: a: int [file a.py.2] from dataclasses import dataclass @dataclass(frozen=True) class A: a: int other: int [builtins fixtures/list.pyi] [out] == main:3: error: Too few arguments for "B" [case testDataclassUpdate5] # flags: --python-version 3.7 from b import B B(1, 2) [file b.py] from a import A from dataclasses import dataclass @dataclass class B(A): b: int [file a.py] from dataclasses import dataclass @dataclass(init=False) class A: a: int [file a.py.2] from dataclasses import dataclass @dataclass(init=False) class A: a: int other: int [file a.py.3] from dataclasses import dataclass @dataclass(init=False) class A: a: int [builtins fixtures/list.pyi] [out] == main:3: error: Too few arguments for "B" == [case testDataclassUpdate6] # flags: --python-version 3.7 from b import B B(1, 2) < B(1, 2) [file b.py] from a import A from dataclasses import dataclass @dataclass class B(A): b: int [file a.py] from dataclasses import dataclass @dataclass(order=True) class A: a: int [file a.py.2] from dataclasses import dataclass @dataclass class A: a: int [builtins fixtures/list.pyi] [out] == main:3: error: Unsupported left operand type for < ("B") [case testDataclassUpdate8] # flags: --python-version 3.7 from c import C C(1, 2, 3) [file c.py] from b import B from dataclasses import dataclass @dataclass class C(B): c: int [file b.py] from a import A from dataclasses import dataclass @dataclass class B(A): b: int 
[file a.py] from dataclasses import dataclass @dataclass class A: a: int [file a.py.2] from dataclasses import dataclass @dataclass class A: a: int other: int [builtins fixtures/list.pyi] [out] == main:3: error: Too few arguments for "C" [case testDataclassUpdate9] # flags: --python-version 3.7 from c import C C(1, 2, 3) [file c.py] from b import B from dataclasses import dataclass @dataclass class C(B): c: int [file b.py] from a import A from dataclasses import dataclass @dataclass class B(A): b: int [file a.py] from dataclasses import dataclass @dataclass(init=False) class A: a: int [file a.py.2] from dataclasses import dataclass @dataclass(init=False) class A: a: int other: int [file a.py.3] from dataclasses import dataclass @dataclass(init=False) class A: a: int [builtins fixtures/list.pyi] [out] == main:3: error: Too few arguments for "C" == [case testAttrsUpdate1] [file a.py] import attr @attr.s class A: a = attr.ib() # type: int [file b.py] from a import A import attr @attr.s class B(A): b = attr.ib() # type: int B(1, 2) [file a.py.2] import attr @attr.s class A: a = attr.ib() # type: int other = attr.ib() # type: int [builtins fixtures/list.pyi] [out] == b.py:7: error: Too few arguments for "B" [case testAttrsUpdate2] from b import B B(1, 2) [file b.py] from a import A import attr @attr.s class B(A): b = attr.ib() # type: int [file a.py] import attr @attr.s(init=False) class A: a = attr.ib() # type: int [file a.py.2] import attr @attr.s(init=False) class A: a = attr.ib() # type: int other = attr.ib() # type: int [builtins fixtures/list.pyi] [out] == main:2: error: Too few arguments for "B" [case testAttrsUpdate3] from b import B B(1, 2) [file b.py] from a import A import attr @attr.s(auto_attribs=True) class B(A): x: int [file a.py] import attr @attr.s(auto_attribs=True, init=False) class A: a: int [file a.py.2] import attr @attr.s(auto_attribs=True, init=False) class A: a: int other: int [builtins fixtures/list.pyi] [file a.py.3] import attr 
@attr.s(auto_attribs=True, init=False) class A: a: int [builtins fixtures/list.pyi] [out] == main:2: error: Too few arguments for "B" == [case testAttrsUpdate4] from b import B B(1, 2) < B(1, 2) [file b.py] from a import A import attr @attr.s(eq=False) class B(A): b = attr.ib() # type: int [file a.py] import attr @attr.s(init=False) class A: a = attr.ib() # type: int [file a.py.2] import attr @attr.s(eq=False, init=False) class A: a = attr.ib() # type: int [builtins fixtures/list.pyi] [out] == main:2: error: Unsupported left operand type for < ("B") [case testAttrsUpdateKwOnly] [file a.py] import attr @attr.s(kw_only=True) class A: a = attr.ib(15) # type: int [file b.py] from a import A import attr @attr.s(kw_only=True) class B(A): b = attr.ib("16") # type: str [file b.py.2] from a import A import attr @attr.s() class B(A): b = attr.ib("16") # type: str B(b="foo", a=7) [builtins fixtures/attr.pyi] [out] == b.py:5: error: Non keyword-only attributes are not allowed after a keyword-only attribute. 
[case testAttrsUpdateBaseKwOnly] from b import B B(5) [file a.py] import attr @attr.s() class A: a = attr.ib(15) # type: int [file b.py] from a import A import attr @attr.s(kw_only=True) class B(A): b = attr.ib("16") # type: str [file a.py.2] import attr @attr.s(kw_only=True) class A: a = attr.ib(15) # type: int [builtins fixtures/attr.pyi] [out] == main:2: error: Too many positional arguments for "B" [case testAddBaseClassMethodCausingInvalidOverride] import m class B(m.A): def f(self) -> str: pass [file m.py] class A: pass [file m.py.2] class A: def f(self) -> int: pass [file n.py.3] [out] == main:3: error: Return type "str" of "f" incompatible with return type "int" in supertype "A" == main:3: error: Return type "str" of "f" incompatible with return type "int" in supertype "A" [case testModifyBaseClassMethodCausingInvalidOverride] import m class B(m.A): def f(self) -> str: pass [file m.py] class A: def f(self) -> str: pass [file m.py.2] class A: def f(self) -> int: pass [out] == main:3: error: Return type "str" of "f" incompatible with return type "int" in supertype "A" [case testAddBaseClassAttributeCausingErrorInSubclass] import m class B(m.A): def a(self) -> None: x = 1 if int(): x = self.x def f(self) -> None: self.x = 1 def z(self) -> None: x = 1 if int(): x = self.x [file m.py] class A: pass [file m.py.2] class A: def g(self) -> None: self.x = 'a' [out] == main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:9: error: Incompatible types in assignment (expression has type "int", variable has type "str") main:14: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testChangeBaseClassAttributeType] import m class B(m.A): def f(sel) -> None: sel.x = 1 [file m.py] class A: def g(self) -> None: self.x = 1 [file m.py.2] class A: def g(self) -> None: self.x = 'a' [out] == main:4: error: Incompatible types in assignment (expression has type "int", variable has type 
"str") [case testRemoveAttributeInBaseClass] import m class B(m.A): def f(self) -> None: a = 1 a = self.x [file m.py] class A: def g(self) -> None: self.x = 1 [file m.py.2] class A: pass [out] == main:5: error: "B" has no attribute "x" [case testTestSignatureOfInheritedMethod] import m class B(m.A): def f(self) -> None: self.g() [file m.py] class A: def g(self) -> None: pass [file m.py.2] class A: def g(self, a: 'A') -> None: pass [out] == main:4: error: Too few arguments for "g" of "A" [case testRemoveBaseClass] import m class A(m.B): def f(self) -> None: self.g() self.x self.y = 1 [file m.py] class C: def g(self) -> None: self.x = 1 class B(C): pass [file m.py.2] class C: pass class B: pass [out] == main:4: error: "A" has no attribute "g" main:5: error: "A" has no attribute "x" [case testRemoveBaseClass2] import m class A(m.B): def f(self) -> None: self.g() self.x self.y = 1 [file m.py] class C: def g(self) -> None: self.x = 1 class B(C): pass [file m.py.2] class C: def g(self) -> None: self.x = 1 class B: pass [out] == main:4: error: "A" has no attribute "g" main:5: error: "A" has no attribute "x" [case testChangeInPackage] import m.n def f() -> None: m.n.g() [file m/__init__.py] [file m/n.py] def g() -> None: pass [file m/n.py.2] def g(x: int) -> None: pass [out] == main:3: error: Too few arguments for "g" [case testTriggerTargetInPackage] import m.n [file m/__init__.py] [file m/n.py] import a def f() -> None: a.g() [file a.py] def g() -> None: pass [file a.py.2] def g(x: int) -> None: pass [out] == m/n.py:3: error: Too few arguments for "g" [case testChangeInPackage__init__] import m import m.n def f() -> None: m.g() [file m/__init__.py] def g() -> None: pass [file m/__init__.py.2] def g(x: int) -> None: pass [file m/n.py] [out] == main:4: error: Too few arguments for "g" [case testTriggerTargetInPackage__init__] import m import m.n [file m/__init__.py] import a def f() -> None: a.g() [file a.py] def g() -> None: pass [file a.py.2] def g(x: int) -> None: pass 
[file m/n.py] [out] == m/__init__.py:3: error: Too few arguments for "g" [case testModuleAttributeTypeChanges] import m def f() -> None: x = 1 if int(): x = m.x [file m.py] x = 1 [file m.py.2] x = '' [out] == main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testTwoStepsDueToModuleAttribute] import m x = m.f() def g() -> None: y = 1 if int(): y = x # E [file m.py] def f() -> int: pass [file m.py.2] def f() -> str: pass [out] == main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testTwoStepsDueToMultipleNamespaces] import m x = m.f() def g() -> None: xx = 1 if int(): xx = x # E class A: def a(self) -> None: self.y = m.f() def b(self) -> None: yy = 1 if int(): yy = self.y class B: def c(self) -> None: self.z = m.f() def b(self) -> None: zz = 1 if int(): zz = self.z [file m.py] def f() -> int: pass [file m.py.2] def f() -> str: pass [out] == main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:15: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:23: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testConstructorSignatureChanged] import m def f() -> None: m.A() [file m.py] class A: def __init__(self) -> None: pass [file m.py.2] class A: def __init__(self, x: int) -> None: pass [out] == main:4: error: Too few arguments for "A" [case testConstructorSignatureChanged2] from typing import Callable import m def use(x: Callable[[], m.A]) -> None: x() def f() -> None: use(m.A) [file m.py] class A: def __init__(self) -> None: pass [file m.py.2] class A: def __init__(self, x: int) -> None: pass [out] == -- This is a bad error message main:7: error: Argument 1 to "use" has incompatible type "Type[A]"; expected "Callable[[], A]" [case testConstructorSignatureChanged3] from a import C class D(C): def g(self) -> None: 
super().__init__() D() [file a.py] class C: def __init__(self) -> None: pass [file a.py.2] class C: def __init__(self, x: int) -> None: pass [out] == main:4: error: Too few arguments for "__init__" of "C" main:5: error: Too few arguments for "D" [case testConstructorAdded] import m def f() -> None: m.A() [file m.py] class A: pass [file m.py.2] class A: def __init__(self, x: int) -> None: pass [out] == main:4: error: Too few arguments for "A" [case testConstructorDeleted] import m def f() -> None: m.A(1) [file m.py] class A: def __init__(self, x: int) -> None: pass [file m.py.2] class A: pass [out] == main:4: error: Too many arguments for "A" [case testBaseClassConstructorChanged] import m def f() -> None: m.B() [file m.py] class A: def __init__(self) -> None: pass class B(A): pass [file m.py.2] class A: def __init__(self, x: int) -> None: pass class B(A): pass [out] == main:4: error: Too few arguments for "B" [case testSuperField] from a import C class D(C): def g(self) -> int: return super().x [file a.py] class C: def __init__(self) -> None: self.x = 12 [file a.py.2] class C: def __init__(self) -> None: self.x = 'ar' [out] == main:4: error: Incompatible return value type (got "str", expected "int") [case testImportFrom] from m import f def g() -> None: f() [file m.py] def f() -> None: pass [file m.py.2] def f(x: int) -> None: pass [builtins fixtures/fine_grained.pyi] [out] == main:4: error: Too few arguments for "f" [case testImportFrom2] from m import f f() [file m.py] def f() -> None: pass [file m.py.2] def f(x: int) -> None: pass [out] == main:2: error: Too few arguments for "f" [case testImportFromTargetsClass] from m import C def f(c: C) -> None: c.g() [file m.py] class C: def g(self) -> None: pass [file m.py.2] class C: def g(self, x: int) -> None: pass [out] == main:4: error: Too few arguments for "g" of "C" [case testImportFromTargetsVariable] from m import x def f() -> None: y = 1 if int(): y = x [file m.py] x = 1 [file m.py.2] x = '' [out] == main:6: 
error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testImportFromSubmoduleOfPackage] from m import n def f() -> None: n.g() [file m/__init__.py] [file m/n.py] def g() -> None: pass [file m/n.py.2] def g(x: int) -> None: pass [out] == main:4: error: Too few arguments for "g" [case testImportedFunctionGetsImported] from m import f def g() -> None: f() [file m.py] from n import f [file n.py] def f() -> None: pass [file n.py.2] def f(x: int) -> None: pass [out] == main:4: error: Too few arguments for "f" [case testNestedClassMethodSignatureChanges] from m import A def f(x: A.B) -> None: x.g() [file m.py] class A: class B: def g(self) -> None: pass [file m.py.2] class A: class B: def g(self, x: int) -> None: pass [out] == main:4: error: Too few arguments for "g" of "B" [case testNestedClassAttributeTypeChanges] from m import A def f(x: A.B) -> None: z = 1 if int(): z = x.y [file m.py] class A: class B: def g(self) -> None: self.y = 1 [file m.py.2] class A: class B: def g(self) -> None: self.y = '' [out] == main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testReprocessMethodInNestedClass] from m import f class A: class B: def g(self) -> None: x = 1 if int(): x = f() [file m.py] def f() -> int: pass [file m.py.2] def f() -> str: pass [file n.py.3] [out] == main:8: error: Incompatible types in assignment (expression has type "str", variable has type "int") == main:8: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testReprocessMethodInNestedClassSemanal] import a [file a.py] class A: class B: def g() -> None: pass def foo(self) -> int: return 12 [file b.py.2] [file b.py.3] 2 [out] a.py:3: error: Method must have at least one argument == a.py:3: error: Method must have at least one argument == a.py:3: error: Method must have at least one argument [case testBaseClassDeleted] import m class A(m.C): def f(self) -> None: 
self.g() # No error here because m.C becomes an Any base class def g(self) -> None: self.x [file m.py] class C: def g(self) -> None: pass [file m.py.2] [out] main:7: error: "A" has no attribute "x" == main:3: error: Name 'm.C' is not defined [case testBaseClassOfNestedClassDeleted] import m class A: class B(m.C): def f(self) -> None: self.g() # No error here because m.C becomes an Any base class def g(self) -> None: self.x [file m.py] class C: def g(self) -> None: pass [file m.py.2] [out] main:8: error: "B" has no attribute "x" == main:4: error: Name 'm.C' is not defined [case testImportQualifiedModuleName] import a [file a.py] import b.c b.c.f() [file a.py.2] import b.c b.c.f() # dummy change [file b/__init__.py] [file b/c.py] def f() -> None: pass [out] == [case testTypeAliasRefresh] from typing import Callable from a import f C = Callable[[int], str] [file a.py] def f() -> None: pass [file a.py.2] [out] == main:2: error: Module 'a' has no attribute 'f' [case testTypeVarRefresh] from typing import TypeVar from a import f T = TypeVar('T') [file a.py] def f() -> None: pass [file a.py.2] [out] == main:2: error: Module 'a' has no attribute 'f' [case testRefreshTyping] from typing import Sized from c import A import z # Force typing to get refreshed by using a protocol from it x: Sized = A() [file c.py] class D: def __len__(self) -> int: return 0 A = D [file c.py.2] class C: def __len__(self) -> int: return 0 A = C [file z.py] from typing import List T = List[int] [file z.py.2] from typing import List T = List[int] # yo [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] [out] == [case testNamedTupleRefresh] from typing import NamedTuple from a import f N = NamedTuple('N', [('x', int)]) [file a.py] def f() -> None: pass [file a.py.2] [out] == main:2: error: Module 'a' has no attribute 'f' [case testModuleLevelAttributeRefresh] from typing import Callable from a import f x = 1 y = '' # type: str [file a.py] def f() -> None: pass [file a.py.2] [out] == 
main:2: error: Module 'a' has no attribute 'f' [case testClassBodyRefresh] from a import f class A: x = 1 y = '' # type: str def f(self) -> None: self.x = 1 [file a.py] f = 1 [file a.py.2] [out] == main:1: error: Module 'a' has no attribute 'f' [case testDecoratedMethodRefresh] from typing import Iterator, Callable, List from a import f import a def dec(f: Callable[['A'], Iterator[int]]) -> Callable[[int], int]: pass class A: @dec def f(self) -> Iterator[int]: self.x = a.g() # type: int return None [builtins fixtures/list.pyi] [file a.py] f = 1 def g() -> int: pass [file a.py.2] def f() -> None: pass def g() -> int: pass [file a.py.3] def f() -> None: pass def g() -> str: pass [out] == == main:10: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testTwoPassTypeChecking] import a [file a.py] [file a.py.2] class A: def __init__(self, b: B) -> None: self.a = b.a class B: def __init__(self) -> None: self.a = int() [file a.py.3] class A: def __init__(self, b: B) -> None: self.a = b.a reveal_type(self.a) # E class B: def __init__(self) -> None: self.a = int() [out] == == a.py:4: note: Revealed type is 'builtins.int' [case testStripRevealType] import a reveal_type(a.f()) [file a.py] def f() -> int: pass [file a.py.2] def f() -> str: pass [out] main:2: note: Revealed type is 'builtins.int' == main:2: note: Revealed type is 'builtins.str' [case testDecoratorTypeAfterReprocessing] import a reveal_type(a.f()) [file a.py] from contextlib import contextmanager from typing import Iterator import b @contextmanager def f() -> Iterator[None]: yield [file b.py] [delete b.py.2] [file b.py.3] [typing fixtures/typing-full.pyi] [builtins fixtures/list.pyi] [triggered] 2: , __main__ 3: , __main__, a [out] main:2: note: Revealed type is 'contextlib.GeneratorContextManager[None]' == a.py:3: error: Cannot find implementation or library stub for module named 'b' a.py:3: note: See 
https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: note: Revealed type is 'contextlib.GeneratorContextManager[None]' == main:2: note: Revealed type is 'contextlib.GeneratorContextManager[None]' [case testDecoratorSpecialCase1] import a [file a.py] import contextlib from typing import List, Iterator @contextlib.contextmanager def f(x: List[int]) -> Iterator[None]: x.append(1) yield def g() -> None: import b b.h(1) [file b.py] def h() -> None: pass [delete b.py.2] [file b.py.3] def h() -> None: pass [file a.py.4] import contextlib from typing import List, Iterator @contextlib.contextmanager def f(x: List[int]) -> Iterator[None]: x.append(1) yield def g() -> None: import b b.h(1) pass [typing fixtures/typing-full.pyi] [builtins fixtures/list.pyi] [triggered] 2: , , , a.g 3: , , , a 4: a.g [out] a.py:11: error: Too many arguments for "h" == a.py:10: error: Cannot find implementation or library stub for module named 'b' a.py:10: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == a.py:11: error: Too many arguments for "h" == a.py:11: error: Too many arguments for "h" [case testDecoratorSpecialCase2] import a [file a.py] from contextlib import contextmanager from typing import Iterator, List import b @contextmanager def f(x: List[int]) -> Iterator[None]: x.append(1) yield [file b.py] [delete b.py.2] [file b.py.3] [file a.py.4] from contextlib import contextmanager from typing import Iterator, List import b @contextmanager def f(x: List[int]) -> Iterator[None]: x.append(1) yield [typing fixtures/typing-full.pyi] [builtins fixtures/list.pyi] [out] == a.py:3: error: Cannot find implementation or library stub for module named 'b' a.py:3: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports == == [case testDecoratorMethodCompat] from typing import Callable, List, TypeVar import x class Base: pass _Base = TypeVar('_Base', bound=Base) def dec(f: Callable[[_Base], int]) -> 
Callable[[_Base], List[int]]: pass class B(Base): def foo(self) -> List[int]: pass class A(B): @dec def foo(self) -> int: x.lol() return 12 [file x.py] def lol() -> str: pass [file x.py.2] def lol() -> int: pass [file x.py.3] def lol() -> str: pass [builtins fixtures/list.pyi] [out] == == [case testPreviousErrorInDecoratedFunction] import a [file a.py] from typing import Callable import b def dec(x: Callable[[], None]) -> Callable[[], None]: return x @dec def f() -> None: 1 + '' [file b.py] [file b.py.2] 1 [file b.py.3] 2 [file a.py.4] from typing import Callable import b def dec(f: Callable[[], None]) -> Callable[[], None]: return f @dec def f() -> None: 1 + 2 [out] a.py:9: error: Unsupported operand types for + ("int" and "str") == a.py:9: error: Unsupported operand types for + ("int" and "str") == a.py:9: error: Unsupported operand types for + ("int" and "str") == [case testPreviousErrorInDecoratedMethodOverride] import a [file a.py] from typing import Callable from b import B def dec(x: Callable[['A'], int]) -> Callable[['A'], int]: return x class A(B): @dec def foo(self) -> int: return 12 [file b.py] class B: def foo(self) -> str: return 'hi' [file c.py.2] [file c.py.3] 1 [file b.py.4] class B: def foo(self) -> int: return 12 [out] a.py:9: error: Return type "int" of "foo" incompatible with return type "str" in supertype "B" == a.py:9: error: Return type "int" of "foo" incompatible with return type "str" in supertype "B" == a.py:9: error: Return type "int" of "foo" incompatible with return type "str" in supertype "B" == [case testPreviousErrorInMethodSemanal1] import a [file a.py] class A: def foo() -> int: pass [file c.py.2] [file c.py.3] 1 [file a.py.4] class A: def foo(self) -> int: pass [out] a.py:2: error: Method must have at least one argument == a.py:2: error: Method must have at least one argument == a.py:2: error: Method must have at least one argument == [case testPreviousErrorInMethodSemanal2] import a [file a.py] class A: def foo(self) -> None: 
nothing [file c.py.2] [file c.py.3] 1 [file a.py.4] class A: def foo(self) -> int: pass [out] a.py:3: error: Name 'nothing' is not defined == a.py:3: error: Name 'nothing' is not defined == a.py:3: error: Name 'nothing' is not defined == [case testPreviousErrorInMethodSemanalPass3] import a [file a.py] from typing import List class A: def __init__(self) -> None: self.x = [] # type: List[int, str] [file c.py.2] [file c.py.3] 1 [file a.py.4] from typing import List class A: def __init__(self) -> None: self.x = [] # type: List[int] [builtins fixtures/list.pyi] [out] a.py:4: error: "list" expects 1 type argument, but 2 given == a.py:4: error: "list" expects 1 type argument, but 2 given == a.py:4: error: "list" expects 1 type argument, but 2 given == [case testPreviousErrorInOverloadedFunctionSemanalPass3] import a [file a.py] from typing import overload, List @overload def f(x: str) -> None: ... @overload def f(x: int) -> List[int, str]: ... def f(x: object) -> object: pass [file c.py.2] [file c.py.3] 1 [file a.py.4] from typing import overload, List @overload def f(x: str) -> None: ... @overload def f(x: int) -> List[int]: ... def f(x: object) -> object: pass [builtins fixtures/list.pyi] [out] a.py:5: error: "list" expects 1 type argument, but 2 given == a.py:5: error: "list" expects 1 type argument, but 2 given == a.py:5: error: "list" expects 1 type argument, but 2 given == [case testPreviousErrorInOverloadedFunction] import a [file a.py] from typing import overload @overload def f(x: str) -> None: ... @overload def f(x: int) -> int: ... def f(x: object) -> None: pass [file b.py] [file b.py.2] 1 [file b.py.3] 2 [file a.py.4] from typing import overload @overload def f(x: str) -> None: ... @overload def f(x: int) -> None: ... 
def f(x: object) -> None: pass [out] a.py:6: error: Overloaded function implementation cannot produce return type of signature 2 == a.py:6: error: Overloaded function implementation cannot produce return type of signature 2 == a.py:6: error: Overloaded function implementation cannot produce return type of signature 2 == [case testPreviousErrorInOverloadedFunctionSemanal] import a [file a.py] from typing import overload @overload def f(x: str) -> None: ... @overload def f(x: int) -> None: ... [file b.py] [file b.py.2] 1 [file b.py.3] 2 [file a.py.4] from typing import overload @overload def f(x: str) -> None: ... @overload def f(x: int) -> None: ... def f(x: object) -> None: pass [out] a.py:2: error: An overloaded function outside a stub file must have an implementation == a.py:2: error: An overloaded function outside a stub file must have an implementation == a.py:2: error: An overloaded function outside a stub file must have an implementation == [case testPreviousErrorInDecoratedMethodSemanalPass3] import a [file a.py] from typing import Callable, TypeVar, Any, List T = TypeVar('T', bound=Callable) def dec(x: T) -> T: return x @dec def foo(self) -> List[str, int]: return [] [file c.py.2] [file c.py.3] [file a.py.4] from typing import Callable, TypeVar, Any, List T = TypeVar('T', bound=Callable[..., Any]) def dec(x: T) -> T: return x @dec def foo(self) -> List[str]: return [] [builtins fixtures/list.pyi] [out] a.py:8: error: "list" expects 1 type argument, but 2 given == a.py:8: error: "list" expects 1 type argument, but 2 given == a.py:8: error: "list" expects 1 type argument, but 2 given == [case testDecoratorUpdateMod] import a [file a.py] import mod @mod.deca @mod.decb(mod.C()) def func(x: mod.B) -> mod.B: x.x return x [file mod.py] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[B], B]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass class B: x: int [file mod.py.2] from 
typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[str], str]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass class B: x: int [file mod.py.3] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[B], B]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass class B: y: int [file mod.py.4] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[B], B]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: def __init__(self, x: int) -> None: pass class B: x: int [out] == a.py:3: error: Argument 1 to "deca" has incompatible type "Callable[[B], B]"; expected "Callable[[str], str]" == a.py:6: error: "B" has no attribute "x" == a.py:4: error: Too few arguments for "C" [case testDecoratorUpdateFunc] import a [file a.py] import mod def outer() -> None: @mod.deca @mod.decb(mod.C()) def func(x: mod.B) -> mod.B: x.x return x [file mod.py] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[B], B]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass class B: x: int [file mod.py.2] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[str], str]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass class B: x: int [file mod.py.3] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[B], B]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass class B: y: int [file mod.py.4] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[[B], B]) -> Callable[[str], str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: def __init__(self, x: int) -> None: pass class B: x: int [out] == a.py:4: error: 
Argument 1 to "deca" has incompatible type "Callable[[B], B]"; expected "Callable[[str], str]" == a.py:7: error: "B" has no attribute "x" == a.py:5: error: Too few arguments for "C" [case DecoratorUpdateMethod] import a [file a.py] import mod class D: @mod.deca @mod.decb(mod.C()) def func(self, x: mod.B) -> mod.B: x.x return x [file mod.py] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[..., B]) -> Callable[..., str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass class B: x: int [file mod.py.2] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[..., str]) -> Callable[..., str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass class B: x: int [file mod.py.3] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[..., B]) -> Callable[..., str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: pass class B: y: int [file mod.py.4] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deca(func: Callable[..., B]) -> Callable[..., str]: pass def decb(arg: C) -> Callable[[F], F]: pass class C: def __init__(self, x: int) -> None: pass class B: x: int [out] == a.py:4: error: Argument 1 to "deca" has incompatible type "Callable[[D, B], B]"; expected "Callable[..., str]" == a.py:7: error: "B" has no attribute "x" == a.py:5: error: Too few arguments for "C" [case testDecoratorUpdateDeeepNested] import a [file a.py] import mod def outer() -> None: def inner() -> None: @mod.dec def func(x: int) -> int: pass [file mod.py] from typing import Callable def dec(func: Callable[[int], int]) -> Callable[[str], str]: pass [file mod.py.2] from typing import Callable def dec(func: Callable[[str], str]) -> Callable[[str], str]: pass [out] == a.py:5: error: Argument 1 to "dec" has incompatible type "Callable[[int], int]"; expected "Callable[[str], str]" [case testDecoratorUpdateNestedClass] import a [file 
a.py] import mod class Outer: class Inner: c = mod.C() @c.dec def func(self, x: int) -> int: pass [file mod.py] from typing import Callable class C: def dec(self, func: Callable[..., int]) -> Callable[..., str]: pass [file mod.py.2] from typing import Callable class C: def dec(self, func: Callable[..., str]) -> Callable[..., str]: pass [out] == a.py:6: error: Argument 1 to "dec" of "C" has incompatible type "Callable[[Inner, int], int]"; expected "Callable[..., str]" [case testDecoratorUpdateClassInFunction] import a [file a.py] import mod def outer() -> None: class Inner: c = mod.C() @c.dec def func(self, x: mod.B) -> int: return x.x [file mod.py] from typing import Callable class C: def dec(self, func: Callable[..., int]) -> Callable[..., str]: pass class B: x: int [file mod.py.2] from typing import Callable class C: def dec(self, func: Callable[..., str]) -> Callable[..., str]: pass class B: x: int [file mod.py.3] from typing import Callable class C: def dec(self, func: Callable[..., int]) -> Callable[..., str]: pass class B: x: str [out] == a.py:6: error: Argument 1 to "dec" of "C" has incompatible type "Callable[[Inner, B], int]"; expected "Callable[..., str]" == a.py:8: error: Incompatible return value type (got "str", expected "int") [case testDecoratorUpdateMROUpdated] import a [file a.py] import mod @mod.dec def func(x: mod.B) -> int: pass [file mod.py] from typing import Callable class B: pass class C(B): pass def dec(f: Callable[[C], int]) -> Callable[[int], int]: pass [file mod.py.2] from typing import Callable class B: pass class C: pass def dec(f: Callable[[C], int]) -> Callable[[int], int]: pass [out] == a.py:3: error: Argument 1 to "dec" has incompatible type "Callable[[B], int]"; expected "Callable[[C], int]" [case testOverloadRefresh] from typing import overload import m @overload def f(x: m.A) -> None: ... @overload def f(x: int) -> None: ... 
def f(x: object) -> None: from n import g [file m.py] class A: pass [file n.py] def g() -> None: pass [delete m.py.2] [delete n.py.2] [out] == main:2: error: Cannot find implementation or library stub for module named 'm' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:7: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader main:9: error: Cannot find implementation or library stub for module named 'n' [case testOverloadSpecialCase] from typing import overload import m import sys class C: if sys.platform == 'nonexistent': def f(self, x): pass else: @overload def f(self, x: m.A) -> None: pass @overload def f(self, x: int) -> None: pass def f(self, x: object) -> None: from n import g [file m.py] class A: pass [file n.py] def g() -> None: pass [delete m.py.2] [delete n.py.2] [builtins fixtures/ops.pyi] [out] == main:2: error: Cannot find implementation or library stub for module named 'm' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:12: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader main:14: error: Cannot find implementation or library stub for module named 'n' [case testOverloadClassmethodDisappears] from typing import overload from m import Wrapper reveal_type(Wrapper.foo(3)) [file m.pyi] from typing import overload class Wrapper: @overload @classmethod def foo(self, x: int) -> int: ... @overload @classmethod def foo(self, x: str) -> str: ... [file m.pyi.2] from typing import overload class Wrapper: @overload def foo(cls, x: int) -> int: ... @overload def foo(cls, x: str) -> str: ... 
[builtins fixtures/classmethod.pyi] [out] main:3: note: Revealed type is 'builtins.int' == main:3: error: No overload variant of "foo" of "Wrapper" matches argument type "int" main:3: note: Possible overload variants: main:3: note: def foo(cls: Wrapper, x: int) -> int main:3: note: def foo(cls: Wrapper, x: str) -> str main:3: note: Revealed type is 'Any' [case testRefreshGenericClass] from typing import TypeVar, Generic from a import A X = TypeVar('X') class C(Generic[X]): def f(self, x: A) -> X: ... [file a.py] class A: pass [file a.py.2] [file a.py.3] class A: pass [out] == main:2: error: Module 'a' has no attribute 'A' == [case testRefreshGenericAndFailInPass3] # Failure in semantic analysis pass 3 from a import C a: C[int] [file a.py] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): pass [file a.py.2] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class C(Generic[T, S]): pass [file a.py.3] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): pass [out] == main:3: error: "C" expects 2 type arguments, but 1 given == [case testPrintStatement_python2] # flags: --py2 import a [file a.py] def f(x): # type: (int) -> int return 1 print f(1) [file a.py.2] def f(x): # type: (int) -> int return 1 print f('') [out] == a.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" [case testUnannotatedClass] import a [file a.py] class A: def f(self, x): self.y = x self.g() def g(self): pass [file a.py.2] class A: def f(self, x, y): self.y = x self.z = y self.g() def g(self): pass [triggered] 2: , , [out] == [case testSuperBasics] import a [file a.py] class A: def f(self) -> None: pass class B(A): def f(self) -> None: super(B, self).f() [file a.py.2] class A: def f(self) -> None: pass class B(A): def f(self) -> None: super(B, self).f() [out] == [case testErrorInTypeCheckSecondPassThroughPropagation] import a def f() -> None: x = a.C() [file a.py] [file a.py.2] from typing import Generic, 
TypeVar T = TypeVar('T') class C(Generic[T]): pass [out] main:4: error: "object" has no attribute "C" == main:4: error: Need type annotation for 'x' [case testPartialTypeInNestedClass] import a class C: def f(self) -> None: a.g() class D: def __init__(self) -> None: self.x = {} def meth(self) -> None: self.x['a'] = 'b' [file a.py] def g() -> None: pass [file a.py.2] def g() -> int: pass [builtins fixtures/dict.pyi] [out] main:7: error: Need type annotation for 'x' (hint: "x: Dict[, ] = ...") == main:7: error: Need type annotation for 'x' (hint: "x: Dict[, ] = ...") [case testRefreshPartialTypeInClass] import a class D: def __init__(self) -> None: a.g() self.x = {} def meth(self) -> None: self.x['a'] = 'b' [file a.py] def g() -> None: pass [file a.py.2] def g() -> int: pass [builtins fixtures/dict.pyi] [out] main:5: error: Need type annotation for 'x' (hint: "x: Dict[, ] = ...") == main:5: error: Need type annotation for 'x' (hint: "x: Dict[, ] = ...") [case testRefreshPartialTypeInferredAttributeIndex] from c import C reveal_type(C().a) [file c.py] from b import f class C: def __init__(self) -> None: self.a = {} if bool(): self.a[0] = f() [file b.py] def f() -> int: ... [file b.py.2] from typing import List def f() -> str: ... [builtins fixtures/dict.pyi] [out] main:2: note: Revealed type is 'builtins.dict[builtins.int, builtins.int]' == main:2: note: Revealed type is 'builtins.dict[builtins.int, builtins.str]' [case testRefreshPartialTypeInferredAttributeAssign] from c import C reveal_type(C().a) [file c.py] from b import f class C: def __init__(self) -> None: self.a = [] if bool(): self.a = f() [file b.py] from typing import List def f() -> List[int]: ... [file b.py.2] from typing import List def f() -> List[str]: ... 
[builtins fixtures/list.pyi] [out] main:2: note: Revealed type is 'builtins.list[builtins.int]' == main:2: note: Revealed type is 'builtins.list[builtins.str]' [case testRefreshPartialTypeInferredAttributeAppend] from c import C reveal_type(C().a) [file c.py] from b import f class C: def __init__(self) -> None: self.a = [] if bool(): self.a.append(f()) [file b.py] def f() -> int: ... [file b.py.2] def f() -> str: ... [builtins fixtures/list.pyi] [out] main:2: note: Revealed type is 'builtins.list[builtins.int]' == main:2: note: Revealed type is 'builtins.list[builtins.str]' [case testRefreshTryExcept] import a def f() -> None: a.g() try: pass except BaseException as e: e [file a.py] def g() -> int: pass [file a.py.2] def g() -> str: pass [builtins fixtures/exception.pyi] [out] == [case testMroSpecialCase] import b import a [file a.py] class C: pass class D(C): 1() class E(D): pass [file b.py] import a [file a.py.2] class C: pass class D(C): 1() class E(D): pass # Something needs to change [file b.py.2] import a # Something needs to change [triggered] 2: a, a [out] a.py:3: error: "int" not callable == a.py:3: error: "int" not callable [case testMetaclassDefinition_python2] # flags: --py2 import abc import m m.f() class A: __metaclass__ = abc.ABCMeta [file m.py] def f(): pass [file m.py.2] def f(x=1): pass [out] == [case testMetaclassAttributes] import a [file a.py] from mod import C from typing import Type def f(arg: Type[C]) -> None: arg.x = int() [file mod.py] import submod class C(metaclass=submod.M): pass [file submod.py] class M(type): x: int [file submod.py.2] class M(type): x: str [file submod.py.3] class M(type): y: str [file submod.py.4] class M(type): x: int [out] == a.py:4: error: Incompatible types in assignment (expression has type "int", variable has type "str") == a.py:4: error: "Type[C]" has no attribute "x" == [case testMetaclassAttributesDirect] import a [file a.py] from mod import C def f() -> None: C.x = int() [file mod.py] import submod class 
C(metaclass=submod.M): pass [file submod.py] class M(type): x: int [file submod.py.2] class M(type): x: str [file submod.py.3] class M(type): y: str [file submod.py.4] class M(type): x: int [out] == a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") == a.py:3: error: "Type[C]" has no attribute "x" == [case testMetaclassOperators] import a [file a.py] from mod import C from typing import Type def f(arg: Type[C]) -> None: arg + arg [file mod.py] import submod class C(metaclass=submod.M): pass [file submod.py] class M(type): def __add__(self, other: M) -> M: pass [file submod.py.2] class M(type): def __add__(self, other: int) -> M: pass [out] == a.py:4: error: Unsupported operand types for + ("Type[C]" and "Type[C]") [case testMetaclassOperatorsDirect] import a [file a.py] from mod import C def f() -> None: C + C [file mod.py] import submod class C(metaclass=submod.M): pass [file submod.py] class M(type): def __add__(self, other: int) -> M: pass [file submod.py.2] class M(type): def __add__(self, other: M) -> M: pass [out] a.py:3: error: Unsupported operand types for + ("Type[C]" and "Type[C]") == [case testMetaclassAttributesDirect_python2] # flags: --py2 import a [file a.py] from mod import C def f(): # type: () -> None C.x = int() [file mod.py] import submod class C: __metaclass__ = submod.M [file submod.py] class M(type): x = None # type: int [file submod.py.2] class M(type): x = None # type: str [file submod.py.3] class M(type): y = None # type: str [file submod.py.4] class M(type): x = None # type: int [out] == a.py:4: error: Incompatible types in assignment (expression has type "int", variable has type "str") == a.py:4: error: "Type[C]" has no attribute "x" == [case testMetaclassOperators_python2] # flags: --py2 import a [file a.py] from mod import C from typing import Type def f(arg): # type: (Type[C]) -> None arg + arg [file mod.py] import submod class C: __metaclass__ = submod.M [file submod.py] class 
M(type): def __add__(self, other): # type: (M) -> M pass [file submod.py.2] class M(type): def __add__(self, other): # type: (int) -> M pass [out] == a.py:5: error: Unsupported operand types for + ("Type[C]" and "Type[C]") [case testFineMetaclassUpdate] import a [file a.py] from c import M import b def f(arg: M) -> None: pass f(b.B) [file b.py] import c class B: pass [file b.py.2] import c class B(metaclass=c.M): pass [file c.py] class M(type): pass [out] a.py:6: error: Argument 1 to "f" has incompatible type "Type[B]"; expected "M" == [case testFineMetaclassRecalculation] import a [file a.py] from b import B class M2(type): pass class D(B, metaclass=M2): pass [file b.py] import c class B: pass [file b.py.2] import c class B(metaclass=c.M): pass [file c.py] class M(type): pass [out] == a.py:3: error: Inconsistent metaclass structure for 'D' [case testFineMetaclassDeclaredUpdate] import a [file a.py] import b class B(metaclass=b.M): pass class D(B, metaclass=b.M2): pass [file b.py] class M(type): pass class M2(M): pass [file b.py.2] class M(type): pass class M2(type): pass [out] == a.py:3: error: Inconsistent metaclass structure for 'D' [case testFineMetaclassRemoveFromClass] import a [file a.py] import b def func() -> int: return b.B.x [file b.py] from c import M class B(metaclass=M): pass [file b.py.2] from c import M class B: pass [file c.py] class M(type): x: int [out] == a.py:3: error: "Type[B]" has no attribute "x" [case testFineMetaclassRemoveFromClass2] import a [file a.py] import b def func() -> None: b.test(b.B) [file b.py] import c def test(cls: c.M) -> None: pass class B(metaclass=c.M): pass [file b.py.2] import c def test(cls: c.M) -> None: pass class B: pass [file c.py] class M(type): x: int [out] == a.py:3: error: Argument 1 to "test" has incompatible type "Type[B]"; expected "M" [case testBadMetaclassCorrected] import a [file a.py] import b class C(metaclass=b.M): pass [file b.py] from c import M [file c.py] M = 1 [file c.py.2] class M(type): pass 
[out] a.py:2: error: Invalid metaclass 'b.M' == [case testFixedAttrOnAddedMetaclass] import a [file a.py] import b def fun() -> None: x: int = b.C.x [file b.py] import c class C: pass [file b.py.2] import c class C(metaclass=c.M): pass [file c.py] class M(type): x: int [out] a.py:3: error: "Type[C]" has no attribute "x" == [case testIndirectSubclassReferenceMetaclass] import a [file a.py] import b def f() -> None: b.x = int() [file b.py] import bb x = bb.D.x [file bb.py] import mod class D(mod.C): pass [file mod.py] import submod class C(metaclass=submod.M): pass [file submod.py] class M(type): x: int [file submod.py.2] class M(type): x: str [file submod.py.3] class M(type): y: str [file submod.py.4] class M(type): x: int [out] == a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") == b.py:2: error: "Type[D]" has no attribute "x" == [case testMetaclassDeletion] import a [file a.py] import b def func() -> None: b.B.x [file b.py] import c class B(metaclass=c.M): pass [file c.py] class M(type): x: int [file c.py.2] whatever: int [out] == b.py:2: error: Name 'c.M' is not defined a.py:3: error: "Type[B]" has no attribute "x" [case testFixMissingMetaclass] import a [file a.py] import b def func() -> None: b.B.x [file b.py] import c class B(metaclass=c.M): pass [file c.py] whatever: int [file c.py.2] class M(type): x: int [out] b.py:2: error: Name 'c.M' is not defined a.py:3: error: "Type[B]" has no attribute "x" == [case testGoodMetaclassSpoiled] import a [file a.py] import b class C(metaclass=b.M): pass [file b.py] class M(type): pass [file b.py.2] M = 1 [out] == a.py:2: error: Invalid metaclass 'b.M' [case testRefreshGenericSubclass] from typing import Generic, TypeVar import m m.x T = TypeVar('T') class C(Generic[T]): def __init__(self, x: T) -> None: pass class D(C[T]): def __init__(self, x: T) -> None: m.x super(D, self).__init__(x) [file m.py] x = 0 [file m.py.2] x = '' [out] == [case testRefreshNamedTupleSubclass] 
from typing import NamedTuple import m m.x N = NamedTuple('N', [('x', int)]) class C(N): pass [file m.py] x = 0 [file m.py.2] x = '' [out] == [case testNewTypeRefresh] import a [file a.py] from typing import Dict, NewType class A: pass N = NewType('N', A) a: Dict[N, int] def f(self, x: N) -> None: a.get(x) [file a.py.2] from typing import Dict, NewType # dummy change class A: pass N = NewType('N', A) a: Dict[N, int] def f(self, x: N) -> None: a.get(x) [builtins fixtures/dict.pyi] [out] == [case testRefreshFunctionalEnum] import a [file a.py] from typing import Dict from enum import Enum N = Enum('N', 'x') a: Dict[N, int] def f(self, x: N) -> None: a.get(x) [file a.py.2] from typing import Dict from enum import Enum N = Enum('N', 'x') a: Dict[N, int] def f(self, x: N) -> None: a.get(x) [builtins fixtures/dict.pyi] [out] == [case testFineGrainedCallable] import a [file a.py] def f(o: object) -> None: if callable(o): o() [file a.py.2] def f(o: object) -> None: if callable(o): o() [builtins fixtures/callable.pyi] [out] == [case testRefreshFunctionalNamedTuple] import a [file a.py] from typing import NamedTuple from b import L A = NamedTuple('A', []) a: A def g() -> None: x = L(A()) x.f(a) [file b.pyi] from typing import TypeVar, Generic, overload T = TypeVar('T') class L(Generic[T]): def __init__(self, x: T) -> None: pass @overload def f(self) -> None: pass @overload def f(self, a: T) -> None: pass [file a.py.2] from typing import NamedTuple from b import L A = NamedTuple('A', []) a: A def g() -> None: x = L(A()) x.f(a) [out] == [case testRefreshSubclassNestedInFunction1] from a import C def f() -> None: class D(C): pass [file a.py] class C: pass [file a.py.2] [out] == main:1: error: Module 'a' has no attribute 'C' [case testRefreshSubclassNestedInFunction2] from a import C def f() -> None: class D(C): def g(self) -> None: super().__init__() d = D() [file a.py] class C: def __init__(self) -> None: pass [file a.py.2] class C: def __init__(self, x: int) -> None: pass 
[out] == main:5: error: Too few arguments for "__init__" of "C" main:6: error: Too few arguments for "D" [case testInferAttributeTypeAndMultipleStaleTargets] import a class A: def g(self) -> None: a.x self.x = 1 def f(self) -> None: a.x b = self.x self.x = 1 [file a.py] x = 0 [file a.py.2] x = '' [out] == [case testNamedTupleUpdate] import b [file a.py] from typing import NamedTuple N = NamedTuple('N', [('x', int)]) x = N(1) [file a.py.2] from typing import NamedTuple N = NamedTuple('N', [('x', str)]) x = N('hi') [file b.py] import a def f(x: a.N) -> None: pass f(a.x) [out] == [case testNamedTupleUpdate2] import b [file a.py] from typing import NamedTuple N = NamedTuple('N', [('x', int)]) x = N(1) [file a.py.2] from typing import NamedTuple N = NamedTuple('N', [('y', int)]) x = N(2) [file b.py] import a def f(x: a.N) -> None: pass f(a.x) [out] == [case testNamedTupleUpdate3] import c [file a.py] from typing import NamedTuple N = NamedTuple('N', [('x', int)]) x = N(1) [file a.py.2] from typing import NamedTuple N = NamedTuple('N', [('x', str)]) x = N('hi') [file b.py] import a from typing import NamedTuple M = NamedTuple('M', [('z', 'a.N')]) x = M(a.x) [file c.py] import a import b from typing import Tuple def lol(n: Tuple[Tuple[int]]) -> None: pass def f(x: b.M) -> None: lol(x) f(b.x) lol(b.x) [out] == c.py:7: error: Argument 1 to "lol" has incompatible type "M"; expected "Tuple[Tuple[int]]" c.py:9: error: Argument 1 to "lol" has incompatible type "M"; expected "Tuple[Tuple[int]]" [case testNamedTupleUpdate4] import b [file a.py] from typing import NamedTuple class N(NamedTuple): x: int x = N(1) [file a.py.2] from typing import NamedTuple class N(NamedTuple): x: str x = N('hi') [file b.py] import a def f(x: a.N) -> None: pass f(a.x) [out] == [case testTypedDictRefresh] [builtins fixtures/dict.pyi] import a [file a.py] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(dict(x=42, y=1337)) [file a.py.2] from 
mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(dict(x=42, y=1337)) # dummy change [out] == [case testTypedDictUpdate] import b [file a.py] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(dict(x=42, y=1337)) [file a.py.2] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': str}) p = Point(dict(x=42, y='lurr')) [file b.py] from a import Point def foo(x: Point) -> int: return x['x'] + x['y'] [builtins fixtures/dict.pyi] [out] == b.py:3: error: Unsupported operand types for + ("int" and "str") [case testTypedDictUpdate2] import b [file a.py] from mypy_extensions import TypedDict class Point(TypedDict): x: int y: int p = Point(dict(x=42, y=1337)) [file a.py.2] from mypy_extensions import TypedDict class Point(TypedDict): x: int y: str p = Point(dict(x=42, y='lurr')) [file b.py] from a import Point def foo(x: Point) -> int: return x['x'] + x['y'] [builtins fixtures/dict.pyi] [out] == b.py:3: error: Unsupported operand types for + ("int" and "str") [case testBasicAliasUpdate] import b [file a.py] N = int x = 1 [file a.py.2] N = str x = 'hi' [file b.py] import a def f(x: a.N) -> None: pass f(a.x) [out] == [case testBasicAliasUpdateGeneric] import b [file a.py] from typing import Dict, TypeVar T = TypeVar('T') D = Dict[int, T] x = {1: 1} [file a.py.2] from typing import Dict, TypeVar T = TypeVar('T') D = Dict[str, T] x = {'hi': 1} [file b.py] import a def f(x: a.D[int]) -> None: pass f(a.x) [builtins fixtures/dict.pyi] [out] == [case testAliasFineNormalMod] import b [file a.py] A = int [file a.py.2] A = str [file b.py] import a x: a.A = int() [out] == b.py:2: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAliasFineNormalFunc] import b [file a.py] A = int [file a.py.2] A = str [file b.py] import a def f(x: a.A): if int(): x = int() [out] == b.py:4: error: Incompatible types in assignment (expression 
has type "int", variable has type "str") [case testAliasFineNormalClass] import b [file a.py] A = int [file a.py.2] A = str [file b.py] import a class C: x: a.A c = C() c.x = int() [out] == b.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAliasFineNormalClassBases] import b [file a.py] import c A = c.BaseI [file a.py.2] import c A = c.BaseS [file b.py] import a class C(a.A): x = int() [file c.py] class BaseI: x: int class BaseS: x: str [out] == b.py:3: error: Incompatible types in assignment (expression has type "int", base class "BaseS" defined the type as "str") [case testAliasFineGenericMod] import b [file a.py] from typing import Dict A = Dict[str, int] [file a.py.2] from typing import Dict A = Dict[str, str] [file b.py] import a x: a.A = {str(): int()} [builtins fixtures/dict.pyi] [out] == b.py:2: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" [case testAliasFineGenericFunc] import b [file a.py] from typing import Dict A = Dict[str, int] [file a.py.2] from typing import Dict A = Dict[str, str] [file b.py] import a def f(x: a.A): pass f({str(): int()}) [builtins fixtures/dict.pyi] [out] == b.py:4: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" [case testAliasFineForwardMod] import b [file b.py] x: A = int() A = int [file b.py.2] x: A = int() A = str [out] == b.py:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAliasFineForwardFunc] import b [file b.py] def f(x: A): x = int() A = int [file b.py.2] def f(x: A): if int(): x = int() A = str [out] == b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAliasFineChainedFunc] import b [file a.py] A = int [file a.py.2] A = str [file aa.py] import a B = a.A [file b.py] import aa def f(x: aa.B): if int(): x = int() [out] == b.py:4: error: Incompatible types in assignment (expression 
has type "int", variable has type "str") [case testAliasFineChainedClass] import b [file a.py] A = int [file a.py.2] A = str [file aa.py] import a B = a.A [file b.py] import aa class C: x: aa.B c = C() c.x = int() [out] == b.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAliasFineNestedMod] import b [file a.py] from typing import Dict A = Dict[str, int] [file a.py.2] from typing import Dict A = Dict[str, str] [file aa.py] from typing import Dict import a B = Dict[str, a.A] [file b.py] import aa x: aa.B = {'first': {str(): int()}} [builtins fixtures/dict.pyi] [out] == b.py:3: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" [case testAliasFineNestedFunc] import b [file a.py] from typing import Dict A = Dict[str, int] [file a.py.2] from typing import Dict A = Dict[str, str] [file aa.py] from typing import Dict import a B = Dict[str, a.A] [file b.py] import aa def f(x: aa.B): if int(): x = {'first': {str(): int()}} [builtins fixtures/dict.pyi] [out] == b.py:4: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" [case testAliasFineNestedFuncDirect] import b [file a.py] from typing import Dict A = Dict[str, int] [file a.py.2] from typing import Dict A = Dict[str, str] [file aa.py] from typing import Dict import a E = Dict [file b.py] import aa def f(x: aa.E[str, aa.a.A]): if int(): x = {'first': {str(): int()}} [builtins fixtures/dict.pyi] [out] == b.py:4: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" [case testAliasFineNonGenericToGeneric] import b [file a.py] from typing import Dict, TypeVar T = TypeVar('T') A = Dict[T, int] [file a.py.2] A = str [file b.py] import a def f(x: a.A[str]): pass [builtins fixtures/dict.pyi] [out] == b.py:2: error: "str" expects no type arguments, but 1 given [case testAliasFineGenericToNonGeneric] import b [file a.py] A = str [file a.py.2] from typing import Dict, TypeVar T = TypeVar('T') A = 
Dict[T, int] [file b.py] import a def f(x: a.A): pass reveal_type(f) [builtins fixtures/dict.pyi] [out] b.py:4: note: Revealed type is 'def (x: builtins.str) -> Any' == b.py:4: note: Revealed type is 'def (x: builtins.dict[Any, builtins.int]) -> Any' [case testAliasFineChangedNumberOfTypeVars] import b [file a.py] from typing import Dict, TypeVar T = TypeVar('T') A = Dict[T, int] [file a.py.2] from typing import Dict, TypeVar T = TypeVar('T') S = TypeVar('S') A = Dict[T, S] [file b.py] import a def f(x: a.A[str]): pass [builtins fixtures/dict.pyi] [out] == b.py:2: error: Bad number of arguments for type alias, expected: 2, given: 1 [case testAliasFineAdded] import b [file a.py] [file a.py.2] A = int [file b.py] import a x: a.A [out] b.py:2: error: Name 'a.A' is not defined == [case testAliasFineDeleted] import b [file a.py] A = int [file a.py.2] [file b.py] import a x: a.A [out] == b.py:2: error: Name 'a.A' is not defined [case testAliasFineClassToAlias] import b [file a.py] class A: pass [file a.py.2] A = int [file b.py] import a x: a.A x = 1 [out] b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "A") == [case testAliasFineAliasToClass] import b [file a.py] A = int [file a.py.2] class A: pass [file b.py] import a x: a.A x = 1 [out] == b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "A") [case testAliasFineComponentDeleted] import b [file a.py] class B: pass [file a.py.2] x = 1 [file b.py] import a from typing import Dict, TypeVar T = TypeVar('T') A = Dict[T, a.B] def f(x: A[int]): pass [builtins fixtures/dict.pyi] [out] == b.py:4: error: Name 'a.B' is not defined [case testAliasFineTargetDeleted] import c [file a.py] A = int [file b.py] import a B = a.A [file b.py.2] x = 1 [file c.py] import b def f(x: b.B): pass [out] == c.py:2: error: Name 'b.B' is not defined [case testAliasFineClassInFunction] import b [file a.py] A = int [file a.py.2] A = str [file b.py] import a def 
f() -> None: class C: x: a.A = int() [out] == b.py:4: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAliasFineInitNormalMod] import c [file a.py] class A: def __init__(self, x: int) -> None: pass [file a.py.2] class A: def __init__(self, x: str) -> None: pass [file b.py] import a B = a.A [file c.py] from b import B B(int()) [out] == c.py:2: error: Argument 1 to "A" has incompatible type "int"; expected "str" [case testAliasFineInitNormalFunc] import c [file a.py] class A: def __init__(self, x: int) -> None: pass [file a.py.2] class A: def __init__(self, x: str) -> None: pass [file b.py] import a B = a.A [file c.py] from b import B def f() -> None: B(int()) [out] == c.py:3: error: Argument 1 to "A" has incompatible type "int"; expected "str" [case testAliasFineInitGenericMod] import c [file a.py] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): def __init__(self, x: T) -> None: pass [file a.py.2] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): def __init__(self, x: S) -> None: pass [file b.py] import a B = a.A[int, str] [file c.py] from b import B B(int()) [out] == c.py:2: error: Argument 1 to "A" has incompatible type "int"; expected "str" [case testAliasFineInitGenericFunc] import c [file a.py] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): def __init__(self, x: T) -> None: pass [file a.py.2] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): def __init__(self, x: S) -> None: pass [file b.py] import a B = a.A[int, str] [file c.py] from b import B def f() -> None: B(str()) [out] c.py:3: error: Argument 1 to "A" has incompatible type "str"; expected "int" == [case testAliasFineInitChainedMod] import d [file a.py] class A: def __init__(self, x: int) -> None: pass [file a.py.2] class A: def __init__(self, x: str) -> None: pass [file 
b.py] import a B = a.A [file c.py] import b C = b.B [file d.py] from c import C C(int()) [out] == d.py:2: error: Argument 1 to "A" has incompatible type "int"; expected "str" [case testAliasFineInitChainedFunc] import d [file a.py] class A: def __init__(self, x: int) -> None: pass [file a.py.2] class A: def __init__(self, x: str) -> None: pass [file b.py] import a B = a.A [file c.py] import b C = b.B [file d.py] from c import C def f() -> None: C(str()) [out] d.py:3: error: Argument 1 to "A" has incompatible type "str"; expected "int" == [case testNonePartialType1] import a a.y x = None def f() -> None: global x x = 1 [file a.py] y = 0 [file a.py.2] y = '' [out] main:4: error: Need type annotation for 'x' == main:4: error: Need type annotation for 'x' [case testNonePartialType2] import a a.y x = None def f(): global x x = 1 [file a.py] y = 0 [file a.py.2] y = '' [out] main:4: error: Need type annotation for 'x' == main:4: error: Need type annotation for 'x' [case testNonePartialType3] import a [file a.py] [file a.py.2] y = None def f() -> None: global y y = '' [out] == a.py:1: error: Need type annotation for 'y' [case testNonePartialType4] import a [file a.py] y = None def f() -> None: global y y = '' [file a.py.2] from typing import Optional y: Optional[str] = None def f() -> None: global y y = '' [out] a.py:1: error: Need type annotation for 'y' == [case testSkippedClass1] import a [file a.py] class A: pass [file a.py.2] import sys if sys.platform == 'xyz': class A: pass [builtins fixtures/ops.pyi] [out] == [case testSkippedClass2] import a [file a.py] import sys if sys.platform == 'xyz': class A: pass [file a.py.2] import sys if sys.platform == 'xyz': class A: pass [builtins fixtures/ops.pyi] [out] == [case testSkippedClass3] import a [file a.py] import sys if sys.platform == 'xyz': class A: pass [file a.py.2] class A: pass [builtins fixtures/ops.pyi] [out] == [case testSkippedClass4] import a [file a.py] import sys if sys.platform == 'xyz': class A: pass else: 
class A: pass [file a.py.2] import sys if sys.platform == 'xyz': class A: pass else: class A: pass [builtins fixtures/ops.pyi] [out] == [case testNewTypeDependencies1] from a import N def f(x: N) -> None: x.y = 1 [file a.py] from typing import NewType from b import C N = NewType('N', C) [file b.py] class C: y: int [file b.py.2] class C: y: str [out] == main:4: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testNewTypeDependencies2] from a import N from b import C, D def f(x: C) -> None: pass def g(x: N) -> None: f(x) [file a.py] from typing import NewType from b import D N = NewType('N', D) [file b.py] class C: pass class D(C): pass [file b.py.2] class C: pass class D: pass [out] == main:7: error: Argument 1 to "f" has incompatible type "N"; expected "C" [case testNewTypeDependencies3] from a import N def f(x: N) -> None: x.y [file a.py] from typing import NewType from b import C N = NewType('N', C) [file a.py.2] from typing import NewType from b import D N = NewType('N', D) [file b.py] class C: y: int class D: pass [out] == main:4: error: "N" has no attribute "y" [case testNamedTupleWithinFunction] from typing import NamedTuple import b def f() -> None: b.x n = NamedTuple('n', []) [file b.py] x = 0 [file b.py.2] x = '' [out] == [case testNamedTupleFallback] # This test will fail without semantic analyzer pass 2 patches import a [file a.py] import b [file b.py] from typing import NamedTuple import c c.x class N(NamedTuple): count: int [file c.py] x = 0 [file c.py.2] x = '' [builtins fixtures/tuple.pyi] [out] b.py:5: error: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], Any], int]") == b.py:5: error: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], Any], int]") [case testReprocessEllipses1] import a [file a.py] from typing import Tuple def foo(x: Tuple[int, 
...]) -> None: pass [file a.py.2] from typing import Tuple def foo(x: Tuple[int, ...]) -> None: pass [builtins fixtures/tuple.pyi] [out] == [case testReprocessEllipses2] import a [file a.py] from typing import Callable def foo(x: Callable[..., int]) -> None: pass [file a.py.2] from typing import Callable def foo(x: Callable[..., int]) -> None: pass [out] == [case testReprocessCallableArg] import a [file a.py] from typing import Callable from mypy_extensions import Arg def a(f: Callable[[Arg(int, 'x')], int]) -> None: pass [file a.py.2] from typing import Callable from mypy_extensions import Arg def a(f: Callable[[Arg(int, 'x')], int]) -> None: pass [builtins fixtures/dict.pyi] [out] == [case testImplicitTuple1] import a [file a.py] # Bogus annotation in nested function masked because outer function # isn't annotated def unchecked(): def inner(): # type: () -> (str, int) return 'lol', 10 [file a.py.2] # dummy change def unchecked(): def inner(): # type: () -> (str, int) return 'lol', 10 [out] == [case testImplicitTuple2] import a [file a.py] def inner(): # type: () -> (str, int) return 'lol', 10 [file a.py.2] # dummy change def inner(): # type: () -> (str, int) return 'lol', 10 [out] a.py:1: error: Syntax error in type annotation a.py:1: note: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) == a.py:2: error: Syntax error in type annotation a.py:2: note: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) [case testImplicitTuple3] import a [file a.py] (x, y) = 1, 'hi' # type: (int, str) [file a.py.2] # dummy change (x, y) = 1, 'hi' # type: (int, str) [out] == [case testCastConfusion] import b [file a.py] from typing import cast class Thing: def foo(self) -> None: pass thing = cast(Thing, Thing()) [file b.py] from typing import Optional from a import Thing, thing class User: def __init__(self, x: Optional[Thing]) -> None: self.x = x if x else thing def use(self) -> None: self.x.foo() [file a.py.2] from typing import cast class Thing: def foo(self) 
-> None: pass thing = cast(Thing, Thing()) # update [file b.py.2] from typing import Optional from a import Thing, thing class User: def __init__(self, x: Optional[Thing]) -> None: self.x = x if x else thing def use(self) -> None: self.x.foo() # update [builtins fixtures/ops.pyi] [out] == [case testNoStrictOptionalModule] import a a.y = a.x [file a.py] from typing import Optional x: int y: int [file a.py.2] from typing import Optional x: Optional[int] y: int [file a.py.3] from typing import Optional x: Optional[str] y: int [out] == == main:2: error: Incompatible types in assignment (expression has type "Optional[str]", variable has type "int") [case testNoStrictOptionalFunction] import a from typing import Optional def f() -> None: x: Optional[int] a.g(x) [file a.py] from typing import Optional def g(x: Optional[int]) -> None: pass [file a.py.2] from typing import Optional def g(x: int) -> None: pass [file a.py.3] from typing import Optional def g(x: str) -> None: pass [out] == == main:5: error: Argument 1 to "g" has incompatible type "Optional[int]"; expected "str" [case testNoStrictOptionalMethod] import a from typing import Optional class C: def f(self) -> None: x: Optional[int] a.B().g(x) [file a.py] from typing import Optional class B: def g(self, x: Optional[int]) -> None: pass [file a.py.2] from typing import Optional class B: def g(self, x: int) -> None: pass [file a.py.3] from typing import Optional class B: def g(self, x: str) -> None: pass [out] == == main:6: error: Argument 1 to "g" of "B" has incompatible type "Optional[int]"; expected "str" [case testStrictOptionalModule] # flags: --strict-optional import a a.y = a.x [file a.py] from typing import Optional x: int y: int [file a.py.2] from typing import Optional x: Optional[int] y: int [out] == main:3: error: Incompatible types in assignment (expression has type "Optional[int]", variable has type "int") [case testStrictOptionalFunction] # flags: --strict-optional import a from typing import Optional 
def f() -> None: x: Optional[int] a.g(x) [file a.py] from typing import Optional def g(x: Optional[int]) -> None: pass [file a.py.2] from typing import Optional def g(x: int) -> None: pass [out] == main:6: error: Argument 1 to "g" has incompatible type "Optional[int]"; expected "int" [case testStrictOptionalMethod] # flags: --strict-optional import a from typing import Optional class C: def f(self) -> None: x: Optional[int] a.B().g(x) [file a.py] from typing import Optional class B: def g(self, x: Optional[int]) -> None: pass [file a.py.2] from typing import Optional class B: def g(self, x: int) -> None: pass [out] == main:7: error: Argument 1 to "g" of "B" has incompatible type "Optional[int]"; expected "int" [case testPerFileStrictOptionalModule] import a [file mypy.ini] \[mypy] strict_optional = False \[mypy-a.*] strict_optional = True [file a.py] from typing import Optional import b x: int y: int = x [file b.py] from typing import Optional x: int y: int = x [file b.py.2] from typing import Optional x: Optional[int] y: int = x [file a.py.3] from typing import Optional import b x: Optional[int] y: int = x [out] == == a.py:4: error: Incompatible types in assignment (expression has type "Optional[int]", variable has type "int") [case testPerFileStrictOptionalModuleOnly] import a [file mypy.ini] \[mypy] strict_optional = False \[mypy-a.*] strict_optional = True [file a.py] from typing import Optional import b y: int = b.x class Dummy: def f(self) -> None: pass [file b.py] from typing import Optional import c x: int y: int = c.x class Dummy: def f(self) -> None: pass [file c.py] from typing import Optional x: int [file c.py.2] from typing import Optional x: Optional[int] [file b.py.3] from typing import Optional import c x: Optional[int] y: int = c.x [file a.py.4] from typing import Optional import b y: Optional[int] = b.x class Dummy: def f(self) -> None: pass [out] == == a.py:3: error: Incompatible types in assignment (expression has type "Optional[int]", variable 
has type "int") == [case testPerFileStrictOptionalFunction] import a [file mypy.ini] \[mypy] strict_optional = False \[mypy-b.*] strict_optional = True [file a.py] from typing import Optional import b def f() -> None: x: int x = b.g(x) [file b.py] from typing import Optional import c def g(x: Optional[int]) -> Optional[int]: return c.h(x) [file c.py] from typing import Optional def h(x: Optional[int]) -> int: pass [file c.py.2] from typing import Optional def h(x: int) -> int: pass [file b.py.3] from typing import Optional import c def g(x: int) -> Optional[int]: return c.h(x) [out] == b.py:4: error: Argument 1 to "h" has incompatible type "Optional[int]"; expected "int" == [case testPerFileStrictOptionalMethod] import a [file mypy.ini] \[mypy] strict_optional = False \[mypy-b.*] strict_optional = True [file a.py] from typing import Optional import b class A: def f(self) -> None: x: int x = b.B().g(x) [file b.py] from typing import Optional import c class B: def g(self, x: Optional[int]) -> Optional[int]: return c.C().h(x) [file c.py] from typing import Optional class C: def h(self, x: Optional[int]) -> int: pass [file c.py.2] from typing import Optional class C: def h(self, x: int) -> int: pass [file b.py.3] from typing import Optional import c class B: def g(self, x: int) -> Optional[int]: return c.C().h(x) [out] == b.py:5: error: Argument 1 to "h" of "C" has incompatible type "Optional[int]"; expected "int" == [case testTypeVarValuesFunction] import a [file a.py] from typing import TypeVar from c import A, B T = TypeVar('T', A, B) def f(x: T) -> T: x.x = int() return x [file c.py] class A: x: int class B: x: int [file c.py.2] class A: x: int class B: x: str [out] == a.py:6: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testTypeVarValuesClass] import a [file a.py] import c class C: x: c.D[c.A] [file c.py] from typing import TypeVar, Generic class A: pass class B: pass class C: pass T = TypeVar('T', A, B, C) 
class D(Generic[T]): pass [file c.py.2] from typing import TypeVar, Generic class A: pass class B: pass class C: pass T = TypeVar('T', B, C) class D(Generic[T]): pass [out] == a.py:3: error: Value of type variable "T" of "D" cannot be "A" [case testTypeVarValuesMethod1] import a [file a.py] from typing import Generic import c class G(Generic[c.T]): def f(self, x: c.T) -> None: x.x = int() [file c.py] from typing import TypeVar class A: x: int class B: x: int class C: x: str T = TypeVar('T', A, B, C) [file c.py.2] from typing import TypeVar class A: x: int class B: x: int class C: x: str T = TypeVar('T', A, B) [out] a.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") == [case testTypeVarValuesMethod2] import a [file a.py] from typing import Generic import c class G(Generic[c.T]): def f(self, x: c.T) -> None: x.x = int() [file c.py] from typing import TypeVar class A: x: int class B: x: int T = TypeVar('T', A, B) [file c.py.2] from typing import TypeVar class A: x: int class B: x: str T = TypeVar('T', A, B) [out] == a.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testTypeVarBoundFunction] import a [file a.py] from typing import TypeVar from c import B T = TypeVar('T', bound=B) def f(x: T) -> T: x.x = int() return x [file c.py] class B: x: int [file c.py.2] class B: x: str [out] == a.py:6: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testTypeVarBoundClass] import a [file a.py] import c class C: x: c.D[c.A] [file c.py] from typing import TypeVar, Generic class A: pass class B: pass T = TypeVar('T', bound=A) class D(Generic[T]): pass [file c.py.2] from typing import TypeVar, Generic class A: pass class B: pass T = TypeVar('T', bound=B) class D(Generic[T]): pass [out] == a.py:3: error: Type argument "c.A" of "D" must be a subtype of "c.B" [case testTypeVarValuesRuntime] from mod import I, S, D A = I x = D[S, 
A]() [file mod.py] import submod from typing import Generic class D(Generic[submod.T, submod.U]): pass class I: pass class S: pass [file submod.py] from typing import TypeVar T = TypeVar('T') U = TypeVar('U') [file submod.py.2] from typing import TypeVar T = TypeVar('T', int, str) U = TypeVar('U', int, str) [out] == main:3: error: Value of type variable "submod.T" of "D" cannot be "S" main:3: error: Value of type variable "submod.U" of "D" cannot be "I" [case testTypeVarBoundRuntime] from mod import I, S, D A = I x = D[S, A]() [file mod.py] import submod from typing import Generic class D(Generic[submod.T, submod.U]): pass class I: pass class S: pass [file submod.py] from typing import TypeVar T = TypeVar('T', bound=int) U = TypeVar('U', bound=int) [file submod.py.2] from typing import TypeVar T = TypeVar('T') U = TypeVar('U') [out] main:3: error: Value of type variable "submod.T" of "D" cannot be "S" main:3: error: Value of type variable "submod.U" of "D" cannot be "I" == [case testGenericFineCallableNormal] import a [file a.py] import b x: int = b.f(int()) [file b.py] from c import g f = g [file c.py] from typing import TypeVar class B: pass T = TypeVar('T') def g(x: T) -> T: pass [file c.py.2] from typing import TypeVar class B: pass T = TypeVar('T', str, B) def g(x: T) -> T: pass [out] == a.py:2: error: Value of type variable "T" of function cannot be "int" [case testGenericFineCallableNamed] import a [file a.py] import b x: int = b.f(x=int()) [file b.py] from c import g f = g [file c.py] from typing import TypeVar class B: pass T = TypeVar('T') def g(x: T) -> T: pass [file c.py.2] from typing import TypeVar class B: pass T = TypeVar('T') def g(y: T) -> T: pass [out] == a.py:2: error: Unexpected keyword argument "x" c.py:4: note: Called function defined here [case testGenericFineCallableInBound] import a [file a.py] import b x: int = b.f()(int()) [file b.py] from c import g f = g [file c.py] from typing import Callable, TypeVar class B: pass T = TypeVar('T') 
def g() -> Callable[[T], T]: pass [file c.py.2] from typing import Callable, TypeVar class B: pass T = TypeVar('T', str, B) def g() -> Callable[[T], T]: pass [out] == a.py:2: error: Value of type variable "T" of function cannot be "int" [case testGenericFineCallableAddedBound] import a [file a.py] import b x: int = b.f(int()) [file b.py] from c import g f = g [file c.py] from typing import TypeVar class B: pass T = TypeVar('T') def g(x: T) -> T: pass [file c.py.2] from typing import TypeVar class B: pass T = TypeVar('T', bound=B) def g(x: T) -> T: pass [out] == a.py:2: error: Value of type variable "T" of function cannot be "int" [case testGenericFineCallableBoundDeleted-only_when_cache] # See https://github.com/python/mypy/issues/4783 import a [file a.py] import b x: int = b.f(int()) [file b.py] from c import g f = g [file c.py] from typing import TypeVar import d T = TypeVar('T', bound=d.B) def g(x: T) -> T: pass [file d.py] class B: pass [file d.py.2] # empty [out] a.py:2: error: Value of type variable "T" of function cannot be "int" == c.py:3: error: Name 'd.B' is not defined [case testGenericFineCallableToNonGeneric] import a [file a.py] import b x: int = b.f(x=int()) [file b.py] from c import g f = g [file c.py] from typing import TypeVar T = TypeVar('T') def g(x: T) -> T: pass [file c.py.2] from typing import TypeVar class T: pass def g(x: T) -> T: pass [out] == a.py:2: error: Incompatible types in assignment (expression has type "T", variable has type "int") a.py:2: error: Argument "x" has incompatible type "int"; expected "T" [case testGenericFineCallableToGenericClass] import a [file a.py] import b x: int = b.f(x=int()) [file b.py] from c import g f = g [file c.py] from typing import TypeVar, Generic T = TypeVar('T') def g(x: T) -> T: pass [file c.py.2] from typing import TypeVar, Generic T = TypeVar('T') class g(Generic[T]): def __init__(self, x: T) -> None: pass [out] == a.py:2: error: Incompatible types in assignment (expression has type "g[int]", 
variable has type "int") [case testMakeClassNoLongerAbstract1] [file z.py] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass [file b.py] from z import I class Foo(I): pass def x() -> Foo: return None [file z.py.2] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): pass [file b.py.2] from z import I class Foo(I): pass def x() -> Foo: return Foo() [out] == [case testMakeClassNoLongerAbstract2] -- this version never failed, but it is just a file-renaming -- away from the above test that did [file a.py] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass [file b.py] from a import I class Foo(I): pass def x() -> Foo: return None [file a.py.2] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): pass [file b.py.2] from a import I class Foo(I): pass def x() -> Foo: return Foo() [out] == [case testRefreshClassBasedEnum] import aa [file aa.py] import a [file a.py] from enum import Enum import b b.x class C(Enum): X = 0 [file b.py] x = 0 [file b.py.2] x = '' [file aa.py.3] from a import C c: C c = C.X if int(): c = 1 [out] == == aa.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "C") [case testRefreshClassBasedIntEnum] import aa [file aa.py] import a [file a.py] from enum import IntEnum import b b.x class C(IntEnum): X = 0 x: int x = C.X [file b.py] x = 0 [file b.py.2] x = '' [file aa.py.3] from a import C c: C c = C.X if int(): c = 1 n: int n = C.X if int(): n = c [out] == == aa.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "C") [case testClassBasedEnumPropagation1] import a [file a.py] from b import C def f(x: C) -> None: pass f(C.X) f(C.Y) [file b.py] from enum import Enum class C(Enum): X = 0 Y = 1 [file b.py.2] from enum import Enum class C(Enum): X = 0 [typing fixtures/typing-full.pyi] [out] == a.py:5: error: "Type[C]" has no 
attribute "Y" [case testClassBasedEnumPropagation2] import a [file a.py] from b import C def f(x: int) -> None: pass f(C.X) f(C.Y) [file b.py] class C: X = 0 Y = 1 [file b.py.2] from enum import Enum class C(Enum): X = 0 Y = 1 [out] == a.py:4: error: Argument 1 to "f" has incompatible type "C"; expected "int" a.py:5: error: Argument 1 to "f" has incompatible type "C"; expected "int" [case testRefreshFuncBasedEnum] import aa [file aa.py] import a [file a.py] from enum import Enum import b b.x C = Enum('C', [('X', 0)]) [file b.py] x = 0 [file b.py.2] x = '' [file aa.py.3] from a import C c: C c = C.X if int(): c = 1 [out] == == aa.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "C") [case testRefreshFuncBasedIntEnum] import aa [file aa.py] import a [file a.py] from enum import IntEnum import b b.x C = IntEnum('C', 'X') x: int x = C.X [file b.py] x = 0 [file b.py.2] x = '' [file aa.py.3] from a import C c: C c = C.X if int(): c = 1 # Error n: int n = C.X n = c [out] == == aa.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "C") [case testFuncBasedEnumPropagation1] import a [file a.py] from b import C def f(x: C) -> None: pass f(C.X) f(C.Y) [file b.py] from enum import Enum C = Enum('C', 'X Y') [file b.py.2] from enum import Enum C = Enum('C', 'X') [typing fixtures/typing-full.pyi] [out] == a.py:5: error: "Type[C]" has no attribute "Y" [case testFuncBasedEnumPropagation2] import a [file a.py] from b import C def f(x: int) -> None: pass f(C.X) f(C.Y) [file b.py] class C: X = 0 Y = 1 [file b.py.2] from enum import Enum C = Enum('C', [('X', 0), ('Y', 1)]) [out] == a.py:4: error: Argument 1 to "f" has incompatible type "C"; expected "int" a.py:5: error: Argument 1 to "f" has incompatible type "C"; expected "int" [case testChangeTypeVarToFunction] import a from typing import Generic Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass def outer() -> None: def 
func(x: a.T) -> Alias[a.T]: pass [file a.py] from typing import TypeVar T = TypeVar('T') [file a.py.2] from typing import TypeVar def T() -> None: pass [out] == main:4: error: "C" expects no type arguments, but 1 given main:4: error: Function "a.T" is not valid as a type main:4: note: Perhaps you need "Callable[...]" or a callback protocol? main:6: error: Free type variable expected in Generic[...] main:7: error: Function "a.T" is not valid as a type main:7: note: Perhaps you need "Callable[...]" or a callback protocol? main:10: error: Function "a.T" is not valid as a type main:10: note: Perhaps you need "Callable[...]" or a callback protocol? main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeTypeVarToModule] import a from typing import Generic Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass [file a.py] from typing import TypeVar T = TypeVar('T') [file T.py.2] [file a.py.3] from typing import TypeVar import T [out] == == main:4: error: "C" expects no type arguments, but 1 given main:4: error: Module "T" is not valid as a type main:6: error: Free type variable expected in Generic[...] 
main:7: error: Module "T" is not valid as a type main:10: error: Module "T" is not valid as a type main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeClassToModule] import a x: a.C def f() -> None: a.C() class A: def meth(self) -> None: def inner() -> a.C: pass [file a.py] class C: pass [file C.py.2] [file a.py.3] import C [builtins fixtures/module.pyi] [out] == == main:3: error: Module "C" is not valid as a type main:5: error: Module not callable main:8: error: Module "C" is not valid as a type [case testChangeTypeVarToTypeAlias] import a from typing import Generic Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass [file a.py] from typing import TypeVar T = TypeVar('T') [file a.py.2] from typing import TypeVar T = int [out] == main:4: error: "C" expects no type arguments, but 1 given main:6: error: Free type variable expected in Generic[...] main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeTypeAliasToModule] import a x: a.C def f() -> None: a.C() class A: def meth(self) -> None: def inner() -> a.C: pass [file a.py] import b C = b.D [file b.py] class D: pass [file D.py.2] [file b.py.3] import D [builtins fixtures/module.pyi] [out] == == main:3: error: Module "D" is not valid as a type main:5: error: Module not callable main:8: error: Module "D" is not valid as a type [case testChangeTypeAliasToModuleUnqualified] from a import C x: C def f() -> None: C() class A: def meth(self) -> None: def inner() -> C: pass [file a.py] from b import D C = D [file b.py] class D: pass [file D.py.2] [file b.py.3] import D [builtins fixtures/module.pyi] [out] == == main:3: error: Module "D" is not valid as a type main:5: error: Module not callable main:8: error: Module "D" is not valid as a type [case testChangeFunctionToVariableAndRefreshUsingStaleDependency] import a import c [file a.py] import c def f() -> c.A: 
pass [file a.py.2] f = 1 [file c.py] class A: pass [file c.py.3] [out] == == [case testChangeFunctionToTypeVarAndRefreshUsingStaleDependency] import a import c [file a.py] import c def f() -> c.A: pass [file a.py.2] from typing import TypeVar f = TypeVar('f') [file c.py] class A: pass [file c.py.3] [out] == == [case testChangeFunctionToModuleAndRefreshUsingStaleDependency] import a import c [file a.py] import c def f() -> c.A: pass [file a.py.2] import c as f [file c.py] class A: pass [file c.py.3] [out] == == [case testChangeFunctionToTypeAliasAndRefreshUsingStaleDependency1] import a import c [file a.py] import c def f() -> c.A: pass [file a.py.2] f = int [file c.py] class A: pass [file c.py.3] [out] == == [case testChangeFunctionToTypeAliasAndRefreshUsingStaleDependency2] import a import c [file a.py] import c def f() -> c.A: pass [file a.py.2] from typing import List f = List[int] [file c.py] class A: pass [file c.py.3] [builtins fixtures/list.pyi] [out] == == [case testChangeFunctionToClassAndRefreshUsingStaleDependency] import a import c [file a.py] import c def f() -> c.A: pass [file a.py.2] class f: pass [file c.py] class A: pass [file c.py.3] [out] == == [case testClassToVariableAndRefreshUsingStaleDependency] import a import c [file a.py] import c class A: def f(self) -> c.A: pass [file a.py.2] A = 0 [file c.py] class A: pass [file c.py.3] [out] == == [case testFunctionToImportedFunctionAndRefreshUsingStaleDependency] import a import c [file a.py] import c def f() -> c.A: pass [file a.py.2] from d import f [file c.py] class A: pass [file c.py.3] [file d.py] def g() -> None: pass def f() -> None: g() [out] == == [case testMethodToVariableAndRefreshUsingStaleDependency] import a import c [file a.py] import c class B: def f(self) -> c.A: pass [file a.py.2] class B: f = 0 [file c.py] class A: pass [file c.py.3] [out] == == [case testChangeGenericFunctionToVariable] import a x: int y: int = a.f(x) class Dummy: def g(self) -> None: a.f(x) [file a.py] from 
typing import TypeVar T = TypeVar('T') def f(x: T) -> T: pass [file a.py.2] from typing import TypeVar T = TypeVar('T') f = 42 [out] == main:3: error: "int" not callable main:6: error: "int" not callable [case testChangeGenericClassToVariable] import a x: int a.A(x) class Dummy: def g(self) -> None: a.A(x) [file a.py] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, x: T) -> None: pass [file a.py.2] from typing import TypeVar, Generic T = TypeVar('T') A = 'no way' [out] == main:3: error: "str" not callable main:6: error: "str" not callable [case testChangeGenericMethodToVariable] import a x: int y: int = a.A(x).f() class Dummy: def g(self) -> None: a.A(x).f() [file a.py] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, x: T) -> None: pass def f(self) -> T: pass [file a.py.2] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): f: T def __init__(self, x: T) -> None: pass [out] == main:3: error: "int" not callable main:6: error: "int" not callable [case testRefreshNestedClassWithSelfReference] import a [file a.py] import b def f(self) -> None: b.y class C: z: C [file b.py] y = 0 [file b.py.2] y = '' [file b.py.3] y = 0 [out] == == [case testMultipleAssignment] import a [file a.py] from b import f def h(x: str) -> None: pass class C: def __init__(self) -> None: self.a, self.b = f() def g(self) -> None: h(self.a) [file b.py] from typing import Tuple def f() -> Tuple[str, int]: pass [file b.py.2] from typing import Tuple def f() -> Tuple[int, object]: pass [file b.py.3] from typing import Tuple def f() -> Tuple[str, int]: pass [out] == a.py:10: error: Argument 1 to "h" has incompatible type "int"; expected "str" == [case testMultipleLvalues] import a [file a.py] from b import f def h(x: str) -> None: pass class C: def __init__(self) -> None: self.a = self.b = f() def g(self) -> None: h(self.a) h(self.b) [file b.py] def f() -> str: pass [file b.py.2] def f() -> 
int: pass [file b.py.3] def f() -> str: pass [out] == a.py:10: error: Argument 1 to "h" has incompatible type "int"; expected "str" a.py:11: error: Argument 1 to "h" has incompatible type "int"; expected "str" == [case testNoOpUpdateFineGrainedIncremental1] # cmd: mypy a.py [file a.py] 1() [file b.py.2] # Note: this file is not part of the build [file a.py.3] x = 1 [out] a.py:1: error: "int" not callable == a.py:1: error: "int" not callable == [case testNoOpUpdateFineGrainedIncremental2] # cmd: mypy a.py [file a.py] 1() [file a.py.2] 1() [file a.py.3] x = 1 [file a.py.4] x = 1 [out] a.py:1: error: "int" not callable == a.py:1: error: "int" not callable == == [case testNonExistentFileOnCommandLine1] # cmd: mypy a.py nonexistent.py [file a.py] [file a.py.2] 1() [out] mypy: can't read file 'tmp/nonexistent.py': No such file or directory == mypy: can't read file 'tmp/nonexistent.py': No such file or directory [case testNonExistentFileOnCommandLine2] # cmd: mypy a.py # cmd2: mypy a.py nonexistent.py [file a.py] [file a.py.2] 1() [out] == a.py:1: error: "int" not callable [case testNonExistentFileOnCommandLine3] # cmd: mypy a.py # cmd2: mypy a.py nonexistent.py [file a.py] [file nonexistent.py] [delete nonexistent.py.2] [out] == [case testNonExistentFileOnCommandLine4] # cmd: mypy a.py nonexistent.py [file a.py] [file nonexistent.py] [delete nonexistent.py.2] [out] == [case testNonExistentFileOnCommandLine5] # cmd: mypy a.py nonexistent_stub.pyi # TODO: Should generate an error for missing file [file a.py] [file nonexistent_stub.pyi] [delete nonexistent_stub.pyi.2] [out] == [case testDunderNewUpdatedMethod] import a [file a.py] import b class A: def func(self) -> None: b.C(int()) [file b.py] class C: def __new__(cls, x: str) -> C: pass [file b.py.2] class C: def __new__(cls, x: int) -> C: pass [out] a.py:4: error: Argument 1 to "C" has incompatible type "int"; expected "str" == [case testDunderNewUpdatedSubclass] import a [file a.py] import b b.D(int()) [file b.py] from 
c import C class D(C): pass [file c.py] class C: def __new__(cls, x: str) -> C: pass [file c.py.2] class C: def __new__(cls, x: int) -> C: pass [out] a.py:3: error: Argument 1 to "D" has incompatible type "int"; expected "str" == [case testDunderNewUpdatedAlias] import a [file a.py] import b b.D(int()) [file b.py] from c import C D = C [file c.py] class C: def __new__(cls, x: int) -> C: pass [file c.py.2] class C: def __new__(cls, x: str) -> C: pass [out] == a.py:3: error: Argument 1 to "C" has incompatible type "int"; expected "str" [case testDunderNewUpdatedCallable] import a [file a.py] from typing import Callable, Any import b def func(arg: Callable[[int], Any]) -> None: pass func(b.C) [file b.py] class C: def __new__(cls, x: int) -> C: pass [file b.py.2] class C: def __new__(cls, x: str) -> C: pass [out] == a.py:6: error: Argument 1 to "func" has incompatible type "Type[C]"; expected "Callable[[int], Any]" [case testDunderNewDefine] import a [file a.py] import b class A: def func(self) -> None: b.C() [file b.py] class C: pass [file b.py.2] class C: def __new__(cls, x: int) -> C: pass [out] == a.py:4: error: Too few arguments for "C" [case testDunderNewInsteadOfInit] import a [file a.py] import b class A: def func(self) -> None: b.C(int()) [file b.py] class C: def __init__(cls, x: int) -> None: pass [file b.py.2] class C: def __new__(cls, x: int) -> C: pass [file b.py.3] class C: pass [out] == == a.py:4: error: Too many arguments for "C" -- Protocol tests [case testProtocolUpdateTypeInVariable] import a [file a.py] import b class C: x: int x: b.P = C() [file b.py] from typing import Protocol class P(Protocol): x: int [file b.py.2] from typing import Protocol class P(Protocol): x: str [out] == a.py:4: error: Incompatible types in assignment (expression has type "C", variable has type "P") a.py:4: note: Following member(s) of "C" have conflicts: a.py:4: note: x: expected "str", got "int" [case testProtocolUpdateTypeInFunction] import a [file a.py] import b class 
C: x: int c: C def f() -> None: def g(x: b.P) -> None: pass g(c) [file b.py] from typing import Protocol class P(Protocol): x: int [file b.py.2] from typing import Protocol class P(Protocol): x: str [out] == a.py:8: error: Argument 1 to "g" has incompatible type "C"; expected "P" a.py:8: note: Following member(s) of "C" have conflicts: a.py:8: note: x: expected "str", got "int" [case testProtocolUpdateTypeInClass] import a [file a.py] import b class C: x: int class A: class B: x: b.P y: B A().y.x = C() [file b.py] from typing import Protocol class P(Protocol): x: int [file b.py.2] from typing import Protocol class P(Protocol): x: str [out] == a.py:8: error: Incompatible types in assignment (expression has type "C", variable has type "P") a.py:8: note: Following member(s) of "C" have conflicts: a.py:8: note: x: expected "str", got "int" [case testProtocolAddAttrInFunction] import a [file a.py] import b class C: x: int def f() -> None: c: C def g(x: b.P) -> None: pass g(c) [file b.py] from typing import Protocol class P(Protocol): x: int [file b.py.2] from typing import Protocol class P(Protocol): x: int y: str [out] == a.py:8: error: Argument 1 to "g" has incompatible type "C"; expected "P" a.py:8: note: 'C' is missing following 'P' protocol member: a.py:8: note: y [case testProtocolRemoveAttrInClass] import a [file a.py] import b class C: x: int class A: class B: x: b.P y: B A().y.x = C() [file b.py] from typing import Protocol class P(Protocol): x: int y: str [file b.py.2] from typing import Protocol class P(Protocol): x: int [out] a.py:8: error: Incompatible types in assignment (expression has type "C", variable has type "P") a.py:8: note: 'C' is missing following 'P' protocol member: a.py:8: note: y == [case testProtocolConcreteUpdateTypeFunction] import a [file a.py] import b from typing import Protocol class P(Protocol): x: int def f() -> None: def g(x: P) -> None: pass g(b.C()) [file b.py] class C: x: int [file b.py.2] class C: x: str [out] == a.py:8: error: 
Argument 1 to "g" has incompatible type "C"; expected "P" a.py:8: note: Following member(s) of "C" have conflicts: a.py:8: note: x: expected "int", got "str" [case testProtocolConcreteUpdateTypeMethodGeneric] import a [file a.py] import b from typing import Protocol, TypeVar T = TypeVar('T') class P(Protocol[T]): x: T class C: def g(self, x: P[int]) -> None: pass def do(self) -> None: self.g(b.C()) [file b.py] class C: x: int [file b.py.2] class C: x: str [out] == a.py:10: error: Argument 1 to "g" of "C" has incompatible type "C"; expected "P[int]" a.py:10: note: Following member(s) of "C" have conflicts: a.py:10: note: x: expected "int", got "str" [case testProtocolConcreteRemoveAttrVariable] import a [file a.py] import b, c cc: c.C x: b.P = cc [file b.py] from typing import Protocol class P(Protocol): x: int [file c.py] class C: x: int [file c.py.2] class C: pass [out] == a.py:3: error: Incompatible types in assignment (expression has type "C", variable has type "P") [case testProtocolUpdateBaseGeneric] import a [file a.py] import b, c def g(x: c.P) -> None: pass g(b.C()) [file b.py] class C: x: int [file c.py] from typing import Protocol import d class P(d.PBase[int], Protocol): pass [file c.py.2] from typing import Protocol import d class P(d.PBase[str], Protocol): pass [file d.py] from typing import Protocol, TypeVar T = TypeVar('T') class PBase(Protocol[T]): x: T [out] == a.py:4: error: Argument 1 to "g" has incompatible type "C"; expected "P" a.py:4: note: Following member(s) of "C" have conflicts: a.py:4: note: x: expected "str", got "int" [case testProtocolConcreteUpdateBaseGeneric] import a [file a.py] import b from typing import Protocol class P(Protocol): x: int def f(x: P) -> None: pass f(b.B()) [file b.py] import c class B(c.C[int]): pass [file b.py.2] import c class B(c.C[str]): pass [file c.py] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): x: T [out] == a.py:7: error: Argument 1 to "f" has incompatible type "B"; expected 
"P" a.py:7: note: Following member(s) of "B" have conflicts: a.py:7: note: x: expected "int", got "str" [case testProtocolChangeGeneric] import a [file a.py] import b, c x: b.P = c.C() [file b.py] import b2 from typing import Protocol class P(b2.P2[str], Protocol): pass [file b2.py] from typing import Protocol, TypeVar T = TypeVar('T') class P2(Protocol[T]): x: T [file b2.py.2] from typing import Protocol, TypeVar T = TypeVar('T') class P2(Protocol): x: int [file c.py] class C: x: int [out] a.py:2: error: Incompatible types in assignment (expression has type "C", variable has type "P") a.py:2: note: Following member(s) of "C" have conflicts: a.py:2: note: x: expected "str", got "int" == b.py:3: error: "P2" expects no type arguments, but 1 given [case testProtocolToNonProtocol] import a [file a.py] import b, c b.f(c.C()) [file b.py] import d def f(x: d.D) -> None: pass [file c.py] import d class C: x: int [file d.py] from typing import Protocol class D(Protocol): x: int [file d.py.2] class D: x: int [file c.py.3] import d class C(d.D): pass [out] == a.py:2: error: Argument 1 to "f" has incompatible type "C"; expected "D" == [case testNonProtocolToProtocol] import a [file a.py] import b, c b.f(c.C()) [file b.py] import d def f(x: d.D) -> None: pass [file c.py] import d class C(d.D): pass [file d.py] class D: x: int [file d.py.2] from typing import Protocol class D(Protocol): x: int [file c.py.3] import d class C: x: int [out] == a.py:2: error: Cannot instantiate abstract class 'C' with abstract attribute 'x' == [case testInvalidateProtocolViaSuperClass] import a [file a.py] import b, c def func(x: c.P) -> None: pass func(b.B()) [file b.py] class B: x: int y: str [file c.py] from typing import Protocol import d class P(d.PBase, Protocol): x: int [file d.py] from typing import Protocol class PBase(Protocol): y: str [file d.py.2] from typing import Protocol class PBase(Protocol): y: int [out] == a.py:4: error: Argument 1 to "func" has incompatible type "B"; expected "P" 
a.py:4: note: Following member(s) of "B" have conflicts: a.py:4: note: y: expected "int", got "str" [case testProtocolInvalidateConcreteViaSuperClassUpdateType] import a [file a.py] import b def func(x: b.P) -> None: pass func(b.B()) [file b.py] from typing import Protocol import c class P(Protocol): x: int class B(c.C): pass [file c.py] class C: x: int [file c.py.2] class C: x: str [out] == a.py:4: error: Argument 1 to "func" has incompatible type "B"; expected "P" a.py:4: note: Following member(s) of "B" have conflicts: a.py:4: note: x: expected "int", got "str" [case testProtocolInvalidateConcreteViaSuperClassAddAttr] import a [file a.py] import b def func(x: b.P) -> None: pass bb: b.B func(bb) [file b.py] from typing import Protocol import c class P(Protocol): x: int class B(c.C): pass [file c.py] class C: pass [file c.py.2] class C: x: int [out] a.py:5: error: Argument 1 to "func" has incompatible type "B"; expected "P" == [case testProtocolInvalidateConcreteViaSuperClassRemoveAttr] import a [file a.py] import b def func(x: b.P) -> None: pass func(b.B()) [file b.py] from typing import Protocol import c class P(Protocol): x: int class B(c.C): pass [file c.py] class C: x: int [file c.py.2] class C: pass [out] == a.py:4: error: Argument 1 to "func" has incompatible type "B"; expected "P" [case testTwoProtocolsTwoFilesCrossedUpdateType-only_when_nocache] # this test and the next one (TwoProtocolsTwoFilesCrossedDeleteAttr) has errors ordered # opposite way with and without cache, therefore skip one of each. 
import a [file a.py] import b1 import b2 [file b1.py] import b2, d from typing import Protocol class P1(Protocol): x: int def f(x: b2.P2) -> None: pass f(d.D()) [file b2.py] import b1, d from typing import Protocol class P2(Protocol): x: int def f(x: b1.P1) -> None: pass f(d.D()) [file d.py] class D: x: int [file d.py.2] class D: x: str [out] == b1.py:7: error: Argument 1 to "f" has incompatible type "D"; expected "P2" b1.py:7: note: Following member(s) of "D" have conflicts: b1.py:7: note: x: expected "int", got "str" b2.py:7: error: Argument 1 to "f" has incompatible type "D"; expected "P1" b2.py:7: note: Following member(s) of "D" have conflicts: b2.py:7: note: x: expected "int", got "str" [case testTwoProtocolsTwoFilesCrossedDeleteAttr-only_when_cache] import a [file a.py] import b1 import b2 [file b1.py] import b2, d from typing import Protocol class P1(Protocol): x: int def f(x: b2.P2) -> None: pass f(d.D()) [file b2.py] import b1, d from typing import Protocol class P2(Protocol): x: int def f(x: b1.P1) -> None: pass f(d.D()) [file d.py] class D: x: int [file d.py.2] class D: y: int [out] b2.py:7: error: Argument 1 to "f" has incompatible type "D"; expected "P2" (diff) b1.py:7: error: Argument 1 to "f" has incompatible type "D"; expected "P1" [case testProtocolsInvalidateByRemovingBase] import a [file a.py] import b def func(x: b.P) -> None: pass func(b.B()) [file b.py] from typing import Protocol import c class P(Protocol): x: int class B(c.C): pass [file c.py] import d class C(d.D): pass [file c.py.2] import d class C: pass [file d.py] class D: x: int [out] == a.py:4: error: Argument 1 to "func" has incompatible type "B"; expected "P" [case testProtocolsInvalidateByRemovingMetaclass] import a [file a.py] import b def func(x: b.P) -> None: pass func(b.B) [file b.py] from typing import Protocol import c class P(Protocol): x: int class B(c.C): pass [file c.py] import d class C(metaclass=d.M): pass [file c.py.2] import d class C: pass [file d.py] class M(type): 
x: int [out] == a.py:4: error: Argument 1 to "func" has incompatible type "Type[B]"; expected "P" [case testProtocolVsProtocolSubUpdated] import a [file a.py] import b, c x: b.SuperP y: c.SubP x = y [file b.py] from typing import Protocol class SuperP(Protocol): x: int [file c.py] from typing import Protocol import d class SubP(d.PBase, Protocol): y: str [file d.py] from typing import Protocol class PBase(Protocol): x: int [file d.py.2] from typing import Protocol class PBase(Protocol): x: str [out] == a.py:4: error: Incompatible types in assignment (expression has type "SubP", variable has type "SuperP") a.py:4: note: Following member(s) of "SubP" have conflicts: a.py:4: note: x: expected "int", got "str" [case testProtocolVsProtocolSuperUpdated] import a [file a.py] import b, c x: b.SuperP y: c.SubP x = y [file b.py] from typing import Protocol import d class SuperP(d.PBase, Protocol): pass [file c.py] from typing import Protocol class SubP(Protocol): x: int [file d.py] from typing import Protocol class PBase(Protocol): x: int [file d.py.2] from typing import Protocol class PBase(Protocol): y: int [out] == a.py:4: error: Incompatible types in assignment (expression has type "SubP", variable has type "SuperP") [case testProtocolVsProtocolSuperUpdated2] import a [file a.py] import b, c x: b.SuperP y: c.SubP x = y [file b.py] from typing import Protocol import d class SuperP(d.PBase, Protocol): x: int [file c.py] from typing import Protocol class SubP(Protocol): x: int y: int [file d.py] from typing import Protocol class PBase(Protocol): y: int [file d.py.2] from typing import Protocol class PBase(Protocol): y: int z: int [out] == a.py:4: error: Incompatible types in assignment (expression has type "SubP", variable has type "SuperP") a.py:4: note: 'SubP' is missing following 'SuperP' protocol member: a.py:4: note: z [case testProtocolVsProtocolSuperUpdated3] import a [file a.py] import b, c x: b.SuperP y: c.SubP x = y [file b.py] from typing import Protocol import d 
class SuperP(d.PBase, Protocol): x: int [file c.py] from typing import Protocol class SubP(Protocol): x: int y: int [file d.py] from typing import Protocol import e class PBase(Protocol): y: int [file d.py.2] from typing import Protocol import e class PBase(e.NewP, Protocol): y: int [file e.py] from typing import Protocol class NewP(Protocol): z: int [out] == a.py:4: error: Incompatible types in assignment (expression has type "SubP", variable has type "SuperP") a.py:4: note: 'SubP' is missing following 'SuperP' protocol member: a.py:4: note: z [case testProtocolMultipleUpdates] import a [file a.py] import b, c x: b.P = c.C() [file b.py] from typing import Protocol import b2 class P(b2.P2, Protocol): x: int [file b2.py] from typing import Protocol class P2(Protocol): y: int [file c.py] import c2 class C(c2.C2): x: int [file c2.py] class C2: y: int [file b2.py.2] from typing import Protocol class P2(Protocol): y: int z: int [file c2.py.3] class C2: y: int z: int [file c2.py.4] class C2: y: int z: str [out] == a.py:2: error: Incompatible types in assignment (expression has type "C", variable has type "P") a.py:2: note: 'C' is missing following 'P' protocol member: a.py:2: note: z == == a.py:2: error: Incompatible types in assignment (expression has type "C", variable has type "P") a.py:2: note: Following member(s) of "C" have conflicts: a.py:2: note: z: expected "int", got "str" [case testWeAreCarefulWithBuiltinProtocols] import a x: a.A for i in x: pass [file a.py] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass [file a.py.2] class A: pass [out] == main:3: error: "A" has no attribute "__iter__" (not iterable) [case testWeAreCarefullWithBuiltinProtocolsBase] import a x: a.A for i in x: pass [file a.py] import b class A(b.B): pass [file a.py.2] class A: pass [file b.py] from typing import Iterator class B: def __iter__(self) -> Iterator[int]: pass [out] == main:3: error: "A" has no attribute "__iter__" (not iterable) [case 
testOverloadsSimpleFrom] import a [file a.py] import mod def f() -> None: x: str = mod.f(str()) [file mod.py] from typing import overload @overload def f(x: int) -> None: pass @overload def f(x: str) -> str: pass def f(x): pass [file mod.py.2] from typing import overload @overload def f(x: int) -> None: pass @overload def f(x: str) -> int: pass def f(x): pass [out] == a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testOverloadsSimpleToNested] from typing import overload, Any import mod def outer() -> None: @overload def f(x: int) -> None: pass @overload def f(x: str) -> str: pass def f(x: Any) -> Any: y: int = mod.f() [file mod.py] def f() -> int: pass [file mod.py.2] def f() -> str: pass [out] == main:9: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testOverloadsRemovedOverload] import mod def f() -> None: x: str = mod.f(str()) [file mod.py] class C: pass from typing import overload @overload def f(x: int) -> None: pass @overload def f(x: str) -> str: pass @overload def f(x: C) -> int: pass def f(x): pass [file mod.py.2] class C: pass from typing import overload @overload def f(x: int) -> None: pass @overload def f(x: C) -> int: pass def f(x): pass [out] == main:3: error: No overload variant of "f" matches argument type "str" main:3: note: Possible overload variants: main:3: note: def f(x: int) -> None main:3: note: def f(x: C) -> int [case testOverloadsDeleted] import mod def f() -> None: x: str = mod.f(str()) [file mod.py] from typing import overload @overload def f(x: int) -> None: pass @overload def f(x: str) -> str: pass def f(x): pass [file mod.py.2] from typing import overload [builtins fixtures/module.pyi] [out] == main:3: error: Module has no attribute "f" [case testOverloadsUpdatedTypeRecheckImplementation] from typing import overload import mod class Outer: @overload def f(self, x: mod.D) -> mod.D: pass @overload def f(self, x: mod.E) -> 
mod.E: pass def f(self, x: mod.C) -> mod.C: x.x = int() return x [file mod.py] import submod class C(submod.B): pass class D(C): pass class E(C): pass [file submod.py] import base class B(base.AI): pass [file submod.py.2] import base class B(base.AS): pass [file base.py] class AI: x: int class AS: x: str [out] == main:9: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testOverloadsUpdatedTypeRechekConsistency] from typing import overload import mod class Outer: @overload def f(self, x: mod.D) -> mod.D: pass @overload def f(self, x: mod.E) -> mod.E: pass def f(self, x: mod.C) -> mod.C: pass [file mod.py] class C: pass class D(C): pass class E(C): pass [file mod.py.2] class C: pass class D(C): pass class E: pass [out] == main:8: error: Overloaded function implementation does not accept all possible arguments of signature 2 main:8: error: Overloaded function implementation cannot produce return type of signature 2 [case testOverloadsGenericTypevarUpdated] import a [file a.py] import b b.f(int()) [file b.py] from typing import overload import c class C: pass @overload def f(x: C) -> None: pass @overload def f(x: c.T) -> c.T: pass def f(x): pass [file c.py] from typing import TypeVar T = TypeVar('T', int, str) [file c.py.2] from typing import TypeVar T = TypeVar('T', bound=str) [out] == a.py:2: error: No overload variant of "f" matches argument type "int" a.py:2: note: Possible overload variants: a.py:2: note: def f(x: C) -> None a.py:2: note: def [c.T <: str] f(x: c.T) -> c.T [case testOverloadsGenericToNonGeneric] import a [file a.py] import b b.f(int()) [file b.py] from typing import overload import c class C: pass @overload def f(x: C) -> None: pass @overload def f(x: c.T) -> c.T: pass def f(x): pass [file c.py] from typing import TypeVar T = TypeVar('T', bound=int) [file c.py.2] from typing import TypeVar class T: pass [out] == a.py:2: error: No overload variant of "f" matches argument type "int" a.py:2: note: 
Possible overload variants: a.py:2: note: def f(x: C) -> None a.py:2: note: def f(x: T) -> T [case testOverloadsToNonOverloaded] import a [file a.py] import mod def f() -> None: x: str = mod.f(str()) [file mod.py] from typing import overload @overload def f(x: int) -> None: pass @overload def f(x: str) -> str: pass def f(x): pass [file mod.py.2] from typing import overload def f(x: int) -> int: pass [out] == a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") a.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" [case testOverloadsUpdateFunctionToOverloaded] import a [file a.py] import mod def f() -> None: x: str = mod.f(str()) [file mod.py] from typing import overload def f(x: str) -> str: pass [file mod.py.2] from typing import overload @overload def f(x: int) -> None: pass @overload def f(x: str) -> int: pass def f(x): pass [out] == a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testOverloadedUpdateToClass] import a [file a.py] import mod def f() -> None: x: str = mod.f(str()) [file mod.py] from typing import overload @overload def f(x: int) -> None: pass @overload def f(x: str) -> str: pass def f(x): pass [file mod.py.2] from typing import overload class f: def __init__(self, x: str) -> None: pass [out] == a.py:3: error: Incompatible types in assignment (expression has type "f", variable has type "str") [case testDepsFromOverloadUpdatedAttrRecheckImpl] import mod x = mod.f [file mod.py] from typing import overload, Any import submod @overload def f(x: int) -> submod.A: pass @overload def f(x: str) -> submod.B: pass def f(x) -> Any: y: submod.C y.x = int() [file submod.py] import other class A: pass class B: pass C = other.C [file other.py] class C: x: int [file other.py.2] class C: x: str [out] == mod.py:9: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case 
testOverloadedMethodSupertype] from typing import overload, Any import b class Child(b.Parent): @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> str: ... def f(self, arg: Any) -> Any: ... [file b.py] from typing import overload, Any class C: pass class Parent: @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> str: ... def f(self, arg: Any) -> Any: ... [file b.py.2] from typing import overload, Any class C: pass class Parent: @overload def f(self, arg: int) -> int: ... @overload def f(self, arg: str) -> C: ... def f(self, arg: Any) -> Any: ... [out] == main:4: error: Signature of "f" incompatible with supertype "Parent" [case testOverloadedInitSupertype] import a [file a.py] from b import B B(int()) [file b.py] import c class B(c.C): pass [file c.py] from typing import overload class C: def __init__(self, x: int) -> None: pass [file c.py.2] from typing import overload class C: @overload def __init__(self, x: str) -> None: pass @overload def __init__(self, x: str, y: int) -> None: pass def __init__(self, *args, **kwargs) -> None: pass [builtins fixtures/dict.pyi] [out] == a.py:2: error: No overload variant of "B" matches argument type "int" a.py:2: note: Possible overload variant: a.py:2: note: def __init__(self, x: str) -> B a.py:2: note: <1 more non-matching overload not shown> [case testOverloadedToNormalMethodMetaclass] import a [file a.py] import b b.B.f(int()) [file b.py] import c class B(metaclass=c.M): pass [file c.py] from typing import overload class M(type): @overload def f(cls, x: str) -> str: pass @overload def f(cls, x: int) -> None: pass def f(cls, x): pass [file c.py.2] from typing import overload class M(type): def f(cls, x: str) -> str: pass [out] == a.py:2: error: Argument 1 to "f" of "M" has incompatible type "int"; expected "str" [case testYieldFrom] from typing import Iterator from a import f def g() -> Iterator[int]: a = "string" if int(): a = yield from f() [file a.py] from typing 
import Generator def f() -> Generator[int, None, str]: yield 5 return "ham" [file a.py.2] from typing import Generator class A: pass def f() -> Generator[int, None, A]: yield 5 return A() [out] == main:7: error: Incompatible types in assignment (expression has type "A", variable has type "str") [case testFString] from a import g f'{g(1)}' [file a.py] def g(x: int) -> str: pass [file a.py.2] def g(x: str) -> str: pass [builtins fixtures/f_string.pyi] [out] == main:2: error: Argument 1 to "g" has incompatible type "int"; expected "str" [case testExtendedUnpacking-only_when_nocache] from typing import List from a import g def f() -> List[int]: a, *b = g() return b [file a.py] from typing import Tuple def g() -> Tuple[str, int, int]: pass [file a.py.2] from typing import Tuple def g() -> Tuple[str, str]: pass [builtins fixtures/tuple.pyi] [out] == main:5: error: Incompatible return value type (got "List[str]", expected "List[int]") [case testUnpackInExpression1-only_when_nocache] from typing import Tuple, List from a import t def f() -> Tuple[int, int]: return (1, *t()) def g() -> List[int]: return [1, *t()] [file a.py] from typing import Tuple def t() -> Tuple[int]: ... [file a.py.2] from typing import Tuple def t() -> Tuple[str]: ... 
[builtins fixtures/list.pyi] [out] == main:5: error: Incompatible return value type (got "Tuple[int, str]", expected "Tuple[int, int]") main:8: error: List item 1 has incompatible type "Tuple[str]"; expected "int" [case testUnpackInExpression2-only_when_nocache] from typing import Set from a import t def f() -> Set[int]: return {1, *t()} [file a.py] from typing import Tuple def t() -> Tuple[int]: pass [file a.py.2] from typing import Tuple def t() -> Tuple[str]: pass [builtins fixtures/set.pyi] [out] == main:5: error: Argument 2 to has incompatible type "*Tuple[str]"; expected "int" [case testUnpackInExpression3-only_when_nocache] from typing import Dict from a import d def f() -> Dict[int, str]: return {1: '', **d()} [file a.py] from typing import Dict def d() -> Dict[int, str]: pass [file a.py.2] from typing import Dict def d() -> Dict[int, int]: pass [builtins fixtures/dict.pyi] [out] == main:5: error: Argument 1 to "update" of "dict" has incompatible type "Dict[int, int]"; expected "Mapping[int, str]" [case testAwaitAndAsyncDef-only_when_nocache] from a import g async def f() -> int: return await g() [file a.py] async def g() -> int: return 0 [file a.py.2] async def g() -> str: return '' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] == main:4: error: Incompatible return value type (got "str", expected "int") [case testAwaitAnd__await__-only_when_nocache] from a import C async def f(c: C) -> int: return await c [file a.py] from typing import Any, Generator class C: def __await__(self) -> Generator[Any, None, int]: yield return 0 [file a.py.2] from typing import Any, Generator class C: def __await__(self) -> Generator[Any, None, str]: yield return '' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] == main:4: error: Incompatible return value type (got "str", expected "int") [case test__aiter__and__anext__] from a import C async def f() -> int: async for x in C(): pass return x [file a.py] class C: def 
__aiter__(self) -> D: pass class D: def __aiter__(self) -> D: pass async def __anext__(self) -> int: return 0 [file a.py.2] class C: def __aiter__(self) -> D: pass class D: def __aiter__(self) -> D: pass async def __anext__(self) -> str: return '' [file a.py.3] class C: def __aiter__(self) -> E: pass class E: def __aiter__(self) -> E: pass async def __anext__(self) -> object: return 0 [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] == main:6: error: Incompatible return value type (got "str", expected "int") == main:6: error: Incompatible return value type (got "object", expected "int") [case testAsyncWith2-only_when_nocache] from a import C async def f() -> int: async with C() as x: return x async def g() -> None: async with C(): pass [file a.py] class C: async def __aenter__(self) -> int: pass async def __aexit__(self, x, y, z) -> None: pass [file a.py.2] class C: async def __aenter__(self) -> str: pass async def __aexit__(self, x, y, z) -> None: pass [file a.py.3] from typing import Awaitable class C: async def __aenter__(self) -> int: pass async def __aexit__(self, x, y, z) -> None: pass [file a.py.4] from typing import Awaitable class C: async def __aenter__(self) -> int: pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] == main:5: error: Incompatible return value type (got "str", expected "int") == == main:4: error: "C" has no attribute "__aexit__" main:8: error: "C" has no attribute "__aexit__" [case testLiskovFineVariable] import b class A(b.B): x: str def f(x: b.B) -> None: x.x + int() f(A()) [file b.py] class B: x: str [file b.py.2] class B: x: int [out] main:5: error: Unsupported operand types for + ("str" and "int") == main:3: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testLiskovFineVariableInFunction] from b import B def outer() -> None: class A(B): x: str def f(x: B) -> None: x.x + int() [file b.py] class B: x: str [file 
b.py.2] class B: x: int [out] main:6: error: Unsupported operand types for + ("str" and "int") == main:4: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testLiskovFineDecorator] import b from c import deco class A(b.B): @deco def m(self) -> str: pass def f(x: b.B) -> None: x.m() + int() f(A()) [file b.py] from c import deco class B: @deco def m(self) -> str: pass [file b.py.2] from c import deco class B: @deco def m(self) -> int: pass [file c.py] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deco(f: F) -> F: pass [out] main:7: error: Unsupported operand types for + ("str" and "int") == main:5: error: Return type "str" of "m" incompatible with return type "int" in supertype "B" [case testLiskovFineVariableClean-only_when_nocache] import b class A(b.B): x: str [file b.py] class B: x: str [file b.py.2] class B: x: int [out] == main:3: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testLiskovFineVariableCleanDefInMethod-only_when_nocache] import b class A(b.B): def meth(self) -> None: self.x: str [file b.py] class B: x: str [file b.py.2] class B: x: int [out] == main:4: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testLiskovFineVariableCleanDefInMethodStar-only_when_nocache] from typing import List import b class A(b.B): def meth(self) -> None: self.x, *self.y = None, None # type: str, List[str] [file b.py] from typing import List class B: y: List[str] [file b.py.2] from typing import List class B: y: List[int] [builtins fixtures/list.pyi] [out] == main:5: error: Incompatible types in assignment (expression has type "List[str]", base class "B" defined the type as "List[int]") [case testLiskovFineVariableCleanDefInMethodNested-only_when_nocache] from b import B def outer() -> None: class A(B): def meth(self) -> None: self.x: str [file 
b.py] class B: x: str [file b.py.2] class B: x: int [out] == main:5: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testLiskovFineVariableInFunctionClean-only_when_nocache] from b import B def outer() -> None: class A(B): x: str [file b.py] class B: x: str [file b.py.2] class B: x: int [out] == main:4: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testLiskovFineDecoratorClean-only_when_nocache] import b from c import deco class A(b.B): @deco def m(self) -> str: pass [file b.py] from c import deco class B: @deco def m(self) -> str: pass [file b.py.2] from c import deco class B: @deco def m(self) -> int: pass [file c.py] from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deco(f: F) -> F: pass [out] == main:5: error: Return type "str" of "m" incompatible with return type "int" in supertype "B" [case testAddAbstractMethod] from b import D D() [file b.py] from a import C class D(C): def f(self) -> None: pass [file a.py] from abc import abstractmethod class C: @abstractmethod def f(self) -> None: pass [file a.py.2] from abc import abstractmethod class C: @abstractmethod def f(self) -> None: pass @abstractmethod def g(self) -> None: pass [file a.py.3] from abc import abstractmethod class C: @abstractmethod def f(self) -> None: pass def g(self) -> None: pass [out] == main:2: error: Cannot instantiate abstract class 'D' with abstract attribute 'g' == [case testMakeClassAbstract] from a import C c = C() [file a.py] from abc import abstractmethod class C: pass [file a.py.2] from abc import abstractmethod class C: @abstractmethod def f(self) -> None: pass [out] == main:2: error: Cannot instantiate abstract class 'C' with abstract attribute 'f' [case testMakeMethodNoLongerAbstract1] [file z.py] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass @abstractmethod 
def g(self) -> None: pass [file b.py] import z def x() -> Foo: return Foo() class Foo(z.I): def f(self) -> None: pass def g(self) -> None: pass [file z.py.2] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): def f(self) -> None: pass @abstractmethod def g(self) -> None: pass [file b.py.2] import z def x() -> Foo: return Foo() class Foo(z.I): def g(self) -> None: pass [out] == [case testMakeMethodNoLongerAbstract2] -- this version never failed, but it is just a file-renaming -- away from the above test that did [file a.py] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass @abstractmethod def g(self) -> None: pass [file b.py] import a def x() -> Foo: return Foo() class Foo(a.I): def f(self) -> None: pass def g(self) -> None: pass [file a.py.2] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): def f(self) -> None: pass @abstractmethod def g(self) -> None: pass [file b.py.2] import a def x() -> Foo: return Foo() class Foo(a.I): def g(self) -> None: pass [out] == [case testImplicitOptionalRefresh1] # flags: --strict-optional from x import f def foo(x: int = None) -> None: f() [file x.py] def f() -> int: return 0 [file x.py.2] def f() -> str: return '0' [out] == [case testRefreshIgnoreErrors1] [file mypy.ini] \[mypy] \[mypy-b] ignore_errors = True [file a.py] y = '1' [file a.py.2] y = 1 [file b.py] from a import y def fu() -> None: 1+'lurr' y [out] == [case testRefreshIgnoreErrors2] [file mypy.ini] \[mypy] \[mypy-b] ignore_errors = True [file b.py] def fu() -> int: 1+'lurr' return 1 [file b.py.2] def fu() -> int: 1+'lurr' return 2 [out] == [case testRefreshOptions] [file mypy.ini] \[mypy] disallow_any_generics = True \[mypy-b] disallow_any_generics = False [file a.py] y = '1' [file a.py.2] y = 1 [file b.py] from typing import List from a import y x = [] # type: List [builtins fixtures/list.pyi] [out] == [case testNamedTupleFallbackModule] import b [file b.py] from a import A 
def f(a: A): pass [file b.py.2] from a import A def f(a: A): reveal_type(a) [file a.py] from typing import NamedTuple F = [('x', int)] A = NamedTuple('A', F) # type: ignore [builtins fixtures/list.pyi] [out] == b.py:3: note: Revealed type is 'Tuple[, fallback=a.A]' [case testImportOnTopOfAlias1] from a import A x: A [file a.py] from typing import TypeVar, List T = TypeVar('T') A = List[T] [file a.py.2] from typing import TypeVar, List T = TypeVar('T') A = List[T] from b import A [file b.py] # empty [builtins fixtures/list.pyi] [out] == a.py:4: error: Module 'b' has no attribute 'A' a.py:4: error: Name 'A' already defined on line 3 -- the order of errors is different with cache [case testImportOnTopOfAlias2] from a import A x: A [file a.py] from typing import TypeVar, List T = TypeVar('T') A = List[T] [file a.py.2] from typing import TypeVar, List T = TypeVar('T') A = List[T] from b import A as A [file b.py] def A(x: str) -> str: pass [builtins fixtures/list.pyi] [out] == a.py:4: error: Incompatible import of "A" (imported name has type "Callable[[str], str]", local name has type "Type[List[Any]]") [case testFakeOverloadCrash] import b [file a.py] def dec(fun): pass a = 1 [file a.py.2] def dec(fun): pass a = 2 [file b.py] from a import dec @dec def a(): pass @dec def a(): pass [out] b.py:5: error: Name 'a' already defined on line 2 == b.py:5: error: Name 'a' already defined on line 2 [case testFakeOverloadCrash2] # this test just should not crash import a [file a.py] T = TypeVar("T") def foo(func): return func @foo def bar(x: T) -> T: pass @foo def bar(x: T) -> T: pass [file a.py.2] T = TypeVar("T") def foo(func): return func @foo def bar(x: T) -> T: pass @foo def bar(x: T) -> T: pass x = 1 [out] a.py:1: error: Name 'TypeVar' is not defined a.py:1: note: Did you forget to import it from "typing"? 
(Suggestion: "from typing import TypeVar") a.py:7: error: Variable "a.T" is not valid as a type a.py:10: error: Name 'bar' already defined on line 6 a.py:11: error: Variable "a.T" is not valid as a type == a.py:1: error: Name 'TypeVar' is not defined a.py:1: note: Did you forget to import it from "typing"? (Suggestion: "from typing import TypeVar") a.py:7: error: Variable "a.T" is not valid as a type a.py:10: error: Name 'bar' already defined on line 6 a.py:11: error: Variable "a.T" is not valid as a type [case testRefreshForWithTypeComment1] [file a.py] from typing import List import b def foo(l: List[int]) -> None: for x in l: # type: object pass x = object() b.x [file b.py] x = 1 [file b.py.2] x = '1' [builtins fixtures/list.pyi] [out] == [case testRefreshForWithTypeComment2] from typing import List, Any import m def f(x: List[Any]) -> None: for a in x: # type: m.A pass [file m.py] class A: pass [file m.py.2] [builtins fixtures/list.pyi] [out] == main:4: error: Name 'm.A' is not defined [case testIdLikeDecoForwardCrash] import b [file b.py] from typing import Callable, Any, TypeVar F = TypeVar('F_BadName', bound=Callable[..., Any]) # type: ignore def deco(func: F) -> F: # type: ignore pass @deco def test(x: int, y: int) -> str: pass [file b.py.2] from typing import Callable, Any, TypeVar F = TypeVar('F_BadName', bound=Callable[..., Any]) # type: ignore def deco(func: F) -> F: # type: ignore pass @deco def test(x: int, y: int) -> str: pass x = 1 [out] == [case testIdLikeDecoForwardCrashAlias] import b [file b.py] from typing import Callable, Any, TypeVar F = TypeVar('F', bound=Func) def deco(func: F) -> F: pass @deco def test(x: int, y: int) -> str: pass Func = Callable[..., Any] [file b.py.2] from typing import Callable, Any, TypeVar F = TypeVar('F', bound=Func) def deco(func: F) -> F: pass @deco def test(x: int, y: int) -> str: pass x = 1 Func = Callable[..., Any] [out] == [case testIdLikeDecoForwardCrash_python2] # flags: --py2 import b [file b.py] from typing 
import Callable, Any, TypeVar F = TypeVar('F_BadName', bound=Callable[..., Any]) # type: ignore def deco(func): # type: ignore # type: (F) -> F pass @deco def test(x, y): # type: (int, int) -> str pass [file b.py.2] from typing import Callable, Any, TypeVar F = TypeVar('F_BadName', bound=Callable[..., Any]) # type: ignore def deco(func): # type: ignore # type: (F) -> F pass @deco def test(x, y): # type: (int, int) -> str pass x = 1 [out] == [case testIdLikeDecoForwardCrashAlias_python2] # flags: --py2 import b [file b.py] from typing import Callable, Any, TypeVar F = TypeVar('F', bound=Func) def deco(func): # type: (F) -> F pass @deco def test(x, y): # type: (int, int) -> str pass Func = Callable[..., Any] [file b.py.2] from typing import Callable, Any, TypeVar F = TypeVar('F', bound=Func) def deco(func): # type: (F) -> F pass @deco def test(x, y): # type: (int, int) -> str pass x = 1 Func = Callable[..., Any] [out] == -- Test cases for final qualifier [case testFinalAddFinalVarAssignFine] import mod from a import D from mod import x x = 2 def outer() -> None: mod.x = 2 x = 2 # This is OK because it creates a local variable d: D d.y = 2 d.z = 2 D.y = 2 [file a.py] import mod class D(mod.C): pass [file mod.py] x = 1 class C: y = 1 def __init__(self) -> None: self.z = 1 [file mod.py.2] from typing import Final x: Final = 1 class C: y: Final = 1 def __init__(self) -> None: self.z: Final = 1 [out] == main:5: error: Cannot assign to final name "x" main:7: error: Cannot assign to final name "x" main:10: error: Cannot assign to final attribute "y" main:11: error: Cannot assign to final attribute "z" main:12: error: Cannot assign to final attribute "y" [case testFinalAddFinalVarOverrideFine] from mod import C class D(C): x = 2 def __init__(self) -> None: self.y = 2 class E(C): y = 2 def __init__(self) -> None: self.x = 2 [file mod.py] class C: x = 1 def __init__(self) -> None: self.y = 1 [file mod.py.2] from typing import Final class C: x: Final = 1 def __init__(self) -> 
None: self.y: Final = 1 [out] == main:4: error: Cannot assign to final name "x" main:6: error: Cannot assign to final attribute "y" main:8: error: Cannot assign to final name "y" main:10: error: Cannot assign to final attribute "x" [case testFinalAddFinalMethodOverrideFine] from mod import C class D(C): def meth(self) -> int: ... [file mod.py] class C: def meth(self) -> int: ... [file mod.py.2] from typing import final class C: @final def meth(self) -> int: ... [out] == main:4: error: Cannot override final attribute "meth" (previously declared in base class "C") [case testFinalAddFinalMethodOverrideWithVarFine] from mod import C from typing import Any class D(C): meth: Any = 2 def __init__(self) -> None: self.other: Any = 2 [file mod.py] class C: def meth(self) -> int: ... def other(self) -> int: ... [file mod.py.2] from typing import final class C: @final def meth(self) -> int: ... @final def other(self) -> int: ... [out] == main:5: error: Cannot override final attribute "meth" (previously declared in base class "C") main:7: error: Cannot assign to final attribute "other" main:7: error: Cannot override final attribute "other" (previously declared in base class "C") [case testFinalAddFinalMethodOverrideOverloadFine] from typing import overload from mod import C def outer() -> None: class D(C): @overload def meth(self, x: int) -> int: ... @overload def meth(self, x: str) -> str: ... def meth(self, x): pass [file mod.pyi] from typing import overload class C: @overload def meth(self, x: int) -> int: ... @overload def meth(self, x: str) -> str: ... [file mod.pyi.2] from typing import final, overload class C: @final @overload def meth(self, x: int) -> int: ... @overload def meth(self, x: str) -> str: ... 
[out] == main:6: error: Cannot override final attribute "meth" (previously declared in base class "C") [case testFinalAddFinalPropertyWithVarFine] from mod import C def outer() -> None: class D(C): p = 2 class E(C): def __init__(self) -> None: self.p: int = 2 [file mod.py] class C: @property def p(self) -> int: pass [file mod.py.2] from typing import final class C: @final @property def p(self) -> int: pass [builtins fixtures/property.pyi] [out] == main:5: error: Cannot override final attribute "p" (previously declared in base class "C") main:8: error: Cannot assign to final attribute "p" main:8: error: Cannot override final attribute "p" (previously declared in base class "C") [case testFinalBodyReprocessedAndStillFinal] import a [file a.py] from c import C class A: def meth(self) -> None: ... [file a.py.3] from c import C class A(C): def meth(self) -> None: ... [file c.py] from typing import final from d import D class C: @final def meth(self) -> None: D(int()) [file d.py] class D: def __init__(self, x: int) -> None: ... [file d.py.2] from typing import Optional class D: def __init__(self, x: Optional[int]) -> None: ... [out] == == a.py:3: error: Cannot override final attribute "meth" (previously declared in base class "C") [case testFinalBodyReprocessedAndStillFinalOverloaded] import a [file a.py] from c import C class A: def meth(self) -> None: ... [file a.py.3] from c import C class A(C): def meth(self) -> None: ... [file c.py] from typing import final, overload, Union from d import D class C: @overload def meth(self, x: int) -> int: ... @overload def meth(self, x: str) -> str: ... @final def meth(self, x: Union[int, str]) -> Union[int, str]: D(int()) return x [file d.py] class D: def __init__(self, x: int) -> None: ... [file d.py.2] from typing import Optional class D: def __init__(self, x: Optional[int]) -> None: ... 
[out] == == a.py:3: error: Cannot override final attribute "meth" (previously declared in base class "C") a.py:3: error: Signature of "meth" incompatible with supertype "C" [case testIfMypyUnreachableClass] from a import x MYPY = False if MYPY: pass else: class A: pass y: int = x [file a.py] x = 1 [file a.py.2] x = 2 [builtins fixtures/bool.pyi] [out] == [case testIfTypeCheckingUnreachableClass] from a import x from typing import TYPE_CHECKING if not TYPE_CHECKING: class A(int): pass else: A = int y: A = x [file a.py] x = 1 [file a.py.2] x = 2 [file a.py.3] x = 'no way' [builtins fixtures/bool.pyi] [out] == == main:10: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNamedTupleForwardFunctionDirect] # flags: --ignore-missing-imports from typing import NamedTuple from b import B NT = NamedTuple('NT', [('x', B)]) [file b.py.2] def func(x): pass B = func [out] == main:5: error: Variable "b.B" is not valid as a type [case testNamedTupleForwardFunctionIndirect] # flags: --ignore-missing-imports from typing import NamedTuple from a import A NT = NamedTuple('NT', [('x', A)]) [file a.py] from b import B A = B [file b.py.2] def func(x): pass B = func [out] == main:5: error: Variable "a.A" is not valid as a type [case testNamedTupleForwardFunctionIndirectReveal] # flags: --ignore-missing-imports import m [file m.py] from typing import NamedTuple from a import A NT = NamedTuple('NT', [('x', A)]) [file m.py.3] from typing import NamedTuple from a import A NT = NamedTuple('NT', [('x', A)]) reveal_type(NT.x) x: NT reveal_type(x.x) [file a.py] from b import B A = B [file b.py.2] def func(x): pass B = func [out] == m.py:4: error: Variable "a.A" is not valid as a type == m.py:4: error: Variable "a.A" is not valid as a type m.py:5: note: Revealed type is 'A?' m.py:7: note: Revealed type is 'A?' 
[case testAliasForwardFunctionDirect] # flags: --ignore-missing-imports from typing import Optional from b import B Alias = Optional[B] [file b.py.2] def func(x): pass B = int() [out] == main:5: error: Variable "b.B" is not valid as a type [case testAliasForwardFunctionIndirect] # flags: --ignore-missing-imports from typing import Optional from a import A Alias = Optional[A] [file a.py] from b import B A = B [file b.py.2] def func(x): pass B = func [out] == main:5: error: Variable "a.A" is not valid as a type [case testLiteralFineGrainedVarConversion] import mod reveal_type(mod.x) [file mod.py] x = 1 [file mod.py.2] from typing_extensions import Literal x: Literal[1] = 1 [file mod.py.3] from typing_extensions import Literal x: Literal[1] = 2 [out] main:2: note: Revealed type is 'builtins.int' == main:2: note: Revealed type is 'Literal[1]' == mod.py:2: error: Incompatible types in assignment (expression has type "Literal[2]", variable has type "Literal[1]") main:2: note: Revealed type is 'Literal[1]' [case testLiteralFineGrainedFunctionConversion] from mod import foo foo(3) [file mod.py] def foo(x: int) -> None: pass [file mod.py.2] from typing_extensions import Literal def foo(x: Literal[3]) -> None: pass [file mod.py.3] from typing_extensions import Literal def foo(x: Literal[4]) -> None: pass [out] == == main:2: error: Argument 1 to "foo" has incompatible type "Literal[3]"; expected "Literal[4]" [case testLiteralFineGrainedAlias] from mod import Alias a: Alias = 1 [file mod.py] Alias = int [file mod.py.2] from typing_extensions import Literal Alias = Literal[1] [file mod.py.3] from typing_extensions import Literal Alias = Literal[2] [out] == == main:2: error: Incompatible types in assignment (expression has type "Literal[1]", variable has type "Literal[2]") [case testLiteralFineGrainedOverload] from mod import foo reveal_type(foo(4)) [file mod.py] from typing import overload from typing_extensions import Literal @overload def foo(x: int) -> str: ... 
@overload def foo(x: Literal['bar']) -> int: ... def foo(x): pass [file mod.py.2] from typing import overload from typing_extensions import Literal @overload def foo(x: Literal[4]) -> Literal['foo']: ... @overload def foo(x: int) -> str: ... @overload def foo(x: Literal['bar']) -> int: ... def foo(x): pass [out] main:2: note: Revealed type is 'builtins.str' == main:2: note: Revealed type is 'Literal['foo']' [case testLiteralFineGrainedChainedDefinitions] from mod1 import foo from typing_extensions import Literal def expect_3(x: Literal[3]) -> None: pass expect_3(foo) [file mod1.py] from mod2 import bar foo = bar [file mod2.py] from mod3 import qux as bar [file mod3.py] from typing_extensions import Literal qux: Literal[3] [file mod3.py.2] from typing_extensions import Literal qux: Literal[4] [out] == main:4: error: Argument 1 to "expect_3" has incompatible type "Literal[4]"; expected "Literal[3]" [case testLiteralFineGrainedChainedAliases] from mod1 import Alias1 from typing_extensions import Literal x: Alias1 def expect_3(x: Literal[3]) -> None: pass expect_3(x) [file mod1.py] from mod2 import Alias2 Alias1 = Alias2 [file mod2.py] from mod3 import Alias3 Alias2 = Alias3 [file mod3.py] from typing_extensions import Literal Alias3 = Literal[3] [file mod3.py.2] from typing_extensions import Literal Alias3 = Literal[4] [out] == main:5: error: Argument 1 to "expect_3" has incompatible type "Literal[4]"; expected "Literal[3]" [case testLiteralFineGrainedChainedFunctionDefinitions] from mod1 import func1 from typing_extensions import Literal def expect_3(x: Literal[3]) -> None: pass expect_3(func1()) [file mod1.py] from mod2 import func2 as func1 [file mod2.py] from mod3 import func3 func2 = func3 [file mod3.py] from typing_extensions import Literal def func3() -> Literal[3]: pass [file mod3.py.2] from typing_extensions import Literal def func3() -> Literal[4]: pass [out] == main:4: error: Argument 1 to "expect_3" has incompatible type "Literal[4]"; expected "Literal[3]" 
[case testLiteralFineGrainedChainedTypeVarInference] from mod1 import foo reveal_type(foo) [file mod1.py] from typing import TypeVar from mod2 import bar T = TypeVar('T', bound=int) def func(x: T) -> T: return x foo = func(bar) [file mod2.py] bar = 3 [file mod2.py.2] from typing_extensions import Literal bar: Literal[3] = 3 [out] main:2: note: Revealed type is 'builtins.int*' == main:2: note: Revealed type is 'Literal[3]' [case testLiteralFineGrainedChainedViaFinal] from mod1 import foo from typing_extensions import Literal def expect_3(x: Literal[3]) -> None: pass expect_3(foo) [file mod1.py] from typing_extensions import Final from mod2 import bar foo: Final = bar [file mod2.py] from mod3 import qux as bar [file mod3.py] from typing_extensions import Final qux: Final = 3 [file mod3.py.2] from typing_extensions import Final qux: Final = 4 [file mod3.py.3] from typing_extensions import Final qux: Final[int] = 4 [out] == main:4: error: Argument 1 to "expect_3" has incompatible type "Literal[4]"; expected "Literal[3]" == main:4: error: Argument 1 to "expect_3" has incompatible type "int"; expected "Literal[3]" [case testLiteralFineGrainedStringConversionPython3] from mod1 import foo reveal_type(foo) [file mod1.py] from mod2 import bar foo = bar() [file mod2.py] from typing_extensions import Literal def bar() -> Literal["foo"]: pass [file mod2.py.2] from typing_extensions import Literal def bar() -> Literal[u"foo"]: pass [file mod2.py.3] from typing_extensions import Literal def bar() -> Literal[b"foo"]: pass [out] main:2: note: Revealed type is 'Literal['foo']' == main:2: note: Revealed type is 'Literal['foo']' == main:2: note: Revealed type is 'Literal[b'foo']' [case testLiteralFineGrainedStringConversionPython2] # flags: --python-version 2.7 from mod1 import foo reveal_type(foo) [file mod1.py] from mod2 import bar foo = bar() [file mod2.py] from typing_extensions import Literal def bar(): # type: () -> Literal["foo"] pass [file mod2.py.2] from typing_extensions 
import Literal def bar(): # type: () -> Literal[b"foo"] pass [file mod2.py.3] from __future__ import unicode_literals from typing_extensions import Literal def bar(): # type: () -> Literal["foo"] pass [file mod2.py.4] from __future__ import unicode_literals from typing_extensions import Literal def bar(): # type: () -> Literal[b"foo"] pass [file mod2.py.5] from typing_extensions import Literal def bar(): # type: () -> Literal[u"foo"] pass [out] main:3: note: Revealed type is 'Literal['foo']' == main:3: note: Revealed type is 'Literal['foo']' == main:3: note: Revealed type is 'Literal[u'foo']' == main:3: note: Revealed type is 'Literal['foo']' == main:3: note: Revealed type is 'Literal[u'foo']' [case testReprocessModuleTopLevelWhileMethodDefinesAttr] import a [file a.py] from b import B B().x [file a.py.3] from b import B x = B().x [file b.py] from c import f f(int()) class B: def meth(self) -> None: self.x: int [file c.py] def f(x: int) -> None: ... [file c.py.2] from typing import Union def f(x: Union[int, str]) -> None: ... [targets2 c, b] [targets3 a] [out] == == [case testCheckReprocessedTargets-only_when_nocache] from b import B class C(B): def meth(self) -> None: from b import f f() [file b.py] class B: ... def f() -> int: ... [file b.py.2] class A: ... class B(A): ... def f() -> int: ... [file b.py.3] class A: ... class B(A): ... def f() -> str: ... [targets2 b, __main__, __main__.C.meth, __main__, __main__.C.meth] [targets3 b, __main__.C.meth] [out] == == [case testReprocessModuleTopLevelWhileMethodDefinesAttrExplicit] import a [file a.py] from b import B B().x [file b.py] from c import f f(int()) class A: x: int class B(A): def meth(self) -> None: self.x: int [file c.py] def f(x: int) -> None: ... [file c.py.2] from typing import Union def f(x: Union[int, str]) -> None: ... [file a.py.3] from b import B # Double-check the variable is still accessible. 
B().x [targets2 c, b] [targets3 a] [out] == == [case testReprocessModuleTopLevelWhileMethodDefinesAttrBothExplicitAndInClass] import a [file a.py] from b import B B().x [file b.py] from c import f f(int()) class A: x: int class B(A): x: int def meth(self) -> None: self.x: int [file c.py] def f(x: int) -> None: ... [file c.py.2] from typing import Union def f(x: Union[int, str]) -> None: ... [file a.py.3] from b import B # Double-check the variable is still accessible. B().x [targets2 c, b] [targets3 a] [out] == == [case testReprocessModuleTopLevelWhileMethodDefinesAttrProtocol] import a [file a.py] from b import B B().x [file b.py] from typing import Protocol from c import f f(int()) class A(Protocol): x: int class B(A): def meth(self) -> None: self.x = 42 [file c.py] def f(x: int) -> None: ... [file c.py.2] from typing import Union def f(x: Union[int, str]) -> None: ... [file a.py.3] from b import B # Double-check the variable is still accessible. B().x [targets2 c, b] [targets3 a] [out] == == [case testNewSemanticAnalyzerUpdateMethodAndClass] import m m.x class A: def f(self) -> None: self.x = 0 m.y def g(self) -> None: m.x [file m.py] x = 0 y = 0 [file m.py.2] x = '' y = 0 [file m.py.3] x = '' y = '' [out] == == [case testInlineConfigFineGrained1] import a [file a.py] # mypy: no-warn-no-return from typing import List def foo() -> List: 20 [file a.py.2] # mypy: disallow-any-generics, no-warn-no-return from typing import List def foo() -> List: 20 [file a.py.3] # mypy: no-warn-no-return from typing import List def foo() -> List: 20 [file a.py.4] from typing import List def foo() -> List: 20 [out] == a.py:4: error: Missing type parameters for generic type "List" == == a.py:2: error: Missing return statement [builtins fixtures/list.pyi] [case testInlineConfigFineGrained2] import a [file a.py] # mypy: bogus-flag [file b.py.2] [out] a.py:1: error: Unrecognized option: bogus_flag = True == a.py:1: error: Unrecognized option: bogus_flag = True [case 
testWrongNumberOfArguments] [file a.py] def bar(x): # type: () -> None pass def baz(x): # type: () -> None pass def f(): # type: () -> None def g(x): # type: () -> None pass [file c.py] def bar(x): # type: () -> None pass [file b.py] x = '' [file b.py.2] x = 1 [out] c.py:1: error: Type signature has too few arguments a.py:1: error: Type signature has too few arguments a.py:5: error: Type signature has too few arguments a.py:11: error: Type signature has too few arguments == a.py:1: error: Type signature has too few arguments a.py:5: error: Type signature has too few arguments a.py:11: error: Type signature has too few arguments c.py:1: error: Type signature has too few arguments [case testErrorReportingNewAnalyzer] # flags: --disallow-any-generics from a import A def f() -> None: x: A [file a.py] class A: ... [file a.py.2] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): ... [out] == main:5: error: Missing type parameters for generic type "A" [case testStripNewAnalyzer] # flags: --ignore-missing-imports import a [file a.py] from typing import List from b import B class A: def __init__(self) -> None: self.x: List[int] = [] def method(self) -> None: B() self.x = [] [file b.py] class B: ... 
[delete b.py.2] [builtins fixtures/list.pyi] [out] == [case testClassVariableOrderingRefresh] from b import bar def foo(x: str) -> None: pass class Something: def run(self) -> None: bar() foo(self.IDS[0]) IDS = [87] [file b.py] def bar() -> int: return 0 [file b.py.2] def bar() -> str: return '0' [builtins fixtures/list.pyi] [out] main:9: error: Argument 1 to "foo" has incompatible type "int"; expected "str" == main:9: error: Argument 1 to "foo" has incompatible type "int"; expected "str" [case testInfiniteLoop] [file a.py] from b import f from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def dec(x: F) -> F: return x @dec def foo(self): class A: @classmethod def asdf(cls, x: 'A') -> None: pass @dec def bar(self): class B: @classmethod def asdf(cls, x: 'B') -> None: pass f() [file b.py] def f() -> int: pass [file b.py.2] def f() -> str: pass [builtins fixtures/classmethod.pyi] [out] == [case testInfiniteLoop2] [file a.py] from b import f from typing import Callable, TypeVar, NamedTuple F = TypeVar('F', bound=Callable) def dec(x: F) -> F: return x @dec def foo(self): N = NamedTuple('N', [('x', int)]) def g(x: N) -> None: pass @dec def bar(self): N = NamedTuple('N', [('x', int)]) def g(x: N) -> None: pass f() [file b.py] def f() -> int: pass [file b.py.2] def f() -> str: pass [builtins fixtures/classmethod.pyi] [out] == [case testFileAddedAndImported] # flags: --ignore-missing-imports --follow-imports=skip # cmd: mypy a.py # cmd2: mypy a.py b.py [file a.py] from b import bad x = 42 [file b.py.2] def good() -> None: ... [out] == a.py:1: error: Module 'b' has no attribute 'bad' [case testFileAddedAndImported2] # flags: --ignore-missing-imports --follow-imports=skip # cmd: mypy -m a # cmd2: mypy -m a b [file a.py] x = 42 [file a.py.2] from b import bad x = 42 [file b.py.2] def good() -> None: ... 
[out] == a.py:1: error: Module 'b' has no attribute 'bad' [case testTypedDictCrashFallbackAfterDeletedMeet] # flags: --ignore-missing-imports from z import get_data from a import Data for it in get_data()['things']: it['id'] [file z.py] from a import Data def get_data() -> Data: ... [file a.py] from typing import TypedDict, List, Union class File(TypedDict): id: int name: str class User(TypedDict): id: int path: str class Data(TypedDict): things: List[Union[File, User]] [delete a.py.2] [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] == [case testTypedDictCrashFallbackAfterDeletedJoin] # flags: --ignore-missing-imports from z import get_data from a import Data x = [get_data()[0], get_data()[1]] [file z.py] from a import Data def get_data() -> Data: ... [file a.py] from typing import TypedDict, Tuple class File(TypedDict): id: int name: str class User(TypedDict): id: int path: str Data = Tuple[User, File] [delete a.py.2] [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] == [case testClassRedef] # An issue involved serializing these caused crashes in the past [file a.py] class A: pass x = 0 [file a.py.2] class A: a = A() x = '0' [file b.py] from a import A, x class A: # type: ignore pass [out] == [case testAddAttributeThroughNewBaseClass] import a [file a.py] class C: def __init__(self) -> None: self.x = 0 [file a.py.2] from b import B class C(B): def __init__(self) -> None: self.x = 0 [file b.py.2] class B: def __init__(self) -> None: self.x = 0 [out] == mypy-0.761/test-data/unit/fixtures/0000755€tŠÔÚ€2›s®0000000000013576752267023441 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/unit/fixtures/__init_subclass__.pyi0000644€tŠÔÚ€2›s®0000000030313576752246027613 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub with object.__init_subclass__ class object: def __init_subclass__(cls) -> None: pass class type: pass class int: pass class bool: pass class str: pass class function: pass 
mypy-0.761/test-data/unit/fixtures/__new__.pyi0000644€tŠÔÚ€2›s®0000000041713576752246025550 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub with object.__new__ from typing import Any class object: def __init__(self) -> None: pass def __new__(cls) -> Any: pass class type: def __init__(self, x) -> None: pass class int: pass class bool: pass class str: pass class function: pass mypy-0.761/test-data/unit/fixtures/alias.pyi0000644€tŠÔÚ€2›s®0000000033513576752246025253 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins test fixture with a type alias 'bytes' class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class int: pass class str: pass class function: pass bytes = str mypy-0.761/test-data/unit/fixtures/args.pyi0000644€tŠÔÚ€2›s®0000000150613576752246025117 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used to support *args, **kwargs. from typing import TypeVar, Generic, Iterable, Tuple, Dict, Any, overload, Mapping Tco = TypeVar('Tco', covariant=True) T = TypeVar('T') S = TypeVar('S') class object: def __init__(self) -> None: pass def __eq__(self, o: object) -> bool: pass def __ne__(self, o: object) -> bool: pass class type: @overload def __init__(self, o: object) -> None: pass @overload def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: pass def __call__(self, *args: Any, **kwargs: Any) -> Any: pass class tuple(Iterable[Tco], Generic[Tco]): pass class dict(Iterable[T], Mapping[T, S], Generic[T, S]): pass class int: def __eq__(self, o: object) -> bool: pass class str: pass class bool: pass class function: pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/async_await.pyi0000644€tŠÔÚ€2›s®0000000132313576752246026462 0ustar jukkaDROPBOX\Domain Users00000000000000import typing T = typing.TypeVar('T') U = typing.TypeVar('U') class list(typing.Sequence[T]): def __iter__(self) -> typing.Iterator[T]: ... def __getitem__(self, i: int) -> T: ... 
def __contains__(self, item: object) -> bool: ... class object: def __init__(self) -> None: pass class type: pass class function: pass class int: pass class str: pass class bool(int): pass class dict(typing.Generic[T, U]): pass class set(typing.Generic[T]): pass class tuple(typing.Generic[T]): pass class BaseException: pass class StopIteration(BaseException): pass class StopAsyncIteration(BaseException): pass def iter(obj: typing.Any) -> typing.Any: pass def next(obj: typing.Any) -> typing.Any: pass class ellipsis: ... mypy-0.761/test-data/unit/fixtures/attr.pyi0000644€tŠÔÚ€2›s®0000000132113576752246025130 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used to support @attr.s tests. from typing import Union, overload class object: def __init__(self) -> None: pass def __eq__(self, o: object) -> bool: pass def __ne__(self, o: object) -> bool: pass class type: pass class bytes: pass class function: pass class bool: pass class float: pass class int: @overload def __init__(self, x: Union[str, bytes, int] = ...) -> None: ... @overload def __init__(self, x: Union[str, bytes], base: int) -> None: ... class complex: @overload def __init__(self, real: float = ..., im: float = ...) -> None: ... @overload def __init__(self, real: str = ...) -> None: ... class str: pass class unicode: pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/bool.pyi0000644€tŠÔÚ€2›s®0000000064013576752246025114 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub used in boolean-related test cases. 
from typing import Generic, TypeVar T = TypeVar('T') class object: def __init__(self) -> None: pass def __eq__(self, other: object) -> bool: pass def __ne__(self, other: object) -> bool: pass class type: pass class tuple(Generic[T]): pass class function: pass class bool: pass class int: pass class str: pass class unicode: pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/bool_py2.pyi0000644€tŠÔÚ€2›s®0000000050713576752246025710 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub used in boolean-related test cases. from typing import Generic, TypeVar import sys T = TypeVar('T') class object: def __init__(self) -> None: pass class type: pass class tuple(Generic[T]): pass class function: pass class bool: pass class int: pass class str: pass class unicode: pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/callable.pyi0000644€tŠÔÚ€2›s®0000000116213576752246025720 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, Tuple, TypeVar, Union T = TypeVar('T') class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class tuple(Generic[T]): pass class function: pass def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass def callable(x: object) -> bool: pass class int: def __add__(self, other: 'int') -> 'int': pass def __eq__(self, other: 'int') -> 'bool': pass class float: pass class bool(int): pass class str: def __add__(self, other: 'str') -> 'str': pass def __eq__(self, other: 'str') -> bool: pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/classmethod.pyi0000644€tŠÔÚ€2›s®0000000074513576752246026475 0ustar jukkaDROPBOX\Domain Users00000000000000import typing _T = typing.TypeVar('_T') class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass def mro(self) -> typing.Any: pass class function: pass # Dummy definitions. 
class classmethod: pass class staticmethod: pass class int: @classmethod def from_bytes(cls, bytes: bytes, byteorder: str) -> int: pass class str: pass class bytes: pass class bool: pass class ellipsis: pass class tuple(typing.Generic[_T]): pass mypy-0.761/test-data/unit/fixtures/complex.pyi0000644€tŠÔÚ€2›s®0000000044013576752246025626 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used for some float/complex test cases. # Please don't add tuple to this file, it is used to test incomplete fixtures. class object: def __init__(self): pass class type: pass class function: pass class int: pass class float: pass class complex: pass class str: pass mypy-0.761/test-data/unit/fixtures/complex_tuple.pyi0000644€tŠÔÚ€2›s®0000000040513576752246027040 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, TypeVar _T = TypeVar('_T') class object: def __init__(self): pass class tuple(Generic[_T]): pass class type: pass class function: pass class int: pass class float: pass class complex: pass class str: pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/dict.pyi0000644€tŠÔÚ€2›s®0000000315713576752246025112 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in dictionary-related test cases. 
from typing import ( TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union, Sequence ) T = TypeVar('T') KT = TypeVar('KT') VT = TypeVar('VT') class object: def __init__(self) -> None: pass def __init_subclass__(cls) -> None: pass def __eq__(self, other: object) -> bool: pass class type: pass class dict(Mapping[KT, VT]): @overload def __init__(self, **kwargs: VT) -> None: pass @overload def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass def __getitem__(self, key: KT) -> VT: pass def __setitem__(self, k: KT, v: VT) -> None: pass def __iter__(self) -> Iterator[KT]: pass def __contains__(self, item: object) -> int: pass def update(self, a: Mapping[KT, VT]) -> None: pass @overload def get(self, k: KT) -> Optional[VT]: pass @overload def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass def __len__(self) -> int: ... class int: # for convenience def __add__(self, x: int) -> int: pass class str: pass # for keyword argument key type class unicode: pass # needed for py2 docstrings class list(Sequence[T]): # needed by some test cases def __getitem__(self, x: int) -> T: pass def __iter__(self) -> Iterator[T]: pass def __mul__(self, x: int) -> list[T]: pass def __contains__(self, item: object) -> bool: pass class tuple(Generic[T]): pass class function: pass class float: pass class bool(int): pass class ellipsis: pass def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass class BaseException: pass mypy-0.761/test-data/unit/fixtures/divmod.pyi0000644€tŠÔÚ€2›s®0000000110413576752246025437 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, Tuple, SupportsInt class object: def __init__(self): pass class int(SupportsInt): def __divmod__(self, other: int) -> Tuple[int, int]: pass def __rdivmod__(self, other: int) -> Tuple[int, int]: pass class float(SupportsInt): def __divmod__(self, other: float) -> Tuple[float, float]: pass def __rdivmod__(self, other: float) -> Tuple[float, 
float]: pass class tuple: pass class function: pass class str: pass class type: pass class ellipsis: pass _N = TypeVar('_N', int, float) def divmod(_x: _N, _y: _N) -> Tuple[_N, _N]: ... mypy-0.761/test-data/unit/fixtures/exception.pyi0000644€tŠÔÚ€2›s®0000000112113576752246026152 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, TypeVar T = TypeVar('T') class object: def __init__(self): pass class type: pass class tuple(Generic[T]): pass class function: pass class int: pass class str: pass class unicode: pass class bool: pass class ellipsis: pass # Note: this is a slight simplification. In Python 2, the inheritance hierarchy # is actually Exception -> StandardError -> RuntimeError -> ... class BaseException: def __init__(self, *args: object) -> None: ... class Exception(BaseException): pass class RuntimeError(Exception): pass class NotImplementedError(RuntimeError): pass mypy-0.761/test-data/unit/fixtures/f_string.pyi0000644€tŠÔÚ€2›s®0000000146413576752246026001 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used for format-string-related test cases. # We need str and list, and str needs join and format methods. 
from typing import TypeVar, Generic, Iterable, Iterator, List, overload T = TypeVar('T') class object: def __init__(self): pass class type: def __init__(self, x) -> None: pass class ellipsis: pass class list(Iterable[T], Generic[T]): @overload def __init__(self) -> None: pass @overload def __init__(self, x: Iterable[T]) -> None: pass def append(self, x: T) -> None: pass class tuple(Generic[T]): pass class function: pass class int: def __add__(self, i: int) -> int: pass class float: pass class bool(int): pass class str: def __add__(self, s: str) -> str: pass def format(self, *args) -> str: pass def join(self, l: List[str]) -> str: pass mypy-0.761/test-data/unit/fixtures/fine_grained.pyi0000644€tŠÔÚ€2›s®0000000116213576752246026573 0ustar jukkaDROPBOX\Domain Users00000000000000# Small stub for fine-grained incremental checking test cases # # TODO: Migrate to regular stubs once fine-grained incremental is robust # enough to handle them. import types from typing import TypeVar, Generic T = TypeVar('T') class Any: pass class object: def __init__(self) -> None: pass class type: def __init__(self, x: Any) -> None: pass class int: def __add__(self, other: 'int') -> 'int': pass class str: def __add__(self, other: 'str') -> 'str': pass class float: pass class bytes: pass class tuple(Generic[T]): pass class function: pass class ellipsis: pass class list(Generic[T]): pass mypy-0.761/test-data/unit/fixtures/float.pyi0000644€tŠÔÚ€2›s®0000000142413576752246025267 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, TypeVar T = TypeVar('T') Any = 0 class object: def __init__(self) -> None: pass class type: def __init__(self, x: Any) -> None: pass class str: def __add__(self, other: 'str') -> 'str': pass def __rmul__(self, n: int) -> str: ... class bytes: pass class tuple(Generic[T]): pass class function: pass class ellipsis: pass class int: def __abs__(self) -> int: ... def __float__(self) -> float: ... def __int__(self) -> int: ... 
def __mul__(self, x: int) -> int: ... def __neg__(self) -> int: ... def __rmul__(self, x: int) -> int: ... class float: def __float__(self) -> float: ... def __int__(self) -> int: ... def __mul__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... mypy-0.761/test-data/unit/fixtures/floatdict.pyi0000644€tŠÔÚ€2›s®0000000376213576752246026142 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union T = TypeVar('T') KT = TypeVar('KT') VT = TypeVar('VT') Any = 0 class object: def __init__(self) -> None: pass class type: def __init__(self, x: Any) -> None: pass class str: def __add__(self, other: 'str') -> 'str': pass def __rmul__(self, n: int) -> str: ... class bytes: pass class tuple(Generic[T]): pass class slice: pass class function: pass class ellipsis: pass class list(Iterable[T], Generic[T]): @overload def __init__(self) -> None: pass @overload def __init__(self, x: Iterable[T]) -> None: pass def __iter__(self) -> Iterator[T]: pass def __add__(self, x: list[T]) -> list[T]: pass def __mul__(self, x: int) -> list[T]: pass def __getitem__(self, x: int) -> T: pass def append(self, x: T) -> None: pass def extend(self, x: Iterable[T]) -> None: pass class dict(Iterable[KT], Mapping[KT, VT], Generic[KT, VT]): @overload def __init__(self, **kwargs: VT) -> None: pass @overload def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass def __setitem__(self, k: KT, v: VT) -> None: pass def __getitem__(self, k: KT) -> VT: pass def __iter__(self) -> Iterator[KT]: pass def update(self, a: Mapping[KT, VT]) -> None: pass @overload def get(self, k: KT) -> Optional[VT]: pass @overload def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass class int: def __float__(self) -> float: ... def __int__(self) -> int: ... def __mul__(self, x: int) -> int: ... def __rmul__(self, x: int) -> int: ... def __truediv__(self, x: int) -> int: ... 
def __rtruediv__(self, x: int) -> int: ... class float: def __float__(self) -> float: ... def __int__(self) -> int: ... def __mul__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... def __truediv__(self, x: float) -> float: ... def __rtruediv__(self, x: float) -> float: ... mypy-0.761/test-data/unit/fixtures/floatdict_python2.pyi0000644€tŠÔÚ€2›s®0000000376413576752246027627 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union T = TypeVar('T') KT = TypeVar('KT') VT = TypeVar('VT') Any = 0 class object: def __init__(self) -> None: pass class type: def __init__(self, x: Any) -> None: pass class str: def __add__(self, other: 'str') -> 'str': pass def __rmul__(self, n: int) -> str: ... class unicode: pass class tuple(Generic[T]): pass class slice: pass class function: pass class ellipsis: pass class list(Iterable[T], Generic[T]): @overload def __init__(self) -> None: pass @overload def __init__(self, x: Iterable[T]) -> None: pass def __iter__(self) -> Iterator[T]: pass def __add__(self, x: list[T]) -> list[T]: pass def __mul__(self, x: int) -> list[T]: pass def __getitem__(self, x: int) -> T: pass def append(self, x: T) -> None: pass def extend(self, x: Iterable[T]) -> None: pass class dict(Iterable[KT], Mapping[KT, VT], Generic[KT, VT]): @overload def __init__(self, **kwargs: VT) -> None: pass @overload def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass def __setitem__(self, k: KT, v: VT) -> None: pass def __getitem__(self, k: KT) -> VT: pass def __iter__(self) -> Iterator[KT]: pass def update(self, a: Mapping[KT, VT]) -> None: pass @overload def get(self, k: KT) -> Optional[VT]: pass @overload def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass class int: def __float__(self) -> float: ... def __int__(self) -> int: ... def __mul__(self, x: int) -> int: ... def __rmul__(self, x: int) -> int: ... 
def __truediv__(self, x: int) -> int: ... def __rtruediv__(self, x: int) -> int: ... class float: def __float__(self) -> float: ... def __int__(self) -> int: ... def __mul__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... def __truediv__(self, x: float) -> float: ... def __rtruediv__(self, x: float) -> float: ... mypy-0.761/test-data/unit/fixtures/for.pyi0000644€tŠÔÚ€2›s®0000000076613576752246024760 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub used in for statement test cases from typing import TypeVar, Generic, Iterable, Iterator, Generator from abc import abstractmethod, ABCMeta t = TypeVar('t') class object: def __init__(self) -> None: pass class type: pass class tuple(Generic[t]): def __iter__(self) -> Iterator[t]: pass class function: pass class bool: pass class int: pass # for convenience class str: pass # for convenience class list(Iterable[t], Generic[t]): def __iter__(self) -> Iterator[t]: pass mypy-0.761/test-data/unit/fixtures/function.pyi0000644€tŠÔÚ€2›s®0000000016213576752246026005 0ustar jukkaDROPBOX\Domain Users00000000000000class object: def __init__(self): pass class type: pass class function: pass class int: pass class str: pass mypy-0.761/test-data/unit/fixtures/isinstance.pyi0000644€tŠÔÚ€2›s®0000000120413576752246026316 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, TypeVar, Generic, Union, cast, Any, Type T = TypeVar('T') class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class tuple(Generic[T]): pass class function: pass def isinstance(x: object, t: Union[Type[object], Tuple[Type[object], ...]]) -> bool: pass def issubclass(x: object, t: Union[Type[object], Tuple[Type[object], ...]]) -> bool: pass class int: def __add__(self, other: 'int') -> 'int': pass class float: pass class bool(int): pass class str: def __add__(self, other: 'str') -> 'str': pass class ellipsis: pass NotImplemented = cast(Any, None) 
mypy-0.761/test-data/unit/fixtures/isinstancelist.pyi0000644€tŠÔÚ€2›s®0000000317713576752246027225 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Iterable, Iterator, TypeVar, List, Mapping, overload, Tuple, Set, Union, Generic, Sequence ) class object: def __init__(self) -> None: pass def __eq__(self, other: object) -> bool: pass class type: def __init__(self, x) -> None: pass class function: pass class ellipsis: pass class classmethod: pass def isinstance(x: object, t: Union[type, Tuple]) -> bool: pass def issubclass(x: object, t: Union[type, Tuple]) -> bool: pass class int: def __add__(self, x: int) -> int: pass class float: pass class bool(int): pass class str: def __add__(self, x: str) -> str: pass def __getitem__(self, x: int) -> str: pass T = TypeVar('T') KT = TypeVar('KT') VT = TypeVar('VT') class tuple(Generic[T]): def __len__(self) -> int: pass class list(Sequence[T]): def __iter__(self) -> Iterator[T]: pass def __mul__(self, x: int) -> list[T]: pass def __setitem__(self, x: int, v: T) -> None: pass def __getitem__(self, x: int) -> T: pass def __add__(self, x: List[T]) -> T: pass def __contains__(self, item: object) -> bool: pass class dict(Mapping[KT, VT]): @overload def __init__(self, **kwargs: VT) -> None: pass @overload def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass def __setitem__(self, k: KT, v: VT) -> None: pass def __iter__(self) -> Iterator[KT]: pass def update(self, a: Mapping[KT, VT]) -> None: pass class set(Generic[T]): def __iter__(self) -> Iterator[T]: pass def add(self, x: T) -> None: pass def discard(self, x: T) -> None: pass def update(self, x: Set[T]) -> None: pass def pop(self) -> T: pass mypy-0.761/test-data/unit/fixtures/list.pyi0000644€tŠÔÚ€2›s®0000000164413576752246025141 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in list-related test cases. 
from typing import TypeVar, Generic, Iterable, Iterator, Sequence, overload T = TypeVar('T') class object: def __init__(self) -> None: pass class type: pass class ellipsis: pass class list(Sequence[T]): @overload def __init__(self) -> None: pass @overload def __init__(self, x: Iterable[T]) -> None: pass def __iter__(self) -> Iterator[T]: pass def __contains__(self, item: object) -> bool: pass def __add__(self, x: list[T]) -> list[T]: pass def __mul__(self, x: int) -> list[T]: pass def __getitem__(self, x: int) -> T: pass def __setitem__(self, x: int, v: T) -> None: pass def append(self, x: T) -> None: pass def extend(self, x: Iterable[T]) -> None: pass class tuple(Generic[T]): pass class function: pass class int: pass class float: pass class str: pass class bool(int): pass property = object() # Dummy definition. mypy-0.761/test-data/unit/fixtures/module.pyi0000644€tŠÔÚ€2›s®0000000066213576752246025452 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Generic, TypeVar, Sequence from types import ModuleType T = TypeVar('T') S = TypeVar('S') class list(Generic[T], Sequence[T]): pass class object: def __init__(self) -> None: pass class type: pass class function: pass class int: pass class str: pass class bool: pass class tuple(Generic[T]): pass class dict(Generic[T, S]): pass class ellipsis: pass classmethod = object() staticmethod = object() mypy-0.761/test-data/unit/fixtures/module_all.pyi0000644€tŠÔÚ€2›s®0000000071613576752246026302 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, Sequence, TypeVar from types import ModuleType _T = TypeVar('_T') class object: def __init__(self) -> None: pass class type: pass class function: pass class int: pass class str: pass class bool: pass class list(Generic[_T], Sequence[_T]): def append(self, x: _T): pass def extend(self, x: Sequence[_T]): pass def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass class tuple(Generic[_T]): pass class ellipsis: pass 
mypy-0.761/test-data/unit/fixtures/module_all_python2.pyi0000644€tŠÔÚ€2›s®0000000063613576752246027766 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, Sequence, TypeVar _T = TypeVar('_T') class object: def __init__(self) -> None: pass class type: pass class function: pass class int: pass class str: pass class unicode: pass class list(Generic[_T], Sequence[_T]): def append(self, x: _T): pass def extend(self, x: Sequence[_T]): pass def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass class tuple(Generic[_T]): pass mypy-0.761/test-data/unit/fixtures/notimplemented.pyi0000644€tŠÔÚ€2›s®0000000040113576752246027200 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub used in NotImplemented related cases. from typing import Any, cast class object: def __init__(self) -> None: pass class type: pass class function: pass class bool: pass class int: pass class str: pass NotImplemented = cast(Any, None) mypy-0.761/test-data/unit/fixtures/object_with_init_subclass.pyi0000644€tŠÔÚ€2›s®0000000416413576752246031411 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Sequence, Iterator, TypeVar, Mapping, Iterable, Optional, Union, overload, Tuple, Generic, List class object: def __init__(self) -> None: ... def __init_subclass__(cls) -> None: ... T = TypeVar('T') KT = TypeVar('KT') VT = TypeVar('VT') # copy pased from primitives.pyi class type: def __init__(self, x) -> None: pass class int: # Note: this is a simplification of the actual signature def __init__(self, x: object = ..., base: int = ...) 
-> None: pass def __add__(self, i: int) -> int: pass class float: def __float__(self) -> float: pass class complex: pass class bool(int): pass class str(Sequence[str]): def __add__(self, s: str) -> str: pass def __iter__(self) -> Iterator[str]: pass def __contains__(self, other: object) -> bool: pass def __getitem__(self, item: int) -> str: pass def format(self, *args) -> str: pass class bytes(Sequence[int]): def __iter__(self) -> Iterator[int]: pass def __contains__(self, other: object) -> bool: pass def __getitem__(self, item: int) -> int: pass class bytearray: pass class tuple(Generic[T]): pass class function: pass class ellipsis: pass # copy-pasted from list.pyi class list(Sequence[T]): def __iter__(self) -> Iterator[T]: pass def __mul__(self, x: int) -> list[T]: pass def __setitem__(self, x: int, v: T) -> None: pass def __getitem__(self, x: int) -> T: pass def __add__(self, x: List[T]) -> T: pass def __contains__(self, item: object) -> bool: pass # copy-pasted from dict.pyi class dict(Mapping[KT, VT]): @overload def __init__(self, **kwargs: VT) -> None: pass @overload def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass def __getitem__(self, key: KT) -> VT: pass def __setitem__(self, k: KT, v: VT) -> None: pass def __iter__(self) -> Iterator[KT]: pass def __contains__(self, item: object) -> int: pass def update(self, a: Mapping[KT, VT]) -> None: pass @overload def get(self, k: KT) -> Optional[VT]: pass @overload def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass def __len__(self) -> int: ... mypy-0.761/test-data/unit/fixtures/ops.pyi0000644€tŠÔÚ€2›s®0000000454113576752246024766 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, Any, Generic, Sequence, Tuple, TypeVar, Optional Tco = TypeVar('Tco', covariant=True) # This is an extension of transform builtins with additional operations. 
class object: def __init__(self) -> None: pass def __eq__(self, o: 'object') -> 'bool': pass def __ne__(self, o: 'object') -> 'bool': pass class type: pass class slice: pass class tuple(Sequence[Tco], Generic[Tco]): def __getitem__(self, x: int) -> Tco: pass def __eq__(self, x: object) -> bool: pass def __ne__(self, x: object) -> bool: pass def __lt__(self, x: 'tuple') -> bool: pass def __le__(self, x: 'tuple') -> bool: pass def __gt__(self, x: 'tuple') -> bool: pass def __ge__(self, x: 'tuple') -> bool: pass class function: pass class bool: pass class str: def __init__(self, x: 'int') -> None: pass def __add__(self, x: 'str') -> 'str': pass def __eq__(self, x: object) -> bool: pass def startswith(self, x: 'str') -> bool: pass def strip(self) -> 'str': pass class unicode: pass class int: def __add__(self, x: 'int') -> 'int': pass def __radd__(self, x: 'int') -> 'int': pass def __sub__(self, x: 'int') -> 'int': pass def __mul__(self, x: 'int') -> 'int': pass def __div__(self, x: 'int') -> 'int': pass def __rdiv__(self, x: 'int') -> 'int': pass def __truediv__(self, x: 'int') -> 'int': pass def __rtruediv__(self, x: 'int') -> 'int': pass def __mod__(self, x: 'int') -> 'int': pass def __floordiv__(self, x: 'int') -> 'int': pass def __pow__(self, x: 'int', __modulo: Optional[int] = ...) 
-> Any: pass def __pos__(self) -> 'int': pass def __neg__(self) -> 'int': pass def __eq__(self, x: object) -> bool: pass def __ne__(self, x: object) -> bool: pass def __lt__(self, x: 'int') -> bool: pass def __le__(self, x: 'int') -> bool: pass def __gt__(self, x: 'int') -> bool: pass def __ge__(self, x: 'int') -> bool: pass class float: def __add__(self, x: 'float') -> 'float': pass def __radd__(self, x: 'float') -> 'float': pass def __div__(self, x: 'float') -> 'float': pass def __rdiv__(self, x: 'float') -> 'float': pass def __truediv__(self, x: 'float') -> 'float': pass def __rtruediv__(self, x: 'float') -> 'float': pass class BaseException: pass def __print(a1=None, a2=None, a3=None, a4=None): pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/primitives.pyi0000644€tŠÔÚ€2›s®0000000355413576752246026363 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub with non-generic primitive types from typing import Generic, TypeVar, Sequence, Iterator, Mapping T = TypeVar('T') V = TypeVar('V') class object: def __init__(self) -> None: pass def __str__(self) -> str: pass def __eq__(self, other: object) -> bool: pass def __ne__(self, other: object) -> bool: pass class type: def __init__(self, x) -> None: pass class int: # Note: this is a simplification of the actual signature def __init__(self, x: object = ..., base: int = ...) 
-> None: pass def __add__(self, i: int) -> int: pass class float: def __float__(self) -> float: pass class complex: pass class bool(int): pass class str(Sequence[str]): def __add__(self, s: str) -> str: pass def __iter__(self) -> Iterator[str]: pass def __contains__(self, other: object) -> bool: pass def __getitem__(self, item: int) -> str: pass def format(self, *args, **kwargs) -> str: pass class bytes(Sequence[int]): def __iter__(self) -> Iterator[int]: pass def __contains__(self, other: object) -> bool: pass def __getitem__(self, item: int) -> int: pass class bytearray(Sequence[int]): def __init__(self, x: bytes) -> None: pass def __iter__(self) -> Iterator[int]: pass def __contains__(self, other: object) -> bool: pass def __getitem__(self, item: int) -> int: pass class memoryview(Sequence[int]): def __init__(self, x: bytes) -> None: pass def __iter__(self) -> Iterator[int]: pass def __contains__(self, other: object) -> bool: pass def __getitem__(self, item: int) -> int: pass class tuple(Generic[T]): pass class list(Sequence[T]): def __iter__(self) -> Iterator[T]: pass def __contains__(self, other: object) -> bool: pass def __getitem__(self, item: int) -> T: pass class dict(Mapping[T, V]): def __iter__(self) -> Iterator[T]: pass class function: pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/property.pyi0000644€tŠÔÚ€2›s®0000000053513576752246026050 0ustar jukkaDROPBOX\Domain Users00000000000000import typing _T = typing.TypeVar('_T') class object: def __init__(self) -> None: pass class type: def __init__(self, x: typing.Any) -> None: pass class function: pass property = object() # Dummy definition class int: pass class str: pass class bytes: pass class bool: pass class ellipsis: pass class tuple(typing.Generic[_T]): pass mypy-0.761/test-data/unit/fixtures/property_py2.pyi0000644€tŠÔÚ€2›s®0000000053713576752246026644 0ustar jukkaDROPBOX\Domain Users00000000000000import typing _T = typing.TypeVar('_T') class object: def __init__(self) -> None: pass 
class type: def __init__(self, x: typing.Any) -> None: pass class function: pass property = object() # Dummy definition class int: pass class str: pass class unicode: pass class bool: pass class ellipsis: pass class tuple(typing.Generic[_T]): pass mypy-0.761/test-data/unit/fixtures/python2.pyi0000644€tŠÔÚ€2›s®0000000174213576752246025570 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, Iterable, TypeVar, Sequence, Iterator class object: def __init__(self) -> None: pass def __eq__(self, other: object) -> bool: pass def __ne__(self, other: object) -> bool: pass class type: def __init__(self, x) -> None: pass class function: pass class int: pass class str: def format(self, *args, **kwars) -> str: ... class unicode: def format(self, *args, **kwars) -> unicode: ... class bool(int): pass T = TypeVar('T') S = TypeVar('S') class list(Iterable[T], Generic[T]): def __iter__(self) -> Iterator[T]: pass def __getitem__(self, item: int) -> T: pass class tuple(Iterable[T]): def __iter__(self) -> Iterator[T]: pass class dict(Generic[T, S]): pass class bytearray(Sequence[int]): def __init__(self, string: str) -> None: pass def __contains__(self, item: object) -> bool: pass def __iter__(self) -> Iterator[int]: pass def __getitem__(self, item: int) -> int: pass # Definition of None is implicit mypy-0.761/test-data/unit/fixtures/set.pyi0000644€tŠÔÚ€2›s®0000000110613576752246024752 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in set-related test cases. 
from typing import TypeVar, Generic, Iterator, Iterable, Set T = TypeVar('T') class object: def __init__(self) -> None: pass class type: pass class tuple(Generic[T]): pass class function: pass class int: pass class str: pass class bool: pass class ellipsis: pass class set(Iterable[T], Generic[T]): def __iter__(self) -> Iterator[T]: pass def __contains__(self, item: object) -> bool: pass def add(self, x: T) -> None: pass def discard(self, x: T) -> None: pass def update(self, x: Set[T]) -> None: pass mypy-0.761/test-data/unit/fixtures/slice.pyi0000644€tŠÔÚ€2›s®0000000043313576752246025260 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in slicing test cases. from typing import Generic, TypeVar T = TypeVar('T') class object: def __init__(self): pass class type: pass class tuple(Generic[T]): pass class function: pass class int: pass class str: pass class slice: pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/staticmethod.pyi0000644€tŠÔÚ€2›s®0000000054213576752246026652 0ustar jukkaDROPBOX\Domain Users00000000000000import typing class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class function: pass staticmethod = object() # Dummy definition. class int: @staticmethod def from_bytes(bytes: bytes, byteorder: str) -> int: pass class str: pass class unicode: pass class bytes: pass class ellipsis: pass mypy-0.761/test-data/unit/fixtures/transform.pyi0000644€tŠÔÚ€2›s®0000000164713576752246026204 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stubs used implicitly in program transformation test cases. class object: def __init__(self) -> None: pass class type: pass # str is handy for debugging; allows outputting messages. class str: pass # Primitive types int/float have special coercion behaviour (they may have # a different representation from ordinary values). 
class int: pass class float: pass # The functions below are special functions used in test cases; their # implementations are actually in the __dynchk module, but they are defined # here so that the semantic analyzer and the type checker are happy without # having to analyze the entire __dynchk module all the time. # # The transformation implementation has special case handling for these # functions; it's a bit ugly but it works for now. def __print(a1=None, a2=None, a3=None, a4=None): # Do not use *args since this would require list and break many test # cases. pass mypy-0.761/test-data/unit/fixtures/tuple-simple.pyi0000644€tŠÔÚ€2›s®0000000072413576752246026604 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in some tuple-related test cases. # # This is a simpler version of tuple.py which is useful # and makes some test cases easier to write/debug. from typing import Iterable, TypeVar, Generic T = TypeVar('T') class object: def __init__(self): pass class type: pass class tuple(Generic[T]): def __getitem__(self, x: int) -> T: pass class function: pass # We need int for indexing tuples. class int: pass class str: pass # For convenience mypy-0.761/test-data/unit/fixtures/tuple.pyi0000644€tŠÔÚ€2›s®0000000226613576752246025320 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in tuple-related test cases. 
from typing import Iterable, Iterator, TypeVar, Generic, Sequence, Any, overload, Tuple Tco = TypeVar('Tco', covariant=True) class object: def __init__(self): pass class type: def __init__(self, *a) -> None: pass def __call__(self, *a) -> object: pass class tuple(Sequence[Tco], Generic[Tco]): def __iter__(self) -> Iterator[Tco]: pass def __contains__(self, item: object) -> bool: pass def __getitem__(self, x: int) -> Tco: pass def __rmul__(self, n: int) -> tuple: pass def __add__(self, x: Tuple[Tco, ...]) -> Tuple[Tco, ...]: pass def count(self, obj: Any) -> int: pass class function: pass class ellipsis: pass # We need int and slice for indexing tuples. class int: def __neg__(self) -> 'int': pass class slice: pass class bool: pass class str: pass # For convenience class unicode: pass T = TypeVar('T') class list(Sequence[T], Generic[T]): @overload def __getitem__(self, i: int) -> T: ... @overload def __getitem__(self, s: slice) -> list[T]: ... def isinstance(x: object, t: type) -> bool: pass def sum(iterable: Iterable[T], start: T = None) -> T: pass class BaseException: pass mypy-0.761/test-data/unit/fixtures/type.pyi0000644€tŠÔÚ€2›s®0000000064513576752246025147 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub used in type-related test cases. from typing import Generic, TypeVar, List T = TypeVar('T') class object: def __init__(self) -> None: pass def __str__(self) -> 'str': pass class list(Generic[T]): pass class type: __name__: str def mro(self) -> List['type']: pass class tuple(Generic[T]): pass class function: pass class bool: pass class int: pass class str: pass class unicode: pass mypy-0.761/test-data/unit/fixtures/typing-full.pyi0000644€tŠÔÚ€2›s®0000001132613576752246026436 0ustar jukkaDROPBOX\Domain Users00000000000000# More complete stub for typing module. # # Use [typing fixtures/typing-full.pyi] to use this instead of lib-stub/typing.pyi # in a particular test case. 
# # Many of the definitions have special handling in the type checker, so they # can just be initialized to anything. from abc import abstractmethod, ABCMeta class GenericMeta(type): pass cast = 0 overload = 0 Any = 0 Union = 0 Optional = 0 TypeVar = 0 Generic = 0 Protocol = 0 Tuple = 0 Callable = 0 _promote = 0 NamedTuple = 0 Type = 0 no_type_check = 0 ClassVar = 0 Final = 0 Literal = 0 TypedDict = 0 NoReturn = 0 NewType = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) U = TypeVar('U') V = TypeVar('V') S = TypeVar('S') # Note: definitions below are different from typeshed, variances are declared # to silence the protocol variance checks. Maybe it is better to use type: ignore? @runtime_checkable class Container(Protocol[T_co]): @abstractmethod # Use int because bool isn't in the default test builtins def __contains__(self, arg: object) -> int: pass @runtime_checkable class Sized(Protocol): @abstractmethod def __len__(self) -> int: pass @runtime_checkable class Iterable(Protocol[T_co]): @abstractmethod def __iter__(self) -> 'Iterator[T_co]': pass @runtime_checkable class Iterator(Iterable[T_co], Protocol): @abstractmethod def __next__(self) -> T_co: pass class Generator(Iterator[T], Generic[T, U, V]): @abstractmethod def send(self, value: U) -> T: pass @abstractmethod def throw(self, typ: Any, val: Any=None, tb: Any=None) -> None: pass @abstractmethod def close(self) -> None: pass @abstractmethod def __iter__(self) -> 'Generator[T, U, V]': pass class AsyncGenerator(AsyncIterator[T], Generic[T, U]): @abstractmethod def __anext__(self) -> Awaitable[T]: pass @abstractmethod def asend(self, value: U) -> Awaitable[T]: pass @abstractmethod def athrow(self, typ: Any, val: Any=None, tb: Any=None) -> Awaitable[T]: pass @abstractmethod def aclose(self) -> Awaitable[T]: pass @abstractmethod def __aiter__(self) -> 'AsyncGenerator[T, U]': pass @runtime_checkable class Awaitable(Protocol[T]): @abstractmethod def 
__await__(self) -> Generator[Any, Any, T]: pass class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S], metaclass=ABCMeta): pass class Coroutine(Awaitable[V], Generic[T, U, V]): @abstractmethod def send(self, value: U) -> T: pass @abstractmethod def throw(self, typ: Any, val: Any=None, tb: Any=None) -> None: pass @abstractmethod def close(self) -> None: pass @runtime_checkable class AsyncIterable(Protocol[T]): @abstractmethod def __aiter__(self) -> 'AsyncIterator[T]': pass @runtime_checkable class AsyncIterator(AsyncIterable[T], Protocol): def __aiter__(self) -> 'AsyncIterator[T]': return self @abstractmethod def __anext__(self) -> Awaitable[T]: pass class Sequence(Iterable[T_co], Container[T_co]): @abstractmethod def __getitem__(self, n: Any) -> T_co: pass class Mapping(Iterable[T], Generic[T, T_co], metaclass=ABCMeta): def __getitem__(self, key: T) -> T_co: pass @overload def get(self, k: T) -> Optional[T_co]: pass @overload def get(self, k: T, default: Union[T_co, V]) -> Union[T_co, V]: pass def values(self) -> Iterable[T_co]: pass # Approximate return type def __len__(self) -> int: ... def __contains__(self, arg: object) -> int: pass class MutableMapping(Mapping[T, U], metaclass=ABCMeta): def __setitem__(self, k: T, v: U) -> None: pass class SupportsInt(Protocol): def __int__(self) -> int: pass class SupportsAbs(Protocol[T_co]): def __abs__(self) -> T_co: pass def runtime_checkable(cls: T) -> T: return cls class ContextManager(Generic[T]): def __enter__(self) -> T: pass # Use Any because not all the precise types are in the fixtures. def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass TYPE_CHECKING = 1 # Fallback type for all typed dicts (does not exist at runtime). class _TypedDict(Mapping[str, object]): # Needed to make this class non-abstract. It is explicitly declared abstract in # typeshed, but we don't want to import abc here, as it would slow down the tests. def __iter__(self) -> Iterator[str]: ... 
def copy(self: T) -> T: ... # Using NoReturn so that only calls using the plugin hook can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... # Mypy expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: T = ...) -> object: ... def update(self: T, __m: T) -> None: ... def __delitem__(self, k: NoReturn) -> None: ... mypy-0.761/test-data/unit/fixtures/union.pyi0000644€tŠÔÚ€2›s®0000000053413576752246025313 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in tuple-related test cases. from isinstance import isinstance from typing import Iterable, TypeVar, Generic T = TypeVar('T') class object: def __init__(self): pass class type: pass class function: pass class tuple(Generic[T]): pass # We need int for indexing tuples. class int: pass class str: pass # For convenience mypy-0.761/test-data/unit/hacks.txt0000644€tŠÔÚ€2›s®0000000664213576752246023427 0ustar jukkaDROPBOX\Domain Users00000000000000Weird legacy stuff in test cases ================================ Due to historical reasons, test cases contain things that may appear baffling without extra context. This file attempts to describe most of them. Strict optional is disabled be default -------------------------------------- Strict optional checking is enabled in mypy by default, but test cases must enable it explicitly, either through `# flags: --strict-optional` or by including `optional` as a substring in your test file name. The reason for this is that many test cases written before strict optional was implemented use the idiom `x = None # type: t`, and updating all of these test cases would take a lot of work. Dummy if statements to prevent redefinition ------------------------------------------- Many test cases use if statements to prevent an assignment from creating a new variable. This in anticipation of allowing assignments to redefine variables by default. 
Conditional assignments will continue to refine a previously defined variable instead of defining a new one. When the test cases were written, we didn't anticipate that variables could be allowed to be redefined, and adding if statements was the easiest way to migrate these tests. Example: ``` x = 0 if int(): x = '' # Always generates an error since this is not a redefinition y = 0 y = '' # This could be valid if a new 'y' is defined here ``` Note that some of the checks may turn out to be redundant, as the exact rules for what constitues a redefinition are still up for debate. This is okay since the extra if statements generally don't otherwise affect semantics. There are a few ways this is used, depending on the context: * `if int():` is the most common one. Assignments in the if body won't redefine variables defined before the if statement. * `if 1:` is used if the body of the if statement returns a value, and mypy would complain about a missing return statement otherwise. This works since `if 1:` is treated as an always taken condition, whereas `if int():` is not recognized as such. * `if str():` is used if the builtins fixture doesn't define `int` for some reason. Function definition to prevent redefinition ------------------------------------------- Sometimes test cases assume that a variable is not redefined, and we insert a dummy function definition to prevent this, since variables won't be able to be redefined across a function definition. Example: ``` x = 0 def f(): pass x = '' # Does not redefine x because of the definition of f() above ``` Dummy variable reference to allow redefinition ---------------------------------------------- The plan is to only allow a variable to be redefined if the value has been accessed. 
This wouldn't count as redefinition, since `x` is never read: ``` x = 0 x = '' # Not a redefinition ``` Sometimes we add a dummy variable access to allow redefinition in the future, or to trigger the redefinition machinery even if redefinition should not be okay: ``` x = 0 x x = '' # Could be a redefinition ``` The reason for this special case is type comments with dummy initializers, where the second assignment should never be treated as a redefinition: ``` x = None # type: int x = '' # Should not redefine x, since it has only been declared ``` Similarly, if there is only a variable annotation, the first assignment won't redefine the variable, as this would override the declared type: ``` x: int x = '' # Should not redefine x ``` mypy-0.761/test-data/unit/lib-stub/0000755€tŠÔÚ€2›s®0000000000013576752267023311 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/unit/lib-stub/__builtin__.pyi0000644€tŠÔÚ€2›s®0000000074213576752246026276 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, TypeVar _T = TypeVar('_T') Any = 0 class object: def __init__(self): # type: () -> None pass class type: def __init__(self, x): # type: (Any) -> None pass # These are provided here for convenience. 
class int: pass class float: pass class str: pass class unicode: pass class tuple(Generic[_T]): pass class function: pass class ellipsis: pass def print(*args, end=''): pass # Definition of None is implicit mypy-0.761/test-data/unit/lib-stub/abc.pyi0000644€tŠÔÚ€2›s®0000000032313576752246024554 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Type, Any, TypeVar T = TypeVar('T', bound=Type[Any]) class ABC(type): pass class ABCMeta(type): def register(cls, tp: T) -> T: pass abstractmethod = object() abstractproperty = object() mypy-0.761/test-data/unit/lib-stub/attr.pyi0000644€tŠÔÚ€2›s®0000001044713576752246025011 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, overload, Callable, Any, Type, Optional, Union, Sequence, Mapping _T = TypeVar('_T') _C = TypeVar('_C', bound=type) _ValidatorType = Callable[[Any, Any, _T], Any] _ConverterType = Callable[[Any], _T] _FilterType = Callable[[Any, Any], bool] _ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] # This form catches explicit None or no default but with no other arguments returns Any. @overload def attrib(default: None = ..., validator: None = ..., repr: bool = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., convert: None = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: None = ..., converter: None = ..., factory: None = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., ) -> Any: ... # This form catches an explicit None or no default and infers the type from the other arguments. 
@overload def attrib(default: None = ..., validator: Optional[_ValidatorArgType[_T]] = ..., repr: bool = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., convert: Optional[_ConverterType[_T]] = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: Optional[Type[_T]] = ..., converter: Optional[_ConverterType[_T]] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., ) -> _T: ... # This form catches an explicit default argument. @overload def attrib(default: _T, validator: Optional[_ValidatorArgType[_T]] = ..., repr: bool = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., convert: Optional[_ConverterType[_T]] = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: Optional[Type[_T]] = ..., converter: Optional[_ConverterType[_T]] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., ) -> _T: ... # This form covers type=non-Type: e.g. forward references (str), Any @overload def attrib(default: Optional[_T] = ..., validator: Optional[_ValidatorArgType[_T]] = ..., repr: bool = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., convert: Optional[_ConverterType[_T]] = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: object = ..., converter: Optional[_ConverterType[_T]] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., ) -> Any: ... @overload def attrs(maybe_cls: _C, these: Optional[Mapping[str, Any]] = ..., repr_ns: Optional[str] = ..., repr: bool = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., ) -> _C: ... 
@overload def attrs(maybe_cls: None = ..., these: Optional[Mapping[str, Any]] = ..., repr_ns: Optional[str] = ..., repr: bool = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., ) -> Callable[[_C], _C]: ... # aliases s = attributes = attrs ib = attr = attrib dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) mypy-0.761/test-data/unit/lib-stub/blocker.pyi0000644€tŠÔÚ€2›s®0000000006613576752246025454 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub file that generates a blocking parse error x y mypy-0.761/test-data/unit/lib-stub/blocker2.pyi0000644€tŠÔÚ€2›s®0000000010713576752246025532 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub file that generates a blocking semantic analysis error continue mypy-0.761/test-data/unit/lib-stub/broken.pyi0000644€tŠÔÚ€2›s®0000000005213576752246025306 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub file that generates an error x = y mypy-0.761/test-data/unit/lib-stub/builtins.pyi0000644€tŠÔÚ€2›s®0000000101513576752246025657 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, TypeVar _T = TypeVar('_T') class object: def __init__(self) -> None: pass class type: def __init__(self, x: object) -> None: pass # These are provided here for convenience. 
class int: def __add__(self, other: int) -> int: pass def __rmul__(self, other: int) -> int: pass class float: pass class str: def __add__(self, other: 'str') -> 'str': pass class bytes: pass class tuple(Generic[_T]): pass class function: pass class ellipsis: pass # Definition of None is implicit mypy-0.761/test-data/unit/lib-stub/collections.pyi0000644€tŠÔÚ€2›s®0000000067313576752246026355 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Iterable, Union, Optional, Dict, TypeVar def namedtuple( typename: str, field_names: Union[str, Iterable[str]], *, # really bool but many tests don't have bool available rename: int = ..., module: Optional[str] = ..., defaults: Optional[Iterable[Any]] = ... ) -> Any: ... K = TypeVar('K') V = TypeVar('V') class OrderedDict(Dict[K, V]): def __setitem__(self, k: K, v: V) -> None: ... mypy-0.761/test-data/unit/lib-stub/contextlib.pyi0000644€tŠÔÚ€2›s®0000000117613576752246026211 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Generic, TypeVar, Callable, Iterator from typing import ContextManager as ContextManager _T = TypeVar('_T') class GeneratorContextManager(ContextManager[_T], Generic[_T]): def __call__(self, func: Callable[..., _T]) -> Callable[..., _T]: ... def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., GeneratorContextManager[_T]]: ... if sys.version_info >= (3, 7): from typing import AsyncIterator from typing import AsyncContextManager as AsyncContextManager def asynccontextmanager(func: Callable[..., AsyncIterator[_T]]) -> Callable[..., AsyncContextManager[_T]]: ... mypy-0.761/test-data/unit/lib-stub/dataclasses.pyi0000644€tŠÔÚ€2›s®0000000167613576752246026332 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Generic, Mapping, Optional, TypeVar, overload, Type _T = TypeVar('_T') class InitVar(Generic[_T]): ... @overload def dataclass(_cls: Type[_T]) -> Type[_T]: ... 
@overload def dataclass(*, init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., unsafe_hash: bool = ..., frozen: bool = ...) -> Callable[[Type[_T]], Type[_T]]: ... @overload def field(*, default: _T, init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> _T: ... @overload def field(*, default_factory: Callable[[], _T], init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> _T: ... @overload def field(*, init: bool = ..., repr: bool = ..., hash: Optional[bool] = ..., compare: bool = ..., metadata: Optional[Mapping[str, Any]] = ...) -> Any: ... mypy-0.761/test-data/unit/lib-stub/enum.pyi0000644€tŠÔÚ€2›s®0000000203113576752246024771 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, TypeVar, Union, Type, Sized, Iterator _T = TypeVar('_T') class EnumMeta(type, Sized): def __len__(self) -> int: pass # to make it non-abstract def __iter__(self: Type[_T]) -> Iterator[_T]: pass def __reversed__(self: Type[_T]) -> Iterator[_T]: pass def __getitem__(self: Type[_T], name: str) -> _T: pass class Enum(metaclass=EnumMeta): def __new__(cls: Type[_T], value: object) -> _T: pass def __repr__(self) -> str: pass def __str__(self) -> str: pass def __format__(self, format_spec: str) -> str: pass def __hash__(self) -> Any: pass def __reduce_ex__(self, proto: Any) -> Any: pass name: str value: Any _name_: str _value_: Any class IntEnum(int, Enum): value: int def unique(enumeration: _T) -> _T: pass # In reality Flag and IntFlag are 3.6 only class Flag(Enum): def __or__(self: _T, other: Union[int, _T]) -> _T: pass class IntFlag(int, Flag): def __and__(self: _T, other: Union[int, _T]) -> _T: pass class auto(IntFlag): value: Anymypy-0.761/test-data/unit/lib-stub/future/0000755€tŠÔÚ€2›s®0000000000013576752267024623 5ustar jukkaDROPBOX\Domain 
Users00000000000000mypy-0.761/test-data/unit/lib-stub/future/__init__.pyi0000644€tŠÔÚ€2›s®0000000006713576752246027105 0ustar jukkaDROPBOX\Domain Users00000000000000from __future__ import absolute_import, print_function mypy-0.761/test-data/unit/lib-stub/future/utils.pyi0000644€tŠÔÚ€2›s®0000000013113576752246026476 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Type def with_metaclass(meta: Type[type], *bases: type) -> type: pass mypy-0.761/test-data/unit/lib-stub/mypy_extensions.pyi0000644€tŠÔÚ€2›s®0000000356613576752246027320 0ustar jukkaDROPBOX\Domain Users00000000000000# NOTE: Requires fixtures/dict.pyi from typing import ( Any, Dict, Type, TypeVar, Optional, Any, Generic, Mapping, NoReturn as NoReturn, Iterator ) import sys _T = TypeVar('_T') _U = TypeVar('_U') def Arg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def DefaultArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def NamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def DefaultNamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def VarArg(type: _T = ...) -> _T: ... def KwArg(type: _T = ...) -> _T: ... # Fallback type for all typed dicts (does not exist at runtime). class _TypedDict(Mapping[str, object]): # Needed to make this class non-abstract. It is explicitly declared abstract in # typeshed, but we don't want to import abc here, as it would slow down the tests. def __iter__(self) -> Iterator[str]: ... def copy(self: _T) -> _T: ... # Using NoReturn so that only calls using the plugin hook can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... # Mypy expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: _T = ...) -> object: ... def update(self: _T, __m: _T) -> None: ... if sys.version_info < (3, 0): def has_key(self, k: str) -> bool: ... def __delitem__(self, k: NoReturn) -> None: ... def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ... 
# This is intended as a class decorator, but mypy rejects abstract classes # when a Type[_T] is expected, so we can't give it the type we want. def trait(cls: Any) -> Any: ... # The real type is in the comment but it isn't safe to use **kwargs in # a lib-stub because the fixtures might not have dict. Argh! # def mypyc_attr(*attrs: str, **kwattrs: object) -> Callable[[_T], _T]: ... mypyc_attr: Any class FlexibleAlias(Generic[_T, _U]): ... mypy-0.761/test-data/unit/lib-stub/six.pyi0000644€tŠÔÚ€2›s®0000000024613576752246024636 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Type, Callable def with_metaclass(mcls: Type[type], *args: type) -> type: pass def add_metaclass(mcls: Type[type]) -> Callable[[type], type]: pass mypy-0.761/test-data/unit/lib-stub/sys.pyi0000644€tŠÔÚ€2›s®0000000005613576752246024650 0ustar jukkaDROPBOX\Domain Users00000000000000version_info = (0, 0, 0, '', 0) platform = '' mypy-0.761/test-data/unit/lib-stub/types.pyi0000644€tŠÔÚ€2›s®0000000022713576752246025176 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar _T = TypeVar('_T') def coroutine(func: _T) -> _T: pass class bool: ... class ModuleType: __file__ = ... # type: str mypy-0.761/test-data/unit/lib-stub/typing.pyi0000644€tŠÔÚ€2›s®0000000204513576752246025344 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub for typing module. Many of the definitions have special handling in # the type checker, so they can just be initialized to anything. 
cast = 0 overload = 0 Any = 0 Union = 0 Optional = 0 TypeVar = 0 Generic = 0 Protocol = 0 Tuple = 0 Callable = 0 NamedTuple = 0 Type = 0 ClassVar = 0 Final = 0 NoReturn = 0 NewType = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) U = TypeVar('U') V = TypeVar('V') class Iterable(Protocol[T_co]): def __iter__(self) -> Iterator[T_co]: pass class Iterator(Iterable[T_co], Protocol): def __next__(self) -> T_co: pass class Generator(Iterator[T], Generic[T, U, V]): def __iter__(self) -> Generator[T, U, V]: pass class Sequence(Iterable[T_co]): def __getitem__(self, n: Any) -> T_co: pass class Mapping(Generic[T, T_co]): def __getitem__(self, key: T) -> T_co: pass class SupportsInt(Protocol): def __int__(self) -> int: pass class SupportsFloat(Protocol): def __float__(self) -> float: pass # This is an unofficial extension. def final(meth: T) -> T: pass TYPE_CHECKING = 1 mypy-0.761/test-data/unit/lib-stub/typing_extensions.pyi0000644€tŠÔÚ€2›s®0000000257013576752246027626 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, Any, Mapping, Iterator, NoReturn, Dict, Type from typing import TYPE_CHECKING as TYPE_CHECKING from typing import NewType as NewType import sys _T = TypeVar('_T') class _SpecialForm: def __getitem__(self, typeargs: Any) -> Any: pass Protocol: _SpecialForm = ... def runtime_checkable(x: _T) -> _T: pass runtime = runtime_checkable Final: _SpecialForm = ... def final(x: _T) -> _T: pass Literal: _SpecialForm = ... Annotated: _SpecialForm = ... # Fallback type for all typed dicts (does not exist at runtime). class _TypedDict(Mapping[str, object]): # Needed to make this class non-abstract. It is explicitly declared abstract in # typeshed, but we don't want to import abc here, as it would slow down the tests. def __iter__(self) -> Iterator[str]: ... def copy(self: _T) -> _T: ... # Using NoReturn so that only calls using the plugin hook can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... 
# Mypy expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: _T = ...) -> object: ... def update(self: _T, __m: _T) -> None: ... if sys.version_info < (3, 0): def has_key(self, k: str) -> bool: ... def __delitem__(self, k: NoReturn) -> None: ... def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ... mypy-0.761/test-data/unit/merge.test0000644€tŠÔÚ€2›s®0000007034613576752246023577 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for AST merge (used for fine-grained incremental checking) -- -- Each test case has two versions of the module 'target' (target.py and -- target.py.next). A test cases type checks both of them, merges the ASTs, -- and finally dumps certain parts of the ASTs for both versions (==> -- separates the first and second versions). A test case passes if the -- dumped output is as expected. -- -- The dumped output uses to denote identities of objects. Objects -- suffixed by the same refer to the same object; and (if -- N != M) refer to different objects. The objective of these test cases -- is to verify that identities of publicly visible AST nodes is -- preserved across merge. Other AST nodes may get new identities. -- -- Each test case dumps one of four kinds of information: -- -- 1) ASTs (test case has no magic suffix) -- 2) Symbol tables (_symtable test case name suffix) -- 3) TypeInfos (_typeinfo suffix) -- 4) Inferred types (_types suffix) -- -- If you need to dump multiple different kinds of information, write -- multiple test cases. 
[case testFunction] import target [file target.py] def f() -> int: pass [file target.py.next] def f() -> int: pass [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py FuncDef:1<2>( f def () -> builtins.int<3> Block:1<4>( PassStmt:2<5>()))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py FuncDef:1<2>( f def () -> builtins.int<3> Block:1<6>( PassStmt:2<7>()))) [case testClass] import target [file target.py] class A: def f(self, x: str) -> int: pass [file target.py.next] class A: def f(self, x: int) -> str: pass [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A FuncDef:2<3>( f Args( Var(self) Var(x)) def (self: target.A<4>, x: builtins.str<5>) -> builtins.int<6> Block:2<7>( PassStmt:3<8>())))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<9>( A FuncDef:2<3>( f Args( Var(self) Var(x)) def (self: target.A<4>, x: builtins.int<6>) -> builtins.str<5> Block:2<10>( PassStmt:3<11>())))) [case testClass_typeinfo] import target [file target.py] class A: def f(self, x: str) -> int: pass def g(self, x: str) -> int: pass [file target.py.next] class A: def f(self, x: int) -> str: pass def h(self, x: int) -> str: pass [out] TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( f<2> g<3>)) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( f<2> h<4>)) [case testConstructInstance] import target [file target.py] class A: def f(self) -> B: return B() class B: pass [file target.py.next] class B: pass class A: def f(self) -> B: 1 return B() [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) -> target.B<5> Block:2<6>( ReturnStmt:3<7>( CallExpr:3<8>( NameExpr(B [target.B<5>]) Args()))))) ClassDef:4<9>( B PassStmt:4<10>())) ==> MypyFile:1<0>( tmp/main 
Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<11>( B PassStmt:1<12>()) ClassDef:2<13>( A FuncDef:3<3>( f Args( Var(self)) def (self: target.A<4>) -> target.B<5> Block:3<14>( ExpressionStmt:4<15>( IntExpr(1)) ReturnStmt:5<16>( CallExpr:5<17>( NameExpr(B [target.B<5>]) Args())))))) [case testCallMethod] import target [file target.py] class A: def f(self) -> None: self.f() [file target.py.next] class A: def f(self) -> None: self.f() [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) Block:2<5>( ExpressionStmt:3<6>( CallExpr:3<7>( MemberExpr:3<8>( NameExpr(self [l<9>]) f) Args())))))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<10>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) Block:2<11>( ExpressionStmt:3<12>( CallExpr:3<13>( MemberExpr:3<14>( NameExpr(self [l<15>]) f) Args())))))) [case testClassAttribute] import target [file target.py] class A: def f(self) -> None: self.x = 1 self.x [file target.py.next] class A: def f(self) -> None: self.x = 1 self.x [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) Block:2<5>( AssignmentStmt:3<6>( MemberExpr:3<8>( NameExpr(self [l<9>]) x*<7>) IntExpr(1)) ExpressionStmt:4<10>( MemberExpr:4<11>( NameExpr(self [l<9>]) x)))))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<12>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) Block:2<13>( AssignmentStmt:3<14>( MemberExpr:3<15>( NameExpr(self [l<16>]) x*<7>) IntExpr(1)) ExpressionStmt:4<17>( MemberExpr:4<18>( NameExpr(self [l<16>]) x)))))) [case testClassAttribute_typeinfo] import target [file target.py] class A: def f(self) -> None: self.x = 1 self.x self.y = A() [file target.py.next] class A: def f(self) -> None: self.x = 1 self.x self.y = A() [out] TypeInfo<0>( 
Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( f<2> x<3> (builtins.int<4>) y<5> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( f<2> x<3> (builtins.int<4>) y<5> (target.A<0>))) [case testFunction_symtable] import target [file target.py] def f() -> int: pass [file target.py.next] def f() -> int: pass [out] __main__: target: MypyFile<0> target: f: FuncDef<1> ==> __main__: target: MypyFile<0> target: f: FuncDef<1> [case testClass_symtable] import target [file target.py] class A: pass class B: pass [file target.py.next] class A: pass class C: pass [out] __main__: target: MypyFile<0> target: A: TypeInfo<1> B: TypeInfo<2> ==> __main__: target: MypyFile<0> target: A: TypeInfo<1> C: TypeInfo<3> [case testTopLevelExpression] import target [file target.py] class A: pass A() [file target.py.next] class A: pass class B: pass A() B() [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A PassStmt:1<3>()) ExpressionStmt:2<4>( CallExpr:2<5>( NameExpr(A [target.A<6>]) Args()))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<7>( A PassStmt:1<8>()) ClassDef:2<9>( B PassStmt:2<10>()) ExpressionStmt:3<11>( CallExpr:3<12>( NameExpr(A [target.A<6>]) Args())) ExpressionStmt:4<13>( CallExpr:4<14>( NameExpr(B [target.B<15>]) Args()))) [case testExpression_types] import target [file target.py] class A: pass def f(a: A) -> None: 1 a [file target.py.next] class A: pass def f(a: A) -> None: a 1 [out] ## target IntExpr:3: Literal[1]?<0> NameExpr:4: target.A<1> ==> ## target NameExpr:3: target.A<1> IntExpr:4: Literal[1]?<0> [case testClassAttribute_types] import target [file target.py] class A: def f(self) -> None: self.x = A() self.x self.y = 1 self.y [file target.py.next] class A: def f(self) -> None: self.y = 1 self.y self.x = A() self.x [out] ## target CallExpr:3: target.A<0> MemberExpr:3: target.A<0> 
NameExpr:3: def () -> target.A<0> NameExpr:3: target.A<0> MemberExpr:4: target.A<0> NameExpr:4: target.A<0> IntExpr:5: Literal[1]?<1> MemberExpr:5: builtins.int<1> NameExpr:5: target.A<0> MemberExpr:6: builtins.int<1> NameExpr:6: target.A<0> ==> ## target IntExpr:3: Literal[1]?<1> MemberExpr:3: builtins.int<1> NameExpr:3: target.A<0> MemberExpr:4: builtins.int<1> NameExpr:4: target.A<0> CallExpr:5: target.A<0> MemberExpr:5: target.A<0> NameExpr:5: def () -> target.A<0> NameExpr:5: target.A<0> MemberExpr:6: target.A<0> NameExpr:6: target.A<0> [case testMethod_types] import target [file target.py] class A: def f(self) -> A: return self.f() [file target.py.next] class A: # Extra line to change line numbers def f(self) -> A: return self.f() [out] ## target CallExpr:3: target.A<0> MemberExpr:3: def () -> target.A<0> NameExpr:3: target.A<0> ==> ## target CallExpr:4: target.A<0> MemberExpr:4: def () -> target.A<0> NameExpr:4: target.A<0> [case testRenameFunction] import target [file target.py] def f() -> int: pass [file target.py.next] def g() -> int: pass [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py FuncDef:1<2>( f def () -> builtins.int<3> Block:1<4>( PassStmt:1<5>()))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py FuncDef:1<6>( g def () -> builtins.int<3> Block:1<7>( PassStmt:1<8>()))) [case testRenameFunction_symtable] import target [file target.py] def f() -> int: pass [file target.py.next] def g() -> int: pass [out] __main__: target: MypyFile<0> target: f: FuncDef<1> ==> __main__: target: MypyFile<0> target: g: FuncDef<2> [case testMergeWithBaseClass_typeinfo] import target [file target.py] class A: pass class B(A): def f(self) -> None: pass [file target.py.next] class C: pass class A: pass class B(A): def f(self) -> None: pass [out] TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names()) TypeInfo<2>( Name(target.B) Bases(target.A<0>) Mro(target.B<2>, 
target.A<0>, builtins.object<1>) Names( f<3>)) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names()) TypeInfo<2>( Name(target.B) Bases(target.A<0>) Mro(target.B<2>, target.A<0>, builtins.object<1>) Names( f<3>)) TypeInfo<4>( Name(target.C) Bases(builtins.object<1>) Mro(target.C<4>, builtins.object<1>) Names()) [case testModuleAttribute] import target [file target.py] x = 1 [file target.py.next] x = 2 [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py AssignmentStmt:1<2>( NameExpr(x [target.x<3>]) IntExpr(1) builtins.int<4>)) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py AssignmentStmt:1<5>( NameExpr(x [target.x<3>]) IntExpr(2) builtins.int<4>)) [case testNestedClassMethod_typeinfo] import target [file target.py] class A: class B: def f(self) -> None: pass [file target.py.next] class A: class B: def f(self) -> None: pass [out] TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( B<2>)) TypeInfo<2>( Name(target.A.B) Bases(builtins.object<1>) Mro(target.A.B<2>, builtins.object<1>) Names( f<3>)) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( B<2>)) TypeInfo<2>( Name(target.A.B) Bases(builtins.object<1>) Mro(target.A.B<2>, builtins.object<1>) Names( f<3>)) [case testNamedTuple_typeinfo] import target [file target.py] from typing import NamedTuple class A: pass N = NamedTuple('N', [('x', A)]) [file target.py.next] from typing import NamedTuple class A: pass N = NamedTuple('N', [('x', A), ('y', A)]) [out] TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names()) TypeInfo<2>( Name(target.N) Bases(builtins.tuple[target.A<0>]<3>) Mro(target.N<2>, builtins.tuple<3>, builtins.object<1>) Names( _NT<4> __annotations__<5> (builtins.object<1>) __doc__<6> (builtins.str<7>) __new__<8> _asdict<9> _field_defaults<10> (builtins.object<1>) 
_field_types<11> (builtins.object<1>) _fields<12> (Tuple[builtins.str<7>]) _make<13> _replace<14> _source<15> (builtins.str<7>) x<16> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names()) TypeInfo<2>( Name(target.N) Bases(builtins.tuple[target.A<0>]<3>) Mro(target.N<2>, builtins.tuple<3>, builtins.object<1>) Names( _NT<4> __annotations__<5> (builtins.object<1>) __doc__<6> (builtins.str<7>) __new__<8> _asdict<9> _field_defaults<10> (builtins.object<1>) _field_types<11> (builtins.object<1>) _fields<12> (Tuple[builtins.str<7>, builtins.str<7>]) _make<13> _replace<14> _source<15> (builtins.str<7>) x<16> (target.A<0>) y<17> (target.A<0>))) [case testUnionType_types] import target [file target.py] from typing import Union class A: pass a: A [file target.py.next] from typing import Union class A: pass a: Union[A, int] [out] ## target NameExpr:3: target.A<0> ==> ## target NameExpr:3: Union[target.A<0>, builtins.int<1>] [case testTypeType_types] import target [file target.py] from typing import Type class A: pass a: Type[A] [file target.py.next] from typing import Type class A: pass a: Type[A] [out] ## target NameExpr:3: Type[target.A<0>] ==> ## target NameExpr:3: Type[target.A<0>] [case testTypeVar_types] import target [file target.py] from typing import TypeVar, Generic T = TypeVar('T', bound=int) class A(Generic[T]): x: T [file target.py.next] from typing import TypeVar T = TypeVar('T', bound='A') class A(Generic[T]): x: T [out] ## target CallExpr:2: Any NameExpr:2: Any TypeVarExpr:2: Any NameExpr:4: T`1(upper_bound=builtins.int<0>) ==> ## target CallExpr:2: Any NameExpr:2: Any TypeVarExpr:2: Any NameExpr:4: T`1(upper_bound=target.A[Any]<1>) [case testUnboundType_types] import target [file target.py] from typing import TypeVar, Generic class A: pass foo: int x: foo[A] [file target.py.next] from typing import TypeVar, Generic class A: pass foo: int x: foo[A] [out] tmp/target.py:4: error: Variable 
"target.foo" is not valid as a type ## target NameExpr:3: builtins.int<0> NameExpr:4: foo?[target.A<1>] ==> ## target NameExpr:3: builtins.int<0> NameExpr:4: foo?[target.A<1>] [case testOverloaded_types] import target [file target.py] from typing import overload class A: pass @overload def f(x: A) -> A: pass @overload def f(x: int) -> int: pass def f(x): pass g = f [file target.py.next] from typing import overload class A: pass @overload def f(x: A) -> A: pass @overload def f(x: str) -> str: pass def f(x): pass g = f [out] -- TODO: It is unclear why this works correctly... ## target NameExpr:11: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.int<1>) -> builtins.int<1>) NameExpr:11: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.int<1>) -> builtins.int<1>) ==> ## target NameExpr:12: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.str<2>) -> builtins.str<2>) NameExpr:12: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.str<2>) -> builtins.str<2>) [case testOverloaded] import target [file target.py] from typing import overload class A: pass @overload def f(x: A) -> A: pass @overload def f(x: int) -> int: pass def f(x): pass [file target.py.next] from typing import overload class A: pass class B: pass @overload def f(x: A) -> B: pass @overload def f(x: str) -> str: pass def f(x): pass [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ImportFrom:1(typing, [overload]) ClassDef:2<2>( A PassStmt:2<3>()) OverloadedFuncDef:4<4>( FuncDef:9<5>( f Args( Var(x)) Block:9<6>( PassStmt:9<7>())) Overload(def (x: target.A<8>) -> target.A<8>, def (x: builtins.int<9>) -> builtins.int<9>) Decorator:4<10>( Var(f) NameExpr(overload [typing.overload<11>]) FuncDef:5<12>( f Args( Var(x)) def (x: target.A<8>) -> target.A<8> Block:5<13>( PassStmt:5<14>()))) Decorator:6<15>( Var(f) NameExpr(overload [typing.overload<11>]) FuncDef:7<16>( f Args( Var(x)) def (x: builtins.int<9>) -> builtins.int<9> Block:7<17>( 
PassStmt:7<18>()))))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ImportFrom:1(typing, [overload]) ClassDef:3<19>( A PassStmt:3<20>()) ClassDef:4<21>( B PassStmt:4<22>()) OverloadedFuncDef:6<4>( FuncDef:11<23>( f Args( Var(x)) Block:11<24>( PassStmt:11<25>())) Overload(def (x: target.A<8>) -> target.B<26>, def (x: builtins.str<27>) -> builtins.str<27>) Decorator:6<28>( Var(f) NameExpr(overload [typing.overload<11>]) FuncDef:7<29>( f Args( Var(x)) def (x: target.A<8>) -> target.B<26> Block:7<30>( PassStmt:7<31>()))) Decorator:8<32>( Var(f) NameExpr(overload [typing.overload<11>]) FuncDef:9<33>( f Args( Var(x)) def (x: builtins.str<27>) -> builtins.str<27> Block:9<34>( PassStmt:9<35>()))))) [case testTypeVar_symtable] import target [file target.py] from typing import TypeVar T = TypeVar('T') [file target.py.next] from typing import TypeVar T = TypeVar('T', bound=int) [out] __main__: target: MypyFile<0> target: T: TypeVarExpr<1> TypeVar: Var<2> ==> __main__: target: MypyFile<0> target: T: TypeVarExpr<1> TypeVar: Var<2> [case testTypeAlias_symtable] import target [file target.py] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass X = A[int] [file target.py.next] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass X = A[str] [out] __main__: target: MypyFile<0> target: A: TypeInfo<1> Generic: Var<2> T: TypeVarExpr<3> TypeVar: Var<4> X: TypeAlias<5> ==> __main__: target: MypyFile<0> target: A: TypeInfo<1> Generic: Var<2> T: TypeVarExpr<3> TypeVar: Var<4> X: TypeAlias<5> [case testGenericFunction_types] import target [file target.py] from typing import TypeVar class A: pass T = TypeVar('T', bound=A) def f(x: T) -> T: pass f [file target.py.next] from typing import TypeVar class A: pass T = TypeVar('T', bound=A) def f(x: T, y: A) -> T: pass f [out] ## target CallExpr:3: Any NameExpr:3: Any TypeVarExpr:3: Any NameExpr:5: def [T <: target.A<0>] (x: T`-1(upper_bound=target.A<0>)) -> 
T`-1(upper_bound=target.A<0>) ==> ## target CallExpr:3: Any NameExpr:3: Any TypeVarExpr:3: Any NameExpr:5: def [T <: target.A<0>] (x: T`-1(upper_bound=target.A<0>), y: target.A<0>) -> T`-1(upper_bound=target.A<0>) [case testMergeOverloaded_types] import target [file target.py] from _x import A a: A [file target.py.next] from _x import A a: A [file _x.pyi] from typing import Generic, TypeVar, overload T = TypeVar('T') class C(Generic[T]): @overload def __init__(self) -> None: pass @overload def __init__(self, x: int) -> None: pass A = C[int] [out] ## target NameExpr:2: _x.C[builtins.int<0>]<1> ==> ## target NameExpr:2: _x.C[builtins.int<0>]<1> [case testRefreshVar_symtable] from typing import TypeVar from target import f x = 1 y = '' # type: str [file target.py] f = 1 [file target.py.next] [out] __main__: TypeVar: Var<0> f: Var<1>(builtins.int<2>) x: Var<3>(builtins.int<2>) y: Var<4>(builtins.str<5>) target: f: Var<1>(builtins.int<2>) ==> __main__: TypeVar: Var<0> f: Var<6>(Any) x: Var<3>(builtins.int<2>) y: Var<4>(builtins.str<5>) target: [case testRefreshTypeVar_symtable] from typing import TypeVar from target import f T = TypeVar('T') [file target.py] f = 1 [file target.py.next] [out] __main__: T: TypeVarExpr<0> TypeVar: Var<1> f: Var<2>(builtins.int<3>) target: f: Var<2>(builtins.int<3>) ==> __main__: T: TypeVarExpr<0> TypeVar: Var<1> f: Var<4>(Any) target: [case testRefreshNamedTuple_symtable] from typing import NamedTuple from target import f N = NamedTuple('N', [('x', int)]) [file target.py] f = 1 [file target.py.next] [out] __main__: N: TypeInfo<0> NamedTuple: Var<1> f: Var<2>(builtins.int<3>) target: f: Var<2>(builtins.int<3>) ==> __main__: N: TypeInfo<0> NamedTuple: Var<1> f: Var<4>(Any) target: [case testRefreshAttributeDefinedInClassBody_typeinfo] from target import f class A: a = 1 b = '' # type: str [file target.py] f = 1 [file target.py.next] [out] TypeInfo<0>( Name(__main__.A) Bases(builtins.object<1>) Mro(__main__.A<0>, builtins.object<1>) Names( 
a<2> (builtins.int<3>) b<4> (builtins.str<5>))) ==> TypeInfo<0>( Name(__main__.A) Bases(builtins.object<1>) Mro(__main__.A<0>, builtins.object<1>) Names( a<2> (builtins.int<3>) b<4> (builtins.str<5>))) [case testDecorator_symtable] import target [file target.py] from contextlib import contextmanager from typing import Iterator, List, Tuple @contextmanager def f(x: List[Tuple[int]]) -> Iterator[None]: yield [file target.py.next] from contextlib import contextmanager from typing import Iterator, List, Tuple @contextmanager def f(x: List[Tuple[int]]) -> Iterator[None]: yield [typing fixtures/typing-full.pyi] [builtins fixtures/list.pyi] [out] __main__: target: MypyFile<0> target: Iterator: TypeInfo<1> List: TypeAlias<2> Tuple: Var<3> contextmanager: FuncDef<4> f: Decorator<5> ==> __main__: target: MypyFile<0> target: Iterator: TypeInfo<1> List: TypeAlias<2> Tuple: Var<3> contextmanager: FuncDef<4> f: Decorator<5> [case testConditionalFunctionDefinition] import target [file target.py] import sys class A: pass class B: pass if sys.platform == 'nonexistent': def f(x: A) -> None: pass else: def f(x: B) -> None: pass [file target.py.next] import sys class A: pass class B: pass if sys.platform == 'nonexistent': def f(x: A, y: int) -> None: pass else: def f(x: B, y: int) -> None: pass [builtins fixtures/ops.pyi] [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py Import:1(sys) ClassDef:2<2>( A PassStmt:2<3>()) ClassDef:3<4>( B PassStmt:3<5>()) IfStmt:4<6>( If( ComparisonExpr:4<7>( == MemberExpr:4<9>( NameExpr(sys<10>) platform [sys.platform<8>]) StrExpr(nonexistent))) Then( FuncDef:5<11>( f Args( Var(x)) def (x: A?) -> None? 
Block:5<12>( PassStmt:5<13>()))) Else( FuncDef:7<14>( f Args( Var(x)) def (x: target.B<15>) Block:7<16>( PassStmt:7<17>()))))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py Import:1(sys) ClassDef:2<18>( A PassStmt:2<19>()) ClassDef:3<20>( B PassStmt:3<21>()) IfStmt:4<22>( If( ComparisonExpr:4<23>( == MemberExpr:4<24>( NameExpr(sys<10>) platform [sys.platform<8>]) StrExpr(nonexistent))) Then( FuncDef:5<25>( f Args( Var(x) Var(y)) def (x: A?, y: int?) -> None? Block:5<26>( PassStmt:5<27>()))) Else( FuncDef:7<14>( f Args( Var(x) Var(y)) def (x: target.B<15>, y: builtins.int<28>) Block:7<29>( PassStmt:7<30>()))))) [case testMergeTypedDict_symtable] import target [file target.py] from mypy_extensions import TypedDict class A: pass D = TypedDict('D', {'a': A}) d: D [file target.py.next] from mypy_extensions import TypedDict class A: pass D = TypedDict('D', {'a': A, 'b': int}) d: D [builtins fixtures/dict.pyi] [out] __main__: target: MypyFile<0> target: A: TypeInfo<1> D: TypeInfo<2> TypedDict: FuncDef<3> d: Var<4>(TypedDict('target.D', {'a': target.A<1>})) ==> __main__: target: MypyFile<0> target: A: TypeInfo<1> D: TypeInfo<2> TypedDict: FuncDef<3> d: Var<4>(TypedDict('target.D', {'a': target.A<1>, 'b': builtins.int<5>})) [case testNewType_symtable] import target [file target.py] from typing import NewType class A: pass B = NewType('B', A) C = NewType('C', A) [file target.py.next] from typing import NewType class A: pass B = NewType('B', A) C = NewType('C', B) D = NewType('D', int) [out] __main__: target: MypyFile<0> target: A: TypeInfo<1> B: TypeInfo<2> C: TypeInfo<3> NewType: Var<4> ==> __main__: target: MypyFile<0> target: A: TypeInfo<1> B: TypeInfo<2> C: TypeInfo<3> D: TypeInfo<5> NewType: Var<4> [case testCallable_symtable-skip] # The TypeInfo is currently not being merged correctly import target [file target.py] def g(o: object) -> None: if callable(o): pass [file target.py.next] def g(o: object) -> None: if callable(o): o() [builtins 
fixtures/callable.pyi] [out] __main__: target: MypyFile<0> target: : TypeInfo<1> g: FuncDef<2> ==> __main__: target: MypyFile<0> target: : TypeInfo<1> g: FuncDef<2> [case testMetaclass_typeinfo] import target [file target.py] class M(type): pass class C(metaclass=M): pass [file target.py.next] class M(type): pass class C(metaclass=M): pass # dummy change [out] TypeInfo<0>( Name(target.C) Bases(builtins.object<1>) Mro(target.C<0>, builtins.object<1>) Names() DeclaredMetaclass(target.M<2>) MetaclassType(target.M<2>)) TypeInfo<2>( Name(target.M) Bases(builtins.type<3>) Mro(target.M<2>, builtins.type<3>, builtins.object<1>) Names()) ==> TypeInfo<0>( Name(target.C) Bases(builtins.object<1>) Mro(target.C<0>, builtins.object<1>) Names() DeclaredMetaclass(target.M<2>) MetaclassType(target.M<2>)) TypeInfo<2>( Name(target.M) Bases(builtins.type<3>) Mro(target.M<2>, builtins.type<3>, builtins.object<1>) Names()) [case testCast_symtable] import target [file target.py] from typing import cast class Thing: pass thing = cast(Thing, Thing()) [file target.py.next] from typing import cast class Thing: pass thing = cast(Thing, Thing()) [out] __main__: target: MypyFile<0> target: Thing: TypeInfo<1> cast: Var<2> thing: Var<3>(target.Thing<1>) ==> __main__: target: MypyFile<0> target: Thing: TypeInfo<1> cast: Var<2> thing: Var<3>(target.Thing<1>) [case testClassBasedEnum_typeinfo] import target [file target.py] from enum import Enum class A(Enum): X = 0 [file target.py.next] from enum import Enum class A(Enum): X = 0 Y = 1 [out] TypeInfo<0>( Name(target.A) Bases(enum.Enum<1>) Mro(target.A<0>, enum.Enum<1>, builtins.object<2>) Names( X<3> (builtins.int<4>)) MetaclassType(enum.EnumMeta<5>)) ==> TypeInfo<0>( Name(target.A) Bases(enum.Enum<1>) Mro(target.A<0>, enum.Enum<1>, builtins.object<2>) Names( X<3> (builtins.int<4>) Y<6> (builtins.int<4>)) MetaclassType(enum.EnumMeta<5>)) [case testLiteralMerge] import target [file target.py] from typing_extensions import Literal def foo(x: 
Literal[3]) -> Literal['a']: pass bar: Literal[4] = 4 [file target.py.next] from typing_extensions import Literal def foo(x: Literal['3']) -> Literal['b']: pass bar: Literal[5] = 5 [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ImportFrom:1(typing_extensions, [Literal]) FuncDef:2<2>( foo Args( Var(x)) def (x: Literal[3]) -> Literal['a'] Block:2<3>( PassStmt:2<4>())) AssignmentStmt:3<5>( NameExpr(bar [target.bar<6>]) IntExpr(4) Literal[4])) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ImportFrom:1(typing_extensions, [Literal]) FuncDef:2<2>( foo Args( Var(x)) def (x: Literal['3']) -> Literal['b'] Block:2<7>( PassStmt:2<8>())) AssignmentStmt:3<9>( NameExpr(bar [target.bar<6>]) IntExpr(5) Literal[5])) mypy-0.761/test-data/unit/parse-errors.test0000644€tŠÔÚ€2›s®0000002146213576752246025117 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for parser errors. Each test case consists of two sections. -- The first section contains [case NAME] followed by the input code, while -- the second section contains [out] followed by the output from the parser. -- -- The input file name in errors is "file". -- -- Comments starting with "--" in this file will be ignored, except for lines -- starting with "----" that are not ignored. The first two dashes of these -- lines are interpreted as escapes and removed. 
[case testInvalidFunction] def f() pass [out] file:1: error: invalid syntax [case testUnexpectedIndent] 1 2 [out] file:2: error: unexpected indent [case testInconsistentIndent] if x: 1 1 [out] file:3: error: unexpected indent [case testInconsistentIndent2] if x: 1 1 [out] file:3: error: unindent does not match any outer indentation level [case testInvalidBinaryOp] 1> a* a+1* [out] file:1: error: invalid syntax [case testDoubleStar] **a [out] file:1: error: invalid syntax [case testMissingSuperClass] class A(: pass [out] file:1: error: invalid syntax [case testUnexpectedEof] if 1: [out] file:1: error: unexpected EOF while parsing [case testInvalidKeywordArguments1] f(x=y, z) [out] file:1: error: positional argument follows keyword argument [case testInvalidKeywordArguments2] f(**x, y) [out] file:1: error: positional argument follows keyword argument unpacking [case testInvalidBareAsteriskAndVarArgs2] def f(*x: A, *) -> None: pass [out] file:1: error: invalid syntax [case testInvalidBareAsteriskAndVarArgs3] def f(*, *x: A) -> None: pass [out] file:1: error: invalid syntax [case testInvalidBareAsteriskAndVarArgs4] def f(*, **x: A) -> None: pass [out] file:1: error: named arguments must follow bare * [case testInvalidBareAsterisk1] def f(*) -> None: pass [out] file:1: error: named arguments must follow bare * [case testInvalidBareAsterisk2] def f(x, *) -> None: pass [out] file:1: error: named arguments must follow bare * [case testInvalidFuncDefArgs1] def f(x = y, x): pass [out] file:1: error: non-default argument follows default argument [case testInvalidFuncDefArgs3] def f(**x, y): pass [out] file:1: error: invalid syntax [case testInvalidFuncDefArgs4] def f(**x, y=x): pass [out] file:1: error: invalid syntax [case testInvalidTypeComment] 0 x = 0 # type: A A [out] file:2: error: syntax error in type comment 'A A' [case testInvalidTypeComment2] 0 x = 0 # type: A[ [out] file:2: error: syntax error in type comment 'A[' [case testInvalidTypeComment3] 0 x = 0 # type: 
[out] file:2: error: syntax error in type comment '' [case testInvalidTypeComment4] 0 x = 0 # type: * [out] file:2: error: syntax error in type comment '*' [case testInvalidTypeComment5] 0 x = 0 # type:# some comment [out] file:2: error: syntax error in type comment '' [case testInvalidTypeComment6] 0 x = 0 # type: *# comment #6 [out] file:2: error: syntax error in type comment '*' [case testInvalidTypeComment7] 0 x = 0 # type: A B #comment #7 [out] file:2: error: syntax error in type comment 'A B' [case testInvalidSignatureInComment1] def f(): # type: x pass [out] file:1: error: syntax error in type comment 'x' file:1: note: Suggestion: wrap argument types in parentheses [case testInvalidSignatureInComment2] def f(): # type: pass [out] file:1: error: syntax error in type comment '' [case testInvalidSignatureInComment3] def f(): # type: ( pass [out] file:1: error: syntax error in type comment '(' [case testInvalidSignatureInComment4] def f(): # type: (. pass [out] file:1: error: syntax error in type comment '(.' [case testInvalidSignatureInComment5] def f(): # type: (x pass [out] file:1: error: syntax error in type comment '(x' [case testInvalidSignatureInComment6] def f(): # type: (x) pass [out] file:1: error: syntax error in type comment '(x)' [case testInvalidSignatureInComment7] def f(): # type: (x) - pass [out] file:1: error: syntax error in type comment '(x) -' [case testInvalidSignatureInComment8] def f(): # type: (x) -> pass [out] file:1: error: syntax error in type comment '(x) ->' [case testInvalidSignatureInComment9] def f(): # type: (x) -> . pass [out] file:1: error: syntax error in type comment '(x) -> .' 
[case testInvalidSignatureInComment10] def f(): # type: (x) -> x x pass [out] file:1: error: syntax error in type comment '(x) -> x x' [case testInvalidSignatureInComment11] def f(): # type: # abc comment pass [out] file:1: error: syntax error in type comment '' [case testInvalidSignatureInComment12] def f(): # type: (x) -> x x # comment #2 pass [out] file:1: error: syntax error in type comment '(x) -> x x' [case testDuplicateSignatures1] def f() -> None: # type: () -> None pass def f(): # type: () -> None pass [out] file:1: error: Function has duplicate type signatures [case testDuplicateSignatures2] def f(x, y: Z): # type: (x, y) -> z pass [out] file:1: error: Function has duplicate type signatures [case testTooManyTypes] def f(x, y): # type: (X, Y, Z) -> z pass [out] file:1: error: Type signature has too many arguments [case testTooFewTypes] def f(x, y): # type: (X) -> z pass [out] file:1: error: Type signature has too few arguments [case testCommentFunctionAnnotationVarArgMispatch-skip] # see mypy issue #1997 def f(x): # type: (*X) -> Y pass def g(*x): # type: (X) -> Y pass [out] file:1: error: Inconsistent use of '*' in function signature file:3: error: Inconsistent use of '*' in function signature [case testCommentFunctionAnnotationVarArgMispatch2-skip] # see mypy issue #1997 def f(*x, **y): # type: (**X, *Y) -> Z pass def g(*x, **y): # type: (*X, *Y) -> Z pass [out] file:1: error: Inconsistent use of '*' in function signature file:3: error: syntax error in type comment file:3: error: Inconsistent use of '*' in function signature file:3: error: Inconsistent use of '**' in function signature [case testPrintStatementInPython35] # flags: --python-version 3.5 print 1 [out] file:2: error: Missing parentheses in call to 'print' [case testPrintStatementInPython37] # flags: --python-version 3.7 print 1 [out] file:2: error: Missing parentheses in call to 'print'. Did you mean print(1)? 
[case testInvalidConditionInConditionalExpression] 1 if 2, 3 else 4 [out] file:1: error: invalid syntax [case testInvalidConditionInConditionalExpression2] 1 if x for y in z else 4 [out] file:1: error: invalid syntax [case testInvalidConditionInConditionalExpression2] 1 if x else for y in z [out] file:1: error: invalid syntax [case testYieldFromNotRightParameter] def f(): yield from [out] file:2: error: invalid syntax [case testYieldFromAfterReturn] def f(): return yield from h() [out] file:2: error: invalid syntax [case testImportDotModule] import .x [out] file:1: error: invalid syntax [case testImportDot] import . [out] file:1: error: invalid syntax [case testInvalidFunctionName] def while(): pass [out] file:1: error: invalid syntax [case testInvalidEllipsis1] ...0 ..._ ...a [out] file:1: error: invalid syntax [case testBlockStatementInSingleLineIf] if 1: if 2: pass [out] file:1: error: invalid syntax [case testBlockStatementInSingleLineIf2] if 1: while 2: pass [out] file:1: error: invalid syntax [case testBlockStatementInSingleLineIf3] if 1: for x in y: pass [out] file:1: error: invalid syntax [case testUnexpectedEllipsis] a = a... [out] file:1: error: invalid syntax [case testParseErrorBeforeUnicodeLiteral] x u'y' [out] file:1: error: invalid syntax [case testParseErrorInExtendedSlicing] x[:, [out] file:1: error: unexpected EOF while parsing [case testParseErrorInExtendedSlicing2] x[:,:: [out] file:1: error: unexpected EOF while parsing [case testParseErrorInExtendedSlicing3] x[:,: [out] file:1: error: unexpected EOF while parsing [case testInvalidEncoding] # foo # coding: uft-8 [out] file:0: error: unknown encoding: uft-8 [case testInvalidEncoding2] # coding=Uft.8 [out] file:0: error: unknown encoding: Uft.8 [case testInvalidEncoding3] #!/usr/bin python # vim: set fileencoding=uft8 : [out] file:0: error: unknown encoding: uft8 [case testDoubleEncoding] # coding: uft8 # coding: utf8 # The first coding cookie should be used and fail. 
[out] file:0: error: unknown encoding: uft8 [case testDoubleEncoding2] # Again the first cookie should be used and fail. # coding: uft8 # coding: utf8 [out] file:0: error: unknown encoding: uft8 [case testLongLiteralInPython3] 2L 0x2L [out] file:1: error: invalid syntax [case testPython2LegacyInequalityInPython3] 1 <> 2 [out] file:1: error: invalid syntax [case testLambdaInListComprehensionInPython3] ([ 0 for x in 1, 2 if 3 ]) [out] file:1: error: invalid syntax [case testTupleArgListInPython3] def f(x, (y, z)): pass [out] file:1: error: invalid syntax [case testBackquoteInPython3] `1 + 2` [out] file:1: error: invalid syntax [case testSmartQuotes] foo = ‘bar’ [out] file:1: error: invalid character in identifier [case testExceptCommaInPython3] try: pass except KeyError, IndexError: pass [out] file:3: error: invalid syntax mypy-0.761/test-data/unit/parse-python2.test0000644€tŠÔÚ€2›s®0000003144213576752246025205 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for parser -- Python 2 syntax. -- -- See parse.test for a description of this file format. 
[case testEmptyFile] [out] MypyFile:1() [case testStringLiterals] 'bar' u'foo' ur'foo' u'''bar''' b'foo' [out] MypyFile:1( ExpressionStmt:1( StrExpr(bar)) ExpressionStmt:2( UnicodeExpr(foo)) ExpressionStmt:3( UnicodeExpr(foo)) ExpressionStmt:4( UnicodeExpr(bar)) ExpressionStmt:5( StrExpr(foo))) [case testSimplePrint] print 1 print 2, 3 print (4, 5) [out] MypyFile:1( PrintStmt:1( IntExpr(1) Newline) PrintStmt:2( IntExpr(2) IntExpr(3) Newline) PrintStmt:3( TupleExpr:3( IntExpr(4) IntExpr(5)) Newline)) [case testPrintWithNoArgs] print [out] MypyFile:1( PrintStmt:1( Newline)) [case testPrintWithTarget] print >>foo [out] MypyFile:1( PrintStmt:1( Target( NameExpr(foo)) Newline)) [case testPrintWithTargetAndArgs] print >>foo, x [out] MypyFile:1( PrintStmt:1( NameExpr(x) Target( NameExpr(foo)) Newline)) [case testPrintWithTargetAndArgsAndTrailingComma] print >>foo, x, y, [out] MypyFile:1( PrintStmt:1( NameExpr(x) NameExpr(y) Target( NameExpr(foo)))) [case testSimpleWithTrailingComma] print 1, print 2, 3, print (4, 5), [out] MypyFile:1( PrintStmt:1( IntExpr(1)) PrintStmt:2( IntExpr(2) IntExpr(3)) PrintStmt:3( TupleExpr:3( IntExpr(4) IntExpr(5)))) [case testOctalIntLiteral] 00 01 0377 [out] MypyFile:1( ExpressionStmt:1( IntExpr(0)) ExpressionStmt:2( IntExpr(1)) ExpressionStmt:3( IntExpr(255))) [case testLongLiteral] 0L 123L 012L 0x123l [out] MypyFile:1( ExpressionStmt:1( IntExpr(0)) ExpressionStmt:2( IntExpr(123)) ExpressionStmt:3( IntExpr(10)) ExpressionStmt:4( IntExpr(291))) [case testTryExceptWithComma] try: x except Exception, e: y [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( NameExpr(x))) NameExpr(Exception) NameExpr(e) Block:3( ExpressionStmt:4( NameExpr(y))))) [case testTryExceptWithNestedComma] try: x except (KeyError, IndexError): y [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( NameExpr(x))) TupleExpr:3( NameExpr(KeyError) NameExpr(IndexError)) Block:3( ExpressionStmt:4( NameExpr(y))))) [case testExecStatement] exec a [out] MypyFile:1( 
ExecStmt:1( NameExpr(a))) [case testExecStatementWithIn] exec a in globals() [out] MypyFile:1( ExecStmt:1( NameExpr(a) CallExpr:1( NameExpr(globals) Args()))) [case testExecStatementWithInAnd2Expressions] exec a in x, y [out] MypyFile:1( ExecStmt:1( NameExpr(a) NameExpr(x) NameExpr(y))) [case testEllipsisInExpression_python2] x = ... # E: invalid syntax [out] [case testStrLiteralConcatenationWithMixedLiteralTypes] u'foo' 'bar' 'bar' u'foo' [out] MypyFile:1( ExpressionStmt:1( UnicodeExpr(foobar)) ExpressionStmt:2( UnicodeExpr(barfoo))) [case testLegacyInequality] 1 <> 2 [out] MypyFile:1( ExpressionStmt:1( ComparisonExpr:1( != IntExpr(1) IntExpr(2)))) [case testListComprehensionInPython2] ([ 0 for x in 1, 2 if 3 ]) [out] MypyFile:1( ExpressionStmt:1( ListComprehension:1( GeneratorExpr:1( IntExpr(0) NameExpr(x) TupleExpr:1( IntExpr(1) IntExpr(2)) IntExpr(3))))) [case testTupleArgListInPython2] def f(x, (y, z)): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(__tuple_arg_2)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(y) NameExpr(z)) NameExpr(__tuple_arg_2)) PassStmt:1()))) [case testTupleArgListWithTwoTupleArgsInPython2] def f((x, y), (z, zz)): pass [out] MypyFile:1( FuncDef:1( f Args( Var(__tuple_arg_1) Var(__tuple_arg_2)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x) NameExpr(y)) NameExpr(__tuple_arg_1)) AssignmentStmt:1( TupleExpr:1( NameExpr(z) NameExpr(zz)) NameExpr(__tuple_arg_2)) PassStmt:1()))) [case testTupleArgListWithInitializerInPython2] def f((y, z) = (1, 2)): pass [out] MypyFile:1( FuncDef:1( f Args( default( Var(__tuple_arg_1) TupleExpr:1( IntExpr(1) IntExpr(2)))) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(y) NameExpr(z)) NameExpr(__tuple_arg_1)) PassStmt:1()))) [case testLambdaTupleArgListInPython2] lambda (x, y): z [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( Var(__tuple_arg_1)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x) NameExpr(y)) NameExpr(__tuple_arg_1)) ReturnStmt:1( NameExpr(z)))))) [case 
testLambdaSingletonTupleArgListInPython2] lambda (x,): z [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( Var(__tuple_arg_1)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x)) NameExpr(__tuple_arg_1)) ReturnStmt:1( NameExpr(z)))))) [case testLambdaNoTupleArgListInPython2] lambda (x): z [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( Var(x)) Block:1( ReturnStmt:1( NameExpr(z)))))) [case testInvalidExprInTupleArgListInPython2_1] def f(x, ()): pass [out] main:1: error: invalid syntax [case testInvalidExprInTupleArgListInPython2_2] def f(x, (y, x[1])): pass [out] main:1: error: invalid syntax [case testListLiteralAsTupleArgInPython2] def f(x, [x]): pass [out] main:1: error: invalid syntax [case testTupleArgAfterStarArgInPython2] def f(*a, (b, c)): pass [out] main:1: error: invalid syntax [case testTupleArgAfterStarStarArgInPython2] def f(*a, (b, c)): pass [out] main:1: error: invalid syntax [case testParenthesizedArgumentInPython2] def f(x, (y)): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(y)) Block:1( PassStmt:1()))) [case testDuplicateNameInTupleArgList_python2] def f(a, (a, b)): pass def g((x, (x, y))): pass [out] main:1: error: Duplicate argument 'a' in function definition main:3: error: Duplicate argument 'x' in function definition [case testBackquotesInPython2] `1 + 2` [out] MypyFile:1( ExpressionStmt:1( BackquoteExpr:1( OpExpr:1( + IntExpr(1) IntExpr(2))))) [case testBackquoteSpecialCasesInPython2] `1, 2` [out] MypyFile:1( ExpressionStmt:1( BackquoteExpr:1( TupleExpr:1( IntExpr(1) IntExpr(2))))) [case testSuperInPython2] class A: def f(self): super(A, self).x [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) Block:2( ExpressionStmt:3( SuperExpr:3( x CallExpr:3( NameExpr(super) Args( NameExpr(A) NameExpr(self))))))))) [case testTypeCommentsInPython2] x = 1 # type: List[int] def f(x, y=0): # type: (List[int], str) -> None pass [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1) List?[int?]) FuncDef:3( f Args( Var(x) 
default( Var(y) IntExpr(0))) def (x: List?[int?], y: str? =) -> None? Block:3( PassStmt:5()))) [case testMultiLineTypeCommentInPython2] def f(x, # type: List[int] y, z=1, # type: str ): # type: (...) -> None pass [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(y) default( Var(z) IntExpr(1))) def (x: List?[int?], y: Any, z: str? =) -> None? Block:1( PassStmt:6()))) [case testIfStmtInPython2] if x: y elif z: a else: b [out] MypyFile:1( IfStmt:1( If( NameExpr(x)) Then( ExpressionStmt:2( NameExpr(y))) Else( IfStmt:3( If( NameExpr(z)) Then( ExpressionStmt:4( NameExpr(a))) Else( ExpressionStmt:6( NameExpr(b))))))) [case testWhileStmtInPython2] while x: y else: z [out] MypyFile:1( WhileStmt:1( NameExpr(x) Block:1( ExpressionStmt:2( NameExpr(y))) Else( ExpressionStmt:4( NameExpr(z))))) [case testForStmtInPython2] for x, y in z: a else: b [out] MypyFile:1( ForStmt:1( TupleExpr:1( NameExpr(x) NameExpr(y)) NameExpr(z) Block:1( ExpressionStmt:2( NameExpr(a))) Else( ExpressionStmt:4( NameExpr(b))))) [case testWithStmtInPython2] with x as y: z [out] MypyFile:1( WithStmt:1( Expr( NameExpr(x)) Target( NameExpr(y)) Block:1( ExpressionStmt:2( NameExpr(z))))) [case testExpressionsInPython2] x[y] x + y ~z x.y ([x, y]) {x, y} {x: y} x < y > z [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(x) NameExpr(y))) ExpressionStmt:2( OpExpr:2( + NameExpr(x) NameExpr(y))) ExpressionStmt:3( UnaryExpr:3( ~ NameExpr(z))) ExpressionStmt:4( MemberExpr:4( NameExpr(x) y)) ExpressionStmt:5( ListExpr:5( NameExpr(x) NameExpr(y))) ExpressionStmt:6( SetExpr:6( NameExpr(x) NameExpr(y))) ExpressionStmt:7( DictExpr:7( NameExpr(x) NameExpr(y))) ExpressionStmt:8( ComparisonExpr:8( < > NameExpr(x) NameExpr(y) NameExpr(z)))) [case testSlicingInPython2] x[y:] x[y:z] x[::y] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(x) SliceExpr:1( NameExpr(y) ))) ExpressionStmt:2( IndexExpr:2( NameExpr(x) SliceExpr:2( NameExpr(y) NameExpr(z)))) ExpressionStmt:3( IndexExpr:3( NameExpr(x) SliceExpr:3( 
NameExpr(y))))) [case testStarArgsInPython2] def f(*x): # type: (*int) -> None pass f(x, *y) [out] MypyFile:1( FuncDef:1( f def (*x: int?) -> None? VarArg( Var(x)) Block:1( PassStmt:2())) ExpressionStmt:3( CallExpr:3( NameExpr(f) Args( NameExpr(x) NameExpr(y)) VarArg))) [case testKwArgsInPython2] def f(**x): # type: (**int) -> None pass f(x, **y) [out] MypyFile:1( FuncDef:1( f def (**x: int?) -> None? DictVarArg( Var(x)) Block:1( PassStmt:2())) ExpressionStmt:3( CallExpr:3( NameExpr(f) Args( NameExpr(x)) DictVarArg( NameExpr(y))))) [case testBoolOpInPython2] x and y or z [out] MypyFile:1( ExpressionStmt:1( OpExpr:1( or OpExpr:1( and NameExpr(x) NameExpr(y)) NameExpr(z)))) [case testImportsInPython2] from x import y, z as zz import m import n as nn from aa import * [out] MypyFile:1( ImportFrom:1(x, [y, z : zz]) Import:2(m) Import:3(n : nn) ImportAll:4(aa)) [case testTryFinallyInPython2] try: x finally: y [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( NameExpr(x))) Finally( ExpressionStmt:4( NameExpr(y))))) [case testRaiseInPython2] raise raise x [out] MypyFile:1( RaiseStmt:1() RaiseStmt:2( NameExpr(x))) [case testAssignmentInPython2] x = y x, (y, z) = aa [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) NameExpr(y)) AssignmentStmt:2( TupleExpr:2( NameExpr(x) TupleExpr:2( NameExpr(y) NameExpr(z))) NameExpr(aa))) [case testAugmentedAssignmentInPython2] x += y x *= 2 [out] MypyFile:1( OperatorAssignmentStmt:1( + NameExpr(x) NameExpr(y)) OperatorAssignmentStmt:2( * NameExpr(x) IntExpr(2))) [case testDelStatementInPython2] del x del x.y, x[y] [out] MypyFile:1( DelStmt:1( NameExpr(x)) DelStmt:2( TupleExpr:2( MemberExpr:2( NameExpr(x) y) IndexExpr:2( NameExpr(x) NameExpr(y))))) [case testClassDecoratorInPython2] @dec() class C: pass [out] MypyFile:1( ClassDef:2( C Decorators( CallExpr:1( NameExpr(dec) Args())) PassStmt:3())) [case testFunctionDecaratorInPython2] @dec() def f(): pass [out] MypyFile:1( Decorator:1( Var(f) CallExpr:1( NameExpr(dec) Args()) FuncDef:2( f 
Block:2( PassStmt:3())))) [case testOverloadedFunctionInPython2] @overload def g(): pass @overload def g(): pass def g(): pass [out] MypyFile:1( OverloadedFuncDef:1( Decorator:1( Var(g) NameExpr(overload) FuncDef:2( g Block:2( PassStmt:3()))) Decorator:4( Var(g) NameExpr(overload) FuncDef:5( g Block:5( PassStmt:6()))) FuncDef:7( g Block:7( PassStmt:8())))) mypy-0.761/test-data/unit/parse.test0000644€tŠÔÚ€2›s®0000015605513576752246023614 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for parser. Each test case consists of two sections. -- The first section contains [case NAME] followed by the input code, while -- the second section contains [out] followed by the output from the parser. -- -- Lines starting with "--" in this file will be ignored, except for lines -- starting with "----" that are not ignored. The first two dashes of these -- lines are interpreted as escapes and removed. [case testEmptyFile] [out] MypyFile:1() [case testExpressionStatement] 1 [out] MypyFile:1( ExpressionStmt:1( IntExpr(1))) [case testAssignment] x = 1 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1))) [case testExpressionBasics] x = f(1, None) 123 * (2 + x) "hello".lower() -1.23 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) CallExpr:1( NameExpr(f) Args( IntExpr(1) NameExpr(None)))) ExpressionStmt:2( OpExpr:2( * IntExpr(123) OpExpr:2( + IntExpr(2) NameExpr(x)))) ExpressionStmt:3( CallExpr:3( MemberExpr:3( StrExpr(hello) lower) Args())) ExpressionStmt:4( UnaryExpr:4( - FloatExpr(1.23)))) [case testSingleQuotedStr] '' 'foo' 'foo\ bar' [out] MypyFile:1( ExpressionStmt:1( StrExpr()) ExpressionStmt:2( StrExpr(foo)) ExpressionStmt:3( StrExpr(foobar))) [case testDoubleQuotedStr] "" "foo" "foo\ bar" [out] MypyFile:1( ExpressionStmt:1( StrExpr()) ExpressionStmt:2( StrExpr(foo)) ExpressionStmt:3( StrExpr(foobar))) [case testRawStr] r'x\n\'' r"x\n\"" [out] MypyFile:1( ExpressionStmt:1( StrExpr(x\n\')) ExpressionStmt:2( StrExpr(x\n\"))) --" fix syntax highlight [case 
testBytes] b'foo' b"foo\ bar" br'x\n\'' [out] MypyFile:1( ExpressionStmt:1( BytesExpr(foo)) ExpressionStmt:2( BytesExpr(foobar)) ExpressionStmt:3( BytesExpr(x\\n\\'))) [case testEscapesInStrings] '\r\n\t\x2f\u123f' b'\r\n\t\x2f\u123f' [out] MypyFile:1( ExpressionStmt:1( StrExpr(\u000d\u000a\u0009/\u123f)) ExpressionStmt:2( BytesExpr(\r\n\t/\\\u123f))) -- Note \\u in the b'...' case (\u sequence not translated) [case testEscapedQuote] '\'' [out] MypyFile:1( ExpressionStmt:1( StrExpr('))) --' [case testOctalEscapes] '\0\1\177\1234' b'\1\476' [out] MypyFile:1( ExpressionStmt:1( StrExpr(\u0000\u0001\u007fS4)) ExpressionStmt:2( BytesExpr(\x01>))) [case testUnicodeLiteralInPython3] u'foo' [out] MypyFile:1( ExpressionStmt:1( StrExpr(foo))) [case testArrays] a = [] a = [1, 2] a[[1]] = a[2] [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) ListExpr:1()) AssignmentStmt:2( NameExpr(a) ListExpr:2( IntExpr(1) IntExpr(2))) AssignmentStmt:3( IndexExpr:3( NameExpr(a) ListExpr:3( IntExpr(1))) IndexExpr:3( NameExpr(a) IntExpr(2)))) [case testTuples] () (1,) (1, foo) a, b = 1, (2, 3) [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1()) ExpressionStmt:2( TupleExpr:2( IntExpr(1))) ExpressionStmt:3( TupleExpr:3( IntExpr(1) NameExpr(foo))) AssignmentStmt:4( TupleExpr:4( NameExpr(a) NameExpr(b)) TupleExpr:4( IntExpr(1) TupleExpr:4( IntExpr(2) IntExpr(3))))) [case testSimpleFunction] def main(): 1 [out] MypyFile:1( FuncDef:1( main Block:1( ExpressionStmt:2( IntExpr(1))))) [case testPass] def f(): pass [out] MypyFile:1( FuncDef:1( f Block:1( PassStmt:2()))) [case testIf] if 1: 2 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( ExpressionStmt:2( IntExpr(2))))) [case testIfElse] if 1: 2 else: 3 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( ExpressionStmt:2( IntExpr(2))) Else( ExpressionStmt:4( IntExpr(3))))) [case testIfElif] if 1: 2 elif 3: 4 elif 5: 6 else: 7 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( ExpressionStmt:2( IntExpr(2))) Else( IfStmt:3( If( IntExpr(3)) Then( 
ExpressionStmt:4( IntExpr(4))) Else( IfStmt:5( If( IntExpr(5)) Then( ExpressionStmt:6( IntExpr(6))) Else( ExpressionStmt:8( IntExpr(7))))))))) [case testWhile] while 1: pass [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( PassStmt:2()))) [case testReturn] def f(): return 1 [out] MypyFile:1( FuncDef:1( f Block:1( ReturnStmt:2( IntExpr(1))))) [case testReturnWithoutValue] def f(): return [out] MypyFile:1( FuncDef:1( f Block:1( ReturnStmt:2()))) [case testBreak] while 1: break [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( BreakStmt:2()))) [case testLargeBlock] if 1: x = 1 while 2: pass y = 2 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( AssignmentStmt:2( NameExpr(x) IntExpr(1)) WhileStmt:3( IntExpr(2) Block:3( PassStmt:4())) AssignmentStmt:5( NameExpr(y) IntExpr(2))))) [case testSimpleClass] class A: def f(self): pass [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) Block:2( PassStmt:3())))) [case testGlobalVarWithType] x = 0 # type: int y = False # type: bool [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(0) int?) AssignmentStmt:2( NameExpr(y) NameExpr(False) bool?)) [case testLocalVarWithType] def f(): x = 0 # type: int y = False # type: bool a = None # type: Any [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(x) IntExpr(0) int?) AssignmentStmt:3( NameExpr(y) NameExpr(False) bool?) AssignmentStmt:4( NameExpr(a) NameExpr(None) Any?)))) [case testFunctionDefWithType] def f(y: str) -> int: return class A: def f(self, a: int, b: Any) -> x: pass def g(self) -> Any: pass [out] MypyFile:1( FuncDef:1( f Args( Var(y)) def (y: str?) -> int? Block:1( ReturnStmt:2())) ClassDef:3( A FuncDef:4( f Args( Var(self) Var(a) Var(b)) def (self: Any, a: int?, b: Any?) -> x? Block:4( PassStmt:5())) FuncDef:6( g Args( Var(self)) def (self: Any) -> Any? Block:6( PassStmt:7())))) [case testFuncWithNoneReturn] def f() -> None: pass [out] MypyFile:1( FuncDef:1( f def () -> None? 
Block:1( PassStmt:2()))) [case testVarDefWithGenericType] x = None # type: List[str] y = None # type: Dict[int, Any] [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) NameExpr(None) List?[str?]) AssignmentStmt:2( NameExpr(y) NameExpr(None) Dict?[int?, Any?])) [case testSignatureWithGenericTypes] def f(y: t[Any, x]) -> a[b[c], d]: pass [out] MypyFile:1( FuncDef:1( f Args( Var(y)) def (y: t?[Any?, x?]) -> a?[b?[c?], d?] Block:1( PassStmt:2()))) [case testParsingExpressionsWithLessAndGreaterThan] # The operators < > can sometimes be confused with generic types. x = a < b > c f(x < b, y > c) a < b > 1 x < b, y > 2 (a < b > c) [out] MypyFile:1( AssignmentStmt:2( NameExpr(x) ComparisonExpr:2( < > NameExpr(a) NameExpr(b) NameExpr(c))) ExpressionStmt:3( CallExpr:3( NameExpr(f) Args( ComparisonExpr:3( < NameExpr(x) NameExpr(b)) ComparisonExpr:3( > NameExpr(y) NameExpr(c))))) ExpressionStmt:4( ComparisonExpr:4( < > NameExpr(a) NameExpr(b) IntExpr(1))) ExpressionStmt:5( TupleExpr:5( ComparisonExpr:5( < NameExpr(x) NameExpr(b)) ComparisonExpr:5( > NameExpr(y) IntExpr(2)))) ExpressionStmt:6( ComparisonExpr:6( < > NameExpr(a) NameExpr(b) NameExpr(c)))) [case testLineContinuation] if (1 + 2): pass [out] MypyFile:1( IfStmt:1( If( OpExpr:1( + IntExpr(1) IntExpr(2))) Then( PassStmt:3()))) [case testMultipleVarDef] x, y = z # type: int, a[c] [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x) NameExpr(y)) NameExpr(z) Tuple[int?, a?[c?]])) [case testMultipleVarDef2] (xx, z, i) = 1 # type: (a[c], Any, int) [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(xx) NameExpr(z) NameExpr(i)) IntExpr(1) Tuple[a?[c?], Any?, int?])) [case testMultipleVarDef3] (xx, (z, i)) = 1 # type: (a[c], (Any, int)) [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(xx) TupleExpr:1( NameExpr(z) NameExpr(i))) IntExpr(1) Tuple[a?[c?], Tuple[Any?, int?]])) [case testAnnotateAssignmentViaSelf] class A: def __init__(self): self.x = 1 # type: int [out] MypyFile:1( ClassDef:1( A FuncDef:2( 
__init__ Args( Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self) x) IntExpr(1) int?))))) [case testCommentAfterTypeComment] x = 0 # type: int # bar! [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(0) int?)) [case testMultilineAssignmentAndAnnotations] (x, y) = (1, 2) # type: foo, bar [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x) NameExpr(y)) TupleExpr:2( IntExpr(1) IntExpr(2)) Tuple[foo?, bar?])) [case testWhitespaceAndCommentAnnotation] x = 1#type:int [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1) int?)) [case testWhitespaceAndCommentAnnotation2] x = 1# type: int [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1) int?)) [case testWhitespaceAndCommentAnnotation3] x = 1# type : int # not recognized! [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1))) [case testInvalidAnnotation] x=1 ##type: int y=1 #.type: int z=1 # Type: int [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1)) AssignmentStmt:2( NameExpr(y) IntExpr(1)) AssignmentStmt:3( NameExpr(z) IntExpr(1))) [case testEmptyClass] class C: pass [out] MypyFile:1( ClassDef:1( C PassStmt:2())) [case testOperatorPrecedence] a | b ^ c a & b << c [out] MypyFile:1( ExpressionStmt:1( OpExpr:1( | NameExpr(a) OpExpr:1( ^ NameExpr(b) NameExpr(c)))) ExpressionStmt:2( OpExpr:2( & NameExpr(a) OpExpr:2( << NameExpr(b) NameExpr(c))))) [case testOperatorAssociativity] 1 - 2 + 3 1 << 2 << 3 [out] MypyFile:1( ExpressionStmt:1( OpExpr:1( + OpExpr:1( - IntExpr(1) IntExpr(2)) IntExpr(3))) ExpressionStmt:2( OpExpr:2( << OpExpr:2( << IntExpr(1) IntExpr(2)) IntExpr(3)))) [case testUnaryOperators] -2 * +3 * ~3 * 2 ~3**2 [out] MypyFile:1( ExpressionStmt:1( OpExpr:1( * OpExpr:1( * OpExpr:1( * UnaryExpr:1( - IntExpr(2)) UnaryExpr:1( + IntExpr(3))) UnaryExpr:1( ~ IntExpr(3))) IntExpr(2))) ExpressionStmt:2( UnaryExpr:2( ~ OpExpr:2( ** IntExpr(3) IntExpr(2))))) [case testSingleLineBodies] if 1: pass while 1: pass def f(): pass def g() -> int: return 1 [out] MypyFile:1( 
IfStmt:1( If( IntExpr(1)) Then( PassStmt:1())) WhileStmt:2( IntExpr(1) Block:2( PassStmt:2())) FuncDef:3( f Block:3( PassStmt:3())) FuncDef:4( g def () -> int? Block:4( ReturnStmt:4( IntExpr(1))))) [case testForStatement] for x in y: pass for x, (y, w) in z: 1 for [x, (y, w)] in z: 1 [out] MypyFile:1( ForStmt:1( NameExpr(x) NameExpr(y) Block:1( PassStmt:2())) ForStmt:3( TupleExpr:3( NameExpr(x) TupleExpr:3( NameExpr(y) NameExpr(w))) NameExpr(z) Block:3( ExpressionStmt:4( IntExpr(1)))) ForStmt:5( TupleExpr:5( NameExpr(x) TupleExpr:5( NameExpr(y) NameExpr(w))) NameExpr(z) Block:5( ExpressionStmt:6( IntExpr(1))))) [case testGlobalDecl] global x def f(): global x, y [out] MypyFile:1( GlobalDecl:1( x) FuncDef:2( f Block:2( GlobalDecl:3( x y)))) [case testNonlocalDecl] def f(): def g(): nonlocal x, y [out] MypyFile:1( FuncDef:1( f Block:1( FuncDef:2( g Block:2( NonlocalDecl:3( x y)))))) [case testRaiseStatement] raise foo [out] MypyFile:1( RaiseStmt:1( NameExpr(foo))) [case testRaiseWithoutArg] try: pass except: raise [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) Block:3( RaiseStmt:4()))) [case testRaiseFrom] raise e from x [out] MypyFile:1( RaiseStmt:1( NameExpr(e) NameExpr(x))) [case testBaseclasses] class A(B): pass class A(B[T], C[Any, d[x]]): pass [out] MypyFile:1( ClassDef:1( A BaseTypeExpr( NameExpr(B)) PassStmt:2()) ClassDef:3( A BaseTypeExpr( IndexExpr:3( NameExpr(B) NameExpr(T)) IndexExpr:3( NameExpr(C) TupleExpr:3( NameExpr(Any) IndexExpr:3( NameExpr(d) NameExpr(x))))) PassStmt:4())) [case testIsNot] x is not y [out] MypyFile:1( ExpressionStmt:1( ComparisonExpr:1( is not NameExpr(x) NameExpr(y)))) [case testNotIn] x not in y not x not in y x not in y | z [out] MypyFile:1( ExpressionStmt:1( ComparisonExpr:1( not in NameExpr(x) NameExpr(y))) ExpressionStmt:2( UnaryExpr:2( not ComparisonExpr:2( not in NameExpr(x) NameExpr(y)))) ExpressionStmt:3( ComparisonExpr:3( not in NameExpr(x) OpExpr:3( | NameExpr(y) NameExpr(z))))) [case testNotAsBinaryOp] x not y # 
E: invalid syntax [out] [case testNotIs] x not is y # E: invalid syntax [out] [case testBinaryNegAsBinaryOp] 1 ~ 2 # E: invalid syntax [out] [case testDictionaryExpression] {} {1:x} {1:x, 2 or 1:2 and 3} [out] MypyFile:1( ExpressionStmt:1( DictExpr:1()) ExpressionStmt:2( DictExpr:2( IntExpr(1) NameExpr(x))) ExpressionStmt:3( DictExpr:3( IntExpr(1) NameExpr(x) OpExpr:3( or IntExpr(2) IntExpr(1)) OpExpr:3( and IntExpr(2) IntExpr(3))))) [case testImport] import x import y.z.foo, __foo__.bar [out] MypyFile:1( Import:1(x) Import:2(y.z.foo, __foo__.bar)) [case testVariableTypeWithQualifiedName] x = None # type: x.y [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) NameExpr(None) x.y?)) [case testTypeInSignatureWithQualifiedName] def f() -> x.y[a.b.c]: pass [out] MypyFile:1( FuncDef:1( f def () -> x.y?[a.b.c?] Block:1( PassStmt:1()))) [case testImportFrom] from m import x from m.n import x, y, z [out] MypyFile:1( ImportFrom:1(m, [x]) ImportFrom:2(m.n, [x, y, z])) [case testImportFromAs] from m import x as y from x import y, z as a, c as c [out] MypyFile:1( ImportFrom:1(m, [x : y]) ImportFrom:2(x, [y, z : a, c : c])) [case testImportStar] from x import * [out] MypyFile:1( ImportAll:1(x)) [case testImportsInDifferentPlaces] 1 import x def f(): from x import y from z import * [out] MypyFile:1( ExpressionStmt:1( IntExpr(1)) Import:2(x) FuncDef:3( f Block:3( ImportFrom:4(x, [y]) ImportAll:5(z)))) [case testImportWithExtraComma] from x import (y, z,) [out] MypyFile:1( ImportFrom:1(x, [y, z])) [case testDefaultArgs] def f(x=1): pass def g(x, y=1+2, z=(1, 2)): pass [out] MypyFile:1( FuncDef:1( f Args( default( Var(x) IntExpr(1))) Block:1( PassStmt:2())) FuncDef:3( g Args( Var(x) default( Var(y) OpExpr:3( + IntExpr(1) IntExpr(2))) default( Var(z) TupleExpr:3( IntExpr(1) IntExpr(2)))) Block:3( PassStmt:4()))) [case testTryFinally] try: 1 finally: 2 [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( IntExpr(1))) Finally( ExpressionStmt:4( IntExpr(2))))) [case testTry] try: 1 
except x: 2 [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( IntExpr(1))) NameExpr(x) Block:3( ExpressionStmt:4( IntExpr(2))))) [case testComplexTry] try: 1 except x as y: 2 except x.y: 3 [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( IntExpr(1))) NameExpr(x) NameExpr(y) Block:3( ExpressionStmt:4( IntExpr(2))) MemberExpr:5( NameExpr(x) y) Block:5( ExpressionStmt:6( IntExpr(3))))) [case testGeneratorExpression] (x for y in z) [out] MypyFile:1( ExpressionStmt:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z)))) [case testGeneratorExpressionNested] (x for y, (p, q) in z) [out] MypyFile:1( ExpressionStmt:1( GeneratorExpr:1( NameExpr(x) TupleExpr:1( NameExpr(y) TupleExpr:1( NameExpr(p) NameExpr(q))) NameExpr(z)))) [case testListComprehension] x=[x for y in z] [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) ListComprehension:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z))))) [case testComplexListComprehension] x=[(x, y) for y, z in (1, 2)] [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) ListComprehension:1( GeneratorExpr:1( TupleExpr:1( NameExpr(x) NameExpr(y)) TupleExpr:1( NameExpr(y) NameExpr(z)) TupleExpr:1( IntExpr(1) IntExpr(2)))))) [case testListComprehension2] ([x + 1 for x in a]) [out] MypyFile:1( ExpressionStmt:1( ListComprehension:1( GeneratorExpr:1( OpExpr:1( + NameExpr(x) IntExpr(1)) NameExpr(x) NameExpr(a))))) [case testSlices] x[1:2] x[:1] x[1:] x[:] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(x) SliceExpr:1( IntExpr(1) IntExpr(2)))) ExpressionStmt:2( IndexExpr:2( NameExpr(x) SliceExpr:2( IntExpr(1)))) ExpressionStmt:3( IndexExpr:3( NameExpr(x) SliceExpr:3( IntExpr(1) ))) ExpressionStmt:4( IndexExpr:4( NameExpr(x) SliceExpr:4( )))) [case testSliceWithStride] x[1:2:3] x[1::2] x[:1:2] x[::2] x[1:2:] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(x) SliceExpr:1( IntExpr(1) IntExpr(2) IntExpr(3)))) ExpressionStmt:2( IndexExpr:2( NameExpr(x) SliceExpr:2( IntExpr(1) IntExpr(2)))) ExpressionStmt:3( 
IndexExpr:3( NameExpr(x) SliceExpr:3( IntExpr(1) IntExpr(2)))) ExpressionStmt:4( IndexExpr:4( NameExpr(x) SliceExpr:4( IntExpr(2)))) ExpressionStmt:5( IndexExpr:5( NameExpr(x) SliceExpr:5( IntExpr(1) IntExpr(2))))) [case testYield] def f(): yield x + 1 [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( YieldExpr:2( OpExpr:2( + NameExpr(x) IntExpr(1))))))) [case testYieldFrom] def f(): yield from h() [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( YieldFromExpr:2( CallExpr:2( NameExpr(h) Args())))))) [case testYieldFromAssignment] def f(): a = yield from h() [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(a) YieldFromExpr:2( CallExpr:2( NameExpr(h) Args())))))) [case testDel] del x del x[0], y[1] [out] MypyFile:1( DelStmt:1( NameExpr(x)) DelStmt:2( TupleExpr:2( IndexExpr:2( NameExpr(x) IntExpr(0)) IndexExpr:2( NameExpr(y) IntExpr(1))))) [case testExtraCommas] 1, 2, +[1, 2,] f(1,) {1:2,} [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( IntExpr(1) IntExpr(2))) ExpressionStmt:2( UnaryExpr:2( + ListExpr:2( IntExpr(1) IntExpr(2)))) ExpressionStmt:3( CallExpr:3( NameExpr(f) Args( IntExpr(1)))) ExpressionStmt:4( DictExpr:4( IntExpr(1) IntExpr(2)))) [case testExtraCommaInFunc] def f(x,): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( PassStmt:2()))) [case testLambda] lambda: 1 lambda x: y + 1 lambda x, y: 1 [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Block:1( ReturnStmt:1( IntExpr(1))))) ExpressionStmt:2( LambdaExpr:2( Args( Var(x)) Block:2( ReturnStmt:2( OpExpr:2( + NameExpr(y) IntExpr(1)))))) ExpressionStmt:3( LambdaExpr:3( Args( Var(x) Var(y)) Block:3( ReturnStmt:3( IntExpr(1)))))) [case testComplexLambda] lambda x=2: x [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( default( Var(x) IntExpr(2))) Block:1( ReturnStmt:1( NameExpr(x)))))) [case testLambdaPrecedence] lambda x: 1, 2 [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( LambdaExpr:1( Args( Var(x)) Block:1( ReturnStmt:1( IntExpr(1)))) IntExpr(2)))) [case 
testForIndicesInParens] for (i, j) in x: pass [out] MypyFile:1( ForStmt:1( TupleExpr:1( NameExpr(i) NameExpr(j)) NameExpr(x) Block:1( PassStmt:2()))) [case testForAndTrailingCommaAfterIndexVar] for i, in x: pass [out] MypyFile:1( ForStmt:1( TupleExpr:1( NameExpr(i)) NameExpr(x) Block:1( PassStmt:2()))) [case testListComprehensionAndTrailingCommaAfterIndexVar] x = [a for b, in c] [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) ListComprehension:1( GeneratorExpr:1( NameExpr(a) TupleExpr:1( NameExpr(b)) NameExpr(c))))) [case testForAndTrailingCommaAfterIndexVars] for i, j, in x: pass [out] MypyFile:1( ForStmt:1( TupleExpr:1( NameExpr(i) NameExpr(j)) NameExpr(x) Block:1( PassStmt:2()))) [case testGeneratorWithCondition] (x for y in z if 0) [out] MypyFile:1( ExpressionStmt:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z) IntExpr(0)))) [case testListComprehensionWithCondition] raise [x for y in z if 0] [out] MypyFile:1( RaiseStmt:1( ListComprehension:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z) IntExpr(0))))) [case testListComprehensionWithConditions] raise [x for y in z if 0 if 1] [out] MypyFile:1( RaiseStmt:1( ListComprehension:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z) IntExpr(0) IntExpr(1))))) [case testListComprehensionWithCrazyConditions] raise [x for y in z if (1 if 2 else 3) if 1] [out] MypyFile:1( RaiseStmt:1( ListComprehension:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z) ConditionalExpr:1( Condition( IntExpr(2)) IntExpr(1) IntExpr(3)) IntExpr(1))))) [case testDictionaryComprehension] a = {x: y for x, y in xys} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) DictionaryComprehension:1( NameExpr(x) NameExpr(y) TupleExpr:1( NameExpr(x) NameExpr(y)) NameExpr(xys)))) [case testDictionaryComprehensionComplex] a = {x: y for x, y in xys for p, q in pqs if c} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) DictionaryComprehension:1( NameExpr(x) NameExpr(y) TupleExpr:1( NameExpr(x) NameExpr(y)) TupleExpr:1( NameExpr(p) 
NameExpr(q)) NameExpr(xys) NameExpr(pqs) NameExpr(c)))) [case testSetComprehension] a = {i for i in l} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) SetComprehension:1( GeneratorExpr:1( NameExpr(i) NameExpr(i) NameExpr(l))))) [case testSetComprehensionComplex] a = {x + p for x in xys for p in pqs if c} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) SetComprehension:1( GeneratorExpr:1( OpExpr:1( + NameExpr(x) NameExpr(p)) NameExpr(x) NameExpr(p) NameExpr(xys) NameExpr(pqs) NameExpr(c))))) [case testWithStatement] with open('foo') as f: pass [out] MypyFile:1( WithStmt:1( Expr( CallExpr:1( NameExpr(open) Args( StrExpr(foo)))) Target( NameExpr(f)) Block:1( PassStmt:2()))) [case testWithStatementWithoutTarget] with foo: pass [out] MypyFile:1( WithStmt:1( Expr( NameExpr(foo)) Block:1( PassStmt:2()))) [case testHexOctBinLiterals] 0xa, 0Xaf, 0o7, 0O12, 0b1, 0B101 [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( IntExpr(10) IntExpr(175) IntExpr(7) IntExpr(10) IntExpr(1) IntExpr(5)))) [case testImportFromWithParens] from x import (y) from x import (y, z) [out] MypyFile:1( ImportFrom:1(x, [y]) ImportFrom:2(x, [y, z])) [case testContinueStmt] while 1: continue [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( ContinueStmt:2()))) [case testStrLiteralConcatenate] 'f' 'bar' ('x' 'y' 'z') [out] MypyFile:1( ExpressionStmt:1( StrExpr(fbar)) ExpressionStmt:2( StrExpr(xyz))) [case testCatchAllExcept] try: 1 except: pass try: 1 except x: pass except: 2 [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( IntExpr(1))) Block:3( PassStmt:4())) TryStmt:5( Block:5( ExpressionStmt:6( IntExpr(1))) NameExpr(x) Block:7( PassStmt:8()) Block:9( ExpressionStmt:10( IntExpr(2))))) [case testTryElse] try: pass except x: 1 else: 2 [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) NameExpr(x) Block:3( ExpressionStmt:4( IntExpr(1))) Else( ExpressionStmt:6( IntExpr(2))))) [case testExceptWithMultipleTypes] try: pass except (x, y): pass except (a, b, c) as e: pass [out] MypyFile:1( TryStmt:1( 
Block:1( PassStmt:2()) TupleExpr:3( NameExpr(x) NameExpr(y)) Block:3( PassStmt:4()) TupleExpr:5( NameExpr(a) NameExpr(b) NameExpr(c)) NameExpr(e) Block:5( PassStmt:6()))) [case testNestedFunctions] def f(): def g(): pass def h() -> int: def g() -> int: pass [out] MypyFile:1( FuncDef:1( f Block:1( FuncDef:2( g Block:2( PassStmt:3())))) FuncDef:4( h def () -> int? Block:4( FuncDef:5( g def () -> int? Block:5( PassStmt:6()))))) [case testStatementsAndDocStringsInClassBody] class A: "doc string" x = y def f(self): pass [out] MypyFile:1( ClassDef:1( A ExpressionStmt:2( StrExpr(doc string)) AssignmentStmt:3( NameExpr(x) NameExpr(y)) FuncDef:4( f Args( Var(self)) Block:4( PassStmt:5())))) [case testSingleLineClass] class a: pass [out] MypyFile:1( ClassDef:1( a PassStmt:1())) [case testDecorator] @property def f(): pass [out] MypyFile:1( Decorator:1( Var(f) NameExpr(property) FuncDef:2( f Block:2( PassStmt:3())))) [case testComplexDecorator] @foo(bar, 1) @zar def f() -> int: pass [out] MypyFile:1( Decorator:1( Var(f) CallExpr:1( NameExpr(foo) Args( NameExpr(bar) IntExpr(1))) NameExpr(zar) FuncDef:3( f def () -> int? Block:3( PassStmt:4())))) [case testKeywordArgInCall] f(x=1) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args() KwArgs( x IntExpr(1))))) [case testComplexKeywordArgs] f(x, y=1 or 2, z=y) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args( NameExpr(x)) KwArgs( y OpExpr:1( or IntExpr(1) IntExpr(2))) KwArgs( z NameExpr(y))))) [case testChainedAssignment] x = z = 1 [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x) NameExpr(z)) IntExpr(1))) [case testVarArgs] def f(x, *a): pass f(1, *2) [out] MypyFile:1( FuncDef:1( f Args( Var(x)) VarArg( Var(a)) Block:1( PassStmt:1())) ExpressionStmt:2( CallExpr:2( NameExpr(f) Args( IntExpr(1) IntExpr(2)) VarArg))) [case testVarArgWithType] def f(x: str, *a: int): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: str?, *a: int?) 
-> Any VarArg( Var(a)) Block:1( PassStmt:1()))) [case testDictVarArgs] def f(x, **a): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) DictVarArg( Var(a)) Block:1( PassStmt:1()))) [case testBothVarArgs] def f(x, *a, **b): pass def g(*a, **b): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) VarArg( Var(a)) DictVarArg( Var(b)) Block:1( PassStmt:1())) FuncDef:2( g VarArg( Var(a)) DictVarArg( Var(b)) Block:2( PassStmt:2()))) [case testDictVarArgsWithType] def f(x: X, **a: A) -> None: pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?, **a: A?) -> None? DictVarArg( Var(a)) Block:1( PassStmt:1()))) [case testCallDictVarArgs] f(**x) f(x, **y) f(*x, **y) f(x, *y, **z) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args() DictVarArg( NameExpr(x)))) ExpressionStmt:2( CallExpr:2( NameExpr(f) Args( NameExpr(x)) DictVarArg( NameExpr(y)))) ExpressionStmt:3( CallExpr:3( NameExpr(f) Args( NameExpr(x)) VarArg DictVarArg( NameExpr(y)))) ExpressionStmt:4( CallExpr:4( NameExpr(f) Args( NameExpr(x) NameExpr(y)) VarArg DictVarArg( NameExpr(z))))) [case testAssert] assert x == y [out] MypyFile:1( AssertStmt:1( ComparisonExpr:1( == NameExpr(x) NameExpr(y)))) [case testYieldWithoutExpressions] def f(): yield [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( YieldExpr:2())))) [case testConditionalExpression] x if y else z [out] MypyFile:1( ExpressionStmt:1( ConditionalExpr:1( Condition( NameExpr(y)) NameExpr(x) NameExpr(z)))) [case testConditionalExpressionInListComprehension] a = [x if y else z for a in b] [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) ListComprehension:1( GeneratorExpr:1( ConditionalExpr:1( Condition( NameExpr(y)) NameExpr(x) NameExpr(z)) NameExpr(a) NameExpr(b))))) [case testConditionalExpressionInTuple] 1 if 2 else 3, 4 [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( ConditionalExpr:1( Condition( IntExpr(2)) IntExpr(1) IntExpr(3)) IntExpr(4)))) [case testSetLiteral] {x or y} {1, 2} [out] MypyFile:1( ExpressionStmt:1( SetExpr:1( 
OpExpr:1( or NameExpr(x) NameExpr(y)))) ExpressionStmt:2( SetExpr:2( IntExpr(1) IntExpr(2)))) [case testSetLiteralWithExtraComma] {x,} [out] MypyFile:1( ExpressionStmt:1( SetExpr:1( NameExpr(x)))) [case testImportAs] import x as y import x, z as y, a.b as c, d as d [out] MypyFile:1( Import:1(x : y) Import:2(x, z : y, a.b : c, d : d)) [case testForAndElse] for x in y: pass else: x [out] MypyFile:1( ForStmt:1( NameExpr(x) NameExpr(y) Block:1( PassStmt:2()) Else( ExpressionStmt:4( NameExpr(x))))) [case testWhileAndElse] while x: pass else: y [out] MypyFile:1( WhileStmt:1( NameExpr(x) Block:1( PassStmt:2()) Else( ExpressionStmt:4( NameExpr(y))))) [case testWithAndMultipleOperands] with x as y, a as b: pass with x(), y(): pass [out] MypyFile:1( WithStmt:1( Expr( NameExpr(x)) Target( NameExpr(y)) Expr( NameExpr(a)) Target( NameExpr(b)) Block:1( PassStmt:2())) WithStmt:3( Expr( CallExpr:3( NameExpr(x) Args())) Expr( CallExpr:3( NameExpr(y) Args())) Block:3( PassStmt:4()))) [case testOperatorAssignment] x += 1 x -= 1 x *= 1 x /= 1 x //= 1 x %= 1 x **= 1 x |= 1 x &= 1 x ^= 1 x >>= 1 x <<= 1 [out] MypyFile:1( OperatorAssignmentStmt:1( + NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:2( - NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:3( * NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:4( / NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:5( // NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:6( % NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:7( ** NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:8( | NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:9( & NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:10( ^ NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:11( >> NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:12( << NameExpr(x) IntExpr(1))) [case testNestedClasses] class A: class B: pass class C: pass [out] MypyFile:1( ClassDef:1( A ClassDef:2( B PassStmt:3()) ClassDef:4( C PassStmt:5()))) [case testTryWithExceptAndFinally] try: pass except x: x finally: y [out] MypyFile:1( TryStmt:1( 
Block:1( PassStmt:2()) NameExpr(x) Block:3( ExpressionStmt:4( NameExpr(x))) Finally( ExpressionStmt:6( NameExpr(y))))) [case testBareAsteriskInFuncDef] def f(x, *, y=1): pass [out] MypyFile:1( FuncDef:1( f MaxPos(1) Args( Var(x) default( Var(y) IntExpr(1))) Block:1( PassStmt:1()))) [case testBareAsteriskInFuncDefWithSignature] def f(x: A, *, y: B = 1) -> None: pass [out] MypyFile:1( FuncDef:1( f MaxPos(1) Args( Var(x) default( Var(y) IntExpr(1))) def (x: A?, *, y: B? =) -> None? Block:1( PassStmt:1()))) [case testBareAsteriskNamedDefault] def f(*, y: B = 1) -> None: pass [out] MypyFile:1( FuncDef:1( f MaxPos(0) Args( default( Var(y) IntExpr(1))) def (*, y: B? =) -> None? Block:1( PassStmt:1()))) [case testBareAsteriskNamedNoDefault] def f(*, y: B) -> None: pass [out] MypyFile:1( FuncDef:1( f MaxPos(0) Args( Var(y)) def (*, y: B?) -> None? Block:1( PassStmt:1()))) [case testSuperExpr] super().x [out] MypyFile:1( ExpressionStmt:1( SuperExpr:1( x CallExpr:1( NameExpr(super) Args())))) [case testKeywordAndDictArgs] f(x = y, **kwargs) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args() KwArgs( x NameExpr(y)) DictVarArg( NameExpr(kwargs))))) [case testSimpleFunctionType] f = None # type: Callable[[], None] [out] MypyFile:1( AssignmentStmt:1( NameExpr(f) NameExpr(None) Callable?[, None?])) [case testFunctionTypeWithArgument] f = None # type: Callable[[str], int] [out] MypyFile:1( AssignmentStmt:1( NameExpr(f) NameExpr(None) Callable?[, int?])) [case testFunctionTypeWithTwoArguments] f = None # type: Callable[[a[b], x.y], List[int]] [out] MypyFile:1( AssignmentStmt:1( NameExpr(f) NameExpr(None) Callable?[, List?[int?]])) [case testFunctionTypeWithExtraComma] def f(x: Callable[[str,], int]): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: Callable?[, int?]) -> Any Block:1( PassStmt:1()))) [case testSimpleStringLiteralType] def f() -> 'A': pass [out] MypyFile:1( FuncDef:1( f def () -> A? 
Block:1( PassStmt:1()))) [case testGenericStringLiteralType] def f() -> 'A[B, C]': pass [out] MypyFile:1( FuncDef:1( f def () -> A?[B?, C?] Block:1( PassStmt:1()))) [case testPartialStringLiteralType] def f() -> A['B', C]: pass [out] MypyFile:1( FuncDef:1( f def () -> A?[B?, C?] Block:1( PassStmt:1()))) [case testWhitespaceInStringLiteralType] def f() -> ' A [ X ] ': pass [out] MypyFile:1( FuncDef:1( f def () -> A?[X?] Block:1( PassStmt:1()))) [case testEscapeInStringLiteralType] def f() -> '\x41': pass [out] MypyFile:1( FuncDef:1( f def () -> A? Block:1( PassStmt:1()))) [case testMetaclass] class Foo(metaclass=Bar): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(NameExpr(Bar)) PassStmt:1())) [case testQualifiedMetaclass] class Foo(metaclass=foo.Bar): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(MemberExpr:1( NameExpr(foo) Bar)) PassStmt:1())) [case testBaseAndMetaclass] class Foo(foo.bar[x], metaclass=Bar): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(NameExpr(Bar)) BaseTypeExpr( IndexExpr:1( MemberExpr:1( NameExpr(foo) bar) NameExpr(x))) PassStmt:1())) [case testClassKeywordArgs] class Foo(_root=None): pass [out] MypyFile:1( ClassDef:1( Foo PassStmt:1())) [case testClassKeywordArgsBeforeMeta] class Foo(_root=None, metaclass=Bar): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(NameExpr(Bar)) PassStmt:1())) [case testClassKeywordArgsAfterMeta] class Foo(metaclass=Bar, _root=None): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(NameExpr(Bar)) PassStmt:1())) [case testNamesThatAreNoLongerKeywords] any = interface [out] MypyFile:1( AssignmentStmt:1( NameExpr(any) NameExpr(interface))) [case testFunctionOverload] @overload def f() -> x: pass @overload def f() -> y: pass [out] MypyFile:1( OverloadedFuncDef:1( Decorator:1( Var(f) NameExpr(overload) FuncDef:2( f def () -> x? Block:2( PassStmt:2()))) Decorator:3( Var(f) NameExpr(overload) FuncDef:4( f def () -> y? 
Block:4( PassStmt:4()))))) [case testFunctionOverloadAndOtherStatements] x @overload def f() -> x: pass @overload def f() -> y: pass x [out] MypyFile:1( ExpressionStmt:1( NameExpr(x)) OverloadedFuncDef:2( Decorator:2( Var(f) NameExpr(overload) FuncDef:3( f def () -> x? Block:3( PassStmt:3()))) Decorator:4( Var(f) NameExpr(overload) FuncDef:5( f def () -> y? Block:5( PassStmt:5())))) ExpressionStmt:6( NameExpr(x))) [case testFunctionOverloadWithThreeVariants] @overload def f() -> x: pass @overload def f() -> y: pass @overload def f(y): pass [out] MypyFile:1( OverloadedFuncDef:1( Decorator:1( Var(f) NameExpr(overload) FuncDef:2( f def () -> x? Block:2( PassStmt:2()))) Decorator:3( Var(f) NameExpr(overload) FuncDef:4( f def () -> y? Block:4( PassStmt:4()))) Decorator:5( Var(f) NameExpr(overload) FuncDef:6( f Args( Var(y)) Block:6( PassStmt:6()))))) [case testDecoratorsThatAreNotOverloads] @foo def f() -> x: pass @foo def g() -> y: pass [out] MypyFile:1( Decorator:1( Var(f) NameExpr(foo) FuncDef:2( f def () -> x? Block:2( PassStmt:2()))) Decorator:3( Var(g) NameExpr(foo) FuncDef:4( g def () -> y? Block:4( PassStmt:4())))) [case testFunctionOverloadWithinFunction] def f(): @overload def g(): pass @overload def g() -> x: pass [out] MypyFile:1( FuncDef:1( f Block:1( OverloadedFuncDef:2( Decorator:2( Var(g) NameExpr(overload) FuncDef:3( g Block:3( PassStmt:3()))) Decorator:4( Var(g) NameExpr(overload) FuncDef:5( g def () -> x? Block:5( PassStmt:5()))))))) [case testCommentFunctionAnnotation] def f(): # type: () -> A pass def g(x): # type: (A) -> B pass [out] MypyFile:1( FuncDef:1( f def () -> A? Block:1( PassStmt:2())) FuncDef:3( g Args( Var(x)) def (x: A?) -> B? Block:3( PassStmt:4()))) [case testCommentMethodAnnotation] class A: def f(self): # type: () -> A pass def g(xself, x): # type: (A) -> B pass [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) def (self: Any) -> A? Block:2( PassStmt:3())) FuncDef:4( g Args( Var(xself) Var(x)) def (xself: Any, x: A?) 
-> B? Block:4( PassStmt:5())))) [case testCommentMethodAnnotationAndNestedFunction] class A: def f(self): # type: () -> A def g(x): # type: (A) -> B pass [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) def (self: Any) -> A? Block:2( FuncDef:3( g Args( Var(x)) def (x: A?) -> B? Block:3( PassStmt:4())))))) [case testCommentFunctionAnnotationOnSeparateLine] def f(x): # type: (X) -> Y pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?) -> Y? Block:1( PassStmt:3()))) [case testCommentFunctionAnnotationOnSeparateLine2] def f(x): # type: (X) -> Y # bar pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?) -> Y? Block:1( PassStmt:5()))) [case testCommentFunctionAnnotationAndVarArg] def f(x, *y): # type: (X, *Y) -> Z pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?, *y: Y?) -> Z? VarArg( Var(y)) Block:1( PassStmt:2()))) [case testCommentFunctionAnnotationAndAllVarArgs] def f(x, *y, **z): # type: (X, *Y, **Z) -> A pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?, *y: Y?, **z: Z?) -> A? 
VarArg( Var(y)) DictVarArg( Var(z)) Block:1( PassStmt:2()))) [case testClassDecorator] @foo class X: pass @foo(bar) @x.y class Z: pass [out] MypyFile:1( ClassDef:2( X Decorators( NameExpr(foo)) PassStmt:2()) ClassDef:5( Z Decorators( CallExpr:3( NameExpr(foo) Args( NameExpr(bar))) MemberExpr:4( NameExpr(x) y)) PassStmt:5())) [case testTrailingSemicolon] def x(): pass; def y(): pass [out] MypyFile:1( FuncDef:1( x Block:1( PassStmt:2())) FuncDef:4( y Block:4( PassStmt:5()))) [case testEmptySuperClass] class A(): pass [out] MypyFile:1( ClassDef:1( A PassStmt:2())) [case testStarExpression] *a *a, b a, *b a, (*x, y) a, (x, *y) [out] MypyFile:1( ExpressionStmt:1( StarExpr:1( NameExpr(a))) ExpressionStmt:2( TupleExpr:2( StarExpr:2( NameExpr(a)) NameExpr(b))) ExpressionStmt:3( TupleExpr:3( NameExpr(a) StarExpr:3( NameExpr(b)))) ExpressionStmt:4( TupleExpr:4( NameExpr(a) TupleExpr:4( StarExpr:4( NameExpr(x)) NameExpr(y)))) ExpressionStmt:5( TupleExpr:5( NameExpr(a) TupleExpr:5( NameExpr(x) StarExpr:5( NameExpr(y)))))) [case testStarExpressionParenthesis] *(a) *(a,b) [out] MypyFile:1( ExpressionStmt:1( StarExpr:1( NameExpr(a))) ExpressionStmt:2( StarExpr:2( TupleExpr:2( NameExpr(a) NameExpr(b))))) [case testStarExpressionInFor] for *a in b: pass for a, *b in c: pass for *a, b in c: pass [out] MypyFile:1( ForStmt:1( StarExpr:1( NameExpr(a)) NameExpr(b) Block:1( PassStmt:2())) ForStmt:4( TupleExpr:4( NameExpr(a) StarExpr:4( NameExpr(b))) NameExpr(c) Block:4( PassStmt:5())) ForStmt:7( TupleExpr:7( StarExpr:7( NameExpr(a)) NameExpr(b)) NameExpr(c) Block:7( PassStmt:8()))) [case testStarExprInGeneratorExpr] (x for y, *p in z) (x for *p, y in z) (x for y, *p, q in z) [out] MypyFile:1( ExpressionStmt:1( GeneratorExpr:1( NameExpr(x) TupleExpr:1( NameExpr(y) StarExpr:1( NameExpr(p))) NameExpr(z))) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x) TupleExpr:2( StarExpr:2( NameExpr(p)) NameExpr(y)) NameExpr(z))) ExpressionStmt:3( GeneratorExpr:3( NameExpr(x) TupleExpr:3( NameExpr(y) 
StarExpr:3( NameExpr(p)) NameExpr(q)) NameExpr(z)))) [case testParseNamedtupleBaseclass] class A(namedtuple('x', ['y'])): pass [out] MypyFile:1( ClassDef:1( A BaseTypeExpr( CallExpr:1( NameExpr(namedtuple) Args( StrExpr(x) ListExpr:1( StrExpr(y))))) PassStmt:1())) [case testEllipsis] ... a[1,...,2] ....__class__ [out] MypyFile:1( ExpressionStmt:1( Ellipsis) ExpressionStmt:2( IndexExpr:2( NameExpr(a) TupleExpr:2( IntExpr(1) Ellipsis IntExpr(2)))) ExpressionStmt:3( MemberExpr:3( Ellipsis __class__))) [case testFunctionWithManyKindsOfArgs] def f(x, *args, y=None, **kw): pass [out] MypyFile:1( FuncDef:1( f MaxPos(1) Args( Var(x) default( Var(y) NameExpr(None))) VarArg( Var(args)) DictVarArg( Var(kw)) Block:1( PassStmt:1()))) [case testIfWithSemicolons] if 1: a; b [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( ExpressionStmt:1( NameExpr(a)) ExpressionStmt:1( NameExpr(b))))) [case testIfWithSemicolonsNested] while 2: if 1: a; b [out] MypyFile:1( WhileStmt:1( IntExpr(2) Block:1( IfStmt:2( If( IntExpr(1)) Then( ExpressionStmt:2( NameExpr(a)) ExpressionStmt:2( NameExpr(b))))))) [case testIfElseWithSemicolons] if 1: global x; y = 1 else: x = 1; return 3 4 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( GlobalDecl:1( x) AssignmentStmt:1( NameExpr(y) IntExpr(1))) Else( AssignmentStmt:2( NameExpr(x) IntExpr(1)) ReturnStmt:2( IntExpr(3)))) ExpressionStmt:3( IntExpr(4))) [case testIfElseWithSemicolonsNested] while 2: if 1: global x; y = 1 else: x = 1; return 3 4 [out] MypyFile:1( WhileStmt:1( IntExpr(2) Block:1( IfStmt:2( If( IntExpr(1)) Then( GlobalDecl:2( x) AssignmentStmt:2( NameExpr(y) IntExpr(1))) Else( AssignmentStmt:3( NameExpr(x) IntExpr(1)) ReturnStmt:3( IntExpr(3)))))) ExpressionStmt:4( IntExpr(4))) [case testKeywordArgumentAfterStarArgumentInCall] f(x=1, *y) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args( NameExpr(y)) VarArg KwArgs( x IntExpr(1))))) [case testConditionalExpressionInSetComprehension] { 1 if x else 2 for x in y } [out] MypyFile:1( 
ExpressionStmt:1( SetComprehension:1( GeneratorExpr:1( ConditionalExpr:1( Condition( NameExpr(x)) IntExpr(1) IntExpr(2)) NameExpr(x) NameExpr(y))))) [case testConditionalExpressionInListComprehension] a = [ 1 if x else 2 for x in y ] [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) ListComprehension:1( GeneratorExpr:1( ConditionalExpr:1( Condition( NameExpr(x)) IntExpr(1) IntExpr(2)) NameExpr(x) NameExpr(y))))) [case testComplexWithLvalue] with x as y.z: pass [out] MypyFile:1( WithStmt:1( Expr( NameExpr(x)) Target( MemberExpr:1( NameExpr(y) z)) Block:1( PassStmt:1()))) [case testRelativeImportWithEllipsis] from ... import x [out] MypyFile:1( ImportFrom:1(..., [x])) [case testRelativeImportWithEllipsis2] from .... import x [out] MypyFile:1( ImportFrom:1(...., [x])) [case testParseExtendedSlicing] a[:, :] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(a) TupleExpr:-1( SliceExpr:-1( ) SliceExpr:-1( ))))) [case testParseExtendedSlicing2] a[1:2:, :,] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(a) TupleExpr:-1( SliceExpr:-1( IntExpr(1) IntExpr(2)) SliceExpr:-1( ))))) [case testParseExtendedSlicing3] a[1:2:3, ..., 1] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(a) TupleExpr:-1( SliceExpr:-1( IntExpr(1) IntExpr(2) IntExpr(3)) Ellipsis IntExpr(1))))) [case testParseIfExprInDictExpr] test = { 'spam': 'eggs' if True else 'bacon' } [out] MypyFile:1( AssignmentStmt:1( NameExpr(test) DictExpr:1( StrExpr(spam) ConditionalExpr:1( Condition( NameExpr(True)) StrExpr(eggs) StrExpr(bacon))))) [case testIgnoreLine] import x # type: ignore [out] MypyFile:1( Import:1(x) IgnoredLines(1)) [case testIgnore2Lines] x y # type: ignore z # type: ignore [out] MypyFile:1( ExpressionStmt:1( NameExpr(x)) ExpressionStmt:2( NameExpr(y)) ExpressionStmt:3( NameExpr(z)) IgnoredLines(2, 3)) [case testCommentedOutIgnoreAnnotation] y ## type: ignore [out] MypyFile:1( ExpressionStmt:1( NameExpr(y))) [case testSpaceInIgnoreAnnotations] y # type: ignore # foo y 
#type:ignore [out] MypyFile:1( ExpressionStmt:1( NameExpr(y)) ExpressionStmt:2( NameExpr(y)) IgnoredLines(1, 2)) [case testIgnoreAnnotationAndMultilineStatement] x = { 1: 2 # type: ignore } y = { # type: ignore 1: 2 } # type: ignore [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) DictExpr:1( IntExpr(1) IntExpr(2))) AssignmentStmt:4( NameExpr(y) DictExpr:4( IntExpr(1) IntExpr(2))) IgnoredLines(2, 4, 6)) [case testIgnoreAnnotationAndMultilineStatement2] from m import ( # type: ignore x, y ) [out] MypyFile:1( ImportFrom:1(m, [x, y]) IgnoredLines(1)) [case testYieldExpression] def f(): x = yield f() [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(x) YieldExpr:2( CallExpr:2( NameExpr(f) Args())))))) [case testForWithSingleItemTuple] for x in 1,: pass [out] MypyFile:1( ForStmt:1( NameExpr(x) TupleExpr:1( IntExpr(1)) Block:1( PassStmt:1()))) [case testIsoLatinUnixEncoding] # coding: iso-latin-1-unix [out] MypyFile:1() [case testLatinUnixEncoding] # coding: latin-1-unix [out] MypyFile:1() [case testLatinUnixEncoding] # coding: iso-latin-1 [out] MypyFile:1() [case testYieldExpressionInParens] def f(): (yield) [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( YieldExpr:2())))) [case testFStringSimple] x = 'mypy' f'Hello {x}' [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) StrExpr(mypy)) ExpressionStmt:2( CallExpr:2( MemberExpr:2( StrExpr() join) Args( ListExpr:2( StrExpr(Hello ) CallExpr:2( MemberExpr:2( StrExpr({:{}}) format) Args( NameExpr(x) StrExpr()))))))) [case testFStringWithConversion] x = 'mypy' F'Hello {x!r}' [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) StrExpr(mypy)) ExpressionStmt:2( CallExpr:2( MemberExpr:2( StrExpr() join) Args( ListExpr:2( StrExpr(Hello ) CallExpr:2( MemberExpr:2( StrExpr({!r:{}}) format) Args( NameExpr(x) StrExpr()))))))) [case testFStringWithOnlyFormatSpecifier] x = 'mypy' f'Hello {x:<30}' [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) StrExpr(mypy)) ExpressionStmt:2( CallExpr:2( MemberExpr:2( StrExpr() join) 
Args( ListExpr:2( StrExpr(Hello ) CallExpr:2( MemberExpr:2( StrExpr({:{}}) format) Args( NameExpr(x) StrExpr(<30)))))))) [case testFStringWithFormatSpecifierAndConversion] x = 'mypy' f'Hello {x!s:<30}' [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) StrExpr(mypy)) ExpressionStmt:2( CallExpr:2( MemberExpr:2( StrExpr() join) Args( ListExpr:2( StrExpr(Hello ) CallExpr:2( MemberExpr:2( StrExpr({!s:{}}) format) Args( NameExpr(x) StrExpr(<30)))))))) [case testFStringWithFormatSpecifierExpression] x = 'mypy' y = 30 f'Hello {x!s:<{y+y}}' [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) StrExpr(mypy)) AssignmentStmt:2( NameExpr(y) IntExpr(30)) ExpressionStmt:3( CallExpr:3( MemberExpr:3( StrExpr() join) Args( ListExpr:3( StrExpr(Hello ) CallExpr:3( MemberExpr:3( StrExpr({!s:{}}) format) Args( NameExpr(x) CallExpr:3( MemberExpr:3( StrExpr() join) Args( ListExpr:3( StrExpr(<) CallExpr:3( MemberExpr:3( StrExpr({:{}}) format) Args( OpExpr:3( + NameExpr(y) NameExpr(y)) StrExpr())))))))))))) mypy-0.761/test-data/unit/plugins/0000755€tŠÔÚ€2›s®0000000000013576752267023251 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.761/test-data/unit/plugins/arg_kinds.py0000644€tŠÔÚ€2›s®0000000175613576752246025572 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Optional, Callable from mypy.nodes import Context from mypy.plugin import Plugin, MethodContext, FunctionContext from mypy.types import Type class ArgKindsPlugin(Plugin): def get_function_hook(self, fullname: str ) -> Optional[Callable[[FunctionContext], Type]]: if 'func' in fullname: return extract_arg_kinds_from_function return None def get_method_hook(self, fullname: str ) -> Optional[Callable[[MethodContext], Type]]: if 'Class.method' in fullname: return extract_arg_kinds_from_method return None def extract_arg_kinds_from_function(ctx: FunctionContext) -> Type: ctx.api.fail(str(ctx.arg_kinds), ctx.context) return ctx.default_return_type def extract_arg_kinds_from_method(ctx: MethodContext) -> Type: 
ctx.api.fail(str(ctx.arg_kinds), ctx.context) return ctx.default_return_type def plugin(version): return ArgKindsPlugin mypy-0.761/test-data/unit/plugins/arg_names.py0000644€tŠÔÚ€2›s®0000000266713576752246025567 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Callable from mypy.plugin import Plugin, MethodContext, FunctionContext from mypy.types import Type class ArgNamesPlugin(Plugin): def get_function_hook(self, fullname: str ) -> Optional[Callable[[FunctionContext], Type]]: if fullname in {'mod.func', 'mod.func_unfilled', 'mod.func_star_expr', 'mod.ClassInit', 'mod.Outer.NestedClassInit'}: return extract_classname_and_set_as_return_type_function return None def get_method_hook(self, fullname: str ) -> Optional[Callable[[MethodContext], Type]]: if fullname in {'mod.Class.method', 'mod.Class.myclassmethod', 'mod.Class.mystaticmethod', 'mod.ClassUnfilled.method', 'mod.ClassStarExpr.method', 'mod.ClassChild.method', 'mod.ClassChild.myclassmethod'}: return extract_classname_and_set_as_return_type_method return None def extract_classname_and_set_as_return_type_function(ctx: FunctionContext) -> Type: classname = ctx.args[ctx.callee_arg_names.index('classname')][0].value return ctx.api.named_generic_type(classname, []) def extract_classname_and_set_as_return_type_method(ctx: MethodContext) -> Type: classname = ctx.args[ctx.callee_arg_names.index('classname')][0].value return ctx.api.named_generic_type(classname, []) def plugin(version): return ArgNamesPlugin mypy-0.761/test-data/unit/plugins/attrhook.py0000644€tŠÔÚ€2›s®0000000106313576752246025453 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Callable from mypy.plugin import Plugin, AttributeContext from mypy.types import Type, Instance class AttrPlugin(Plugin): def get_attribute_hook(self, fullname: str) -> Optional[Callable[[AttributeContext], Type]]: if fullname == 'm.Signal.__call__': return signal_call_callback return None def signal_call_callback(ctx: 
AttributeContext) -> Type: if isinstance(ctx.type, Instance): return ctx.type.args[0] return ctx.default_attr_type def plugin(version): return AttrPlugin mypy-0.761/test-data/unit/plugins/attrhook2.py0000644€tŠÔÚ€2›s®0000000142313576752246025535 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Callable from mypy.plugin import Plugin, AttributeContext from mypy.types import Type, AnyType, TypeOfAny class AttrPlugin(Plugin): def get_attribute_hook(self, fullname: str) -> Optional[Callable[[AttributeContext], Type]]: if fullname == 'm.Magic.magic_field': return magic_field_callback if fullname == 'm.Magic.nonexistent_field': return nonexistent_field_callback return None def magic_field_callback(ctx: AttributeContext) -> Type: return ctx.api.named_generic_type("builtins.str", []) def nonexistent_field_callback(ctx: AttributeContext) -> Type: ctx.api.fail("Field does not exist", ctx.context) return AnyType(TypeOfAny.from_error) def plugin(version): return AttrPlugin mypy-0.761/test-data/unit/plugins/badreturn.py0000644€tŠÔÚ€2›s®0000000003613576752246025605 0ustar jukkaDROPBOX\Domain Users00000000000000def plugin(version): pass mypy-0.761/test-data/unit/plugins/badreturn2.py0000644€tŠÔÚ€2›s®0000000010313576752246025662 0ustar jukkaDROPBOX\Domain Users00000000000000class MyPlugin: pass def plugin(version): return MyPlugin mypy-0.761/test-data/unit/plugins/callable_instance.py0000644€tŠÔÚ€2›s®0000000125713576752246027250 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import MethodContext, Plugin from mypy.types import Instance, Type class CallableInstancePlugin(Plugin): def get_function_hook(self, fullname): assert not fullname.endswith(' of Foo') def get_method_hook(self, fullname): # Ensure that all names are fully qualified assert not fullname.endswith(' of Foo') if fullname == '__main__.Class.__call__': return my_hook return None def my_hook(ctx: MethodContext) -> Type: if isinstance(ctx.type, Instance) and len(ctx.type.args) 
== 1: return ctx.type.args[0] return ctx.default_return_type def plugin(version): return CallableInstancePlugin mypy-0.761/test-data/unit/plugins/class_callable.py0000644€tŠÔÚ€2›s®0000000214313576752246026544 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin from mypy.nodes import NameExpr from mypy.types import UnionType, NoneType, Instance class AttrPlugin(Plugin): def get_function_hook(self, fullname): if fullname.startswith('mod.Attr'): return attr_hook return None def attr_hook(ctx): assert isinstance(ctx.default_return_type, Instance) if ctx.default_return_type.type.fullname == 'mod.Attr': attr_base = ctx.default_return_type else: attr_base = None for base in ctx.default_return_type.type.bases: if base.type.fullname == 'mod.Attr': attr_base = base break assert attr_base is not None last_arg_exprs = ctx.args[-1] if any(isinstance(expr, NameExpr) and expr.name == 'True' for expr in last_arg_exprs): return attr_base assert len(attr_base.args) == 1 arg_type = attr_base.args[0] return Instance(attr_base.type, [UnionType([arg_type, NoneType()])], line=ctx.default_return_type.line, column=ctx.default_return_type.column) def plugin(version): return AttrPlugin mypy-0.761/test-data/unit/plugins/common_api_incremental.py0000644€tŠÔÚ€2›s®0000000245313576752246030326 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin from mypy.nodes import ( ClassDef, Block, TypeInfo, SymbolTable, SymbolTableNode, MDEF, GDEF, Var ) class DynPlugin(Plugin): def get_dynamic_class_hook(self, fullname): if fullname == 'lib.declarative_base': return add_info_hook return None def get_base_class_hook(self, fullname: str): sym = self.lookup_fully_qualified(fullname) if sym and isinstance(sym.node, TypeInfo): if sym.node.metadata.get('magic'): return add_magic_hook return None def add_info_hook(ctx) -> None: class_def = ClassDef(ctx.name, Block([])) class_def.fullname = ctx.api.qualified_name(ctx.name) info = TypeInfo(SymbolTable(), class_def, 
ctx.api.cur_mod_id) class_def.info = info obj = ctx.api.builtin_type('builtins.object') info.mro = [info, obj.type] info.bases = [obj] ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info)) info.metadata['magic'] = True def add_magic_hook(ctx) -> None: info = ctx.cls.info str_type = ctx.api.named_type_or_none('builtins.str', []) assert str_type is not None var = Var('__magic__', str_type) var.info = info info.names['__magic__'] = SymbolTableNode(MDEF, var) def plugin(version): return DynPlugin mypy-0.761/test-data/unit/plugins/config_data.py0000644€tŠÔÚ€2›s®0000000060613576752246026060 0ustar jukkaDROPBOX\Domain Users00000000000000import os import json from typing import Any from mypy.plugin import Plugin, ReportConfigContext class ConfigDataPlugin(Plugin): def report_config_data(self, ctx: ReportConfigContext) -> Any: path = os.path.join('tmp/test.json') with open(path) as f: data = json.load(f) return data.get(ctx.id) def plugin(version): return ConfigDataPlugin mypy-0.761/test-data/unit/plugins/customentry.py0000644€tŠÔÚ€2›s®0000000052413576752246026215 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname == '__main__.f': return my_hook assert fullname is not None return None def my_hook(ctx): return ctx.api.named_generic_type('builtins.int', []) def register(version): return MyPlugin mypy-0.761/test-data/unit/plugins/customize_mro.py0000644€tŠÔÚ€2›s®0000000035113576752246026516 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin class DummyPlugin(Plugin): def get_customize_class_mro_hook(self, fullname): def analyze(classdef_ctx): pass return analyze def plugin(version): return DummyPlugin mypy-0.761/test-data/unit/plugins/depshook.py0000644€tŠÔÚ€2›s®0000000055413576752246025440 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Callable, List, Tuple from mypy.plugin import Plugin from mypy.nodes 
import MypyFile class DepsPlugin(Plugin): def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]: if file.fullname == '__main__': return [(10, 'err', -1)] return [] def plugin(version): return DepsPlugin mypy-0.761/test-data/unit/plugins/dyn_class.py0000644€tŠÔÚ€2›s®0000000311713576752246025601 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin from mypy.nodes import ( ClassDef, Block, TypeInfo, SymbolTable, SymbolTableNode, GDEF, Var ) from mypy.types import Instance DECL_BASES = set() class DynPlugin(Plugin): def get_dynamic_class_hook(self, fullname): if fullname == 'mod.declarative_base': return add_info_hook return None def get_base_class_hook(self, fullname: str): if fullname in DECL_BASES: return replace_col_hook return None def add_info_hook(ctx): class_def = ClassDef(ctx.name, Block([])) class_def.fullname = ctx.api.qualified_name(ctx.name) info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id) class_def.info = info obj = ctx.api.builtin_type('builtins.object') info.mro = [info, obj.type] info.bases = [obj] ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info)) DECL_BASES.add(class_def.fullname) def replace_col_hook(ctx): info = ctx.cls.info for sym in info.names.values(): node = sym.node if isinstance(node, Var) and isinstance(node.type, Instance): if node.type.type.fullname == 'mod.Column': new_sym = ctx.api.lookup_fully_qualified_or_none('mod.Instr') if new_sym: new_info = new_sym.node assert isinstance(new_info, TypeInfo) node.type = Instance(new_info, node.type.args.copy(), node.type.line, node.type.column) def plugin(version): return DynPlugin mypy-0.761/test-data/unit/plugins/dyn_class_from_method.py0000644€tŠÔÚ€2›s®0000000176313576752246030171 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.nodes import (Block, ClassDef, GDEF, SymbolTable, SymbolTableNode, TypeInfo) from mypy.plugin import DynamicClassDefContext, Plugin from mypy.types import Instance class 
DynPlugin(Plugin): def get_dynamic_class_hook(self, fullname): if 'from_queryset' in fullname: return add_info_hook return None def add_info_hook(ctx: DynamicClassDefContext): class_def = ClassDef(ctx.name, Block([])) class_def.fullname = ctx.api.qualified_name(ctx.name) info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id) class_def.info = info queryset_type_fullname = ctx.call.args[0].fullname queryset_info = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname).node # type: TypeInfo obj = ctx.api.builtin_type('builtins.object') info.mro = [info, queryset_info, obj.type] info.bases = [Instance(queryset_info, [])] ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info)) def plugin(version): return DynPlugin mypy-0.761/test-data/unit/plugins/fnplugin.py0000644€tŠÔÚ€2›s®0000000052213576752246025441 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname == '__main__.f': return my_hook assert fullname is not None return None def my_hook(ctx): return ctx.api.named_generic_type('builtins.int', []) def plugin(version): return MyPlugin mypy-0.761/test-data/unit/plugins/fully_qualified_test_hook.py0000644€tŠÔÚ€2›s®0000000114013576752246031051 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import CallableType, MethodSigContext, Plugin class FullyQualifiedTestPlugin(Plugin): def get_method_signature_hook(self, fullname): # Ensure that all names are fully qualified if 'FullyQualifiedTest' in fullname: assert fullname.startswith('__main__.') and not ' of ' in fullname, fullname return my_hook return None def my_hook(ctx: MethodSigContext) -> CallableType: return ctx.default_signature.copy_modified(ret_type=ctx.api.named_generic_type('builtins.int', [])) def plugin(version): return FullyQualifiedTestPlugin mypy-0.761/test-data/unit/plugins/method_sig_hook.py0000644€tŠÔÚ€2›s®0000000203313576752246026760 0ustar jukkaDROPBOX\Domain 
Users00000000000000from mypy.plugin import CallableType, CheckerPluginInterface, MethodSigContext, Plugin from mypy.types import Instance, Type class MethodSigPlugin(Plugin): def get_method_signature_hook(self, fullname): # Ensure that all names are fully qualified assert not fullname.endswith(' of Foo') if fullname.startswith('__main__.Foo.'): return my_hook return None def _str_to_int(api: CheckerPluginInterface, typ: Type) -> Type: if isinstance(typ, Instance): if typ.type.fullname == 'builtins.str': return api.named_generic_type('builtins.int', []) elif typ.args: return typ.copy_modified(args=[_str_to_int(api, t) for t in typ.args]) return typ def my_hook(ctx: MethodSigContext) -> CallableType: return ctx.default_signature.copy_modified( arg_types=[_str_to_int(ctx.api, t) for t in ctx.default_signature.arg_types], ret_type=_str_to_int(ctx.api, ctx.default_signature.ret_type), ) def plugin(version): return MethodSigPlugin mypy-0.761/test-data/unit/plugins/named_callable.py0000644€tŠÔÚ€2›s®0000000150613576752246026525 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin from mypy.types import CallableType class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname == 'm.decorator1': return decorator_call_hook if fullname == 'm._decorated': # This is a dummy name generated by the plugin return decorate_hook return None def decorator_call_hook(ctx): if isinstance(ctx.default_return_type, CallableType): return ctx.default_return_type.copy_modified(name='m._decorated') return ctx.default_return_type def decorate_hook(ctx): if isinstance(ctx.default_return_type, CallableType): return ctx.default_return_type.copy_modified( ret_type=ctx.api.named_generic_type('builtins.str', [])) return ctx.default_return_type def plugin(version): return MyPlugin mypy-0.761/test-data/unit/plugins/noentry.py0000644€tŠÔÚ€2›s®0000000001713576752246025314 0ustar jukkaDROPBOX\Domain Users00000000000000# empty plugin 
mypy-0.761/test-data/unit/plugins/plugin2.py0000644€tŠÔÚ€2›s®0000000047613576752246025207 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin class Plugin2(Plugin): def get_function_hook(self, fullname): if fullname in ('__main__.f', '__main__.g'): return str_hook return None def str_hook(ctx): return ctx.api.named_generic_type('builtins.str', []) def plugin(version): return Plugin2 mypy-0.761/test-data/unit/plugins/type_anal_hook.py0000644€tŠÔÚ€2›s®0000000302513576752246026614 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Callable from mypy.plugin import Plugin, AnalyzeTypeContext from mypy.types import Type, UnboundType, TypeList, AnyType, CallableType, TypeOfAny # The official name changed to NoneType but we have an alias for plugin compat reasons # so we'll keep testing that here. from mypy.types import NoneTyp class TypeAnalyzePlugin(Plugin): def get_type_analyze_hook(self, fullname: str ) -> Optional[Callable[[AnalyzeTypeContext], Type]]: if fullname == 'm.Signal': return signal_type_analyze_callback return None def signal_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type: if (len(ctx.type.args) != 1 or not isinstance(ctx.type.args[0], TypeList)): ctx.api.fail('Invalid "Signal" type (expected "Signal[[t, ...]]")', ctx.context) return AnyType(TypeOfAny.from_error) args = ctx.type.args[0] assert isinstance(args, TypeList) analyzed = ctx.api.analyze_callable_args(args) if analyzed is None: return AnyType(TypeOfAny.from_error) # Error generated elsewhere arg_types, arg_kinds, arg_names = analyzed arg_types = [ctx.api.analyze_type(arg) for arg in arg_types] type_arg = CallableType(arg_types, arg_kinds, arg_names, NoneTyp(), ctx.api.named_type('builtins.function', [])) return ctx.api.named_type('m.Signal', [type_arg]) def plugin(version): return TypeAnalyzePlugin mypy-0.761/test-data/unit/plugins/union_method.py0000644€tŠÔÚ€2›s®0000000304713576752246026314 0ustar jukkaDROPBOX\Domain Users00000000000000from 
mypy.plugin import ( CallableType, CheckerPluginInterface, MethodSigContext, MethodContext, Plugin ) from mypy.types import Instance, Type class MethodPlugin(Plugin): def get_method_signature_hook(self, fullname): if fullname.startswith('__main__.Foo.'): return my_meth_sig_hook return None def get_method_hook(self, fullname): if fullname.startswith('__main__.Bar.'): return my_meth_hook return None def _str_to_int(api: CheckerPluginInterface, typ: Type) -> Type: if isinstance(typ, Instance): if typ.type.fullname == 'builtins.str': return api.named_generic_type('builtins.int', []) elif typ.args: return typ.copy_modified(args=[_str_to_int(api, t) for t in typ.args]) return typ def _float_to_int(api: CheckerPluginInterface, typ: Type) -> Type: if isinstance(typ, Instance): if typ.type.fullname == 'builtins.float': return api.named_generic_type('builtins.int', []) elif typ.args: return typ.copy_modified(args=[_float_to_int(api, t) for t in typ.args]) return typ def my_meth_sig_hook(ctx: MethodSigContext) -> CallableType: return ctx.default_signature.copy_modified( arg_types=[_str_to_int(ctx.api, t) for t in ctx.default_signature.arg_types], ret_type=_str_to_int(ctx.api, ctx.default_signature.ret_type), ) def my_meth_hook(ctx: MethodContext) -> Type: return _float_to_int(ctx.api, ctx.default_return_type) def plugin(version): return MethodPlugin mypy-0.761/test-data/unit/python2eval.test0000644€tŠÔÚ€2›s®0000002255213576752246024747 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type checking mypy programs using full stubs and running -- using CPython (Python 2 mode). -- -- These are mostly regression tests -- no attempt is made to make these -- complete. 
[case testAbs2_python2] n = None # type: int f = None # type: float n = abs(1) abs(1) + 'x' # Error f = abs(1.1) abs(1.1) + 'x' # Error [out] _program.py:4: error: Unsupported operand types for + ("int" and "str") _program.py:6: error: Unsupported operand types for + ("float" and "str") [case testUnicode_python2] x = unicode('xyz', 'latin1') print x x = u'foo' print repr(x) [out] xyz u'foo' [case testXrangeAndRange_python2] for i in xrange(2): print i for i in range(3): print i [out] 0 1 0 1 2 [case testIterator_python2] import typing, sys x = iter('bar') print x.next(), x.next() [out] b a [case testEncodeAndDecode_python2] print 'a'.encode('latin1') print 'b'.decode('latin1') print u'c'.encode('latin1') print u'd'.decode('latin1') [out] a b c d [case testHasKey_python2] d = {1: 'x'} print d.has_key(1) print d.has_key(2) [out] True False [case testIntegerDivision_python2] x = 1 / 2 x() [out] _program.py:2: error: "int" not callable [case testFloatDivision_python2] x = 1.0 / 2.0 x = 1.0 / 2 x = 1 / 2.0 x = 1.5 [out] [case testAnyStr_python2] from typing import AnyStr def f(x): # type: (AnyStr) -> AnyStr if isinstance(x, str): return 'foo' else: return u'zar' print f('') print f(u'') [out] foo zar [case testGenericPatterns_python2] from typing import Pattern import re p = None # type: Pattern[unicode] p = re.compile(u'foo*') b = None # type: Pattern[str] b = re.compile('foo*') print(p.match(u'fooo').group(0)) [out] fooo [case testGenericMatch_python2] from typing import Match import re def f(m): # type: (Match[str]) -> None print(m.group(0)) f(re.match('x*', 'xxy')) [out] xx [case testFromFuturePrintFunction_python2] from __future__ import print_function print('a', 'b') [out] a b [case testFromFutureImportUnicodeLiterals_python2] from __future__ import unicode_literals print '>', ['a', b'b', u'c'] [out] > [u'a', 'b', u'c'] [case testUnicodeLiteralsKwargs_python2] from __future__ import unicode_literals def f(**kwargs): # type: (...) 
-> None pass params = {'a': 'b'} f(**params) [out] [case testUnicodeStringKwargs_python2] def f(**kwargs): # type: (...) -> None pass params = {u'a': 'b'} f(**params) [out] [case testStrKwargs_python2] def f(**kwargs): # type: (...) -> None pass params = {'a': 'b'} f(**params) [out] [case testFromFutureImportUnicodeLiterals2_python2] from __future__ import unicode_literals def f(x): # type: (str) -> None pass f(b'') f(u'') f('') [out] _program.py:5: error: Argument 1 to "f" has incompatible type "unicode"; expected "str" _program.py:6: error: Argument 1 to "f" has incompatible type "unicode"; expected "str" [case testStrUnicodeCompatibility_python2] def f(s): # type: (unicode) -> None pass f(u'') f('') [out] [case testStrUnicodeCompatibilityInBuiltins_python2] 'x'.count('x') 'x'.count(u'x') [out] [case testTupleAsSubtypeOfSequence_python2] from typing import TypeVar, Sequence T = TypeVar('T') def f(a): # type: (Sequence[T]) -> None print a f(tuple()) [out] () [case testIOTypes_python2] from typing import IO, TextIO, BinaryIO, Any class X(IO[str]): pass class Y(TextIO): pass class Z(BinaryIO): pass [out] [case testOpenReturnType_python2] import typing f = open('/tmp/xyz', 'w') f.write(u'foo') f.write('bar') f.close() [out] _program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "unicode"; expected "str" [case testPrintFunctionWithFileArg_python2] from __future__ import print_function import typing if 1 == 2: # Don't want to run the code below, since it would create a file. 
f = open('/tmp/xyz', 'w') print('foo', file=f) f.close() print('ok') [out] ok [case testStringIO_python2] import typing import io c = io.StringIO() c.write(u'\x89') print(repr(c.getvalue())) [out] u'\x89' [case testBytesIO_python2] import typing import io c = io.BytesIO() c.write('\x89') print(repr(c.getvalue())) [out] '\x89' [case testTextIOWrapper_python2] import typing import io b = io.BytesIO(u'\xab'.encode('utf8')) w = io.TextIOWrapper(b, encoding='utf8') print(repr(w.read())) [out] u'\xab' [case testIoOpen_python2] import typing import io if 1 == 2: # Only type check, do not execute f = io.open('/tmp/xyz', 'w', encoding='utf8') f.write(u'\xab') f.close() print 'ok' [out] ok [case testStrAdd_python2] import typing s = '' u = u'' n = 0 if int(): n = s + '' # E s = s + u'' # E [out] _program.py:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") _program.py:7: error: Incompatible types in assignment (expression has type "unicode", variable has type "str") [case testStrJoin_python2] s = '' u = u'' n = 0 if int(): n = ''.join(['']) # Error if int(): s = ''.join([u'']) # Error [out] _program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") _program.py:7: error: Incompatible types in assignment (expression has type "unicode", variable has type "str") [case testNamedTuple_python2] from typing import NamedTuple from collections import namedtuple X = namedtuple('X', ['a', 'b']) x = X(a=1, b='s') x.c x.a N = NamedTuple(u'N', [(u'x', int)]) n = namedtuple(u'n', u'x y') [out] _program.py:5: error: "X" has no attribute "c" [case testAssignToComplexReal_python2] import typing x = 4j y = x.real if int(): y = x # Error x.imag = 2.0 # Error [out] _program.py:5: error: Incompatible types in assignment (expression has type "complex", variable has type "float") _program.py:6: error: Property "imag" defined in "complex" is read-only [case testComplexArithmetic_python2] import typing print 
5 + 8j print 3j * 2.0 print 4j / 2.0 [out] (5+8j) 6j 2j [case testSuperNew_python2] from typing import Dict, Any class MyType(type): def __new__(cls, name, bases, namespace): # type: (str, tuple, Dict[str, Any]) -> Any return super(MyType, cls).__new__(cls, name + 'x', bases, namespace) class A(object): __metaclass__ = MyType print(type(A()).__name__) [out] Ax [case testUnicodeAndOverloading_python2] from m import f f(1) f('') f(u'') f(b'') [file m.pyi] from typing import overload @overload def f(x): # type: (bytearray) -> int pass @overload def f(x): # type: (unicode) -> int pass [out] _program.py:2: error: No overload variant of "f" matches argument type "int" _program.py:2: note: Possible overload variants: _program.py:2: note: def f(x: bytearray) -> int _program.py:2: note: def f(x: unicode) -> int [case testByteArrayStrCompatibility_python2] def f(x): # type: (str) -> None pass f(bytearray('foo')) [case testAbstractProperty_python2] from abc import abstractproperty, ABCMeta class A: __metaclass__ = ABCMeta @abstractproperty def x(self): # type: () -> int pass class B(A): @property def x(self): # type: () -> int return 3 b = B() print b.x + 1 [out] 4 [case testReModuleBytes_python2] # Regression tests for various overloads in the re module -- bytes version import re if False: bre = b'a+' bpat = re.compile(bre) bpat = re.compile(bpat) re.search(bre, b'').groups() re.search(bre, u'') re.search(bpat, b'').groups() re.search(bpat, u'') # match(), split(), findall(), finditer() are much the same, so skip those. 
# sub(), subn() have more overloads and we are checking these: re.sub(bre, b'', b'') + b'' re.sub(bpat, b'', b'') + b'' re.sub(bre, lambda m: b'', b'') + b'' re.sub(bpat, lambda m: b'', b'') + b'' re.subn(bre, b'', b'')[0] + b'' re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] [case testReModuleString_python2] # Regression tests for various overloads in the re module -- string version import re ure = u'a+' upat = re.compile(ure) upat = re.compile(upat) re.search(ure, u'a').groups() re.search(ure, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence re.search(upat, u'a').groups() re.search(upat, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence # match(), split(), findall(), finditer() are much the same, so skip those. # sus(), susn() have more overloads and we are checking these: re.sub(ure, u'', u'') + u'' re.sub(upat, u'', u'') + u'' re.sub(ure, lambda m: u'', u'') + u'' re.sub(upat, lambda m: u'', u'') + u'' re.subn(ure, u'', u'')[0] + u'' re.subn(upat, u'', u'')[0] + u'' re.subn(ure, lambda m: u'', u'')[0] + u'' re.subn(upat, lambda m: u'', u'')[0] + u'' [out] [case testYieldRegressionTypingAwaitable_python2] # Make sure we don't reference typing.Awaitable in Python 2 mode. 
def g(): # type: () -> int yield [out] _program.py:2: error: The return type of a generator function should be "Generator" or one of its supertypes [case testOsPathJoinWorksWithAny_python2] import os def f(): # no annotation return 'tests' path = 'test' path = os.path.join(f(), 'test.py') [out] [case testBytesWorkInPython2WithFullStubs_python2] MYPY = False if MYPY: import lib [file lib.pyi] x = b'abc' [out] [case testNestedGenericFailedInference] from collections import defaultdict def foo() -> None: x = defaultdict(list) # type: ignore x['lol'].append(10) reveal_type(x) [out] _testNestedGenericFailedInference.py:5: note: Revealed type is 'collections.defaultdict[Any, builtins.list[Any]]' mypy-0.761/test-data/unit/pythoneval-asyncio.test0000644€tŠÔÚ€2›s®0000003107313576752246026326 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type checking mypy programs using full stubs and running -- using CPython. -- -- These are mostly regression tests -- no attempt is made to make these -- complete. -- -- This test file check Asyncio and yield from interaction [case testImportAsyncio] import asyncio print('Imported') [out] Imported [case testSimpleCoroutineSleep] from typing import Any, Generator import asyncio from asyncio import Future @asyncio.coroutine def greet_every_two_seconds() -> 'Generator[Any, None, None]': n = 0 while n < 5: print('Prev', n) yield from asyncio.sleep(0.1) print('After', n) n += 1 loop = asyncio.get_event_loop() try: loop.run_until_complete(greet_every_two_seconds()) finally: loop.close() [out] Prev 0 After 0 Prev 1 After 1 Prev 2 After 2 Prev 3 After 3 Prev 4 After 4 [case testCoroutineCallingOtherCoroutine] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def compute(x: int, y: int) -> 'Generator[Any, None, int]': print("Compute %s + %s ..." 
% (x, y)) yield from asyncio.sleep(0.1) return x + y # Here the int is wrapped in Future[int] @asyncio.coroutine def print_sum(x: int, y: int) -> 'Generator[Any, None, None]': result = yield from compute(x, y) # The type of result will be int (is extracted from Future[int] print("%s + %s = %s" % (x, y, result)) loop = asyncio.get_event_loop() loop.run_until_complete(print_sum(1, 2)) loop.close() [out] Compute 1 + 2 ... 1 + 2 = 3 [case testCoroutineChangingFuture] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(0.1) future.set_result('Future is done!') loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) loop.run_until_complete(future) print(future.result()) loop.close() [out] Future is done! [case testFunctionAssignedAsCallback] import typing from typing import Generator, Any import asyncio from asyncio import Future, AbstractEventLoop @asyncio.coroutine def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result('Callback works!') def got_result(future: 'Future[str]') -> None: print(future.result()) loop.stop() loop = asyncio.get_event_loop() # type: AbstractEventLoop future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) # Here create a task with the function. (The Task need a Future[T] as first argument) future.add_done_callback(got_result) # and assign the callback to the future try: loop.run_forever() finally: loop.close() [out] Callback works! [case testMultipleTasks] import typing from typing import Generator, Any import asyncio from asyncio import Task, Future @asyncio.coroutine def factorial(name, number) -> 'Generator[Any, None, None]': f = 1 for i in range(2, number+1): print("Task %s: Compute factorial(%s)..." 
% (name, i)) yield from asyncio.sleep(0.1) f *= i print("Task %s: factorial(%s) = %s" % (name, number, f)) loop = asyncio.get_event_loop() tasks = [ asyncio.Task(factorial("A", 2)), asyncio.Task(factorial("B", 3)), asyncio.Task(factorial("C", 4))] loop.run_until_complete(asyncio.wait(tasks)) loop.close() [out] Task A: Compute factorial(2)... Task B: Compute factorial(2)... Task C: Compute factorial(2)... Task A: factorial(2) = 2 Task B: Compute factorial(3)... Task C: Compute factorial(3)... Task B: factorial(3) = 6 Task C: Compute factorial(4)... Task C: factorial(4) = 24 [case testConcatenatedCoroutines] import typing from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def h4() -> 'Generator[Any, None, int]': x = yield from future return x @asyncio.coroutine def h3() -> 'Generator[Any, None, int]': x = yield from h4() print("h3: %s" % x) return x @asyncio.coroutine def h2() -> 'Generator[Any, None, int]': x = yield from h3() print("h2: %s" % x) return x @asyncio.coroutine def h() -> 'Generator[Any, None, None]': x = yield from h2() print("h: %s" % x) loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[int] future.set_result(42) loop.run_until_complete(h()) print("Outside %s" % future.result()) loop.close() [out] h3: 42 h2: 42 h: 42 Outside 42 [case testConcatenatedCoroutinesReturningFutures] import typing from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def h4() -> 'Generator[Any, None, Future[int]]': yield from asyncio.sleep(0.1) f = asyncio.Future() #type: Future[int] return f @asyncio.coroutine def h3() -> 'Generator[Any, None, Future[Future[int]]]': x = yield from h4() x.set_result(42) f = asyncio.Future() #type: Future[Future[int]] f.set_result(x) return f @asyncio.coroutine def h() -> 'Generator[Any, None, None]': print("Before") x = yield from h3() y = yield from x z = yield from y print(z) def normalize(future): # The str conversion seems 
inconsistent; not sure exactly why. Normalize # the result. return str(future).replace(' Future> [case testCoroutineWithOwnClass] import typing from typing import Generator, Any import asyncio from asyncio import Future class A: def __init__(self, x: int) -> None: self.x = x @asyncio.coroutine def h() -> 'Generator[Any, None, None]': x = yield from future print("h: %s" % x.x) loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[A] future.set_result(A(42)) loop.run_until_complete(h()) print("Outside %s" % future.result().x) loop.close() [out] h: 42 Outside 42 -- Errors [case testErrorAssigningCoroutineThatDontReturn] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def greet() -> 'Generator[Any, None, None]': yield from asyncio.sleep(0.2) print('Hello World') @asyncio.coroutine def test() -> 'Generator[Any, None, None]': yield from greet() x = yield from greet() # Error loop = asyncio.get_event_loop() try: loop.run_until_complete(test()) finally: loop.close() [out] _program.py:13: error: Function does not return a value [case testErrorReturnIsNotTheSameType] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def compute(x: int, y: int) -> 'Generator[Any, None, int]': print("Compute %s + %s ..." 
% (x, y)) yield from asyncio.sleep(0.1) return str(x + y) # Error @asyncio.coroutine def print_sum(x: int, y: int) -> 'Generator[Any, None, None]': result = yield from compute(x, y) print("%s + %s = %s" % (x, y, result)) loop = asyncio.get_event_loop() loop.run_until_complete(print_sum(1, 2)) loop.close() [out] _program.py:9: error: Incompatible return value type (got "str", expected "int") [case testErrorSetFutureDifferentInternalType] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result(42) # Error loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) loop.run_until_complete(future) print(future.result()) loop.close() [out] _program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "int"; expected "str" [case testErrorUsingDifferentFutureType] from typing import Any, Generator import asyncio from asyncio import Future @asyncio.coroutine def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result(42) loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) # Error loop.run_until_complete(future) print(future.result()) loop.close() [out] _program.py:12: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]" [case testErrorUsingDifferentFutureTypeAndSetFutureDifferentInternalType] from typing import Generator, Any import asyncio from asyncio import Future asyncio.coroutine def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result('42') #Try to set an str as result to a Future[int] loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) # Error 
loop.run_until_complete(future) print(future.result()) loop.close() [out] _program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "str"; expected "int" _program.py:12: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]" [case testErrorSettingCallbackWithDifferentFutureType] import typing from typing import Generator, Any import asyncio from asyncio import Future, AbstractEventLoop @asyncio.coroutine def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result('Future is done!') def got_result(future: 'Future[int]') -> None: print(future.result()) loop.stop() loop = asyncio.get_event_loop() # type: AbstractEventLoop future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) future.add_done_callback(got_result) # Error try: loop.run_forever() finally: loop.close() [out] _program.py:18: error: Argument 1 to "add_done_callback" of "Future" has incompatible type "Callable[[Future[int]], None]"; expected "Callable[[Future[str]], Any]" [case testErrorOneMoreFutureInReturnType] import typing from typing import Any, Generator import asyncio from asyncio import Future @asyncio.coroutine def h4() -> 'Generator[Any, None, Future[int]]': yield from asyncio.sleep(1) f = asyncio.Future() #type: Future[int] return f @asyncio.coroutine def h3() -> 'Generator[Any, None, Future[Future[Future[int]]]]': x = yield from h4() x.set_result(42) f = asyncio.Future() #type: Future[Future[int]] f.set_result(x) return f @asyncio.coroutine def h() -> 'Generator[Any, None, None]': print("Before") x = yield from h3() y = yield from x z = yield from y print(z) print(y) print(x) loop = asyncio.get_event_loop() loop.run_until_complete(h()) loop.close() [out] _program.py:18: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[Future[Future[int]]]") [case testErrorOneLessFutureInReturnType] import typing from typing 
import Any, Generator import asyncio from asyncio import Future @asyncio.coroutine def h4() -> 'Generator[Any, None, Future[int]]': yield from asyncio.sleep(1) f = asyncio.Future() #type: Future[int] return f @asyncio.coroutine def h3() -> 'Generator[Any, None, Future[int]]': x = yield from h4() x.set_result(42) f = asyncio.Future() #type: Future[Future[int]] f.set_result(x) return f @asyncio.coroutine def h() -> 'Generator[Any, None, None]': print("Before") x = yield from h3() y = yield from x print(y) print(x) loop = asyncio.get_event_loop() loop.run_until_complete(h()) loop.close() [out] _program.py:18: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[int]") [case testErrorAssignmentDifferentType] import typing from typing import Generator, Any import asyncio from asyncio import Future class A: def __init__(self, x: int) -> None: self.x = x class B: def __init__(self, x: int) -> None: self.x = x @asyncio.coroutine def h() -> 'Generator[Any, None, None]': x = yield from future # type: B # Error print("h: %s" % x.x) loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[A] future.set_result(A(42)) loop.run_until_complete(h()) loop.close() [out] _program.py:16: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testForwardRefToBadAsyncShouldNotCrash_newsemanal] from typing import TypeVar import asyncio T = TypeVar('T') P = whatever # type: ignore def test() -> None: reveal_type(bad) bad(0) @asyncio.coroutine def bad(arg: P) -> T: pass [out] _program.py:8: note: Revealed type is 'def [T] (arg: P?) -> T`-1' _program.py:12: error: Variable "_testForwardRefToBadAsyncShouldNotCrash_newsemanal.P" is not valid as a type mypy-0.761/test-data/unit/pythoneval.test0000644€tŠÔÚ€2›s®0000011141013576752246024655 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type checking mypy programs using full stubs and running -- using CPython. 
-- -- These are mostly regression tests -- no attempt is made to make these -- complete. [case testHello] import typing print('hello, world') [out] hello, world [case testReversed] from typing import Reversible class A(Reversible): def __iter__(self): return iter('oof') def __reversed__(self): return iter('foo') print(list(reversed(range(5)))) print(list(reversed([1,2,3]))) print(list(reversed('abc'))) print(list(reversed(A()))) [out] -- Escape bracket at line beginning \[4, 3, 2, 1, 0] \[3, 2, 1] \['c', 'b', 'a'] \['f', 'o', 'o'] [case testIntAndFloatConversion] from typing import SupportsInt, SupportsFloat class A(SupportsInt): def __int__(self): return 5 class B(SupportsFloat): def __float__(self): return 1.2 print(int(1)) print(int(6.2)) print(int('3')) print(int(b'4')) print(int(A())) print(float(-9)) print(float(B())) [out] 1 6 3 4 5 -9.0 1.2 [case testAbs] from typing import SupportsAbs class A(SupportsAbs[float]): def __abs__(self) -> float: return 5.5 print(abs(-1)) print(abs(-1.2)) print(abs(A())) [out] 1 1.2 5.5 [case testAbs2] n = None # type: int f = None # type: float n = abs(1) abs(1) + 'x' # Error f = abs(1.1) abs(1.1) + 'x' # Error [out] _program.py:4: error: Unsupported operand types for + ("int" and "str") _program.py:6: error: Unsupported operand types for + ("float" and "str") [case testRound] from typing import SupportsRound class A(SupportsRound): def __round__(self, ndigits=0): return 'x%d' % ndigits print(round(1.6)) print(round(A())) print(round(A(), 2)) [out] 2 x0 x2 [case testCallMethodViaTypeObject] import typing print(list.__add__([1, 2], [3, 4])) [out] \[1, 2, 3, 4] [case testInheritedClassAttribute] import typing class A: x = 1 def f(self) -> None: print('f') class B(A): pass B.f(None) print(B.x) [out] f 1 [case testModuleAttributes] import math import typing print(math.__name__) print(type(math.__dict__)) print(type(math.__doc__ or '')) print(math.__class__) [out] math [case testSpecialAttributes] import typing class A: """A 
docstring!""" print(A().__doc__) print(A().__class__) [out] A docstring! [case testFunctionAttributes] import typing ord.__class__ print(type(ord.__doc__ + '')) print(ord.__name__) print(ord.__module__) [out] ord builtins [case testTypeAttributes] import typing print(str.__class__) print(type(str.__doc__)) print(str.__name__) print(str.__module__) print(str.__dict__ is not None) [out] str builtins True [case testBoolCompatibilityWithInt] import typing x = 0 x = True print(bool('x')) print(bool('')) [out] True False [case testCallBuiltinTypeObjectsWithoutArguments] import typing print(int()) print(repr(str())) print(repr(bytes())) print(float()) print(bool()) [out] 0 '' b'' 0.0 False [case testIntegerDivision] import typing x = 1 / 2 x = 1.5 [out] [case testIntMethods] import typing print(int.from_bytes(b'ab', 'big')) n = 0 print(n.from_bytes(b'ac', 'big')) print(n.from_bytes([2, 3], 'big')) print(n.to_bytes(2, 'big')) [out] 24930 24931 515 b'\x00\x00' [case testFloatMethods] import typing print(1.5.as_integer_ratio()) print(1.5.hex()) print(2.0.is_integer()) print(float.fromhex('0x1.8')) [out] (3, 2) 0x1.8000000000000p+0 True 1.5 [case testDictFromkeys] import typing d = dict.fromkeys('foo') d['x'] = 2 d2 = dict.fromkeys([1, 2], b'') d2[2] = b'foo' [out] [case testIsinstanceWithTuple] from typing import cast, Any x = cast(Any, (1, 'x')) if isinstance(x, tuple): print(x[0], x[1]) [out] 1 x [case testAnyStr] from typing import AnyStr def f(x: AnyStr) -> AnyStr: if isinstance(x, str): return 'foo' else: return b'zar' print(f('')) print(f(b'')) [out] foo b'zar' [case testNameNotImportedFromTyping] import typing cast(int, 2) [out] _program.py:2: error: Name 'cast' is not defined _program.py:2: note: Did you forget to import it from "typing"? 
(Suggestion: "from typing import cast") [case testBinaryIOType] from typing import BinaryIO def f(f: BinaryIO) -> None: f.write(b'foo') f.write(bytearray(b'foo')) [out] [case testIOTypes] from typing import IO import sys def txt(f: IO[str]) -> None: f.write('foo') f.write(b'foo') def bin(f: IO[bytes]) -> None: f.write(b'foo') f.write(bytearray(b'foo')) txt(sys.stdout) bin(sys.stdout) [out] _program.py:5: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str" _program.py:10: error: Argument 1 to "bin" has incompatible type "TextIO"; expected "IO[bytes]" [case testBuiltinOpen] f = open('x') f.write('x') f.write(b'x') f.foobar() [out] _program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str" _program.py:4: error: "TextIO" has no attribute "foobar" [case testOpenReturnTypeInference] reveal_type(open('x')) reveal_type(open('x', 'r')) reveal_type(open('x', 'rb')) mode = 'rb' reveal_type(open('x', mode)) [out] _program.py:1: note: Revealed type is 'typing.TextIO' _program.py:2: note: Revealed type is 'typing.TextIO' _program.py:3: note: Revealed type is 'typing.BinaryIO' _program.py:5: note: Revealed type is 'typing.IO[Any]' [case testOpenReturnTypeInferenceSpecialCases] reveal_type(open()) reveal_type(open(mode='rb', file='x')) reveal_type(open(file='x', mode='rb')) mode = 'rb' reveal_type(open(mode=mode, file='r')) [out] _testOpenReturnTypeInferenceSpecialCases.py:1: error: Too few arguments for "open" _testOpenReturnTypeInferenceSpecialCases.py:1: note: Revealed type is 'typing.TextIO' _testOpenReturnTypeInferenceSpecialCases.py:2: note: Revealed type is 'typing.BinaryIO' _testOpenReturnTypeInferenceSpecialCases.py:3: note: Revealed type is 'typing.BinaryIO' _testOpenReturnTypeInferenceSpecialCases.py:5: note: Revealed type is 'typing.IO[Any]' [case testPathOpenReturnTypeInference] from pathlib import Path p = Path("x") reveal_type(p.open()) reveal_type(p.open('r')) reveal_type(p.open('rb')) mode = 
'rb' reveal_type(p.open(mode)) [out] _program.py:3: note: Revealed type is 'typing.TextIO' _program.py:4: note: Revealed type is 'typing.TextIO' _program.py:5: note: Revealed type is 'typing.BinaryIO' _program.py:7: note: Revealed type is 'typing.IO[Any]' [case testPathOpenReturnTypeInferenceSpecialCases] from pathlib import Path p = Path("x") reveal_type(p.open(mode='rb', errors='replace')) reveal_type(p.open(errors='replace', mode='rb')) mode = 'rb' reveal_type(p.open(mode=mode, errors='replace')) [out] _program.py:3: note: Revealed type is 'typing.BinaryIO' _program.py:4: note: Revealed type is 'typing.BinaryIO' _program.py:6: note: Revealed type is 'typing.IO[Any]' [case testGenericPatterns] from typing import Pattern import re p = None # type: Pattern[str] p = re.compile('foo*') b = None # type: Pattern[bytes] b = re.compile(b'foo*') print(p.match('fooo').group(0)) [out] fooo [case testGenericMatch] from typing import Match import re def f(m: Match[bytes]) -> None: print(m.group(0)) f(re.match(b'x*', b'xxy')) [out] b'xx' [case testIntFloatDucktyping] x = None # type: float x = 2.2 x = 2 def f(x: float) -> None: pass f(1.1) f(1) [out] [case testsFloatOperations] import typing print(1.5 + 1.5) print(1.5 + 1) [out] 3.0 2.5 [case testMathFunctionWithIntArgument] import typing import math math.sin(2) math.sin(2.2) [case testAbsReturnType] f = None # type: float n = None # type: int n = abs(2) f = abs(2.2) abs(2.2) + 'x' [out] _program.py:6: error: Unsupported operand types for + ("float" and "str") [case testROperatorMethods] b = None # type: bytes s = None # type: str if int(): s = b'foo' * 5 # Error if int(): b = 5 * b'foo' if int(): b = b'foo' * 5 if int(): s = 5 * 'foo' if int(): s = 'foo' * 5 [out] _program.py:4: error: Incompatible types in assignment (expression has type "bytes", variable has type "str") [case testROperatorMethods2] import typing print(2 / 0.5) print(' ', 2 * [3, 4]) [out] 4.0 [3, 4, 3, 4] [case testNotImplemented] import typing class A: def 
__add__(self, x: int) -> int: if isinstance(x, int): return x + 1 return NotImplemented class B: def __radd__(self, x: A) -> str: return 'x' print(A() + 1) print(A() + B()) [out] 2 x [case testMappingMethods] # Regression test from typing import Mapping x = {'x': 'y'} # type: Mapping[str, str] print('x' in x) print('y' in x) [out] True False [case testOverlappingOperatorMethods] class X: pass class A: def __add__(self, x) -> int: if isinstance(x, X): return 1 return NotImplemented class B: def __radd__(self, x: A) -> str: return 'x' class C(X, B): pass b = None # type: B b = C() print(A() + b) [out] _program.py:9: error: Signatures of "__radd__" of "B" and "__add__" of "A" are unsafely overlapping [case testBytesAndBytearrayComparisons] import typing print(b'ab' < bytearray(b'b')) print(bytearray(b'ab') < b'a') [out] True False [case testBytesAndBytearrayComparisons2] import typing '' < b'' b'' < '' '' < bytearray() bytearray() < '' [out] _program.py:2: error: Unsupported operand types for < ("str" and "bytes") _program.py:3: error: Unsupported operand types for < ("bytes" and "str") _program.py:4: error: Unsupported operand types for < ("str" and "bytearray") _program.py:5: error: Unsupported operand types for < ("bytearray" and "str") [case testInplaceOperatorMethod] import typing a = [1] print('', a.__iadd__([2])) print('', a) [out] [1, 2] [1, 2] [case testListInplaceAdd] import typing a = [1] a += iter([2, 3]) print(tuple(a)) [out] (1, 2, 3) [case testInferHeterogeneousListOfIterables] from typing import Sequence s = ['x', 'y'] # type: Sequence[str] a = [['x', 'x'], 'fo', s, iter('foo'), {'aa'}] for i, x in enumerate(a): print(i, next(iter(x))) [out] 0 x 1 f 2 x 3 f 4 aa [case testTextIOProperties] import typing import sys print(type(sys.stdin.encoding)) print(type(sys.stdin.errors)) sys.stdin.line_buffering sys.stdin.buffer sys.stdin.newlines [out] [case testIOProperties] import typing import sys print(sys.stdin.name) print(sys.stdin.buffer.mode) [out] rb 
[case testFromFuturePrintFunction] from __future__ import print_function print('a', 'b') [out] a b [case testListMethods] import typing import sys l = [0, 1, 2, 3, 4] if sys.version >= '3.3': l.clear() else: l = [] l.append(0) print('>', l) if sys.version >= '3.3': m = l.copy() else: m = l[:] m.extend([1, 2, 3, 4]) print('>', m) print(l.index(0)) print(l.index(0, 0)) print(l.index(0, 0, 1)) try: print(l.index(1)) print('expected ValueError') except ValueError: pass l.insert(0, 1) print('>', l) print(l.pop(0)) print(l.pop()) m.remove(0) try: m.remove(0) print('expected ValueError') except ValueError: pass m.reverse() m.sort() m.sort(key=lambda x: -x) m.sort(reverse=False) m.sort(key=lambda x: -x, reverse=True) print('>', m) [out] > [0] > [0, 1, 2, 3, 4] 0 0 0 > [1, 0] 1 0 > [1, 2, 3, 4] [case testListOperators] import typing l = [0, 1] print('+', l + [2]) print('*', l * 2) print('*', 2 * l) print('in', 1 in l) print('==', l == [1, 2]) print('!=', l != [1, 2]) print('>', l > [1, 2, 3]) print('>=', l >= [1, 2, 3]) print('<', l < [1, 2, 3]) print('<=', l <= [1, 2, 3]) print('>[0]', l[0]) l += [2] print('+=', l) l *= 2 print('*=', l) print('iter', list(iter(l))) print('len', len(l)) print('repr', repr(l)) l[:3] = [] print('setslice', l) print('reversed', list(reversed(l))) [out] + [0, 1, 2] * [0, 1, 0, 1] * [0, 1, 0, 1] in True == False != True > False >= False < True <= True >[0] 0 += [0, 1, 2] *= [0, 1, 2, 0, 1, 2] iter [0, 1, 2, 0, 1, 2] len 6 repr [0, 1, 2, 0, 1, 2] setslice [0, 1, 2] reversed [2, 1, 0] [case testTupleAsSubtypeOfSequence] from typing import TypeVar, Sequence T = TypeVar('T') def f(a: Sequence[T]) -> None: print(a) f(tuple()) [out] () [case testMapWithLambdaSpecialCase] from typing import List, Iterator a = [[1], [3]] b = map(lambda y: y[0], a) print('>', list(b)) [out] > [1, 3] [case testInternalBuiltinDefinition] import typing def f(x: _T) -> None: pass s: FrozenSet [out] _program.py:2: error: Name '_T' is not defined _program.py:3: error: Name 
'FrozenSet' is not defined [case testVarArgsFunctionSubtyping] import typing def f(*args: str) -> str: return args[0] map(f, ['x']) map(f, [1]) [out] _program.py:4: error: Argument 1 to "map" has incompatible type "Callable[[VarArg(str)], str]"; expected "Callable[[int], str]" [case testMapStr] import typing x = range(3) a = list(map(str, x)) a + 1 [out] _program.py:4: error: Unsupported operand types for + ("List[str]" and "int") [case testRelativeImport] import typing from m import x print(x) [file m/__init__.py] from .n import x [file m/n.py] x = 1 [out] 1 [case testRelativeImport2] import typing from m.n import x print(x) [file m/__init__.py] [file m/n.py] from .nn import x [file m/nn.py] x = 2 [out] 2 [case testPyiTakesPrecedenceOverPy] import m m.f(1) [file m.py] def f(x): print(x) [file m.pyi] import typing def f(x: str) -> None: pass [out] _program.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testComplexArithmetic] import typing print(5 + 8j) print(3j * 2.0) print(4J / 2.0) [out] (5+8j) 6j 2j [case testComplexArithmetic2] x = 5 + 8j if int(): x = '' # E y = 3j * 2.0 if int(): y = '' # E [out] _program.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "complex") _program.py:6: error: Incompatible types in assignment (expression has type "str", variable has type "complex") [case testSuperNew] from typing import Dict, Any class MyType(type): def __new__(cls, name: str, bases: tuple, namespace: Dict[str, Any]) -> Any: return super().__new__(cls, name + 'x', bases, namespace) class A(metaclass=MyType): pass print(type(A()).__name__) [out] Ax [case testSubclassBothGenericAndNonGenericABC] from typing import Generic, TypeVar from abc import ABCMeta T = TypeVar('T') class A(metaclass=ABCMeta): pass class B(Generic[T]): pass class C(A, B): pass class D(B, A): pass class E(A, B[T], Generic[T]): pass class F(B[T], A, Generic[T]): pass def f(e: E[int], f: F[int]) -> None: pass [out] [case 
testTypeVariableTypeComparability] from typing import TypeVar T = TypeVar('T') def eq(x: T, y: T, z: T) -> T: if x == y: return y else: return z print(eq(1, 2, 3)) print(eq('x', 'x', 'z')) [out] 3 x [case testIntDecimalCompatibility] import typing from decimal import Decimal print(Decimal(1) + 2) print(Decimal(1) - 2) print(1 + Decimal('2.34')) print(1 - Decimal('2.34')) print(2 * Decimal('2.34')) [out] 3 -1 3.34 -1.34 4.68 [case testInstantiateBuiltinTypes] from typing import Dict, Set, List d = dict() # type: Dict[int, str] s = set() # type: Set[int] l = list() # type: List[int] str() bytes() bytearray() int() float() complex() slice(1) bool() [case testVariableLengthTupleError] from typing import Tuple def p(t: Tuple[str, ...]) -> None: n = 5 print(t[n]) for s in t: s() ''.startswith(('x', 'y')) ''.startswith(('x', b'y')) [out] _program.py:6: error: "str" not callable _program.py:8: error: Argument 1 to "startswith" of "str" has incompatible type "Tuple[str, bytes]"; expected "Union[str, Tuple[str, ...]]" [case testMultiplyTupleByInteger] n = 4 t = ('',) * n t + 1 [out] _program.py:3: error: No overload variant of "__add__" of "tuple" matches argument type "int" _program.py:3: note: Possible overload variants: _program.py:3: note: def __add__(self, Tuple[str, ...]) -> Tuple[str, ...] _program.py:3: note: def __add__(self, Tuple[Any, ...]) -> Tuple[Any, ...] [case testMultiplyTupleByIntegerReverse] n = 4 t = n * ('',) t + 1 [out] _program.py:3: error: No overload variant of "__add__" of "tuple" matches argument type "int" _program.py:3: note: Possible overload variants: _program.py:3: note: def __add__(self, Tuple[str, ...]) -> Tuple[str, ...] _program.py:3: note: def __add__(self, Tuple[Any, ...]) -> Tuple[Any, ...] 
[case testDictWithKeywordArgs] from typing import Dict, Any, List d1 = dict(a=1, b=2) # type: Dict[str, int] d2 = dict(a=1, b='') # type: Dict[str, int] # E d3 = dict(a=1, b=1) d3.xyz # E d4 = dict(a=1, b='') # type: Dict[str, Any] result = dict(x=[], y=[]) # type: Dict[str, List[str]] [out] _program.py:3: error: Dict entry 1 has incompatible type "str": "str"; expected "str": "int" _program.py:5: error: "Dict[str, int]" has no attribute "xyz" [case testDefaultDict] import typing as t from collections import defaultdict T = t.TypeVar('T') d1 = defaultdict(list) # type: t.DefaultDict[int, str] d2 = defaultdict() # type: t.DefaultDict[int, str] d2[0] = '0' d2['0'] = 0 def tst(dct: t.DefaultDict[int, T]) -> T: return dct[0] collections = ['coins', 'stamps', 'comics'] # type: t.List[str] d3 = defaultdict(str) # type: t.DefaultDict[int, str] collections[2] tst(defaultdict(list, {0: []})) tst(defaultdict(list, {'0': []})) class MyDDict(t.DefaultDict[int,T], t.Generic[T]): pass MyDDict(dict)['0'] MyDDict(dict)[0] [out] _program.py:6: error: Argument 1 to "defaultdict" has incompatible type "Type[List[Any]]"; expected "Callable[[], str]" _program.py:9: error: Invalid index type "str" for "defaultdict[int, str]"; expected type "int" _program.py:9: error: Incompatible types in assignment (expression has type "int", target has type "str") _program.py:19: error: Dict entry 0 has incompatible type "str": "List[]"; expected "int": "List[]" _program.py:23: error: Invalid index type "str" for "MyDDict[Dict[_KT, _VT]]"; expected type "int" [case testNoSubcriptionOfStdlibCollections] import collections from collections import Counter from typing import TypeVar collections.defaultdict[int, str]() Counter[int]() T = TypeVar('T') DDint = collections.defaultdict[T, int] d = DDint[str]() d[0] = 1 def f(d: collections.defaultdict[int, str]) -> None: ... 
[out] _program.py:5: error: "defaultdict" is not subscriptable _program.py:6: error: "Counter" is not subscriptable _program.py:9: error: "defaultdict" is not subscriptable _program.py:12: error: Invalid index type "int" for "defaultdict[str, int]"; expected type "str" _program.py:14: error: "defaultdict" is not subscriptable, use "typing.DefaultDict" instead [case testCollectionsAliases] import typing as t import collections as c o1 = c.Counter() # type: t.Counter[int] reveal_type(o1) o1['string'] o2 = c.ChainMap() # type: t.ChainMap[int, str] reveal_type(o2) o3 = c.deque() # type: t.Deque[int] reveal_type(o3) o4 = t.Counter[int]() reveal_type(o4) o5 = t.ChainMap[int, str]() reveal_type(o5) o6 = t.Deque[int]() reveal_type(o6) [out] _testCollectionsAliases.py:5: note: Revealed type is 'collections.Counter[builtins.int]' _testCollectionsAliases.py:6: error: Invalid index type "str" for "Counter[int]"; expected type "int" _testCollectionsAliases.py:9: note: Revealed type is 'collections.ChainMap[builtins.int, builtins.str]' _testCollectionsAliases.py:12: note: Revealed type is 'collections.deque[builtins.int]' _testCollectionsAliases.py:15: note: Revealed type is 'collections.Counter[builtins.int*]' _testCollectionsAliases.py:18: note: Revealed type is 'collections.ChainMap[builtins.int*, builtins.str*]' _testCollectionsAliases.py:21: note: Revealed type is 'collections.deque[builtins.int*]' [case testChainMapUnimported] ChainMap[int, str]() [out] _testChainMapUnimported.py:1: error: Name 'ChainMap' is not defined [case testDequeWrongCase] import collections import typing collections.Deque() typing.deque() [out] _testDequeWrongCase.py:4: error: Module has no attribute "Deque"; maybe "deque"? _testDequeWrongCase.py:5: error: Module has no attribute "deque"; maybe "Deque"? 
[case testDictUpdateInference] from typing import Dict, Optional d = {} # type: Dict[str, Optional[int]] d.update({str(i): None for i in range(4)}) [case testSuperAndSetattr] class A: def __init__(self) -> None: super().__setattr__('a', 1) super().__setattr__(1, 'a') [out] _program.py:4: error: Argument 1 to "__setattr__" of "object" has incompatible type "int"; expected "str" [case testMetaclassAndSuper] from typing import Any class A(type): def __new__(cls, name, bases, namespace) -> Any: return super().__new__(cls, '', (object,), {'x': 7}) class B(metaclass=A): pass print(getattr(B(), 'x')) [out] 7 [case testSortedNoError] from typing import Iterable, Callable, TypeVar, List, Dict T = TypeVar('T') def sorted(x: Iterable[T], *, key: Callable[[T], object] = None) -> None: ... a = None # type: List[Dict[str, str]] sorted(a, key=lambda y: y['']) [case testAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): @property def x(self) -> int: return 3 b = B() print(b.x + 1) [out] 4 [case testInferenceWithLambda] from typing import TypeVar, Iterable, Iterator, List import itertools _T = TypeVar('_T') def f(iterable): # type: (Iterable[_T]) -> Iterator[List[_T]] grouped = itertools.groupby(enumerate(iterable), lambda pair: pair[0] // 2) return ([elem for _, elem in group] for _, group in grouped) [case testReModuleBytes] # Regression tests for various overloads in the re module -- bytes version import re bre = b'a+' bpat = re.compile(bre) bpat = re.compile(bpat) re.search(bre, b'').groups() re.search(bre, u'') # Error re.search(bpat, b'').groups() re.search(bpat, u'') # Error # match(), split(), findall(), finditer() are much the same, so skip those. 
# sub(), subn() have more overloads and we are checking these: re.sub(bre, b'', b'') + b'' re.sub(bpat, b'', b'') + b'' re.sub(bre, lambda m: b'', b'') + b'' re.sub(bpat, lambda m: b'', b'') + b'' re.subn(bre, b'', b'')[0] + b'' re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] _program.py:7: error: Value of type variable "AnyStr" of "search" cannot be "object" _program.py:9: error: Cannot infer type argument 1 of "search" [case testReModuleString] # Regression tests for various overloads in the re module -- string version import re sre = 'a+' spat = re.compile(sre) spat = re.compile(spat) re.search(sre, '').groups() re.search(sre, b'') # Error re.search(spat, '').groups() re.search(spat, b'') # Error # match(), split(), findall(), finditer() are much the same, so skip those. # sus(), susn() have more overloads and we are checking these: re.sub(sre, '', '') + '' re.sub(spat, '', '') + '' re.sub(sre, lambda m: '', '') + '' re.sub(spat, lambda m: '', '') + '' re.subn(sre, '', '')[0] + '' re.subn(spat, '', '')[0] + '' re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] _program.py:7: error: Value of type variable "AnyStr" of "search" cannot be "object" _program.py:9: error: Cannot infer type argument 1 of "search" [case testListSetitemTuple] from typing import List, Tuple a = [] # type: List[Tuple[str, int]] a[0] = 'x', 1 a[1] = 2, 'y' a[:] = [('z', 3)] [out] _program.py:4: error: Incompatible types in assignment (expression has type "Tuple[int, str]", target has type "Tuple[str, int]") [case testContextManager] import contextlib from contextlib import contextmanager from typing import Iterator @contextmanager def f(x: int) -> Iterator[str]: yield 'foo' @contextlib.contextmanager def g(*x: str) -> Iterator[int]: yield 1 reveal_type(f) reveal_type(g) with f('') as s: reveal_type(s) [out] _program.py:13: note: Revealed type is 'def (x: builtins.int) -> 
contextlib._GeneratorContextManager[builtins.str*]' _program.py:14: note: Revealed type is 'def (*x: builtins.str) -> contextlib._GeneratorContextManager[builtins.int*]' _program.py:16: error: Argument 1 to "f" has incompatible type "str"; expected "int" _program.py:17: note: Revealed type is 'builtins.str*' [case testTypedDictGet] # Test that TypedDict get plugin works with typeshed stubs # TODO: Make it possible to use strict optional here from mypy_extensions import TypedDict class A: pass D = TypedDict('D', {'x': int, 'y': str}) d: D reveal_type(d.get('x')) reveal_type(d.get('y')) d.get('z') d.get() s = '' reveal_type(d.get(s)) [out] _testTypedDictGet.py:7: note: Revealed type is 'builtins.int' _testTypedDictGet.py:8: note: Revealed type is 'builtins.str' _testTypedDictGet.py:9: error: TypedDict "D" has no key 'z' _testTypedDictGet.py:10: error: All overload variants of "get" of "Mapping" require at least one argument _testTypedDictGet.py:10: note: Possible overload variants: _testTypedDictGet.py:10: note: def get(self, k: str) -> object _testTypedDictGet.py:10: note: def [_T] get(self, k: str, default: object) -> object _testTypedDictGet.py:12: note: Revealed type is 'builtins.object*' [case testTypedDictMappingMethods] from mypy_extensions import TypedDict Cell = TypedDict('Cell', {'value': int}) c = Cell(value=42) for x in c: reveal_type(x) reveal_type(iter(c)) reveal_type(len(c)) reveal_type('value' in c) reveal_type(c.keys()) reveal_type(c.items()) reveal_type(c.values()) reveal_type(c.copy()) reveal_type(c.setdefault('value', False)) c.update({'value': 2}) c.update({'invalid': 2}) c.pop('value') c == c c != c Cell2 = TypedDict('Cell2', {'value': int}, total=False) c2 = Cell2() reveal_type(c2.pop('value')) [out] _testTypedDictMappingMethods.py:5: note: Revealed type is 'builtins.str*' _testTypedDictMappingMethods.py:6: note: Revealed type is 'typing.Iterator[builtins.str*]' _testTypedDictMappingMethods.py:7: note: Revealed type is 'builtins.int' 
_testTypedDictMappingMethods.py:8: note: Revealed type is 'builtins.bool' _testTypedDictMappingMethods.py:9: note: Revealed type is 'typing.AbstractSet[builtins.str*]' _testTypedDictMappingMethods.py:10: note: Revealed type is 'typing.AbstractSet[Tuple[builtins.str*, builtins.object*]]' _testTypedDictMappingMethods.py:11: note: Revealed type is 'typing.ValuesView[builtins.object*]' _testTypedDictMappingMethods.py:12: note: Revealed type is 'TypedDict('_testTypedDictMappingMethods.Cell', {'value': builtins.int})' _testTypedDictMappingMethods.py:13: note: Revealed type is 'builtins.int' _testTypedDictMappingMethods.py:15: error: Unexpected TypedDict key 'invalid' _testTypedDictMappingMethods.py:16: error: Key 'value' of TypedDict "Cell" cannot be deleted _testTypedDictMappingMethods.py:21: note: Revealed type is 'builtins.int' [case testCrashOnComplexCheckWithNamedTupleNext] from typing import NamedTuple MyNamedTuple = NamedTuple('MyNamedTuple', [('parent', 'MyNamedTuple')]) # type: ignore def foo(mymap) -> MyNamedTuple: return next((mymap[key] for key in mymap), None) [out] [case testCanConvertTypedDictToAnySuperclassOfMapping] from mypy_extensions import TypedDict from typing import Sized, Iterable, Container Point = TypedDict('Point', {'x': int, 'y': int}) p: Point s: Sized = p it: Iterable[str] = p c: Container[str] = p o: object = p it2: Iterable[int] = p [out] _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: error: Incompatible types in assignment (expression has type "Point", variable has type "Iterable[int]") _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: Following member(s) of "Point" have conflicts: _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: Expected: _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: def __iter__(self) -> Iterator[int] _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: Got: _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: def __iter__(self) -> Iterator[str] [case 
testAsyncioGatherPreciseType] import asyncio from typing import Tuple async def get_location(arg: str) -> Tuple[str, str]: return arg, arg async def main() -> None: ((a_x, a_y),) = await asyncio.gather(get_location('start')) reveal_type(a_x) reveal_type(a_y) reveal_type(asyncio.gather(*[asyncio.sleep(1), asyncio.sleep(1)])) [out] _testAsyncioGatherPreciseType.py:9: note: Revealed type is 'builtins.str' _testAsyncioGatherPreciseType.py:10: note: Revealed type is 'builtins.str' _testAsyncioGatherPreciseType.py:11: note: Revealed type is 'asyncio.futures.Future[builtins.list[Any]]' [case testMultipleInheritanceWorksWithTupleTypeGeneric] from typing import SupportsAbs, NamedTuple class Point(NamedTuple('Point', [('x', int), ('y', int)]), SupportsAbs[int]): def __abs__(p) -> int: return abs(p.x) + abs(p.y) def test(a: Point) -> bool: return abs(a) == 2 [out] [case testNoCrashOnGenericUnionUnpacking] from typing import Union, Dict TEST = {'key': ('a', 'b')} def test() -> None: a, b = TEST.get('foo', ('x', 'y')) reveal_type(a) reveal_type(b) def test2() -> None: a, b = TEST.get('foo', (1, 2)) reveal_type(a) reveal_type(b) x: Union[Dict[int, int], Dict[str, str]] = dict(a='b') for a, b in x.items(): reveal_type(a) reveal_type(b) [out] _testNoCrashOnGenericUnionUnpacking.py:6: note: Revealed type is 'builtins.str' _testNoCrashOnGenericUnionUnpacking.py:7: note: Revealed type is 'builtins.str' _testNoCrashOnGenericUnionUnpacking.py:10: note: Revealed type is 'Union[builtins.str, builtins.int]' _testNoCrashOnGenericUnionUnpacking.py:11: note: Revealed type is 'Union[builtins.str, builtins.int]' _testNoCrashOnGenericUnionUnpacking.py:15: note: Revealed type is 'Union[builtins.int*, builtins.str*]' _testNoCrashOnGenericUnionUnpacking.py:16: note: Revealed type is 'Union[builtins.int*, builtins.str*]' [case testMetaclassOpAccess] from typing import Type class A: pass class Meta(type): def __mul__(self, other: int) -> Type[A]: pass def __add__(self, other: int) -> Type[C]: pass 
def __radd__(self, other: int) -> Type[C]: pass class C(metaclass=Meta): pass bar: Type[C] def get_c_type() -> Type[C]: pass res = bar * 4 other = 4 + get_c_type() + 5 reveal_type(res) reveal_type(other) [out] _testMetaclassOpAccess.py:21: note: Revealed type is 'Type[_testMetaclassOpAccess.A]' _testMetaclassOpAccess.py:22: note: Revealed type is 'Type[_testMetaclassOpAccess.C]' [case testMetaclassOpAccessUnion] from typing import Type, Union class MetaA(type): def __mul__(self, other: int) -> str: pass class A(metaclass=MetaA): pass class MetaB(type): def __mul__(self, other: int) -> int: pass class B(metaclass=MetaB): pass bar: Type[Union[A, B]] res = bar * 4 reveal_type(res) [out] _testMetaclassOpAccessUnion.py:16: note: Revealed type is 'Union[builtins.str, builtins.int]' [case testMetaclassOpAccessAny] from typing import Type from nonexistent import C bar: Type[C] bar * 4 + bar + 3 # should not produce more errors [out] _testMetaclassOpAccessAny.py:2: error: Cannot find implementation or library stub for module named 'nonexistent' _testMetaclassOpAccessAny.py:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testEnumIterationAndPreciseElementType] # Regression test for #2305 from enum import Enum class E(Enum): A = 'a' (reveal_type(e) for e in E) for e in E: reveal_type(e) [out] _testEnumIterationAndPreciseElementType.py:5: note: Revealed type is '_testEnumIterationAndPreciseElementType.E*' _testEnumIterationAndPreciseElementType.py:7: note: Revealed type is '_testEnumIterationAndPreciseElementType.E*' [case testEnumIterable] from enum import Enum from typing import Iterable class E(Enum): A = 'a' def f(ie: Iterable[E]): pass f(E) [case testIntEnumIterable] from enum import IntEnum from typing import Iterable class N(IntEnum): X = 1 def f(ni: Iterable[N]): pass def g(ii: Iterable[int]): pass f(N) g(N) reveal_type(list(N)) [out] _testIntEnumIterable.py:11: note: Revealed type is 'builtins.list[_testIntEnumIterable.N*]' 
[case testDerivedEnumIterable] from enum import Enum from typing import Iterable class E(str, Enum): A = 'foo' def f(ei: Iterable[E]): pass def g(si: Iterable[str]): pass f(E) g(E) [case testInvalidSlots] class A: __slots__ = 1 class B: __slots__ = (1, 2) [out] _testInvalidSlots.py:2: error: Incompatible types in assignment (expression has type "int", base class "object" defined the type as "Union[str, Iterable[str]]") _testInvalidSlots.py:4: error: Incompatible types in assignment (expression has type "Tuple[int, int]", base class "object" defined the type as "Union[str, Iterable[str]]") [case testDictWithStarStarSpecialCase] from typing import Dict def f() -> Dict[int, str]: return {1: '', **d()} def d() -> Dict[int, int]: return {} [out] _testDictWithStarStarSpecialCase.py:4: error: Argument 1 to "update" of "dict" has incompatible type "Dict[int, int]"; expected "Mapping[int, str]" [case testLoadsOfOverloads] from typing import overload, Any, TypeVar, Iterable, List, Dict, Callable, Union S = TypeVar('S') T = TypeVar('T') @overload def simple_map() -> None: ... @overload def simple_map(func: Callable[[T], S], one: Iterable[T]) -> S: ... @overload def simple_map(func: Callable[..., S], *iterables: Iterable[Any]) -> S: ... def simple_map(*args): pass def format_row(*entries: object) -> str: pass class DateTime: pass JsonBlob = Dict[str, Any] Column = Union[List[str], List[int], List[bool], List[float], List[DateTime], List[JsonBlob]] def print_custom_table() -> None: a = None # type: Column for row in simple_map(format_row, a, a, a, a, a, a, a, a): # 8 columns reveal_type(row) [out] _testLoadsOfOverloads.py:24: note: Revealed type is 'builtins.str*' [case testReduceWithAnyInstance] from typing import Iterable from functools import reduce M = Iterable def f(m1: M, m2): ... 
def g(ms: 'T[M]') -> None: reduce(f, ms) T = Iterable [out] [case testNamedTupleNew] # This is an eval test because there was a snag found only with full stubs from typing import NamedTuple Base = NamedTuple('Base', [('param', int)]) class Child(Base): def __new__(cls, param: int = 1) -> 'Child': return Base.__new__(cls, param) Base(param=10) Child(param=10) reveal_type(Child()) from collections import namedtuple X = namedtuple('X', ['a', 'b']) x = X(a=1, b='s') [out] _testNamedTupleNew.py:12: note: Revealed type is 'Tuple[builtins.int, fallback=_testNamedTupleNew.Child]' [case testNewAnalyzerBasicTypeshed_newsemanal] from typing import Dict, List, Tuple x: Dict[str, List[int]] reveal_type(x['test'][0]) [out] _testNewAnalyzerBasicTypeshed_newsemanal.py:4: note: Revealed type is 'builtins.int*' [case testNewAnalyzerTypedDictInStub_newsemanal] import stub reveal_type(stub.thing) [file stub.pyi] from typing_extensions import TypedDict class StuffDict(TypedDict): foo: str bar: int def thing(stuff: StuffDict) -> int: ... 
[out] _testNewAnalyzerTypedDictInStub_newsemanal.py:2: note: Revealed type is 'def (stuff: TypedDict('stub.StuffDict', {'foo': builtins.str, 'bar': builtins.int})) -> builtins.int' [case testStrictEqualityWhitelist] # mypy: strict-equality {1} == frozenset({1}) frozenset({1}) == {1} frozenset({1}) == [1] # Error {1: 2}.keys() == {1} {1: 2}.keys() == frozenset({1}) {1: 2}.items() == {(1, 2)} {1: 2}.keys() == {'no'} # Error {1: 2}.values() == {2} # Error {1: 2}.keys() == [1] # Error [out] _testStrictEqualityWhitelist.py:5: error: Non-overlapping equality check (left operand type: "FrozenSet[int]", right operand type: "List[int]") _testStrictEqualityWhitelist.py:11: error: Non-overlapping equality check (left operand type: "KeysView[int]", right operand type: "Set[str]") _testStrictEqualityWhitelist.py:12: error: Non-overlapping equality check (left operand type: "ValuesView[int]", right operand type: "Set[int]") _testStrictEqualityWhitelist.py:13: error: Non-overlapping equality check (left operand type: "KeysView[int]", right operand type: "List[int]") [case testUnreachableWithStdlibContextManagers] # mypy: warn-unreachable, strict-optional from contextlib import suppress # This test overlaps with some of the warn-unreachable tests in check-unreachable-code, # but 'open(...)' is a very common function so we want to make sure we don't regress # against it specifically def f_open() -> str: with open("foo.txt", "r") as f: return f.read() print("noop") # contextlib.suppress is less common, but it's a fairly prominent example of an # exception-suppressing context manager, so it'd be good to double-check. 
def f_suppresses() -> int: with suppress(Exception): return 3 print("noop") [out] _testUnreachableWithStdlibContextManagers.py:11: error: Statement is unreachable _testUnreachableWithStdlibContextManagers.py:15: error: Missing return statement [case testUnreachableWithStdlibContextManagersNoStrictOptional] # mypy: warn-unreachable, no-strict-optional from contextlib import suppress # When strict-optional is disabled, 'open' should still behave in the same way as before def f_open() -> str: with open("foo.txt", "r") as f: return f.read() print("noop") # ...but unfortunately, we can't def f_suppresses() -> int: with suppress(Exception): return 3 print("noop") [out] _testUnreachableWithStdlibContextManagersNoStrictOptional.py:9: error: Statement is unreachable _testUnreachableWithStdlibContextManagersNoStrictOptional.py:15: error: Statement is unreachable [case testAsyncioFutureWait] # mypy: strict-optional from asyncio import Future, wait from typing import List async def foo() -> None: f = [] # type: List[Future[None]] await wait(f) mypy-0.761/test-data/unit/reports.test0000644€tŠÔÚ€2›s®0000003400313576752246024164 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for reports -- -- This file follows syntax of cmdline.test. [case testConfigErrorUnknownReport] # cmd: mypy -c pass [file mypy.ini] \[mypy] bad_report = . 
[out] mypy.ini: [mypy]: Unrecognized report type: bad_report == Return code: 0 [case testCoberturaParser] # cmd: mypy --cobertura-xml-report build pkg [file pkg/__init__.py] [file pkg/a.py] from typing import Dict def foo() -> Dict: z = {'hello': 'world'} return z [file pkg/subpkg/__init__.py] [file pkg/subpkg/a.py] def bar() -> str: return 'world' def untyped_function(): return 42 [outfile build/cobertura.xml] $PWD [case testAnyExprReportDivisionByZero] # cmd: mypy --any-exprs-report=out -c 'pass' [case testClassDefIsNotTreatedAsEmpty] # cmd: mypy --html-report report n.py [file n.py] class A(object): pass # line indented with tab; hex 1f here: () [file report/mypy-html.css] [file report/index.html] [outfile report/html/n.py.html]

n

n.py
1
2
class A(object):
	pass  # line indented with tab; hex 1f here: (?)
[case testTypeVarTreatedAsEmptyLine] # cmd: mypy --html-report report n.py [file n.py] from typing import TypeVar T = TypeVar('T') [file report/mypy-html.css] [file report/index.html] [outfile report/html/n.py.html]

n

n.py
1
2
3
from typing import TypeVar

T = TypeVar('T')
[case testUnreachableCodeMarkedAsAny] # cmd: mypy --html-report report n.py [file any.py] from typing import Any def any_f(x: Any) -> None: pass [file n.py] from any import any_f def bar(x): # type: (str) -> None any_f(x) assert False any_f(x) [file report/mypy-html.css] [file report/index.html] [outfile report/html/n.py.html]

n

n.py
1
2
3
4
5
6
from any import any_f
def bar(x):
    # type: (str) -> None
    any_f(x)
    assert False
    any_f(x)
[case testHtmlReportMemberExprNoUnanalyzed] # cmd: mypy --html-report report n.py [file n.py] import sys old_stdout = sys.stdout [file report/mypy-html.css] [file report/index.html] [outfile report/html/n.py.html]

n

n.py
1
2
3
import sys

old_stdout = sys.stdout
[case testAnyExprReportIncludesDeadCode] # cmd: mypy --any-exprs-report report i.py j.py [file i.py] def bar(x): # type: (str) -> None print(x) assert False print(x) # dead code! [file j.py] def bar(x): # type: (str) -> None print(x) assert False [file report/types-of-anys.txt] [outfile report/any-exprs.txt] Name Anys Exprs Coverage --------------------------------- i 1 6 83.33% j 0 5 100.00% --------------------------------- Total 1 11 90.91% [case testAnyExprReportHigherKindedTypesAreNotAny] # cmd: mypy --any-exprs-report report i.py [file i.py] from enum import Enum from mypy_extensions import TypedDict from typing import NewType, NamedTuple, TypeVar from typing import TypeVar T = TypeVar('T') # no error def f(t: T) -> T: return t Point = NamedTuple('Point', [('x', int), ('y', int)]) # no error def origin() -> Point: return Point(x=0, y=0) NT = NewType('NT', int) # no error def nt() -> NT: return NT(1) E = Enum('E', '1, 2, 3') # no error def k(s: E) -> None: pass Movie = TypedDict('Movie', {'name': str, 'year': int}) def g(m: Movie) -> Movie: return m [file report/types-of-anys.txt] [outfile report/any-exprs.txt] Name Anys Exprs Coverage --------------------------------- i 0 14 100.00% --------------------------------- Total 0 14 100.00% [case testAnyExpressionsReportTypesOfAny] # cmd: mypy --python-version=3.6 --any-exprs-report report n.py [file n.py] from typing import Any, List from nonexistent import C # type: ignore def any_f(x: Any) -> None: # Explicit pass def a(x) -> None: # Unannotated any_f(x) x: Any = 2 # Explicit y: C = None # Unimported def b() -> List: # Omitted Generics return [1, 2, 3] g = 1 z = g.does_not_exist() # type: ignore # Error [file report/any-exprs.txt] [outfile report/types-of-anys.txt] Name Unannotated Explicit Unimported Omitted Generics Error Special Form Implementation Artifact ----------------------------------------------------------------------------------------------------------------- n 2 4 2 1 3 0 0 
----------------------------------------------------------------------------------------------------------------- Total 2 4 2 1 3 0 0 [case testAnyExpressionsReportUnqualifiedError] # cmd: mypy --any-exprs-report report n.py [file n.py] z = does_not_exist() # type: ignore # Error [file report/any-exprs.txt] [outfile report/types-of-anys.txt] Name Unannotated Explicit Unimported Omitted Generics Error Special Form Implementation Artifact ----------------------------------------------------------------------------------------------------------------- n 0 0 0 0 3 0 0 ----------------------------------------------------------------------------------------------------------------- Total 0 0 0 0 3 0 0 [case testAnyExpressionsReportUntypedDef] # cmd: mypy --any-exprs-report report n.py [file n.py] def foo(): x = 0 f = 0 [file report/any-exprs.txt] [outfile report/types-of-anys.txt] Name Unannotated Explicit Unimported Omitted Generics Error Special Form Implementation Artifact ----------------------------------------------------------------------------------------------------------------- n 0 0 0 0 0 0 0 ----------------------------------------------------------------------------------------------------------------- Total 0 0 0 0 0 0 0 [case testTrickyCoverage] # cmd: mypy --linecoverage-report=report n.py [file n.py] import attr def blah(x): return x @blah def f(x: int) -> None: pass class Foo: @blah #hi def f(self, x: int) -> None: pass @attr.s class Z(object): pass [case testCoverageIgnoresCache] -- Performs two runs to verify that cached information does not prevent -- modules from being included in reports. 
# cmd: mypy --linecount-report report a.py [file a.py] empty = False [out] [out2] [outfile report/linecount.txt] 1 1 0 0 total 1 1 0 0 a [case testAnyExprReportIgnoresSpecialForms] # cmd: mypy --any-exprs-report report i.py j.py k.py l.py [file i.py] async def some_function() -> None: pass [file j.py] from typing import Any async def some_function() -> Any: pass [file k.py] from typing import NamedTuple def a() -> None: _FuzzyMatch(0, 0) _FuzzyMatch = NamedTuple('_FuzzyMatch', [ ('match_length', int), ('start_pos', int), ]) def b() -> None: _FuzzyMatch(0, 0) [file l.py] async def some_function(x) -> None: pass [file report/any-exprs.txt] [outfile report/types-of-anys.txt] Name Unannotated Explicit Unimported Omitted Generics Error Special Form Implementation Artifact ----------------------------------------------------------------------------------------------------------------- i 0 0 0 0 0 0 0 j 0 1 0 0 0 0 0 k 0 0 0 0 0 0 0 l 1 0 0 0 0 0 0 ----------------------------------------------------------------------------------------------------------------- Total 1 1 0 0 0 0 0 [case testSpecialAnyHtmlReport] # cmd: mypy --html-report report n.py [file n.py] from typing import Callable SourceToDisplay = Callable[[int], int] DisplayToSource = Callable[[int], int] [file report/mypy-html.css] [file report/index.html] [outfile report/html/n.py.html]

n

n.py
1
2
3
4
from typing import Callable

SourceToDisplay = Callable[[int], int]
DisplayToSource = Callable[[int], int]
mypy-0.761/test-data/unit/semanal-abstractclasses.test0000644€tŠÔÚ€2›s®0000000461613576752246027274 0ustar jukkaDROPBOX\Domain Users00000000000000[case testAbstractMethods] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def g(self) -> 'A': pass @abstractmethod def f(self) -> 'A': return self [out] MypyFile:1( ImportFrom:1(abc, [abstractmethod, ABCMeta]) Import:2(typing) ClassDef:4( A Metaclass(NameExpr(ABCMeta [abc.ABCMeta])) Decorator:5( Var(g) FuncDef:6( g Args( Var(self)) def (self: __main__.A) -> __main__.A Abstract Block:6( PassStmt:6()))) Decorator:7( Var(f) FuncDef:8( f Args( Var(self)) def (self: __main__.A) -> __main__.A Abstract Block:8( ReturnStmt:8( NameExpr(self [l]))))))) [case testClassInheritingTwoAbstractClasses] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): pass class B(metaclass=ABCMeta): pass class C(A, B): pass [out] MypyFile:1( ImportFrom:1(abc, [abstractmethod, ABCMeta]) Import:2(typing) ClassDef:4( A Metaclass(NameExpr(ABCMeta [abc.ABCMeta])) PassStmt:4()) ClassDef:5( B Metaclass(NameExpr(ABCMeta [abc.ABCMeta])) PassStmt:5()) ClassDef:6( C BaseType( __main__.A __main__.B) PassStmt:6())) [case testAbstractGenericClass] from abc import abstractmethod from typing import Generic, TypeVar T = TypeVar('T') class A(Generic[T]): @abstractmethod def f(self) -> 'A[T]': pass [out] MypyFile:1( ImportFrom:1(abc, [abstractmethod]) ImportFrom:2(typing, [Generic, TypeVar]) AssignmentStmt:3( NameExpr(T* [__main__.T]) TypeVarExpr:3()) ClassDef:4( A TypeVars( T) Decorator:5( Var(f) FuncDef:6( f Args( Var(self)) def (self: __main__.A[T`1]) -> __main__.A[T`1] Abstract Block:6( PassStmt:6()))))) [case testFullyQualifiedAbstractMethodDecl] import abc from abc import ABCMeta import typing class A(metaclass=ABCMeta): @abc.abstractmethod def g(self) -> 'A': pass [out] MypyFile:1( Import:1(abc) ImportFrom:2(abc, [ABCMeta]) Import:3(typing) ClassDef:5( A Metaclass(NameExpr(ABCMeta 
[abc.ABCMeta])) Decorator:6( Var(g) FuncDef:7( g Args( Var(self)) def (self: __main__.A) -> __main__.A Abstract Block:7( PassStmt:7()))))) mypy-0.761/test-data/unit/semanal-basic.test0000644€tŠÔÚ€2›s®0000002056513576752246025175 0ustar jukkaDROPBOX\Domain Users00000000000000[case testEmptyFile] [out] MypyFile:1() [case testGlobalVariable] x = 1 x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) ExpressionStmt:2( NameExpr(x [__main__.x]))) [case testMultipleGlobals] x = y = 2 z = 3 (x, y, z) [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(2)) AssignmentStmt:2( NameExpr(z* [__main__.z]) IntExpr(3)) ExpressionStmt:3( TupleExpr:3( NameExpr(x [__main__.x]) NameExpr(y [__main__.y]) NameExpr(z [__main__.z])))) [case testEmptyFunction] def f(): pass f() [out] MypyFile:1( FuncDef:1( f Block:1( PassStmt:1())) ExpressionStmt:2( CallExpr:2( NameExpr(f [__main__.f]) Args()))) [case testAccessingGlobalNameBeforeDefinition] x f() x = 1 def f(): pass [out] MypyFile:1( ExpressionStmt:1( NameExpr(x [__main__.x])) ExpressionStmt:2( CallExpr:2( NameExpr(f [__main__.f]) Args())) AssignmentStmt:3( NameExpr(x* [__main__.x]) IntExpr(1)) FuncDef:4( f Block:4( PassStmt:4()))) [case testFunctionArgs] def f(x, y): (x, y) [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(y)) Block:1( ExpressionStmt:2( TupleExpr:2( NameExpr(x [l]) NameExpr(y [l])))))) [case testLocalVar] def f(): x = 1 x [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(x* [l]) IntExpr(1)) ExpressionStmt:3( NameExpr(x [l]))))) [case testAccessGlobalInFn] def f(): x g() x = 1 def g(): pass [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( NameExpr(x [__main__.x])) ExpressionStmt:3( CallExpr:3( NameExpr(g [__main__.g]) Args())))) AssignmentStmt:4( NameExpr(x* [__main__.x]) IntExpr(1)) FuncDef:5( g Block:5( PassStmt:5()))) [case testAssignmentAfterInit] x = 1 x = 2 def f(y): y = 1 z = 1 z = 2 [out] MypyFile:1( 
AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(2)) FuncDef:3( f Args( Var(y)) Block:3( AssignmentStmt:4( NameExpr(y [l]) IntExpr(1)) AssignmentStmt:5( NameExpr(z* [l]) IntExpr(1)) AssignmentStmt:6( NameExpr(z [l]) IntExpr(2))))) [case testLocalAndGlobalAliasing] x = 1 def f(): x = 2 x x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) FuncDef:2( f Block:2( AssignmentStmt:3( NameExpr(x* [l]) IntExpr(2)) ExpressionStmt:4( NameExpr(x [l])))) ExpressionStmt:5( NameExpr(x [__main__.x]))) [case testArgumentInitializers] def f(x = f, y = object): x, y [out] MypyFile:1( FuncDef:1( f Args( default( Var(x) NameExpr(f [__main__.f])) default( Var(y) NameExpr(object [builtins.object]))) Block:1( ExpressionStmt:2( TupleExpr:2( NameExpr(x [l]) NameExpr(y [l])))))) [case testVarArgs] def f(x, *y): x, y [out] MypyFile:1( FuncDef:1( f Args( Var(x)) VarArg( Var(y)) Block:1( ExpressionStmt:2( TupleExpr:2( NameExpr(x [l]) NameExpr(y [l])))))) [case testGlobalDecl] x = None def f(): global x x = None x class A: pass [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) FuncDef:2( f Block:2( GlobalDecl:3( x) AssignmentStmt:4( NameExpr(x [__main__.x]) NameExpr(None [builtins.None])) ExpressionStmt:5( NameExpr(x [__main__.x])))) ClassDef:6( A PassStmt:6())) [case testMultipleNamesInGlobalDecl] x, y = None, None def f(): global x, y x = y [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) TupleExpr:1( NameExpr(None [builtins.None]) NameExpr(None [builtins.None]))) FuncDef:2( f Block:2( GlobalDecl:3( x y) AssignmentStmt:4( NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))))) [case testGlobalDeclScope] x = None def f(): global x def g(): x = None [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) FuncDef:2( f Block:2( GlobalDecl:3( x))) FuncDef:4( g Block:4( AssignmentStmt:5( 
NameExpr(x* [l]) NameExpr(None [builtins.None]))))) [case testGlobalDeclScope] x = None def f(): global x def g(): x = None [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) FuncDef:2( f Block:2( GlobalDecl:3( x))) FuncDef:4( g Block:4( AssignmentStmt:5( NameExpr(x* [l]) NameExpr(None [builtins.None]))))) [case testGlobaWithinMethod] x = None class A: def f(self): global x x = self [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) ClassDef:2( A FuncDef:3( f Args( Var(self)) Block:3( GlobalDecl:4( x) AssignmentStmt:5( NameExpr(x [__main__.x]) NameExpr(self [l])))))) [case testGlobalDefinedInBlock] # flags: --allow-redefinition if object: x = object() x = x x [out] MypyFile:1( IfStmt:2( If( NameExpr(object [builtins.object])) Then( AssignmentStmt:3( NameExpr(x'* [__main__.x']) CallExpr:3( NameExpr(object [builtins.object]) Args())) AssignmentStmt:4( NameExpr(x* [__main__.x]) NameExpr(x' [__main__.x'])))) ExpressionStmt:5( NameExpr(x [__main__.x]))) [case testNonlocalDecl] def g(): x = None def f(): nonlocal x x = None x [out] MypyFile:1( FuncDef:1( g Block:1( AssignmentStmt:2( NameExpr(x* [l]) NameExpr(None [builtins.None])) FuncDef:3( f Block:3( NonlocalDecl:4( x) AssignmentStmt:5( NameExpr(x [l]) NameExpr(None [builtins.None])) ExpressionStmt:6( NameExpr(x [l]))))))) [case testMultipleNamesInNonlocalDecl] def g(): x, y = None, None def f(z): nonlocal x, y x = y [out] MypyFile:1( FuncDef:1( g Block:1( AssignmentStmt:2( TupleExpr:2( NameExpr(x* [l]) NameExpr(y* [l])) TupleExpr:2( NameExpr(None [builtins.None]) NameExpr(None [builtins.None]))) FuncDef:3( f Args( Var(z)) Block:3( NonlocalDecl:4( x y) AssignmentStmt:5( NameExpr(x [l]) NameExpr(y [l]))))))) [case testNestedFunctions] def f(x): def g(y): z = y + x return g [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( FuncDef:2( g Args( Var(y)) Block:2( AssignmentStmt:3( NameExpr(z* [l]) OpExpr:3( + NameExpr(y [l]) NameExpr(x 
[l]))))) ReturnStmt:4( NameExpr(g [l]))))) [case testNestedFunctionWithOverlappingName] def f(x): def g(): x = 1 [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( FuncDef:2( g Block:2( AssignmentStmt:3( NameExpr(x* [l]) IntExpr(1))))))) [case testFinalValuesOnVar] from typing import Final, Any def func() -> Any: ... x: Final = 1 y: Final = 1.0 s: Final = "hi" t: Final = True n: Final = func() [out] MypyFile:1( ImportFrom:1(typing, [Final, Any]) FuncDef:3( func def () -> Any Block:3( ExpressionStmt:3( Ellipsis))) AssignmentStmt:4( NameExpr(x* [__main__.x] = 1) IntExpr(1)) AssignmentStmt:5( NameExpr(y* [__main__.y] = 1.0) FloatExpr(1.0)) AssignmentStmt:6( NameExpr(s* [__main__.s] = hi) StrExpr(hi)) AssignmentStmt:7( NameExpr(t* [__main__.t] = True) NameExpr(True [builtins.True])) AssignmentStmt:8( NameExpr(n* [__main__.n] = None) CallExpr:8( NameExpr(func [__main__.func]) Args()))) mypy-0.761/test-data/unit/semanal-classes.test0000644€tŠÔÚ€2›s®0000002575413576752246025556 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases related to classes for the semantic analyzer. 
[case testSimpleClass] class A: pass x = A [out] MypyFile:1( ClassDef:1( A PassStmt:1()) AssignmentStmt:2( NameExpr(x* [__main__.x]) NameExpr(A [__main__.A]))) [case testMethods] class A: def __init__(self, x): y = x def f(self): y = self [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(self) Var(x)) Block:2( AssignmentStmt:3( NameExpr(y* [l]) NameExpr(x [l])))) FuncDef:4( f Args( Var(self)) Block:4( AssignmentStmt:5( NameExpr(y* [l]) NameExpr(self [l])))))) [case testMemberDefinitionInInit] class A: def __init__(self): self.x = 1 self.y = 2 [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) x*) IntExpr(1)) AssignmentStmt:4( MemberExpr:4( NameExpr(self [l]) y*) IntExpr(2)))))) [case testMemberAssignmentViaSelfOutsideInit] class A: def f(self): self.x = 1 def __init__(self): self.y = 1 [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) x*) IntExpr(1))))) FuncDef:4( __init__ Args( Var(self)) Block:4( AssignmentStmt:5( MemberExpr:5( NameExpr(self [l]) y) IntExpr(1))))) [case testMemberAssignmentNotViaSelf] class A: def __init__(x, self): self.y = 1 # not really self class B: def __init__(x): self = x self.z = 1 [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(x) Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) y) IntExpr(1))))) ClassDef:4( B FuncDef:5( __init__ Args( Var(x)) Block:5( AssignmentStmt:6( NameExpr(self* [l]) NameExpr(x [l])) AssignmentStmt:7( MemberExpr:7( NameExpr(self [l]) z) IntExpr(1)))))) [case testNonStandardNameForSelfAndInit] class A: def __init__(x): x.y = 1 [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(x)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(x [l]) y*) IntExpr(1)))))) [case testAssignmentAfterAttributeInit] class A: def __init__(self): self.x = 1 self.x = 2 [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( 
Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) x*) IntExpr(1)) AssignmentStmt:4( MemberExpr:4( NameExpr(self [l]) x) IntExpr(2)))))) [case testOverloadedMethod] from typing import overload class A: @overload def f(self) -> None: self @overload def f(self, x: 'A') -> None: self def f(self, *args): self [out] MypyFile:1( ImportFrom:1(typing, [overload]) ClassDef:2( A OverloadedFuncDef:3( FuncDef:7( f Args( Var(self)) VarArg( Var(args)) Block:7( ExpressionStmt:7( NameExpr(self [l])))) Overload(def (self: __main__.A), \ def (self: __main__.A, x: __main__.A)) Decorator:3( Var(f) NameExpr(overload [typing.overload]) FuncDef:4( f Args( Var(self)) def (self: __main__.A) Block:4( ExpressionStmt:4( NameExpr(self [l]))))) Decorator:5( Var(f) NameExpr(overload [typing.overload]) FuncDef:6( f Args( Var(self) Var(x)) def (self: __main__.A, x: __main__.A) Block:6( ExpressionStmt:6( NameExpr(self [l])))))))) [case testAttributeWithoutType] class A: a = object [out] MypyFile:1( ClassDef:1( A AssignmentStmt:2( NameExpr(a* [m]) NameExpr(object [builtins.object])))) [case testDataAttributeRefInClassBody] class A: x = 1 y = x [out] MypyFile:1( ClassDef:1( A AssignmentStmt:2( NameExpr(x* [m]) IntExpr(1)) AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x [__main__.A.x])))) [case testMethodRefInClassBody] class A: def f(self): pass g = f [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) Block:2( PassStmt:2())) AssignmentStmt:3( NameExpr(g* [m]) NameExpr(f [__main__.A.f])))) [case testIfStatementInClassBody] class A: if A: x = 1 else: x = 2 [out] MypyFile:1( ClassDef:1( A IfStmt:2( If( NameExpr(A [__main__.A])) Then( AssignmentStmt:3( NameExpr(x* [m]) IntExpr(1))) Else( AssignmentStmt:5( NameExpr(x [__main__.A.x]) IntExpr(2)))))) [case testForStatementInClassBody] class A: for x in [1, 2]: y = x [out] MypyFile:1( ClassDef:1( A ForStmt:2( NameExpr(x* [m]) ListExpr:2( IntExpr(1) IntExpr(2)) Block:2( AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x 
[__main__.A.x])))))) [case testReferenceToClassWithinFunction] def f(): class A: pass A [out] MypyFile:1( FuncDef:1( f Block:1( ClassDef:2( A PassStmt:2()) ExpressionStmt:3( NameExpr(A [__main__.A@2]))))) [case testReferenceToClassWithinClass] class A: class B: pass B [out] MypyFile:1( ClassDef:1( A ClassDef:2( B PassStmt:2()) ExpressionStmt:3( NameExpr(B [__main__.A.B])))) [case testClassWithBaseClassWithinClass] class A: class B: pass class C(B): pass [out] MypyFile:1( ClassDef:1( A ClassDef:2( B PassStmt:2()) ClassDef:3( C BaseType( __main__.A.B) PassStmt:3()))) [case testDeclarationReferenceToNestedClass] def f() -> None: class A: pass x = None # type: A [out] MypyFile:1( FuncDef:1( f def () Block:1( ClassDef:2( A PassStmt:2()) AssignmentStmt:3( NameExpr(x [l]) NameExpr(None [builtins.None]) __main__.A@2)))) [case testAccessToLocalInOuterScopeWithinNestedClass] def f(x): class A: y = x def g(self): z = x [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( ClassDef:2( A AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x [l])) FuncDef:4( g Args( Var(self)) Block:4( AssignmentStmt:5( NameExpr(z* [l]) NameExpr(x [l])))))))) [case testQualifiedMetaclass] import abc class A(metaclass=abc.ABCMeta): pass [out] MypyFile:1( Import:1(abc) ClassDef:2( A Metaclass(MemberExpr:2( NameExpr(abc) ABCMeta [abc.ABCMeta])) PassStmt:2())) [case testStaticMethod] class A: @staticmethod def f(z: int) -> str: pass [builtins fixtures/staticmethod.pyi] [out] MypyFile:1( ClassDef:1( A Decorator:2( Var(f) FuncDef:3( f Args( Var(z)) def (z: builtins.int) -> builtins.str Static Block:3( PassStmt:3()))))) [case testStaticMethodWithNoArgs] class A: @staticmethod def f() -> str: pass [builtins fixtures/staticmethod.pyi] [out] MypyFile:1( ClassDef:1( A Decorator:2( Var(f) FuncDef:3( f def () -> builtins.str Static Block:3( PassStmt:3()))))) [case testClassMethod] class A: @classmethod def f(cls, z: int) -> str: pass [builtins fixtures/classmethod.pyi] [out] MypyFile:1( ClassDef:1( A Decorator:2( 
Var(f) FuncDef:3( f Args( Var(cls) Var(z)) def (cls: Type[__main__.A], z: builtins.int) -> builtins.str Class Block:3( PassStmt:3()))))) [case testClassMethodWithNoArgs] class A: @classmethod def f(cls) -> str: pass [builtins fixtures/classmethod.pyi] [out] MypyFile:1( ClassDef:1( A Decorator:2( Var(f) FuncDef:3( f Args( Var(cls)) def (cls: Type[__main__.A]) -> builtins.str Class Block:3( PassStmt:3()))))) [case testProperty] import typing class A: @property def f(self) -> str: pass [builtins fixtures/property.pyi] [out] MypyFile:1( Import:1(typing) ClassDef:2( A Decorator:3( Var(f) FuncDef:4( f Args( Var(self)) def (self: __main__.A) -> builtins.str Property Block:4( PassStmt:4()))))) [case testClassDecorator] import typing @object class A: pass [out] MypyFile:1( Import:1(typing) ClassDef:3( A Decorators( NameExpr(object [builtins.object])) PassStmt:3())) [case testClassAttributeAsMethodDefaultArgumentValue] import typing class A: X = 1 def f(self, x : int = X) -> None: pass [out] MypyFile:1( Import:1(typing) ClassDef:2( A AssignmentStmt:3( NameExpr(X* [m]) IntExpr(1)) FuncDef:4( f Args( Var(self) default( Var(x) NameExpr(X [__main__.A.X]))) def (self: __main__.A, x: builtins.int =) Block:4( PassStmt:4())))) [case testInvalidBaseClass] from typing import Any, Callable class A(None): pass class B(Any): pass class C(Callable[[], int]): pass [out] main:3: error: Invalid base class "None" main:5: error: Invalid base class "Callable" [case testTupleAsBaseClass] import m [file m.pyi] from typing import Tuple class A(Tuple[int, str]): pass [builtins fixtures/tuple.pyi] [out] MypyFile:1( Import:2(m)) MypyFile:1( tmp/m.pyi ImportFrom:1(typing, [Tuple]) ClassDef:2( A TupleType( Tuple[builtins.int, builtins.str]) BaseType( builtins.tuple[builtins.object]) PassStmt:2())) [case testBaseClassFromIgnoredModule] import m # type: ignore class B(m.A): pass [out] MypyFile:1( Import:1(m) ClassDef:2( B FallbackToAny PassStmt:3()) IgnoredLines(1)) [case 
testBaseClassFromIgnoredModuleUsingImportFrom] from m import A # type: ignore class B(A, int): pass [out] MypyFile:1( ImportFrom:1(m, [A]) ClassDef:2( B FallbackToAny BaseType( builtins.int) PassStmt:3()) IgnoredLines(1)) [case testBaseClassWithExplicitAnyType] from typing import Any A = 1 # type: Any class B(A): pass [out] MypyFile:1( ImportFrom:1(typing, [Any]) AssignmentStmt:2( NameExpr(A [__main__.A]) IntExpr(1) Any) ClassDef:3( B FallbackToAny PassStmt:4())) mypy-0.761/test-data/unit/semanal-classvar.test0000644€tŠÔÚ€2›s®0000001255113576752246025726 0ustar jukkaDROPBOX\Domain Users00000000000000[case testClassVarDef] from typing import ClassVar class A: x = 1 # type: ClassVar[int] [out] MypyFile:1( ImportFrom:1(typing, [ClassVar]) ClassDef:2( A AssignmentStmt:3( NameExpr(x [m]) IntExpr(1) builtins.int))) [case testClassVarDefInModuleScope] from typing import ClassVar x = None # type: ClassVar[int] [out] main:2: error: ClassVar can only be used for assignments in class body [case testClassVarDefInFuncScope] from typing import ClassVar def f() -> None: x = None # type: ClassVar[int] [out] main:3: error: ClassVar can only be used for assignments in class body [case testClassVarDefInMethod] from typing import ClassVar class A: def f(self) -> None: x = None # type: ClassVar [out] main:4: error: ClassVar can only be used for assignments in class body [case testClassVarTooManyArguments] from typing import ClassVar class A: x = 1 # type: ClassVar[int, str] [out] main:3: error: ClassVar[...] 
must have at most one type argument [case testClassVarWithoutArguments] from typing import ClassVar class A: x = 1 # type: ClassVar [out] MypyFile:1( ImportFrom:1(typing, [ClassVar]) ClassDef:2( A AssignmentStmt:3( NameExpr(x [m]) IntExpr(1) Any))) [case testClassVarWithTypeVar] from typing import ClassVar, TypeVar T = TypeVar('T') class A: x = None # type: ClassVar[T] [out] main:5: error: Type variable "__main__.T" is unbound main:5: note: (Hint: Use "Generic[T]" or "Protocol[T]" base class to bind "T" inside a class) main:5: note: (Hint: Use "T" in function signature to bind "T" inside a function) [case testClassVarInFunctionArgs] from typing import ClassVar def f(x: str, y: ClassVar) -> None: pass [out] main:2: error: ClassVar can only be used for assignments in class body [case testClassVarInMethodArgs] from typing import ClassVar class A: def f(x: str, y: ClassVar) -> None: pass [out] main:3: error: ClassVar can only be used for assignments in class body [case testClassVarFunctionRetType] from typing import ClassVar def f() -> ClassVar: pass [out] main:2: error: ClassVar can only be used for assignments in class body [case testClassVarMethodRetType] from typing import ClassVar class A: def f(self) -> ClassVar: pass [out] main:3: error: ClassVar can only be used for assignments in class body [case testMultipleClassVarInFunctionSig] from typing import ClassVar def f(x: ClassVar, y: ClassVar) -> ClassVar: pass [out] main:2: error: ClassVar can only be used for assignments in class body [case testClassVarInCallableArgs] from typing import Callable, ClassVar, Any f = None # type: Callable[[int, ClassVar], Any] [out] main:2: error: Invalid type: ClassVar nested inside other type [case testClassVarInCallableRet] from typing import Callable, ClassVar f = None # type: Callable[..., ClassVar] [out] main:2: error: Invalid type: ClassVar nested inside other type [case testClassVarInUnion] from typing import ClassVar, Union x = None # type: Union[ClassVar, str] [out] 
main:2: error: Invalid type: ClassVar nested inside other type [case testClassVarInUnionAsAttribute] from typing import ClassVar, Union class A: x = None # type: Union[ClassVar, str] [out] main:3: error: Invalid type: ClassVar nested inside other type [case testListWithClassVars] from typing import ClassVar, List x = [] # type: List[ClassVar] [builtins fixtures/list.pyi] [out] main:2: error: Invalid type: ClassVar nested inside other type [case testTupleClassVar] from typing import ClassVar, Tuple x = None # type: Tuple[ClassVar, int] [out] main:2: error: Invalid type: ClassVar nested inside other type [case testMultipleLvaluesWithList] from typing import ClassVar, Tuple class A: [x, y] = None, None # type: Tuple[ClassVar, ClassVar] [builtins fixtures/tuple.pyi] [out] main:3: error: Invalid type: ClassVar nested inside other type [case testDeeplyNested] from typing import Callable, ClassVar, Union class A: pass class B: x = None # type: Union[str, Callable[[A, ClassVar], int]] [out] main:4: error: Invalid type: ClassVar nested inside other type [case testClassVarInClassVar] from typing import ClassVar class A: x = None # type: ClassVar[ClassVar[int]] [out] main:3: error: Invalid type: ClassVar nested inside other type [case testInsideGeneric] from typing import ClassVar, Generic, TypeVar T = TypeVar('T') class A(Generic[T]): pass class B: x = None # type: A[ClassVar] [out] main:5: error: Invalid type: ClassVar nested inside other type [case testDefineOnSelf] from typing import ClassVar class A: def __init__(self) -> None: self.x = None # type: ClassVar [out] main:4: error: ClassVar can only be used for assignments in class body [case testForIndex] from typing import ClassVar for i in []: # type: ClassVar pass [out] main:2: error: ClassVar can only be used for assignments in class body [case testForIndexInClassBody] from typing import ClassVar class A: for i in []: # type: ClassVar pass [out] main:3: error: ClassVar can only be used for assignments in class body 
[case testWithStmt] from typing import ClassVar class A: pass with A() as x: # type: ClassVar pass [out] main:3: error: ClassVar can only be used for assignments in class body [case testWithStmtInClassBody] from typing import ClassVar class A: pass class B: with A() as x: # type: ClassVar pass [out] main:4: error: ClassVar can only be used for assignments in class body mypy-0.761/test-data/unit/semanal-errors.test0000644€tŠÔÚ€2›s®0000010207613576752246025426 0ustar jukkaDROPBOX\Domain Users00000000000000[case testUndefinedVariableInGlobalStatement] import typing x y [out] main:2: error: Name 'x' is not defined main:3: error: Name 'y' is not defined [case testUndefinedVariableWithinFunctionContext] import typing def f() -> None: x y [out] main:3: error: Name 'x' is not defined main:4: error: Name 'y' is not defined [case testMethodScope] import typing class A: def f(self): pass f [out] main:4: error: Name 'f' is not defined [case testMethodScope2] import typing class A: def f(self): pass class B: def g(self) -> None: f # error g # error [out] main:6: error: Name 'f' is not defined main:7: error: Name 'g' is not defined [case testInvalidType] import typing x = None # type: X [out] main:2: error: Name 'X' is not defined [case testInvalidGenericArg] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass x = 0 # type: A[y] [out] main:4: error: Name 'y' is not defined [case testInvalidNumberOfGenericArgsInTypeDecl] from typing import TypeVar, Generic t = TypeVar('t') class A: pass class B(Generic[t]): pass x = 0 # type: B[A, A] y = 0 # type: A[A] [out] main:5: error: "B" expects 1 type argument, but 2 given main:6: error: "A" expects no type arguments, but 1 given [case testInvalidNumberOfGenericArgsInUndefinedArg] class A: pass x = None # type: A[int] # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInNestedBlock] class A: pass class B: def f(self) -> None: while 1: x = None # type: A[int] \ # E: "A" 
expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInSignature] import typing class A: pass def f() -> A[int]: pass # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInOverloadedSignature] from typing import overload class A: pass @overload def f(): pass @overload def f(x: A[int]) -> None: pass # E: "A" expects no type arguments, but 1 given def f(*args): pass [out] [case testInvalidNumberOfGenericArgsInBaseType] import typing class A: pass class B(A[int]): pass # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInCast] from typing import cast class A: pass x = cast(A[int], 1) # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInNestedGenericType] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B: pass def f() -> A[B[int]]: pass # E: "B" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInTupleType] from typing import Tuple class A: pass x = None # type: Tuple[A[int]] # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInFunctionType] from typing import Callable class A: pass x = None # type: Callable[[A[int]], int] # E: "A" expects no type arguments, but 1 given y = None # type: Callable[[], A[int]] # E: "A" expects no type arguments, but 1 given [out] [case testVarOrFuncAsType] import typing def f(): pass x = 1 y = 0 # type: f z = 0 # type: x [out] main:5: error: Function "__main__.f" is not valid as a type main:5: note: Perhaps you need "Callable[...]" or a callback protocol? 
main:6: error: Variable "__main__.x" is not valid as a type [case testGlobalVarRedefinition] import typing class A: pass x = 0 # type: A x = 0 # type: A [out] main:4: error: Name 'x' already defined on line 3 [case testLocalVarRedefinition] import typing class A: pass def f() -> None: x = 0 # type: A x = 0 # type: A [out] main:5: error: Name 'x' already defined on line 4 [case testClassVarRedefinition] import typing class A: x = 0 # type: object x = 0 # type: object [out] main:4: error: Name 'x' already defined on line 3 [case testMultipleClassDefinitions] import typing class A: pass class A: pass [out] main:3: error: Name 'A' already defined on line 2 [case testMultipleMixedDefinitions] import typing x = 1 def x(): pass class x: pass [out] main:3: error: Name 'x' already defined on line 2 main:4: error: Name 'x' already defined on line 2 [case testNameNotImported] import typing from m import y x [file m.py] x = y = 1 [out] main:3: error: Name 'x' is not defined [case testMissingNameInImportFrom] import typing from m import y [file m.py] x = 1 [out] main:2: error: Module 'm' has no attribute 'y' [case testMissingModule] import typing import m [out] main:2: error: Cannot find implementation or library stub for module named 'm' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testMissingModule2] import typing from m import x [out] main:2: error: Cannot find implementation or library stub for module named 'm' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testMissingModule3] import typing from m import * [out] main:2: error: Cannot find implementation or library stub for module named 'm' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testMissingModuleRelativeImport] import typing import m [file m/__init__.py] from .x import y [out] tmp/m/__init__.py:1: error: Cannot find implementation or library stub for module named 'm.x' 
tmp/m/__init__.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testMissingModuleRelativeImport2] import typing import m.a [file m/__init__.py] [file m/a.py] from .x import y [out] tmp/m/a.py:1: error: Cannot find implementation or library stub for module named 'm.x' tmp/m/a.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports [case testModuleNotImported] import typing import _m _n.x [file _m.py] import _n [file _n.py] x = 1 [out] main:3: error: Name '_n' is not defined [case testImportAsteriskPlusUnderscore] import typing from _m import * _x __x__ [file _m.py] _x = __x__ = 1 [out] main:3: error: Name '_x' is not defined main:4: error: Name '__x__' is not defined [case testRelativeImportAtTopLevelModule] from . import m [out] main:1: error: No parent module -- cannot perform relative import [case testRelativeImportAtTopLevelModule2] from .. import m [out] main:1: error: No parent module -- cannot perform relative import [case testUndefinedTypeWithQualifiedName] import typing import m def f() -> m.c: pass def g() -> n.c: pass [file m.py] [out] main:3: error: Name 'm.c' is not defined main:4: error: Name 'n' is not defined [case testMissingPackage] import typing import m.n [out] main:2: error: Cannot find implementation or library stub for module named 'm.n' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Cannot find implementation or library stub for module named 'm' [case testMissingPackage2] import typing from m.n import x from a.b import * [out] main:2: error: Cannot find implementation or library stub for module named 'm.n' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:3: error: Cannot find implementation or library stub for module named 'a.b' [case testErrorInImportedModule] import m [file m.py] import typing x = y [out] tmp/m.py:2: error: Name 'y' is not defined [case 
testErrorInImportedModule2] import m.n [file m/__init__.py] [file m/n.py] import k [file k.py] import typing x = y [out] tmp/k.py:2: error: Name 'y' is not defined [case testPackageWithoutInitFile] import typing import m.n m.n.x [file m/n.py] x = 1 [out] main:2: error: Cannot find implementation or library stub for module named 'm.n' main:2: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports main:2: error: Cannot find implementation or library stub for module named 'm' [case testBreakOutsideLoop] break def f(): break [out] main:1: error: 'break' outside loop main:3: error: 'break' outside loop [case testContinueOutsideLoop] continue def f(): continue [out] main:1: error: 'continue' outside loop main:3: error: 'continue' outside loop [case testReturnOutsideFunction] def f(): pass return return 1 [out] main:2: error: 'return' outside function main:3: error: 'return' outside function [case testYieldOutsideFunction] yield 1 yield [out] main:1: error: 'yield' outside function main:2: error: 'yield' outside function [case testInvalidLvalues1] 1 = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues2] (1) = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues3] (1, 1) = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues4] [1, 1] = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues5] () = 1 [out] main:1: error: can't assign to () [case testInvalidLvalues6] x = y = z = 1 # ok x, (y, 1) = 1 [out] main:2: error: can't assign to literal [case testInvalidLvalues7] x, [y, 1] = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues8] x, [y, [z, 1]] = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues9] x, (y) = 1 # ok x, (y, (z, z)) = 1 # ok x, (y, (z, 1)) = 1 [out] main:3: error: can't assign to literal [case testInvalidLvalues10] x + x = 1 [out] main:1: error: can't assign to operator [case testInvalidLvalues11] -x = 1 [out] 
main:1: error: can't assign to operator [case testInvalidLvalues12] 1.1 = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues13] 'x' = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues14] x() = 1 [out] main:1: error: can't assign to function call [case testTwoStarExpressions] a, *b, *c = 1 *a, (*b, c) = 1 a, (*b, *c) = 1 [*a, *b] = 1 [out] main:1: error: Two starred expressions in assignment main:3: error: Two starred expressions in assignment main:4: error: Two starred expressions in assignment [case testTwoStarExpressionsInForStmt] z = 1 for a, *b, *c in z: pass for *a, (*b, c) in z: pass for a, (*b, *c) in z: pass for [*a, *b] in z: pass [out] main:2: error: Two starred expressions in assignment main:6: error: Two starred expressions in assignment main:8: error: Two starred expressions in assignment [case testTwoStarExpressionsInGeneratorExpr] (a for a, *b, *c in []) (a for *a, (*b, c) in []) (a for a, (*b, *c) in []) [out] main:1: error: Name 'a' is not defined main:1: error: Two starred expressions in assignment main:3: error: Two starred expressions in assignment [case testStarExpressionRhs] b = 1 c = 1 d = 1 a = *b [out] main:4: error: Can use starred expression only as assignment target [case testStarExpressionInExp] a = 1 *a + 1 [out] main:2: error: Can use starred expression only as assignment target [case testInvalidDel1] x = 1 del x(1) # E: can't delete function call [out] [case testInvalidDel2] x = 1 del x + 1 # E: can't delete operator [out] [case testInvalidDel3] del z # E: Name 'z' is not defined [out] [case testFunctionTvarScope] from typing import TypeVar t = TypeVar('t') def f(x: t) -> t: pass x = 0 # type: t [out] main:5: error: Type variable "__main__.t" is unbound main:5: note: (Hint: Use "Generic[t]" or "Protocol[t]" base class to bind "t" inside a class) main:5: note: (Hint: Use "t" in function signature to bind "t" inside a function) [case testClassTvarScope] from typing import Generic, TypeVar t = 
TypeVar('t') class c(Generic[t]): pass x = 0 # type: t [out] main:5: error: Type variable "__main__.t" is unbound main:5: note: (Hint: Use "Generic[t]" or "Protocol[t]" base class to bind "t" inside a class) main:5: note: (Hint: Use "t" in function signature to bind "t" inside a function) [case testExpressionRefersToTypeVariable] from typing import TypeVar, Generic t = TypeVar('t') class c(Generic[t]): def f(self) -> None: x = t def f(y: t): x = t [out] main:4: error: 't' is a type variable and only valid in type context main:5: error: 't' is a type variable and only valid in type context [case testMissingSelf] import typing class A: def f(): pass [out] main:3: error: Method must have at least one argument [case testInvalidBaseClass] import typing class A(B): pass [out] main:2: error: Name 'B' is not defined [case testSuperOutsideClass] class A: pass super().x def f() -> None: super().y [out] main:2: error: "super" used outside class main:3: error: "super" used outside class [case testMissingSelfInMethod] import typing class A: def f() -> None: pass def g(): pass [out] main:3: error: Method must have at least one argument main:4: error: Method must have at least one argument [case testMultipleMethodDefinition] import typing class A: def f(self) -> None: pass def g(self) -> None: pass def f(self, x: object) -> None: pass [out] main:5: error: Name 'f' already defined on line 3 [case testInvalidGlobalDecl] import typing def f() -> None: global x x = None [out] main:4: error: Name 'x' is not defined [case testInvalidNonlocalDecl] import typing def f(): def g() -> None: nonlocal x x = None [out] main:4: error: No binding for nonlocal 'x' found main:5: error: Name 'x' is not defined [case testNonlocalDeclNotMatchingGlobal] import typing x = None def f() -> None: nonlocal x x = None [out] main:4: error: No binding for nonlocal 'x' found main:5: error: Name 'x' is not defined [case testNonlocalDeclConflictingWithParameter] import typing def g(): x = None def f(x) -> None: 
nonlocal x x = None [out] main:5: error: Name 'x' is already defined in local scope before nonlocal declaration [case testNonlocalDeclOutsideFunction] x = 2 nonlocal x [out] main:2: error: nonlocal declaration not allowed at module level [case testGlobalAndNonlocalDecl] import typing x = 1 def f(): x = 1 def g() -> None: global x nonlocal x x = None [out] main:7: error: Name 'x' is nonlocal and global [case testNonlocalAndGlobalDecl] import typing x = 1 def f(): x = 1 def g() -> None: nonlocal x global x x = None [out] main:7: error: Name 'x' is nonlocal and global [case testNestedFunctionAndScoping] import typing def f(x) -> None: def g(y): z = x z y x [out] main:5: error: Name 'z' is not defined main:6: error: Name 'y' is not defined [case testMultipleNestedFunctionDef] import typing def f(x) -> None: def g(): pass x = 1 def g(): pass [out] main:5: error: Name 'g' already defined on line 3 [case testRedefinedOverloadedFunction] from typing import overload, Any def f() -> None: @overload def p(o: object) -> None: pass # no error @overload def p(o: Any) -> None: pass # no error x = 1 def p(): pass # fail [out] main:3: error: An overloaded function outside a stub file must have an implementation main:8: error: Name 'p' already defined on line 3 [case testNestedFunctionInMethod] import typing class A: def f(self) -> None: def g() -> None: x y [out] main:5: error: Name 'x' is not defined main:6: error: Name 'y' is not defined [case testImportScope] import typing def f() -> None: import x x.y # E: Name 'x' is not defined [file x.py] y = 1 [out] [case testImportScope2] import typing def f() -> None: from x import y y y # E: Name 'y' is not defined [file x.py] y = 1 [out] [case testImportScope3] import typing def f() -> None: from x import * y y # E: Name 'y' is not defined [file x.py] y = 1 [out] [case testImportScope4] import typing class A: from x import * y y # E: Name 'y' is not defined [file x.py] y = 1 [out] [case testScopeOfNestedClass] import typing def f(): 
class A: pass A A # E: Name 'A' is not defined [out] [case testScopeOfNestedClass2] import typing class A: class B: pass B # E: Name 'B' is not defined [out] [case testScopeOfNestedClass3] import typing class A: def f(self): class B: pass B # E: Name 'B' is not defined B # E: Name 'B' is not defined [out] [case testInvalidNestedClassReferenceInDecl] import typing class A: pass foo = 0 # type: A.x # E: Name 'A.x' is not defined [out] [case testTvarScopingWithNestedClass] from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class A(Generic[t]): class B(Generic[s]): x = 0 # type: A[s] y = 0 # type: A[t] # E: Type variable "__main__.t" is unbound \ # N: (Hint: Use "Generic[t]" or "Protocol[t]" base class to bind "t" inside a class) \ # N: (Hint: Use "t" in function signature to bind "t" inside a function) z = 0 # type: A[s] # E: Type variable "__main__.s" is unbound \ # N: (Hint: Use "Generic[s]" or "Protocol[s]" base class to bind "s" inside a class) \ # N: (Hint: Use "s" in function signature to bind "s" inside a function) a = 0 # type: A[t] [out] [case testTestExtendPrimitives] class C(bool): pass # E: 'bool' is not a valid base class class A(int): pass # ok class B(float): pass # ok class D(str): pass # ok [builtins fixtures/primitives.pyi] [out] [case testCyclicInheritance1] class A(A): pass # E: Cannot resolve name "A" (possible cyclic definition) [out] [case testCyclicInheritance2] class A(B): pass # E: Cannot resolve name "B" (possible cyclic definition) class B(A): pass [out] [case testAssignToTypeDef] import typing class A: pass A = None # E: Cannot assign to a type [out] [case testInvalidCastTargetSyntax] from typing import cast, TypeVar, Generic t = TypeVar('t') class C(Generic[t]): pass cast(str + str, None) # E: Cast target is not a type cast(C[str][str], None) # E: Cast target is not a type cast(C[str + str], None) # E: Cast target is not a type cast([int, str], None) # E: Bracketed expression "[...]" is not valid as a type \ # N: Did 
you mean "List[...]"? [out] [case testInvalidCastTargetType] from typing import cast x = 0 cast(x, None) # E: Variable "__main__.x" is not valid as a type cast(t, None) # E: Name 't' is not defined cast(__builtins__.x, None) # E: Name '__builtins__.x' is not defined [out] [case testInvalidCastTargetType2] from typing import cast x = 0 cast(str[str], None) # E: "str" expects no type arguments, but 1 given [out] [case testInvalidNumberOfArgsToCast] from typing import cast cast(str) # E: 'cast' expects 2 arguments cast(str, None, None) # E: 'cast' expects 2 arguments [out] [case testInvalidKindsOfArgsToCast] from typing import cast cast(str, *None) # E: 'cast' must be called with 2 positional arguments cast(str, target=None) # E: 'cast' must be called with 2 positional arguments [out] [case testInvalidAnyCall] from typing import Any Any(str, None) # E: Any(...) is no longer supported. Use cast(Any, ...) instead Any(arg=str) # E: Any(...) is no longer supported. Use cast(Any, ...) instead [out] [case testTypeListAsType] def f(x:[int, str]) -> None: # E: Bracketed expression "[...]" is not valid as a type \ # N: Did you mean "List[...]"? pass [out] [case testInvalidFunctionType] from typing import Callable x = None # type: Callable[int, str] y = None # type: Callable[int] z = None # type: Callable[int, int, int] [out] main:2: error: The first argument to Callable must be a list of types or "..." 
main:3: error: Please use "Callable[[], ]" or "Callable" main:4: error: Please use "Callable[[], ]" or "Callable" [case testAbstractGlobalFunction] import typing from abc import abstractmethod @abstractmethod def foo(): pass [out] main:3: error: 'abstractmethod' used with a non-method [case testAbstractNestedFunction] import typing from abc import abstractmethod def g() -> None: @abstractmethod def foo(): pass [out] main:4: error: 'abstractmethod' used with a non-method [case testInvalidTypeDeclaration] import typing def f(): pass f() = 1 # type: int [out] main:3: error: can't assign to function call [case testIndexedAssignmentWithTypeDeclaration] import typing None[1] = 1 # type: int [out] main:2: error: Unexpected type declaration [case testNonSelfMemberAssignmentWithTypeDeclaration] import typing None.x = 1 # type: int [out] main:2: error: Type cannot be declared in assignment to non-self attribute [case testNonSelfMemberAssignmentWithTypeDeclarationInMethod] import typing class A: def f(self, x) -> None: x.y = 1 # type: int [out] main:4: error: Type cannot be declared in assignment to non-self attribute [case testInvalidTypeInTypeApplication] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass A[TypeVar] # E: Variable "typing.TypeVar" is not valid as a type [out] [case testInvalidTypeInTypeApplication2] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass A[1] # E: Invalid type: try using Literal[1] instead? 
[out] [case testVariableDeclWithInvalidNumberOfTypes] x, y = 1, 2 # type: int, str, int # E: Incompatible number of tuple items [out] [case testVariableDeclWithInvalidNumberOfTypesNested] x, (y, z) = 1, (2, 3) # type: int, (str, int, int) # E: Incompatible number of tuple items [out] [case testVariableDeclWithInvalidNumberOfTypesNested2] x, (y, z) = 1, (2, 3) # type: int, (str, ) # E: Incompatible number of tuple items [out] [case testVariableDeclWithInvalidNumberOfTypesNested3] x, (y, z) = 1, (2, 3) # type: int, str # E: Tuple type expected for multiple variables [out] [case testVariableDeclWithInvalidNumberOfTypesNested4] x, (y, z) = 1, (2, 3) # type: int, str, int # E: Incompatible number of tuple items [out] [case testVariableDeclWithInvalidNumberOfTypesNested5] x, (y, ) = 1, (2, ) # type: int, str # E: Tuple type expected for multiple variables [out] [case testVariableDeclWithInvalidType] x, y = 1, 2 # type: int # E: Tuple type expected for multiple variables [out] [case testInvalidLvalueWithExplicitType] a = 1 a() = None # type: int # E: can't assign to function call [out] [case testInvalidLvalueWithExplicitType2] a = 1 a[1] = None # type: int # E: Unexpected type declaration a.x = None # type: int \ # E: Type cannot be declared in assignment to non-self attribute [out] [case testInvalidLvalueWithExplicitType3] a = 1 a.y, a.x = None, None # type: int, int \ # E: Type cannot be declared in assignment to non-self attribute a[1], a[2] = None, None # type: int, int \ # E: Unexpected type declaration [out] [case testMissingGenericImport] from typing import TypeVar T = TypeVar('T') class A(Generic[T]): pass [out] main:3: error: Name 'Generic' is not defined [case testInvalidTypeWithinGeneric] from typing import Generic class A(Generic[int]): pass # E: Free type variable expected in Generic[...] 
[out] [case testInvalidTypeWithinNestedGenericClass] from typing import Generic, TypeVar T = TypeVar('T') class A(Generic[T]): class B(Generic[T]): pass \ # E: Free type variable expected in Generic[...] [out] [case testIncludingGenericTwiceInBaseClassList] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Generic[T], Generic[S]): pass \ # E: Only single Generic[...] or Protocol[...] can be in bases [out] [case testInvalidMetaclass] class A(metaclass=x): pass # E: Name 'x' is not defined [out] [case testInvalidQualifiedMetaclass] import abc class A(metaclass=abc.Foo): pass # E: Name 'abc.Foo' is not defined [out] [case testNonClassMetaclass] def f(): pass class A(metaclass=f): pass # E: Invalid metaclass 'f' [out] [case testInvalidTypevarArguments] from typing import TypeVar a = TypeVar() # E: Too few arguments for TypeVar() b = TypeVar(x='b') # E: TypeVar() expects a string literal as first argument c = TypeVar(1) # E: TypeVar() expects a string literal as first argument d = TypeVar('D') # E: String argument 1 'D' to TypeVar(...) does not match variable name 'd' e = TypeVar('e', int, str, x=1) # E: Unexpected argument to TypeVar(): x f = TypeVar('f', (int, str), int) # E: Type expected g = TypeVar('g', int) # E: TypeVar cannot have only a single constraint h = TypeVar('h', x=(int, str)) # E: Unexpected argument to TypeVar(): x i = TypeVar('i', bound=1) # E: TypeVar 'bound' must be a type [out] [case testMoreInvalidTypevarArguments] from typing import TypeVar T = TypeVar('T', int, str, bound=bool) # E: TypeVar cannot have both values and an upper bound S = TypeVar('S', covariant=True, contravariant=True) \ # E: TypeVar cannot be both covariant and contravariant [builtins fixtures/bool.pyi] [case testInvalidTypevarValues] from typing import TypeVar b = TypeVar('b', *[int]) # E: Unexpected argument to TypeVar() c = TypeVar('c', int, 2) # E: Invalid type: try using Literal[2] instead? 
[out] [case testObsoleteTypevarValuesSyntax] from typing import TypeVar a = TypeVar('a', values=(int, str)) [out] main:2: error: TypeVar 'values' argument not supported main:2: error: Use TypeVar('T', t, ...) instead of TypeVar('T', values=(t, ...)) [case testLocalTypevarScope] from typing import TypeVar def f() -> None: T = TypeVar('T') def g(x: T) -> None: pass # E: Name 'T' is not defined [out] [case testClassTypevarScope] from typing import TypeVar class A: T = TypeVar('T') def g(x: T) -> None: pass # E: Name 'T' is not defined [out] [case testRedefineVariableAsTypevar] from typing import TypeVar x = 0 x = TypeVar('x') # E: Cannot redefine 'x' as a type variable [out] [case testTypevarWithType] from typing import TypeVar x = TypeVar('x') # type: int # E: Cannot declare the type of a type variable [out] [case testRedefineTypevar] from typing import TypeVar t = TypeVar('t') t = 1 # E: Invalid assignment target [out] [case testRedefineTypevar2] from typing import TypeVar t = TypeVar('t') def t(): pass # E: Name 't' already defined on line 2 [out] [case testRedefineTypevar3] from typing import TypeVar t = TypeVar('t') class t: pass # E: Name 't' already defined on line 2 [out] [case testRedefineTypevar4] from typing import TypeVar t = TypeVar('t') from typing import Generic as t # E: Name 't' already defined on line 2 [out] [case testInvalidStrLiteralType] def f(x: 'foo'): pass # E: Name 'foo' is not defined [out] [case testInvalidStrLiteralStrayBrace] def f(x: 'int['): pass # E: Invalid type comment or annotation [out] [case testInvalidStrLiteralSpaces] def f(x: 'A B'): pass # E: Invalid type comment or annotation [out] [case testInvalidMultilineLiteralType] def f() -> "A\nB": pass # E: Invalid type comment or annotation [out] [case testInconsistentOverload] from typing import overload def dec(x): pass @dec # E: The implementation for an overloaded function must come last def f(): pass @overload def f(): pass [out] [case testInconsistentOverload2] from typing 
import overload def dec(x): pass @dec # E: The implementation for an overloaded function must come last def f(): pass @overload def f(): pass [out] [case testMissingOverloadDecorator] from typing import overload def dec(x): pass @dec def f(): pass @dec # E: Name 'f' already defined on line 3 def f(): pass [out] [case testIncompatibleSignatureInComment] import typing def f(): # type: (int) -> int pass def g(x): # type: () -> int pass [out] main:2: error: Type signature has too many arguments main:4: error: Type signature has too few arguments [case testStaticmethodAndNonMethod] import typing @staticmethod def f(): pass class A: def g(self) -> None: @staticmethod def h(): pass [builtins fixtures/staticmethod.pyi] [out] main:2: error: 'staticmethod' used with a non-method main:6: error: 'staticmethod' used with a non-method [case testClassmethodAndNonMethod] import typing @classmethod def f(): pass class A: def g(self) -> None: @classmethod def h(): pass [builtins fixtures/classmethod.pyi] [out] main:2: error: 'classmethod' used with a non-method main:6: error: 'classmethod' used with a non-method [case testNonMethodProperty] import typing @property # E: 'property' used with a non-method def f() -> int: pass [builtins fixtures/property.pyi] [out] [case testInvalidArgCountForProperty] import typing class A: @property def f(self, x) -> int: pass # E: Too many arguments @property def g() -> int: pass # E: Method must have at least one argument [builtins fixtures/property.pyi] [out] [case testOverloadedProperty] from typing import overload class A: @overload # E: Decorated property not supported @property def f(self) -> int: pass @property # E: Decorated property not supported @overload def f(self) -> int: pass [builtins fixtures/property.pyi] [out] [case testOverloadedProperty2] from typing import overload class A: @overload # E: An overloaded function outside a stub file must have an implementation def f(self) -> int: pass @property # E: Decorated property not supported 
@overload def f(self) -> int: pass [builtins fixtures/property.pyi] [out] [case testDecoratedProperty] import typing def dec(f): pass class A: @dec # E: Decorated property not supported @property def f(self) -> int: pass @property # E: Decorated property not supported @dec def g(self) -> int: pass [builtins fixtures/property.pyi] [out] [case testImportTwoModulesWithSameNameInFunction] import typing def f() -> None: import x import y as x # E: Name 'x' already defined (by an import) x.y [file x.py] y = 1 [file y.py] [out] [case testImportTwoModulesWithSameNameInGlobalContext] import typing import x import y as x # E: Name 'x' already defined (by an import) x.y [file x.py] y = 1 [file y.py] [out] [case testListTypeAliasWithoutImport] import typing def f() -> List[int]: pass [builtins fixtures/list.pyi] [out] main:2: error: Name 'List' is not defined main:2: note: Did you forget to import it from "typing"? (Suggestion: "from typing import List") [case testInvalidWithTarget] def f(): pass with f() as 1: pass # E: can't assign to literal [out] [case testInvalidTypeAnnotation] import typing def f() -> None: 1[2] = 1 # type: int [out] main:3: error: Unexpected type declaration [case testInvalidTypeAnnotation2] import typing def f() -> None: f() = 1 # type: int [out] main:3: error: can't assign to function call [case testInvalidReferenceToAttributeOfOuterClass] class A: class X: pass class B: y = X # E: Name 'X' is not defined [out] [case testStubPackage] from m import x from m import y # E: Module 'm' has no attribute 'y' [file m/__init__.pyi] x = 1 [out] [case testStubPackageSubModule] from m import x from m import y # E: Module 'm' has no attribute 'y' from m.m2 import y from m.m2 import z # E: Module 'm.m2' has no attribute 'z' [file m/__init__.pyi] x = 1 [file m/m2.pyi] y = 1 [out] [case testMissingStubForThirdPartyModule] import __dummy_third_party1 [out] main:1: error: No library stub file for module '__dummy_third_party1' main:1: note: (Stub files are from 
https://github.com/python/typeshed) [case testMissingStubForStdLibModule] import __dummy_stdlib1 [out] main:1: error: No library stub file for standard library module '__dummy_stdlib1' main:1: note: (Stub files are from https://github.com/python/typeshed) [case testMissingStubForTwoModules] import __dummy_stdlib1 import __dummy_stdlib2 [out] main:1: error: No library stub file for standard library module '__dummy_stdlib1' main:1: note: (Stub files are from https://github.com/python/typeshed) main:2: error: No library stub file for standard library module '__dummy_stdlib2' [case testListComprehensionSpecialScoping] class A: x = 1 y = 1 z = 1 [x for i in z if y] [out] main:5: error: Name 'x' is not defined main:5: error: Name 'y' is not defined [case testTypeRedeclarationNoSpuriousWarnings] from typing import Tuple a = 1 # type: int a = 's' # type: str a = ('spam', 'spam', 'eggs', 'spam') # type: Tuple[str] [out] main:3: error: Name 'a' already defined on line 2 main:4: error: Name 'a' already defined on line 2 [case testDuplicateDefFromImport] from m import A class A: # E: Name 'A' already defined (possibly by an import) pass [file m.py] class A: pass [out] [case testDuplicateDefDec] from typing import Any def dec(x: Any) -> Any: return x @dec def f() -> None: pass @dec # E: Name 'f' already defined on line 4 def f() -> None: pass [out] [case testDuplicateDefOverload] from typing import overload, Any if 1: @overload def f(x: int) -> None: pass @overload def f(x: str) -> None: pass def f(x: Any) -> None: pass else: def f(x: str) -> None: # E: Name 'f' already defined on line 3 pass [out] [case testDuplicateDefNT] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) class N: # E: Name 'N' already defined on line 2 pass [out] [case testDuplicateDefTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) class Point: # E: Name 'Point' already defined on line 2 pass [builtins fixtures/dict.pyi] [out] [case 
testTypeVarClassDup] from typing import TypeVar T = TypeVar('T') class T: ... # E: Name 'T' already defined on line 2 [out] [case testAliasDup] from typing import List A = List[int] class A: ... # E: Name 'A' already defined on line 2 [builtins fixtures/list.pyi] [out] [case testNoInvalidTypeInDynamicFunctions] from typing import Dict, TypeVar T = TypeVar('T') def f(): # Note no annotation x: Dict[str, T] = {} y: T z: x def nested(): pass t: nested def g() -> None: x: Dict[str, T] = {} # E: Type variable "__main__.T" is unbound \ # N: (Hint: Use "Generic[T]" or "Protocol[T]" base class to bind "T" inside a class) \ # N: (Hint: Use "T" in function signature to bind "T" inside a function) [builtins fixtures/dict.pyi] [out] mypy-0.761/test-data/unit/semanal-expressions.test0000644€tŠÔÚ€2›s®0000001656513576752246026503 0ustar jukkaDROPBOX\Domain Users00000000000000[case testLiterals] (1, 'x', 1.1, 1.1j) [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( IntExpr(1) StrExpr(x) FloatExpr(1.1) ComplexExpr(1.1j)))) [case testMemberExpr] x = 1 x.y [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) ExpressionStmt:2( MemberExpr:2( NameExpr(x [__main__.x]) y))) [case testIndexExpr] x = y = 1 x[y] [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( IndexExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))) [case testBinaryOperations] x = y = 1 x + y x | y x is not y x == y [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( OpExpr:2( + NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))) ExpressionStmt:3( OpExpr:3( | NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))) ExpressionStmt:4( ComparisonExpr:4( is not NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))) ExpressionStmt:5( ComparisonExpr:5( == NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))) [case testUnaryOperations] x = 1 -x ~x +x not x 
[out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) ExpressionStmt:2( UnaryExpr:2( - NameExpr(x [__main__.x]))) ExpressionStmt:3( UnaryExpr:3( ~ NameExpr(x [__main__.x]))) ExpressionStmt:4( UnaryExpr:4( + NameExpr(x [__main__.x]))) ExpressionStmt:5( UnaryExpr:5( not NameExpr(x [__main__.x])))) [case testSlices] x = y = z = 1 x[y:z:x] x[:] x[:y] [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y]) NameExpr(z* [__main__.z])) IntExpr(1)) ExpressionStmt:2( IndexExpr:2( NameExpr(x [__main__.x]) SliceExpr:2( NameExpr(y [__main__.y]) NameExpr(z [__main__.z]) NameExpr(x [__main__.x])))) ExpressionStmt:3( IndexExpr:3( NameExpr(x [__main__.x]) SliceExpr:3( ))) ExpressionStmt:4( IndexExpr:4( NameExpr(x [__main__.x]) SliceExpr:4( NameExpr(y [__main__.y]))))) [case testTupleLiteral] x = y = 1 x, y [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( TupleExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))) [case testListLiteral] x = y = 1 ([], [x, y]) [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( TupleExpr:2( ListExpr:2() ListExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))))) [case testDictLiterals] x = y = 1 { x : y, y : x } [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( DictExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y]) NameExpr(y [__main__.y]) NameExpr(x [__main__.x])))) [case testListComprehension] a = 0 ([x + 1 for x in a]) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) ExpressionStmt:2( ListComprehension:2( GeneratorExpr:2( OpExpr:2( + NameExpr(x [l]) IntExpr(1)) NameExpr(x* [l]) NameExpr(a [__main__.a]))))) [case testListComprehensionInFunction] def f(a) -> None: [x for x in a] [out] MypyFile:1( FuncDef:1( f Args( 
Var(a)) def (a: Any) Block:1( ExpressionStmt:2( ListComprehension:2( GeneratorExpr:2( NameExpr(x [l]) NameExpr(x* [l]) NameExpr(a [l]))))))) [case testListComprehensionWithCondition] a = 0 b = [x for x in a if x] [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) AssignmentStmt:2( NameExpr(b* [__main__.b]) ListComprehension:2( GeneratorExpr:2( NameExpr(x [l]) NameExpr(x* [l]) NameExpr(a [__main__.a]) NameExpr(x [l]))))) [case testSetComprehension] a = 0 ({x + 1 for x in a}) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) ExpressionStmt:2( SetComprehension:2( GeneratorExpr:2( OpExpr:2( + NameExpr(x [l]) IntExpr(1)) NameExpr(x* [l]) NameExpr(a [__main__.a]))))) [case testSetComprehensionWithCondition] a = 0 b = {x for x in a if x} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) AssignmentStmt:2( NameExpr(b* [__main__.b]) SetComprehension:2( GeneratorExpr:2( NameExpr(x [l]) NameExpr(x* [l]) NameExpr(a [__main__.a]) NameExpr(x [l]))))) [case testDictionaryComprehension] a = 0 ({x: x + 1 for x in a}) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) ExpressionStmt:2( DictionaryComprehension:2( NameExpr(x [l]) OpExpr:2( + NameExpr(x [l]) IntExpr(1)) NameExpr(x* [l]) NameExpr(a [__main__.a])))) [case testDictionaryComprehensionWithCondition] a = 0 b = {x: x + 1 for x in a if x} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) AssignmentStmt:2( NameExpr(b* [__main__.b]) DictionaryComprehension:2( NameExpr(x [l]) OpExpr:2( + NameExpr(x [l]) IntExpr(1)) NameExpr(x* [l]) NameExpr(a [__main__.a]) NameExpr(x [l])))) [case testGeneratorExpression] a = 0 (x for x in a) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) NameExpr(x* [l]) NameExpr(a [__main__.a])))) [case testGeneratorExpressionNestedIndex] a = 0 (x for x, (y, z) in a) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* 
[__main__.a]) IntExpr(0)) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) TupleExpr:2( NameExpr(x* [l]) TupleExpr:2( NameExpr(y* [l]) NameExpr(z* [l]))) NameExpr(a [__main__.a])))) [case testLambda] x = 0 lambda: x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(0)) ExpressionStmt:2( LambdaExpr:2( Block:2( ReturnStmt:2( NameExpr(x [__main__.x])))))) [case testLambdaWithArguments] lambda x, y: x + y [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( Var(x) Var(y)) Block:1( ReturnStmt:1( OpExpr:1( + NameExpr(x [l]) NameExpr(y [l]))))))) [case testConditionalExpression] int if None else str [out] MypyFile:1( ExpressionStmt:1( ConditionalExpr:1( Condition( NameExpr(None [builtins.None])) NameExpr(int [builtins.int]) NameExpr(str [builtins.str])))) [case testDictWithKeywordArgs] dict(a=1, b=str()) [builtins fixtures/dict.pyi] [out] MypyFile:1( ExpressionStmt:1( DictExpr:1( StrExpr(a) IntExpr(1) StrExpr(b) CallExpr:1( NameExpr(str [builtins.str]) Args())))) mypy-0.761/test-data/unit/semanal-modules.test0000644€tŠÔÚ€2›s®0000003551613576752246025566 0ustar jukkaDROPBOX\Domain Users00000000000000-- NOTE: If a module has a name starting or ending with _, it is skipped in -- output. 
[case testImport] import x x.y [file x.py] y = 1 [out] MypyFile:1( Import:1(x) ExpressionStmt:2( MemberExpr:2( NameExpr(x) y [x.y]))) MypyFile:1( tmp/x.py AssignmentStmt:1( NameExpr(y* [x.y]) IntExpr(1))) [case testImportedNameInType] import m x = None # type: m.c [file m.py] class c: pass [out] MypyFile:1( Import:1(m) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) m.c)) MypyFile:1( tmp/m.py ClassDef:1( c PassStmt:1())) [case testImportFrom] from m import y x = y [file m.py] y = 1 [out] MypyFile:1( ImportFrom:1(m, [y]) AssignmentStmt:2( NameExpr(x* [__main__.x]) NameExpr(y [m.y]))) MypyFile:1( tmp/m.py AssignmentStmt:1( NameExpr(y* [m.y]) IntExpr(1))) [case testImportFromType] from m import c x = None # type: c [file m.py] class c: pass [out] MypyFile:1( ImportFrom:1(m, [c]) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) m.c)) MypyFile:1( tmp/m.py ClassDef:1( c PassStmt:1())) [case testImportMultiple] import _m, _n _m.x, _n.y [file _m.py] x = 1 [file _n.py] y = 2 [out] MypyFile:1( Import:1(_m, _n) ExpressionStmt:2( TupleExpr:2( MemberExpr:2( NameExpr(_m) x [_m.x]) MemberExpr:2( NameExpr(_n) y [_n.y])))) [case testImportAs] import _m as n n.x [file _m.py] x = 1 [out] MypyFile:1( Import:1(_m : n) ExpressionStmt:2( MemberExpr:2( NameExpr(n [_m]) x [_m.x]))) [case testImportFromMultiple] from _m import x, y x, y [file _m.py] x = y = 1 [out] MypyFile:1( ImportFrom:1(_m, [x, y]) ExpressionStmt:2( TupleExpr:2( NameExpr(x [_m.x]) NameExpr(y [_m.y])))) [case testImportFromAs] from _m import y as z z [file _m.py] y = 1 [out] MypyFile:1( ImportFrom:1(_m, [y : z]) ExpressionStmt:2( NameExpr(z [_m.y]))) [case testAccessImportedName] from m import x y = x [file m.py] from _n import x [file _n.py] x = 1 [out] MypyFile:1( ImportFrom:1(m, [x]) AssignmentStmt:2( NameExpr(y* [__main__.y]) NameExpr(x [_n.x]))) MypyFile:1( tmp/m.py ImportFrom:1(_n, [x])) [case testAccessImportedName2] import _m y = _m.x [file _m.py] from _n import x 
[file _n.py] x = 1 [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( NameExpr(y* [__main__.y]) MemberExpr:2( NameExpr(_m) x [_n.x]))) [case testAccessingImportedNameInType] from _m import c x = None # type: c [file _m.py] from _n import c [file _n.py] class c: pass [out] MypyFile:1( ImportFrom:1(_m, [c]) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) _n.c)) [case testAccessingImportedNameInType2] import _m x = None # type: _m.c [file _m.py] from _n import c [file _n.py] class c: pass [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) _n.c)) [case testAccessingImportedModule] from _m import _n _n.x [file _m.py] import _n [file _n.py] x = 1 [out] MypyFile:1( ImportFrom:1(_m, [_n]) ExpressionStmt:2( MemberExpr:2( NameExpr(_n) x [_n.x]))) [case testAccessingImportedModule] import _m _m._n.x [file _m.py] import _n [file _n.py] x = 1 [out] MypyFile:1( Import:1(_m) ExpressionStmt:2( MemberExpr:2( MemberExpr:2( NameExpr(_m) _n) x [_n.x]))) [case testAccessTypeViaDoubleIndirection] from _m import c a = None # type: c [file _m.py] from _n import c [file _n.py] class c: pass [out] MypyFile:1( ImportFrom:1(_m, [c]) AssignmentStmt:2( NameExpr(a [__main__.a]) NameExpr(None [builtins.None]) _n.c)) [case testAccessTypeViaDoubleIndirection2] import _m a = None # type: _m.c [file _m.py] from _n import c [file _n.py] class c: pass [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( NameExpr(a [__main__.a]) NameExpr(None [builtins.None]) _n.c)) [case testImportAsterisk] from _m import * x, y [file _m.py] x = y = 1 [out] MypyFile:1( ImportAll:1(_m) ExpressionStmt:2( TupleExpr:2( NameExpr(x [_m.x]) NameExpr(y [_m.y])))) [case testImportAsteriskAndImportedNames] from _m import * n_.x, y [file _m.py] import n_ from n_ import y [file n_.py] x = y = 1 [out] MypyFile:1( ImportAll:1(_m) ExpressionStmt:2( TupleExpr:2( MemberExpr:2( NameExpr(n_) x [n_.x]) NameExpr(y [n_.y])))) [case 
testImportAsteriskAndImportedNamesInTypes] from _m import * x = None # type: n_.c y = None # type: d [file _m.py] import n_ from n_ import d [file n_.py] class c: pass class d: pass [out] MypyFile:1( ImportAll:1(_m) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) n_.c) AssignmentStmt:3( NameExpr(y [__main__.y]) NameExpr(None [builtins.None]) n_.d)) [case testModuleInSubdir] import _m _m.x [file _m/__init__.py] x = 1 [out] MypyFile:1( Import:1(_m) ExpressionStmt:2( MemberExpr:2( NameExpr(_m) x [_m.x]))) [case testNestedModules] import m.n m.n.x, m.y [file m/__init__.py] y = 1 [file m/n.py] x = 1 [out] MypyFile:1( Import:1(m.n) ExpressionStmt:2( TupleExpr:2( MemberExpr:2( MemberExpr:2( NameExpr(m) n [m.n]) x [m.n.x]) MemberExpr:2( NameExpr(m) y [m.y])))) MypyFile:1( tmp/m/n.py AssignmentStmt:1( NameExpr(x* [m.n.x]) IntExpr(1))) [case testImportFromSubmodule] from m._n import x x [file m/__init__.py] [file m/_n.py] x = 1 [out] MypyFile:1( ImportFrom:1(m._n, [x]) ExpressionStmt:2( NameExpr(x [m._n.x]))) [case testImportAllFromSubmodule] from m._n import * x, y [file m/__init__.py] [file m/_n.py] x = y = 1 [out] MypyFile:1( ImportAll:1(m._n) ExpressionStmt:2( TupleExpr:2( NameExpr(x [m._n.x]) NameExpr(y [m._n.y])))) [case testSubmodulesAndTypes] import m._n x = None # type: m._n.c [file m/__init__.py] [file m/_n.py] class c: pass [out] MypyFile:1( Import:1(m._n) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) m._n.c)) [case testSubmodulesAndTypes] from m._n import c x = None # type: c [file m/__init__.py] [file m/_n.py] class c: pass [out] MypyFile:1( ImportFrom:1(m._n, [c]) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) m._n.c)) [case testFromPackageImportModule] from m import _n _n.x [file m/__init__.py] [file m/_n.py] x = 1 [out] MypyFile:1( ImportFrom:1(m, [_n]) ExpressionStmt:2( MemberExpr:2( NameExpr(_n [m._n]) x [m._n.x]))) [case testDeeplyNestedModule] import m.n.k m.n.k.x m.n.b m.a 
[file m/__init__.py] a = 1 [file m/n/__init__.py] b = 1 [file m/n/k.py] x = 1 [out] MypyFile:1( Import:1(m.n.k) ExpressionStmt:2( MemberExpr:2( MemberExpr:2( MemberExpr:2( NameExpr(m) n [m.n]) k [m.n.k]) x [m.n.k.x])) ExpressionStmt:3( MemberExpr:3( MemberExpr:3( NameExpr(m) n [m.n]) b [m.n.b])) ExpressionStmt:4( MemberExpr:4( NameExpr(m) a [m.a]))) MypyFile:1( tmp/m/n/k.py AssignmentStmt:1( NameExpr(x* [m.n.k.x]) IntExpr(1))) [case testImportInSubmodule] import m._n y = m._n.x [file m/__init__.py] [file m/_n.py] from m._k import x [file m/_k.py] x = 1 [out] MypyFile:1( Import:1(m._n) AssignmentStmt:2( NameExpr(y* [__main__.y]) MemberExpr:2( MemberExpr:2( NameExpr(m) _n [m._n]) x [m._k.x]))) [case testBuiltinsUsingModule] o = None # type: __builtins__.object [out] MypyFile:1( AssignmentStmt:1( NameExpr(o [__main__.o]) NameExpr(None [builtins.None]) builtins.object)) [case testImplicitAccessToBuiltins] object [out] MypyFile:1( ExpressionStmt:1( NameExpr(object [builtins.object]))) [case testAssignmentToModuleAttribute] import _m _m.x = ( _m.x) [file _m.py] x = None [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( MemberExpr:2( NameExpr(_m) x [_m.x]) MemberExpr:3( NameExpr(_m) x [_m.x]))) [case testAssignmentThatRefersToModule] import _m _m.x[None] = None [file _m.py] x = None [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( IndexExpr:2( MemberExpr:2( NameExpr(_m) x [_m.x]) NameExpr(None [builtins.None])) NameExpr(None [builtins.None]))) [case testImportInBlock] if 1: import _x _x.y [file _x.py] y = 1 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( Import:2(_x) ExpressionStmt:3( MemberExpr:3( NameExpr(_x) y [_x.y]))))) [case testImportInFunction] def f() -> None: import _x _x.y [file _x.py] y = 1 [out] MypyFile:1( FuncDef:1( f def () Block:1( Import:2(_x) ExpressionStmt:3( MemberExpr:3( NameExpr(_x) y [_x.y]))))) [case testImportInClassBody] class A: from _x import y z = y [file _x.py] y = 1 [out] MypyFile:1( ClassDef:1( A ImportFrom:2(_x, [y]) AssignmentStmt:3( 
NameExpr(z* [m]) NameExpr(y [_x.y])))) [case testImportInClassBody2] class A: import _x z = _x.y [file _x.py] y = 1 [out] MypyFile:1( ClassDef:1( A Import:2(_x) AssignmentStmt:3( NameExpr(z* [m]) MemberExpr:3( NameExpr(_x) y [_x.y])))) [case testImportModuleTwice] def f() -> None: import x import x x.y [file x.py] y = 1 [out] MypyFile:1( FuncDef:1( f def () Block:1( Import:2(x) Import:3(x) ExpressionStmt:4( MemberExpr:4( NameExpr(x) y [x.y]))))) MypyFile:1( tmp/x.py AssignmentStmt:1( NameExpr(y* [x.y]) IntExpr(1))) [case testRelativeImport0] import m.x m.x.z.y [file m/__init__.py] [file m/x.py] from . import z [file m/z.py] y = 1 [out] MypyFile:1( Import:1(m.x) ExpressionStmt:2( MemberExpr:2( MemberExpr:2( MemberExpr:2( NameExpr(m) x [m.x]) z [m.z]) y [m.z.y]))) MypyFile:1( tmp/m/x.py ImportFrom:1(., [z])) MypyFile:1( tmp/m/z.py AssignmentStmt:1( NameExpr(y* [m.z.y]) IntExpr(1))) [case testRelativeImport1] import m.t.b as b b.x.y b.z.y [file m/__init__.py] [file m/x.py] y = 1 [file m/z.py] y = 3 [file m/t/__init__.py] [file m/t/b.py] from .. 
import x, z [out] MypyFile:1( Import:1(m.t.b : b) ExpressionStmt:2( MemberExpr:2( MemberExpr:2( NameExpr(b [m.t.b]) x [m.x]) y [m.x.y])) ExpressionStmt:3( MemberExpr:3( MemberExpr:3( NameExpr(b [m.t.b]) z [m.z]) y [m.z.y]))) MypyFile:1( tmp/m/t/b.py ImportFrom:1(.., [x, z])) MypyFile:1( tmp/m/x.py AssignmentStmt:1( NameExpr(y* [m.x.y]) IntExpr(1))) MypyFile:1( tmp/m/z.py AssignmentStmt:1( NameExpr(y* [m.z.y]) IntExpr(3))) [case testRelativeImport2] import m.t.b as b b.xy b.zy [file m/__init__.py] [file m/x.py] y = 1 [file m/z.py] y = 3 [file m/t/__init__.py] [file m/t/b.py] from ..x import y as xy from ..z import y as zy [out] MypyFile:1( Import:1(m.t.b : b) ExpressionStmt:2( MemberExpr:2( NameExpr(b [m.t.b]) xy [m.x.y])) ExpressionStmt:3( MemberExpr:3( NameExpr(b [m.t.b]) zy [m.z.y]))) MypyFile:1( tmp/m/t/b.py ImportFrom:1(..x, [y : xy]) ImportFrom:2(..z, [y : zy])) MypyFile:1( tmp/m/x.py AssignmentStmt:1( NameExpr(y* [m.x.y]) IntExpr(1))) MypyFile:1( tmp/m/z.py AssignmentStmt:1( NameExpr(y* [m.z.y]) IntExpr(3))) [case testRelativeImport3] import m.t m.zy m.xy m.t.y [file m/__init__.py] from .x import * from .z import * [file m/x.py] from .z import zy as xy [file m/z.py] zy = 3 [file m/t/__init__.py] from .b import * [file m/t/b.py] from .. 
import xy as y [out] MypyFile:1( Import:1(m.t) ExpressionStmt:2( MemberExpr:2( NameExpr(m) zy [m.z.zy])) ExpressionStmt:3( MemberExpr:3( NameExpr(m) xy [m.z.zy])) ExpressionStmt:4( MemberExpr:4( MemberExpr:4( NameExpr(m) t [m.t]) y [m.z.zy]))) MypyFile:1( tmp/m/t/b.py ImportFrom:1(.., [xy : y])) MypyFile:1( tmp/m/x.py ImportFrom:1(.z, [zy : xy])) MypyFile:1( tmp/m/z.py AssignmentStmt:1( NameExpr(zy* [m.z.zy]) IntExpr(3))) [case testRelativeImportFromSameModule] import m.x [file m/__init__.py] [file m/x.py] from .x import nonexistent [out] tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent' [case testImportFromSameModule] import m.x [file m/__init__.py] [file m/x.py] from m.x import nonexistent [out] tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent' [case testImportMisspellingSingleCandidate] import f [file m/__init__.py] [file m/x.py] def some_function(): pass [file f.py] from m.x import somefunction [out] tmp/f.py:1: error: Module 'm.x' has no attribute 'somefunction'; maybe "some_function"? [case testImportMisspellingMultipleCandidates] import f [file m/__init__.py] [file m/x.py] def some_function(): pass def somef_unction(): pass [file f.py] from m.x import somefunction [out] tmp/f.py:1: error: Module 'm.x' has no attribute 'somefunction'; maybe "somef_unction" or "some_function"? [case testImportMisspellingMultipleCandidatesTruncated] import f [file m/__init__.py] [file m/x.py] def some_function(): pass def somef_unction(): pass def somefu_nction(): pass def somefun_ction(): pass [file f.py] from m.x import somefunction [out] tmp/f.py:1: error: Module 'm.x' has no attribute 'somefunction'; maybe "somefun_ction", "somefu_nction", or "somef_unction"? 
[case testFromImportAsInStub] from m import * x y # E: Name 'y' is not defined [file m.pyi] from m2 import x as x from m2 import y [file m2.py] x = 1 y = 2 [out] [case testFromImportAsInNonStub] from m_ import * x y [file m_.py] from m2_ import x as x from m2_ import y [file m2_.py] x = 1 y = 2 [out] MypyFile:1( ImportAll:1(m_) ExpressionStmt:2( NameExpr(x [m2_.x])) ExpressionStmt:3( NameExpr(y [m2_.y]))) [case testImportAsInStub] from m import * m2 m3 # E: Name 'm3' is not defined [file m.pyi] import m2 as m2 import m3 [file m2.py] [file m3.py] [out] [case testImportAsInNonStub] from m_ import * m2_ m3_ [file m_.py] import m2_ as m2_ import m3_ [file m2_.py] [file m3_.py] [out] MypyFile:1( ImportAll:1(m_) ExpressionStmt:2( NameExpr(m2_)) ExpressionStmt:3( NameExpr(m3_))) [case testErrorsInMultipleModules] import m x [file m.py] y [out] tmp/m.py:1: error: Name 'y' is not defined main:2: error: Name 'x' is not defined [case testImportTwice] import typing from x import a, a # ok (we could give a warning, but this is valid) def f() -> None: from x import a from x import a # ok import x import x # ok, since we may import multiple submodules of a package [file x.py] a = 1 [out] MypyFile:1( Import:1(typing) ImportFrom:2(x, [a, a]) FuncDef:3( f def () Block:3( ImportFrom:4(x, [a]) ImportFrom:5(x, [a]))) Import:6(x) Import:7(x)) MypyFile:1( tmp/x.py AssignmentStmt:1( NameExpr(a* [x.a]) IntExpr(1))) mypy-0.761/test-data/unit/semanal-namedtuple.test0000644€tŠÔÚ€2›s®0000001110713576752246026242 0ustar jukkaDROPBOX\Domain Users00000000000000-- Semantic analysis of named tuples [case testSimpleNamedtuple] from collections import namedtuple N = namedtuple('N', ['a']) def f() -> N: pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any])) FuncDef:3( f def () -> Tuple[Any, fallback=__main__.N] Block:3( PassStmt:3()))) [case testTwoItemNamedtuple] from collections import namedtuple N = namedtuple('N', 
['a', 'xyz']) def f() -> N: pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any, Any])) FuncDef:3( f def () -> Tuple[Any, Any, fallback=__main__.N] Block:3( PassStmt:3()))) [case testTwoItemNamedtupleWithTupleFieldNames] from collections import namedtuple N = namedtuple('N', ('a', 'xyz')) def f() -> N: pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any, Any])) FuncDef:3( f def () -> Tuple[Any, Any, fallback=__main__.N] Block:3( PassStmt:3()))) [case testTwoItemNamedtupleWithShorthandSyntax] from collections import namedtuple N = namedtuple('N', ' a xyz ') def f() -> N: pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any, Any])) FuncDef:3( f def () -> Tuple[Any, Any, fallback=__main__.N] Block:3( PassStmt:3()))) [case testNamedTupleWithItemTypes] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) [out] MypyFile:1( ImportFrom:1(typing, [NamedTuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str]))) [case testNamedTupleWithTupleFieldNamesWithItemTypes] from typing import NamedTuple N = NamedTuple('N', (('a', int), ('b', str))) [out] MypyFile:1( ImportFrom:1(typing, [NamedTuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str]))) [case testNamedTupleBaseClass] from collections import namedtuple N = namedtuple('N', ['x']) class A(N): pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any])) ClassDef:3( A TupleType( Tuple[Any, fallback=__main__.N]) BaseType( __main__.N) PassStmt:3())) [case testNamedTupleBaseClass2] from collections import namedtuple class A(namedtuple('N', ['x'])): pass [out] MypyFile:1( 
ImportFrom:1(collections, [namedtuple]) ClassDef:2( A TupleType( Tuple[Any, fallback=__main__.N@2]) BaseType( __main__.N@2) PassStmt:2())) [case testNamedTupleBaseClassWithItemTypes] from typing import NamedTuple class A(NamedTuple('N', [('x', int)])): pass [out] MypyFile:1( ImportFrom:1(typing, [NamedTuple]) ClassDef:2( A TupleType( Tuple[builtins.int, fallback=__main__.N@2]) BaseType( __main__.N@2) PassStmt:2())) -- Errors [case testNamedTupleWithTooFewArguments] from collections import namedtuple N = namedtuple('N') # E: Too few arguments for namedtuple() [case testNamedTupleWithInvalidName] from collections import namedtuple N = namedtuple(1, ['x']) # E: namedtuple() expects a string literal as the first argument [case testNamedTupleWithInvalidItems] from collections import namedtuple N = namedtuple('N', 1) # E: List or tuple literal expected as the second argument to namedtuple() [case testNamedTupleWithInvalidItems2] from collections import namedtuple N = namedtuple('N', ['x', 1]) # E: String literal expected as namedtuple() item [case testNamedTupleWithUnderscoreItemName] from collections import namedtuple N = namedtuple('N', ['_fallback']) # E: namedtuple() field names cannot start with an underscore: _fallback -- NOTE: The following code works at runtime but is not yet supported by mypy. -- Keyword arguments may potentially be supported in the future. 
[case testNamedTupleWithNonpositionalArgs] from collections import namedtuple N = namedtuple(typename='N', field_names=['x']) # E: Unexpected arguments to namedtuple() [case testInvalidNamedTupleBaseClass] from typing import NamedTuple class A(NamedTuple('N', [1])): pass # E: Tuple expected as NamedTuple() field class B(A): pass [case testInvalidNamedTupleBaseClass2] class A(NamedTuple('N', [1])): pass class B(A): pass [out] main:2: error: Unsupported dynamic base class "NamedTuple" main:2: error: Name 'NamedTuple' is not defined mypy-0.761/test-data/unit/semanal-python2.test0000644€tŠÔÚ€2›s®0000000300013576752246025500 0ustar jukkaDROPBOX\Domain Users00000000000000-- Python 2 semantic analysis test cases. [case testPrintStatement_python2] print int, None [out] MypyFile:1( PrintStmt:1( NameExpr(int [builtins.int]) NameExpr(None [builtins.None]) Newline)) [case testPrintStatementWithTarget] print >>int, None [out] MypyFile:1( PrintStmt:1( NameExpr(None [builtins.None]) Target( NameExpr(int [builtins.int])) Newline)) [case testExecStatement] exec None exec None in int exec None in int, str [out] MypyFile:1( ExecStmt:1( NameExpr(None [builtins.None])) ExecStmt:2( NameExpr(None [builtins.None]) NameExpr(int [builtins.int])) ExecStmt:3( NameExpr(None [builtins.None]) NameExpr(int [builtins.int]) NameExpr(str [builtins.str]))) [case testVariableLengthTuple_python2] from typing import Tuple, cast cast(Tuple[int, ...], ()) [builtins_py2 fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Tuple, cast]) ExpressionStmt:2( CastExpr:2( TupleExpr:2() builtins.tuple[builtins.int]))) [case testTupleArgList_python2] def f(x, (y, z)): x = y [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(__tuple_arg_2)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(y* [l]) NameExpr(z* [l])) NameExpr(__tuple_arg_2 [l])) AssignmentStmt:2( NameExpr(x [l]) NameExpr(y [l]))))) [case testBackquoteExpr_python2] `object` [out] MypyFile:1( ExpressionStmt:1( BackquoteExpr:1( NameExpr(object 
[builtins.object])))) mypy-0.761/test-data/unit/semanal-statements.test0000644€tŠÔÚ€2›s®0000004504613576752246026304 0ustar jukkaDROPBOX\Domain Users00000000000000[case testReturn] def f(x): return x def g(): return [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( ReturnStmt:1( NameExpr(x [l])))) FuncDef:2( g Block:2( ReturnStmt:2()))) [case testRaise] raise object() [out] MypyFile:1( RaiseStmt:1( CallExpr:1( NameExpr(object [builtins.object]) Args()))) [case testYield] def f(): yield f [out] MypyFile:1( FuncDef:1( f Generator Block:1( ExpressionStmt:1( YieldExpr:1( NameExpr(f [__main__.f])))))) [case testAssert] assert object [out] MypyFile:1( AssertStmt:1( NameExpr(object [builtins.object]))) [case testOperatorAssignment] x = y = 1 x += y y |= x [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) OperatorAssignmentStmt:2( + NameExpr(x [__main__.x]) NameExpr(y [__main__.y])) OperatorAssignmentStmt:3( | NameExpr(y [__main__.y]) NameExpr(x [__main__.x]))) [case testWhile] x = y = 1 while x: y [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) WhileStmt:2( NameExpr(x [__main__.x]) Block:2( ExpressionStmt:3( NameExpr(y [__main__.y]))))) [case testFor] for x in object: x [out] MypyFile:1( ForStmt:1( NameExpr(x* [__main__.x]) NameExpr(object [builtins.object]) Block:1( ExpressionStmt:2( NameExpr(x [__main__.x]))))) [case testForInFunction] def f(): for x in f: x [out] MypyFile:1( FuncDef:1( f Block:1( ForStmt:2( NameExpr(x* [l]) NameExpr(f [__main__.f]) Block:2( ExpressionStmt:3( NameExpr(x [l]))))))) [case testMultipleForIndexVars] for x, y in []: x, y [out] MypyFile:1( ForStmt:1( TupleExpr:1( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) ListExpr:1() Block:1( ExpressionStmt:2( TupleExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))))) [case testForIndexVarScope] for x in []: pass x [out] MypyFile:1( ForStmt:1( NameExpr(x* 
[__main__.x]) ListExpr:1() Block:1( PassStmt:2())) ExpressionStmt:3( NameExpr(x [__main__.x]))) [case testForIndexVarScope2] def f(): for x in []: pass x [out] MypyFile:1( FuncDef:1( f Block:1( ForStmt:2( NameExpr(x* [l]) ListExpr:2() Block:2( PassStmt:3())) ExpressionStmt:4( NameExpr(x [l]))))) [case testReusingForLoopIndexVariable] # flags: --allow-redefinition for x in None: pass for x in None: pass [out] MypyFile:1( ForStmt:2( NameExpr(x'* [__main__.x']) NameExpr(None [builtins.None]) Block:2( PassStmt:3())) ForStmt:4( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None]) Block:4( PassStmt:5()))) [case testReusingForLoopIndexVariable2] # flags: --allow-redefinition def f(): for x in None: pass for x in None: pass [out] MypyFile:1( FuncDef:2( f Block:2( ForStmt:3( NameExpr(x* [l]) NameExpr(None [builtins.None]) Block:3( PassStmt:4())) ForStmt:5( NameExpr(x'* [l]) NameExpr(None [builtins.None]) Block:5( PassStmt:6()))))) [case testLoopWithElse] for x in []: pass else: x while 1: pass else: x [out] MypyFile:1( ForStmt:1( NameExpr(x* [__main__.x]) ListExpr:1() Block:1( PassStmt:2()) Else( ExpressionStmt:4( NameExpr(x [__main__.x])))) WhileStmt:5( IntExpr(1) Block:5( PassStmt:6()) Else( ExpressionStmt:8( NameExpr(x [__main__.x]))))) [case testBreak] while 1: break for x in []: break [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( BreakStmt:2())) ForStmt:3( NameExpr(x* [__main__.x]) ListExpr:3() Block:3( BreakStmt:4()))) [case testContinue] while 1: continue for x in []: continue [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( ContinueStmt:2())) ForStmt:3( NameExpr(x* [__main__.x]) ListExpr:3() Block:3( ContinueStmt:4()))) [case testIf] x = 1 if x: x elif x: x elif x: x else: x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) IfStmt:2( If( NameExpr(x [__main__.x])) Then( ExpressionStmt:3( NameExpr(x [__main__.x]))) Else( IfStmt:4( If( NameExpr(x [__main__.x])) Then( ExpressionStmt:5( NameExpr(x [__main__.x]))) Else( IfStmt:6( If( 
NameExpr(x [__main__.x])) Then( ExpressionStmt:7( NameExpr(x [__main__.x]))) Else( ExpressionStmt:9( NameExpr(x [__main__.x]))))))))) [case testSimpleIf] if object: object [out] MypyFile:1( IfStmt:1( If( NameExpr(object [builtins.object])) Then( ExpressionStmt:2( NameExpr(object [builtins.object]))))) [case testLvalues] x = y = 1 xx = 1 x.m = 1 x[y] = 1 x2, y2 = 1 [x3, y3] = 1 (x4, y4) = 1 [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) AssignmentStmt:2( NameExpr(xx* [__main__.xx]) IntExpr(1)) AssignmentStmt:3( MemberExpr:3( NameExpr(x [__main__.x]) m) IntExpr(1)) AssignmentStmt:4( IndexExpr:4( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])) IntExpr(1)) AssignmentStmt:5( TupleExpr:5( NameExpr(x2* [__main__.x2]) NameExpr(y2* [__main__.y2])) IntExpr(1)) AssignmentStmt:6( TupleExpr:6( NameExpr(x3* [__main__.x3]) NameExpr(y3* [__main__.y3])) IntExpr(1)) AssignmentStmt:7( TupleExpr:7( NameExpr(x4* [__main__.x4]) NameExpr(y4* [__main__.y4])) IntExpr(1))) [case testStarLvalues] # flags: --allow-redefinition *x, y = 1 *x, (y, *z) = 1 *(x, q), r = 1 [out] MypyFile:1( AssignmentStmt:2( TupleExpr:2( StarExpr:2( NameExpr(x'* [__main__.x'])) NameExpr(y'* [__main__.y'])) IntExpr(1)) AssignmentStmt:3( TupleExpr:3( StarExpr:3( NameExpr(x''* [__main__.x''])) TupleExpr:3( NameExpr(y* [__main__.y]) StarExpr:3( NameExpr(z* [__main__.z])))) IntExpr(1)) AssignmentStmt:4( TupleExpr:4( StarExpr:4( TupleExpr:4( NameExpr(x* [__main__.x]) NameExpr(q* [__main__.q]))) NameExpr(r* [__main__.r])) IntExpr(1))) [case testMultipleDefinition] # flags: --allow-redefinition x, y = 1 x, y = 2 [out] MypyFile:1( AssignmentStmt:2( TupleExpr:2( NameExpr(x'* [__main__.x']) NameExpr(y'* [__main__.y'])) IntExpr(1)) AssignmentStmt:3( TupleExpr:3( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(2))) [case testComplexDefinitions] (x) = 1 ([y]) = 2 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) 
AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y])) IntExpr(2))) [case testLocalComplexDefinition] def f(): (x) = 1 x [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(x* [l]) IntExpr(1)) ExpressionStmt:3( NameExpr(x [l]))))) [case testMultipleDefOnlySomeNew] x = 1 y, x = 1 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) NameExpr(x [__main__.x])) IntExpr(1))) [case testMultipleDefOnlySomeNewNestedTuples] x = 1 y, (x, z) = 1 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) TupleExpr:2( NameExpr(x [__main__.x]) NameExpr(z* [__main__.z]))) IntExpr(1))) [case testMultipleDefOnlySomeNewNestedLists] x = 1 if x: y, [x, z] = 1 [p, [x, r]] = 1 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) IfStmt:2( If( NameExpr(x [__main__.x])) Then( AssignmentStmt:3( TupleExpr:3( NameExpr(y* [__main__.y]) TupleExpr:3( NameExpr(x [__main__.x]) NameExpr(z* [__main__.z]))) IntExpr(1)) AssignmentStmt:4( TupleExpr:4( NameExpr(p* [__main__.p]) TupleExpr:4( NameExpr(x [__main__.x]) NameExpr(r* [__main__.r]))) IntExpr(1))))) [case testIndexedDel] x = y = 1 del x[y] [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) DelStmt:2( IndexExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))) [case testDelGlobalName] x = 1 del x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) DelStmt:2( NameExpr(x [__main__.x]))) [case testDelLocalName] def f(x): del x [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( DelStmt:2( NameExpr(x [l]))))) [case testDelMultipleThings] def f(x, y): del x, y[0] [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(y)) Block:1( DelStmt:2( TupleExpr:2( NameExpr(x [l]) IndexExpr:2( NameExpr(y [l]) IntExpr(0))))))) [case testDelMultipleThingsInvalid] def f(x, y) -> None: del x, y + 1 
[out] main:2: error: can't delete operator [case testTry] class c: pass try: c except object: c except c as e: e except: c finally: c [out] MypyFile:1( ClassDef:1( c PassStmt:1()) TryStmt:2( Block:2( ExpressionStmt:3( NameExpr(c [__main__.c]))) NameExpr(object [builtins.object]) Block:4( ExpressionStmt:5( NameExpr(c [__main__.c]))) NameExpr(c [__main__.c]) NameExpr(e* [__main__.e]) Block:6( ExpressionStmt:7( NameExpr(e [__main__.e]))) Block:8( ExpressionStmt:9( NameExpr(c [__main__.c]))) Finally( ExpressionStmt:11( NameExpr(c [__main__.c]))))) [case testTryElse] try: pass except: pass else: object [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) Block:3( PassStmt:4()) Else( ExpressionStmt:6( NameExpr(object [builtins.object]))))) [case testTryWithOnlyFinally] try: pass finally: pass [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) Finally( PassStmt:4()))) [case testExceptWithMultipleTypes] class c: pass try: pass except (c, object) as e: e [out] MypyFile:1( ClassDef:1( c PassStmt:1()) TryStmt:2( Block:2( PassStmt:3()) TupleExpr:4( NameExpr(c [__main__.c]) NameExpr(object [builtins.object])) NameExpr(e* [__main__.e]) Block:4( ExpressionStmt:5( NameExpr(e [__main__.e]))))) [case testRaiseWithoutExpr] raise [out] MypyFile:1( RaiseStmt:1()) [case testWith] with object: object [out] MypyFile:1( WithStmt:1( Expr( NameExpr(object [builtins.object])) Block:1( ExpressionStmt:2( NameExpr(object [builtins.object]))))) [case testWithAndVariable] with object as x: x [out] MypyFile:1( WithStmt:1( Expr( NameExpr(object [builtins.object])) Target( NameExpr(x* [__main__.x])) Block:1( ExpressionStmt:2( NameExpr(x [__main__.x]))))) [case testWithInFunction] def f(): with f as x: x [out] MypyFile:1( FuncDef:1( f Block:1( WithStmt:2( Expr( NameExpr(f [__main__.f])) Target( NameExpr(x* [l])) Block:2( ExpressionStmt:3( NameExpr(x [l]))))))) [case testComplexWith] with object, object: pass with object as a, object as b: pass [out] MypyFile:1( WithStmt:1( Expr( NameExpr(object 
[builtins.object])) Expr( NameExpr(object [builtins.object])) Block:1( PassStmt:2())) WithStmt:3( Expr( NameExpr(object [builtins.object])) Target( NameExpr(a* [__main__.a])) Expr( NameExpr(object [builtins.object])) Target( NameExpr(b* [__main__.b])) Block:3( PassStmt:4()))) [case testVariableInBlock] while object: x = None if x: x = x [out] MypyFile:1( WhileStmt:1( NameExpr(object [builtins.object]) Block:1( AssignmentStmt:2( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) IfStmt:3( If( NameExpr(x [__main__.x])) Then( AssignmentStmt:4( NameExpr(x [__main__.x]) NameExpr(x [__main__.x]))))))) [case testVariableInExceptHandler] try: pass except object as o: x = None o = x [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) NameExpr(object [builtins.object]) NameExpr(o* [__main__.o]) Block:3( AssignmentStmt:4( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) AssignmentStmt:5( NameExpr(o [__main__.o]) NameExpr(x [__main__.x]))))) [case testCallInExceptHandler] try: pass except object as o: o = object() [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) NameExpr(object [builtins.object]) NameExpr(o* [__main__.o]) Block:3( AssignmentStmt:4( NameExpr(o [__main__.o]) CallExpr:4( NameExpr(object [builtins.object]) Args()))))) [case testTryExceptWithMultipleHandlers] try: pass except BaseException as e: pass except Err as f: f = BaseException() # Fail f = Err() class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) NameExpr(BaseException [builtins.BaseException]) NameExpr(e* [__main__.e]) Block:3( PassStmt:4()) NameExpr(Err [__main__.Err]) NameExpr(f* [__main__.f]) Block:5( AssignmentStmt:6( NameExpr(f [__main__.f]) CallExpr:6( NameExpr(BaseException [builtins.BaseException]) Args())) AssignmentStmt:7( NameExpr(f [__main__.f]) CallExpr:7( NameExpr(Err [__main__.Err]) Args())))) ClassDef:8( Err BaseType( builtins.BaseException) PassStmt:8())) [case testMultipleAssignmentWithPartialNewDef] # 
flags: --allow-redefinition o = None x, o = o, o [out] MypyFile:1( AssignmentStmt:2( NameExpr(o'* [__main__.o']) NameExpr(None [builtins.None])) AssignmentStmt:3( TupleExpr:3( NameExpr(x* [__main__.x]) NameExpr(o* [__main__.o])) TupleExpr:3( NameExpr(o' [__main__.o']) NameExpr(o' [__main__.o'])))) [case testFunctionDecorator] def decorate(f): pass @decorate def g(): g() [out] MypyFile:1( FuncDef:1( decorate Args( Var(f)) Block:1( PassStmt:1())) Decorator:2( Var(g) NameExpr(decorate [__main__.decorate]) FuncDef:3( g Block:3( ExpressionStmt:4( CallExpr:4( NameExpr(g [__main__.g]) Args())))))) [case testTryWithinFunction] def f() -> None: try: pass except object as o: pass [out] MypyFile:1( FuncDef:1( f def () Block:1( TryStmt:2( Block:2( PassStmt:3()) NameExpr(object [builtins.object]) NameExpr(o* [l]) Block:4( PassStmt:5()))))) [case testReuseExceptionVariable] def f() -> None: try: pass except object as o: pass except object as o: pass [out] MypyFile:1( FuncDef:1( f def () Block:1( TryStmt:2( Block:2( PassStmt:3()) NameExpr(object [builtins.object]) NameExpr(o* [l]) Block:4( PassStmt:5()) NameExpr(object [builtins.object]) NameExpr(o [l]) Block:6( PassStmt:7()))))) [case testWithMultiple] def f(a): pass def main(): with f(0) as a, f(a) as b: x = a, b [out] MypyFile:1( FuncDef:1( f Args( Var(a)) Block:1( PassStmt:2())) FuncDef:3( main Block:3( WithStmt:4( Expr( CallExpr:4( NameExpr(f [__main__.f]) Args( IntExpr(0)))) Target( NameExpr(a* [l])) Expr( CallExpr:4( NameExpr(f [__main__.f]) Args( NameExpr(a [l])))) Target( NameExpr(b* [l])) Block:4( AssignmentStmt:5( NameExpr(x* [l]) TupleExpr:5( NameExpr(a [l]) NameExpr(b [l])))))))) [case testRenameGlobalVariable] # flags: --allow-redefinition def f(a): pass x = 0 f(x) x = '' f(x) [out] MypyFile:1( FuncDef:2( f Args( Var(a)) Block:2( PassStmt:2())) AssignmentStmt:3( NameExpr(x'* [__main__.x']) IntExpr(0)) ExpressionStmt:4( CallExpr:4( NameExpr(f [__main__.f]) Args( NameExpr(x' [__main__.x'])))) AssignmentStmt:5( 
NameExpr(x* [__main__.x]) StrExpr()) ExpressionStmt:6( CallExpr:6( NameExpr(f [__main__.f]) Args( NameExpr(x [__main__.x]))))) [case testNoRenameGlobalVariable] # flags: --disallow-redefinition def f(a): pass x = 0 f(x) x = '' f(x) [out] MypyFile:1( FuncDef:2( f Args( Var(a)) Block:2( PassStmt:2())) AssignmentStmt:3( NameExpr(x* [__main__.x]) IntExpr(0)) ExpressionStmt:4( CallExpr:4( NameExpr(f [__main__.f]) Args( NameExpr(x [__main__.x])))) AssignmentStmt:5( NameExpr(x [__main__.x]) StrExpr()) ExpressionStmt:6( CallExpr:6( NameExpr(f [__main__.f]) Args( NameExpr(x [__main__.x]))))) [case testRenameLocalVariable] # flags: --allow-redefinition def f(a): f(a) a = '' f(a) [out] MypyFile:1( FuncDef:2( f Args( Var(a)) Block:2( ExpressionStmt:3( CallExpr:3( NameExpr(f [__main__.f]) Args( NameExpr(a [l])))) AssignmentStmt:4( NameExpr(a'* [l]) StrExpr()) ExpressionStmt:5( CallExpr:5( NameExpr(f [__main__.f]) Args( NameExpr(a' [l]))))))) [case testCannotRenameExternalVarWithinClass] # flags: --allow-redefinition x = 0 x class A: x = 1 x = '' [out] MypyFile:1( AssignmentStmt:2( NameExpr(x* [__main__.x]) IntExpr(0)) ExpressionStmt:3( NameExpr(x [__main__.x])) ClassDef:4( A AssignmentStmt:5( NameExpr(x* [m]) IntExpr(1))) AssignmentStmt:6( NameExpr(x [__main__.x]) StrExpr())) mypy-0.761/test-data/unit/semanal-symtable.test0000644€tŠÔÚ€2›s®0000000354213576752246025730 0ustar jukkaDROPBOX\Domain Users00000000000000[case testEmptyFile] [out] -- Note that builtins are ignored to simplify output. 
__main__: SymbolTable() [case testVarDef] x = 1 [out] __main__: SymbolTable( x : Gdef/Var (__main__.x)) [case testFuncDef] def f(): pass [out] __main__: SymbolTable( f : Gdef/FuncDef (__main__.f)) [case testEmptyClassDef] class c: pass [out] __main__: SymbolTable( c : Gdef/TypeInfo (__main__.c)) [case testImport] import m [file m.py] x = 1 [out] __main__: SymbolTable( m : Gdef/MypyFile (m)) m: SymbolTable( x : Gdef/Var (m.x)) [case testImportFromModule] from m import x [file m.py] class x: pass y = 1 [out] __main__: SymbolTable( x : Gdef/TypeInfo (m.x)) m: SymbolTable( x : Gdef/TypeInfo (m.x) y : Gdef/Var (m.y)) [case testImportAs] from m import x as xx [file m.py] class x: pass y = 1 [out] __main__: SymbolTable( xx : Gdef/TypeInfo (m.x)) m: SymbolTable( x : Gdef/TypeInfo (m.x) y : Gdef/Var (m.y)) [case testFailingImports] from sys import non_existing1 # type: ignore from xyz import non_existing2 # type: ignore if int(): from sys import non_existing3 # type: ignore import non_existing4 # type: ignore [out] __main__: SymbolTable( non_existing1 : Gdef/Var (__main__.non_existing1) : Any non_existing2 : Gdef/Var (__main__.non_existing2) : Any non_existing3 : Gdef/Var (__main__.non_existing3) : Any non_existing4 : Gdef/Var (__main__.non_existing4) : Any) sys: SymbolTable( platform : Gdef/Var (sys.platform) version_info : Gdef/Var (sys.version_info)) [case testDecorator] from typing import Callable def dec(f: Callable[[], None]) -> Callable[[], None]: return f @dec def g() -> None: pass [out] __main__: SymbolTable( Callable : Gdef/Var (typing.Callable) dec : Gdef/FuncDef (__main__.dec) : def (f: def ()) -> def () g : Gdef/Decorator (__main__.g) : def ()) mypy-0.761/test-data/unit/semanal-typealiases.test0000644€tŠÔÚ€2›s®0000002076313576752246026437 0ustar jukkaDROPBOX\Domain Users00000000000000[case testListTypeAlias] from typing import List def f() -> List[int]: pass [builtins fixtures/list.pyi] [out] MypyFile:1( ImportFrom:1(typing, [List]) FuncDef:2( f def () -> 
builtins.list[builtins.int] Block:2( PassStmt:2()))) [case testDictTypeAlias] from typing import Dict def f() -> Dict[int, str]: pass [builtins fixtures/dict.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Dict]) FuncDef:2( f def () -> builtins.dict[builtins.int, builtins.str] Block:2( PassStmt:2()))) [case testQualifiedTypeAlias] import typing def f() -> typing.List[int]: pass [builtins fixtures/list.pyi] [out] MypyFile:1( Import:1(typing) FuncDef:2( f def () -> builtins.list[builtins.int] Block:2( PassStmt:2()))) [case testTypeApplicationWithTypeAlias] from typing import List List[List[int]] [builtins fixtures/list.pyi] [out] MypyFile:1( ImportFrom:1(typing, [List]) ExpressionStmt:2( TypeApplication:2( NameExpr(List [typing.List]) Types( builtins.list[builtins.int])))) [case testTypeApplicationWithQualifiedTypeAlias] import typing typing.List[typing.List[int]] [builtins fixtures/list.pyi] [out] MypyFile:1( Import:1(typing) ExpressionStmt:2( TypeApplication:2( MemberExpr:2( NameExpr(typing) List [typing.List]) Types( builtins.list[builtins.int])))) [case testSimpleTypeAlias] import typing class A: pass A2 = A def f(x: A2) -> A: pass [out] MypyFile:1( Import:1(typing) ClassDef:2( A PassStmt:2()) AssignmentStmt:3( NameExpr(A2* [__main__.A2]) NameExpr(A [__main__.A])) FuncDef:4( f Args( Var(x)) def (x: __main__.A) -> __main__.A Block:4( PassStmt:4()))) [case testQualifiedSimpleTypeAlias] import typing import _m A2 = _m.A x = 1 # type: A2 [file _m.py] import typing class A: pass [out] MypyFile:1( Import:1(typing) Import:2(_m) AssignmentStmt:3( NameExpr(A2* [__main__.A2]) MemberExpr:3( NameExpr(_m) A [_m.A])) AssignmentStmt:4( NameExpr(x [__main__.x]) IntExpr(1) _m.A)) [case testUnionTypeAlias] from typing import Union U = Union[int, str] def f(x: U) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) AssignmentStmt:2( NameExpr(U* [__main__.U]) TypeAliasExpr(Union[builtins.int, builtins.str])) FuncDef:3( f Args( Var(x)) def (x: Union[builtins.int, builtins.str]) 
Block:3( PassStmt:3()))) [case testUnionTypeAlias2] from typing import Union class A: pass U = Union[int, A] def f(x: U) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) ClassDef:2( A PassStmt:2()) AssignmentStmt:3( NameExpr(U* [__main__.U]) TypeAliasExpr(Union[builtins.int, __main__.A])) FuncDef:4( f Args( Var(x)) def (x: Union[builtins.int, __main__.A]) Block:4( PassStmt:4()))) [case testUnionTypeAliasWithQualifiedUnion] import typing U = typing.Union[int, str] def f(x: U) -> None: pass [out] MypyFile:1( Import:1(typing) AssignmentStmt:2( NameExpr(U* [__main__.U]) TypeAliasExpr(Union[builtins.int, builtins.str])) FuncDef:3( f Args( Var(x)) def (x: Union[builtins.int, builtins.str]) Block:3( PassStmt:3()))) [case testTupleTypeAlias] from typing import Tuple T = Tuple[int, str] def f(x: T) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Tuple]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeAliasExpr(Tuple[builtins.int, builtins.str])) FuncDef:3( f Args( Var(x)) def (x: Tuple[builtins.int, builtins.str]) Block:3( PassStmt:3()))) [case testCallableTypeAlias] from typing import Callable C = Callable[[int], None] def f(x: C) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Callable]) AssignmentStmt:2( NameExpr(C* [__main__.C]) TypeAliasExpr(def (builtins.int))) FuncDef:3( f Args( Var(x)) def (x: def (builtins.int)) Block:3( PassStmt:3()))) [case testGenericTypeAlias] from typing import Generic, TypeVar T = TypeVar('T') class G(Generic[T]): pass A = G[int] def f(x: A) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Generic, TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2()) ClassDef:3( G TypeVars( T) PassStmt:3()) AssignmentStmt:4( NameExpr(A* [__main__.A]) TypeAliasExpr(__main__.G[builtins.int])) FuncDef:5( f Args( Var(x)) def (x: __main__.G[builtins.int]) Block:5( PassStmt:5()))) [case testGenericTypeAlias2] from typing import List A = List[int] def f(x: A) -> None: pass [builtins fixtures/list.pyi] [out] MypyFile:1( 
ImportFrom:1(typing, [List]) AssignmentStmt:2( NameExpr(A* [__main__.A]) TypeAliasExpr(builtins.list[builtins.int])) FuncDef:3( f Args( Var(x)) def (x: builtins.list[builtins.int]) Block:3( PassStmt:3()))) [case testImportUnionTypeAlias] import typing from _m import U def f(x: U) -> None: pass [file _m.py] from typing import Union class A: pass U = Union[int, A] [out] MypyFile:1( Import:1(typing) ImportFrom:2(_m, [U]) FuncDef:3( f Args( Var(x)) def (x: Union[builtins.int, _m.A]) Block:3( PassStmt:3()))) [case testImportUnionTypeAlias2] import typing import _m def f(x: _m.U) -> None: pass [file _m.py] from typing import Union class A: pass U = Union[int, A] [out] MypyFile:1( Import:1(typing) Import:2(_m) FuncDef:3( f Args( Var(x)) def (x: Union[builtins.int, _m.A]) Block:3( PassStmt:3()))) [case testImportSimpleTypeAlias] import typing from _m import A def f(x: A) -> None: pass [file _m.py] import typing A = int [out] MypyFile:1( Import:1(typing) ImportFrom:2(_m, [A]) FuncDef:3( f Args( Var(x)) def (x: builtins.int) Block:3( PassStmt:3()))) [case testImportSimpleTypeAlias2] import typing import _m def f(x: _m.A) -> None: pass [file _m.py] import typing A = int [out] MypyFile:1( Import:1(typing) Import:2(_m) FuncDef:3( f Args( Var(x)) def (x: builtins.int) Block:3( PassStmt:3()))) [case testAnyTypeAlias] from typing import Any A = Any a = 1 # type: A [out] MypyFile:1( ImportFrom:1(typing, [Any]) AssignmentStmt:2( NameExpr(A* [__main__.A]) NameExpr(Any [typing.Any])) AssignmentStmt:3( NameExpr(a [__main__.a]) IntExpr(1) Any)) [case testAnyTypeAlias2] import typing A = typing.Any a = 1 # type: A [out] MypyFile:1( Import:1(typing) AssignmentStmt:2( NameExpr(A* [__main__.A]) MemberExpr:2( NameExpr(typing) Any [typing.Any])) AssignmentStmt:3( NameExpr(a [__main__.a]) IntExpr(1) Any)) [case testTypeAliasAlias] from typing import Union U = Union[int, str] U2 = U x = 1 # type: U2 [out] MypyFile:1( ImportFrom:1(typing, [Union]) AssignmentStmt:2( NameExpr(U* [__main__.U]) 
TypeAliasExpr(Union[builtins.int, builtins.str])) AssignmentStmt:3( NameExpr(U2* [__main__.U2]) NameExpr(U [__main__.U])) AssignmentStmt:4( NameExpr(x [__main__.x]) IntExpr(1) Union[builtins.int, builtins.str])) [case testTypeAliasOfImportedAlias] from typing import Union from _m import U U2 = U x = 1 # type: U2 [file _m.py] from typing import Union U = Union[int, str] [out] MypyFile:1( ImportFrom:1(typing, [Union]) ImportFrom:2(_m, [U]) AssignmentStmt:3( NameExpr(U2* [__main__.U2]) NameExpr(U [_m.U])) AssignmentStmt:4( NameExpr(x [__main__.x]) IntExpr(1) Union[builtins.int, builtins.str])) [case testListTypeDoesNotGenerateAlias] import typing A = [int, str] a = 1 # type: A # E: Variable "__main__.A" is not valid as a type [case testCantUseStringLiteralAsTypeAlias] from typing import Union A = 'Union[int, str]' a = 1 # type: A # E: Variable "__main__.A" is not valid as a type [case testStringLiteralTypeAsAliasComponent] from typing import Union A = Union['int', str] a = 1 # type: A [out] MypyFile:1( ImportFrom:1(typing, [Union]) AssignmentStmt:2( NameExpr(A* [__main__.A]) TypeAliasExpr(Union[builtins.int, builtins.str])) AssignmentStmt:3( NameExpr(a [__main__.a]) IntExpr(1) Union[builtins.int, builtins.str])) [case testComplexTypeAlias] from typing import Union, Tuple, Any A = Union['int', Tuple[int, Any]] a = 1 # type: A [out] MypyFile:1( ImportFrom:1(typing, [Union, Tuple, Any]) AssignmentStmt:2( NameExpr(A* [__main__.A]) TypeAliasExpr(Union[builtins.int, Tuple[builtins.int, Any]])) AssignmentStmt:3( NameExpr(a [__main__.a]) IntExpr(1) Union[builtins.int, Tuple[builtins.int, Any]])) mypy-0.761/test-data/unit/semanal-typeddict.test0000644€tŠÔÚ€2›s®0000000272013576752246026076 0ustar jukkaDROPBOX\Domain Users00000000000000-- Create Type -- TODO: Implement support for this syntax. 
--[case testCanCreateTypedDictTypeWithKeywordArguments] --from mypy_extensions import TypedDict --Point = TypedDict('Point', x=int, y=int) --[builtins fixtures/dict.pyi] --[out] --MypyFile:1( -- ImportFrom:1(mypy_extensions, [TypedDict]) -- AssignmentStmt:2( -- NameExpr(Point* [__main__.Point]) -- TypedDictExpr:2(Point))) -- TODO: Implement support for this syntax. --[case testCanCreateTypedDictTypeWithDictCall] --from mypy_extensions import TypedDict --Point = TypedDict('Point', dict(x=int, y=int)) --[builtins fixtures/dict.pyi] --[out] --MypyFile:1( -- ImportFrom:1(mypy_extensions, [TypedDict]) -- AssignmentStmt:2( -- NameExpr(Point* [__main__.Point]) -- TypedDictExpr:2(Point))) [case testCanCreateTypedDictTypeWithDictLiteral] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) [builtins fixtures/dict.pyi] [out] MypyFile:1( ImportFrom:1(mypy_extensions, [TypedDict]) AssignmentStmt:2( NameExpr(Point* [__main__.Point]) TypedDictExpr:2(Point))) [case testTypedDictWithDocString] from mypy_extensions import TypedDict class A(TypedDict): """foo""" x: str [builtins fixtures/dict.pyi] [out] MypyFile:1( ImportFrom:1(mypy_extensions, [TypedDict]) ClassDef:2( A BaseType( mypy_extensions._TypedDict) ExpressionStmt:3( StrExpr(foo)) AssignmentStmt:4( NameExpr(x) TempNode:4( Any) str?))) mypy-0.761/test-data/unit/semanal-typeinfo.test0000644€tŠÔÚ€2›s®0000000333613576752246025746 0ustar jukkaDROPBOX\Domain Users00000000000000[case testEmptyFile] [out] TypeInfoMap() [case testEmptyClass] class c: pass [out] TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(builtins.object) Mro(__main__.c, builtins.object) Names())) [case testClassWithMethod] class c: def f(self): pass [out] TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(builtins.object) Mro(__main__.c, builtins.object) Names( f))) [case testClassWithAttributes] class c: def __init__(self, x): self.y = x self.z = 1 [out] TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) 
Bases(builtins.object) Mro(__main__.c, builtins.object) Names( __init__ y z))) [case testBaseClass] class base: pass class c(base): pass [out] TypeInfoMap( __main__.base : TypeInfo( Name(__main__.base) Bases(builtins.object) Mro(__main__.base, builtins.object) Names()) __main__.c : TypeInfo( Name(__main__.c) Bases(__main__.base) Mro(__main__.c, __main__.base, builtins.object) Names())) [case testClassAndAbstractClass] from abc import abstractmethod, ABCMeta import typing class i(metaclass=ABCMeta): pass class c(i): pass [out] TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(__main__.i) Mro(__main__.c, __main__.i, builtins.object) Names() MetaclassType(abc.ABCMeta)) __main__.i : TypeInfo( Name(__main__.i) Bases(builtins.object) Mro(__main__.i, builtins.object) Names() DeclaredMetaclass(abc.ABCMeta) MetaclassType(abc.ABCMeta))) [case testAttributeWithoutType] class A: a = A [out] TypeInfoMap( __main__.A : TypeInfo( Name(__main__.A) Bases(builtins.object) Mro(__main__.A, builtins.object) Names( a))) mypy-0.761/test-data/unit/semanal-types.test0000644€tŠÔÚ€2›s®0000007161213576752246025257 0ustar jukkaDROPBOX\Domain Users00000000000000[case testVarWithType] import typing class A: pass x = A() # type: A y = x [out] MypyFile:1( Import:1(typing) ClassDef:2( A PassStmt:2()) AssignmentStmt:3( NameExpr(x [__main__.x]) CallExpr:3( NameExpr(A [__main__.A]) Args()) __main__.A) AssignmentStmt:4( NameExpr(y* [__main__.y]) NameExpr(x [__main__.x]))) [case testLocalVarWithType] class A: pass def f(): x = None # type: A y = x [out] MypyFile:1( ClassDef:1( A PassStmt:1()) FuncDef:2( f Block:2( AssignmentStmt:3( NameExpr(x [l]) NameExpr(None [builtins.None]) __main__.A) AssignmentStmt:4( NameExpr(y* [l]) NameExpr(x [l]))))) [case testAnyType] from typing import Any x = None # type: Any y = x [out] MypyFile:1( ImportFrom:1(typing, [Any]) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) Any) AssignmentStmt:3( NameExpr(y* [__main__.y]) NameExpr(x 
[__main__.x]))) [case testMemberVarWithType] import typing class A: def __init__(self): self.x = None # type: int [out] MypyFile:1( Import:1(typing) ClassDef:2( A FuncDef:3( __init__ Args( Var(self)) Block:3( AssignmentStmt:4( MemberExpr:4( NameExpr(self [l]) x) NameExpr(None [builtins.None]) builtins.int))))) [case testClassVarWithType] import typing class A: x = None # type: int x = 1 [out] MypyFile:1( Import:1(typing) ClassDef:2( A AssignmentStmt:3( NameExpr(x [m]) NameExpr(None [builtins.None]) builtins.int) AssignmentStmt:4( NameExpr(x [__main__.A.x]) IntExpr(1)))) [case testFunctionSig] from typing import Any class A: pass def f(x: A) -> A: pass def g(x: Any, y: A) -> None: z = x, y [out] MypyFile:1( ImportFrom:1(typing, [Any]) ClassDef:2( A PassStmt:2()) FuncDef:3( f Args( Var(x)) def (x: __main__.A) -> __main__.A Block:3( PassStmt:3())) FuncDef:4( g Args( Var(x) Var(y)) def (x: Any, y: __main__.A) Block:4( AssignmentStmt:5( NameExpr(z* [l]) TupleExpr:5( NameExpr(x [l]) NameExpr(y [l])))))) [case testBaseclass] class A: pass class B(A): pass [out] MypyFile:1( ClassDef:1( A PassStmt:1()) ClassDef:2( B BaseType( __main__.A) PassStmt:2())) [case testMultipleVarDef] class A: pass class B: pass a, b = None, None # type: (A, B) x = a, b [out] MypyFile:1( ClassDef:2( A PassStmt:2()) ClassDef:3( B PassStmt:3()) AssignmentStmt:4( TupleExpr:4( NameExpr(a [__main__.a]) NameExpr(b [__main__.b])) TupleExpr:4( NameExpr(None [builtins.None]) NameExpr(None [builtins.None])) Tuple[__main__.A, __main__.B]) AssignmentStmt:5( NameExpr(x* [__main__.x]) TupleExpr:5( NameExpr(a [__main__.a]) NameExpr(b [__main__.b])))) [case testGenericType] from typing import TypeVar, Generic, Any t = TypeVar('t') class A(Generic[t]): pass class B: pass x = None # type: A[B] y = None # type: A[Any] [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, Any]) AssignmentStmt:3( NameExpr(t* [__main__.t]) TypeVarExpr:3()) ClassDef:5( A TypeVars( t) PassStmt:5()) ClassDef:6( B PassStmt:6()) 
AssignmentStmt:7( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) __main__.A[__main__.B]) AssignmentStmt:8( NameExpr(y [__main__.y]) NameExpr(None [builtins.None]) __main__.A[Any])) [case testGenericType2] from typing import TypeVar, Generic, Any t = TypeVar('t') s = TypeVar('s') class A(Generic[t, s]): pass class B: pass x = None # type: A[B, Any] [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, Any]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( A TypeVars( t s) PassStmt:4()) ClassDef:5( B PassStmt:5()) AssignmentStmt:6( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) __main__.A[__main__.B, Any])) [case testAssignmentAfterDef] class A: pass a = None # type: A a = 1 def f(): b = None # type: A b = 1 [out] MypyFile:1( ClassDef:3( A PassStmt:3()) AssignmentStmt:4( NameExpr(a [__main__.a]) NameExpr(None [builtins.None]) __main__.A) AssignmentStmt:5( NameExpr(a [__main__.a]) IntExpr(1)) FuncDef:6( f Block:6( AssignmentStmt:7( NameExpr(b [l]) NameExpr(None [builtins.None]) __main__.A) AssignmentStmt:8( NameExpr(b [l]) IntExpr(1))))) [case testCast] from typing import TypeVar, Generic, Any, cast t = TypeVar('t') class c: pass class d(Generic[t]): pass cast(Any, 1) cast(c, 1) cast(d[c], c) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, Any, cast]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( c PassStmt:3()) ClassDef:4( d TypeVars( t) PassStmt:4()) ExpressionStmt:5( CastExpr:5( IntExpr(1) Any)) ExpressionStmt:6( CastExpr:6( IntExpr(1) __main__.c)) ExpressionStmt:7( CastExpr:7( NameExpr(c [__main__.c]) __main__.d[__main__.c]))) [case testCastToQualifiedTypeAndCast] import typing import _m typing.cast(_m.C, object) [file _m.py] class C: pass [out] MypyFile:1( Import:1(typing) Import:2(_m) ExpressionStmt:3( CastExpr:3( NameExpr(object [builtins.object]) _m.C))) [case testLongQualifiedCast] import typing import _m._n 
typing.cast(_m._n.C, object) [file _m/__init__.py] [file _m/_n.py] class C: pass [out] MypyFile:1( Import:1(typing) Import:2(_m._n) ExpressionStmt:3( CastExpr:3( NameExpr(object [builtins.object]) _m._n.C))) [case testCastTargetWithTwoTypeArgs] from typing import TypeVar, Generic, cast t = TypeVar('t') s = TypeVar('s') class C(Generic[t, s]): pass cast(C[str, int], C) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, cast]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( C TypeVars( t s) PassStmt:4()) ExpressionStmt:5( CastExpr:5( NameExpr(C [__main__.C]) __main__.C[builtins.str, builtins.int]))) [case testCastToTupleType] from typing import Tuple, cast cast(Tuple[int, str], None) [out] MypyFile:1( ImportFrom:1(typing, [Tuple, cast]) ExpressionStmt:2( CastExpr:2( NameExpr(None [builtins.None]) Tuple[builtins.int, builtins.str]))) [case testCastToFunctionType] from typing import Callable, cast cast(Callable[[int], str], None) [out] MypyFile:1( ImportFrom:1(typing, [Callable, cast]) ExpressionStmt:2( CastExpr:2( NameExpr(None [builtins.None]) def (builtins.int) -> builtins.str))) [case testCastToStringLiteralType] from typing import cast cast('int', 1) [out] MypyFile:1( ImportFrom:1(typing, [cast]) ExpressionStmt:2( CastExpr:2( IntExpr(1) builtins.int))) [case testFunctionTypeVariable] from typing import TypeVar t = TypeVar('t') def f(x: t) -> None: y = None # type: t [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) FuncDef:3( f Args( Var(x)) def [t] (x: t`-1) Block:3( AssignmentStmt:4( NameExpr(y [l]) NameExpr(None [builtins.None]) t`-1)))) [case testTwoFunctionTypeVariables] from typing import TypeVar t = TypeVar('t') u = TypeVar('u') def f(x: t, y: u, z: t) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(u* 
[__main__.u]) TypeVarExpr:3()) FuncDef:4( f Args( Var(x) Var(y) Var(z)) def [t, u] (x: t`-1, y: u`-2, z: t`-1) Block:4( PassStmt:4()))) [case testNestedGenericFunctionTypeVariable] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass def f(x: A[t], y) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) FuncDef:4( f Args( Var(x) Var(y)) def [t] (x: __main__.A[t`-1], y: Any) Block:4( PassStmt:4()))) [case testNestedGenericFunctionTypeVariable2] from typing import TypeVar, Tuple, Generic t = TypeVar('t') class A(Generic[t]): pass def f(x: Tuple[int, t]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Tuple, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) FuncDef:4( f Args( Var(x)) def [t] (x: Tuple[builtins.int, t`-1]) Block:4( PassStmt:4()))) [case testNestedGenericFunctionTypeVariable3] from typing import TypeVar, Callable, Generic t = TypeVar('t') class A(Generic[t]): pass def f(x: Callable[[int, t], int]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Callable, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) FuncDef:4( f Args( Var(x)) def [t] (x: def (builtins.int, t`-1) -> builtins.int) Block:4( PassStmt:4()))) [case testNestedGenericFunctionTypeVariable4] from typing import TypeVar, Callable, Generic t = TypeVar('t') class A(Generic[t]): pass def f(x: Callable[[], t]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Callable, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) FuncDef:4( f Args( Var(x)) def [t] (x: def () -> t`-1) Block:4( PassStmt:4()))) [case testGenericFunctionTypeVariableInReturnType] from typing import TypeVar t = TypeVar('t') def f() -> t: pass [out] MypyFile:1( 
ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) FuncDef:3( f def [t] () -> t`-1 Block:3( PassStmt:3()))) [case testSelfType] class A: def f(self, o: object) -> None: pass [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self) Var(o)) def (self: __main__.A, o: builtins.object) Block:2( PassStmt:2())))) [case testNestedGenericFunction] from typing import TypeVar t = TypeVar('t') def f() -> None: def g() -> t: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) FuncDef:3( f def () Block:3( FuncDef:4( g def [t] () -> t`-1 Block:4( PassStmt:4()))))) [case testClassTvar] from typing import TypeVar, Generic t = TypeVar('t') class c(Generic[t]): def f(self) -> t: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:3( NameExpr(t* [__main__.t]) TypeVarExpr:3()) ClassDef:5( c TypeVars( t) FuncDef:6( f Args( Var(self)) def (self: __main__.c[t`1]) -> t`1 Block:6( PassStmt:6())))) [case testClassTvar2] from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class c(Generic[t, s]): def f(self, x: s) -> t: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:3( NameExpr(t* [__main__.t]) TypeVarExpr:3()) AssignmentStmt:4( NameExpr(s* [__main__.s]) TypeVarExpr:4()) ClassDef:6( c TypeVars( t s) FuncDef:7( f Args( Var(self) Var(x)) def (self: __main__.c[t`1, s`2], x: s`2) -> t`1 Block:7( PassStmt:7())))) [case testGenericBaseClass] from typing import TypeVar, Generic t = TypeVar('t') class d(Generic[t]): pass class c(d[t], Generic[t]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( d TypeVars( t) PassStmt:3()) ClassDef:4( c TypeVars( t) BaseType( __main__.d[t`1]) PassStmt:4())) [case testTupleType] from typing import Tuple t = None # type: tuple t1 = None # type: Tuple[object] t2 = None # type: Tuple[int, object] 
[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Tuple]) AssignmentStmt:2( NameExpr(t [__main__.t]) NameExpr(None [builtins.None]) builtins.tuple[Any]) AssignmentStmt:3( NameExpr(t1 [__main__.t1]) NameExpr(None [builtins.None]) Tuple[builtins.object]) AssignmentStmt:4( NameExpr(t2 [__main__.t2]) NameExpr(None [builtins.None]) Tuple[builtins.int, builtins.object])) [case testVariableLengthTuple] from typing import Tuple t = None # type: Tuple[int, ...] [builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Tuple]) AssignmentStmt:2( NameExpr(t [__main__.t]) NameExpr(None [builtins.None]) builtins.tuple[builtins.int])) [case testInvalidTupleType] from typing import Tuple t = None # type: Tuple[int, str, ...] # E: Unexpected '...' [out] [case testFunctionTypes] from typing import Callable f = None # type: Callable[[object, int], str] g = None # type: Callable[[], None] [out] MypyFile:1( ImportFrom:1(typing, [Callable]) AssignmentStmt:2( NameExpr(f [__main__.f]) NameExpr(None [builtins.None]) def (builtins.object, builtins.int) -> builtins.str) AssignmentStmt:3( NameExpr(g [__main__.g]) NameExpr(None [builtins.None]) def ())) [case testOverloadedFunction] from typing import overload, Any @overload def f(a: object) -> int: a @overload def f(a: str) -> object: a def f(a: Any) -> Any: return a [out] MypyFile:1( ImportFrom:1(typing, [overload, Any]) OverloadedFuncDef:2( FuncDef:7( f Args( Var(a)) def (a: Any) -> Any Block:7( ReturnStmt:7( NameExpr(a [l])))) Overload(def (a: builtins.object) -> builtins.int, \ def (a: builtins.str) -> builtins.object) Decorator:2( Var(f) NameExpr(overload [typing.overload]) FuncDef:3( f Args( Var(a)) def (a: builtins.object) -> builtins.int Block:3( ExpressionStmt:3( NameExpr(a [l]))))) Decorator:4( Var(f) NameExpr(overload [typing.overload]) FuncDef:5( f Args( Var(a)) def (a: builtins.str) -> builtins.object Block:5( ExpressionStmt:5( NameExpr(a [l]))))))) [case testReferenceToOverloadedFunction] from 
typing import overload @overload def f() -> None: pass @overload def f(x: int) -> None: pass def f(*args) -> None: pass x = f [out] MypyFile:1( ImportFrom:1(typing, [overload]) OverloadedFuncDef:2( FuncDef:7( f def (*args: Any) VarArg( Var(args)) Block:7( PassStmt:7())) Overload(def (), def (x: builtins.int)) Decorator:2( Var(f) NameExpr(overload [typing.overload]) FuncDef:3( f def () Block:3( PassStmt:3()))) Decorator:4( Var(f) NameExpr(overload [typing.overload]) FuncDef:5( f Args( Var(x)) def (x: builtins.int) Block:5( PassStmt:5())))) AssignmentStmt:9( NameExpr(x* [__main__.x]) NameExpr(f [__main__.f]))) [case testNestedOverloadedFunction] from typing import overload def f(): @overload def g(): pass @overload def g(x): pass def g(*args): pass y = g [out] MypyFile:1( ImportFrom:1(typing, [overload]) FuncDef:2( f Block:2( OverloadedFuncDef:3( FuncDef:8( g VarArg( Var(args)) Block:8( PassStmt:8())) Overload(def () -> Any, def (x: Any) -> Any) Decorator:3( Var(g) NameExpr(overload [typing.overload]) FuncDef:4( g Block:4( PassStmt:4()))) Decorator:5( Var(g) NameExpr(overload [typing.overload]) FuncDef:6( g Args( Var(x)) Block:6( PassStmt:6())))) AssignmentStmt:10( NameExpr(y* [l]) NameExpr(g [l]))))) [case testImplicitGenericTypeArgs] from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class A(Generic[t, s]): pass x = None # type: A [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( A TypeVars( t s) PassStmt:4()) AssignmentStmt:5( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) __main__.A[Any, Any])) [case testImplicitTypeArgsAndGenericBaseClass] from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class B(Generic[s]): pass class A(B, Generic[t]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) 
AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( B TypeVars( s) PassStmt:4()) ClassDef:5( A TypeVars( t) BaseType( __main__.B[Any]) PassStmt:5())) [case testTypeApplication] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass x = A[int]() [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) AssignmentStmt:4( NameExpr(x* [__main__.x]) CallExpr:4( TypeApplication:4( NameExpr(A [__main__.A]) Types( builtins.int)) Args()))) [case testTypeApplicationWithTwoTypeArgs] from typing import TypeVar, Generic, Any t = TypeVar('t') s = TypeVar('s') class A(Generic[t, s]): pass x = A[int, Any]() [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, Any]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( A TypeVars( t s) PassStmt:4()) AssignmentStmt:5( NameExpr(x* [__main__.x]) CallExpr:5( TypeApplication:5( NameExpr(A [__main__.A]) Types( builtins.int Any)) Args()))) [case testFunctionTypeApplication] from typing import TypeVar t = TypeVar('t') def f(x: t) -> None: pass f[int](1) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) FuncDef:3( f Args( Var(x)) def [t] (x: t`-1) Block:3( PassStmt:3())) ExpressionStmt:4( CallExpr:4( TypeApplication:4( NameExpr(f [__main__.f]) Types( builtins.int)) Args( IntExpr(1))))) [case testTypeApplicationWithStringLiteralType] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass A['int']() [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) ExpressionStmt:4( CallExpr:4( TypeApplication:4( NameExpr(A [__main__.A]) Types( builtins.int)) Args()))) [case testVarArgsAndKeywordArgs] def g(*x: int, y: str = ''): pass 
[out] MypyFile:1( FuncDef:1( g MaxPos(0) Args( default( Var(y) StrExpr())) def (*x: builtins.int, *, y: builtins.str =) -> Any VarArg( Var(x)) Block:1( PassStmt:1()))) [case testQualifiedGeneric] from typing import TypeVar import typing T = TypeVar('T') class A(typing.Generic[T]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) Import:2(typing) AssignmentStmt:3( NameExpr(T* [__main__.T]) TypeVarExpr:3()) ClassDef:4( A TypeVars( T) PassStmt:4())) [case testQualifiedTypevar] import typing T = typing.TypeVar('T') def f(x: T) -> T: pass [out] MypyFile:1( Import:1(typing) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2()) FuncDef:3( f Args( Var(x)) def [T] (x: T`-1) -> T`-1 Block:3( PassStmt:3()))) [case testAliasedTypevar] from typing import TypeVar as tv T = tv('T') def f(x: T) -> T: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar : tv]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2()) FuncDef:3( f Args( Var(x)) def [T] (x: T`-1) -> T`-1 Block:3( PassStmt:3()))) [case testLocalTypevar] from typing import TypeVar def f(): T = TypeVar('T') def g(x: T) -> T: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) FuncDef:2( f Block:2( AssignmentStmt:3( NameExpr(T* [l]) TypeVarExpr:3()) FuncDef:4( g Args( Var(x)) def [T] (x: T`-1) -> T`-1 Block:4( PassStmt:4()))))) [case testClassLevelTypevar] from typing import TypeVar class A: T = TypeVar('T') def g(self, x: T) -> T: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) ClassDef:2( A AssignmentStmt:3( NameExpr(T* [m]) TypeVarExpr:3()) FuncDef:4( g Args( Var(self) Var(x)) def [T] (self: __main__.A, x: T`-1) -> T`-1 Block:4( PassStmt:4())))) [case testImportTypevar] from typing import Generic from _m import T class A(Generic[T]): y = None # type: T [file _m.py] from typing import TypeVar T = TypeVar('T') [out] MypyFile:1( ImportFrom:1(typing, [Generic]) ImportFrom:2(_m, [T]) ClassDef:3( A TypeVars( T) AssignmentStmt:4( NameExpr(y [m]) NameExpr(None [builtins.None]) T`1))) [case 
testQualifiedReferenceToTypevarInClass] from typing import Generic import _m class A(Generic[_m.T]): a = None # type: _m.T def f(self, x: _m.T): b = None # type: _m.T [file _m.py] from typing import TypeVar T = TypeVar('T') [out] MypyFile:1( ImportFrom:1(typing, [Generic]) Import:2(_m) ClassDef:3( A TypeVars( _m.T) AssignmentStmt:4( NameExpr(a [m]) NameExpr(None [builtins.None]) _m.T`1) FuncDef:5( f Args( Var(self) Var(x)) def (self: __main__.A[_m.T`1], x: _m.T`1) -> Any Block:5( AssignmentStmt:6( NameExpr(b [l]) NameExpr(None [builtins.None]) _m.T`1))))) [case testQualifiedReferenceToTypevarInFunctionSignature] import _m def f(x: _m.T) -> None: a = None # type: _m.T [file _m.py] from typing import TypeVar T = TypeVar('T') [out] MypyFile:1( Import:1(_m) FuncDef:2( f Args( Var(x)) def [_m.T] (x: _m.T`-1) Block:2( AssignmentStmt:3( NameExpr(a [l]) NameExpr(None [builtins.None]) _m.T`-1)))) [case testFunctionCommentAnnotation] from typing import Any def f(x): # type: (int) -> Any x = 1 [out] MypyFile:1( ImportFrom:1(typing, [Any]) FuncDef:2( f Args( Var(x)) def (x: builtins.int) -> Any Block:2( AssignmentStmt:3( NameExpr(x [l]) IntExpr(1))))) [case testMethodCommentAnnotation] import typing class A: def f(self, x): # type: (int) -> str x = 1 [out] MypyFile:1( Import:1(typing) ClassDef:2( A FuncDef:3( f Args( Var(self) Var(x)) def (self: __main__.A, x: builtins.int) -> builtins.str Block:3( AssignmentStmt:4( NameExpr(x [l]) IntExpr(1)))))) [case testTypevarWithValues] from typing import TypeVar, Any T = TypeVar('T', int, str) S = TypeVar('S', Any, int, str) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Any]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Values( builtins.int builtins.str))) AssignmentStmt:3( NameExpr(S* [__main__.S]) TypeVarExpr:3( Values( Any builtins.int builtins.str)))) [case testTypevarWithValuesAndVariance] from typing import TypeVar T = TypeVar('T', int, str, covariant=True) [builtins fixtures/bool.pyi] [out] MypyFile:1( 
ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Variance(COVARIANT) Values( builtins.int builtins.str)))) [case testTypevarWithBound] from typing import TypeVar T = TypeVar('T', bound=int) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( UpperBound(builtins.int)))) [case testGenericFunctionWithValueSet] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Values( builtins.int builtins.str))) FuncDef:3( f Args( Var(x)) def [T in (builtins.int, builtins.str)] (x: T`-1) -> T`-1 Block:3( PassStmt:3()))) [case testGenericClassWithValueSet] from typing import TypeVar, Generic T = TypeVar('T', int, str) class C(Generic[T]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Values( builtins.int builtins.str))) ClassDef:3( C TypeVars( T in (builtins.int, builtins.str)) PassStmt:3())) [case testGenericFunctionWithBound] from typing import TypeVar T = TypeVar('T', bound=int) def f(x: T) -> T: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( UpperBound(builtins.int))) FuncDef:3( f Args( Var(x)) def [T <: builtins.int] (x: T`-1) -> T`-1 Block:3( PassStmt:3()))) [case testGenericClassWithBound] from typing import TypeVar, Generic T = TypeVar('T', bound=int) class C(Generic[T]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( UpperBound(builtins.int))) ClassDef:3( C TypeVars( T <: builtins.int) PassStmt:3())) [case testSimpleDucktypeDecorator] from typing import _promote @_promote(str) class S: pass [typing fixtures/typing-full.pyi] [out] MypyFile:1( ImportFrom:1(typing, [_promote]) ClassDef:3( S Promote(builtins.str) Decorators( 
PromoteExpr:2(builtins.str)) PassStmt:3())) [case testUnionType] from typing import Union def f(x: Union[int, str]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) FuncDef:2( f Args( Var(x)) def (x: Union[builtins.int, builtins.str]) Block:2( PassStmt:2()))) [case testUnionTypeWithNoneItem] from typing import Union def f(x: Union[int, None]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) FuncDef:2( f Args( Var(x)) def (x: Union[builtins.int, None]) Block:2( PassStmt:2()))) [case testUnionTypeWithNoneItemAndTwoItems] from typing import Union def f(x: Union[int, None, str]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) FuncDef:2( f Args( Var(x)) def (x: Union[builtins.int, None, builtins.str]) Block:2( PassStmt:2()))) [case testUnionTypeWithSingleItem] from typing import Union def f(x: Union[int]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) FuncDef:2( f Args( Var(x)) def (x: builtins.int) Block:2( PassStmt:2()))) [case testOptionalTypes] from typing import Optional x = 1 # type: Optional[int] [out] MypyFile:1( ImportFrom:1(typing, [Optional]) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(1) Union[builtins.int, None])) [case testInvalidOptionalType] from typing import Optional x = 1 # type: Optional[int, str] # E: Optional[...] must have exactly one type argument y = 1 # type: Optional # E: Optional[...] 
must have exactly one type argument [out] [case testCoAndContravariantTypeVar] from typing import TypeVar T = TypeVar('T', covariant=True) S = TypeVar('S', contravariant=True) [builtins fixtures/bool.pyi] [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Variance(COVARIANT))) AssignmentStmt:3( NameExpr(S* [__main__.S]) TypeVarExpr:3( Variance(CONTRAVARIANT)))) [case testTupleExpressionAsType] def f(x: (int, int)) -> None: pass [out] main:1: error: Syntax error in type annotation main:1: note: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) [case tesQualifiedTypeNameBasedOnAny] from typing import Any x = 0 # type: Any z = 0 # type: x.y [out] MypyFile:1( ImportFrom:1(typing, [Any]) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(0) Any) AssignmentStmt:3( NameExpr(z [__main__.z]) IntExpr(0) Any)) mypy-0.761/test-data/unit/semenal-literal.test0000644€tŠÔÚ€2›s®0000000121013576752246025536 0ustar jukkaDROPBOX\Domain Users00000000000000[case testLiteralSemanalBasicAssignment] from typing_extensions import Literal foo: Literal[3] [out] MypyFile:1( ImportFrom:1(typing_extensions, [Literal]) AssignmentStmt:2( NameExpr(foo [__main__.foo]) TempNode:2( Any) Literal[3])) [case testLiteralSemanalInFunction] from typing_extensions import Literal def foo(a: Literal[1], b: Literal[" foo "]) -> Literal[True]: pass [builtins fixtures/bool.pyi] [out] MypyFile:1( ImportFrom:1(typing_extensions, [Literal]) FuncDef:2( foo Args( Var(a) Var(b)) def (a: Literal[1], b: Literal[' foo ']) -> Literal[True] Block:2( PassStmt:2()))) mypy-0.761/test-data/unit/stubgen.test0000644€tŠÔÚ€2›s®0000007645613576752246024157 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for stubgen that generate stubs from Python code [case testEmptyFile] [out] [case testSingleFunction] def f(): x = 1 [out] def f() -> None: ... 
[case testTwoFunctions] def f(a, b): x = 1 def g(arg): pass [out] from typing import Any def f(a: Any, b: Any) -> None: ... def g(arg: Any) -> None: ... [case testDefaultArgInt] def f(a, b=2): ... def g(b=-1, c=0): ... [out] from typing import Any def f(a: Any, b: int = ...) -> None: ... def g(b: int = ..., c: int = ...) -> None: ... [case testDefaultArgNone] def f(x=None): ... [out] from typing import Any, Optional def f(x: Optional[Any] = ...) -> None: ... [case testDefaultArgBool] def f(x=True, y=False): ... [out] def f(x: bool = ..., y: bool = ...) -> None: ... [case testDefaultArgStr] def f(x='foo'): ... [out] def f(x: str = ...) -> None: ... [case testDefaultArgBytes] def f(x=b'foo'): ... [out] def f(x: bytes = ...) -> None: ... [case testDefaultArgFloat] def f(x=1.2): ... [out] def f(x: float = ...) -> None: ... [case testDefaultArgOther] def f(x=ord): ... [out] from typing import Any def f(x: Any = ...) -> None: ... [case testPreserveFunctionAnnotation] def f(x: Foo) -> Bar: ... [out] def f(x: Foo) -> Bar: ... [case testPreserveVarAnnotation] x: Foo [out] x: Foo [case testPreserveVarAnnotationWithoutQuotes] x: 'Foo' [out] x: Foo [case testVarArgs] def f(x, *y): ... [out] from typing import Any def f(x: Any, *y: Any) -> None: ... [case testKwVarArgs] def f(x, **y): ... [out] from typing import Any def f(x: Any, **y: Any) -> None: ... [case testVarArgsWithKwVarArgs] def f(a, *b, **c): ... def g(a, *b, c=1): ... def h(a, *b, c=1, **d): ... def i(a, *, b=1): ... def j(a, *, b=1, **c): ... [out] from typing import Any def f(a: Any, *b: Any, **c: Any) -> None: ... def g(a: Any, *b: Any, c: int = ...) -> None: ... def h(a: Any, *b: Any, c: int = ..., **d: Any) -> None: ... def i(a: Any, *, b: int = ...) -> None: ... def j(a: Any, *, b: int = ..., **c: Any) -> None: ... [case testClass] class A: def f(self, x): x = 1 def g(): ... [out] from typing import Any class A: def f(self, x: Any) -> None: ... def g() -> None: ... 
[case testVariable] x = 1 [out] x: int [case testAnnotatedVariable] x: int = 1 [out] x: int [case testAnnotatedVariableGeneric] x: Foo[int, str] = ... [out] x: Foo[int, str] [case testAnnotatedVariableOldSyntax] x = 1 # type: int [out] x: int [case testAnnotatedVariableNone] x: None [out] x: None [case testAnnotatedVariableNoneOldSyntax] x = None # type: None [out] x: None [case testMultipleVariable] x = y = 1 [out] x: int y: int [case testClassVariable] class C: x = 1 [out] class C: x: int = ... [case testSelfAssignment] class C: def __init__(self): self.x = 1 x.y = 2 [out] class C: x: int = ... def __init__(self) -> None: ... [case testSelfAndClassBodyAssignment] x = 1 class C: x = 1 def __init__(self): self.x = 1 self.x = 1 [out] x: int class C: x: int = ... def __init__(self) -> None: ... [case testEmptyClass] class A: ... [out] class A: ... [case testSkipPrivateFunction] def _f(): ... def g(): ... [out] def g() -> None: ... [case testIncludePrivateFunction] # flags: --include-private def _f(): ... def g(): ... [out] def _f() -> None: ... def g() -> None: ... [case testSkipPrivateMethod] class A: def _f(self): ... [out] class A: ... [case testIncludePrivateMethod] # flags: --include-private class A: def _f(self): ... [out] class A: def _f(self) -> None: ... [case testSkipPrivateVar] _x = 1 class A: _y = 1 [out] class A: ... [case testIncludePrivateVar] # flags: --include-private _x = 1 class A: _y = 1 [out] _x: int class A: _y: int = ... [case testSpecialInternalVar] __all__ = [] __author__ = '' __version__ = '' [out] [case testBaseClass] class A: ... class B(A): ... [out] class A: ... class B(A): ... [case testDecoratedFunction] @decorator def foo(x): ... [out] from typing import Any def foo(x: Any) -> None: ... 
[case testMultipleAssignment] x, y = 1, 2 [out] from typing import Any x: Any y: Any [case testMultipleAssignmentAnnotated] x, y = 1, "2" # type: int, str [out] x: int y: str [case testMultipleAssignment2] [x, y] = 1, 2 [out] from typing import Any x: Any y: Any [case testKeywordOnlyArg] def f(x, *, y=1): ... def g(x, *, y=1, z=2): ... [out] from typing import Any def f(x: Any, *, y: int = ...) -> None: ... def g(x: Any, *, y: int = ..., z: int = ...) -> None: ... [case testProperty] class A: @property def f(self): return 1 @f.setter def f(self, x): ... def h(self): self.f = 1 [out] from typing import Any class A: @property def f(self): ... @f.setter def f(self, x: Any) -> None: ... def h(self) -> None: ... [case testStaticMethod] class A: @staticmethod def f(x): ... [out] from typing import Any class A: @staticmethod def f(x: Any) -> None: ... [case testClassMethod] class A: @classmethod def f(cls): ... [out] class A: @classmethod def f(cls) -> None: ... [case testIfMainCheck] def a(): ... if __name__ == '__main__': x = 1 def f(): ... def b(): ... [out] def a() -> None: ... def b() -> None: ... [case testImportStar] from x import * from a.b import * def f(): ... [out] from x import * from a.b import * def f() -> None: ... [case testNoSpacesBetweenEmptyClasses] class X: def g(self): ... class A: ... class B: ... class C: def f(self): ... [out] class X: def g(self) -> None: ... class A: ... class B: ... class C: def f(self) -> None: ... [case testExceptionBaseClasses] class A(Exception): ... class B(ValueError): ... [out] class A(Exception): ... class B(ValueError): ... [case testOmitSomeSpecialMethods] class A: def __str__(self): ... def __repr__(self): ... def __eq__(self): ... def __getstate__(self): ... def __setstate__(self, state): ... [out] from typing import Any class A: def __eq__(self) -> Any: ... -- Tests that will perform runtime imports of modules. -- Don't use `_import` suffix if there are unquoted forward references. 
[case testOmitDefsNotInAll_import] __all__ = [] + ['f'] def f(): ... def g(): ... [out] def f() -> None: ... [case testOmitDefsNotInAll_semanal] __all__ = ['f'] def f(): ... def g(): ... [out] def f() -> None: ... [case testVarDefsNotInAll_import] __all__ = [] + ['f', 'g'] def f(): ... x = 1 y = 1 def g(): ... [out] def f() -> None: ... def g() -> None: ... [case testIncludeClassNotInAll_import] __all__ = [] + ['f'] def f(): ... class A: ... [out] def f() -> None: ... class A: ... [case testAllAndClass_import] __all__ = ['A'] class A: x = 1 def f(self): ... [out] class A: x: int = ... def f(self) -> None: ... [case testSkipMultiplePrivateDefs] class A: ... _x = 1 _y = 1 _z = 1 class C: ... [out] class A: ... class C: ... [case testIncludeMultiplePrivateDefs] # flags: --include-private class A: ... _x = 1 _y = 1 _z = 1 class C: ... [out] class A: ... _x: int _y: int _z: int class C: ... [case testIncludeFromImportIfInAll_import] from re import match, search, sub __all__ = ['match', 'sub', 'x'] x = 1 [out] from re import match as match, sub as sub x: int [case testExportModule_import] import re __all__ = ['re', 'x'] x = 1 y = 2 [out] import re as re x: int [case testExportModule_import] import re __all__ = ['re', 'x'] x = 1 y = 2 [out] import re as re x: int [case testExportModuleAs_import] import re as rex __all__ = ['rex', 'x'] x = 1 y = 2 [out] import re as rex x: int [case testExportModuleInPackage_import] import urllib.parse as p __all__ = ['p'] [out] import urllib.parse as p [case testExportPackageOfAModule_import] import urllib.parse __all__ = ['urllib'] [out] import urllib as urllib [case testRelativeImportAll] from .x import * [out] from .x import * [case testCommentForUndefinedName_import] __all__ = ['f', 'x', 'C', 'g'] def f(): ... x = 1 class C: def g(self): ... [out] def f() -> None: ... x: int class C: def g(self) -> None: ... # Names in __all__ with no definition: # g [case testIgnoreSlots] class A: __slots__ = () [out] class A: ... 
[case testSkipPrivateProperty] class A: @property def _foo(self): ... [out] class A: ... [case testIncludePrivateProperty] # flags: --include-private class A: @property def _foo(self): ... [out] class A: @property def _foo(self) -> None: ... [case testSkipPrivateStaticAndClassMethod] class A: @staticmethod def _foo(): ... @classmethod def _bar(cls): ... [out] class A: ... [case testIncludePrivateStaticAndClassMethod] # flags: --include-private class A: @staticmethod def _foo(): ... @classmethod def _bar(cls): ... [out] class A: @staticmethod def _foo() -> None: ... @classmethod def _bar(cls) -> None: ... [case testNamedtuple] import collections, x X = collections.namedtuple('X', ['a', 'b']) [out] from collections import namedtuple X = namedtuple('X', ['a', 'b']) [case testNamedtupleAltSyntax] from collections import namedtuple, xx X = namedtuple('X', 'a b') xx [out] from collections import namedtuple X = namedtuple('X', 'a b') [case testNamedtupleWithUnderscore] from collections import namedtuple as _namedtuple def f(): ... X = _namedtuple('X', 'a b') def g(): ... [out] from collections import namedtuple def f() -> None: ... X = namedtuple('X', 'a b') def g() -> None: ... [case testNamedtupleBaseClass] import collections, x _X = collections.namedtuple('_X', ['a', 'b']) class Y(_X): ... [out] from collections import namedtuple _X = namedtuple('_X', ['a', 'b']) class Y(_X): ... [case testNamedtupleAltSyntaxFieldsTuples] from collections import namedtuple, xx X = namedtuple('X', ()) Y = namedtuple('Y', ('a',)) Z = namedtuple('Z', ('a', 'b', 'c', 'd', 'e')) xx [out] from collections import namedtuple X = namedtuple('X', []) Y = namedtuple('Y', ['a']) Z = namedtuple('Z', ['a', 'b', 'c', 'd', 'e']) [case testDynamicNamedTuple] from collections import namedtuple N = namedtuple('N', ['x', 'y'] + ['z']) [out] from typing import Any N: Any [case testArbitraryBaseClass] import x class D(x.C): ... [out] import x class D(x.C): ... 
[case testArbitraryBaseClass] import x.y class D(x.y.C): ... [out] import x.y class D(x.y.C): ... [case testUnqualifiedArbitraryBaseClassWithNoDef] class A(int): ... [out] class A(int): ... [case testUnqualifiedArbitraryBaseClass] from x import X class A(X): ... [out] from x import X class A(X): ... [case testUnqualifiedArbitraryBaseClassWithImportAs] from x import X as _X class A(_X): ... [out] from x import X as _X class A(_X): ... [case testGenericClass] class D(Generic[T]): ... [out] class D(Generic[T]): ... [case testObjectBaseClass] class A(object): ... [out] class A: ... [case testEmptyLines] def x(): ... def f(): class A: def f(self): self.x = 1 def g(): ... [out] def x() -> None: ... def f() -> None: ... def g() -> None: ... [case testNestedClass] class A: class B: x = 1 def f(self): ... def g(self): ... [out] class A: class B: x: int = ... def f(self) -> None: ... def g(self) -> None: ... [case testExportViaRelativeImport] from .api import get [out] from .api import get as get [case testExportViaRelativePackageImport] from .packages.urllib3.contrib import parse [out] from .packages.urllib3.contrib import parse as parse [case testNoExportViaRelativeImport] from . import get get() [out] [case testRelativeImportAndBase] from .x import X class A(X): pass [out] from .x import X class A(X): ... [case testDuplicateDef] def syslog(a): pass def syslog(a): pass [out] from typing import Any def syslog(a: Any) -> None: ... [case testAsyncAwait_fast_parser] async def f(a): x = await y [out] from typing import Any async def f(a: Any) -> None: ... [case testInferOptionalOnlyFunc] class A: x = None def __init__(self, a=None): self.x = [] def method(self, a=None): self.x = [] [out] from typing import Any, Optional class A: x: Any = ... def __init__(self, a: Optional[Any] = ...) -> None: ... def method(self, a: Optional[Any] = ...) -> None: ... 
[case testAnnotationImportsFrom] import foo from collections import defaultdict x: defaultdict [out] from collections import defaultdict x: defaultdict [case testAnnotationImports] import foo import collections x: collections.defaultdict [out] import collections x: collections.defaultdict [case testAnnotationImports] from typing import List import collections x: List[collections.defaultdict] [out] import collections from typing import List x: List[collections.defaultdict] [case testAnnotationFwRefs] x: C class C: attr: C y: C [out] x: C class C: attr: C y: C [case testTypeVarPreserved] tv = TypeVar('tv') [out] from typing import TypeVar tv = TypeVar('tv') [case testTypeVarArgsPreserved] tv = TypeVar('tv', int, str) [out] from typing import TypeVar tv = TypeVar('tv', int, str) [case testTypeVarNamedArgsPreserved] tv = TypeVar('tv', bound=bool, covariant=True) [out] from typing import TypeVar tv = TypeVar('tv', bound=bool, covariant=True) [case testTypeAliasPreserved] alias = str [out] alias = str [case testDeepTypeAliasPreserved] alias = Dict[str, List[str]] [out] alias = Dict[str, List[str]] [case testDeepGenericTypeAliasPreserved] from typing import TypeVar T = TypeVar('T') alias = Union[T, List[T]] [out] from typing import TypeVar T = TypeVar('T') alias = Union[T, List[T]] [case testEllipsisAliasPreserved] alias = Tuple[int, ...] [out] alias = Tuple[int, ...] [case testCallableAliasPreserved] alias1 = Callable[..., int] alias2 = Callable[[str, bool], None] [out] alias1 = Callable[..., int] alias2 = Callable[[str, bool], None] [case testAliasPullsImport] from module import Container alias = Container[Any] [out] from module import Container from typing import Any alias = Container[Any] [case testAliasOnlyToplevel] class Foo: alias = str [out] from typing import Any class Foo: alias: Any = ... [case testAliasExceptions] noalias1 = None noalias2 = ... 
noalias3 = True [out] from typing import Any noalias1: Any noalias2: Any noalias3: bool -- More features/fixes: -- do not export deleted names [case testFunctionNoReturnInfersReturnNone] def f(): x = 1 [out] def f() -> None: ... [case testFunctionReturnNoReturnType] def f(): return 1 def g(): return [out] def f(): ... def g() -> None: ... [case testFunctionEllipsisInfersReturnNone] def f(): ... [out] def f() -> None: ... [case testCallable] from typing import Callable x: Callable[[int, int], int] [out] from typing import Callable x: Callable[[int, int], int] [case testAwaitDef] class F: async def f(self): return 1 async def g(): return 2 [out] class F: async def f(self): ... async def g(): ... [case testCoroutineImportAsyncio] import asyncio class F: @asyncio.coroutine def f(self): return 1 @asyncio.coroutine def g(): return 2 @asyncio.coroutine def h(): return 3 [out] import asyncio class F: @asyncio.coroutine def f(self): ... @asyncio.coroutine def g(): ... @asyncio.coroutine def h(): ... [case testCoroutineImportAsyncioCoroutines] import asyncio.coroutines class F: @asyncio.coroutines.coroutine def f(self): return 1 @asyncio.coroutines.coroutine def g(): return 2 [out] import asyncio.coroutines class F: @asyncio.coroutines.coroutine def f(self): ... @asyncio.coroutines.coroutine def g(): ... [case testCoroutineImportAsyncioCoroutinesSub] import asyncio class F: @asyncio.coroutines.coroutine def f(self): return 1 @asyncio.coroutines.coroutine def g(): return 2 [out] import asyncio class F: @asyncio.coroutines.coroutine def f(self): ... @asyncio.coroutines.coroutine def g(): ... [case testCoroutineImportTypes] import types class F: @types.coroutine def f(self): return 1 @types.coroutine def g(): return 2 [out] import types class F: @types.coroutine def f(self): ... @types.coroutine def g(): ... 
[case testCoroutineFromAsyncioImportCoroutine] from asyncio import coroutine class F: @coroutine def f(self): return 1 @coroutine def g(): return 2 [out] from asyncio import coroutine class F: @coroutine def f(self): ... @coroutine def g(): ... [case testCoroutineFromAsyncioCoroutinesImportCoroutine] from asyncio.coroutines import coroutine class F: @coroutine def f(self): return 1 @coroutine def g(): return 2 [out] from asyncio.coroutines import coroutine class F: @coroutine def f(self): ... @coroutine def g(): ... [case testCoroutineFromTypesImportCoroutine] from types import coroutine class F: @coroutine def f(self): return 1 @coroutine def g(): return 2 [out] from types import coroutine class F: @coroutine def f(self): ... @coroutine def g(): ... [case testCoroutineFromAsyncioImportCoroutineAsC] from asyncio import coroutine as c class F: @c def f(self): return 1 @c def g(): return 2 [out] from asyncio import coroutine as c class F: @c def f(self): ... @c def g(): ... [case testCoroutineFromAsyncioCoroutinesImportCoroutineAsC] from asyncio.coroutines import coroutine as c class F: @c def f(self): return 1 @c def g(): return 2 [out] from asyncio.coroutines import coroutine as c class F: @c def f(self): ... @c def g(): ... [case testCoroutineFromTypesImportCoroutineAsC] from types import coroutine as c class F: @c def f(self): return 1 @c def g(): return 2 [out] from types import coroutine as c class F: @c def f(self): ... @c def g(): ... [case testCoroutineImportAsyncioAsA] import asyncio as a class F: @a.coroutine def f(self): return 1 @a.coroutine def g(): return 2 [out] import asyncio as a class F: @a.coroutine def f(self): ... @a.coroutine def g(): ... [case testCoroutineImportAsyncioCoroutinesAsC] import asyncio.coroutines as c class F: @c.coroutine def f(self): return 1 @c.coroutine def g(): return 2 [out] import asyncio.coroutines as c class F: @c.coroutine def f(self): ... @c.coroutine def g(): ... 
[case testCoroutineImportAsyncioCoroutinesSubAsA] import asyncio as a class F: @a.coroutines.coroutine def f(self): return 1 @a.coroutines.coroutine def g(): return 2 [out] import asyncio as a class F: @a.coroutines.coroutine def f(self): ... @a.coroutines.coroutine def g(): ... [case testCoroutineImportTypesAsT] import types as t class F: @t.coroutine def f(self): return 1 @t.coroutine def g(): return 2 [out] import types as t class F: @t.coroutine def f(self): ... @t.coroutine def g(): ... [case testCoroutineSpecialCase_import] import asyncio __all__ = ['C'] @asyncio.coroutine def f(): pass class C: def f(self): pass [out] import asyncio class C: def f(self) -> None: ... -- Tests for stub generation from semantically analyzed trees. -- These tests are much slower, so use the `_semanal` suffix only when needed. [case testNestedClass_semanal] class Outer: class Inner: pass A = Outer.Inner [out] class Outer: class Inner: ... A = Outer.Inner [case testFunctionAlias_semanal] from asyncio import coroutine @coroutine def start_server(): ... start = start_server [out] from asyncio import coroutine @coroutine def start_server() -> None: ... start = start_server [case testModuleAlias_semanal] import a b = a [file a.py] x = 1 [out] import a b = a [case testBadAliasNested_semanal] import a x = registry[a.f] [file a.py] def f(): ... [out] from typing import Any x: Any [case testCrossModuleClass_semanal] import a class C: x: A def f(self) -> A: ... A = a.A [file a.py] class A: ... [out] import a class C: x: A def f(self) -> A: ... A = a.A [case testCrossModuleFunction_semanal] import a g = a.f [file a.py] def f(): ... [out] import a g = a.f [case testPrivateAliasesExcluded_semanal] import a, _a class C: ... A = a._A B = _a.f _C = C [file a.py] class _A: ... [file _a.py] def f(): ... [out] from typing import Any class C: ... A: Any B: Any [case testPrivateAliasesIncluded_semanal] # flags: --include-private import a, _a class C: ... 
A = a._A B = _a.f _C = C [file a.py] class _A: ... [file _a.py] def f(): ... [out] import _a import a class C: ... A = a._A B = _a.f _C = C [case testFinalWrapped_semanal] from typing import Final x: Final = 1 y: Final = x z: Final[object] t: Final [out] from typing import Any, Final x: Final[int] y: Final[Any] z: Final[object] t: Final[Any] [case testFinalInvalid_semanal] Final = 'boom' x: Final = 1 [out] Final: str x: Final [case testNoFunctionNested_semanal] import a from typing import Dict, Any funcs: Dict[Any, Any] f = funcs[a.f] [out] from typing import Any, Dict funcs: Dict[Any, Any] f: Any [case testAbstractMethodNameExpr] from abc import ABCMeta, abstractmethod class A(metaclass=ABCMeta): @abstractmethod def meth(self): pass [out] from abc import ABCMeta, abstractmethod class A(metaclass=ABCMeta): @abstractmethod def meth(self): ... [case testAbstractMethodMemberExpr] import abc class A(metaclass=abc.ABCMeta): @abc.abstractmethod def meth(self): pass [out] import abc class A(metaclass=abc.ABCMeta): @abc.abstractmethod def meth(self): ... [case testABCMeta_semanal] from base import Base from abc import abstractmethod class C(Base): @abstractmethod def other(self): pass [file base.py] from abc import abstractmethod, ABCMeta class Base(metaclass=ABCMeta): @abstractmethod def meth(self): pass [out] import abc from abc import abstractmethod from base import Base from typing import Any class C(Base, metaclass=abc.ABCMeta): @abstractmethod def other(self) -> Any: ... [case testInvalidNumberOfArgsInAnnotation] def f(x): # type: () -> int return '' [out] from typing import Any def f(x: Any): ... [case testFunctionPartiallyAnnotated] def f(x) -> None: pass def g(x, y: str): pass class A: def f(self, x) -> None: pass [out] from typing import Any def f(x: Any) -> None: ... def g(x: Any, y: str) -> Any: ... class A: def f(self, x: Any) -> None: ... 
[case testPlacementOfDecorators] class A: @property def x(self): self.y = 'y' return 'x' class B: @property def x(self): return 'x' @x.setter def x(self, value): self.y = 'y' [out] from typing import Any class A: y: str = ... @property def x(self): ... class B: @property def x(self): ... y: str = ... @x.setter def x(self, value: Any) -> None: ... [case testMisplacedTypeComment] def f(): x = 0 # type: str y = '' [out] def f() -> None: ... [case testConditionalImportAll_semanal] __all__ = ['cookielib'] if object(): from http import cookiejar as cookielib else: import cookielib [out] import FIXME as cookielib [case testCannotCalculateMRO_semanal] class X: pass class int(int, X): # Cycle pass class A: pass class B(A): pass class C(B): pass class D(A, B): pass # No consistent method resolution order class E(C, D): pass # Ditto [out] class X: ... class int(int, X): ... class A: ... class B(A): ... class C(B): ... class D(A, B): ... class E(C, D): ... [case testUnreachableCode_semanal] MYPY = False class A: pass if MYPY: class C(A): def f(self) -> None: pass else: def f(i): return i class C(A): def g(self) -> None: pass [out] MYPY: bool class A: ... class C(A): def f(self) -> None: ... [case testAbstractProperty1_semanal] import other import abc class A: @abc.abstractproperty def x(self): pass [out] import abc from typing import Any class A(metaclass=abc.ABCMeta): @property @abc.abstractmethod def x(self) -> Any: ... [case testAbstractProperty2_semanal] import other from abc import abstractproperty class A: @abstractproperty def x(self): pass [out] import abc from typing import Any class A(metaclass=abc.ABCMeta): @property @abc.abstractmethod def x(self) -> Any: ... [case testAbstractProperty3_semanal] import other from abc import abstractproperty as alias_name class A: @alias_name def x(self): pass [out] import abc from typing import Any class A(metaclass=abc.ABCMeta): @property @abc.abstractmethod def x(self) -> Any: ... 
[case testClassWithNameAnyOrOptional] def f(x=object()): return 1 def g(x=None): pass x = g() class Any: pass def Optional(): return 0 [out] from typing import Any as _Any, Optional as _Optional def f(x: _Any = ...): ... def g(x: _Optional[_Any] = ...) -> None: ... x: _Any class Any: ... def Optional(): ... [case testExportedNameImported] # modules: main a b from a import C class D(C): pass [file a.py] from b import C [file b.py] class C: pass [out] # main.pyi from a import C class D(C): ... # a.pyi from b import C as C # b.pyi class C: ... [case testVendoredSix] from p1.vendored import six from p1.vendor.six import foobar from p1.packages.six.moves import http_client from .packages.six.moves import queue from p1.vendored.six.moves.http_client import foo from p1.vendored.six.moves.urllib.parse import bar class C(http_client.HTTPMessage): pass class D(six.Iterator): pass [out] import six from six import foobar as foobar from six.moves import http_client, queue as queue from six.moves.http_client import foo as foo from six.moves.urllib.parse import bar as bar class C(http_client.HTTPMessage): ... class D(six.Iterator): ... [case testVendoredPackage] # modules: main p.vendored.requests p.sub.requests from p.vendored.requests import Request from p.sub.requests import Request2 x = Request() y = Request2() [file p/__init__.py] [file p/vendored/__init__.py] [file p/vendored/requests.py] class Request: pass [file p/sub/__init__.py] [file p/sub/requests.py] class Request2: pass [out] # main.pyi from typing import Any x: Any y: Any # p/sub/requests.pyi class Request2: ... [case testTestFiles] # modules: p p.x p.tests p.tests.test_foo [file p/__init__.py] def f(): pass [file p/x.py] def g(): pass [file p/tests/__init__.py] [file p/tests/test_foo.py] def test_thing(): pass [out] # p/__init__.pyi def f() -> None: ... # p/x.pyi def g() -> None: ... 
[case testTestFiles_import] # modules: p p.x p.tests p.tests.test_foo [file p/__init__.py] def f(): pass [file p/x.py] def g(): pass [file p/tests/__init__.py] [file p/tests/test_foo.py] def test_thing(): pass [out] # p/__init__.pyi def f() -> None: ... # p/x.pyi def g() -> None: ... [case testVerboseFlag] # Just test that --verbose does not break anything in a basic test case. # flags: --verbose def f(x, y): pass [out] from typing import Any def f(x: Any, y: Any) -> None: ... [case testImportedModuleExits_import] # modules: a b c [file a.py] def g(): pass [file b.py] import sys def f(): pass sys.exit(1) [file c.py] x = 0 [out] # a.pyi def g() -> None: ... # b.pyi def f() -> None: ... # c.pyi x: int [case testImportedModuleHardExits_import] # modules: a b c [file a.py] def g(): pass [file b.py] import os def f(): pass os._exit(1) # Kill process [file c.py] x = 0 [out] # a.pyi def g() -> None: ... # b.pyi def f() -> None: ... # c.pyi x: int [case testImportedModuleHardExits2_import] # modules: p/a p/b p/c [file p/__init__.py] [file p/a.py] def g(): pass [file p/b.py] import os def f(): pass os._exit(1) # Kill process [file p/c.py] x = 0 [out] # p/a.pyi def g() -> None: ... # p/b.pyi def f() -> None: ... # p/c.pyi x: int [case testImportedModuleHardExits3_import] # modules: p p/a [file p/__init__.py] import os def f(): pass os._exit(1) # Kill process [file p/a.py] def g(): pass [out] # p/__init__.pyi def f() -> None: ... # p/a.pyi def g() -> None: ... [case testImportedModuleHardExits4_import] # flags: -p p # modules: p p/a [file p/__init__.py] def ff(): pass [file p/a.py] import os def gg(): pass os._exit(1) # Kill process [out] # p/__init__.pyi def ff() -> None: ... # p/a.pyi def gg() -> None: ... 
[case testExportInternalImportsByDefault] # modules: p p/a [file p/__init__.py] from p.a import A, f from m import C a: A c: C f() [file p/a.py] class A: pass def f(): pass [file m.py] class C: pass [out] # p/__init__.pyi from m import C from p.a import A as A, f as f a: A c: C # p/a.pyi class A: ... def f() -> None: ... [case testNoExportOfInternalImportsIfAll_import] # modules: p p/a [file p/__init__.py] from p.a import A __all__ = ['a'] a = None # type: A b = 0 # type: int [file p/a.py] class A: pass [out] # p/__init__.pyi from p.a import A a: A # p/a.pyi class A: ... [case testExportInternalImportsByDefaultFromUnderscorePackage] # modules: p [file p.py] from _p import A from _m import B from _pm import C a: A b: B c: C [file _p.py] class A: pass [file _m.py] class B: pass [file _pm.py] class C: pass [out] from _m import B from _p import A as A from _pm import C a: A b: B c: C [case testDisableExportOfInternalImports] # flags: --export-less # modules: p p/a [file p/__init__.py] from p.a import A, B from m import C a: A c: C [file p/a.py] class A: pass class B: pass [file m.py] class C: pass [out] # p/__init__.pyi from m import C from p.a import A, B as B a: A c: C # p/a.pyi class A: ... class B: ... 
[case testExportInternalImportsByDefaultUsingRelativeImport] # modules: p.a [file p/__init__.py] [file p/a.py] from .b import f f() [file p/b.py] def f(): pass [out] from .b import f as f [case testExportInternalImportsByDefaultSkipPrivate] # modules: p.a [file p/__init__.py] [file p/a.py] from .b import _f, _g as _g, _i from p.b import _h _f() _h() [file p/b.py] def _f(): pass def _g(): pass def _h(): pass def _i(): pass x = 0 [out] [case testExportInternalImportsByDefaultIncludePrivate] # flags: --include-private # modules: p.a [file p/__init__.py] [file p/a.py] from .b import _f _f() [file p/b.py] def _f(): pass [out] from .b import _f as _f [case testHideDunderModuleAttributes] from m import ( __about__, __author__, __copyright__, __email__, __license__, __summary__, __title__, __uri__, __version__ ) class A: __uri__ = 0 [file m.py] __about__ = '' __author__ = '' __copyright__ = '' __email__ = '' __license__ = '' __summary__ = '' __title__ = '' __uri__ = '' __version__ = '' [out] class A: ... [case testHideDunderModuleAttributesWithAll_import] from m import ( __about__, __author__, __copyright__, __email__, __license__, __summary__, __title__, __uri__, __version__ ) __all__ = ['__about__', '__author__', '__version__'] [file m.py] __about__ = '' __author__ = '' __copyright__ = '' __email__ = '' __license__ = '' __summary__ = '' __title__ = '' __uri__ = '' __version__ = '' [out] [case testAttrsClass_semanal] import attr @attr.s class C: x = attr.ib() [out] from typing import Any class C: x: Any = ... def __init__(self, x: Any) -> None: ... def __ne__(self, other: Any) -> Any: ... def __eq__(self, other: Any) -> Any: ... def __lt__(self, other: Any) -> Any: ... def __le__(self, other: Any) -> Any: ... def __gt__(self, other: Any) -> Any: ... def __ge__(self, other: Any) -> Any: ... 
[case testNamedTupleInClass] from collections import namedtuple class C: N = namedtuple('N', ['x', 'y']) [out] from collections import namedtuple class C: N = namedtuple('N', ['x', 'y']) mypy-0.761/test-data/unit/typexport-basic.test0000644€tŠÔÚ€2›s®0000005651313576752246025635 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for exporting node types from the type checker. -- -- Each test case consists of at least two sections. -- The first section contains [case NAME] followed by the input code, -- while the second section contains [out] followed by the output from the type -- checker. -- -- The first line of input code should be a regexp in comment that describes -- the information to dump (prefix with ##). The regexp is matched against -- the following items: -- -- * each name of an expression node -- * each type string of a node (e.g. OpExpr) -- -- Lines starting with "--" in this file will be ignored. -- Expressions -- ----------- [case testConstructorCall] import typing A() B() class A: pass class B: pass [out] CallExpr(2) : A NameExpr(2) : def () -> A CallExpr(3) : B NameExpr(3) : def () -> B [case testLiterals] import typing 5 2.3 'foo' [builtins fixtures/primitives.pyi] [out] IntExpr(2) : Literal[5]? FloatExpr(3) : builtins.float StrExpr(4) : Literal['foo']? [case testNameExpression] a = None # type: A a # node def f(aa: 'A') -> None: b = None # type: B aa # node b # node class A: def g(self) -> None: self # node class B: pass [out] NameExpr(3) : A NameExpr(6) : A NameExpr(7) : B NameExpr(10) : A [case testEllipsis] import typing ... 
[out] EllipsisExpr(2) : builtins.ellipsis [case testMemberAccess] ## MemberExpr|CallExpr a = None # type: A a.m a.f a.f() class A: m = None # type: A def f(self) -> 'B': pass class B: pass [out] MemberExpr(4) : A MemberExpr(5) : def () -> B CallExpr(6) : B MemberExpr(6) : def () -> B [case testCastExpression] ## CastExpr|[a-z] from typing import Any, cast d = None # type: Any b = None # type: B class A: pass class B(A): pass cast(A, d) cast(A, b) cast(B, b) [out] CastExpr(7) : A NameExpr(7) : Any CastExpr(8) : A NameExpr(8) : B CastExpr(9) : B NameExpr(9) : B [case testArithmeticOps] ## OpExpr import typing a = 1 + 2 1.2 * 3 2.2 - 3 1 / 2 [file builtins.py] class object: def __init__(self) -> None: pass class function: pass class int: def __add__(self, x: int) -> int: pass def __truediv__(self, x: int) -> float: pass class float: def __mul__(self, x: int) -> float: pass def __sub__(self, x: int) -> float: pass class type: pass class str: pass [out] OpExpr(3) : builtins.int OpExpr(4) : builtins.float OpExpr(5) : builtins.float OpExpr(6) : builtins.float [case testComparisonOps] ## ComparisonExpr import typing 1 == object() 1 == 2 2 < 3 1 < 2 < 3 8 > 3 4 < 6 > 2 [file builtins.py] class object: def __init__(self) -> None: pass class int: def __eq__(self, x: object) -> bool: pass def __lt__(self, x: int) -> bool: pass def __gt__(self, x: int) -> int: pass class bool: pass class type: pass class function: pass class str: pass [out] ComparisonExpr(3) : builtins.bool ComparisonExpr(4) : builtins.bool ComparisonExpr(5) : builtins.bool ComparisonExpr(6) : builtins.bool ComparisonExpr(7) : builtins.int ComparisonExpr(8) : builtins.object [case testBooleanOps] ## OpExpr|UnaryExpr import typing a = 1 a and a a or a not a [builtins fixtures/bool.pyi] [out] OpExpr(4) : builtins.int OpExpr(5) : builtins.int UnaryExpr(6) : builtins.bool [case testBooleanOpsOnBools] ## OpExpr|UnaryExpr import typing a = bool() a and a a or a not a [builtins fixtures/bool.pyi] [out] OpExpr(4) : 
builtins.bool OpExpr(5) : builtins.bool UnaryExpr(6) : builtins.bool [case testFunctionCall] ## CallExpr from typing import Tuple f( A(), B()) class A: pass class B: pass def f(a: A, b: B) -> Tuple[A, B]: pass [builtins fixtures/tuple-simple.pyi] [out] CallExpr(3) : Tuple[A, B] CallExpr(4) : A CallExpr(5) : B -- Statements -- ---------- [case testSimpleAssignment] from typing import Any a = None # type: A b = a # type: Any if b: b = a a = b class A: pass [out] NameExpr(3) : A NameExpr(4) : Any NameExpr(5) : A NameExpr(5) : Any NameExpr(6) : A NameExpr(6) : Any [case testMemberAssignment] from typing import Any class A: a = None # type: A b = None # type: Any def f(self) -> None: self.b = self.a self.a.a = self.b [out] MemberExpr(6) : A MemberExpr(6) : Any NameExpr(6) : A NameExpr(6) : A MemberExpr(7) : A MemberExpr(7) : A MemberExpr(7) : A NameExpr(7) : A NameExpr(7) : A [case testIf] a = None # type: bool if a: 1 elif not a: 1 [builtins fixtures/bool.pyi] [out] NameExpr(3) : builtins.bool IntExpr(4) : Literal[1]? NameExpr(5) : builtins.bool UnaryExpr(5) : builtins.bool IntExpr(6) : Literal[1]? 
[case testWhile] a = None # type: bool while a: a [builtins fixtures/bool.pyi] [out] NameExpr(3) : builtins.bool NameExpr(4) : builtins.bool -- Simple type inference -- --------------------- [case testInferSingleType] import typing x = () [builtins fixtures/primitives.pyi] [out] NameExpr(2) : Tuple[] TupleExpr(2) : Tuple[] [case testInferTwoTypes] ## NameExpr import typing (s, i) = 'x', 1 [builtins fixtures/primitives.pyi] [out] NameExpr(3) : builtins.str NameExpr(4) : builtins.int [case testInferSingleLocalVarType] import typing def f() -> None: x = () [builtins fixtures/primitives.pyi] [out] NameExpr(3) : Tuple[] TupleExpr(3) : Tuple[] -- Basic generics -- -------------- [case testImplicitBoundTypeVarsForMethod] ## MemberExpr from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> T: pass class B: pass def g() -> None: a = None # type: A[B] f = a.f [out] MemberExpr(9) : def () -> B [case testImplicitBoundTypeVarsForSelfMethodReference] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> T: return self.f() [out] CallExpr(5) : T`1 MemberExpr(5) : def () -> T`1 NameExpr(5) : A[T`1] [case testGenericFunctionCallWithTypeApp-skip] ## CallExpr|TypeApplication|NameExpr from typing import Any, TypeVar, Tuple T = TypeVar('T') class A: pass f[A](A()) f[Any](A()) def f(a: T) -> Tuple[T, T]: pass [builtins fixtures/tuple.pyi] [out] CallExpr(5) : A CallExpr(5) : Tuple[A, A] NameExpr(5) : def () -> A NameExpr(5) : def (a: A) -> Tuple[A, A] TypeApplication(5) : def (a: A) -> Tuple[A, A] CallExpr(6) : A CallExpr(6) : Tuple[Any, Any] NameExpr(6) : def () -> A NameExpr(6) : def (a: Any) -> Tuple[Any, Any] TypeApplication(6) : def (a: Any) -> Tuple[Any, Any] -- NOTE: Type applications are not supported for generic methods, so the -- following test cases are commented out. 
--[case testGenericMethodCallWithTypeApp] --## CallExpr|MemberExpr|TypeApplication --from typing import Any, TypeVar, Tuple --T = TypeVar('T') --class A: -- def f(self, a: T) -> Tuple[T, T]: pass --a.f[A](a) --a.f[Any](a) --a = None # type: A --[builtins fixtures/tuple.py] --[out] --CallExpr(2) : Tuple[A, A] --MemberExpr(2) : def (A a) -> Tuple[A, A] --TypeApplication(2) : def (A a) -> Tuple[A, A] --CallExpr(3) : Tuple[Any, Any] --MemberExpr(3) : def (any a) -> Tuple[Any, Any] --TypeApplication(3) : def (any a) -> Tuple[Any, Any] --[case testGenericMethodCallInGenericTypeWithTypeApp] --## CallExpr|MemberExpr|TypeApplication --from typing import Any, TypeVar, Generic, Tuple --T = TypeVar('T') --S = TypeVar('S') --class B: pass --class C: pass --a.f[B](b) --a.f[Any](b) --class A(Generic[T]): -- def f(self, a: S) -> Tuple[T, S]: pass --a = None # type: A[C] --b = None # type: B --[builtins fixtures/tuple.py] --[out] --CallExpr(6) : Tuple[C, B] --MemberExpr(6) : def (B a) -> Tuple[C, B] --TypeApplication(6) : def (B a) -> Tuple[C, B] --CallExpr(7) : Tuple[C, Any] --MemberExpr(7) : def (any a) -> Tuple[C, Any] --TypeApplication(7) : def (any a) -> Tuple[C, Any] [case testGenericTypeVariableInference] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, a: T) -> None: pass class B: pass A(A(B())) [out] CallExpr(6) : A[A[B]] CallExpr(6) : A[B] CallExpr(6) : B NameExpr(6) : def (a: A[B]) -> A[A[B]] NameExpr(6) : def (a: B) -> A[B] NameExpr(6) : def () -> B -- Generic inheritance -- ------------------- [case testInheritedMethodReferenceWithGenericInheritance] from typing import TypeVar, Generic T = TypeVar('T') class C: pass class A(Generic[T]): def f(self, a: T) -> None: pass class B(A[C]): def g(self, c: C) -> None: self.f(c) [out] CallExpr(8) : None MemberExpr(8) : def (a: C) NameExpr(8) : C NameExpr(8) : B [case testInheritedMethodReferenceWithGenericSubclass] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') 
class C: pass class A(Generic[S, T]): def f(self, a: C) -> None: pass class B(A[C, T], Generic[T]): def g(self, c: C) -> None: self.f(c) [out] CallExpr(9) : None MemberExpr(9) : def (a: C) NameExpr(9) : C NameExpr(9) : B[T`1] [case testExternalReferenceWithGenericInheritance] from typing import TypeVar, Generic T = TypeVar('T') class C: pass class A(Generic[T]): def f(self, a: T) -> None: pass class B(A[C]): pass b = None # type: B c = None # type: C b.f(c) [out] CallExpr(9) : None MemberExpr(9) : def (a: C) NameExpr(9) : B NameExpr(9) : C -- Implicit Any types -- ------------------ [case testDynamicallyTypedFunction] def f(x): y = x + o z = o z o = None # type: object [out] NameExpr(3) : builtins.object NameExpr(3) : Any NameExpr(3) : Any OpExpr(3) : Any NameExpr(4) : builtins.object NameExpr(4) : Any NameExpr(5) : Any [case testDynamicallyTypedMethod] class A: def f(self, x): y = ( o) # Place y and o on separate lines x y o = None # type: object [out] NameExpr(4) : Any NameExpr(5) : builtins.object NameExpr(6) : Any NameExpr(7) : Any [case testDynamicallyTypedConstructor] class A: def __init__(self, x): y = o x y o = None # type: object [out] NameExpr(4) : builtins.object NameExpr(4) : Any NameExpr(5) : Any NameExpr(6) : Any [case testCallInDynamicallyTypedFunction] def f(): g(o) def g(a: object) -> object: pass o = None # type: object [out] CallExpr(3) : Any NameExpr(3) : def (a: builtins.object) -> builtins.object NameExpr(3) : builtins.object [case testExpressionInDynamicallyTypedFn] import typing def f(): x = None x.f() [out] CallExpr(4) : Any MemberExpr(4) : Any NameExpr(4) : Any [case testGenericCall] from typing import TypeVar, Generic T = TypeVar('T') def f() -> None: a1 = A(b) # type: A[B] a2 = A(b) # type: A[object] class A(Generic[T]): def __init__(self, a: T) -> None: pass class B: pass b = None # type: B [out] CallExpr(4) : A[B] NameExpr(4) : def (a: B) -> A[B] NameExpr(4) : B CallExpr(5) : A[builtins.object] NameExpr(5) : def (a: builtins.object) -> 
A[builtins.object] NameExpr(5) : B [case testGenericCallInDynamicallyTypedFunction] from typing import TypeVar, Generic T = TypeVar('T') def f(): A() class A(Generic[T]): pass [out] CallExpr(4) : Any NameExpr(4) : def [T] () -> A[T`1] [case testGenericCallInDynamicallyTypedFunction2] from typing import TypeVar, Generic T = TypeVar('T') def f(): A(f) class A(Generic[T]): def __init__(self, x: T) -> None: pass [out] CallExpr(4) : Any NameExpr(4) : def [T] (x: T`1) -> A[T`1] NameExpr(4) : def () -> Any [case testGenericCallInDynamicallyTypedFunction3] from typing import TypeVar t = TypeVar('t') def f(): g(None) def g(x: t) -> t: pass [out] CallExpr(4) : Any NameExpr(4) : def [t] (x: t`-1) -> t`-1 -- Generic types and type inference -- -------------------------------- [case testInferenceInArgumentContext] ## CallExpr from typing import TypeVar, Generic T = TypeVar('T') f(g()) f(h(b)) f(h(c)) b = None # type: B c = None # type: C def f(a: 'A[B]') -> None: pass def g() -> 'A[T]': pass def h(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass class C(B): pass [out] CallExpr(4) : None CallExpr(4) : A[B] CallExpr(5) : None CallExpr(5) : A[B] CallExpr(6) : None CallExpr(6) : A[B] [case testInferGenericTypeForLocalVariable] from typing import TypeVar, Generic T = TypeVar('T') def f() -> None: a = A(b) a a2, a3 = A(b), A(c) a2 a3 b = None # type: B c = None # type: C class A(Generic[T]): def __init__(self, x: T) -> None: pass class B: pass class C: pass [out] CallExpr(4) : A[B] NameExpr(4) : def (x: B) -> A[B] NameExpr(4) : A[B] NameExpr(4) : B NameExpr(5) : A[B] CallExpr(6) : A[B] CallExpr(6) : A[C] NameExpr(6) : def (x: B) -> A[B] NameExpr(6) : def (x: C) -> A[C] NameExpr(6) : A[B] NameExpr(6) : A[C] NameExpr(6) : B NameExpr(6) : C NameExpr(7) : A[B] NameExpr(8) : A[C] [case testNestedGenericCalls] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') def h() -> None: g(f(c)) c = None # type: C class A(Generic[T]): pass class B(Generic[T]): pass 
class C: pass def f(a: T) -> A[T]: pass def g(a: S) -> B[S]: pass [out] CallExpr(5) : A[C] CallExpr(5) : B[A[C]] NameExpr(5) : C NameExpr(5) : def (a: C) -> A[C] NameExpr(5) : def (a: A[C]) -> B[A[C]] [case testInferListLiterals] from typing import List a = [] # type: List[A] class A: pass [builtins fixtures/list.pyi] [out] ListExpr(2) : builtins.list[A] [case testInferGenericTypeInTypeAnyContext] from typing import Any a = [] # type: Any [builtins fixtures/list.pyi] [out] ListExpr(2) : builtins.list[Any] [case testHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') map( f, [A()]) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass [builtins fixtures/list.pyi] [out] CallExpr(4) : builtins.list[B] NameExpr(4) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] NameExpr(5) : def (a: A) -> B CallExpr(6) : A ListExpr(6) : builtins.list[A] NameExpr(6) : def () -> A -- Lambdas -- ------- [case testLambdaWithTypeInferredFromContext] from typing import Callable f = lambda x: x.a # type: Callable[[B], A] class A: pass class B: a = None # type: A [out] LambdaExpr(2) : def (B) -> A MemberExpr(2) : A NameExpr(2) : B [case testLambdaWithInferredType] ## LambdaExpr|NameExpr import typing f = lambda: 1 [out] LambdaExpr(3) : def () -> Literal[1]? 
NameExpr(3) : def () -> builtins.int [case testLambdaWithInferredType2] ## LambdaExpr|NameExpr import typing f = lambda: [1] [builtins fixtures/list.pyi] [out] LambdaExpr(3) : def () -> builtins.list[builtins.int] NameExpr(3) : def () -> builtins.list[builtins.int] [case testLambdaWithInferredType3] from typing import List, Callable f = lambda x: [] # type: Callable[[B], List[A]] class A: pass class B: a = None # type: A [builtins fixtures/list.pyi] [out] LambdaExpr(2) : def (B) -> builtins.list[A] ListExpr(2) : builtins.list[A] [case testLambdaAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( lambda x: f(x), l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass [builtins fixtures/list.pyi] [out] CallExpr(5) : builtins.list[B] NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] CallExpr(6) : B LambdaExpr(6) : def (A) -> B NameExpr(6) : def (a: A) -> B NameExpr(6) : builtins.list[A] NameExpr(6) : A [case testLambdaAndHigherOrderFunction2] ## LambdaExpr|NameExpr|ListExpr from typing import TypeVar, List, Callable t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( lambda x: [f(x)], l) def map(f: Callable[[t], List[s]], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass [builtins fixtures/list.pyi] [out] NameExpr(6) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B] LambdaExpr(7) : def (A) -> builtins.list[B] ListExpr(7) : builtins.list[B] NameExpr(7) : def (a: A) -> B NameExpr(7) : builtins.list[A] NameExpr(7) : A [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( [lambda x: x], l) def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass class A: pass [builtins fixtures/list.pyi] [out] -- TODO We probably should not silently infer 
'Any' types in statically typed -- context. Perhaps just fail instead? CallExpr(5) : builtins.list[Any] NameExpr(5) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any] LambdaExpr(6) : def (A) -> A ListExpr(6) : builtins.list[def (A) -> Any] NameExpr(6) : A NameExpr(7) : builtins.list[A] [case testLambdaAndHigherOrderFunction3] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( lambda x: x.b, l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: b = None # type: B class B: pass [builtins fixtures/list.pyi] [out] CallExpr(5) : builtins.list[B] NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] LambdaExpr(6) : def (A) -> B MemberExpr(6) : B NameExpr(6) : A NameExpr(7) : builtins.list[A] [case testLambdaAndHigherOrderFunctionAndKeywordArgs] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( a=l, f=lambda x: x.b) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: b = None # type: B class B: pass [builtins fixtures/list.pyi] [out] CallExpr(5) : builtins.list[B] NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] NameExpr(6) : builtins.list[A] LambdaExpr(7) : def (A) -> B MemberExpr(7) : B NameExpr(7) : A -- Boolean operations -- ------------------ [case testBooleanOr] from typing import List a = None # type: List[A] a or [] a = a or [] if int(): a = [] or a class A: pass [builtins fixtures/list.pyi] [out] ListExpr(3) : builtins.list[A] NameExpr(3) : builtins.list[A] OpExpr(3) : builtins.list[A] ListExpr(4) : builtins.list[A] NameExpr(4) : builtins.list[A] NameExpr(4) : builtins.list[A] OpExpr(4) : builtins.list[A] CallExpr(5) : builtins.int NameExpr(5) : def () -> builtins.int ListExpr(6) : builtins.list[A] NameExpr(6) : builtins.list[A] NameExpr(6) : builtins.list[A] OpExpr(6) : builtins.list[A] -- Class attributes -- ---------------- [case 
testUnboundMethod] ## MemberExpr import typing class A: def f(self) -> None: pass A.f [out] MemberExpr(5) : def (self: A) [case testUnboundMethodWithImplicitSig] ## MemberExpr import typing class A: def f(self): pass A.f [out] MemberExpr(5) : def (self: A) -> Any [case testOverloadedUnboundMethod] ## MemberExpr from typing import overload class A: @overload def f(self) -> None: pass @overload def f(self, __x: object) -> None: pass def f(self, *args) -> None: pass A.f [out] MemberExpr(10) : Overload(def (self: A), def (self: A, builtins.object)) [case testOverloadedUnboundMethodWithImplicitSig] ## MemberExpr from typing import overload class A: @overload def f(self): pass @overload def f(self, __x): pass def f(self, *args): pass A.f [out] MemberExpr(10) : Overload(def (self: A) -> Any, def (self: A, Any) -> Any) [case testUnboundMethodWithInheritance] ## MemberExpr import typing class A: def __init__(self) -> None: pass def f(self) -> None: pass class B(A): pass B.f [out] MemberExpr(8) : def (self: A) [case testUnboundGenericMethod] ## MemberExpr from typing import TypeVar t = TypeVar('t') class B: pass class A: def f(self, x: t) -> None: pass A.f(A(), B()) [out] MemberExpr(7) : def (self: A, x: B) [case testUnboundMethodOfGenericClass] ## MemberExpr from typing import TypeVar, Generic t = TypeVar('t') class B: pass class A(Generic[t]): def f(self, x: t) -> None: pass A.f a_b = A() # type: A[B] A.f(a_b, B()) [out] MemberExpr(7) : def [t] (self: A[t`1], x: t`1) MemberExpr(9) : def (self: A[B], x: B) [case testUnboundOverloadedMethodOfGenericClass] ## CallExpr from typing import TypeVar, Generic, overload t = TypeVar('t') class B: pass class A(Generic[t]): @overload def f(self, x: t) -> t: pass @overload def f(self) -> object: pass def f(self, *args): pass ab, b = None, None # type: (A[B], B) A.f(ab, b) [out] CallExpr(13) : B [case testUnboundMethodOfGenericClassWithImplicitSig] ## MemberExpr from typing import TypeVar, Generic t = TypeVar('t') class B: pass class 
A(Generic[t]): def f(self, x): pass A.f(None, None) [out] MemberExpr(7) : def (self: A[t`1], x: Any) -> Any [case testGenericMethodOfGenericClass] ## MemberExpr from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class B: pass class A(Generic[t]): def f(self, y: s) -> None: pass ab = None # type: A[B] o = None # type: object A.f(ab, o) [out] MemberExpr(10) : def (self: A[B], y: builtins.object) -- Type variables with value restriction -- ------------------------------------- [case testTypeVariableWithValueRestriction] ## NameExpr from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') [out] NameExpr(5) : def (x: builtins.int) NameExpr(6) : def (x: builtins.str) [case testTypeVariableWithValueRestrictionAndSubtype] ## NameExpr|CallExpr from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: pass class S(str): pass s = None # type: S f(s) [out] CallExpr(7) : builtins.str NameExpr(7) : def (x: builtins.str) -> builtins.str NameExpr(7) : S -- Binary operations -- ----------------- [case testBinaryOperatorWithAnyLeftOperand] ## OpExpr from typing import Any, cast class B: def __add__(self, x: int) -> str: pass class A: def __radd__(self, x: B) -> int: pass cast(Any, 1) + A() B() + A() [out] OpExpr(7) : Any OpExpr(8) : builtins.int [case testBinaryOperatorWithAnyRightOperand] ## OpExpr from typing import Any, cast class A: def __add__(self, x: str) -> int: pass A() + cast(Any, 1) [out] OpExpr(5) : Any -- Callable overloading -- -------------------- [case testOverloadedFunctionType] ## CallExpr from typing import overload @overload def f(x: int) -> str: pass @overload def f(x: str) -> int: pass def f(x): pass f(1) f('') [out] CallExpr(8) : builtins.str CallExpr(9) : builtins.int [case testOverlappingOverloadedFunctionType] ## CallExpr from typing import overload, Any class A: pass class B(A): pass @overload def f(x: B) -> B: pass @overload def f(x: A) -> A: pass def f(x) -> Any: pass a = None # type: A 
b = None # type: B f(a) f(b) [out] CallExpr(14) : A CallExpr(15) : B [case testOverloadedErasedType] from typing import Callable from typing import List from typing import overload from typing import TypeVar T = TypeVar("T") V = TypeVar("V") def fun(s: int) -> int: pass def m(fun: Callable[[T], V], iter: List[T]) -> None: pass nums = [1] # type: List[int] m(fun, nums) [builtins fixtures/list.pyi] [out] IntExpr(13) : Literal[1]? ListExpr(13) : builtins.list[builtins.int] CallExpr(14) : None NameExpr(14) : def (s: builtins.int) -> builtins.int NameExpr(14) : def (fun: def (builtins.int) -> builtins.int, iter: builtins.list[builtins.int]) NameExpr(15) : builtins.list[builtins.int] -- Special cases -- ------------- [case testImplicitDataAttributeInit] ## NameExpr import typing class A: def __init__(self) -> None: self.x = ( A()) [out] NameExpr(5) : A NameExpr(6) : def () -> A [case testListMultiplicationInContext] ## ListExpr|OpExpr|IntExpr from typing import List a = [None] * 3 # type: List[str] [builtins fixtures/list.pyi] [out] IntExpr(3) : Literal[3]? ListExpr(3) : builtins.list[builtins.str] OpExpr(3) : builtins.list[builtins.str] [case testStringFormatting] ## .* '%d' % 1 [builtins fixtures/primitives.pyi] [out] IntExpr(2) : Literal[1]? OpExpr(2) : builtins.str StrExpr(2) : Literal['%d']? -- TODO -- -- test expressions -- list literal -- tuple literal -- unary minus -- indexing -- super expression -- more complex lambda (multiple arguments etc.) -- list comprehension -- generator expression -- overloads -- other things -- type inference -- default argument value -- for loop variable -- exception variable -- varargs -- generics -- explicit types -- type of 'None' (currently stripped, but sometimes we may want to dump it)